/*******************************************************************************
 * Copyright (c) 2008-2016 The Khronos Group Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and/or associated documentation files (the
 * "Materials"), to deal in the Materials without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Materials, and to
 * permit persons to whom the Materials are furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Materials.
 *
 * MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS
 * KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS
 * SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT
 *    https://www.khronos.org/registry/
 *
 * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
 ******************************************************************************/

/*! \file
 *
 *   \brief C++ bindings for OpenCL 1.0 (rev 48), OpenCL 1.1 (rev 33),
 *       OpenCL 1.2 (rev 15), OpenCL 2.0 (rev 29), OpenCL 2.1 (rev 17),
 *       and OpenCL 2.2 (V2.2-11).
 *   \author Lee Howes and Bruce Merry
 *
 *   Derived from the OpenCL 1.x C++ bindings written by
 *   Benedict R. Gaster, Laurent Morichetti and Lee Howes
 *   With additions and fixes from:
 *       Brian Cole, March 3rd 2010 and April 2012
 *       Matt Gruenke, April 2012.
 *       Bruce Merry, February 2013.
 *       Tom Deakin and Simon McIntosh-Smith, July 2013
 *       James Price, 2015-
 *   \version 2.2.0
 *   \date 2019-09-18
 *
 *   Optional extension support
 *
 *       cl_ext_device_fission
 *           #define CL_HPP_USE_CL_DEVICE_FISSION
 *       cl_khr_d3d10_sharing
 *           #define CL_HPP_USE_DX_INTEROP
 *       cl_khr_sub_groups
 *           #define CL_HPP_USE_CL_SUB_GROUPS_KHR
 *       cl_khr_image2d_from_buffer
 *           #define CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR
 *
 *   Doxygen documentation for this header is available here:
 *
 *       http://khronosgroup.github.io/OpenCL-CLHPP/
 *
 *   The latest version of this header can be found on the GitHub releases page:
 *
 *       https://github.com/KhronosGroup/OpenCL-CLHPP/releases
 *
 *   Bugs and patches can be submitted to the GitHub repository:
 *
 *       https://github.com/KhronosGroup/OpenCL-CLHPP
 */
/*! \mainpage
 * \section intro Introduction
 * For many large applications C++ is the language of choice and so it seems
 * reasonable to define C++ bindings for OpenCL.
 *
 * The interface is contained within a single C++ header file \em cl2.hpp and all
 * definitions are contained within the namespace \em cl. There is no additional
 * requirement to include \em cl.h in order to use either the C++ or original C
 * bindings; it is enough to simply include \em cl2.hpp.
 *
 * The bindings themselves are lightweight and correspond closely to the
 * underlying C API. Using the C++ bindings introduces no additional execution
 * overhead.
 *
 * There are numerous compatibility, portability and memory management
 * fixes in the new header as well as additional OpenCL 2.0 features.
 * As a result the header is not directly backward compatible and for this
 * reason we release it as cl2.hpp rather than a new version of cl.hpp.
 *
 *
 * \section compatibility Compatibility
 * Due to the evolution of the underlying OpenCL API, the 2.0 C++ bindings
 * include an updated approach to defining supported feature versions
 * and the range of valid underlying OpenCL runtime versions supported.
 *
 * The combination of the preprocessor macros CL_HPP_TARGET_OPENCL_VERSION and
 * CL_HPP_MINIMUM_OPENCL_VERSION controls this range. These are three-digit
 * decimal values representing OpenCL runtime versions. The default for
 * the target is 220, representing OpenCL 2.2, and the minimum is
 * defined as 200, so by default the header assumes an OpenCL 2.0 or later runtime.
 * If backward compatibility with a 1.2 runtime is required, the minimum
 * version may be set to 120.
 *
 * Note that this is a compile-time setting, and so affects linking against
 * a particular SDK version rather than the versioning of the loaded runtime.
 *
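 * For example, to target the OpenCL 2.0 API while remaining runnable against
 * a 1.2 runtime, both macros can be defined before the header is included
 * (a minimal sketch; the include path may vary between SDKs):
 *
 * \code
 #define CL_HPP_MINIMUM_OPENCL_VERSION 120
 #define CL_HPP_TARGET_OPENCL_VERSION 200
 #include <CL/cl2.hpp>
 * \endcode
 *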
 * The earlier versions of the header included basic vector and string
 * classes based loosely on STL versions. These were difficult to
 * maintain and very rarely used. For the 2.0 header we now assume
 * the presence of the standard library unless requested otherwise.
 * We use std::array, std::vector, std::shared_ptr and std::string
 * throughout to safely manage memory and reduce the chance of a
 * recurrence of earlier memory management bugs.
 *
 * These classes are used through typedefs in the cl namespace:
 * cl::array, cl::vector, cl::pointer and cl::string.
 * In addition cl::allocate_pointer forwards to std::allocate_shared
 * by default.
 * In all cases these standard library classes can be replaced with
 * custom interface-compatible versions using the CL_HPP_NO_STD_ARRAY,
 * CL_HPP_NO_STD_VECTOR, CL_HPP_NO_STD_UNIQUE_PTR and
 * CL_HPP_NO_STD_STRING macros, as sketched below.
 *
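 * For instance, a hypothetical interface-compatible string type could be
 * substituted for cl::string as follows (a sketch only; my_string.hpp and
 * my::string are placeholder names, not part of these bindings):
 *
 * \code
 #define CL_HPP_NO_STD_STRING
 #include "my_string.hpp"  // hypothetical header providing my::string
 namespace cl {
     // The replacement must be interface-compatible with std::string
     // for the operations the bindings use.
     using string = my::string;
 }
 #include <CL/cl2.hpp>
 * \endcode
 *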
 * The OpenCL 1.x versions of the C++ bindings included a size_t wrapper
 * class to interface with kernel enqueue. This caused unpleasant interactions
 * with the standard size_t declaration and led to namespacing bugs.
 * In the 2.0 version we have replaced this with a std::array-based interface.
 * However, the old behaviour can be regained for backward compatibility
 * using the CL_HPP_ENABLE_SIZE_T_COMPATIBILITY macro.
 *
 * Finally, the program construction interface used a clumsy vector-of-pairs
 * design in the earlier versions. We have replaced that with a cleaner
 * vector-of-vectors and vector-of-strings design, sketched below. However,
 * for backward compatibility the old behaviour can be regained with the
 * CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY macro.
 *
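 * For example, a program can be constructed directly from a vector of source
 * strings (a minimal sketch; kernelSource1 and kernelSource2 are assumed to
 * hold OpenCL C source, as in the full example at the end of this page):
 *
 * \code
 std::vector<std::string> sources{kernelSource1, kernelSource2};
 cl::Program program(sources);
 * \endcode
 *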
 * In OpenCL 2.0, OpenCL C is not entirely backward compatible with
 * earlier versions. As a result a flag must be passed to the OpenCL C
 * compiler to request OpenCL 2.0 compilation of kernels, with 1.2 as
 * the default in the absence of the flag.
 * In some cases the C++ bindings automatically compile code for ease.
 * For those cases the compilation defaults to OpenCL C 2.0.
 * If this is not wanted, the CL_HPP_CL_1_2_DEFAULT_BUILD macro may
 * be specified to assume 1.2 compilation.
 * If more fine-grained decisions on a per-kernel basis are required
 * then explicit build operations that take the flag should be used,
 * as in the sketch below.
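 *
 * For example, OpenCL C 2.0 can be requested explicitly for a single program
 * while other programs keep the default (a minimal sketch; sourceString is
 * assumed to hold OpenCL C source):
 *
 * \code
 cl::Program program(sourceString);
 program.build("-cl-std=CL2.0");   // request OpenCL C 2.0 for this program only
 * \endcode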
 *
 *
 * \section parameterization Parameters
 * This header may be parameterized by a set of preprocessor macros; a
 * combined configuration sketch follows the list.
 *
 * - CL_HPP_TARGET_OPENCL_VERSION
 *
 *   Defines the target OpenCL runtime version to build the header
 *   against. Defaults to 220, representing OpenCL 2.2.
 *
 * - CL_HPP_NO_STD_STRING
 *
 *   Do not use the standard library string class. cl::string is not
 *   defined and may be defined by the user before cl2.hpp is
 *   included.
 *
 * - CL_HPP_NO_STD_VECTOR
 *
 *   Do not use the standard library vector class. cl::vector is not
 *   defined and may be defined by the user before cl2.hpp is
 *   included.
 *
 * - CL_HPP_NO_STD_ARRAY
 *
 *   Do not use the standard library array class. cl::array is not
 *   defined and may be defined by the user before cl2.hpp is
 *   included.
 *
 * - CL_HPP_NO_STD_UNIQUE_PTR
 *
 *   Do not use the standard library unique_ptr class. cl::pointer and
 *   the cl::allocate_pointer functions are not defined and may be
 *   defined by the user before cl2.hpp is included.
 *
 * - CL_HPP_ENABLE_DEVICE_FISSION
 *
 *   Enables device fission for OpenCL 1.2 platforms.
 *
 * - CL_HPP_ENABLE_EXCEPTIONS
 *
 *   Enable exceptions for use in the C++ bindings header. This is the
 *   preferred error handling mechanism but is not required.
 *
 * - CL_HPP_ENABLE_SIZE_T_COMPATIBILITY
 *
 *   Backward-compatibility option to support the cl.hpp-style size_t
 *   class in place of the updated std::array-based interface. Note that
 *   in this case the new size_t class is placed in the cl::compatibility
 *   namespace and thus requires an additional using declaration for
 *   direct backward compatibility.
 *
 * - CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY
 *
 *   Enable the older vector-of-pairs interface for construction of
 *   programs.
 *
 * - CL_HPP_CL_1_2_DEFAULT_BUILD
 *
 *   Default to OpenCL C 1.2 compilation rather than OpenCL C 2.0;
 *   applies to cl::Program construction and other program
 *   build variants.
 *
 * - CL_HPP_USE_CL_SUB_GROUPS_KHR
 *
 *   Enable the cl_khr_subgroups extension.
 *
 * - CL_HPP_USE_IL_KHR
 *
 *   Enable the cl_khr_il_program extension.
 *
 *
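 * A typical parameterization combining several of the macros above might
 * look like the following (a sketch only; pick the values appropriate to
 * the installed SDK and the runtimes that must be supported):
 *
 * \code
 #define CL_HPP_ENABLE_EXCEPTIONS
 #define CL_HPP_MINIMUM_OPENCL_VERSION 120
 #define CL_HPP_TARGET_OPENCL_VERSION 200
 #define CL_HPP_CL_1_2_DEFAULT_BUILD
 #include <CL/cl2.hpp>
 * \endcode
 *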
 * \section example Example
 *
 * The following example shows a general use case for the C++
 * bindings, including support for the optional exception feature and
 * also the supplied vector and string classes; see the preceding sections
 * for descriptions of these features.
 *
 * \code
    #define CL_HPP_ENABLE_EXCEPTIONS
    #define CL_HPP_TARGET_OPENCL_VERSION 200

    #include <CL/cl2.hpp>
    #include <iostream>
    #include <vector>
    #include <memory>
    #include <algorithm>

    const int numElements = 32;

    int main(void)
    {
        // Filter for a 2.0 platform and set it as the default
        std::vector<cl::Platform> platforms;
        cl::Platform::get(&platforms);
        cl::Platform plat;
        for (auto &p : platforms) {
            std::string platver = p.getInfo<CL_PLATFORM_VERSION>();
            if (platver.find("OpenCL 2.") != std::string::npos) {
                plat = p;
            }
        }
        if (plat() == 0) {
            std::cout << "No OpenCL 2.0 platform found.";
            return -1;
        }

        cl::Platform newP = cl::Platform::setDefault(plat);
        if (newP != plat) {
            std::cout << "Error setting default platform.";
            return -1;
        }

        // Use C++11 raw string literals for kernel source code
        std::string kernel1{R"CLC(
            global int globalA;
            kernel void updateGlobal()
            {
              globalA = 75;
            }
        )CLC"};
        std::string kernel2{R"CLC(
            typedef struct { global int *bar; } Foo;
            kernel void vectorAdd(global const Foo* aNum, global const int *inputA, global const int *inputB,
                                  global int *output, int val, write_only pipe int outPipe, queue_t childQueue)
            {
              output[get_global_id(0)] = inputA[get_global_id(0)] + inputB[get_global_id(0)] + val + *(aNum->bar);
              write_pipe(outPipe, &val);
              queue_t default_queue = get_default_queue();
              ndrange_t ndrange = ndrange_1D(get_global_size(0)/2, get_global_size(0)/2);

              // Have a child kernel write into third quarter of output
              enqueue_kernel(default_queue, CLK_ENQUEUE_FLAGS_WAIT_KERNEL, ndrange,
                ^{
                    output[get_global_size(0)*2 + get_global_id(0)] =
                      inputA[get_global_size(0)*2 + get_global_id(0)] + inputB[get_global_size(0)*2 + get_global_id(0)] + globalA;
                });

              // Have a child kernel write into last quarter of output
              enqueue_kernel(childQueue, CLK_ENQUEUE_FLAGS_WAIT_KERNEL, ndrange,
                ^{
                    output[get_global_size(0)*3 + get_global_id(0)] =
                      inputA[get_global_size(0)*3 + get_global_id(0)] + inputB[get_global_size(0)*3 + get_global_id(0)] + globalA + 2;
                });
            }
        )CLC"};

        // New simpler string interface style
        std::vector<std::string> programStrings {kernel1, kernel2};

        cl::Program vectorAddProgram(programStrings);
        try {
            vectorAddProgram.build("-cl-std=CL2.0");
        }
        catch (...) {
            // Print build info for all devices
            cl_int buildErr = CL_SUCCESS;
            auto buildInfo = vectorAddProgram.getBuildInfo<CL_PROGRAM_BUILD_LOG>(&buildErr);
            for (auto &pair : buildInfo) {
                std::cerr << pair.second << std::endl << std::endl;
            }

            return 1;
        }

        typedef struct { int *bar; } Foo;

        // Get and run kernel that initializes the program-scope global
        // A test for kernels that take no arguments
        auto program2Kernel =
            cl::KernelFunctor<>(vectorAddProgram, "updateGlobal");
        program2Kernel(
            cl::EnqueueArgs(
                cl::NDRange(1)));

        //////////////////
        // SVM allocations

        auto anSVMInt = cl::allocate_svm<int, cl::SVMTraitCoarse<>>();
        *anSVMInt = 5;
        cl::SVMAllocator<Foo, cl::SVMTraitCoarse<cl::SVMTraitReadOnly<>>> svmAllocReadOnly;
        auto fooPointer = cl::allocate_pointer<Foo>(svmAllocReadOnly);
        fooPointer->bar = anSVMInt.get();
        cl::SVMAllocator<int, cl::SVMTraitCoarse<>> svmAlloc;
        std::vector<int, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>> inputA(numElements, 1, svmAlloc);
        cl::coarse_svm_vector<int> inputB(numElements, 2, svmAlloc);
        // SVM vector referenced by the map/unmap calls below
        cl::coarse_svm_vector<int> output2(numElements, 0xdeadbeef, svmAlloc);

        //
        //////////////

        // Traditional cl_mem allocations
        std::vector<int> output(numElements, 0xdeadbeef);
        cl::Buffer outputBuffer(begin(output), end(output), false);
        cl::Pipe aPipe(sizeof(cl_int), numElements / 2);

        // Default command queue, also passed in as a parameter
        cl::DeviceCommandQueue defaultDeviceQueue = cl::DeviceCommandQueue::makeDefault(
            cl::Context::getDefault(), cl::Device::getDefault());

        auto vectorAddKernel =
            cl::KernelFunctor<
                decltype(fooPointer)&,
                int*,
                cl::coarse_svm_vector<int>&,
                cl::Buffer,
                int,
                cl::Pipe&,
                cl::DeviceCommandQueue
                >(vectorAddProgram, "vectorAdd");

        // Ensure that the additional SVM pointer is available to the kernel
        // This one was not passed as a parameter
        vectorAddKernel.setSVMPointers(anSVMInt);

        // Hand control of coarse allocations to runtime
        cl::enqueueUnmapSVM(anSVMInt);
        cl::enqueueUnmapSVM(fooPointer);
        cl::unmapSVM(inputB);
        cl::unmapSVM(output2);

        cl_int error;
        vectorAddKernel(
            cl::EnqueueArgs(
                cl::NDRange(numElements/2),
                cl::NDRange(numElements/2)),
            fooPointer,
            inputA.data(),
            inputB,
            outputBuffer,
            3,
            aPipe,
            defaultDeviceQueue,
            error
            );

        cl::copy(outputBuffer, begin(output), end(output));
        // Grab the SVM output vector using a map
        cl::mapSVM(output2);

        cl::Device d = cl::Device::getDefault();

        std::cout << "Output:\n";
        for (int i = 0; i < numElements; ++i) {
            std::cout << "\t" << output[i] << "\n";
        }
        std::cout << "\n\n";

        return 0;
    }
 *
 * \endcode
 *
 */
#ifndef CL_HPP_
#define CL_HPP_

/* Handle deprecated preprocessor definitions. In each case, we only check for
 * the old name if the new name is not defined, so that user code can define
 * both and hence work with either version of the bindings.
 */
#if !defined(CL_HPP_USE_DX_INTEROP) && defined(USE_DX_INTEROP)
# pragma message("cl2.hpp: USE_DX_INTEROP is deprecated. Define CL_HPP_USE_DX_INTEROP instead")
# define CL_HPP_USE_DX_INTEROP
#endif
#if !defined(CL_HPP_USE_CL_DEVICE_FISSION) && defined(USE_CL_DEVICE_FISSION)
# pragma message("cl2.hpp: USE_CL_DEVICE_FISSION is deprecated. Define CL_HPP_USE_CL_DEVICE_FISSION instead")
# define CL_HPP_USE_CL_DEVICE_FISSION
#endif
#if !defined(CL_HPP_ENABLE_EXCEPTIONS) && defined(__CL_ENABLE_EXCEPTIONS)
# pragma message("cl2.hpp: __CL_ENABLE_EXCEPTIONS is deprecated. Define CL_HPP_ENABLE_EXCEPTIONS instead")
# define CL_HPP_ENABLE_EXCEPTIONS
#endif
#if !defined(CL_HPP_NO_STD_VECTOR) && defined(__NO_STD_VECTOR)
# pragma message("cl2.hpp: __NO_STD_VECTOR is deprecated. Define CL_HPP_NO_STD_VECTOR instead")
# define CL_HPP_NO_STD_VECTOR
#endif
#if !defined(CL_HPP_NO_STD_STRING) && defined(__NO_STD_STRING)
# pragma message("cl2.hpp: __NO_STD_STRING is deprecated. Define CL_HPP_NO_STD_STRING instead")
# define CL_HPP_NO_STD_STRING
#endif
#if defined(VECTOR_CLASS)
# pragma message("cl2.hpp: VECTOR_CLASS is deprecated. Alias cl::vector instead")
#endif
#if defined(STRING_CLASS)
# pragma message("cl2.hpp: STRING_CLASS is deprecated. Alias cl::string instead.")
#endif
#if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS) && defined(__CL_USER_OVERRIDE_ERROR_STRINGS)
# pragma message("cl2.hpp: __CL_USER_OVERRIDE_ERROR_STRINGS is deprecated. Define CL_HPP_USER_OVERRIDE_ERROR_STRINGS instead")
# define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
#endif

/* Warn about features that are no longer supported
 */
#if defined(__USE_DEV_VECTOR)
# pragma message("cl2.hpp: __USE_DEV_VECTOR is no longer supported. Expect compilation errors")
#endif
#if defined(__USE_DEV_STRING)
# pragma message("cl2.hpp: __USE_DEV_STRING is no longer supported. Expect compilation errors")
#endif

/* Detect which version to target */
#if !defined(CL_HPP_TARGET_OPENCL_VERSION)
# pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not defined. It will default to 220 (OpenCL 2.2)")
# define CL_HPP_TARGET_OPENCL_VERSION 220
#endif
#if CL_HPP_TARGET_OPENCL_VERSION != 100 && \
    CL_HPP_TARGET_OPENCL_VERSION != 110 && \
    CL_HPP_TARGET_OPENCL_VERSION != 120 && \
    CL_HPP_TARGET_OPENCL_VERSION != 200 && \
    CL_HPP_TARGET_OPENCL_VERSION != 210 && \
    CL_HPP_TARGET_OPENCL_VERSION != 220
# pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120, 200, 210 or 220). It will be set to 220")
# undef CL_HPP_TARGET_OPENCL_VERSION
# define CL_HPP_TARGET_OPENCL_VERSION 220
#endif

/* Forward target OpenCL version to C headers if necessary */
#if defined(CL_TARGET_OPENCL_VERSION)
/* Warn if prior definition of CL_TARGET_OPENCL_VERSION is lower than
 * requested C++ bindings version */
#if CL_TARGET_OPENCL_VERSION < CL_HPP_TARGET_OPENCL_VERSION
# pragma message("CL_TARGET_OPENCL_VERSION is already defined and is lower than CL_HPP_TARGET_OPENCL_VERSION")
#endif
#else
# define CL_TARGET_OPENCL_VERSION CL_HPP_TARGET_OPENCL_VERSION
#endif

#if !defined(CL_HPP_MINIMUM_OPENCL_VERSION)
# define CL_HPP_MINIMUM_OPENCL_VERSION 200
#endif
#if CL_HPP_MINIMUM_OPENCL_VERSION != 100 && \
    CL_HPP_MINIMUM_OPENCL_VERSION != 110 && \
    CL_HPP_MINIMUM_OPENCL_VERSION != 120 && \
    CL_HPP_MINIMUM_OPENCL_VERSION != 200 && \
    CL_HPP_MINIMUM_OPENCL_VERSION != 210 && \
    CL_HPP_MINIMUM_OPENCL_VERSION != 220
# pragma message("cl2.hpp: CL_HPP_MINIMUM_OPENCL_VERSION is not a valid value (100, 110, 120, 200, 210 or 220). It will be set to 100")
  453. # undef CL_HPP_MINIMUM_OPENCL_VERSION
  454. # define CL_HPP_MINIMUM_OPENCL_VERSION 100
  455. #endif
  456. #if CL_HPP_MINIMUM_OPENCL_VERSION > CL_HPP_TARGET_OPENCL_VERSION
  457. # error "CL_HPP_MINIMUM_OPENCL_VERSION must not be greater than CL_HPP_TARGET_OPENCL_VERSION"
  458. #endif
  459. #if CL_HPP_MINIMUM_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS)
  460. # define CL_USE_DEPRECATED_OPENCL_1_0_APIS
  461. #endif
  462. #if CL_HPP_MINIMUM_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  463. # define CL_USE_DEPRECATED_OPENCL_1_1_APIS
  464. #endif
  465. #if CL_HPP_MINIMUM_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
  466. # define CL_USE_DEPRECATED_OPENCL_1_2_APIS
  467. #endif
  468. #if CL_HPP_MINIMUM_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS)
  469. # define CL_USE_DEPRECATED_OPENCL_2_0_APIS
  470. #endif
  471. #if CL_HPP_MINIMUM_OPENCL_VERSION <= 210 && !defined(CL_USE_DEPRECATED_OPENCL_2_1_APIS)
  472. # define CL_USE_DEPRECATED_OPENCL_2_1_APIS
  473. #endif
  474. #if CL_HPP_MINIMUM_OPENCL_VERSION <= 220 && !defined(CL_USE_DEPRECATED_OPENCL_2_2_APIS)
  475. # define CL_USE_DEPRECATED_OPENCL_2_2_APIS
  476. #endif
  477. #ifdef _WIN32
  478. #include <malloc.h>
  479. #if defined(CL_HPP_USE_DX_INTEROP)
  480. #include <CL/cl_d3d10.h>
  481. #include <CL/cl_dx9_media_sharing.h>
  482. #endif
  483. #endif // _WIN32
  484. #if defined(_MSC_VER)
  485. #include <intrin.h>
  486. #endif // _MSC_VER
  487. // Check for a valid C++ version
  488. // Need to do both tests here because for some reason __cplusplus is not
  489. // updated in visual studio
  490. #if (!defined(_MSC_VER) && __cplusplus < 201103L) || (defined(_MSC_VER) && _MSC_VER < 1700)
  491. #error Visual studio 2013 or another C++11-supporting compiler required
  492. #endif
  493. //
  494. #if defined(CL_HPP_USE_CL_DEVICE_FISSION) || defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
  495. #include <CL/cl_ext.h>
  496. #endif
  497. #if defined(__APPLE__) || defined(__MACOSX)
  498. #include <OpenCL/opencl.h>
  499. #else
  500. #include <CL/opencl.h>
  501. #endif // !__APPLE__
  502. #if (__cplusplus >= 201103L)
  503. #define CL_HPP_NOEXCEPT_ noexcept
  504. #else
  505. #define CL_HPP_NOEXCEPT_
  506. #endif
  507. #if defined(_MSC_VER)
  508. # define CL_HPP_DEFINE_STATIC_MEMBER_ __declspec(selectany)
  509. #elif defined(__MINGW32__)
  510. # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((selectany))
  511. #else
  512. # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((weak))
  513. #endif // !_MSC_VER
  514. // Define deprecated prefixes and suffixes to ensure compilation
  515. // in case they are not pre-defined
  516. #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
  517. #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
  518. #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
  519. #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
  520. #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
  521. #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
  522. #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
  523. #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED
  524. #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
  525. #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
  526. #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
  527. #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
  528. #if !defined(CL_CALLBACK)
  529. #define CL_CALLBACK
  530. #endif //CL_CALLBACK
  531. #include <utility>
  532. #include <limits>
  533. #include <iterator>
  534. #include <mutex>
  535. #include <cstring>
  536. #include <functional>
  537. // Define a size_type to represent a correctly resolved size_t
  538. #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
  539. namespace cl {
  540. using size_type = ::size_t;
  541. } // namespace cl
  542. #else // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
  543. namespace cl {
  544. using size_type = size_t;
  545. } // namespace cl
  546. #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
  547. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  548. #include <exception>
  549. #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  550. #if !defined(CL_HPP_NO_STD_VECTOR)
  551. #include <vector>
  552. namespace cl {
  553. template < class T, class Alloc = std::allocator<T> >
  554. using vector = std::vector<T, Alloc>;
  555. } // namespace cl
  556. #endif // #if !defined(CL_HPP_NO_STD_VECTOR)
  557. #if !defined(CL_HPP_NO_STD_STRING)
  558. #include <string>
  559. namespace cl {
  560. using string = std::string;
  561. } // namespace cl
  562. #endif // #if !defined(CL_HPP_NO_STD_STRING)
  563. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  564. #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
  565. #include <memory>
  566. namespace cl {
  567. // Replace unique_ptr and allocate_pointer for internal use
  568. // to allow user to replace them
  569. template<class T, class D>
  570. using pointer = std::unique_ptr<T, D>;
  571. } // namespace cl
  572. #endif
  573. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  574. #if !defined(CL_HPP_NO_STD_ARRAY)
  575. #include <array>
  576. namespace cl {
  577. template < class T, size_type N >
  578. using array = std::array<T, N>;
  579. } // namespace cl
  580. #endif // #if !defined(CL_HPP_NO_STD_ARRAY)
  581. // Define size_type appropriately to allow backward-compatibility
  582. // use of the old size_t interface class
  583. #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
  584. namespace cl {
  585. namespace compatibility {
  586. /*! \brief class used to interface between C++ and
  587. * OpenCL C calls that require arrays of size_t values, whose
  588. * size is known statically.
  589. */
  590. template <int N>
  591. class size_t
  592. {
  593. private:
  594. size_type data_[N];
  595. public:
  596. //! \brief Initialize size_t to all 0s
  597. size_t()
  598. {
  599. for (int i = 0; i < N; ++i) {
  600. data_[i] = 0;
  601. }
  602. }
  603. size_t(const array<size_type, N> &rhs)
  604. {
  605. for (int i = 0; i < N; ++i) {
  606. data_[i] = rhs[i];
  607. }
  608. }
  609. size_type& operator[](int index)
  610. {
  611. return data_[index];
  612. }
  613. const size_type& operator[](int index) const
  614. {
  615. return data_[index];
  616. }
  617. //! \brief Conversion operator to T*.
  618. operator size_type* () { return data_; }
  619. //! \brief Conversion operator to const T*.
  620. operator const size_type* () const { return data_; }
  621. operator array<size_type, N>() const
  622. {
  623. array<size_type, N> ret;
  624. for (int i = 0; i < N; ++i) {
  625. ret[i] = data_[i];
  626. }
  627. return ret;
  628. }
  629. };
  630. } // namespace compatibility
  631. template<int N>
  632. using size_t = compatibility::size_t<N>;
  633. } // namespace cl
  634. #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
  635. // Helper alias to avoid confusing the macros
  636. namespace cl {
  637. namespace detail {
  638. using size_t_array = array<size_type, 3>;
  639. } // namespace detail
  640. } // namespace cl
  641. /*! \namespace cl
  642. *
  643. * \brief The OpenCL C++ bindings are defined within this namespace.
  644. *
  645. */
  646. namespace cl {
  647. class Memory;
  648. #define CL_HPP_INIT_CL_EXT_FCN_PTR_(name) \
  649. if (!pfn_##name) { \
  650. pfn_##name = (PFN_##name) \
  651. clGetExtensionFunctionAddress(#name); \
  652. if (!pfn_##name) { \
  653. } \
  654. }
  655. #define CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, name) \
  656. if (!pfn_##name) { \
  657. pfn_##name = (PFN_##name) \
  658. clGetExtensionFunctionAddressForPlatform(platform, #name); \
  659. if (!pfn_##name) { \
  660. } \
  661. }
  662. class Program;
  663. class Device;
  664. class Context;
  665. class CommandQueue;
  666. class DeviceCommandQueue;
  667. class Memory;
  668. class Buffer;
  669. class Pipe;
  670. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  671. /*! \brief Exception class
  672. *
  673. * This may be thrown by API functions when CL_HPP_ENABLE_EXCEPTIONS is defined.
  674. */
  675. class Error : public std::exception
  676. {
  677. private:
  678. cl_int err_;
  679. const char * errStr_;
  680. public:
  681. /*! \brief Create a new CL error exception for a given error code
  682. * and corresponding message.
  683. *
  684. * \param err error code value.
  685. *
  686. * \param errStr a descriptive string that must remain in scope until
  687. * handling of the exception has concluded. If set, it
  688. * will be returned by what().
  689. */
  690. Error(cl_int err, const char * errStr = NULL) : err_(err), errStr_(errStr)
  691. {}
  692. ~Error() throw() {}
  693. /*! \brief Get error string associated with exception
  694. *
  695. * \return A memory pointer to the error message string.
  696. */
  697. virtual const char * what() const throw ()
  698. {
  699. if (errStr_ == NULL) {
  700. return "empty";
  701. }
  702. else {
  703. return errStr_;
  704. }
  705. }
  706. /*! \brief Get error code associated with exception
  707. *
  708. * \return The error code.
  709. */
  710. cl_int err(void) const { return err_; }
  711. };
  712. #define CL_HPP_ERR_STR_(x) #x
  713. #else
  714. #define CL_HPP_ERR_STR_(x) NULL
  715. #endif // CL_HPP_ENABLE_EXCEPTIONS
  716. namespace detail
  717. {
  718. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  719. static inline cl_int errHandler (
  720. cl_int err,
  721. const char * errStr = NULL)
  722. {
  723. if (err != CL_SUCCESS) {
  724. throw Error(err, errStr);
  725. }
  726. return err;
  727. }
  728. #else
  729. static inline cl_int errHandler (cl_int err, const char * errStr = NULL)
  730. {
  731. (void) errStr; // suppress unused variable warning
  732. return err;
  733. }
  734. #endif // CL_HPP_ENABLE_EXCEPTIONS
  735. }
  736. //! \cond DOXYGEN_DETAIL
  737. #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
  738. #define __GET_DEVICE_INFO_ERR CL_HPP_ERR_STR_(clGetDeviceInfo)
  739. #define __GET_PLATFORM_INFO_ERR CL_HPP_ERR_STR_(clGetPlatformInfo)
  740. #define __GET_DEVICE_IDS_ERR CL_HPP_ERR_STR_(clGetDeviceIDs)
  741. #define __GET_PLATFORM_IDS_ERR CL_HPP_ERR_STR_(clGetPlatformIDs)
  742. #define __GET_CONTEXT_INFO_ERR CL_HPP_ERR_STR_(clGetContextInfo)
  743. #define __GET_EVENT_INFO_ERR CL_HPP_ERR_STR_(clGetEventInfo)
  744. #define __GET_EVENT_PROFILE_INFO_ERR CL_HPP_ERR_STR_(clGetEventProfileInfo)
  745. #define __GET_MEM_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetMemObjectInfo)
  746. #define __GET_IMAGE_INFO_ERR CL_HPP_ERR_STR_(clGetImageInfo)
  747. #define __GET_SAMPLER_INFO_ERR CL_HPP_ERR_STR_(clGetSamplerInfo)
  748. #define __GET_KERNEL_INFO_ERR CL_HPP_ERR_STR_(clGetKernelInfo)
  749. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  750. #define __GET_KERNEL_ARG_INFO_ERR CL_HPP_ERR_STR_(clGetKernelArgInfo)
  751. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  752. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  753. #define __GET_KERNEL_SUB_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelSubGroupInfo)
  754. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  755. #define __GET_KERNEL_WORK_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelWorkGroupInfo)
  756. #define __GET_PROGRAM_INFO_ERR CL_HPP_ERR_STR_(clGetProgramInfo)
  757. #define __GET_PROGRAM_BUILD_INFO_ERR CL_HPP_ERR_STR_(clGetProgramBuildInfo)
  758. #define __GET_COMMAND_QUEUE_INFO_ERR CL_HPP_ERR_STR_(clGetCommandQueueInfo)
  759. #define __CREATE_CONTEXT_ERR CL_HPP_ERR_STR_(clCreateContext)
  760. #define __CREATE_CONTEXT_FROM_TYPE_ERR CL_HPP_ERR_STR_(clCreateContextFromType)
  761. #define __GET_SUPPORTED_IMAGE_FORMATS_ERR CL_HPP_ERR_STR_(clGetSupportedImageFormats)
  762. #define __CREATE_BUFFER_ERR CL_HPP_ERR_STR_(clCreateBuffer)
  763. #define __COPY_ERR CL_HPP_ERR_STR_(cl::copy)
  764. #define __CREATE_SUBBUFFER_ERR CL_HPP_ERR_STR_(clCreateSubBuffer)
  765. #define __CREATE_GL_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
  766. #define __CREATE_GL_RENDER_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
  767. #define __GET_GL_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetGLObjectInfo)
  768. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  769. #define __CREATE_IMAGE_ERR CL_HPP_ERR_STR_(clCreateImage)
  770. #define __CREATE_GL_TEXTURE_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture)
  771. #define __IMAGE_DIMENSION_ERR CL_HPP_ERR_STR_(Incorrect image dimensions)
  772. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  773. #define __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR CL_HPP_ERR_STR_(clSetMemObjectDestructorCallback)
  774. #define __CREATE_USER_EVENT_ERR CL_HPP_ERR_STR_(clCreateUserEvent)
  775. #define __SET_USER_EVENT_STATUS_ERR CL_HPP_ERR_STR_(clSetUserEventStatus)
  776. #define __SET_EVENT_CALLBACK_ERR CL_HPP_ERR_STR_(clSetEventCallback)
  777. #define __WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clWaitForEvents)
  778. #define __CREATE_KERNEL_ERR CL_HPP_ERR_STR_(clCreateKernel)
  779. #define __SET_KERNEL_ARGS_ERR CL_HPP_ERR_STR_(clSetKernelArg)
  780. #define __CREATE_PROGRAM_WITH_SOURCE_ERR CL_HPP_ERR_STR_(clCreateProgramWithSource)
  781. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  782. #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
  783. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  784. #define __CREATE_PROGRAM_WITH_BINARY_ERR CL_HPP_ERR_STR_(clCreateProgramWithBinary)
  785. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  786. #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
  787. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
  788. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  789. #define __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR CL_HPP_ERR_STR_(clCreateProgramWithBuiltInKernels)
  790. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  791. #define __BUILD_PROGRAM_ERR CL_HPP_ERR_STR_(clBuildProgram)
  792. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  793. #define __COMPILE_PROGRAM_ERR CL_HPP_ERR_STR_(clCompileProgram)
  794. #define __LINK_PROGRAM_ERR CL_HPP_ERR_STR_(clLinkProgram)
  795. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  796. #define __CREATE_KERNELS_IN_PROGRAM_ERR CL_HPP_ERR_STR_(clCreateKernelsInProgram)
  797. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  798. #define __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateCommandQueueWithProperties)
  799. #define __CREATE_SAMPLER_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateSamplerWithProperties)
  800. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  801. #define __SET_COMMAND_QUEUE_PROPERTY_ERR CL_HPP_ERR_STR_(clSetCommandQueueProperty)
  802. #define __ENQUEUE_READ_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueReadBuffer)
  803. #define __ENQUEUE_READ_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueReadBufferRect)
  804. #define __ENQUEUE_WRITE_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueWriteBuffer)
  805. #define __ENQUEUE_WRITE_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueWriteBufferRect)
  806. #define __ENQEUE_COPY_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyBuffer)
  807. #define __ENQEUE_COPY_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferRect)
  808. #define __ENQUEUE_FILL_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueFillBuffer)
  809. #define __ENQUEUE_READ_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueReadImage)
  810. #define __ENQUEUE_WRITE_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueWriteImage)
  811. #define __ENQUEUE_COPY_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyImage)
  812. #define __ENQUEUE_FILL_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueFillImage)
  813. #define __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyImageToBuffer)
  814. #define __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferToImage)
  815. #define __ENQUEUE_MAP_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueMapBuffer)
  816. #define __ENQUEUE_MAP_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueMapImage)
  817. #define __ENQUEUE_UNMAP_MEM_OBJECT_ERR CL_HPP_ERR_STR_(clEnqueueUnMapMemObject)
  818. #define __ENQUEUE_NDRANGE_KERNEL_ERR CL_HPP_ERR_STR_(clEnqueueNDRangeKernel)
  819. #define __ENQUEUE_NATIVE_KERNEL CL_HPP_ERR_STR_(clEnqueueNativeKernel)
  820. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  821. #define __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR CL_HPP_ERR_STR_(clEnqueueMigrateMemObjects)
  822. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  823. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  824. #define __ENQUEUE_MIGRATE_SVM_ERR CL_HPP_ERR_STR_(clEnqueueSVMMigrateMem)
  825. #define __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clSetDefaultDeviceCommandQueue)
  826. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
  827. #define __ENQUEUE_ACQUIRE_GL_ERR CL_HPP_ERR_STR_(clEnqueueAcquireGLObjects)
  828. #define __ENQUEUE_RELEASE_GL_ERR CL_HPP_ERR_STR_(clEnqueueReleaseGLObjects)
  829. #define __CREATE_PIPE_ERR CL_HPP_ERR_STR_(clCreatePipe)
  830. #define __GET_PIPE_INFO_ERR CL_HPP_ERR_STR_(clGetPipeInfo)
  831. #define __RETAIN_ERR CL_HPP_ERR_STR_(Retain Object)
  832. #define __RELEASE_ERR CL_HPP_ERR_STR_(Release Object)
  833. #define __FLUSH_ERR CL_HPP_ERR_STR_(clFlush)
  834. #define __FINISH_ERR CL_HPP_ERR_STR_(clFinish)
  835. #define __VECTOR_CAPACITY_ERR CL_HPP_ERR_STR_(Vector capacity error)
  836. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  837. #define __GET_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetHostTimer)
  838. #define __GET_DEVICE_AND_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetDeviceAndHostTimer)
  839. #endif
  840. #if CL_HPP_TARGET_OPENCL_VERSION >= 220
  841. #define __SET_PROGRAM_RELEASE_CALLBACK_ERR CL_HPP_ERR_STR_(clSetProgramReleaseCallback)
  842. #define __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR CL_HPP_ERR_STR_(clSetProgramSpecializationConstant)
  843. #endif
  844. /**
  845. * CL 1.2 version that uses device fission.
  846. */
  847. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  848. #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevices)
  849. #else
  850. #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevicesEXT)
  851. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  852. /**
  853. * Deprecated APIs for 1.2
  854. */
  855. #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  856. #define __ENQUEUE_MARKER_ERR CL_HPP_ERR_STR_(clEnqueueMarker)
  857. #define __ENQUEUE_WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clEnqueueWaitForEvents)
  858. #define __ENQUEUE_BARRIER_ERR CL_HPP_ERR_STR_(clEnqueueBarrier)
  859. #define __UNLOAD_COMPILER_ERR CL_HPP_ERR_STR_(clUnloadCompiler)
  860. #define __CREATE_GL_TEXTURE_2D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture2D)
  861. #define __CREATE_GL_TEXTURE_3D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture3D)
  862. #define __CREATE_IMAGE2D_ERR CL_HPP_ERR_STR_(clCreateImage2D)
  863. #define __CREATE_IMAGE3D_ERR CL_HPP_ERR_STR_(clCreateImage3D)
  864. #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  865. /**
  866. * Deprecated APIs for 2.0
  867. */
  868. #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
  869. #define __CREATE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clCreateCommandQueue)
  870. #define __ENQUEUE_TASK_ERR CL_HPP_ERR_STR_(clEnqueueTask)
  871. #define __CREATE_SAMPLER_ERR CL_HPP_ERR_STR_(clCreateSampler)
  872. #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  873. /**
  874. * CL 1.2 marker and barrier commands
  875. */
  876. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  877. #define __ENQUEUE_MARKER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueMarkerWithWaitList)
  878. #define __ENQUEUE_BARRIER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueBarrierWithWaitList)
  879. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  880. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  881. #define __CLONE_KERNEL_ERR CL_HPP_ERR_STR_(clCloneKernel)
  882. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
  883. #endif // CL_HPP_USER_OVERRIDE_ERROR_STRINGS
  884. //! \endcond
  885. namespace detail {
  886. // Generic getInfoHelper. The final parameter is used to guide overload
  887. // resolution: the actual parameter passed is an int, which makes this
  888. // a worse conversion sequence than a specialization that declares the
  889. // parameter as an int.
  890. template<typename Functor, typename T>
  891. inline cl_int getInfoHelper(Functor f, cl_uint name, T* param, long)
  892. {
  893. return f(name, sizeof(T), param, NULL);
  894. }
  895. // Specialized for getInfo<CL_PROGRAM_BINARIES>
  896. // Assumes that the output vector was correctly resized on the way in
  897. template <typename Func>
  898. inline cl_int getInfoHelper(Func f, cl_uint name, vector<vector<unsigned char>>* param, int)
  899. {
  900. if (name != CL_PROGRAM_BINARIES) {
  901. return CL_INVALID_VALUE;
  902. }
  903. if (param) {
  904. // Create array of pointers, calculate total size and pass pointer array in
  905. size_type numBinaries = param->size();
  906. vector<unsigned char*> binariesPointers(numBinaries);
  907. for (size_type i = 0; i < numBinaries; ++i)
  908. {
  909. binariesPointers[i] = (*param)[i].data();
  910. }
  911. cl_int err = f(name, numBinaries * sizeof(unsigned char*), binariesPointers.data(), NULL);
  912. if (err != CL_SUCCESS) {
  913. return err;
  914. }
  915. }
  916. return CL_SUCCESS;
  917. }
  918. // Specialized getInfoHelper for vector params
  919. template <typename Func, typename T>
  920. inline cl_int getInfoHelper(Func f, cl_uint name, vector<T>* param, long)
  921. {
  922. size_type required;
  923. cl_int err = f(name, 0, NULL, &required);
  924. if (err != CL_SUCCESS) {
  925. return err;
  926. }
  927. const size_type elements = required / sizeof(T);
  928. // Temporary to avoid changing param on an error
  929. vector<T> localData(elements);
  930. err = f(name, required, localData.data(), NULL);
  931. if (err != CL_SUCCESS) {
  932. return err;
  933. }
  934. if (param) {
  935. *param = std::move(localData);
  936. }
  937. return CL_SUCCESS;
  938. }
  939. /* Specialization for reference-counted types. This depends on the
  940. * existence of Wrapper<T>::cl_type, and none of the other types having the
  941. * cl_type member. Note that simplify specifying the parameter as Wrapper<T>
  942. * does not work, because when using a derived type (e.g. Context) the generic
  943. * template will provide a better match.
  944. */
  945. template <typename Func, typename T>
  946. inline cl_int getInfoHelper(
  947. Func f, cl_uint name, vector<T>* param, int, typename T::cl_type = 0)
  948. {
  949. size_type required;
  950. cl_int err = f(name, 0, NULL, &required);
  951. if (err != CL_SUCCESS) {
  952. return err;
  953. }
  954. const size_type elements = required / sizeof(typename T::cl_type);
  955. vector<typename T::cl_type> value(elements);
  956. err = f(name, required, value.data(), NULL);
  957. if (err != CL_SUCCESS) {
  958. return err;
  959. }
  960. if (param) {
  961. // Assign to convert CL type to T for each element
  962. param->resize(elements);
  963. // Assign to param, constructing with retain behaviour
  964. // to correctly capture each underlying CL object
  965. for (size_type i = 0; i < elements; i++) {
  966. (*param)[i] = T(value[i], true);
  967. }
  968. }
  969. return CL_SUCCESS;
  970. }
  971. // Specialized GetInfoHelper for string params
  972. template <typename Func>
  973. inline cl_int getInfoHelper(Func f, cl_uint name, string* param, long)
  974. {
  975. size_type required;
  976. cl_int err = f(name, 0, NULL, &required);
  977. if (err != CL_SUCCESS) {
  978. return err;
  979. }
  980. // std::string has a constant data member
  981. // a char vector does not
  982. if (required > 0) {
  983. vector<char> value(required);
  984. err = f(name, required, value.data(), NULL);
  985. if (err != CL_SUCCESS) {
  986. return err;
  987. }
  988. if (param) {
  989. param->assign(begin(value), prev(end(value)));
  990. }
  991. }
  992. else if (param) {
  993. param->assign("");
  994. }
  995. return CL_SUCCESS;
  996. }
  997. // Specialized GetInfoHelper for clsize_t params
  998. template <typename Func, size_type N>
  999. inline cl_int getInfoHelper(Func f, cl_uint name, array<size_type, N>* param, long)
  1000. {
  1001. size_type required;
  1002. cl_int err = f(name, 0, NULL, &required);
  1003. if (err != CL_SUCCESS) {
  1004. return err;
  1005. }
  1006. size_type elements = required / sizeof(size_type);
  1007. vector<size_type> value(elements, 0);
  1008. err = f(name, required, value.data(), NULL);
  1009. if (err != CL_SUCCESS) {
  1010. return err;
  1011. }
  1012. // Bound the copy with N to prevent overruns
  1013. // if passed N > than the amount copied
  1014. if (elements > N) {
  1015. elements = N;
  1016. }
  1017. for (size_type i = 0; i < elements; ++i) {
  1018. (*param)[i] = value[i];
  1019. }
  1020. return CL_SUCCESS;
  1021. }
  1022. template<typename T> struct ReferenceHandler;
  1023. /* Specialization for reference-counted types. This depends on the
  1024. * existence of Wrapper<T>::cl_type, and none of the other types having the
  1025. * cl_type member. Note that simplify specifying the parameter as Wrapper<T>
  1026. * does not work, because when using a derived type (e.g. Context) the generic
  1027. * template will provide a better match.
  1028. */
  1029. template<typename Func, typename T>
  1030. inline cl_int getInfoHelper(Func f, cl_uint name, T* param, int, typename T::cl_type = 0)
  1031. {
  1032. typename T::cl_type value;
  1033. cl_int err = f(name, sizeof(value), &value, NULL);
  1034. if (err != CL_SUCCESS) {
  1035. return err;
  1036. }
  1037. *param = value;
  1038. if (value != NULL)
  1039. {
  1040. err = param->retain();
  1041. if (err != CL_SUCCESS) {
  1042. return err;
  1043. }
  1044. }
  1045. return CL_SUCCESS;
  1046. }
  1047. #define CL_HPP_PARAM_NAME_INFO_1_0_(F) \
  1048. F(cl_platform_info, CL_PLATFORM_PROFILE, string) \
  1049. F(cl_platform_info, CL_PLATFORM_VERSION, string) \
  1050. F(cl_platform_info, CL_PLATFORM_NAME, string) \
  1051. F(cl_platform_info, CL_PLATFORM_VENDOR, string) \
  1052. F(cl_platform_info, CL_PLATFORM_EXTENSIONS, string) \
  1053. \
  1054. F(cl_device_info, CL_DEVICE_TYPE, cl_device_type) \
  1055. F(cl_device_info, CL_DEVICE_VENDOR_ID, cl_uint) \
  1056. F(cl_device_info, CL_DEVICE_MAX_COMPUTE_UNITS, cl_uint) \
  1057. F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS, cl_uint) \
  1058. F(cl_device_info, CL_DEVICE_MAX_WORK_GROUP_SIZE, size_type) \
  1059. F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_SIZES, cl::vector<size_type>) \
  1060. F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR, cl_uint) \
  1061. F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT, cl_uint) \
  1062. F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT, cl_uint) \
  1063. F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG, cl_uint) \
  1064. F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT, cl_uint) \
  1065. F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE, cl_uint) \
  1066. F(cl_device_info, CL_DEVICE_MAX_CLOCK_FREQUENCY, cl_uint) \
  1067. F(cl_device_info, CL_DEVICE_ADDRESS_BITS, cl_uint) \
  1068. F(cl_device_info, CL_DEVICE_MAX_READ_IMAGE_ARGS, cl_uint) \
  1069. F(cl_device_info, CL_DEVICE_MAX_WRITE_IMAGE_ARGS, cl_uint) \
  1070. F(cl_device_info, CL_DEVICE_MAX_MEM_ALLOC_SIZE, cl_ulong) \
  1071. F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_WIDTH, size_type) \
  1072. F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_HEIGHT, size_type) \
  1073. F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_WIDTH, size_type) \
  1074. F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_HEIGHT, size_type) \
  1075. F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_DEPTH, size_type) \
  1076. F(cl_device_info, CL_DEVICE_IMAGE_SUPPORT, cl_bool) \
  1077. F(cl_device_info, CL_DEVICE_MAX_PARAMETER_SIZE, size_type) \
  1078. F(cl_device_info, CL_DEVICE_MAX_SAMPLERS, cl_uint) \
  1079. F(cl_device_info, CL_DEVICE_MEM_BASE_ADDR_ALIGN, cl_uint) \
  1080. F(cl_device_info, CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE, cl_uint) \
  1081. F(cl_device_info, CL_DEVICE_SINGLE_FP_CONFIG, cl_device_fp_config) \
  1082. F(cl_device_info, CL_DEVICE_DOUBLE_FP_CONFIG, cl_device_fp_config) \
  1083. F(cl_device_info, CL_DEVICE_HALF_FP_CONFIG, cl_device_fp_config) \
  1084. F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_TYPE, cl_device_mem_cache_type) \
  1085. F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE, cl_uint)\
  1086. F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_SIZE, cl_ulong) \
  1087. F(cl_device_info, CL_DEVICE_GLOBAL_MEM_SIZE, cl_ulong) \
  1088. F(cl_device_info, CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE, cl_ulong) \
  1089. F(cl_device_info, CL_DEVICE_MAX_CONSTANT_ARGS, cl_uint) \
  1090. F(cl_device_info, CL_DEVICE_LOCAL_MEM_TYPE, cl_device_local_mem_type) \
  1091. F(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE, cl_ulong) \
  1092. F(cl_device_info, CL_DEVICE_ERROR_CORRECTION_SUPPORT, cl_bool) \
  1093. F(cl_device_info, CL_DEVICE_PROFILING_TIMER_RESOLUTION, size_type) \
  1094. F(cl_device_info, CL_DEVICE_ENDIAN_LITTLE, cl_bool) \
  1095. F(cl_device_info, CL_DEVICE_AVAILABLE, cl_bool) \
  1096. F(cl_device_info, CL_DEVICE_COMPILER_AVAILABLE, cl_bool) \
  1097. F(cl_device_info, CL_DEVICE_EXECUTION_CAPABILITIES, cl_device_exec_capabilities) \
  1098. F(cl_device_info, CL_DEVICE_PLATFORM, cl_platform_id) \
  1099. F(cl_device_info, CL_DEVICE_NAME, string) \
  1100. F(cl_device_info, CL_DEVICE_VENDOR, string) \
  1101. F(cl_device_info, CL_DRIVER_VERSION, string) \
  1102. F(cl_device_info, CL_DEVICE_PROFILE, string) \
  1103. F(cl_device_info, CL_DEVICE_VERSION, string) \
  1104. F(cl_device_info, CL_DEVICE_EXTENSIONS, string) \
  1105. \
  1106. F(cl_context_info, CL_CONTEXT_REFERENCE_COUNT, cl_uint) \
  1107. F(cl_context_info, CL_CONTEXT_DEVICES, cl::vector<Device>) \
  1108. F(cl_context_info, CL_CONTEXT_PROPERTIES, cl::vector<cl_context_properties>) \
  1109. \
  1110. F(cl_event_info, CL_EVENT_COMMAND_QUEUE, cl::CommandQueue) \
  1111. F(cl_event_info, CL_EVENT_COMMAND_TYPE, cl_command_type) \
  1112. F(cl_event_info, CL_EVENT_REFERENCE_COUNT, cl_uint) \
  1113. F(cl_event_info, CL_EVENT_COMMAND_EXECUTION_STATUS, cl_int) \
  1114. \
  1115. F(cl_profiling_info, CL_PROFILING_COMMAND_QUEUED, cl_ulong) \
  1116. F(cl_profiling_info, CL_PROFILING_COMMAND_SUBMIT, cl_ulong) \
  1117. F(cl_profiling_info, CL_PROFILING_COMMAND_START, cl_ulong) \
  1118. F(cl_profiling_info, CL_PROFILING_COMMAND_END, cl_ulong) \
  1119. \
  1120. F(cl_mem_info, CL_MEM_TYPE, cl_mem_object_type) \
  1121. F(cl_mem_info, CL_MEM_FLAGS, cl_mem_flags) \
  1122. F(cl_mem_info, CL_MEM_SIZE, size_type) \
  1123. F(cl_mem_info, CL_MEM_HOST_PTR, void*) \
  1124. F(cl_mem_info, CL_MEM_MAP_COUNT, cl_uint) \
  1125. F(cl_mem_info, CL_MEM_REFERENCE_COUNT, cl_uint) \
  1126. F(cl_mem_info, CL_MEM_CONTEXT, cl::Context) \
  1127. \
  1128. F(cl_image_info, CL_IMAGE_FORMAT, cl_image_format) \
  1129. F(cl_image_info, CL_IMAGE_ELEMENT_SIZE, size_type) \
  1130. F(cl_image_info, CL_IMAGE_ROW_PITCH, size_type) \
  1131. F(cl_image_info, CL_IMAGE_SLICE_PITCH, size_type) \
  1132. F(cl_image_info, CL_IMAGE_WIDTH, size_type) \
  1133. F(cl_image_info, CL_IMAGE_HEIGHT, size_type) \
  1134. F(cl_image_info, CL_IMAGE_DEPTH, size_type) \
  1135. \
  1136. F(cl_sampler_info, CL_SAMPLER_REFERENCE_COUNT, cl_uint) \
  1137. F(cl_sampler_info, CL_SAMPLER_CONTEXT, cl::Context) \
  1138. F(cl_sampler_info, CL_SAMPLER_NORMALIZED_COORDS, cl_bool) \
  1139. F(cl_sampler_info, CL_SAMPLER_ADDRESSING_MODE, cl_addressing_mode) \
  1140. F(cl_sampler_info, CL_SAMPLER_FILTER_MODE, cl_filter_mode) \
  1141. \
  1142. F(cl_program_info, CL_PROGRAM_REFERENCE_COUNT, cl_uint) \
  1143. F(cl_program_info, CL_PROGRAM_CONTEXT, cl::Context) \
  1144. F(cl_program_info, CL_PROGRAM_NUM_DEVICES, cl_uint) \
  1145. F(cl_program_info, CL_PROGRAM_DEVICES, cl::vector<Device>) \
  1146. F(cl_program_info, CL_PROGRAM_SOURCE, string) \
  1147. F(cl_program_info, CL_PROGRAM_BINARY_SIZES, cl::vector<size_type>) \
  1148. F(cl_program_info, CL_PROGRAM_BINARIES, cl::vector<cl::vector<unsigned char>>) \
  1149. \
  1150. F(cl_program_build_info, CL_PROGRAM_BUILD_STATUS, cl_build_status) \
  1151. F(cl_program_build_info, CL_PROGRAM_BUILD_OPTIONS, string) \
  1152. F(cl_program_build_info, CL_PROGRAM_BUILD_LOG, string) \
  1153. \
  1154. F(cl_kernel_info, CL_KERNEL_FUNCTION_NAME, string) \
  1155. F(cl_kernel_info, CL_KERNEL_NUM_ARGS, cl_uint) \
  1156. F(cl_kernel_info, CL_KERNEL_REFERENCE_COUNT, cl_uint) \
  1157. F(cl_kernel_info, CL_KERNEL_CONTEXT, cl::Context) \
  1158. F(cl_kernel_info, CL_KERNEL_PROGRAM, cl::Program) \
  1159. \
  1160. F(cl_kernel_work_group_info, CL_KERNEL_WORK_GROUP_SIZE, size_type) \
  1161. F(cl_kernel_work_group_info, CL_KERNEL_COMPILE_WORK_GROUP_SIZE, cl::detail::size_t_array) \
  1162. F(cl_kernel_work_group_info, CL_KERNEL_LOCAL_MEM_SIZE, cl_ulong) \
  1163. \
  1164. F(cl_command_queue_info, CL_QUEUE_CONTEXT, cl::Context) \
  1165. F(cl_command_queue_info, CL_QUEUE_DEVICE, cl::Device) \
  1166. F(cl_command_queue_info, CL_QUEUE_REFERENCE_COUNT, cl_uint) \
  1167. F(cl_command_queue_info, CL_QUEUE_PROPERTIES, cl_command_queue_properties)
  1168. #define CL_HPP_PARAM_NAME_INFO_1_1_(F) \
  1169. F(cl_context_info, CL_CONTEXT_NUM_DEVICES, cl_uint)\
  1170. F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF, cl_uint) \
  1171. F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR, cl_uint) \
  1172. F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT, cl_uint) \
  1173. F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_INT, cl_uint) \
  1174. F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG, cl_uint) \
  1175. F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT, cl_uint) \
  1176. F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE, cl_uint) \
  1177. F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF, cl_uint) \
  1178. F(cl_device_info, CL_DEVICE_OPENCL_C_VERSION, string) \
  1179. \
  1180. F(cl_mem_info, CL_MEM_ASSOCIATED_MEMOBJECT, cl::Memory) \
  1181. F(cl_mem_info, CL_MEM_OFFSET, size_type) \
  1182. \
  1183. F(cl_kernel_work_group_info, CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE, size_type) \
  1184. F(cl_kernel_work_group_info, CL_KERNEL_PRIVATE_MEM_SIZE, cl_ulong) \
  1185. \
  1186. F(cl_event_info, CL_EVENT_CONTEXT, cl::Context)
  1187. #define CL_HPP_PARAM_NAME_INFO_1_2_(F) \
  1188. F(cl_program_info, CL_PROGRAM_NUM_KERNELS, size_type) \
  1189. F(cl_program_info, CL_PROGRAM_KERNEL_NAMES, string) \
  1190. \
  1191. F(cl_program_build_info, CL_PROGRAM_BINARY_TYPE, cl_program_binary_type) \
  1192. \
  1193. F(cl_kernel_info, CL_KERNEL_ATTRIBUTES, string) \
  1194. \
  1195. F(cl_kernel_arg_info, CL_KERNEL_ARG_ADDRESS_QUALIFIER, cl_kernel_arg_address_qualifier) \
  1196. F(cl_kernel_arg_info, CL_KERNEL_ARG_ACCESS_QUALIFIER, cl_kernel_arg_access_qualifier) \
  1197. F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_NAME, string) \
  1198. F(cl_kernel_arg_info, CL_KERNEL_ARG_NAME, string) \
  1199. F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_QUALIFIER, cl_kernel_arg_type_qualifier) \
  1200. \
  1201. F(cl_device_info, CL_DEVICE_PARENT_DEVICE, cl::Device) \
  1202. F(cl_device_info, CL_DEVICE_PARTITION_PROPERTIES, cl::vector<cl_device_partition_property>) \
  1203. F(cl_device_info, CL_DEVICE_PARTITION_TYPE, cl::vector<cl_device_partition_property>) \
  1204. F(cl_device_info, CL_DEVICE_REFERENCE_COUNT, cl_uint) \
  1205. F(cl_device_info, CL_DEVICE_PREFERRED_INTEROP_USER_SYNC, size_type) \
  1206. F(cl_device_info, CL_DEVICE_PARTITION_AFFINITY_DOMAIN, cl_device_affinity_domain) \
  1207. F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS, string) \
  1208. \
  1209. F(cl_image_info, CL_IMAGE_ARRAY_SIZE, size_type) \
  1210. F(cl_image_info, CL_IMAGE_NUM_MIP_LEVELS, cl_uint) \
  1211. F(cl_image_info, CL_IMAGE_NUM_SAMPLES, cl_uint)
  1212. #define CL_HPP_PARAM_NAME_INFO_2_0_(F) \
  1213. F(cl_device_info, CL_DEVICE_QUEUE_ON_HOST_PROPERTIES, cl_command_queue_properties) \
  1214. F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES, cl_command_queue_properties) \
  1215. F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE, cl_uint) \
  1216. F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE, cl_uint) \
  1217. F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_QUEUES, cl_uint) \
  1218. F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_EVENTS, cl_uint) \
  1219. F(cl_device_info, CL_DEVICE_MAX_PIPE_ARGS, cl_uint) \
  1220. F(cl_device_info, CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS, cl_uint) \
  1221. F(cl_device_info, CL_DEVICE_PIPE_MAX_PACKET_SIZE, cl_uint) \
  1222. F(cl_device_info, CL_DEVICE_SVM_CAPABILITIES, cl_device_svm_capabilities) \
  1223. F(cl_device_info, CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT, cl_uint) \
  1224. F(cl_device_info, CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT, cl_uint) \
  1225. F(cl_device_info, CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT, cl_uint) \
  1226. F(cl_command_queue_info, CL_QUEUE_SIZE, cl_uint) \
  1227. F(cl_mem_info, CL_MEM_USES_SVM_POINTER, cl_bool) \
  1228. F(cl_program_build_info, CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE, size_type) \
  1229. F(cl_pipe_info, CL_PIPE_PACKET_SIZE, cl_uint) \
  1230. F(cl_pipe_info, CL_PIPE_MAX_PACKETS, cl_uint)
  1231. #define CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(F) \
  1232. F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE_KHR, size_type) \
  1233. F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE_KHR, size_type)
  1234. #define CL_HPP_PARAM_NAME_INFO_IL_KHR_(F) \
  1235. F(cl_device_info, CL_DEVICE_IL_VERSION_KHR, string) \
  1236. F(cl_program_info, CL_PROGRAM_IL_KHR, cl::vector<unsigned char>)
  1237. #define CL_HPP_PARAM_NAME_INFO_2_1_(F) \
  1238. F(cl_platform_info, CL_PLATFORM_HOST_TIMER_RESOLUTION, size_type) \
  1239. F(cl_program_info, CL_PROGRAM_IL, cl::vector<unsigned char>) \
  1240. F(cl_kernel_info, CL_KERNEL_MAX_NUM_SUB_GROUPS, size_type) \
  1241. F(cl_kernel_info, CL_KERNEL_COMPILE_NUM_SUB_GROUPS, size_type) \
  1242. F(cl_device_info, CL_DEVICE_MAX_NUM_SUB_GROUPS, cl_uint) \
  1243. F(cl_device_info, CL_DEVICE_IL_VERSION, string) \
  1244. F(cl_device_info, CL_DEVICE_SUB_GROUP_INDEPENDENT_FORWARD_PROGRESS, cl_bool) \
  1245. F(cl_command_queue_info, CL_QUEUE_DEVICE_DEFAULT, cl::DeviceCommandQueue) \
  1246. F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE, size_type) \
  1247. F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE, size_type) \
  1248. F(cl_kernel_sub_group_info, CL_KERNEL_LOCAL_SIZE_FOR_SUB_GROUP_COUNT, cl::detail::size_t_array)
  1249. #define CL_HPP_PARAM_NAME_INFO_2_2_(F) \
  1250. F(cl_program_info, CL_PROGRAM_SCOPE_GLOBAL_CTORS_PRESENT, cl_bool) \
  1251. F(cl_program_info, CL_PROGRAM_SCOPE_GLOBAL_DTORS_PRESENT, cl_bool)
  1252. #define CL_HPP_PARAM_NAME_DEVICE_FISSION_(F) \
  1253. F(cl_device_info, CL_DEVICE_PARENT_DEVICE_EXT, cl_device_id) \
  1254. F(cl_device_info, CL_DEVICE_PARTITION_TYPES_EXT, cl::vector<cl_device_partition_property_ext>) \
  1255. F(cl_device_info, CL_DEVICE_AFFINITY_DOMAINS_EXT, cl::vector<cl_device_partition_property_ext>) \
  1256. F(cl_device_info, CL_DEVICE_REFERENCE_COUNT_EXT , cl_uint) \
  1257. F(cl_device_info, CL_DEVICE_PARTITION_STYLE_EXT, cl::vector<cl_device_partition_property_ext>)
  1258. template <typename enum_type, cl_int Name>
  1259. struct param_traits {};
  1260. #define CL_HPP_DECLARE_PARAM_TRAITS_(token, param_name, T) \
  1261. struct token; \
  1262. template<> \
  1263. struct param_traits<detail:: token,param_name> \
  1264. { \
  1265. enum { value = param_name }; \
  1266. typedef T param_type; \
  1267. };
  1268. CL_HPP_PARAM_NAME_INFO_1_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1269. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  1270. CL_HPP_PARAM_NAME_INFO_1_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1271. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
  1272. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  1273. CL_HPP_PARAM_NAME_INFO_1_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1274. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  1275. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  1276. CL_HPP_PARAM_NAME_INFO_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1277. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  1278. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  1279. CL_HPP_PARAM_NAME_INFO_2_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1280. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
  1281. #if CL_HPP_TARGET_OPENCL_VERSION >= 220
  1282. CL_HPP_PARAM_NAME_INFO_2_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1283. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
  1284. #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
  1285. CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1286. #endif // #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
  1287. #if defined(CL_HPP_USE_IL_KHR)
  1288. CL_HPP_PARAM_NAME_INFO_IL_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1289. #endif // #if defined(CL_HPP_USE_IL_KHR)
  1290. // Flags deprecated in OpenCL 2.0
  1291. #define CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(F) \
  1292. F(cl_device_info, CL_DEVICE_QUEUE_PROPERTIES, cl_command_queue_properties)
  1293. #define CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(F) \
  1294. F(cl_device_info, CL_DEVICE_HOST_UNIFIED_MEMORY, cl_bool)
  1295. #define CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(F) \
  1296. F(cl_image_info, CL_IMAGE_BUFFER, cl::Buffer)
  1297. // Include deprecated query flags based on versions
  1298. // Only include deprecated 1.0 flags if 2.0 not active as there is an enum clash
  1299. #if CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
  1300. CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1301. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 110
  1302. #if CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  1303. CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1304. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
  1305. #if CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  1306. CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
  1307. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
  1308. #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
  1309. CL_HPP_PARAM_NAME_DEVICE_FISSION_(CL_HPP_DECLARE_PARAM_TRAITS_);
  1310. #endif // CL_HPP_USE_CL_DEVICE_FISSION
  1311. #ifdef CL_PLATFORM_ICD_SUFFIX_KHR
  1312. CL_HPP_DECLARE_PARAM_TRAITS_(cl_platform_info, CL_PLATFORM_ICD_SUFFIX_KHR, string)
  1313. #endif
  1314. #ifdef CL_DEVICE_PROFILING_TIMER_OFFSET_AMD
  1315. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_PROFILING_TIMER_OFFSET_AMD, cl_ulong)
  1316. #endif
  1317. #ifdef CL_DEVICE_GLOBAL_FREE_MEMORY_AMD
  1318. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_FREE_MEMORY_AMD, vector<size_type>)
  1319. #endif
  1320. #ifdef CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD
  1321. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD, cl_uint)
  1322. #endif
  1323. #ifdef CL_DEVICE_SIMD_WIDTH_AMD
  1324. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_WIDTH_AMD, cl_uint)
  1325. #endif
  1326. #ifdef CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD
  1327. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD, cl_uint)
  1328. #endif
  1329. #ifdef CL_DEVICE_WAVEFRONT_WIDTH_AMD
  1330. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WAVEFRONT_WIDTH_AMD, cl_uint)
  1331. #endif
  1332. #ifdef CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD
  1333. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD, cl_uint)
  1334. #endif
  1335. #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD
  1336. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD, cl_uint)
  1337. #endif
  1338. #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD
  1339. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD, cl_uint)
  1340. #endif
  1341. #ifdef CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD
  1342. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD, cl_uint)
  1343. #endif
  1344. #ifdef CL_DEVICE_LOCAL_MEM_BANKS_AMD
  1345. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_BANKS_AMD, cl_uint)
  1346. #endif
  1347. #ifdef CL_DEVICE_COMPUTE_UNITS_BITFIELD_ARM
  1348. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_UNITS_BITFIELD_ARM, cl_ulong)
  1349. #endif
  1350. #ifdef CL_DEVICE_JOB_SLOTS_ARM
  1351. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_JOB_SLOTS_ARM, cl_uint)
  1352. #endif
  1353. #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV
  1354. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV, cl_uint)
  1355. #endif
  1356. #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV
  1357. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV, cl_uint)
  1358. #endif
  1359. #ifdef CL_DEVICE_REGISTERS_PER_BLOCK_NV
  1360. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_REGISTERS_PER_BLOCK_NV, cl_uint)
  1361. #endif
  1362. #ifdef CL_DEVICE_WARP_SIZE_NV
  1363. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WARP_SIZE_NV, cl_uint)
  1364. #endif
  1365. #ifdef CL_DEVICE_GPU_OVERLAP_NV
  1366. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GPU_OVERLAP_NV, cl_bool)
  1367. #endif
  1368. #ifdef CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV
  1369. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV, cl_bool)
  1370. #endif
  1371. #ifdef CL_DEVICE_INTEGRATED_MEMORY_NV
  1372. CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_INTEGRATED_MEMORY_NV, cl_bool)
  1373. #endif
  1374. // Convenience functions
  1375. template <typename Func, typename T>
  1376. inline cl_int
  1377. getInfo(Func f, cl_uint name, T* param)
  1378. {
  1379. return getInfoHelper(f, name, param, 0);
  1380. }
  1381. template <typename Func, typename Arg0>
  1382. struct GetInfoFunctor0
  1383. {
  1384. Func f_; const Arg0& arg0_;
  1385. cl_int operator ()(
  1386. cl_uint param, size_type size, void* value, size_type* size_ret)
  1387. { return f_(arg0_, param, size, value, size_ret); }
  1388. };
  1389. template <typename Func, typename Arg0, typename Arg1>
  1390. struct GetInfoFunctor1
  1391. {
  1392. Func f_; const Arg0& arg0_; const Arg1& arg1_;
  1393. cl_int operator ()(
  1394. cl_uint param, size_type size, void* value, size_type* size_ret)
  1395. { return f_(arg0_, arg1_, param, size, value, size_ret); }
  1396. };
  1397. template <typename Func, typename Arg0, typename T>
  1398. inline cl_int
  1399. getInfo(Func f, const Arg0& arg0, cl_uint name, T* param)
  1400. {
  1401. GetInfoFunctor0<Func, Arg0> f0 = { f, arg0 };
  1402. return getInfoHelper(f0, name, param, 0);
  1403. }
  1404. template <typename Func, typename Arg0, typename Arg1, typename T>
  1405. inline cl_int
  1406. getInfo(Func f, const Arg0& arg0, const Arg1& arg1, cl_uint name, T* param)
  1407. {
  1408. GetInfoFunctor1<Func, Arg0, Arg1> f0 = { f, arg0, arg1 };
  1409. return getInfoHelper(f0, name, param, 0);
  1410. }
  1411. template<typename T>
  1412. struct ReferenceHandler
  1413. { };
  1414. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  1415. /**
  1416. * OpenCL 1.2 devices do have retain/release.
  1417. */
  1418. template <>
  1419. struct ReferenceHandler<cl_device_id>
  1420. {
  1421. /**
  1422. * Retain the device.
  1423. * \param device A valid device created using createSubDevices
  1424. * \return
  1425. * CL_SUCCESS if the function executed successfully.
  1426. * CL_INVALID_DEVICE if device was not a valid subdevice
  1427. * CL_OUT_OF_RESOURCES
  1428. * CL_OUT_OF_HOST_MEMORY
  1429. */
  1430. static cl_int retain(cl_device_id device)
  1431. { return ::clRetainDevice(device); }
  1432. /**
  1433. * Retain the device.
  1434. * \param device A valid device created using createSubDevices
  1435. * \return
  1436. * CL_SUCCESS if the function executed successfully.
  1437. * CL_INVALID_DEVICE if device was not a valid subdevice
  1438. * CL_OUT_OF_RESOURCES
  1439. * CL_OUT_OF_HOST_MEMORY
  1440. */
  1441. static cl_int release(cl_device_id device)
  1442. { return ::clReleaseDevice(device); }
  1443. };
  1444. #else // CL_HPP_TARGET_OPENCL_VERSION >= 120
  1445. /**
  1446. * OpenCL 1.1 devices do not have retain/release.
  1447. */
  1448. template <>
  1449. struct ReferenceHandler<cl_device_id>
  1450. {
  1451. // cl_device_id does not have retain().
  1452. static cl_int retain(cl_device_id)
  1453. { return CL_SUCCESS; }
  1454. // cl_device_id does not have release().
  1455. static cl_int release(cl_device_id)
  1456. { return CL_SUCCESS; }
  1457. };
  1458. #endif // ! (CL_HPP_TARGET_OPENCL_VERSION >= 120)
  1459. template <>
  1460. struct ReferenceHandler<cl_platform_id>
  1461. {
  1462. // cl_platform_id does not have retain().
  1463. static cl_int retain(cl_platform_id)
  1464. { return CL_SUCCESS; }
  1465. // cl_platform_id does not have release().
  1466. static cl_int release(cl_platform_id)
  1467. { return CL_SUCCESS; }
  1468. };
  1469. template <>
  1470. struct ReferenceHandler<cl_context>
  1471. {
  1472. static cl_int retain(cl_context context)
  1473. { return ::clRetainContext(context); }
  1474. static cl_int release(cl_context context)
  1475. { return ::clReleaseContext(context); }
  1476. };
  1477. template <>
  1478. struct ReferenceHandler<cl_command_queue>
  1479. {
  1480. static cl_int retain(cl_command_queue queue)
  1481. { return ::clRetainCommandQueue(queue); }
  1482. static cl_int release(cl_command_queue queue)
  1483. { return ::clReleaseCommandQueue(queue); }
  1484. };
  1485. template <>
  1486. struct ReferenceHandler<cl_mem>
  1487. {
  1488. static cl_int retain(cl_mem memory)
  1489. { return ::clRetainMemObject(memory); }
  1490. static cl_int release(cl_mem memory)
  1491. { return ::clReleaseMemObject(memory); }
  1492. };
  1493. template <>
  1494. struct ReferenceHandler<cl_sampler>
  1495. {
  1496. static cl_int retain(cl_sampler sampler)
  1497. { return ::clRetainSampler(sampler); }
  1498. static cl_int release(cl_sampler sampler)
  1499. { return ::clReleaseSampler(sampler); }
  1500. };
  1501. template <>
  1502. struct ReferenceHandler<cl_program>
  1503. {
  1504. static cl_int retain(cl_program program)
  1505. { return ::clRetainProgram(program); }
  1506. static cl_int release(cl_program program)
  1507. { return ::clReleaseProgram(program); }
  1508. };
  1509. template <>
  1510. struct ReferenceHandler<cl_kernel>
  1511. {
  1512. static cl_int retain(cl_kernel kernel)
  1513. { return ::clRetainKernel(kernel); }
  1514. static cl_int release(cl_kernel kernel)
  1515. { return ::clReleaseKernel(kernel); }
  1516. };
  1517. template <>
  1518. struct ReferenceHandler<cl_event>
  1519. {
  1520. static cl_int retain(cl_event event)
  1521. { return ::clRetainEvent(event); }
  1522. static cl_int release(cl_event event)
  1523. { return ::clReleaseEvent(event); }
  1524. };
  1525. #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
  1526. // Extracts version number with major in the upper 16 bits, minor in the lower 16
  1527. static cl_uint getVersion(const vector<char> &versionInfo)
  1528. {
  1529. int highVersion = 0;
  1530. int lowVersion = 0;
  1531. int index = 7;
  1532. while(versionInfo[index] != '.' ) {
  1533. highVersion *= 10;
  1534. highVersion += versionInfo[index]-'0';
  1535. ++index;
  1536. }
  1537. ++index;
  1538. while(versionInfo[index] != ' ' && versionInfo[index] != '\0') {
  1539. lowVersion *= 10;
  1540. lowVersion += versionInfo[index]-'0';
  1541. ++index;
  1542. }
  1543. return (highVersion << 16) | lowVersion;
  1544. }
  1545. static cl_uint getPlatformVersion(cl_platform_id platform)
  1546. {
  1547. size_type size = 0;
  1548. clGetPlatformInfo(platform, CL_PLATFORM_VERSION, 0, NULL, &size);
  1549. vector<char> versionInfo(size);
  1550. clGetPlatformInfo(platform, CL_PLATFORM_VERSION, size, versionInfo.data(), &size);
  1551. return getVersion(versionInfo);
  1552. }
  1553. static cl_uint getDevicePlatformVersion(cl_device_id device)
  1554. {
  1555. cl_platform_id platform;
  1556. clGetDeviceInfo(device, CL_DEVICE_PLATFORM, sizeof(platform), &platform, NULL);
  1557. return getPlatformVersion(platform);
  1558. }
  1559. static cl_uint getContextPlatformVersion(cl_context context)
  1560. {
  1561. // The platform cannot be queried directly, so we first have to grab a
  1562. // device and obtain its context
  1563. size_type size = 0;
  1564. clGetContextInfo(context, CL_CONTEXT_DEVICES, 0, NULL, &size);
  1565. if (size == 0)
  1566. return 0;
  1567. vector<cl_device_id> devices(size/sizeof(cl_device_id));
  1568. clGetContextInfo(context, CL_CONTEXT_DEVICES, size, devices.data(), NULL);
  1569. return getDevicePlatformVersion(devices[0]);
  1570. }
  1571. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
  1572. template <typename T>
  1573. class Wrapper
  1574. {
  1575. public:
  1576. typedef T cl_type;
  1577. protected:
  1578. cl_type object_;
  1579. public:
  1580. Wrapper() : object_(NULL) { }
  1581. Wrapper(const cl_type &obj, bool retainObject) : object_(obj)
  1582. {
  1583. if (retainObject) {
  1584. detail::errHandler(retain(), __RETAIN_ERR);
  1585. }
  1586. }
  1587. ~Wrapper()
  1588. {
  1589. if (object_ != NULL) { release(); }
  1590. }
  1591. Wrapper(const Wrapper<cl_type>& rhs)
  1592. {
  1593. object_ = rhs.object_;
  1594. detail::errHandler(retain(), __RETAIN_ERR);
  1595. }
  1596. Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
  1597. {
  1598. object_ = rhs.object_;
  1599. rhs.object_ = NULL;
  1600. }
  1601. Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
  1602. {
  1603. if (this != &rhs) {
  1604. detail::errHandler(release(), __RELEASE_ERR);
  1605. object_ = rhs.object_;
  1606. detail::errHandler(retain(), __RETAIN_ERR);
  1607. }
  1608. return *this;
  1609. }
  1610. Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
  1611. {
  1612. if (this != &rhs) {
  1613. detail::errHandler(release(), __RELEASE_ERR);
  1614. object_ = rhs.object_;
  1615. rhs.object_ = NULL;
  1616. }
  1617. return *this;
  1618. }
  1619. Wrapper<cl_type>& operator = (const cl_type &rhs)
  1620. {
  1621. detail::errHandler(release(), __RELEASE_ERR);
  1622. object_ = rhs;
  1623. return *this;
  1624. }
  1625. const cl_type& operator ()() const { return object_; }
  1626. cl_type& operator ()() { return object_; }
  1627. cl_type get() const { return object_; }
  1628. protected:
  1629. template<typename Func, typename U>
  1630. friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
  1631. cl_int retain() const
  1632. {
  1633. if (object_ != nullptr) {
  1634. return ReferenceHandler<cl_type>::retain(object_);
  1635. }
  1636. else {
  1637. return CL_SUCCESS;
  1638. }
  1639. }
  1640. cl_int release() const
  1641. {
  1642. if (object_ != nullptr) {
  1643. return ReferenceHandler<cl_type>::release(object_);
  1644. }
  1645. else {
  1646. return CL_SUCCESS;
  1647. }
  1648. }
  1649. };
  1650. template <>
  1651. class Wrapper<cl_device_id>
  1652. {
  1653. public:
  1654. typedef cl_device_id cl_type;
  1655. protected:
  1656. cl_type object_;
  1657. bool referenceCountable_;
  1658. static bool isReferenceCountable(cl_device_id device)
  1659. {
  1660. bool retVal = false;
  1661. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  1662. #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
  1663. if (device != NULL) {
  1664. int version = getDevicePlatformVersion(device);
  1665. if(version > ((1 << 16) + 1)) {
  1666. retVal = true;
  1667. }
  1668. }
  1669. #else // CL_HPP_MINIMUM_OPENCL_VERSION < 120
  1670. retVal = true;
  1671. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
  1672. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  1673. return retVal;
  1674. }
  1675. public:
  1676. Wrapper() : object_(NULL), referenceCountable_(false)
  1677. {
  1678. }
  1679. Wrapper(const cl_type &obj, bool retainObject) :
  1680. object_(obj),
  1681. referenceCountable_(false)
  1682. {
  1683. referenceCountable_ = isReferenceCountable(obj);
  1684. if (retainObject) {
  1685. detail::errHandler(retain(), __RETAIN_ERR);
  1686. }
  1687. }
  1688. ~Wrapper()
  1689. {
  1690. release();
  1691. }
  1692. Wrapper(const Wrapper<cl_type>& rhs)
  1693. {
  1694. object_ = rhs.object_;
  1695. referenceCountable_ = isReferenceCountable(object_);
  1696. detail::errHandler(retain(), __RETAIN_ERR);
  1697. }
  1698. Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
  1699. {
  1700. object_ = rhs.object_;
  1701. referenceCountable_ = rhs.referenceCountable_;
  1702. rhs.object_ = NULL;
  1703. rhs.referenceCountable_ = false;
  1704. }
  1705. Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
  1706. {
  1707. if (this != &rhs) {
  1708. detail::errHandler(release(), __RELEASE_ERR);
  1709. object_ = rhs.object_;
  1710. referenceCountable_ = rhs.referenceCountable_;
  1711. detail::errHandler(retain(), __RETAIN_ERR);
  1712. }
  1713. return *this;
  1714. }
  1715. Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
  1716. {
  1717. if (this != &rhs) {
  1718. detail::errHandler(release(), __RELEASE_ERR);
  1719. object_ = rhs.object_;
  1720. referenceCountable_ = rhs.referenceCountable_;
  1721. rhs.object_ = NULL;
  1722. rhs.referenceCountable_ = false;
  1723. }
  1724. return *this;
  1725. }
  1726. Wrapper<cl_type>& operator = (const cl_type &rhs)
  1727. {
  1728. detail::errHandler(release(), __RELEASE_ERR);
  1729. object_ = rhs;
  1730. referenceCountable_ = isReferenceCountable(object_);
  1731. return *this;
  1732. }
  1733. const cl_type& operator ()() const { return object_; }
  1734. cl_type& operator ()() { return object_; }
  1735. cl_type get() const { return object_; }
  1736. protected:
  1737. template<typename Func, typename U>
  1738. friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
  1739. template<typename Func, typename U>
  1740. friend inline cl_int getInfoHelper(Func, cl_uint, vector<U>*, int, typename U::cl_type);
  1741. cl_int retain() const
  1742. {
  1743. if( object_ != nullptr && referenceCountable_ ) {
  1744. return ReferenceHandler<cl_type>::retain(object_);
  1745. }
  1746. else {
  1747. return CL_SUCCESS;
  1748. }
  1749. }
  1750. cl_int release() const
  1751. {
  1752. if (object_ != nullptr && referenceCountable_) {
  1753. return ReferenceHandler<cl_type>::release(object_);
  1754. }
  1755. else {
  1756. return CL_SUCCESS;
  1757. }
  1758. }
  1759. };
  1760. template <typename T>
  1761. inline bool operator==(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
  1762. {
  1763. return lhs() == rhs();
  1764. }
  1765. template <typename T>
  1766. inline bool operator!=(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
  1767. {
  1768. return !operator==(lhs, rhs);
  1769. }
  1770. } // namespace detail
  1771. //! \endcond
  1772. using BuildLogType = vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, CL_PROGRAM_BUILD_LOG>::param_type>>;
  1773. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  1774. /**
  1775. * Exception class for build errors to carry build info
  1776. */
  1777. class BuildError : public Error
  1778. {
  1779. private:
  1780. BuildLogType buildLogs;
  1781. public:
  1782. BuildError(cl_int err, const char * errStr, const BuildLogType &vec) : Error(err, errStr), buildLogs(vec)
  1783. {
  1784. }
  1785. BuildLogType getBuildLog() const
  1786. {
  1787. return buildLogs;
  1788. }
  1789. };
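/* Usage sketch (illustrative only; "program" is a hypothetical cl::Program and
 * exceptions are assumed to be enabled in this branch):
 *
 *   try {
 *       program.build("-cl-std=CL1.2");
 *   } catch (const cl::BuildError &e) {
 *       for (const auto &log : e.getBuildLog()) {
 *           std::cerr << "Build log for device "
 *                     << log.first.getInfo<CL_DEVICE_NAME>() << ":\n"
 *                     << log.second << "\n";
 *       }
 *   }
 */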
  1790. namespace detail {
  1791. static inline cl_int buildErrHandler(
  1792. cl_int err,
  1793. const char * errStr,
  1794. const BuildLogType &buildLogs)
  1795. {
  1796. if (err != CL_SUCCESS) {
  1797. throw BuildError(err, errStr, buildLogs);
  1798. }
  1799. return err;
  1800. }
  1801. } // namespace detail
  1802. #else
  1803. namespace detail {
  1804. static inline cl_int buildErrHandler(
  1805. cl_int err,
  1806. const char * errStr,
  1807. const BuildLogType &buildLogs)
  1808. {
  1809. (void)buildLogs; // suppress unused variable warning
  1810. (void)errStr;
  1811. return err;
  1812. }
  1813. } // namespace detail
  1814. #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1815. /*! \struct ImageFormat
  1816. * \brief Adds constructors and member functions for cl_image_format.
  1817. *
  1818. * \see cl_image_format
  1819. */
  1820. struct ImageFormat : public cl_image_format
  1821. {
  1822. //! \brief Default constructor - performs no initialization.
  1823. ImageFormat(){}
  1824. //! \brief Initializing constructor.
  1825. ImageFormat(cl_channel_order order, cl_channel_type type)
  1826. {
  1827. image_channel_order = order;
  1828. image_channel_data_type = type;
  1829. }
  1830. //! \brief Assignment operator.
  1831. ImageFormat& operator = (const ImageFormat& rhs)
  1832. {
  1833. if (this != &rhs) {
  1834. this->image_channel_data_type = rhs.image_channel_data_type;
  1835. this->image_channel_order = rhs.image_channel_order;
  1836. }
  1837. return *this;
  1838. }
  1839. };
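/* Usage sketch (illustrative only):
 *
 *   cl::ImageFormat rgba8(CL_RGBA, CL_UNORM_INT8); // 8-bit normalized RGBA
 */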
  1840. /*! \brief Class interface for cl_device_id.
  1841. *
  1842. * \note Copies of these objects are inexpensive, since they don't 'own'
  1843. * any underlying resources or data structures.
  1844. *
  1845. * \see cl_device_id
  1846. */
  1847. class Device : public detail::Wrapper<cl_device_id>
  1848. {
  1849. private:
  1850. static std::once_flag default_initialized_;
  1851. static Device default_;
  1852. static cl_int default_error_;
1853. /*! \brief Create the default device.
  1854. *
  1855. * This sets @c default_ and @c default_error_. It does not throw
  1856. * @c cl::Error.
  1857. */
  1858. static void makeDefault();
1859. /*! \brief Create the default device from a provided device.
  1860. *
  1861. * This sets @c default_. It does not throw
  1862. * @c cl::Error.
  1863. */
  1864. static void makeDefaultProvided(const Device &p) {
  1865. default_ = p;
  1866. }
  1867. public:
  1868. #ifdef CL_HPP_UNIT_TEST_ENABLE
  1869. /*! \brief Reset the default.
  1870. *
  1871. * This sets @c default_ to an empty value to support cleanup in
  1872. * the unit test framework.
  1873. * This function is not thread safe.
  1874. */
  1875. static void unitTestClearDefault() {
  1876. default_ = Device();
  1877. }
  1878. #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
  1879. //! \brief Default constructor - initializes to NULL.
  1880. Device() : detail::Wrapper<cl_type>() { }
  1881. /*! \brief Constructor from cl_device_id.
  1882. *
  1883. * This simply copies the device ID value, which is an inexpensive operation.
  1884. */
  1885. explicit Device(const cl_device_id &device, bool retainObject = false) :
  1886. detail::Wrapper<cl_type>(device, retainObject) { }
  1887. /*! \brief Returns the first device on the default context.
  1888. *
  1889. * \see Context::getDefault()
  1890. */
  1891. static Device getDefault(
  1892. cl_int *errResult = NULL)
  1893. {
  1894. std::call_once(default_initialized_, makeDefault);
  1895. detail::errHandler(default_error_);
  1896. if (errResult != NULL) {
  1897. *errResult = default_error_;
  1898. }
  1899. return default_;
  1900. }
  1901. /**
  1902. * Modify the default device to be used by
  1903. * subsequent operations.
  1904. * Will only set the default if no default was previously created.
1905. * @return The updated default device.
1906. * Compare the return value with the value passed in to confirm whether it was updated.
  1907. */
  1908. static Device setDefault(const Device &default_device)
  1909. {
  1910. std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_device));
  1911. detail::errHandler(default_error_);
  1912. return default_;
  1913. }
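/* Usage sketch (illustrative only): install a preferred default device before any
 * other call creates one, then check whether the override took effect.
 *
 *   cl::Device preferred;  // e.g. chosen from Platform::getDevices()
 *   cl::Device actual = cl::Device::setDefault(preferred);
 *   if (actual() != preferred()) {
 *       // A default already existed, so the provided device was not installed.
 *   }
 */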
  1914. /*! \brief Assignment operator from cl_device_id.
  1915. *
  1916. * This simply copies the device ID value, which is an inexpensive operation.
  1917. */
  1918. Device& operator = (const cl_device_id& rhs)
  1919. {
  1920. detail::Wrapper<cl_type>::operator=(rhs);
  1921. return *this;
  1922. }
  1923. /*! \brief Copy constructor to forward copy to the superclass correctly.
  1924. * Required for MSVC.
  1925. */
  1926. Device(const Device& dev) : detail::Wrapper<cl_type>(dev) {}
  1927. /*! \brief Copy assignment to forward copy to the superclass correctly.
  1928. * Required for MSVC.
  1929. */
  1930. Device& operator = (const Device &dev)
  1931. {
  1932. detail::Wrapper<cl_type>::operator=(dev);
  1933. return *this;
  1934. }
  1935. /*! \brief Move constructor to forward move to the superclass correctly.
  1936. * Required for MSVC.
  1937. */
  1938. Device(Device&& dev) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(dev)) {}
  1939. /*! \brief Move assignment to forward move to the superclass correctly.
  1940. * Required for MSVC.
  1941. */
  1942. Device& operator = (Device &&dev)
  1943. {
  1944. detail::Wrapper<cl_type>::operator=(std::move(dev));
  1945. return *this;
  1946. }
  1947. //! \brief Wrapper for clGetDeviceInfo().
  1948. template <typename T>
  1949. cl_int getInfo(cl_device_info name, T* param) const
  1950. {
  1951. return detail::errHandler(
  1952. detail::getInfo(&::clGetDeviceInfo, object_, name, param),
  1953. __GET_DEVICE_INFO_ERR);
  1954. }
  1955. //! \brief Wrapper for clGetDeviceInfo() that returns by value.
  1956. template <cl_int name> typename
  1957. detail::param_traits<detail::cl_device_info, name>::param_type
  1958. getInfo(cl_int* err = NULL) const
  1959. {
  1960. typename detail::param_traits<
  1961. detail::cl_device_info, name>::param_type param;
  1962. cl_int result = getInfo(name, &param);
  1963. if (err != NULL) {
  1964. *err = result;
  1965. }
  1966. return param;
  1967. }
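/* Usage sketch (illustrative only):
 *
 *   cl::Device dev = cl::Device::getDefault();
 *   cl::string name = dev.getInfo<CL_DEVICE_NAME>();      // templated, returns by value
 *   cl_ulong memSize = 0;
 *   dev.getInfo(CL_DEVICE_GLOBAL_MEM_SIZE, &memSize);     // fills an out-parameter
 */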
  1968. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  1969. /**
  1970. * Return the current value of the host clock as seen by the device.
  1971. * The resolution of the device timer may be queried with the
  1972. * CL_DEVICE_PROFILING_TIMER_RESOLUTION query.
  1973. * @return The host timer value.
  1974. */
  1975. cl_ulong getHostTimer(cl_int *error = nullptr)
  1976. {
  1977. cl_ulong retVal = 0;
  1978. cl_int err =
  1979. clGetHostTimer(this->get(), &retVal);
  1980. detail::errHandler(
  1981. err,
  1982. __GET_HOST_TIMER_ERR);
  1983. if (error) {
  1984. *error = err;
  1985. }
  1986. return retVal;
  1987. }
  1988. /**
  1989. * Return a synchronized pair of host and device timestamps as seen by device.
  1990. * Use to correlate the clocks and get the host timer only using getHostTimer
  1991. * as a lower cost mechanism in between calls.
  1992. * The resolution of the host timer may be queried with the
  1993. * CL_PLATFORM_HOST_TIMER_RESOLUTION query.
  1994. * The resolution of the device timer may be queried with the
  1995. * CL_DEVICE_PROFILING_TIMER_RESOLUTION query.
  1996. * @return A pair of (device timer, host timer) timer values.
  1997. */
  1998. std::pair<cl_ulong, cl_ulong> getDeviceAndHostTimer(cl_int *error = nullptr)
  1999. {
  2000. std::pair<cl_ulong, cl_ulong> retVal;
  2001. cl_int err =
  2002. clGetDeviceAndHostTimer(this->get(), &(retVal.first), &(retVal.second));
  2003. detail::errHandler(
  2004. err,
  2005. __GET_DEVICE_AND_HOST_TIMER_ERR);
  2006. if (error) {
  2007. *error = err;
  2008. }
  2009. return retVal;
  2010. }
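/* Usage sketch (illustrative only, OpenCL 2.1+): synchronise the two clocks once,
 * then use the cheaper host-only query for later timestamps.
 *
 *   cl::Device dev = cl::Device::getDefault();
 *   std::pair<cl_ulong, cl_ulong> sync = dev.getDeviceAndHostTimer(); // (device, host)
 *   cl_ulong hostLater = dev.getHostTimer();
 *   cl_ulong hostDelta = hostLater - sync.second;
 */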
  2011. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  2012. /**
  2013. * CL 1.2 version
  2014. */
  2015. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  2016. //! \brief Wrapper for clCreateSubDevices().
  2017. cl_int createSubDevices(
  2018. const cl_device_partition_property * properties,
  2019. vector<Device>* devices)
  2020. {
  2021. cl_uint n = 0;
  2022. cl_int err = clCreateSubDevices(object_, properties, 0, NULL, &n);
  2023. if (err != CL_SUCCESS) {
  2024. return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
  2025. }
  2026. vector<cl_device_id> ids(n);
  2027. err = clCreateSubDevices(object_, properties, n, ids.data(), NULL);
  2028. if (err != CL_SUCCESS) {
  2029. return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
  2030. }
  2031. // Cannot trivially assign because we need to capture intermediates
  2032. // with safe construction
  2033. if (devices) {
  2034. devices->resize(ids.size());
  2035. // Assign to param, constructing with retain behaviour
  2036. // to correctly capture each underlying CL object
  2037. for (size_type i = 0; i < ids.size(); i++) {
  2038. // We do not need to retain because this device is being created
  2039. // by the runtime
  2040. (*devices)[i] = Device(ids[i], false);
  2041. }
  2042. }
  2043. return CL_SUCCESS;
  2044. }
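/* Usage sketch (illustrative only, OpenCL 1.2+): partition a device into sub-devices
 * of four compute units each.
 *
 *   cl::Device dev = cl::Device::getDefault();
 *   cl_device_partition_property props[] = { CL_DEVICE_PARTITION_EQUALLY, 4, 0 };
 *   cl::vector<cl::Device> subDevices;
 *   cl_int err = dev.createSubDevices(props, &subDevices);
 */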
  2045. #elif defined(CL_HPP_USE_CL_DEVICE_FISSION)
  2046. /**
  2047. * CL 1.1 version that uses device fission extension.
  2048. */
  2049. cl_int createSubDevices(
  2050. const cl_device_partition_property_ext * properties,
  2051. vector<Device>* devices)
  2052. {
  2053. typedef CL_API_ENTRY cl_int
  2054. ( CL_API_CALL * PFN_clCreateSubDevicesEXT)(
  2055. cl_device_id /*in_device*/,
  2056. const cl_device_partition_property_ext * /* properties */,
  2057. cl_uint /*num_entries*/,
  2058. cl_device_id * /*out_devices*/,
  2059. cl_uint * /*num_devices*/ ) CL_EXT_SUFFIX__VERSION_1_1;
  2060. static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
  2061. CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateSubDevicesEXT);
  2062. cl_uint n = 0;
  2063. cl_int err = pfn_clCreateSubDevicesEXT(object_, properties, 0, NULL, &n);
  2064. if (err != CL_SUCCESS) {
  2065. return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
  2066. }
  2067. vector<cl_device_id> ids(n);
  2068. err = pfn_clCreateSubDevicesEXT(object_, properties, n, ids.data(), NULL);
  2069. if (err != CL_SUCCESS) {
  2070. return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
  2071. }
  2072. // Cannot trivially assign because we need to capture intermediates
  2073. // with safe construction
  2074. if (devices) {
  2075. devices->resize(ids.size());
  2076. // Assign to param, constructing with retain behaviour
  2077. // to correctly capture each underlying CL object
  2078. for (size_type i = 0; i < ids.size(); i++) {
  2079. // We do not need to retain because this device is being created
  2080. // by the runtime
  2081. (*devices)[i] = Device(ids[i], false);
  2082. }
  2083. }
  2084. return CL_SUCCESS;
  2085. }
  2086. #endif // defined(CL_HPP_USE_CL_DEVICE_FISSION)
  2087. };
  2088. CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Device::default_initialized_;
  2089. CL_HPP_DEFINE_STATIC_MEMBER_ Device Device::default_;
  2090. CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Device::default_error_ = CL_SUCCESS;
  2091. /*! \brief Class interface for cl_platform_id.
  2092. *
  2093. * \note Copies of these objects are inexpensive, since they don't 'own'
  2094. * any underlying resources or data structures.
  2095. *
  2096. * \see cl_platform_id
  2097. */
  2098. class Platform : public detail::Wrapper<cl_platform_id>
  2099. {
  2100. private:
  2101. static std::once_flag default_initialized_;
  2102. static Platform default_;
  2103. static cl_int default_error_;
2104. /*! \brief Create the default platform.
  2105. *
  2106. * This sets @c default_ and @c default_error_. It does not throw
  2107. * @c cl::Error.
  2108. */
  2109. static void makeDefault() {
  2110. /* Throwing an exception from a call_once invocation does not do
  2111. * what we wish, so we catch it and save the error.
  2112. */
  2113. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2114. try
  2115. #endif
  2116. {
2117. // If default wasn't passed, generate one
  2118. // Otherwise set it
  2119. cl_uint n = 0;
  2120. cl_int err = ::clGetPlatformIDs(0, NULL, &n);
  2121. if (err != CL_SUCCESS) {
  2122. default_error_ = err;
  2123. return;
  2124. }
  2125. if (n == 0) {
  2126. default_error_ = CL_INVALID_PLATFORM;
  2127. return;
  2128. }
  2129. vector<cl_platform_id> ids(n);
  2130. err = ::clGetPlatformIDs(n, ids.data(), NULL);
  2131. if (err != CL_SUCCESS) {
  2132. default_error_ = err;
  2133. return;
  2134. }
  2135. default_ = Platform(ids[0]);
  2136. }
  2137. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2138. catch (cl::Error &e) {
  2139. default_error_ = e.err();
  2140. }
  2141. #endif
  2142. }
  2143. /*! \brief Create the default platform from a provided platform.
  2144. *
  2145. * This sets @c default_. It does not throw
  2146. * @c cl::Error.
  2147. */
  2148. static void makeDefaultProvided(const Platform &p) {
  2149. default_ = p;
  2150. }
  2151. public:
  2152. #ifdef CL_HPP_UNIT_TEST_ENABLE
  2153. /*! \brief Reset the default.
  2154. *
  2155. * This sets @c default_ to an empty value to support cleanup in
  2156. * the unit test framework.
  2157. * This function is not thread safe.
  2158. */
  2159. static void unitTestClearDefault() {
  2160. default_ = Platform();
  2161. }
  2162. #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
  2163. //! \brief Default constructor - initializes to NULL.
  2164. Platform() : detail::Wrapper<cl_type>() { }
  2165. /*! \brief Constructor from cl_platform_id.
  2166. *
  2167. * \param retainObject will cause the constructor to retain its cl object.
  2168. * Defaults to false to maintain compatibility with
  2169. * earlier versions.
  2170. * This simply copies the platform ID value, which is an inexpensive operation.
  2171. */
  2172. explicit Platform(const cl_platform_id &platform, bool retainObject = false) :
  2173. detail::Wrapper<cl_type>(platform, retainObject) { }
  2174. /*! \brief Assignment operator from cl_platform_id.
  2175. *
  2176. * This simply copies the platform ID value, which is an inexpensive operation.
  2177. */
  2178. Platform& operator = (const cl_platform_id& rhs)
  2179. {
  2180. detail::Wrapper<cl_type>::operator=(rhs);
  2181. return *this;
  2182. }
  2183. static Platform getDefault(
  2184. cl_int *errResult = NULL)
  2185. {
  2186. std::call_once(default_initialized_, makeDefault);
  2187. detail::errHandler(default_error_);
  2188. if (errResult != NULL) {
  2189. *errResult = default_error_;
  2190. }
  2191. return default_;
  2192. }
  2193. /**
  2194. * Modify the default platform to be used by
  2195. * subsequent operations.
  2196. * Will only set the default if no default was previously created.
2197. * @return The updated default platform.
2198. * Compare the return value with the value passed in to confirm whether it was updated.
  2199. */
  2200. static Platform setDefault(const Platform &default_platform)
  2201. {
  2202. std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_platform));
  2203. detail::errHandler(default_error_);
  2204. return default_;
  2205. }
  2206. //! \brief Wrapper for clGetPlatformInfo().
  2207. cl_int getInfo(cl_platform_info name, string* param) const
  2208. {
  2209. return detail::errHandler(
  2210. detail::getInfo(&::clGetPlatformInfo, object_, name, param),
  2211. __GET_PLATFORM_INFO_ERR);
  2212. }
  2213. //! \brief Wrapper for clGetPlatformInfo() that returns by value.
  2214. template <cl_int name> typename
  2215. detail::param_traits<detail::cl_platform_info, name>::param_type
  2216. getInfo(cl_int* err = NULL) const
  2217. {
  2218. typename detail::param_traits<
  2219. detail::cl_platform_info, name>::param_type param;
  2220. cl_int result = getInfo(name, &param);
  2221. if (err != NULL) {
  2222. *err = result;
  2223. }
  2224. return param;
  2225. }
  2226. /*! \brief Gets a list of devices for this platform.
  2227. *
  2228. * Wraps clGetDeviceIDs().
  2229. */
  2230. cl_int getDevices(
  2231. cl_device_type type,
  2232. vector<Device>* devices) const
  2233. {
  2234. cl_uint n = 0;
  2235. if( devices == NULL ) {
  2236. return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
  2237. }
  2238. cl_int err = ::clGetDeviceIDs(object_, type, 0, NULL, &n);
  2239. if (err != CL_SUCCESS) {
  2240. return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
  2241. }
  2242. vector<cl_device_id> ids(n);
  2243. err = ::clGetDeviceIDs(object_, type, n, ids.data(), NULL);
  2244. if (err != CL_SUCCESS) {
  2245. return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
  2246. }
  2247. // Cannot trivially assign because we need to capture intermediates
  2248. // with safe construction
  2249. // We must retain things we obtain from the API to avoid releasing
  2250. // API-owned objects.
  2251. if (devices) {
  2252. devices->resize(ids.size());
  2253. // Assign to param, constructing with retain behaviour
  2254. // to correctly capture each underlying CL object
  2255. for (size_type i = 0; i < ids.size(); i++) {
  2256. (*devices)[i] = Device(ids[i], true);
  2257. }
  2258. }
  2259. return CL_SUCCESS;
  2260. }
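/* Usage sketch (illustrative only): enumerate every platform and its GPU devices.
 *
 *   cl::vector<cl::Platform> platforms;
 *   cl::Platform::get(&platforms);
 *   for (const cl::Platform &plat : platforms) {
 *       cl::vector<cl::Device> gpus;
 *       if (plat.getDevices(CL_DEVICE_TYPE_GPU, &gpus) == CL_SUCCESS) {
 *           // use gpus
 *       }
 *   }
 */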
  2261. #if defined(CL_HPP_USE_DX_INTEROP)
  2262. /*! \brief Get the list of available D3D10 devices.
  2263. *
  2264. * \param d3d_device_source.
  2265. *
  2266. * \param d3d_object.
  2267. *
  2268. * \param d3d_device_set.
  2269. *
  2270. * \param devices returns a vector of OpenCL D3D10 devices found. The cl::Device
  2271. * values returned in devices can be used to identify a specific OpenCL
  2272. * device. If \a devices argument is NULL, this argument is ignored.
  2273. *
  2274. * \return One of the following values:
  2275. * - CL_SUCCESS if the function is executed successfully.
  2276. *
  2277. * The application can query specific capabilities of the OpenCL device(s)
  2278. * returned by cl::getDevices. This can be used by the application to
  2279. * determine which device(s) to use.
  2280. *
2281. * \note In the case that exceptions are enabled and a return value
2282. * other than CL_SUCCESS is generated, a cl::Error exception is
2283. * thrown.
  2284. */
  2285. cl_int getDevices(
  2286. cl_d3d10_device_source_khr d3d_device_source,
  2287. void * d3d_object,
  2288. cl_d3d10_device_set_khr d3d_device_set,
  2289. vector<Device>* devices) const
  2290. {
  2291. typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clGetDeviceIDsFromD3D10KHR)(
  2292. cl_platform_id platform,
  2293. cl_d3d10_device_source_khr d3d_device_source,
  2294. void * d3d_object,
  2295. cl_d3d10_device_set_khr d3d_device_set,
  2296. cl_uint num_entries,
  2297. cl_device_id * devices,
  2298. cl_uint* num_devices);
  2299. if( devices == NULL ) {
  2300. return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
  2301. }
  2302. static PFN_clGetDeviceIDsFromD3D10KHR pfn_clGetDeviceIDsFromD3D10KHR = NULL;
  2303. CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(object_, clGetDeviceIDsFromD3D10KHR);
  2304. cl_uint n = 0;
  2305. cl_int err = pfn_clGetDeviceIDsFromD3D10KHR(
  2306. object_,
  2307. d3d_device_source,
  2308. d3d_object,
  2309. d3d_device_set,
  2310. 0,
  2311. NULL,
  2312. &n);
  2313. if (err != CL_SUCCESS) {
  2314. return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
  2315. }
  2316. vector<cl_device_id> ids(n);
  2317. err = pfn_clGetDeviceIDsFromD3D10KHR(
  2318. object_,
  2319. d3d_device_source,
  2320. d3d_object,
  2321. d3d_device_set,
  2322. n,
  2323. ids.data(),
  2324. NULL);
  2325. if (err != CL_SUCCESS) {
  2326. return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
  2327. }
  2328. // Cannot trivially assign because we need to capture intermediates
  2329. // with safe construction
  2330. // We must retain things we obtain from the API to avoid releasing
  2331. // API-owned objects.
  2332. if (devices) {
  2333. devices->resize(ids.size());
  2334. // Assign to param, constructing with retain behaviour
  2335. // to correctly capture each underlying CL object
  2336. for (size_type i = 0; i < ids.size(); i++) {
  2337. (*devices)[i] = Device(ids[i], true);
  2338. }
  2339. }
  2340. return CL_SUCCESS;
  2341. }
  2342. #endif
  2343. /*! \brief Gets a list of available platforms.
  2344. *
  2345. * Wraps clGetPlatformIDs().
  2346. */
  2347. static cl_int get(
  2348. vector<Platform>* platforms)
  2349. {
  2350. cl_uint n = 0;
  2351. if( platforms == NULL ) {
  2352. return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_PLATFORM_IDS_ERR);
  2353. }
  2354. cl_int err = ::clGetPlatformIDs(0, NULL, &n);
  2355. if (err != CL_SUCCESS) {
  2356. return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
  2357. }
  2358. vector<cl_platform_id> ids(n);
  2359. err = ::clGetPlatformIDs(n, ids.data(), NULL);
  2360. if (err != CL_SUCCESS) {
  2361. return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
  2362. }
  2363. if (platforms) {
  2364. platforms->resize(ids.size());
  2365. // Platforms don't reference count
  2366. for (size_type i = 0; i < ids.size(); i++) {
  2367. (*platforms)[i] = Platform(ids[i]);
  2368. }
  2369. }
  2370. return CL_SUCCESS;
  2371. }
  2372. /*! \brief Gets the first available platform.
  2373. *
  2374. * Wraps clGetPlatformIDs(), returning the first result.
  2375. */
  2376. static cl_int get(
  2377. Platform * platform)
  2378. {
  2379. cl_int err;
  2380. Platform default_platform = Platform::getDefault(&err);
  2381. if (platform) {
  2382. *platform = default_platform;
  2383. }
  2384. return err;
  2385. }
  2386. /*! \brief Gets the first available platform, returning it by value.
  2387. *
2388. * \return Returns a valid platform if one is available.
2389. * If no platform is available, a null platform is returned.
2390. * If exceptions are enabled, throws an exception instead when no
2391. * platform is available or an error condition occurs.
  2392. * Wraps clGetPlatformIDs(), returning the first result.
  2393. */
  2394. static Platform get(
  2395. cl_int * errResult = NULL)
  2396. {
  2397. cl_int err;
  2398. Platform default_platform = Platform::getDefault(&err);
  2399. if (errResult) {
  2400. *errResult = err;
  2401. }
  2402. return default_platform;
  2403. }
  2404. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2405. //! \brief Wrapper for clUnloadPlatformCompiler().
  2406. cl_int
  2407. unloadCompiler()
  2408. {
  2409. return ::clUnloadPlatformCompiler(object_);
  2410. }
  2411. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  2412. }; // class Platform
  2413. CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Platform::default_initialized_;
  2414. CL_HPP_DEFINE_STATIC_MEMBER_ Platform Platform::default_;
  2415. CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Platform::default_error_ = CL_SUCCESS;
  2416. /**
  2417. * Deprecated APIs for 1.2
  2418. */
  2419. #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  2420. /**
  2421. * Unload the OpenCL compiler.
  2422. * \note Deprecated for OpenCL 1.2. Use Platform::unloadCompiler instead.
  2423. */
  2424. inline CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int
  2425. UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
  2426. inline cl_int
  2427. UnloadCompiler()
  2428. {
  2429. return ::clUnloadCompiler();
  2430. }
  2431. #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  2432. /*! \brief Class interface for cl_context.
  2433. *
  2434. * \note Copies of these objects are shallow, meaning that the copy will refer
  2435. * to the same underlying cl_context as the original. For details, see
  2436. * clRetainContext() and clReleaseContext().
  2437. *
  2438. * \see cl_context
  2439. */
  2440. class Context
  2441. : public detail::Wrapper<cl_context>
  2442. {
  2443. private:
  2444. static std::once_flag default_initialized_;
  2445. static Context default_;
  2446. static cl_int default_error_;
  2447. /*! \brief Create the default context from the default device type in the default platform.
  2448. *
  2449. * This sets @c default_ and @c default_error_. It does not throw
  2450. * @c cl::Error.
  2451. */
  2452. static void makeDefault() {
  2453. /* Throwing an exception from a call_once invocation does not do
  2454. * what we wish, so we catch it and save the error.
  2455. */
  2456. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2457. try
  2458. #endif
  2459. {
  2460. #if !defined(__APPLE__) && !defined(__MACOS)
  2461. const Platform &p = Platform::getDefault();
  2462. cl_platform_id defaultPlatform = p();
  2463. cl_context_properties properties[3] = {
  2464. CL_CONTEXT_PLATFORM, (cl_context_properties)defaultPlatform, 0
  2465. };
  2466. #else // #if !defined(__APPLE__) && !defined(__MACOS)
  2467. cl_context_properties *properties = nullptr;
  2468. #endif // #if !defined(__APPLE__) && !defined(__MACOS)
  2469. default_ = Context(
  2470. CL_DEVICE_TYPE_DEFAULT,
  2471. properties,
  2472. NULL,
  2473. NULL,
  2474. &default_error_);
  2475. }
  2476. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2477. catch (cl::Error &e) {
  2478. default_error_ = e.err();
  2479. }
  2480. #endif
  2481. }
  2482. /*! \brief Create the default context from a provided Context.
  2483. *
  2484. * This sets @c default_. It does not throw
  2485. * @c cl::Error.
  2486. */
  2487. static void makeDefaultProvided(const Context &c) {
  2488. default_ = c;
  2489. }
  2490. public:
  2491. #ifdef CL_HPP_UNIT_TEST_ENABLE
  2492. /*! \brief Reset the default.
  2493. *
  2494. * This sets @c default_ to an empty value to support cleanup in
  2495. * the unit test framework.
  2496. * This function is not thread safe.
  2497. */
  2498. static void unitTestClearDefault() {
  2499. default_ = Context();
  2500. }
  2501. #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
  2502. /*! \brief Constructs a context including a list of specified devices.
  2503. *
  2504. * Wraps clCreateContext().
  2505. */
  2506. Context(
  2507. const vector<Device>& devices,
  2508. cl_context_properties* properties = NULL,
  2509. void (CL_CALLBACK * notifyFptr)(
  2510. const char *,
  2511. const void *,
  2512. size_type,
  2513. void *) = NULL,
  2514. void* data = NULL,
  2515. cl_int* err = NULL)
  2516. {
  2517. cl_int error;
  2518. size_type numDevices = devices.size();
  2519. vector<cl_device_id> deviceIDs(numDevices);
  2520. for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
  2521. deviceIDs[deviceIndex] = (devices[deviceIndex])();
  2522. }
  2523. object_ = ::clCreateContext(
  2524. properties, (cl_uint) numDevices,
  2525. deviceIDs.data(),
  2526. notifyFptr, data, &error);
  2527. detail::errHandler(error, __CREATE_CONTEXT_ERR);
  2528. if (err != NULL) {
  2529. *err = error;
  2530. }
  2531. }
  2532. Context(
  2533. const Device& device,
  2534. cl_context_properties* properties = NULL,
  2535. void (CL_CALLBACK * notifyFptr)(
  2536. const char *,
  2537. const void *,
  2538. size_type,
  2539. void *) = NULL,
  2540. void* data = NULL,
  2541. cl_int* err = NULL)
  2542. {
  2543. cl_int error;
  2544. cl_device_id deviceID = device();
  2545. object_ = ::clCreateContext(
  2546. properties, 1,
  2547. &deviceID,
  2548. notifyFptr, data, &error);
  2549. detail::errHandler(error, __CREATE_CONTEXT_ERR);
  2550. if (err != NULL) {
  2551. *err = error;
  2552. }
  2553. }
  2554. /*! \brief Constructs a context including all or a subset of devices of a specified type.
  2555. *
  2556. * Wraps clCreateContextFromType().
  2557. */
  2558. Context(
  2559. cl_device_type type,
  2560. cl_context_properties* properties = NULL,
  2561. void (CL_CALLBACK * notifyFptr)(
  2562. const char *,
  2563. const void *,
  2564. size_type,
  2565. void *) = NULL,
  2566. void* data = NULL,
  2567. cl_int* err = NULL)
  2568. {
  2569. cl_int error;
  2570. #if !defined(__APPLE__) && !defined(__MACOS)
  2571. cl_context_properties prop[4] = {CL_CONTEXT_PLATFORM, 0, 0, 0 };
  2572. if (properties == NULL) {
  2573. // Get a valid platform ID as we cannot send in a blank one
  2574. vector<Platform> platforms;
  2575. error = Platform::get(&platforms);
  2576. if (error != CL_SUCCESS) {
  2577. detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
  2578. if (err != NULL) {
  2579. *err = error;
  2580. }
  2581. return;
  2582. }
  2583. // Check the platforms we found for a device of our specified type
  2584. cl_context_properties platform_id = 0;
  2585. for (unsigned int i = 0; i < platforms.size(); i++) {
  2586. vector<Device> devices;
  2587. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2588. try {
  2589. #endif
  2590. error = platforms[i].getDevices(type, &devices);
  2591. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2592. } catch (cl::Error& e) {
  2593. error = e.err();
  2594. }
  2595. // Catch if exceptions are enabled as we don't want to exit if first platform has no devices of type
  2596. // We do error checking next anyway, and can throw there if needed
  2597. #endif
  2598. // Only squash CL_SUCCESS and CL_DEVICE_NOT_FOUND
  2599. if (error != CL_SUCCESS && error != CL_DEVICE_NOT_FOUND) {
  2600. detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
  2601. if (err != NULL) {
  2602. *err = error;
  2603. }
  2604. }
  2605. if (devices.size() > 0) {
  2606. platform_id = (cl_context_properties)platforms[i]();
  2607. break;
  2608. }
  2609. }
  2610. if (platform_id == 0) {
  2611. detail::errHandler(CL_DEVICE_NOT_FOUND, __CREATE_CONTEXT_FROM_TYPE_ERR);
  2612. if (err != NULL) {
  2613. *err = CL_DEVICE_NOT_FOUND;
  2614. }
  2615. return;
  2616. }
  2617. prop[1] = platform_id;
  2618. properties = &prop[0];
  2619. }
  2620. #endif
  2621. object_ = ::clCreateContextFromType(
  2622. properties, type, notifyFptr, data, &error);
  2623. detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
  2624. if (err != NULL) {
  2625. *err = error;
  2626. }
  2627. }
  2628. /*! \brief Copy constructor to forward copy to the superclass correctly.
  2629. * Required for MSVC.
  2630. */
  2631. Context(const Context& ctx) : detail::Wrapper<cl_type>(ctx) {}
  2632. /*! \brief Copy assignment to forward copy to the superclass correctly.
  2633. * Required for MSVC.
  2634. */
  2635. Context& operator = (const Context &ctx)
  2636. {
  2637. detail::Wrapper<cl_type>::operator=(ctx);
  2638. return *this;
  2639. }
  2640. /*! \brief Move constructor to forward move to the superclass correctly.
  2641. * Required for MSVC.
  2642. */
  2643. Context(Context&& ctx) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(ctx)) {}
  2644. /*! \brief Move assignment to forward move to the superclass correctly.
  2645. * Required for MSVC.
  2646. */
  2647. Context& operator = (Context &&ctx)
  2648. {
  2649. detail::Wrapper<cl_type>::operator=(std::move(ctx));
  2650. return *this;
  2651. }
  2652. /*! \brief Returns a singleton context including all devices of CL_DEVICE_TYPE_DEFAULT.
  2653. *
  2654. * \note All calls to this function return the same cl_context as the first.
  2655. */
  2656. static Context getDefault(cl_int * err = NULL)
  2657. {
  2658. std::call_once(default_initialized_, makeDefault);
  2659. detail::errHandler(default_error_);
  2660. if (err != NULL) {
  2661. *err = default_error_;
  2662. }
  2663. return default_;
  2664. }
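/* Usage sketch (illustrative only): use the shared default context, or create one
 * explicitly for a particular device type.
 *
 *   cl::Context defaultCtx = cl::Context::getDefault();
 *
 *   cl_int err = CL_SUCCESS;
 *   cl::Context gpuCtx(CL_DEVICE_TYPE_GPU, NULL, NULL, NULL, &err);  // err reports failure
 */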
  2665. /**
  2666. * Modify the default context to be used by
  2667. * subsequent operations.
  2668. * Will only set the default if no default was previously created.
2669. * @return The updated default context.
2670. * Compare the return value with the value passed in to confirm whether it was updated.
  2671. */
  2672. static Context setDefault(const Context &default_context)
  2673. {
  2674. std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_context));
  2675. detail::errHandler(default_error_);
  2676. return default_;
  2677. }
  2678. //! \brief Default constructor - initializes to NULL.
  2679. Context() : detail::Wrapper<cl_type>() { }
  2680. /*! \brief Constructor from cl_context - takes ownership.
  2681. *
  2682. * This effectively transfers ownership of a refcount on the cl_context
  2683. * into the new Context object.
  2684. */
  2685. explicit Context(const cl_context& context, bool retainObject = false) :
  2686. detail::Wrapper<cl_type>(context, retainObject) { }
  2687. /*! \brief Assignment operator from cl_context - takes ownership.
  2688. *
  2689. * This effectively transfers ownership of a refcount on the rhs and calls
  2690. * clReleaseContext() on the value previously held by this instance.
  2691. */
  2692. Context& operator = (const cl_context& rhs)
  2693. {
  2694. detail::Wrapper<cl_type>::operator=(rhs);
  2695. return *this;
  2696. }
  2697. //! \brief Wrapper for clGetContextInfo().
  2698. template <typename T>
  2699. cl_int getInfo(cl_context_info name, T* param) const
  2700. {
  2701. return detail::errHandler(
  2702. detail::getInfo(&::clGetContextInfo, object_, name, param),
  2703. __GET_CONTEXT_INFO_ERR);
  2704. }
  2705. //! \brief Wrapper for clGetContextInfo() that returns by value.
  2706. template <cl_int name> typename
  2707. detail::param_traits<detail::cl_context_info, name>::param_type
  2708. getInfo(cl_int* err = NULL) const
  2709. {
  2710. typename detail::param_traits<
  2711. detail::cl_context_info, name>::param_type param;
  2712. cl_int result = getInfo(name, &param);
  2713. if (err != NULL) {
  2714. *err = result;
  2715. }
  2716. return param;
  2717. }
  2718. /*! \brief Gets a list of supported image formats.
  2719. *
  2720. * Wraps clGetSupportedImageFormats().
  2721. */
  2722. cl_int getSupportedImageFormats(
  2723. cl_mem_flags flags,
  2724. cl_mem_object_type type,
  2725. vector<ImageFormat>* formats) const
  2726. {
  2727. cl_uint numEntries;
  2728. if (!formats) {
  2729. return CL_SUCCESS;
  2730. }
  2731. cl_int err = ::clGetSupportedImageFormats(
  2732. object_,
  2733. flags,
  2734. type,
  2735. 0,
  2736. NULL,
  2737. &numEntries);
  2738. if (err != CL_SUCCESS) {
  2739. return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
  2740. }
  2741. if (numEntries > 0) {
  2742. vector<ImageFormat> value(numEntries);
  2743. err = ::clGetSupportedImageFormats(
  2744. object_,
  2745. flags,
  2746. type,
  2747. numEntries,
  2748. (cl_image_format*)value.data(),
  2749. NULL);
  2750. if (err != CL_SUCCESS) {
  2751. return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
  2752. }
  2753. formats->assign(begin(value), end(value));
  2754. }
  2755. else {
  2756. // If no values are being returned, ensure an empty vector comes back
  2757. formats->clear();
  2758. }
  2759. return CL_SUCCESS;
  2760. }
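/* Usage sketch (illustrative only): query the 2D image formats supported for
 * read-only images on the default context.
 *
 *   cl::Context ctx = cl::Context::getDefault();
 *   cl::vector<cl::ImageFormat> formats;
 *   ctx.getSupportedImageFormats(CL_MEM_READ_ONLY, CL_MEM_OBJECT_IMAGE2D, &formats);
 */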
  2761. };
  2762. inline void Device::makeDefault()
  2763. {
  2764. /* Throwing an exception from a call_once invocation does not do
  2765. * what we wish, so we catch it and save the error.
  2766. */
  2767. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2768. try
  2769. #endif
  2770. {
  2771. cl_int error = 0;
  2772. Context context = Context::getDefault(&error);
  2773. detail::errHandler(error, __CREATE_CONTEXT_ERR);
  2774. if (error != CL_SUCCESS) {
  2775. default_error_ = error;
  2776. }
  2777. else {
  2778. default_ = context.getInfo<CL_CONTEXT_DEVICES>()[0];
  2779. default_error_ = CL_SUCCESS;
  2780. }
  2781. }
  2782. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  2783. catch (cl::Error &e) {
  2784. default_error_ = e.err();
  2785. }
  2786. #endif
  2787. }
  2788. CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Context::default_initialized_;
  2789. CL_HPP_DEFINE_STATIC_MEMBER_ Context Context::default_;
  2790. CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Context::default_error_ = CL_SUCCESS;
  2791. /*! \brief Class interface for cl_event.
  2792. *
  2793. * \note Copies of these objects are shallow, meaning that the copy will refer
  2794. * to the same underlying cl_event as the original. For details, see
  2795. * clRetainEvent() and clReleaseEvent().
  2796. *
  2797. * \see cl_event
  2798. */
  2799. class Event : public detail::Wrapper<cl_event>
  2800. {
  2801. public:
  2802. //! \brief Default constructor - initializes to NULL.
  2803. Event() : detail::Wrapper<cl_type>() { }
  2804. /*! \brief Constructor from cl_event - takes ownership.
  2805. *
  2806. * \param retainObject will cause the constructor to retain its cl object.
  2807. * Defaults to false to maintain compatibility with
  2808. * earlier versions.
  2809. * This effectively transfers ownership of a refcount on the cl_event
  2810. * into the new Event object.
  2811. */
  2812. explicit Event(const cl_event& event, bool retainObject = false) :
  2813. detail::Wrapper<cl_type>(event, retainObject) { }
  2814. /*! \brief Assignment operator from cl_event - takes ownership.
  2815. *
  2816. * This effectively transfers ownership of a refcount on the rhs and calls
  2817. * clReleaseEvent() on the value previously held by this instance.
  2818. */
  2819. Event& operator = (const cl_event& rhs)
  2820. {
  2821. detail::Wrapper<cl_type>::operator=(rhs);
  2822. return *this;
  2823. }
  2824. //! \brief Wrapper for clGetEventInfo().
  2825. template <typename T>
  2826. cl_int getInfo(cl_event_info name, T* param) const
  2827. {
  2828. return detail::errHandler(
  2829. detail::getInfo(&::clGetEventInfo, object_, name, param),
  2830. __GET_EVENT_INFO_ERR);
  2831. }
  2832. //! \brief Wrapper for clGetEventInfo() that returns by value.
  2833. template <cl_int name> typename
  2834. detail::param_traits<detail::cl_event_info, name>::param_type
  2835. getInfo(cl_int* err = NULL) const
  2836. {
  2837. typename detail::param_traits<
  2838. detail::cl_event_info, name>::param_type param;
  2839. cl_int result = getInfo(name, &param);
  2840. if (err != NULL) {
  2841. *err = result;
  2842. }
  2843. return param;
  2844. }
  2845. //! \brief Wrapper for clGetEventProfilingInfo().
  2846. template <typename T>
  2847. cl_int getProfilingInfo(cl_profiling_info name, T* param) const
  2848. {
  2849. return detail::errHandler(detail::getInfo(
  2850. &::clGetEventProfilingInfo, object_, name, param),
  2851. __GET_EVENT_PROFILE_INFO_ERR);
  2852. }
  2853. //! \brief Wrapper for clGetEventProfilingInfo() that returns by value.
  2854. template <cl_int name> typename
  2855. detail::param_traits<detail::cl_profiling_info, name>::param_type
  2856. getProfilingInfo(cl_int* err = NULL) const
  2857. {
  2858. typename detail::param_traits<
  2859. detail::cl_profiling_info, name>::param_type param;
  2860. cl_int result = getProfilingInfo(name, &param);
  2861. if (err != NULL) {
  2862. *err = result;
  2863. }
  2864. return param;
  2865. }
  2866. /*! \brief Blocks the calling thread until this event completes.
  2867. *
  2868. * Wraps clWaitForEvents().
  2869. */
  2870. cl_int wait() const
  2871. {
  2872. return detail::errHandler(
  2873. ::clWaitForEvents(1, &object_),
  2874. __WAIT_FOR_EVENTS_ERR);
  2875. }
  2876. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  2877. /*! \brief Registers a user callback function for a specific command execution status.
  2878. *
  2879. * Wraps clSetEventCallback().
  2880. */
  2881. cl_int setCallback(
  2882. cl_int type,
  2883. void (CL_CALLBACK * pfn_notify)(cl_event, cl_int, void *),
  2884. void * user_data = NULL)
  2885. {
  2886. return detail::errHandler(
  2887. ::clSetEventCallback(
  2888. object_,
  2889. type,
  2890. pfn_notify,
  2891. user_data),
  2892. __SET_EVENT_CALLBACK_ERR);
  2893. }
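/* Usage sketch (illustrative only, OpenCL 1.1+): invoke a host callback when an
 * event reaches CL_COMPLETE. Callbacks should return quickly and should not call
 * blocking OpenCL functions.
 *
 *   void CL_CALLBACK onComplete(cl_event, cl_int status, void *userData) {
 *       // inspect status / userData
 *   }
 *
 *   event.setCallback(CL_COMPLETE, &onComplete, nullptr);
 */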
  2894. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
  2895. /*! \brief Blocks the calling thread until every event specified is complete.
  2896. *
  2897. * Wraps clWaitForEvents().
  2898. */
  2899. static cl_int
  2900. waitForEvents(const vector<Event>& events)
  2901. {
  2902. return detail::errHandler(
  2903. ::clWaitForEvents(
  2904. (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
  2905. __WAIT_FOR_EVENTS_ERR);
  2906. }
  2907. };
  2908. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  2909. /*! \brief Class interface for user events (a subset of cl_event's).
  2910. *
  2911. * See Event for details about copy semantics, etc.
  2912. */
  2913. class UserEvent : public Event
  2914. {
  2915. public:
  2916. /*! \brief Constructs a user event on a given context.
  2917. *
  2918. * Wraps clCreateUserEvent().
  2919. */
  2920. UserEvent(
  2921. const Context& context,
  2922. cl_int * err = NULL)
  2923. {
  2924. cl_int error;
  2925. object_ = ::clCreateUserEvent(
  2926. context(),
  2927. &error);
  2928. detail::errHandler(error, __CREATE_USER_EVENT_ERR);
  2929. if (err != NULL) {
  2930. *err = error;
  2931. }
  2932. }
  2933. //! \brief Default constructor - initializes to NULL.
  2934. UserEvent() : Event() { }
  2935. /*! \brief Sets the execution status of a user event object.
  2936. *
  2937. * Wraps clSetUserEventStatus().
  2938. */
  2939. cl_int setStatus(cl_int status)
  2940. {
  2941. return detail::errHandler(
  2942. ::clSetUserEventStatus(object_,status),
  2943. __SET_USER_EVENT_STATUS_ERR);
  2944. }
  2945. };
  2946. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
  2947. /*! \brief Blocks the calling thread until every event specified is complete.
  2948. *
  2949. * Wraps clWaitForEvents().
  2950. */
  2951. inline static cl_int
  2952. WaitForEvents(const vector<Event>& events)
  2953. {
  2954. return detail::errHandler(
  2955. ::clWaitForEvents(
  2956. (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
  2957. __WAIT_FOR_EVENTS_ERR);
  2958. }
  2959. /*! \brief Class interface for cl_mem.
  2960. *
  2961. * \note Copies of these objects are shallow, meaning that the copy will refer
  2962. * to the same underlying cl_mem as the original. For details, see
  2963. * clRetainMemObject() and clReleaseMemObject().
  2964. *
  2965. * \see cl_mem
  2966. */
  2967. class Memory : public detail::Wrapper<cl_mem>
  2968. {
  2969. public:
  2970. //! \brief Default constructor - initializes to NULL.
  2971. Memory() : detail::Wrapper<cl_type>() { }
  2972. /*! \brief Constructor from cl_mem - takes ownership.
  2973. *
  2974. * Optionally transfer ownership of a refcount on the cl_mem
  2975. * into the new Memory object.
  2976. *
  2977. * \param retainObject will cause the constructor to retain its cl object.
  2978. * Defaults to false to maintain compatibility with
  2979. * earlier versions.
  2980. *
  2981. * See Memory for further details.
  2982. */
  2983. explicit Memory(const cl_mem& memory, bool retainObject) :
  2984. detail::Wrapper<cl_type>(memory, retainObject) { }
  2985. /*! \brief Assignment operator from cl_mem - takes ownership.
  2986. *
  2987. * This effectively transfers ownership of a refcount on the rhs and calls
  2988. * clReleaseMemObject() on the value previously held by this instance.
  2989. */
  2990. Memory& operator = (const cl_mem& rhs)
  2991. {
  2992. detail::Wrapper<cl_type>::operator=(rhs);
  2993. return *this;
  2994. }
  2995. /*! \brief Copy constructor to forward copy to the superclass correctly.
  2996. * Required for MSVC.
  2997. */
  2998. Memory(const Memory& mem) : detail::Wrapper<cl_type>(mem) {}
  2999. /*! \brief Copy assignment to forward copy to the superclass correctly.
  3000. * Required for MSVC.
  3001. */
  3002. Memory& operator = (const Memory &mem)
  3003. {
  3004. detail::Wrapper<cl_type>::operator=(mem);
  3005. return *this;
  3006. }
  3007. /*! \brief Move constructor to forward move to the superclass correctly.
  3008. * Required for MSVC.
  3009. */
  3010. Memory(Memory&& mem) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(mem)) {}
  3011. /*! \brief Move assignment to forward move to the superclass correctly.
  3012. * Required for MSVC.
  3013. */
  3014. Memory& operator = (Memory &&mem)
  3015. {
  3016. detail::Wrapper<cl_type>::operator=(std::move(mem));
  3017. return *this;
  3018. }
  3019. //! \brief Wrapper for clGetMemObjectInfo().
  3020. template <typename T>
  3021. cl_int getInfo(cl_mem_info name, T* param) const
  3022. {
  3023. return detail::errHandler(
  3024. detail::getInfo(&::clGetMemObjectInfo, object_, name, param),
  3025. __GET_MEM_OBJECT_INFO_ERR);
  3026. }
  3027. //! \brief Wrapper for clGetMemObjectInfo() that returns by value.
  3028. template <cl_int name> typename
  3029. detail::param_traits<detail::cl_mem_info, name>::param_type
  3030. getInfo(cl_int* err = NULL) const
  3031. {
  3032. typename detail::param_traits<
  3033. detail::cl_mem_info, name>::param_type param;
  3034. cl_int result = getInfo(name, &param);
  3035. if (err != NULL) {
  3036. *err = result;
  3037. }
  3038. return param;
  3039. }
  3040. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  3041. /*! \brief Registers a callback function to be called when the memory object
  3042. * is no longer needed.
  3043. *
  3044. * Wraps clSetMemObjectDestructorCallback().
  3045. *
  3046. * Repeated calls to this function, for a given cl_mem value, will append
3047. * to the list of functions called (in reverse order) when the memory object's
  3048. * resources are freed and the memory object is deleted.
  3049. *
  3050. * \note
  3051. * The registered callbacks are associated with the underlying cl_mem
  3052. * value - not the Memory class instance.
  3053. */
  3054. cl_int setDestructorCallback(
  3055. void (CL_CALLBACK * pfn_notify)(cl_mem, void *),
  3056. void * user_data = NULL)
  3057. {
  3058. return detail::errHandler(
  3059. ::clSetMemObjectDestructorCallback(
  3060. object_,
  3061. pfn_notify,
  3062. user_data),
  3063. __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR);
  3064. }
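/* Usage sketch (illustrative only, OpenCL 1.1+): release a host allocation once the
 * underlying cl_mem object is destroyed ("memObject" and "hostData" are hypothetical):
 *
 *   void CL_CALLBACK freeHostData(cl_mem, void *userData) {
 *       free(userData);
 *   }
 *
 *   memObject.setDestructorCallback(&freeHostData, hostData);
 */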
  3065. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
  3066. };
  3067. // Pre-declare copy functions
  3068. class Buffer;
  3069. template< typename IteratorType >
  3070. cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
  3071. template< typename IteratorType >
  3072. cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
  3073. template< typename IteratorType >
  3074. cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
  3075. template< typename IteratorType >
  3076. cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
  3077. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  3078. namespace detail
  3079. {
  3080. class SVMTraitNull
  3081. {
  3082. public:
  3083. static cl_svm_mem_flags getSVMMemFlags()
  3084. {
  3085. return 0;
  3086. }
  3087. };
  3088. } // namespace detail
  3089. template<class Trait = detail::SVMTraitNull>
  3090. class SVMTraitReadWrite
  3091. {
  3092. public:
  3093. static cl_svm_mem_flags getSVMMemFlags()
  3094. {
  3095. return CL_MEM_READ_WRITE |
  3096. Trait::getSVMMemFlags();
  3097. }
  3098. };
  3099. template<class Trait = detail::SVMTraitNull>
  3100. class SVMTraitReadOnly
  3101. {
  3102. public:
  3103. static cl_svm_mem_flags getSVMMemFlags()
  3104. {
  3105. return CL_MEM_READ_ONLY |
  3106. Trait::getSVMMemFlags();
  3107. }
  3108. };
  3109. template<class Trait = detail::SVMTraitNull>
  3110. class SVMTraitWriteOnly
  3111. {
  3112. public:
  3113. static cl_svm_mem_flags getSVMMemFlags()
  3114. {
  3115. return CL_MEM_WRITE_ONLY |
  3116. Trait::getSVMMemFlags();
  3117. }
  3118. };
  3119. template<class Trait = SVMTraitReadWrite<>>
  3120. class SVMTraitCoarse
  3121. {
  3122. public:
  3123. static cl_svm_mem_flags getSVMMemFlags()
  3124. {
  3125. return Trait::getSVMMemFlags();
  3126. }
  3127. };
  3128. template<class Trait = SVMTraitReadWrite<>>
  3129. class SVMTraitFine
  3130. {
  3131. public:
  3132. static cl_svm_mem_flags getSVMMemFlags()
  3133. {
  3134. return CL_MEM_SVM_FINE_GRAIN_BUFFER |
  3135. Trait::getSVMMemFlags();
  3136. }
  3137. };
  3138. template<class Trait = SVMTraitReadWrite<>>
  3139. class SVMTraitAtomic
  3140. {
  3141. public:
  3142. static cl_svm_mem_flags getSVMMemFlags()
  3143. {
  3144. return
  3145. CL_MEM_SVM_FINE_GRAIN_BUFFER |
  3146. CL_MEM_SVM_ATOMICS |
  3147. Trait::getSVMMemFlags();
  3148. }
  3149. };
  3150. // Pre-declare SVM map function
  3151. template<typename T>
  3152. inline cl_int enqueueMapSVM(
  3153. T* ptr,
  3154. cl_bool blocking,
  3155. cl_map_flags flags,
  3156. size_type size,
  3157. const vector<Event>* events = NULL,
  3158. Event* event = NULL);
  3159. /**
  3160. * STL-like allocator class for managing SVM objects provided for convenience.
  3161. *
  3162. * Note that while this behaves like an allocator for the purposes of constructing vectors and similar objects,
3163. * care must be taken when using it with smart pointers.
3164. * The allocator should not be used to construct a unique_ptr if we are using coarse-grained SVM mode, because
3165. * the coarse-grained management behaviour would interact incorrectly with reference counting.
  3166. *
  3167. * Instead the allocator embeds a Deleter which may be used with unique_ptr and is used
  3168. * with the allocate_shared and allocate_ptr supplied operations.
  3169. */
  3170. template<typename T, class SVMTrait>
  3171. class SVMAllocator {
  3172. private:
  3173. Context context_;
  3174. public:
  3175. typedef T value_type;
  3176. typedef value_type* pointer;
  3177. typedef const value_type* const_pointer;
  3178. typedef value_type& reference;
  3179. typedef const value_type& const_reference;
  3180. typedef std::size_t size_type;
  3181. typedef std::ptrdiff_t difference_type;
  3182. template<typename U>
  3183. struct rebind
  3184. {
  3185. typedef SVMAllocator<U, SVMTrait> other;
  3186. };
  3187. template<typename U, typename V>
  3188. friend class SVMAllocator;
  3189. SVMAllocator() :
  3190. context_(Context::getDefault())
  3191. {
  3192. }
  3193. explicit SVMAllocator(cl::Context context) :
  3194. context_(context)
  3195. {
  3196. }
  3197. SVMAllocator(const SVMAllocator &other) :
  3198. context_(other.context_)
  3199. {
  3200. }
  3201. template<typename U>
  3202. SVMAllocator(const SVMAllocator<U, SVMTrait> &other) :
  3203. context_(other.context_)
  3204. {
  3205. }
  3206. ~SVMAllocator()
  3207. {
  3208. }
  3209. pointer address(reference r) CL_HPP_NOEXCEPT_
  3210. {
  3211. return std::addressof(r);
  3212. }
  3213. const_pointer address(const_reference r) CL_HPP_NOEXCEPT_
  3214. {
  3215. return std::addressof(r);
  3216. }
  3217. /**
  3218. * Allocate an SVM pointer.
  3219. *
  3220. * If the allocator is coarse-grained, this will take ownership to allow
  3221. * containers to correctly construct data in place.
  3222. */
  3223. pointer allocate(
  3224. size_type size,
  3225. typename cl::SVMAllocator<void, SVMTrait>::const_pointer = 0)
  3226. {
3227. // Allocate memory with default alignment (an alignment of 0 lets the runtime choose)
  3228. void* voidPointer =
  3229. clSVMAlloc(
  3230. context_(),
  3231. SVMTrait::getSVMMemFlags(),
  3232. size*sizeof(T),
  3233. 0);
  3234. pointer retValue = reinterpret_cast<pointer>(
  3235. voidPointer);
  3236. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  3237. if (!retValue) {
  3238. std::bad_alloc excep;
  3239. throw excep;
  3240. }
  3241. #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  3242. // If allocation was coarse-grained then map it
  3243. if (!(SVMTrait::getSVMMemFlags() & CL_MEM_SVM_FINE_GRAIN_BUFFER)) {
  3244. cl_int err = enqueueMapSVM(retValue, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE, size*sizeof(T));
  3245. if (err != CL_SUCCESS) {
  3246. std::bad_alloc excep;
  3247. throw excep;
  3248. }
  3249. }
  3250. // If exceptions disabled, return null pointer from allocator
  3251. return retValue;
  3252. }
  3253. void deallocate(pointer p, size_type)
  3254. {
  3255. clSVMFree(context_(), p);
  3256. }
  3257. /**
  3258. * Return the maximum possible allocation size.
  3259. * This is the minimum of the maximum sizes of all devices in the context.
  3260. */
  3261. size_type max_size() const CL_HPP_NOEXCEPT_
  3262. {
  3263. size_type maxSize = std::numeric_limits<size_type>::max() / sizeof(T);
  3264. for (const Device &d : context_.getInfo<CL_CONTEXT_DEVICES>()) {
  3265. maxSize = std::min(
  3266. maxSize,
  3267. static_cast<size_type>(d.getInfo<CL_DEVICE_MAX_MEM_ALLOC_SIZE>()));
  3268. }
  3269. return maxSize;
  3270. }
  3271. template< class U, class... Args >
  3272. void construct(U* p, Args&&... args)
  3273. {
3274. new(p)T(std::forward<Args>(args)...);
  3275. }
  3276. template< class U >
  3277. void destroy(U* p)
  3278. {
  3279. p->~U();
  3280. }
  3281. /**
  3282. * Returns true if the contexts match.
  3283. */
  3284. inline bool operator==(SVMAllocator const& rhs)
  3285. {
  3286. return (context_==rhs.context_);
  3287. }
  3288. inline bool operator!=(SVMAllocator const& a)
  3289. {
  3290. return !operator==(a);
  3291. }
3292. }; // class SVMAllocator
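// Usage sketch (not part of the original header): backing a vector with
// coarse-grained SVM. Assumes the default context/device reports
// CL_DEVICE_SVM_COARSE_GRAIN_BUFFER support.
//
//   cl::SVMAllocator<int, cl::SVMTraitCoarse<>> svmAlloc;
//   cl::vector<int, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>> v(1024, 0, svmAlloc);
//   // v.data() is an SVM pointer; unmap it (cl::enqueueUnmapSVM) before
//   // passing it to a kernel, and map it again for host access.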
  3293. template<class SVMTrait>
  3294. class SVMAllocator<void, SVMTrait> {
  3295. public:
  3296. typedef void value_type;
  3297. typedef value_type* pointer;
  3298. typedef const value_type* const_pointer;
  3299. template<typename U>
  3300. struct rebind
  3301. {
  3302. typedef SVMAllocator<U, SVMTrait> other;
  3303. };
  3304. template<typename U, typename V>
  3305. friend class SVMAllocator;
  3306. };
  3307. #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
  3308. namespace detail
  3309. {
  3310. template<class Alloc>
  3311. class Deleter {
  3312. private:
  3313. Alloc alloc_;
  3314. size_type copies_;
  3315. public:
  3316. typedef typename std::allocator_traits<Alloc>::pointer pointer;
  3317. Deleter(const Alloc &alloc, size_type copies) : alloc_{ alloc }, copies_{ copies }
  3318. {
  3319. }
  3320. void operator()(pointer ptr) const {
  3321. Alloc tmpAlloc{ alloc_ };
  3322. std::allocator_traits<Alloc>::destroy(tmpAlloc, std::addressof(*ptr));
  3323. std::allocator_traits<Alloc>::deallocate(tmpAlloc, ptr, copies_);
  3324. }
  3325. };
  3326. } // namespace detail
3327. /**
3328. * Allocation operation analogous to std::allocate_shared, but returning a
3329. * cl::pointer<T> (a std::unique_ptr with an SVM-aware Deleter) by default.
3330. * Avoiding a shared_ptr ensures that no separate control block is
3331. * allocated in memory inaccessible to the host.
3332. */
  3333. template <class T, class Alloc, class... Args>
  3334. cl::pointer<T, detail::Deleter<Alloc>> allocate_pointer(const Alloc &alloc_, Args&&... args)
  3335. {
  3336. Alloc alloc(alloc_);
  3337. static const size_type copies = 1;
  3338. // Ensure that creation of the management block and the
  3339. // object are dealt with separately such that we only provide a deleter
  3340. T* tmp = std::allocator_traits<Alloc>::allocate(alloc, copies);
  3341. if (!tmp) {
  3342. std::bad_alloc excep;
  3343. throw excep;
  3344. }
  3345. try {
  3346. std::allocator_traits<Alloc>::construct(
  3347. alloc,
  3348. std::addressof(*tmp),
  3349. std::forward<Args>(args)...);
  3350. return cl::pointer<T, detail::Deleter<Alloc>>(tmp, detail::Deleter<Alloc>{alloc, copies});
  3351. }
  3352. catch (std::bad_alloc& b)
  3353. {
  3354. std::allocator_traits<Alloc>::deallocate(alloc, tmp, copies);
  3355. throw;
  3356. }
  3357. }
  3358. template< class T, class SVMTrait, class... Args >
  3359. cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(Args... args)
  3360. {
  3361. SVMAllocator<T, SVMTrait> alloc;
  3362. return cl::allocate_pointer<T>(alloc, args...);
  3363. }
  3364. template< class T, class SVMTrait, class... Args >
  3365. cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(const cl::Context &c, Args... args)
  3366. {
  3367. SVMAllocator<T, SVMTrait> alloc(c);
  3368. return cl::allocate_pointer<T>(alloc, args...);
  3369. }
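// Usage sketch (illustrative): allocate a single SVM-backed object managed by
// a cl::pointer (a std::unique_ptr with the SVM Deleter above). Assumes the
// default context supports fine-grained SVM.
//
//   auto p = cl::allocate_svm<int, cl::SVMTraitFine<>>(42);
//   *p += 1;   // fine-grained SVM is directly host-accessible
//   // The Deleter calls the allocator's deallocate (clSVMFree) when p is destroyed.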
  3370. #endif // #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3371. /*! \brief Vector alias to simplify construction of coarse-grained SVM containers.
  3372. *
  3373. */
  3374. template < class T >
  3375. using coarse_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>>;
3376. /*! \brief Vector alias to simplify construction of fine-grained SVM containers.
  3377. *
  3378. */
  3379. template < class T >
  3380. using fine_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitFine<>>>;
3381. /*! \brief Vector alias to simplify construction of fine-grained SVM containers that support platform atomics.
  3382. *
  3383. */
  3384. template < class T >
  3385. using atomic_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitAtomic<>>>;
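// Usage sketch for the aliases above (illustrative): coarse-grained SVM
// containers must be mapped for host access and unmapped before kernel use.
//
//   cl::coarse_svm_vector<float> data(256, 0.0f);
//   // ... fill on the host, then cl::enqueueUnmapSVM(data) before enqueueing
//   // kernels that read it, and map it again to regain host access.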
  3386. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  3387. /*! \brief Class interface for Buffer Memory Objects.
  3388. *
  3389. * See Memory for details about copy semantics, etc.
  3390. *
  3391. * \see Memory
  3392. */
  3393. class Buffer : public Memory
  3394. {
  3395. public:
  3396. /*! \brief Constructs a Buffer in a specified context.
  3397. *
  3398. * Wraps clCreateBuffer().
  3399. *
  3400. * \param host_ptr Storage to be used if the CL_MEM_USE_HOST_PTR flag was
  3401. * specified. Note alignment & exclusivity requirements.
  3402. */
  3403. Buffer(
  3404. const Context& context,
  3405. cl_mem_flags flags,
  3406. size_type size,
  3407. void* host_ptr = NULL,
  3408. cl_int* err = NULL)
  3409. {
  3410. cl_int error;
  3411. object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
  3412. detail::errHandler(error, __CREATE_BUFFER_ERR);
  3413. if (err != NULL) {
  3414. *err = error;
  3415. }
  3416. }
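// Usage sketch (illustrative): a 1 MiB scratch buffer in the default context,
// and a read-only buffer initialised by copying from host memory.
//
//   cl::Buffer scratch(CL_MEM_READ_WRITE, 1024 * 1024);
//   std::vector<float> host(1024, 1.0f);
//   cl::Buffer input(CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
//                    host.size() * sizeof(float), host.data());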
  3417. /*! \brief Constructs a Buffer in the default context.
  3418. *
  3419. * Wraps clCreateBuffer().
  3420. *
  3421. * \param host_ptr Storage to be used if the CL_MEM_USE_HOST_PTR flag was
  3422. * specified. Note alignment & exclusivity requirements.
  3423. *
  3424. * \see Context::getDefault()
  3425. */
  3426. Buffer(
  3427. cl_mem_flags flags,
  3428. size_type size,
  3429. void* host_ptr = NULL,
  3430. cl_int* err = NULL)
  3431. {
  3432. cl_int error;
  3433. Context context = Context::getDefault(err);
  3434. object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
  3435. detail::errHandler(error, __CREATE_BUFFER_ERR);
  3436. if (err != NULL) {
  3437. *err = error;
  3438. }
  3439. }
  3440. /*!
  3441. * \brief Construct a Buffer from a host container via iterators.
  3442. * IteratorType must be random access.
  3443. * If useHostPtr is specified iterators must represent contiguous data.
  3444. */
  3445. template< typename IteratorType >
  3446. Buffer(
  3447. IteratorType startIterator,
  3448. IteratorType endIterator,
  3449. bool readOnly,
  3450. bool useHostPtr = false,
  3451. cl_int* err = NULL)
  3452. {
  3453. typedef typename std::iterator_traits<IteratorType>::value_type DataType;
  3454. cl_int error;
  3455. cl_mem_flags flags = 0;
  3456. if( readOnly ) {
  3457. flags |= CL_MEM_READ_ONLY;
  3458. }
  3459. else {
  3460. flags |= CL_MEM_READ_WRITE;
  3461. }
  3462. if( useHostPtr ) {
  3463. flags |= CL_MEM_USE_HOST_PTR;
  3464. }
  3465. size_type size = sizeof(DataType)*(endIterator - startIterator);
  3466. Context context = Context::getDefault(err);
  3467. if( useHostPtr ) {
  3468. object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
  3469. } else {
  3470. object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
  3471. }
  3472. detail::errHandler(error, __CREATE_BUFFER_ERR);
  3473. if (err != NULL) {
  3474. *err = error;
  3475. }
  3476. if( !useHostPtr ) {
  3477. error = cl::copy(startIterator, endIterator, *this);
  3478. detail::errHandler(error, __CREATE_BUFFER_ERR);
  3479. if (err != NULL) {
  3480. *err = error;
  3481. }
  3482. }
  3483. }
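// Usage sketch (illustrative): create a read-only device buffer directly from
// a host container; because useHostPtr is false the data is copied via cl::copy.
//
//   std::vector<int> values = {1, 2, 3, 4};
//   cl_int err;
//   cl::Buffer buf(values.begin(), values.end(), true, false, &err);  // readOnly, !useHostPtr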
  3484. /*!
  3485. * \brief Construct a Buffer from a host container via iterators using a specified context.
  3486. * IteratorType must be random access.
  3487. * If useHostPtr is specified iterators must represent contiguous data.
  3488. */
  3489. template< typename IteratorType >
  3490. Buffer(const Context &context, IteratorType startIterator, IteratorType endIterator,
  3491. bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
  3492. /*!
  3493. * \brief Construct a Buffer from a host container via iterators using a specified queue.
3494. * IteratorType must be random access; if useHostPtr is specified, iterators must represent contiguous data.
  3495. */
  3496. template< typename IteratorType >
  3497. Buffer(const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator,
  3498. bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
  3499. //! \brief Default constructor - initializes to NULL.
  3500. Buffer() : Memory() { }
  3501. /*! \brief Constructor from cl_mem - takes ownership.
  3502. *
  3503. * \param retainObject will cause the constructor to retain its cl object.
  3504. * Defaults to false to maintain compatibility with earlier versions.
  3505. *
  3506. * See Memory for further details.
  3507. */
  3508. explicit Buffer(const cl_mem& buffer, bool retainObject = false) :
  3509. Memory(buffer, retainObject) { }
  3510. /*! \brief Assignment from cl_mem - performs shallow copy.
  3511. *
  3512. * See Memory for further details.
  3513. */
  3514. Buffer& operator = (const cl_mem& rhs)
  3515. {
  3516. Memory::operator=(rhs);
  3517. return *this;
  3518. }
  3519. /*! \brief Copy constructor to forward copy to the superclass correctly.
  3520. * Required for MSVC.
  3521. */
  3522. Buffer(const Buffer& buf) : Memory(buf) {}
  3523. /*! \brief Copy assignment to forward copy to the superclass correctly.
  3524. * Required for MSVC.
  3525. */
  3526. Buffer& operator = (const Buffer &buf)
  3527. {
  3528. Memory::operator=(buf);
  3529. return *this;
  3530. }
  3531. /*! \brief Move constructor to forward move to the superclass correctly.
  3532. * Required for MSVC.
  3533. */
  3534. Buffer(Buffer&& buf) CL_HPP_NOEXCEPT_ : Memory(std::move(buf)) {}
  3535. /*! \brief Move assignment to forward move to the superclass correctly.
  3536. * Required for MSVC.
  3537. */
  3538. Buffer& operator = (Buffer &&buf)
  3539. {
  3540. Memory::operator=(std::move(buf));
  3541. return *this;
  3542. }
  3543. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  3544. /*! \brief Creates a new buffer object from this.
  3545. *
  3546. * Wraps clCreateSubBuffer().
  3547. */
  3548. Buffer createSubBuffer(
  3549. cl_mem_flags flags,
  3550. cl_buffer_create_type buffer_create_type,
  3551. const void * buffer_create_info,
  3552. cl_int * err = NULL)
  3553. {
  3554. Buffer result;
  3555. cl_int error;
  3556. result.object_ = ::clCreateSubBuffer(
  3557. object_,
  3558. flags,
  3559. buffer_create_type,
  3560. buffer_create_info,
  3561. &error);
  3562. detail::errHandler(error, __CREATE_SUBBUFFER_ERR);
  3563. if (err != NULL) {
  3564. *err = error;
  3565. }
  3566. return result;
  3567. }
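// Usage sketch (illustrative): carve a 128-byte read-only sub-buffer starting
// at byte offset 64 of an existing buffer. The offset must satisfy the
// device's CL_DEVICE_MEM_BASE_ADDR_ALIGN requirement.
//
//   cl_buffer_region region = {64, 128};   // origin, size (bytes)
//   cl::Buffer sub = buf.createSubBuffer(
//       CL_MEM_READ_ONLY, CL_BUFFER_CREATE_TYPE_REGION, &region);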
  3568. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
  3569. };
  3570. #if defined (CL_HPP_USE_DX_INTEROP)
  3571. /*! \brief Class interface for creating OpenCL buffers from ID3D10Buffer's.
  3572. *
  3573. * This is provided to facilitate interoperability with Direct3D.
  3574. *
  3575. * See Memory for details about copy semantics, etc.
  3576. *
  3577. * \see Memory
  3578. */
  3579. class BufferD3D10 : public Buffer
  3580. {
  3581. public:
  3582. /*! \brief Constructs a BufferD3D10, in a specified context, from a
  3583. * given ID3D10Buffer.
  3584. *
  3585. * Wraps clCreateFromD3D10BufferKHR().
  3586. */
  3587. BufferD3D10(
  3588. const Context& context,
  3589. cl_mem_flags flags,
  3590. ID3D10Buffer* bufobj,
3591. cl_int * err = NULL)
  3592. {
  3593. typedef CL_API_ENTRY cl_mem (CL_API_CALL *PFN_clCreateFromD3D10BufferKHR)(
  3594. cl_context context, cl_mem_flags flags, ID3D10Buffer* buffer,
  3595. cl_int* errcode_ret);
3596. PFN_clCreateFromD3D10BufferKHR pfn_clCreateFromD3D10BufferKHR = nullptr;
  3597. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  3598. vector<cl_context_properties> props = context.getInfo<CL_CONTEXT_PROPERTIES>();
3599. cl_platform_id platform = nullptr;
3600. for( size_type i = 0; i < props.size(); ++i ) {
3601. if( props[i] == CL_CONTEXT_PLATFORM ) {
3602. platform = reinterpret_cast<cl_platform_id>(props[i+1]);
  3603. }
  3604. }
  3605. CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateFromD3D10BufferKHR);
  3606. #elif CL_HPP_TARGET_OPENCL_VERSION >= 110
  3607. CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateFromD3D10BufferKHR);
  3608. #endif
  3609. cl_int error;
  3610. object_ = pfn_clCreateFromD3D10BufferKHR(
  3611. context(),
  3612. flags,
  3613. bufobj,
  3614. &error);
  3615. detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
  3616. if (err != NULL) {
  3617. *err = error;
  3618. }
  3619. }
  3620. //! \brief Default constructor - initializes to NULL.
  3621. BufferD3D10() : Buffer() { }
  3622. /*! \brief Constructor from cl_mem - takes ownership.
  3623. *
  3624. * \param retainObject will cause the constructor to retain its cl object.
  3625. * Defaults to false to maintain compatibility with
  3626. * earlier versions.
  3627. * See Memory for further details.
  3628. */
  3629. explicit BufferD3D10(const cl_mem& buffer, bool retainObject = false) :
  3630. Buffer(buffer, retainObject) { }
  3631. /*! \brief Assignment from cl_mem - performs shallow copy.
  3632. *
  3633. * See Memory for further details.
  3634. */
  3635. BufferD3D10& operator = (const cl_mem& rhs)
  3636. {
  3637. Buffer::operator=(rhs);
  3638. return *this;
  3639. }
  3640. /*! \brief Copy constructor to forward copy to the superclass correctly.
  3641. * Required for MSVC.
  3642. */
  3643. BufferD3D10(const BufferD3D10& buf) :
  3644. Buffer(buf) {}
  3645. /*! \brief Copy assignment to forward copy to the superclass correctly.
  3646. * Required for MSVC.
  3647. */
  3648. BufferD3D10& operator = (const BufferD3D10 &buf)
  3649. {
  3650. Buffer::operator=(buf);
  3651. return *this;
  3652. }
  3653. /*! \brief Move constructor to forward move to the superclass correctly.
  3654. * Required for MSVC.
  3655. */
  3656. BufferD3D10(BufferD3D10&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
  3657. /*! \brief Move assignment to forward move to the superclass correctly.
  3658. * Required for MSVC.
  3659. */
  3660. BufferD3D10& operator = (BufferD3D10 &&buf)
  3661. {
  3662. Buffer::operator=(std::move(buf));
  3663. return *this;
  3664. }
  3665. };
  3666. #endif
  3667. /*! \brief Class interface for GL Buffer Memory Objects.
  3668. *
  3669. * This is provided to facilitate interoperability with OpenGL.
  3670. *
  3671. * See Memory for details about copy semantics, etc.
  3672. *
  3673. * \see Memory
  3674. */
  3675. class BufferGL : public Buffer
  3676. {
  3677. public:
  3678. /*! \brief Constructs a BufferGL in a specified context, from a given
  3679. * GL buffer.
  3680. *
  3681. * Wraps clCreateFromGLBuffer().
  3682. */
  3683. BufferGL(
  3684. const Context& context,
  3685. cl_mem_flags flags,
  3686. cl_GLuint bufobj,
  3687. cl_int * err = NULL)
  3688. {
  3689. cl_int error;
  3690. object_ = ::clCreateFromGLBuffer(
  3691. context(),
  3692. flags,
  3693. bufobj,
  3694. &error);
  3695. detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
  3696. if (err != NULL) {
  3697. *err = error;
  3698. }
  3699. }
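// Usage sketch (illustrative): share an existing OpenGL vertex buffer object
// with OpenCL. Assumes `vbo` is a valid GLuint and the context was created
// with CL/GL sharing properties.
//
//   cl::BufferGL clVbo(context, CL_MEM_READ_WRITE, vbo);
//   // Acquire the object (clEnqueueAcquireGLObjects) before kernels use it
//   // and release it again before OpenGL touches the VBO.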
  3700. //! \brief Default constructor - initializes to NULL.
  3701. BufferGL() : Buffer() { }
  3702. /*! \brief Constructor from cl_mem - takes ownership.
  3703. *
  3704. * \param retainObject will cause the constructor to retain its cl object.
  3705. * Defaults to false to maintain compatibility with
  3706. * earlier versions.
  3707. * See Memory for further details.
  3708. */
  3709. explicit BufferGL(const cl_mem& buffer, bool retainObject = false) :
  3710. Buffer(buffer, retainObject) { }
  3711. /*! \brief Assignment from cl_mem - performs shallow copy.
  3712. *
  3713. * See Memory for further details.
  3714. */
  3715. BufferGL& operator = (const cl_mem& rhs)
  3716. {
  3717. Buffer::operator=(rhs);
  3718. return *this;
  3719. }
  3720. /*! \brief Copy constructor to forward copy to the superclass correctly.
  3721. * Required for MSVC.
  3722. */
  3723. BufferGL(const BufferGL& buf) : Buffer(buf) {}
  3724. /*! \brief Copy assignment to forward copy to the superclass correctly.
  3725. * Required for MSVC.
  3726. */
  3727. BufferGL& operator = (const BufferGL &buf)
  3728. {
  3729. Buffer::operator=(buf);
  3730. return *this;
  3731. }
  3732. /*! \brief Move constructor to forward move to the superclass correctly.
  3733. * Required for MSVC.
  3734. */
  3735. BufferGL(BufferGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
  3736. /*! \brief Move assignment to forward move to the superclass correctly.
  3737. * Required for MSVC.
  3738. */
  3739. BufferGL& operator = (BufferGL &&buf)
  3740. {
  3741. Buffer::operator=(std::move(buf));
  3742. return *this;
  3743. }
  3744. //! \brief Wrapper for clGetGLObjectInfo().
  3745. cl_int getObjectInfo(
  3746. cl_gl_object_type *type,
  3747. cl_GLuint * gl_object_name)
  3748. {
  3749. return detail::errHandler(
  3750. ::clGetGLObjectInfo(object_,type,gl_object_name),
  3751. __GET_GL_OBJECT_INFO_ERR);
  3752. }
  3753. };
  3754. /*! \brief Class interface for GL Render Buffer Memory Objects.
  3755. *
  3756. * This is provided to facilitate interoperability with OpenGL.
  3757. *
  3758. * See Memory for details about copy semantics, etc.
  3759. *
  3760. * \see Memory
  3761. */
  3762. class BufferRenderGL : public Buffer
  3763. {
  3764. public:
  3765. /*! \brief Constructs a BufferRenderGL in a specified context, from a given
  3766. * GL Renderbuffer.
  3767. *
  3768. * Wraps clCreateFromGLRenderbuffer().
  3769. */
  3770. BufferRenderGL(
  3771. const Context& context,
  3772. cl_mem_flags flags,
  3773. cl_GLuint bufobj,
  3774. cl_int * err = NULL)
  3775. {
  3776. cl_int error;
  3777. object_ = ::clCreateFromGLRenderbuffer(
  3778. context(),
  3779. flags,
  3780. bufobj,
  3781. &error);
  3782. detail::errHandler(error, __CREATE_GL_RENDER_BUFFER_ERR);
  3783. if (err != NULL) {
  3784. *err = error;
  3785. }
  3786. }
  3787. //! \brief Default constructor - initializes to NULL.
  3788. BufferRenderGL() : Buffer() { }
  3789. /*! \brief Constructor from cl_mem - takes ownership.
  3790. *
  3791. * \param retainObject will cause the constructor to retain its cl object.
  3792. * Defaults to false to maintain compatibility with
  3793. * earlier versions.
  3794. * See Memory for further details.
  3795. */
  3796. explicit BufferRenderGL(const cl_mem& buffer, bool retainObject = false) :
  3797. Buffer(buffer, retainObject) { }
  3798. /*! \brief Assignment from cl_mem - performs shallow copy.
  3799. *
  3800. * See Memory for further details.
  3801. */
  3802. BufferRenderGL& operator = (const cl_mem& rhs)
  3803. {
  3804. Buffer::operator=(rhs);
  3805. return *this;
  3806. }
  3807. /*! \brief Copy constructor to forward copy to the superclass correctly.
  3808. * Required for MSVC.
  3809. */
  3810. BufferRenderGL(const BufferRenderGL& buf) : Buffer(buf) {}
  3811. /*! \brief Copy assignment to forward copy to the superclass correctly.
  3812. * Required for MSVC.
  3813. */
  3814. BufferRenderGL& operator = (const BufferRenderGL &buf)
  3815. {
  3816. Buffer::operator=(buf);
  3817. return *this;
  3818. }
  3819. /*! \brief Move constructor to forward move to the superclass correctly.
  3820. * Required for MSVC.
  3821. */
  3822. BufferRenderGL(BufferRenderGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
  3823. /*! \brief Move assignment to forward move to the superclass correctly.
  3824. * Required for MSVC.
  3825. */
  3826. BufferRenderGL& operator = (BufferRenderGL &&buf)
  3827. {
  3828. Buffer::operator=(std::move(buf));
  3829. return *this;
  3830. }
  3831. //! \brief Wrapper for clGetGLObjectInfo().
  3832. cl_int getObjectInfo(
  3833. cl_gl_object_type *type,
  3834. cl_GLuint * gl_object_name)
  3835. {
  3836. return detail::errHandler(
  3837. ::clGetGLObjectInfo(object_,type,gl_object_name),
  3838. __GET_GL_OBJECT_INFO_ERR);
  3839. }
  3840. };
  3841. /*! \brief C++ base class for Image Memory objects.
  3842. *
  3843. * See Memory for details about copy semantics, etc.
  3844. *
  3845. * \see Memory
  3846. */
  3847. class Image : public Memory
  3848. {
  3849. protected:
  3850. //! \brief Default constructor - initializes to NULL.
  3851. Image() : Memory() { }
  3852. /*! \brief Constructor from cl_mem - takes ownership.
  3853. *
  3854. * \param retainObject will cause the constructor to retain its cl object.
  3855. * Defaults to false to maintain compatibility with
  3856. * earlier versions.
  3857. * See Memory for further details.
  3858. */
  3859. explicit Image(const cl_mem& image, bool retainObject = false) :
  3860. Memory(image, retainObject) { }
  3861. /*! \brief Assignment from cl_mem - performs shallow copy.
  3862. *
  3863. * See Memory for further details.
  3864. */
  3865. Image& operator = (const cl_mem& rhs)
  3866. {
  3867. Memory::operator=(rhs);
  3868. return *this;
  3869. }
  3870. /*! \brief Copy constructor to forward copy to the superclass correctly.
  3871. * Required for MSVC.
  3872. */
  3873. Image(const Image& img) : Memory(img) {}
  3874. /*! \brief Copy assignment to forward copy to the superclass correctly.
  3875. * Required for MSVC.
  3876. */
  3877. Image& operator = (const Image &img)
  3878. {
  3879. Memory::operator=(img);
  3880. return *this;
  3881. }
  3882. /*! \brief Move constructor to forward move to the superclass correctly.
  3883. * Required for MSVC.
  3884. */
  3885. Image(Image&& img) CL_HPP_NOEXCEPT_ : Memory(std::move(img)) {}
  3886. /*! \brief Move assignment to forward move to the superclass correctly.
  3887. * Required for MSVC.
  3888. */
  3889. Image& operator = (Image &&img)
  3890. {
  3891. Memory::operator=(std::move(img));
  3892. return *this;
  3893. }
  3894. public:
  3895. //! \brief Wrapper for clGetImageInfo().
  3896. template <typename T>
  3897. cl_int getImageInfo(cl_image_info name, T* param) const
  3898. {
  3899. return detail::errHandler(
  3900. detail::getInfo(&::clGetImageInfo, object_, name, param),
  3901. __GET_IMAGE_INFO_ERR);
  3902. }
  3903. //! \brief Wrapper for clGetImageInfo() that returns by value.
  3904. template <cl_int name> typename
  3905. detail::param_traits<detail::cl_image_info, name>::param_type
  3906. getImageInfo(cl_int* err = NULL) const
  3907. {
  3908. typename detail::param_traits<
  3909. detail::cl_image_info, name>::param_type param;
  3910. cl_int result = getImageInfo(name, &param);
  3911. if (err != NULL) {
  3912. *err = result;
  3913. }
  3914. return param;
  3915. }
  3916. };
  3917. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  3918. /*! \brief Class interface for 1D Image Memory objects.
  3919. *
  3920. * See Memory for details about copy semantics, etc.
  3921. *
  3922. * \see Memory
  3923. */
  3924. class Image1D : public Image
  3925. {
  3926. public:
  3927. /*! \brief Constructs a 1D Image in a specified context.
  3928. *
  3929. * Wraps clCreateImage().
  3930. */
  3931. Image1D(
  3932. const Context& context,
  3933. cl_mem_flags flags,
  3934. ImageFormat format,
  3935. size_type width,
  3936. void* host_ptr = NULL,
  3937. cl_int* err = NULL)
  3938. {
  3939. cl_int error;
  3940. cl_image_desc desc =
  3941. {
  3942. CL_MEM_OBJECT_IMAGE1D,
  3943. width,
  3944. 0, 0, 0, 0, 0, 0, 0, 0
  3945. };
  3946. object_ = ::clCreateImage(
  3947. context(),
  3948. flags,
  3949. &format,
  3950. &desc,
  3951. host_ptr,
  3952. &error);
  3953. detail::errHandler(error, __CREATE_IMAGE_ERR);
  3954. if (err != NULL) {
  3955. *err = error;
  3956. }
  3957. }
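// Usage sketch (illustrative): a 1D image of 1024 RGBA float texels.
//
//   cl::ImageFormat fmt(CL_RGBA, CL_FLOAT);
//   cl::Image1D img(context, CL_MEM_READ_ONLY, fmt, 1024);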
  3958. //! \brief Default constructor - initializes to NULL.
  3959. Image1D() { }
  3960. /*! \brief Constructor from cl_mem - takes ownership.
  3961. *
  3962. * \param retainObject will cause the constructor to retain its cl object.
  3963. * Defaults to false to maintain compatibility with
  3964. * earlier versions.
  3965. * See Memory for further details.
  3966. */
  3967. explicit Image1D(const cl_mem& image1D, bool retainObject = false) :
  3968. Image(image1D, retainObject) { }
  3969. /*! \brief Assignment from cl_mem - performs shallow copy.
  3970. *
  3971. * See Memory for further details.
  3972. */
  3973. Image1D& operator = (const cl_mem& rhs)
  3974. {
  3975. Image::operator=(rhs);
  3976. return *this;
  3977. }
  3978. /*! \brief Copy constructor to forward copy to the superclass correctly.
  3979. * Required for MSVC.
  3980. */
  3981. Image1D(const Image1D& img) : Image(img) {}
  3982. /*! \brief Copy assignment to forward copy to the superclass correctly.
  3983. * Required for MSVC.
  3984. */
  3985. Image1D& operator = (const Image1D &img)
  3986. {
  3987. Image::operator=(img);
  3988. return *this;
  3989. }
  3990. /*! \brief Move constructor to forward move to the superclass correctly.
  3991. * Required for MSVC.
  3992. */
  3993. Image1D(Image1D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
  3994. /*! \brief Move assignment to forward move to the superclass correctly.
  3995. * Required for MSVC.
  3996. */
  3997. Image1D& operator = (Image1D &&img)
  3998. {
  3999. Image::operator=(std::move(img));
  4000. return *this;
  4001. }
  4002. };
  4003. /*! \class Image1DBuffer
  4004. * \brief Image interface for 1D buffer images.
  4005. */
  4006. class Image1DBuffer : public Image
  4007. {
  4008. public:
  4009. Image1DBuffer(
  4010. const Context& context,
  4011. cl_mem_flags flags,
  4012. ImageFormat format,
  4013. size_type width,
  4014. const Buffer &buffer,
  4015. cl_int* err = NULL)
  4016. {
  4017. cl_int error;
  4018. cl_image_desc desc =
  4019. {
  4020. CL_MEM_OBJECT_IMAGE1D_BUFFER,
  4021. width,
  4022. 0, 0, 0, 0, 0, 0, 0,
  4023. buffer()
  4024. };
  4025. object_ = ::clCreateImage(
  4026. context(),
  4027. flags,
  4028. &format,
  4029. &desc,
  4030. NULL,
  4031. &error);
  4032. detail::errHandler(error, __CREATE_IMAGE_ERR);
  4033. if (err != NULL) {
  4034. *err = error;
  4035. }
  4036. }
  4037. Image1DBuffer() { }
  4038. /*! \brief Constructor from cl_mem - takes ownership.
  4039. *
  4040. * \param retainObject will cause the constructor to retain its cl object.
  4041. * Defaults to false to maintain compatibility with
  4042. * earlier versions.
  4043. * See Memory for further details.
  4044. */
  4045. explicit Image1DBuffer(const cl_mem& image1D, bool retainObject = false) :
  4046. Image(image1D, retainObject) { }
  4047. Image1DBuffer& operator = (const cl_mem& rhs)
  4048. {
  4049. Image::operator=(rhs);
  4050. return *this;
  4051. }
  4052. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4053. * Required for MSVC.
  4054. */
  4055. Image1DBuffer(const Image1DBuffer& img) : Image(img) {}
  4056. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4057. * Required for MSVC.
  4058. */
  4059. Image1DBuffer& operator = (const Image1DBuffer &img)
  4060. {
  4061. Image::operator=(img);
  4062. return *this;
  4063. }
  4064. /*! \brief Move constructor to forward move to the superclass correctly.
  4065. * Required for MSVC.
  4066. */
  4067. Image1DBuffer(Image1DBuffer&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
  4068. /*! \brief Move assignment to forward move to the superclass correctly.
  4069. * Required for MSVC.
  4070. */
  4071. Image1DBuffer& operator = (Image1DBuffer &&img)
  4072. {
  4073. Image::operator=(std::move(img));
  4074. return *this;
  4075. }
  4076. };
  4077. /*! \class Image1DArray
  4078. * \brief Image interface for arrays of 1D images.
  4079. */
  4080. class Image1DArray : public Image
  4081. {
  4082. public:
  4083. Image1DArray(
  4084. const Context& context,
  4085. cl_mem_flags flags,
  4086. ImageFormat format,
  4087. size_type arraySize,
  4088. size_type width,
  4089. size_type rowPitch,
  4090. void* host_ptr = NULL,
  4091. cl_int* err = NULL)
  4092. {
  4093. cl_int error;
  4094. cl_image_desc desc =
  4095. {
  4096. CL_MEM_OBJECT_IMAGE1D_ARRAY,
  4097. width,
  4098. 0, 0, // height, depth (unused)
  4099. arraySize,
  4100. rowPitch,
  4101. 0, 0, 0, 0
  4102. };
  4103. object_ = ::clCreateImage(
  4104. context(),
  4105. flags,
  4106. &format,
  4107. &desc,
  4108. host_ptr,
  4109. &error);
  4110. detail::errHandler(error, __CREATE_IMAGE_ERR);
  4111. if (err != NULL) {
  4112. *err = error;
  4113. }
  4114. }
  4115. Image1DArray() { }
  4116. /*! \brief Constructor from cl_mem - takes ownership.
  4117. *
  4118. * \param retainObject will cause the constructor to retain its cl object.
  4119. * Defaults to false to maintain compatibility with
  4120. * earlier versions.
  4121. * See Memory for further details.
  4122. */
  4123. explicit Image1DArray(const cl_mem& imageArray, bool retainObject = false) :
  4124. Image(imageArray, retainObject) { }
  4125. Image1DArray& operator = (const cl_mem& rhs)
  4126. {
  4127. Image::operator=(rhs);
  4128. return *this;
  4129. }
  4130. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4131. * Required for MSVC.
  4132. */
  4133. Image1DArray(const Image1DArray& img) : Image(img) {}
  4134. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4135. * Required for MSVC.
  4136. */
  4137. Image1DArray& operator = (const Image1DArray &img)
  4138. {
  4139. Image::operator=(img);
  4140. return *this;
  4141. }
  4142. /*! \brief Move constructor to forward move to the superclass correctly.
  4143. * Required for MSVC.
  4144. */
  4145. Image1DArray(Image1DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
  4146. /*! \brief Move assignment to forward move to the superclass correctly.
  4147. * Required for MSVC.
  4148. */
  4149. Image1DArray& operator = (Image1DArray &&img)
  4150. {
  4151. Image::operator=(std::move(img));
  4152. return *this;
  4153. }
  4154. };
  4155. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  4156. /*! \brief Class interface for 2D Image Memory objects.
  4157. *
  4158. * See Memory for details about copy semantics, etc.
  4159. *
  4160. * \see Memory
  4161. */
  4162. class Image2D : public Image
  4163. {
  4164. public:
  4165. /*! \brief Constructs a 2D Image in a specified context.
  4166. *
  4167. * Wraps clCreateImage().
  4168. */
  4169. Image2D(
  4170. const Context& context,
  4171. cl_mem_flags flags,
  4172. ImageFormat format,
  4173. size_type width,
  4174. size_type height,
  4175. size_type row_pitch = 0,
  4176. void* host_ptr = NULL,
  4177. cl_int* err = NULL)
  4178. {
  4179. cl_int error;
  4180. bool useCreateImage;
  4181. #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
  4182. // Run-time decision based on the actual platform
  4183. {
  4184. cl_uint version = detail::getContextPlatformVersion(context());
  4185. useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
  4186. }
  4187. #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
  4188. useCreateImage = true;
  4189. #else
  4190. useCreateImage = false;
  4191. #endif
  4192. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  4193. if (useCreateImage)
  4194. {
  4195. cl_image_desc desc =
  4196. {
  4197. CL_MEM_OBJECT_IMAGE2D,
  4198. width,
  4199. height,
  4200. 0, 0, // depth, array size (unused)
  4201. row_pitch,
  4202. 0, 0, 0, 0
  4203. };
  4204. object_ = ::clCreateImage(
  4205. context(),
  4206. flags,
  4207. &format,
  4208. &desc,
  4209. host_ptr,
  4210. &error);
  4211. detail::errHandler(error, __CREATE_IMAGE_ERR);
  4212. if (err != NULL) {
  4213. *err = error;
  4214. }
  4215. }
  4216. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  4217. #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
  4218. if (!useCreateImage)
  4219. {
  4220. object_ = ::clCreateImage2D(
4221. context(), flags, &format, width, height, row_pitch, host_ptr, &error);
  4222. detail::errHandler(error, __CREATE_IMAGE2D_ERR);
  4223. if (err != NULL) {
  4224. *err = error;
  4225. }
  4226. }
  4227. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
  4228. }
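// Usage sketch (illustrative): a 640x480 RGBA 8-bit image initialised by
// copying a tightly packed host buffer (row_pitch 0 lets the runtime infer it).
//
//   std::vector<cl_uchar> pixels(640 * 480 * 4);
//   cl::Image2D img(context, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
//                   cl::ImageFormat(CL_RGBA, CL_UNORM_INT8),
//                   640, 480, 0, pixels.data());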
  4229. #if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
  4230. /*! \brief Constructs a 2D Image from a buffer.
  4231. * \note This will share storage with the underlying buffer.
  4232. *
  4233. * Wraps clCreateImage().
  4234. */
  4235. Image2D(
  4236. const Context& context,
  4237. ImageFormat format,
  4238. const Buffer &sourceBuffer,
  4239. size_type width,
  4240. size_type height,
  4241. size_type row_pitch = 0,
  4242. cl_int* err = nullptr)
  4243. {
  4244. cl_int error;
  4245. cl_image_desc desc =
  4246. {
  4247. CL_MEM_OBJECT_IMAGE2D,
  4248. width,
  4249. height,
  4250. 0, 0, // depth, array size (unused)
  4251. row_pitch,
  4252. 0, 0, 0,
  4253. // Use buffer as input to image
  4254. sourceBuffer()
  4255. };
  4256. object_ = ::clCreateImage(
  4257. context(),
  4258. 0, // flags inherited from buffer
  4259. &format,
  4260. &desc,
  4261. nullptr,
  4262. &error);
  4263. detail::errHandler(error, __CREATE_IMAGE_ERR);
  4264. if (err != nullptr) {
  4265. *err = error;
  4266. }
  4267. }
  4268. #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
  4269. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  4270. /*! \brief Constructs a 2D Image from an image.
  4271. * \note This will share storage with the underlying image but may
  4272. * reinterpret the channel order and type.
  4273. *
4274. * The image will be created with a descriptor matching the source image.
  4275. *
  4276. * \param order is the channel order to reinterpret the image data as.
  4277. * The channel order may differ as described in the OpenCL
  4278. * 2.0 API specification.
  4279. *
  4280. * Wraps clCreateImage().
  4281. */
  4282. Image2D(
  4283. const Context& context,
  4284. cl_channel_order order,
  4285. const Image &sourceImage,
  4286. cl_int* err = nullptr)
  4287. {
  4288. cl_int error;
  4289. // Descriptor fields have to match source image
  4290. size_type sourceWidth =
  4291. sourceImage.getImageInfo<CL_IMAGE_WIDTH>();
  4292. size_type sourceHeight =
  4293. sourceImage.getImageInfo<CL_IMAGE_HEIGHT>();
  4294. size_type sourceRowPitch =
  4295. sourceImage.getImageInfo<CL_IMAGE_ROW_PITCH>();
  4296. cl_uint sourceNumMIPLevels =
  4297. sourceImage.getImageInfo<CL_IMAGE_NUM_MIP_LEVELS>();
  4298. cl_uint sourceNumSamples =
  4299. sourceImage.getImageInfo<CL_IMAGE_NUM_SAMPLES>();
  4300. cl_image_format sourceFormat =
  4301. sourceImage.getImageInfo<CL_IMAGE_FORMAT>();
  4302. // Update only the channel order.
  4303. // Channel format inherited from source.
  4304. sourceFormat.image_channel_order = order;
  4305. cl_image_desc desc =
  4306. {
  4307. CL_MEM_OBJECT_IMAGE2D,
  4308. sourceWidth,
  4309. sourceHeight,
  4310. 0, 0, // depth (unused), array size (unused)
  4311. sourceRowPitch,
  4312. 0, // slice pitch (unused)
  4313. sourceNumMIPLevels,
  4314. sourceNumSamples,
4315. // Use the source image as the mem_object for the new image
  4316. sourceImage()
  4317. };
  4318. object_ = ::clCreateImage(
  4319. context(),
  4320. 0, // flags should be inherited from mem_object
  4321. &sourceFormat,
  4322. &desc,
  4323. nullptr,
  4324. &error);
  4325. detail::errHandler(error, __CREATE_IMAGE_ERR);
  4326. if (err != nullptr) {
  4327. *err = error;
  4328. }
  4329. }
  4330. #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
  4331. //! \brief Default constructor - initializes to NULL.
  4332. Image2D() { }
  4333. /*! \brief Constructor from cl_mem - takes ownership.
  4334. *
  4335. * \param retainObject will cause the constructor to retain its cl object.
  4336. * Defaults to false to maintain compatibility with
  4337. * earlier versions.
  4338. * See Memory for further details.
  4339. */
  4340. explicit Image2D(const cl_mem& image2D, bool retainObject = false) :
  4341. Image(image2D, retainObject) { }
  4342. /*! \brief Assignment from cl_mem - performs shallow copy.
  4343. *
  4344. * See Memory for further details.
  4345. */
  4346. Image2D& operator = (const cl_mem& rhs)
  4347. {
  4348. Image::operator=(rhs);
  4349. return *this;
  4350. }
  4351. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4352. * Required for MSVC.
  4353. */
  4354. Image2D(const Image2D& img) : Image(img) {}
  4355. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4356. * Required for MSVC.
  4357. */
  4358. Image2D& operator = (const Image2D &img)
  4359. {
  4360. Image::operator=(img);
  4361. return *this;
  4362. }
  4363. /*! \brief Move constructor to forward move to the superclass correctly.
  4364. * Required for MSVC.
  4365. */
  4366. Image2D(Image2D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
  4367. /*! \brief Move assignment to forward move to the superclass correctly.
  4368. * Required for MSVC.
  4369. */
  4370. Image2D& operator = (Image2D &&img)
  4371. {
  4372. Image::operator=(std::move(img));
  4373. return *this;
  4374. }
  4375. };
  4376. #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  4377. /*! \brief Class interface for GL 2D Image Memory objects.
  4378. *
  4379. * This is provided to facilitate interoperability with OpenGL.
  4380. *
  4381. * See Memory for details about copy semantics, etc.
  4382. *
  4383. * \see Memory
  4384. * \note Deprecated for OpenCL 1.2. Please use ImageGL instead.
  4385. */
  4386. class CL_EXT_PREFIX__VERSION_1_1_DEPRECATED Image2DGL : public Image2D
  4387. {
  4388. public:
  4389. /*! \brief Constructs an Image2DGL in a specified context, from a given
  4390. * GL Texture.
  4391. *
  4392. * Wraps clCreateFromGLTexture2D().
  4393. */
  4394. Image2DGL(
  4395. const Context& context,
  4396. cl_mem_flags flags,
  4397. cl_GLenum target,
  4398. cl_GLint miplevel,
  4399. cl_GLuint texobj,
  4400. cl_int * err = NULL)
  4401. {
  4402. cl_int error;
  4403. object_ = ::clCreateFromGLTexture2D(
  4404. context(),
  4405. flags,
  4406. target,
  4407. miplevel,
  4408. texobj,
  4409. &error);
  4410. detail::errHandler(error, __CREATE_GL_TEXTURE_2D_ERR);
  4411. if (err != NULL) {
  4412. *err = error;
  4413. }
  4414. }
  4415. //! \brief Default constructor - initializes to NULL.
  4416. Image2DGL() : Image2D() { }
  4417. /*! \brief Constructor from cl_mem - takes ownership.
  4418. *
  4419. * \param retainObject will cause the constructor to retain its cl object.
  4420. * Defaults to false to maintain compatibility with
  4421. * earlier versions.
  4422. * See Memory for further details.
  4423. */
  4424. explicit Image2DGL(const cl_mem& image, bool retainObject = false) :
  4425. Image2D(image, retainObject) { }
  4426. /*! \brief Assignment from cl_mem - performs shallow copy.
4427. *
  4428. * See Memory for further details.
  4429. */
  4430. Image2DGL& operator = (const cl_mem& rhs)
  4431. {
  4432. Image2D::operator=(rhs);
  4433. return *this;
  4434. }
  4435. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4436. * Required for MSVC.
  4437. */
  4438. Image2DGL(const Image2DGL& img) : Image2D(img) {}
  4439. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4440. * Required for MSVC.
  4441. */
  4442. Image2DGL& operator = (const Image2DGL &img)
  4443. {
  4444. Image2D::operator=(img);
  4445. return *this;
  4446. }
  4447. /*! \brief Move constructor to forward move to the superclass correctly.
  4448. * Required for MSVC.
  4449. */
  4450. Image2DGL(Image2DGL&& img) CL_HPP_NOEXCEPT_ : Image2D(std::move(img)) {}
  4451. /*! \brief Move assignment to forward move to the superclass correctly.
  4452. * Required for MSVC.
  4453. */
  4454. Image2DGL& operator = (Image2DGL &&img)
  4455. {
  4456. Image2D::operator=(std::move(img));
  4457. return *this;
  4458. }
  4459. } CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
  4460. #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
  4461. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  4462. /*! \class Image2DArray
  4463. * \brief Image interface for arrays of 2D images.
  4464. */
  4465. class Image2DArray : public Image
  4466. {
  4467. public:
  4468. Image2DArray(
  4469. const Context& context,
  4470. cl_mem_flags flags,
  4471. ImageFormat format,
  4472. size_type arraySize,
  4473. size_type width,
  4474. size_type height,
  4475. size_type rowPitch,
  4476. size_type slicePitch,
  4477. void* host_ptr = NULL,
  4478. cl_int* err = NULL)
  4479. {
  4480. cl_int error;
  4481. cl_image_desc desc =
  4482. {
  4483. CL_MEM_OBJECT_IMAGE2D_ARRAY,
  4484. width,
  4485. height,
  4486. 0, // depth (unused)
  4487. arraySize,
  4488. rowPitch,
  4489. slicePitch,
  4490. 0, 0, 0
  4491. };
  4492. object_ = ::clCreateImage(
  4493. context(),
  4494. flags,
  4495. &format,
  4496. &desc,
  4497. host_ptr,
  4498. &error);
  4499. detail::errHandler(error, __CREATE_IMAGE_ERR);
  4500. if (err != NULL) {
  4501. *err = error;
  4502. }
  4503. }
  4504. Image2DArray() { }
  4505. /*! \brief Constructor from cl_mem - takes ownership.
  4506. *
  4507. * \param retainObject will cause the constructor to retain its cl object.
  4508. * Defaults to false to maintain compatibility with
  4509. * earlier versions.
  4510. * See Memory for further details.
  4511. */
  4512. explicit Image2DArray(const cl_mem& imageArray, bool retainObject = false) : Image(imageArray, retainObject) { }
  4513. Image2DArray& operator = (const cl_mem& rhs)
  4514. {
  4515. Image::operator=(rhs);
  4516. return *this;
  4517. }
  4518. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4519. * Required for MSVC.
  4520. */
  4521. Image2DArray(const Image2DArray& img) : Image(img) {}
  4522. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4523. * Required for MSVC.
  4524. */
  4525. Image2DArray& operator = (const Image2DArray &img)
  4526. {
  4527. Image::operator=(img);
  4528. return *this;
  4529. }
  4530. /*! \brief Move constructor to forward move to the superclass correctly.
  4531. * Required for MSVC.
  4532. */
  4533. Image2DArray(Image2DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
  4534. /*! \brief Move assignment to forward move to the superclass correctly.
  4535. * Required for MSVC.
  4536. */
  4537. Image2DArray& operator = (Image2DArray &&img)
  4538. {
  4539. Image::operator=(std::move(img));
  4540. return *this;
  4541. }
  4542. };
  4543. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  4544. /*! \brief Class interface for 3D Image Memory objects.
  4545. *
  4546. * See Memory for details about copy semantics, etc.
  4547. *
  4548. * \see Memory
  4549. */
  4550. class Image3D : public Image
  4551. {
  4552. public:
  4553. /*! \brief Constructs a 3D Image in a specified context.
  4554. *
  4555. * Wraps clCreateImage().
  4556. */
  4557. Image3D(
  4558. const Context& context,
  4559. cl_mem_flags flags,
  4560. ImageFormat format,
  4561. size_type width,
  4562. size_type height,
  4563. size_type depth,
  4564. size_type row_pitch = 0,
  4565. size_type slice_pitch = 0,
  4566. void* host_ptr = NULL,
  4567. cl_int* err = NULL)
  4568. {
  4569. cl_int error;
  4570. bool useCreateImage;
  4571. #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
  4572. // Run-time decision based on the actual platform
  4573. {
  4574. cl_uint version = detail::getContextPlatformVersion(context());
  4575. useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
  4576. }
  4577. #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
  4578. useCreateImage = true;
  4579. #else
  4580. useCreateImage = false;
  4581. #endif
  4582. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  4583. if (useCreateImage)
  4584. {
  4585. cl_image_desc desc =
  4586. {
  4587. CL_MEM_OBJECT_IMAGE3D,
  4588. width,
  4589. height,
  4590. depth,
  4591. 0, // array size (unused)
  4592. row_pitch,
  4593. slice_pitch,
  4594. 0, 0, 0
  4595. };
  4596. object_ = ::clCreateImage(
  4597. context(),
  4598. flags,
  4599. &format,
  4600. &desc,
  4601. host_ptr,
  4602. &error);
  4603. detail::errHandler(error, __CREATE_IMAGE_ERR);
  4604. if (err != NULL) {
  4605. *err = error;
  4606. }
  4607. }
  4608. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  4609. #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
  4610. if (!useCreateImage)
  4611. {
  4612. object_ = ::clCreateImage3D(
  4613. context(), flags, &format, width, height, depth, row_pitch,
  4614. slice_pitch, host_ptr, &error);
  4615. detail::errHandler(error, __CREATE_IMAGE3D_ERR);
  4616. if (err != NULL) {
  4617. *err = error;
  4618. }
  4619. }
  4620. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
  4621. }
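// Usage sketch (illustrative): a 64x64x64 single-channel float volume with
// runtime-computed row and slice pitches.
//
//   cl::Image3D volume(context, CL_MEM_READ_WRITE,
//                      cl::ImageFormat(CL_R, CL_FLOAT), 64, 64, 64);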
  4622. //! \brief Default constructor - initializes to NULL.
  4623. Image3D() : Image() { }
  4624. /*! \brief Constructor from cl_mem - takes ownership.
  4625. *
  4626. * \param retainObject will cause the constructor to retain its cl object.
  4627. * Defaults to false to maintain compatibility with
  4628. * earlier versions.
  4629. * See Memory for further details.
  4630. */
  4631. explicit Image3D(const cl_mem& image3D, bool retainObject = false) :
  4632. Image(image3D, retainObject) { }
  4633. /*! \brief Assignment from cl_mem - performs shallow copy.
  4634. *
  4635. * See Memory for further details.
  4636. */
  4637. Image3D& operator = (const cl_mem& rhs)
  4638. {
  4639. Image::operator=(rhs);
  4640. return *this;
  4641. }
  4642. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4643. * Required for MSVC.
  4644. */
  4645. Image3D(const Image3D& img) : Image(img) {}
  4646. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4647. * Required for MSVC.
  4648. */
  4649. Image3D& operator = (const Image3D &img)
  4650. {
  4651. Image::operator=(img);
  4652. return *this;
  4653. }
  4654. /*! \brief Move constructor to forward move to the superclass correctly.
  4655. * Required for MSVC.
  4656. */
  4657. Image3D(Image3D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
  4658. /*! \brief Move assignment to forward move to the superclass correctly.
  4659. * Required for MSVC.
  4660. */
  4661. Image3D& operator = (Image3D &&img)
  4662. {
  4663. Image::operator=(std::move(img));
  4664. return *this;
  4665. }
  4666. };
  4667. #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  4668. /*! \brief Class interface for GL 3D Image Memory objects.
  4669. *
  4670. * This is provided to facilitate interoperability with OpenGL.
  4671. *
  4672. * See Memory for details about copy semantics, etc.
  4673. *
  4674. * \see Memory
  4675. */
  4676. class Image3DGL : public Image3D
  4677. {
  4678. public:
  4679. /*! \brief Constructs an Image3DGL in a specified context, from a given
  4680. * GL Texture.
  4681. *
  4682. * Wraps clCreateFromGLTexture3D().
  4683. */
  4684. Image3DGL(
  4685. const Context& context,
  4686. cl_mem_flags flags,
  4687. cl_GLenum target,
  4688. cl_GLint miplevel,
  4689. cl_GLuint texobj,
  4690. cl_int * err = NULL)
  4691. {
  4692. cl_int error;
  4693. object_ = ::clCreateFromGLTexture3D(
  4694. context(),
  4695. flags,
  4696. target,
  4697. miplevel,
  4698. texobj,
  4699. &error);
  4700. detail::errHandler(error, __CREATE_GL_TEXTURE_3D_ERR);
  4701. if (err != NULL) {
  4702. *err = error;
  4703. }
  4704. }
  4705. //! \brief Default constructor - initializes to NULL.
  4706. Image3DGL() : Image3D() { }
  4707. /*! \brief Constructor from cl_mem - takes ownership.
  4708. *
  4709. * \param retainObject will cause the constructor to retain its cl object.
  4710. * Defaults to false to maintain compatibility with
  4711. * earlier versions.
  4712. * See Memory for further details.
  4713. */
  4714. explicit Image3DGL(const cl_mem& image, bool retainObject = false) :
  4715. Image3D(image, retainObject) { }
  4716. /*! \brief Assignment from cl_mem - performs shallow copy.
  4717. *
  4718. * See Memory for further details.
  4719. */
  4720. Image3DGL& operator = (const cl_mem& rhs)
  4721. {
  4722. Image3D::operator=(rhs);
  4723. return *this;
  4724. }
  4725. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4726. * Required for MSVC.
  4727. */
  4728. Image3DGL(const Image3DGL& img) : Image3D(img) {}
  4729. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4730. * Required for MSVC.
  4731. */
  4732. Image3DGL& operator = (const Image3DGL &img)
  4733. {
  4734. Image3D::operator=(img);
  4735. return *this;
  4736. }
  4737. /*! \brief Move constructor to forward move to the superclass correctly.
  4738. * Required for MSVC.
  4739. */
  4740. Image3DGL(Image3DGL&& img) CL_HPP_NOEXCEPT_ : Image3D(std::move(img)) {}
  4741. /*! \brief Move assignment to forward move to the superclass correctly.
  4742. * Required for MSVC.
  4743. */
  4744. Image3DGL& operator = (Image3DGL &&img)
  4745. {
  4746. Image3D::operator=(std::move(img));
  4747. return *this;
  4748. }
  4749. };
  4750. #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
  4751. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  4752. /*! \class ImageGL
4753. * \brief General image interface for GL interop.
4754. * The 2D and 3D GL image types are abstracted into a single class here
4755. * that wraps all GL-sourced images, on the grounds that the image setup
4756. * was already performed on the OpenGL side anyway.
  4757. */
  4758. class ImageGL : public Image
  4759. {
  4760. public:
  4761. ImageGL(
  4762. const Context& context,
  4763. cl_mem_flags flags,
  4764. cl_GLenum target,
  4765. cl_GLint miplevel,
  4766. cl_GLuint texobj,
  4767. cl_int * err = NULL)
  4768. {
  4769. cl_int error;
  4770. object_ = ::clCreateFromGLTexture(
  4771. context(),
  4772. flags,
  4773. target,
  4774. miplevel,
  4775. texobj,
  4776. &error);
  4777. detail::errHandler(error, __CREATE_GL_TEXTURE_ERR);
  4778. if (err != NULL) {
  4779. *err = error;
  4780. }
  4781. }
  4782. ImageGL() : Image() { }
  4783. /*! \brief Constructor from cl_mem - takes ownership.
  4784. *
  4785. * \param retainObject will cause the constructor to retain its cl object.
  4786. * Defaults to false to maintain compatibility with
  4787. * earlier versions.
  4788. * See Memory for further details.
  4789. */
  4790. explicit ImageGL(const cl_mem& image, bool retainObject = false) :
  4791. Image(image, retainObject) { }
  4792. ImageGL& operator = (const cl_mem& rhs)
  4793. {
  4794. Image::operator=(rhs);
  4795. return *this;
  4796. }
  4797. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4798. * Required for MSVC.
  4799. */
  4800. ImageGL(const ImageGL& img) : Image(img) {}
  4801. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4802. * Required for MSVC.
  4803. */
  4804. ImageGL& operator = (const ImageGL &img)
  4805. {
  4806. Image::operator=(img);
  4807. return *this;
  4808. }
  4809. /*! \brief Move constructor to forward move to the superclass correctly.
  4810. * Required for MSVC.
  4811. */
  4812. ImageGL(ImageGL&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
  4813. /*! \brief Move assignment to forward move to the superclass correctly.
  4814. * Required for MSVC.
  4815. */
  4816. ImageGL& operator = (ImageGL &&img)
  4817. {
  4818. Image::operator=(std::move(img));
  4819. return *this;
  4820. }
  4821. };
  4822. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  4823. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  4824. /*! \brief Class interface for Pipe Memory Objects.
  4825. *
  4826. * See Memory for details about copy semantics, etc.
  4827. *
  4828. * \see Memory
  4829. */
  4830. class Pipe : public Memory
  4831. {
  4832. public:
  4833. /*! \brief Constructs a Pipe in a specified context.
  4834. *
  4835. * Wraps clCreatePipe().
  4836. * @param context Context in which to create the pipe.
4837. * The pipe is created with CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS, the only flags valid for pipes.
  4838. * @param packet_size Size in bytes of a single packet of the pipe.
  4839. * @param max_packets Number of packets that may be stored in the pipe.
  4840. *
  4841. */
  4842. Pipe(
  4843. const Context& context,
  4844. cl_uint packet_size,
  4845. cl_uint max_packets,
  4846. cl_int* err = NULL)
  4847. {
  4848. cl_int error;
  4849. cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
  4850. object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
  4851. detail::errHandler(error, __CREATE_PIPE_ERR);
  4852. if (err != NULL) {
  4853. *err = error;
  4854. }
  4855. }
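// Usage sketch (illustrative): a pipe carrying up to 1024 packets of 16 bytes
// each between producer and consumer kernels (pipes are not host-accessible).
//
//   cl::Pipe pipe(context, 16, 1024);   // packet_size, max_packets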
4856. /*! \brief Constructs a Pipe in the default context.
  4857. *
  4858. * Wraps clCreatePipe().
4859. * The pipe is created with CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS, the only flags valid for pipes.
  4860. * @param packet_size Size in bytes of a single packet of the pipe.
  4861. * @param max_packets Number of packets that may be stored in the pipe.
  4862. *
  4863. */
  4864. Pipe(
  4865. cl_uint packet_size,
  4866. cl_uint max_packets,
  4867. cl_int* err = NULL)
  4868. {
  4869. cl_int error;
  4870. Context context = Context::getDefault(err);
  4871. cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
  4872. object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
  4873. detail::errHandler(error, __CREATE_PIPE_ERR);
  4874. if (err != NULL) {
  4875. *err = error;
  4876. }
  4877. }
  4878. //! \brief Default constructor - initializes to NULL.
  4879. Pipe() : Memory() { }
  4880. /*! \brief Constructor from cl_mem - takes ownership.
  4881. *
  4882. * \param retainObject will cause the constructor to retain its cl object.
  4883. * Defaults to false to maintain compatibility with earlier versions.
  4884. *
  4885. * See Memory for further details.
  4886. */
  4887. explicit Pipe(const cl_mem& pipe, bool retainObject = false) :
  4888. Memory(pipe, retainObject) { }
  4889. /*! \brief Assignment from cl_mem - performs shallow copy.
  4890. *
  4891. * See Memory for further details.
  4892. */
  4893. Pipe& operator = (const cl_mem& rhs)
  4894. {
  4895. Memory::operator=(rhs);
  4896. return *this;
  4897. }
  4898. /*! \brief Copy constructor to forward copy to the superclass correctly.
  4899. * Required for MSVC.
  4900. */
  4901. Pipe(const Pipe& pipe) : Memory(pipe) {}
  4902. /*! \brief Copy assignment to forward copy to the superclass correctly.
  4903. * Required for MSVC.
  4904. */
  4905. Pipe& operator = (const Pipe &pipe)
  4906. {
  4907. Memory::operator=(pipe);
  4908. return *this;
  4909. }
  4910. /*! \brief Move constructor to forward move to the superclass correctly.
  4911. * Required for MSVC.
  4912. */
  4913. Pipe(Pipe&& pipe) CL_HPP_NOEXCEPT_ : Memory(std::move(pipe)) {}
  4914. /*! \brief Move assignment to forward move to the superclass correctly.
  4915. * Required for MSVC.
  4916. */
  4917. Pipe& operator = (Pipe &&pipe)
  4918. {
  4919. Memory::operator=(std::move(pipe));
  4920. return *this;
  4921. }
4922. //! \brief Wrapper for clGetPipeInfo().
  4923. template <typename T>
  4924. cl_int getInfo(cl_pipe_info name, T* param) const
  4925. {
  4926. return detail::errHandler(
  4927. detail::getInfo(&::clGetPipeInfo, object_, name, param),
  4928. __GET_PIPE_INFO_ERR);
  4929. }
4930. //! \brief Wrapper for clGetPipeInfo() that returns by value.
  4931. template <cl_int name> typename
  4932. detail::param_traits<detail::cl_pipe_info, name>::param_type
  4933. getInfo(cl_int* err = NULL) const
  4934. {
  4935. typename detail::param_traits<
  4936. detail::cl_pipe_info, name>::param_type param;
  4937. cl_int result = getInfo(name, &param);
  4938. if (err != NULL) {
  4939. *err = result;
  4940. }
  4941. return param;
  4942. }
  4943. }; // class Pipe
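/*! \par Example
* A minimal, non-normative sketch of pipe usage; the `context` object and the
* `producer`/`consumer` kernels are assumed to exist elsewhere, and the packet
* type is purely illustrative.
* \code
* cl_int err = CL_SUCCESS;
* cl::Pipe pipe(context, sizeof(cl_float4), 1024, &err); // packet size, max packets
* producer.setArg(0, pipe);  // pipes are passed to kernels like any other cl_mem object
* consumer.setArg(0, pipe);
* \endcode
*/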
  4944. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  4945. /*! \brief Class interface for cl_sampler.
  4946. *
  4947. * \note Copies of these objects are shallow, meaning that the copy will refer
  4948. * to the same underlying cl_sampler as the original. For details, see
  4949. * clRetainSampler() and clReleaseSampler().
  4950. *
  4951. * \see cl_sampler
  4952. */
  4953. class Sampler : public detail::Wrapper<cl_sampler>
  4954. {
  4955. public:
  4956. //! \brief Default constructor - initializes to NULL.
  4957. Sampler() { }
  4958. /*! \brief Constructs a Sampler in a specified context.
  4959. *
  4960. * Wraps clCreateSampler().
  4961. */
  4962. Sampler(
  4963. const Context& context,
  4964. cl_bool normalized_coords,
  4965. cl_addressing_mode addressing_mode,
  4966. cl_filter_mode filter_mode,
  4967. cl_int* err = NULL)
  4968. {
  4969. cl_int error;
  4970. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  4971. cl_sampler_properties sampler_properties[] = {
  4972. CL_SAMPLER_NORMALIZED_COORDS, normalized_coords,
  4973. CL_SAMPLER_ADDRESSING_MODE, addressing_mode,
  4974. CL_SAMPLER_FILTER_MODE, filter_mode,
  4975. 0 };
  4976. object_ = ::clCreateSamplerWithProperties(
  4977. context(),
  4978. sampler_properties,
  4979. &error);
  4980. detail::errHandler(error, __CREATE_SAMPLER_WITH_PROPERTIES_ERR);
  4981. if (err != NULL) {
  4982. *err = error;
  4983. }
  4984. #else
  4985. object_ = ::clCreateSampler(
  4986. context(),
  4987. normalized_coords,
  4988. addressing_mode,
  4989. filter_mode,
  4990. &error);
  4991. detail::errHandler(error, __CREATE_SAMPLER_ERR);
  4992. if (err != NULL) {
  4993. *err = error;
  4994. }
  4995. #endif
  4996. }
  4997. /*! \brief Constructor from cl_sampler - takes ownership.
  4998. *
  4999. * \param retainObject will cause the constructor to retain its cl object.
  5000. * Defaults to false to maintain compatibility with
  5001. * earlier versions.
  5002. * This effectively transfers ownership of a refcount on the cl_sampler
  5003. * into the new Sampler object.
  5004. */
  5005. explicit Sampler(const cl_sampler& sampler, bool retainObject = false) :
  5006. detail::Wrapper<cl_type>(sampler, retainObject) { }
  5007. /*! \brief Assignment operator from cl_sampler - takes ownership.
  5008. *
  5009. * This effectively transfers ownership of a refcount on the rhs and calls
  5010. * clReleaseSampler() on the value previously held by this instance.
  5011. */
  5012. Sampler& operator = (const cl_sampler& rhs)
  5013. {
  5014. detail::Wrapper<cl_type>::operator=(rhs);
  5015. return *this;
  5016. }
  5017. /*! \brief Copy constructor to forward copy to the superclass correctly.
  5018. * Required for MSVC.
  5019. */
  5020. Sampler(const Sampler& sam) : detail::Wrapper<cl_type>(sam) {}
  5021. /*! \brief Copy assignment to forward copy to the superclass correctly.
  5022. * Required for MSVC.
  5023. */
  5024. Sampler& operator = (const Sampler &sam)
  5025. {
  5026. detail::Wrapper<cl_type>::operator=(sam);
  5027. return *this;
  5028. }
  5029. /*! \brief Move constructor to forward move to the superclass correctly.
  5030. * Required for MSVC.
  5031. */
  5032. Sampler(Sampler&& sam) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(sam)) {}
  5033. /*! \brief Move assignment to forward move to the superclass correctly.
  5034. * Required for MSVC.
  5035. */
  5036. Sampler& operator = (Sampler &&sam)
  5037. {
  5038. detail::Wrapper<cl_type>::operator=(std::move(sam));
  5039. return *this;
  5040. }
  5041. //! \brief Wrapper for clGetSamplerInfo().
  5042. template <typename T>
  5043. cl_int getInfo(cl_sampler_info name, T* param) const
  5044. {
  5045. return detail::errHandler(
  5046. detail::getInfo(&::clGetSamplerInfo, object_, name, param),
  5047. __GET_SAMPLER_INFO_ERR);
  5048. }
  5049. //! \brief Wrapper for clGetSamplerInfo() that returns by value.
  5050. template <cl_int name> typename
  5051. detail::param_traits<detail::cl_sampler_info, name>::param_type
  5052. getInfo(cl_int* err = NULL) const
  5053. {
  5054. typename detail::param_traits<
  5055. detail::cl_sampler_info, name>::param_type param;
  5056. cl_int result = getInfo(name, &param);
  5057. if (err != NULL) {
  5058. *err = result;
  5059. }
  5060. return param;
  5061. }
  5062. };
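/*! \par Example
* A minimal, non-normative sketch of sampler creation; `context` is assumed to be
* a valid cl::Context created elsewhere.
* \code
* cl_int err = CL_SUCCESS;
* cl::Sampler sampler(
*     context, CL_FALSE, CL_ADDRESS_CLAMP_TO_EDGE, CL_FILTER_LINEAR, &err);
* cl_filter_mode mode = sampler.getInfo<CL_SAMPLER_FILTER_MODE>();
* \endcode
*/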
  5063. class Program;
  5064. class CommandQueue;
  5065. class DeviceCommandQueue;
  5066. class Kernel;
  5067. //! \brief Class interface for specifying NDRange values.
  5068. class NDRange
  5069. {
  5070. private:
  5071. size_type sizes_[3];
  5072. cl_uint dimensions_;
  5073. public:
  5074. //! \brief Default constructor - resulting range has zero dimensions.
  5075. NDRange()
  5076. : dimensions_(0)
  5077. {
  5078. sizes_[0] = 0;
  5079. sizes_[1] = 0;
  5080. sizes_[2] = 0;
  5081. }
  5082. //! \brief Constructs one-dimensional range.
  5083. NDRange(size_type size0)
  5084. : dimensions_(1)
  5085. {
  5086. sizes_[0] = size0;
  5087. sizes_[1] = 1;
  5088. sizes_[2] = 1;
  5089. }
  5090. //! \brief Constructs two-dimensional range.
  5091. NDRange(size_type size0, size_type size1)
  5092. : dimensions_(2)
  5093. {
  5094. sizes_[0] = size0;
  5095. sizes_[1] = size1;
  5096. sizes_[2] = 1;
  5097. }
  5098. //! \brief Constructs three-dimensional range.
  5099. NDRange(size_type size0, size_type size1, size_type size2)
  5100. : dimensions_(3)
  5101. {
  5102. sizes_[0] = size0;
  5103. sizes_[1] = size1;
  5104. sizes_[2] = size2;
  5105. }
  5106. /*! \brief Conversion operator to const size_type *.
  5107. *
  5108. * \returns a pointer to the size of the first dimension.
  5109. */
  5110. operator const size_type*() const {
  5111. return sizes_;
  5112. }
  5113. //! \brief Queries the number of dimensions in the range.
  5114. size_type dimensions() const
  5115. {
  5116. return dimensions_;
  5117. }
5118. //! \brief Returns the size, in bytes, of the region returned by get(),
5119. //! based on the runtime number of dimensions.
  5120. size_type size() const
  5121. {
  5122. return dimensions_*sizeof(size_type);
  5123. }
  5124. size_type* get()
  5125. {
  5126. return sizes_;
  5127. }
  5128. const size_type* get() const
  5129. {
  5130. return sizes_;
  5131. }
  5132. };
  5133. //! \brief A zero-dimensional range.
  5134. static const NDRange NullRange;
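/*! \par Example
* A minimal, non-normative sketch of how NDRange and NullRange are typically
* combined with CommandQueue::enqueueNDRangeKernel (declared later in this
* header); `queue` and `kernel` are assumed to exist.
* \code
* cl::NDRange global(1024, 1024);  // 2D global work size
* cl::NDRange local(16, 16);       // 2D work-group size
* queue.enqueueNDRangeKernel(kernel, cl::NullRange, global, local);
* \endcode
*/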
  5135. //! \brief Local address wrapper for use with Kernel::setArg
  5136. struct LocalSpaceArg
  5137. {
  5138. size_type size_;
  5139. };
  5140. namespace detail {
  5141. template <typename T, class Enable = void>
  5142. struct KernelArgumentHandler;
  5143. // Enable for objects that are not subclasses of memory
  5144. // Pointers, constants etc
  5145. template <typename T>
  5146. struct KernelArgumentHandler<T, typename std::enable_if<!std::is_base_of<cl::Memory, T>::value>::type>
  5147. {
  5148. static size_type size(const T&) { return sizeof(T); }
  5149. static const T* ptr(const T& value) { return &value; }
  5150. };
  5151. // Enable for subclasses of memory where we want to get a reference to the cl_mem out
  5152. // and pass that in for safety
  5153. template <typename T>
  5154. struct KernelArgumentHandler<T, typename std::enable_if<std::is_base_of<cl::Memory, T>::value>::type>
  5155. {
  5156. static size_type size(const T&) { return sizeof(cl_mem); }
  5157. static const cl_mem* ptr(const T& value) { return &(value()); }
  5158. };
5159. // A specialization of KernelArgumentHandler for DeviceCommandQueue is defined later in this header
  5160. template <>
  5161. struct KernelArgumentHandler<LocalSpaceArg, void>
  5162. {
  5163. static size_type size(const LocalSpaceArg& value) { return value.size_; }
  5164. static const void* ptr(const LocalSpaceArg&) { return NULL; }
  5165. };
  5166. }
  5167. //! \endcond
  5168. /*! Local
  5169. * \brief Helper function for generating LocalSpaceArg objects.
  5170. */
  5171. inline LocalSpaceArg
  5172. Local(size_type size)
  5173. {
  5174. LocalSpaceArg ret = { size };
  5175. return ret;
  5176. }
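/*! \par Example
* A minimal, non-normative sketch of reserving local (work-group) memory for a
* kernel argument; `kernel` is assumed to exist and argument 1 is assumed to be
* declared as a __local pointer in the kernel source.
* \code
* kernel.setArg(1, cl::Local(256 * sizeof(cl_float))); // 256 floats of __local memory
* \endcode
*/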
  5177. /*! \brief Class interface for cl_kernel.
  5178. *
  5179. * \note Copies of these objects are shallow, meaning that the copy will refer
  5180. * to the same underlying cl_kernel as the original. For details, see
  5181. * clRetainKernel() and clReleaseKernel().
  5182. *
  5183. * \see cl_kernel
  5184. */
  5185. class Kernel : public detail::Wrapper<cl_kernel>
  5186. {
  5187. public:
  5188. inline Kernel(const Program& program, const char* name, cl_int* err = NULL);
  5189. //! \brief Default constructor - initializes to NULL.
  5190. Kernel() { }
  5191. /*! \brief Constructor from cl_kernel - takes ownership.
  5192. *
  5193. * \param retainObject will cause the constructor to retain its cl object.
  5194. * Defaults to false to maintain compatibility with
  5195. * earlier versions.
  5196. * This effectively transfers ownership of a refcount on the cl_kernel
  5197. * into the new Kernel object.
  5198. */
  5199. explicit Kernel(const cl_kernel& kernel, bool retainObject = false) :
  5200. detail::Wrapper<cl_type>(kernel, retainObject) { }
  5201. /*! \brief Assignment operator from cl_kernel - takes ownership.
  5202. *
  5203. * This effectively transfers ownership of a refcount on the rhs and calls
  5204. * clReleaseKernel() on the value previously held by this instance.
  5205. */
  5206. Kernel& operator = (const cl_kernel& rhs)
  5207. {
  5208. detail::Wrapper<cl_type>::operator=(rhs);
  5209. return *this;
  5210. }
  5211. /*! \brief Copy constructor to forward copy to the superclass correctly.
  5212. * Required for MSVC.
  5213. */
  5214. Kernel(const Kernel& kernel) : detail::Wrapper<cl_type>(kernel) {}
  5215. /*! \brief Copy assignment to forward copy to the superclass correctly.
  5216. * Required for MSVC.
  5217. */
  5218. Kernel& operator = (const Kernel &kernel)
  5219. {
  5220. detail::Wrapper<cl_type>::operator=(kernel);
  5221. return *this;
  5222. }
  5223. /*! \brief Move constructor to forward move to the superclass correctly.
  5224. * Required for MSVC.
  5225. */
  5226. Kernel(Kernel&& kernel) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(kernel)) {}
  5227. /*! \brief Move assignment to forward move to the superclass correctly.
  5228. * Required for MSVC.
  5229. */
  5230. Kernel& operator = (Kernel &&kernel)
  5231. {
  5232. detail::Wrapper<cl_type>::operator=(std::move(kernel));
  5233. return *this;
  5234. }
  5235. template <typename T>
  5236. cl_int getInfo(cl_kernel_info name, T* param) const
  5237. {
  5238. return detail::errHandler(
  5239. detail::getInfo(&::clGetKernelInfo, object_, name, param),
  5240. __GET_KERNEL_INFO_ERR);
  5241. }
  5242. template <cl_int name> typename
  5243. detail::param_traits<detail::cl_kernel_info, name>::param_type
  5244. getInfo(cl_int* err = NULL) const
  5245. {
  5246. typename detail::param_traits<
  5247. detail::cl_kernel_info, name>::param_type param;
  5248. cl_int result = getInfo(name, &param);
  5249. if (err != NULL) {
  5250. *err = result;
  5251. }
  5252. return param;
  5253. }
  5254. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  5255. template <typename T>
  5256. cl_int getArgInfo(cl_uint argIndex, cl_kernel_arg_info name, T* param) const
  5257. {
  5258. return detail::errHandler(
  5259. detail::getInfo(&::clGetKernelArgInfo, object_, argIndex, name, param),
  5260. __GET_KERNEL_ARG_INFO_ERR);
  5261. }
  5262. template <cl_int name> typename
  5263. detail::param_traits<detail::cl_kernel_arg_info, name>::param_type
  5264. getArgInfo(cl_uint argIndex, cl_int* err = NULL) const
  5265. {
  5266. typename detail::param_traits<
  5267. detail::cl_kernel_arg_info, name>::param_type param;
  5268. cl_int result = getArgInfo(argIndex, name, &param);
  5269. if (err != NULL) {
  5270. *err = result;
  5271. }
  5272. return param;
  5273. }
  5274. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  5275. template <typename T>
  5276. cl_int getWorkGroupInfo(
  5277. const Device& device, cl_kernel_work_group_info name, T* param) const
  5278. {
  5279. return detail::errHandler(
  5280. detail::getInfo(
  5281. &::clGetKernelWorkGroupInfo, object_, device(), name, param),
  5282. __GET_KERNEL_WORK_GROUP_INFO_ERR);
  5283. }
  5284. template <cl_int name> typename
  5285. detail::param_traits<detail::cl_kernel_work_group_info, name>::param_type
  5286. getWorkGroupInfo(const Device& device, cl_int* err = NULL) const
  5287. {
  5288. typename detail::param_traits<
  5289. detail::cl_kernel_work_group_info, name>::param_type param;
  5290. cl_int result = getWorkGroupInfo(device, name, &param);
  5291. if (err != NULL) {
  5292. *err = result;
  5293. }
  5294. return param;
  5295. }
  5296. #if (CL_HPP_TARGET_OPENCL_VERSION >= 200 && defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)) || CL_HPP_TARGET_OPENCL_VERSION >= 210
  5297. cl_int getSubGroupInfo(const cl::Device &dev, cl_kernel_sub_group_info name, const cl::NDRange &range, size_type* param) const
  5298. {
  5299. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5300. return detail::errHandler(
  5301. clGetKernelSubGroupInfo(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
  5302. __GET_KERNEL_SUB_GROUP_INFO_ERR);
  5303. #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5304. typedef clGetKernelSubGroupInfoKHR_fn PFN_clGetKernelSubGroupInfoKHR;
  5305. static PFN_clGetKernelSubGroupInfoKHR pfn_clGetKernelSubGroupInfoKHR = NULL;
  5306. CL_HPP_INIT_CL_EXT_FCN_PTR_(clGetKernelSubGroupInfoKHR);
  5307. return detail::errHandler(
  5308. pfn_clGetKernelSubGroupInfoKHR(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
  5309. __GET_KERNEL_SUB_GROUP_INFO_ERR);
  5310. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5311. }
  5312. template <cl_int name>
  5313. size_type getSubGroupInfo(const cl::Device &dev, const cl::NDRange &range, cl_int* err = NULL) const
  5314. {
  5315. size_type param;
  5316. cl_int result = getSubGroupInfo(dev, name, range, &param);
  5317. if (err != NULL) {
  5318. *err = result;
  5319. }
  5320. return param;
  5321. }
5322. #endif // #if (CL_HPP_TARGET_OPENCL_VERSION >= 200 && defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)) || CL_HPP_TARGET_OPENCL_VERSION >= 210
  5323. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  5324. /*! \brief setArg overload taking a shared_ptr type
  5325. */
  5326. template<typename T, class D>
  5327. cl_int setArg(cl_uint index, const cl::pointer<T, D> &argPtr)
  5328. {
  5329. return detail::errHandler(
  5330. ::clSetKernelArgSVMPointer(object_, index, argPtr.get()),
  5331. __SET_KERNEL_ARGS_ERR);
  5332. }
  5333. /*! \brief setArg overload taking a vector type.
  5334. */
  5335. template<typename T, class Alloc>
  5336. cl_int setArg(cl_uint index, const cl::vector<T, Alloc> &argPtr)
  5337. {
  5338. return detail::errHandler(
  5339. ::clSetKernelArgSVMPointer(object_, index, argPtr.data()),
  5340. __SET_KERNEL_ARGS_ERR);
  5341. }
  5342. /*! \brief setArg overload taking a pointer type
  5343. */
  5344. template<typename T>
  5345. typename std::enable_if<std::is_pointer<T>::value, cl_int>::type
  5346. setArg(cl_uint index, const T argPtr)
  5347. {
  5348. return detail::errHandler(
  5349. ::clSetKernelArgSVMPointer(object_, index, argPtr),
  5350. __SET_KERNEL_ARGS_ERR);
  5351. }
  5352. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  5353. /*! \brief setArg overload taking a POD type
  5354. */
  5355. template <typename T>
  5356. typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
  5357. setArg(cl_uint index, const T &value)
  5358. {
  5359. return detail::errHandler(
  5360. ::clSetKernelArg(
  5361. object_,
  5362. index,
  5363. detail::KernelArgumentHandler<T>::size(value),
  5364. detail::KernelArgumentHandler<T>::ptr(value)),
  5365. __SET_KERNEL_ARGS_ERR);
  5366. }
  5367. cl_int setArg(cl_uint index, size_type size, const void* argPtr)
  5368. {
  5369. return detail::errHandler(
  5370. ::clSetKernelArg(object_, index, size, argPtr),
  5371. __SET_KERNEL_ARGS_ERR);
  5372. }
  5373. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  5374. /*!
  5375. * Specify a vector of SVM pointers that the kernel may access in
  5376. * addition to its arguments.
  5377. */
  5378. cl_int setSVMPointers(const vector<void*> &pointerList)
  5379. {
  5380. return detail::errHandler(
  5381. ::clSetKernelExecInfo(
  5382. object_,
  5383. CL_KERNEL_EXEC_INFO_SVM_PTRS,
  5384. sizeof(void*)*pointerList.size(),
  5385. pointerList.data()));
  5386. }
  5387. /*!
  5388. * Specify a std::array of SVM pointers that the kernel may access in
  5389. * addition to its arguments.
  5390. */
  5391. template<int ArrayLength>
  5392. cl_int setSVMPointers(const std::array<void*, ArrayLength> &pointerList)
  5393. {
  5394. return detail::errHandler(
  5395. ::clSetKernelExecInfo(
  5396. object_,
  5397. CL_KERNEL_EXEC_INFO_SVM_PTRS,
  5398. sizeof(void*)*pointerList.size(),
  5399. pointerList.data()));
  5400. }
  5401. /*! \brief Enable fine-grained system SVM.
  5402. *
  5403. * \note It is only possible to enable fine-grained system SVM if all devices
  5404. * in the context associated with kernel support it.
  5405. *
  5406. * \param svmEnabled True if fine-grained system SVM is requested. False otherwise.
5407. * \return CL_SUCCESS if the function was executed successfully. CL_INVALID_OPERATION
  5408. * if no devices in the context support fine-grained system SVM.
  5409. *
  5410. * \see clSetKernelExecInfo
  5411. */
  5412. cl_int enableFineGrainedSystemSVM(bool svmEnabled)
  5413. {
  5414. cl_bool svmEnabled_ = svmEnabled ? CL_TRUE : CL_FALSE;
  5415. return detail::errHandler(
  5416. ::clSetKernelExecInfo(
  5417. object_,
  5418. CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM,
  5419. sizeof(cl_bool),
  5420. &svmEnabled_
  5421. )
  5422. );
  5423. }
  5424. template<int index, int ArrayLength, class D, typename T0, typename T1, typename... Ts>
  5425. void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0, const pointer<T1, D> &t1, Ts & ... ts)
  5426. {
  5427. pointerList[index] = static_cast<void*>(t0.get());
  5428. setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
  5429. }
  5430. template<int index, int ArrayLength, typename T0, typename T1, typename... Ts>
  5431. typename std::enable_if<std::is_pointer<T0>::value, void>::type
  5432. setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0, T1 t1, Ts... ts)
  5433. {
  5434. pointerList[index] = static_cast<void*>(t0);
  5435. setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
  5436. }
  5437. template<int index, int ArrayLength, typename T0, class D>
  5438. void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0)
  5439. {
  5440. pointerList[index] = static_cast<void*>(t0.get());
  5441. }
  5442. template<int index, int ArrayLength, typename T0>
  5443. typename std::enable_if<std::is_pointer<T0>::value, void>::type
  5444. setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0)
  5445. {
  5446. pointerList[index] = static_cast<void*>(t0);
  5447. }
  5448. template<typename T0, typename... Ts>
  5449. cl_int setSVMPointers(const T0 &t0, Ts & ... ts)
  5450. {
  5451. std::array<void*, 1 + sizeof...(Ts)> pointerList;
  5452. setSVMPointersHelper<0, 1 + sizeof...(Ts)>(pointerList, t0, ts...);
  5453. return detail::errHandler(
  5454. ::clSetKernelExecInfo(
  5455. object_,
  5456. CL_KERNEL_EXEC_INFO_SVM_PTRS,
  5457. sizeof(void*)*(1 + sizeof...(Ts)),
  5458. pointerList.data()));
  5459. }
  5460. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  5461. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5462. /**
  5463. * Make a deep copy of the kernel object including its arguments.
  5464. * @return A new kernel object with internal state entirely separate from that
  5465. * of the original but with any arguments set on the original intact.
  5466. */
  5467. Kernel clone()
  5468. {
  5469. cl_int error;
  5470. Kernel retValue(clCloneKernel(this->get(), &error));
  5471. detail::errHandler(error, __CLONE_KERNEL_ERR);
  5472. return retValue;
  5473. }
  5474. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5475. };
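/*! \par Example
* A minimal, non-normative sketch of kernel creation and argument setup, assuming
* the error-code (non-exception) style; `program`, `device`, the cl::Buffer
* objects `bufA`, `bufB`, `bufC` and the element count `n` are assumed to exist.
* \code
* cl_int err = CL_SUCCESS;
* cl::Kernel vadd(program, "vadd", &err);      // "vadd" is an illustrative kernel name
* vadd.setArg(0, bufA);                        // Memory-derived arguments pass the cl_mem
* vadd.setArg(1, bufB);
* vadd.setArg(2, bufC);
* vadd.setArg(3, static_cast<cl_uint>(n));     // POD arguments are passed by value
* cl::size_type wg = vadd.getWorkGroupInfo<CL_KERNEL_WORK_GROUP_SIZE>(device);
* \endcode
*/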
  5476. /*! \class Program
  5477. * \brief Program interface that implements cl_program.
  5478. */
  5479. class Program : public detail::Wrapper<cl_program>
  5480. {
  5481. public:
  5482. #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5483. typedef vector<vector<unsigned char>> Binaries;
  5484. typedef vector<string> Sources;
  5485. #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5486. typedef vector<std::pair<const void*, size_type> > Binaries;
  5487. typedef vector<std::pair<const char*, size_type> > Sources;
  5488. #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5489. Program(
  5490. const string& source,
  5491. bool build = false,
  5492. cl_int* err = NULL)
  5493. {
  5494. cl_int error;
  5495. const char * strings = source.c_str();
  5496. const size_type length = source.size();
  5497. Context context = Context::getDefault(err);
  5498. object_ = ::clCreateProgramWithSource(
  5499. context(), (cl_uint)1, &strings, &length, &error);
  5500. detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
  5501. if (error == CL_SUCCESS && build) {
  5502. error = ::clBuildProgram(
  5503. object_,
  5504. 0,
  5505. NULL,
  5506. #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5507. "-cl-std=CL2.0",
  5508. #else
  5509. "",
  5510. #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5511. NULL,
  5512. NULL);
  5513. detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
  5514. }
  5515. if (err != NULL) {
  5516. *err = error;
  5517. }
  5518. }
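/*! \par Example
* A minimal, non-normative sketch of the constructor above; `kernelSource` is an
* illustrative cl::string holding OpenCL C source, and the error-code
* (non-exception) style is assumed.
* \code
* cl_int err = CL_SUCCESS;
* cl::Program program(kernelSource, true, &err); // create and build on the default context
* \endcode
*/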
  5519. Program(
  5520. const Context& context,
  5521. const string& source,
  5522. bool build = false,
  5523. cl_int* err = NULL)
  5524. {
  5525. cl_int error;
  5526. const char * strings = source.c_str();
  5527. const size_type length = source.size();
  5528. object_ = ::clCreateProgramWithSource(
  5529. context(), (cl_uint)1, &strings, &length, &error);
  5530. detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
  5531. if (error == CL_SUCCESS && build) {
  5532. error = ::clBuildProgram(
  5533. object_,
  5534. 0,
  5535. NULL,
  5536. #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5537. "-cl-std=CL2.0",
  5538. #else
  5539. "",
  5540. #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5541. NULL,
  5542. NULL);
  5543. detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
  5544. }
  5545. if (err != NULL) {
  5546. *err = error;
  5547. }
  5548. }
  5549. /**
  5550. * Create a program from a vector of source strings and the default context.
  5551. * Does not compile or link the program.
  5552. */
  5553. Program(
  5554. const Sources& sources,
  5555. cl_int* err = NULL)
  5556. {
  5557. cl_int error;
  5558. Context context = Context::getDefault(err);
  5559. const size_type n = (size_type)sources.size();
  5560. vector<size_type> lengths(n);
  5561. vector<const char*> strings(n);
  5562. for (size_type i = 0; i < n; ++i) {
  5563. #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5564. strings[i] = sources[(int)i].data();
  5565. lengths[i] = sources[(int)i].length();
  5566. #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5567. strings[i] = sources[(int)i].first;
  5568. lengths[i] = sources[(int)i].second;
  5569. #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5570. }
  5571. object_ = ::clCreateProgramWithSource(
  5572. context(), (cl_uint)n, strings.data(), lengths.data(), &error);
  5573. detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
  5574. if (err != NULL) {
  5575. *err = error;
  5576. }
  5577. }
  5578. /**
  5579. * Create a program from a vector of source strings and a provided context.
  5580. * Does not compile or link the program.
  5581. */
  5582. Program(
  5583. const Context& context,
  5584. const Sources& sources,
  5585. cl_int* err = NULL)
  5586. {
  5587. cl_int error;
  5588. const size_type n = (size_type)sources.size();
  5589. vector<size_type> lengths(n);
  5590. vector<const char*> strings(n);
  5591. for (size_type i = 0; i < n; ++i) {
  5592. #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5593. strings[i] = sources[(int)i].data();
  5594. lengths[i] = sources[(int)i].length();
  5595. #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5596. strings[i] = sources[(int)i].first;
  5597. lengths[i] = sources[(int)i].second;
  5598. #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5599. }
  5600. object_ = ::clCreateProgramWithSource(
  5601. context(), (cl_uint)n, strings.data(), lengths.data(), &error);
  5602. detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
  5603. if (err != NULL) {
  5604. *err = error;
  5605. }
  5606. }
  5607. #if CL_HPP_TARGET_OPENCL_VERSION >= 210 || (CL_HPP_TARGET_OPENCL_VERSION==200 && defined(CL_HPP_USE_IL_KHR))
  5608. /**
5609. * Program constructor to allow construction of a program from SPIR-V or another IL.
  5610. * Valid for either OpenCL >= 2.1 or when CL_HPP_USE_IL_KHR is defined.
  5611. */
  5612. Program(
  5613. const vector<char>& IL,
  5614. bool build = false,
  5615. cl_int* err = NULL)
  5616. {
  5617. cl_int error;
  5618. Context context = Context::getDefault(err);
  5619. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5620. object_ = ::clCreateProgramWithIL(
  5621. context(), static_cast<const void*>(IL.data()), IL.size(), &error);
  5622. #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5623. typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
  5624. static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
  5625. CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
5626. object_ = pfn_clCreateProgramWithILKHR(
5627. context(), static_cast<const void*>(IL.data()), IL.size(), &error);
  5629. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5630. detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
  5631. if (error == CL_SUCCESS && build) {
  5632. error = ::clBuildProgram(
  5633. object_,
  5634. 0,
  5635. NULL,
  5636. #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5637. "-cl-std=CL2.0",
  5638. #else
  5639. "",
  5640. #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5641. NULL,
  5642. NULL);
  5643. detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
  5644. }
  5645. if (err != NULL) {
  5646. *err = error;
  5647. }
  5648. }
  5649. /**
5650. * Program constructor to allow construction of a program from SPIR-V or another IL
  5651. * for a specific context.
  5652. * Valid for either OpenCL >= 2.1 or when CL_HPP_USE_IL_KHR is defined.
  5653. */
  5654. Program(
  5655. const Context& context,
  5656. const vector<char>& IL,
  5657. bool build = false,
  5658. cl_int* err = NULL)
  5659. {
  5660. cl_int error;
  5661. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5662. object_ = ::clCreateProgramWithIL(
  5663. context(), static_cast<const void*>(IL.data()), IL.size(), &error);
  5664. #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5665. typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
  5666. static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
  5667. CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
5668. object_ = pfn_clCreateProgramWithILKHR(
5669. context(), static_cast<const void*>(IL.data()), IL.size(), &error);
  5671. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5672. detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
  5673. if (error == CL_SUCCESS && build) {
  5674. error = ::clBuildProgram(
  5675. object_,
  5676. 0,
  5677. NULL,
  5678. #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5679. "-cl-std=CL2.0",
  5680. #else
  5681. "",
  5682. #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
  5683. NULL,
  5684. NULL);
  5685. detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
  5686. }
  5687. if (err != NULL) {
  5688. *err = error;
  5689. }
  5690. }
  5691. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  5692. /**
  5693. * Construct a program object from a list of devices and a per-device list of binaries.
  5694. * \param context A valid OpenCL context in which to construct the program.
  5695. * \param devices A vector of OpenCL device objects for which the program will be created.
  5696. * \param binaries A vector of pairs of a pointer to a binary object and its length.
  5697. * \param binaryStatus An optional vector that on completion will be resized to
  5698. * match the size of binaries and filled with values to specify if each binary
  5699. * was successfully loaded.
  5700. * Set to CL_SUCCESS if the binary was successfully loaded.
  5701. * Set to CL_INVALID_VALUE if the length is 0 or the binary pointer is NULL.
  5702. * Set to CL_INVALID_BINARY if the binary provided is not valid for the matching device.
  5703. * \param err if non-NULL will be set to CL_SUCCESS on successful operation or one of the following errors:
  5704. * CL_INVALID_CONTEXT if context is not a valid context.
  5705. * CL_INVALID_VALUE if the length of devices is zero; or if the length of binaries does not match the length of devices;
  5706. * or if any entry in binaries is NULL or has length 0.
  5707. * CL_INVALID_DEVICE if OpenCL devices listed in devices are not in the list of devices associated with context.
  5708. * CL_INVALID_BINARY if an invalid program binary was encountered for any device. binaryStatus will return specific status for each device.
  5709. * CL_OUT_OF_HOST_MEMORY if there is a failure to allocate resources required by the OpenCL implementation on the host.
  5710. */
  5711. Program(
  5712. const Context& context,
  5713. const vector<Device>& devices,
  5714. const Binaries& binaries,
  5715. vector<cl_int>* binaryStatus = NULL,
  5716. cl_int* err = NULL)
  5717. {
  5718. cl_int error;
  5719. const size_type numDevices = devices.size();
  5720. // Catch size mismatch early and return
  5721. if(binaries.size() != numDevices) {
  5722. error = CL_INVALID_VALUE;
  5723. detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
  5724. if (err != NULL) {
  5725. *err = error;
  5726. }
  5727. return;
  5728. }
  5729. vector<size_type> lengths(numDevices);
  5730. vector<const unsigned char*> images(numDevices);
  5731. #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5732. for (size_type i = 0; i < numDevices; ++i) {
  5733. images[i] = binaries[i].data();
  5734. lengths[i] = binaries[(int)i].size();
  5735. }
  5736. #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5737. for (size_type i = 0; i < numDevices; ++i) {
  5738. images[i] = (const unsigned char*)binaries[i].first;
  5739. lengths[i] = binaries[(int)i].second;
  5740. }
  5741. #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
  5742. vector<cl_device_id> deviceIDs(numDevices);
  5743. for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
  5744. deviceIDs[deviceIndex] = (devices[deviceIndex])();
  5745. }
  5746. if(binaryStatus) {
  5747. binaryStatus->resize(numDevices);
  5748. }
  5749. object_ = ::clCreateProgramWithBinary(
  5750. context(), (cl_uint) devices.size(),
  5751. deviceIDs.data(),
  5752. lengths.data(), images.data(), (binaryStatus != NULL && numDevices > 0)
  5753. ? &binaryStatus->front()
  5754. : NULL, &error);
  5755. detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
  5756. if (err != NULL) {
  5757. *err = error;
  5758. }
  5759. }
  5760. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  5761. /**
5762. * Create a program using built-in kernels.
5763. * \param kernelNames Semicolon-separated list of built-in kernel names
  5764. */
  5765. Program(
  5766. const Context& context,
  5767. const vector<Device>& devices,
  5768. const string& kernelNames,
  5769. cl_int* err = NULL)
  5770. {
  5771. cl_int error;
  5772. size_type numDevices = devices.size();
  5773. vector<cl_device_id> deviceIDs(numDevices);
  5774. for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
  5775. deviceIDs[deviceIndex] = (devices[deviceIndex])();
  5776. }
  5777. object_ = ::clCreateProgramWithBuiltInKernels(
  5778. context(),
  5779. (cl_uint) devices.size(),
  5780. deviceIDs.data(),
  5781. kernelNames.c_str(),
  5782. &error);
  5783. detail::errHandler(error, __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR);
  5784. if (err != NULL) {
  5785. *err = error;
  5786. }
  5787. }
  5788. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  5789. Program() { }
5790. /*! \brief Constructor from cl_program - takes ownership.
  5791. *
  5792. * \param retainObject will cause the constructor to retain its cl object.
  5793. * Defaults to false to maintain compatibility with
  5794. * earlier versions.
  5795. */
  5796. explicit Program(const cl_program& program, bool retainObject = false) :
  5797. detail::Wrapper<cl_type>(program, retainObject) { }
  5798. Program& operator = (const cl_program& rhs)
  5799. {
  5800. detail::Wrapper<cl_type>::operator=(rhs);
  5801. return *this;
  5802. }
  5803. /*! \brief Copy constructor to forward copy to the superclass correctly.
  5804. * Required for MSVC.
  5805. */
  5806. Program(const Program& program) : detail::Wrapper<cl_type>(program) {}
  5807. /*! \brief Copy assignment to forward copy to the superclass correctly.
  5808. * Required for MSVC.
  5809. */
  5810. Program& operator = (const Program &program)
  5811. {
  5812. detail::Wrapper<cl_type>::operator=(program);
  5813. return *this;
  5814. }
  5815. /*! \brief Move constructor to forward move to the superclass correctly.
  5816. * Required for MSVC.
  5817. */
  5818. Program(Program&& program) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(program)) {}
  5819. /*! \brief Move assignment to forward move to the superclass correctly.
  5820. * Required for MSVC.
  5821. */
  5822. Program& operator = (Program &&program)
  5823. {
  5824. detail::Wrapper<cl_type>::operator=(std::move(program));
  5825. return *this;
  5826. }
  5827. cl_int build(
  5828. const vector<Device>& devices,
  5829. const char* options = NULL,
  5830. void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
  5831. void* data = NULL) const
  5832. {
  5833. size_type numDevices = devices.size();
  5834. vector<cl_device_id> deviceIDs(numDevices);
  5835. for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
  5836. deviceIDs[deviceIndex] = (devices[deviceIndex])();
  5837. }
  5838. cl_int buildError = ::clBuildProgram(
  5839. object_,
  5840. (cl_uint)
  5841. devices.size(),
  5842. deviceIDs.data(),
  5843. options,
  5844. notifyFptr,
  5845. data);
  5846. return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
  5847. }
  5848. cl_int build(
  5849. const char* options = NULL,
  5850. void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
  5851. void* data = NULL) const
  5852. {
  5853. cl_int buildError = ::clBuildProgram(
  5854. object_,
  5855. 0,
  5856. NULL,
  5857. options,
  5858. notifyFptr,
  5859. data);
  5860. return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
  5861. }
  5862. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  5863. cl_int compile(
  5864. const char* options = NULL,
  5865. void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
  5866. void* data = NULL) const
  5867. {
  5868. cl_int error = ::clCompileProgram(
  5869. object_,
  5870. 0,
  5871. NULL,
  5872. options,
  5873. 0,
  5874. NULL,
  5875. NULL,
  5876. notifyFptr,
  5877. data);
  5878. return detail::buildErrHandler(error, __COMPILE_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
  5879. }
  5880. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  5881. template <typename T>
  5882. cl_int getInfo(cl_program_info name, T* param) const
  5883. {
  5884. return detail::errHandler(
  5885. detail::getInfo(&::clGetProgramInfo, object_, name, param),
  5886. __GET_PROGRAM_INFO_ERR);
  5887. }
  5888. template <cl_int name> typename
  5889. detail::param_traits<detail::cl_program_info, name>::param_type
  5890. getInfo(cl_int* err = NULL) const
  5891. {
  5892. typename detail::param_traits<
  5893. detail::cl_program_info, name>::param_type param;
  5894. cl_int result = getInfo(name, &param);
  5895. if (err != NULL) {
  5896. *err = result;
  5897. }
  5898. return param;
  5899. }
  5900. template <typename T>
  5901. cl_int getBuildInfo(
  5902. const Device& device, cl_program_build_info name, T* param) const
  5903. {
  5904. return detail::errHandler(
  5905. detail::getInfo(
  5906. &::clGetProgramBuildInfo, object_, device(), name, param),
  5907. __GET_PROGRAM_BUILD_INFO_ERR);
  5908. }
  5909. template <cl_int name> typename
  5910. detail::param_traits<detail::cl_program_build_info, name>::param_type
  5911. getBuildInfo(const Device& device, cl_int* err = NULL) const
  5912. {
  5913. typename detail::param_traits<
  5914. detail::cl_program_build_info, name>::param_type param;
  5915. cl_int result = getBuildInfo(device, name, &param);
  5916. if (err != NULL) {
  5917. *err = result;
  5918. }
  5919. return param;
  5920. }
  5921. /**
  5922. * Build info function that returns a vector of device/info pairs for the specified
  5923. * info type and for all devices in the program.
  5924. * On an error reading the info for any device, an empty vector of info will be returned.
  5925. */
  5926. template <cl_int name>
  5927. vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
  5928. getBuildInfo(cl_int *err = NULL) const
  5929. {
  5930. cl_int result = CL_SUCCESS;
  5931. auto devs = getInfo<CL_PROGRAM_DEVICES>(&result);
  5932. vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
  5933. devInfo;
  5934. // If there was an initial error from getInfo return the error
  5935. if (result != CL_SUCCESS) {
  5936. if (err != NULL) {
  5937. *err = result;
  5938. }
  5939. return devInfo;
  5940. }
  5941. for (const cl::Device &d : devs) {
  5942. typename detail::param_traits<
  5943. detail::cl_program_build_info, name>::param_type param;
  5944. result = getBuildInfo(d, name, &param);
  5945. devInfo.push_back(
  5946. std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>
  5947. (d, param));
  5948. if (result != CL_SUCCESS) {
  5949. // On error, leave the loop and return the error code
  5950. break;
  5951. }
  5952. }
  5953. if (err != NULL) {
  5954. *err = result;
  5955. }
  5956. if (result != CL_SUCCESS) {
  5957. devInfo.clear();
  5958. }
  5959. return devInfo;
  5960. }
  5961. cl_int createKernels(vector<Kernel>* kernels)
  5962. {
  5963. cl_uint numKernels;
  5964. cl_int err = ::clCreateKernelsInProgram(object_, 0, NULL, &numKernels);
  5965. if (err != CL_SUCCESS) {
  5966. return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
  5967. }
  5968. vector<cl_kernel> value(numKernels);
  5969. err = ::clCreateKernelsInProgram(
  5970. object_, numKernels, value.data(), NULL);
  5971. if (err != CL_SUCCESS) {
  5972. return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
  5973. }
  5974. if (kernels) {
  5975. kernels->resize(value.size());
5976. // Assign to the output vector, transferring ownership of each
5977. // underlying cl_kernel into a cl::Kernel wrapper
  5978. for (size_type i = 0; i < value.size(); i++) {
  5979. // We do not need to retain because this kernel is being created
  5980. // by the runtime
  5981. (*kernels)[i] = Kernel(value[i], false);
  5982. }
  5983. }
  5984. return CL_SUCCESS;
  5985. }
  5986. #if CL_HPP_TARGET_OPENCL_VERSION >= 220
  5987. /*! \brief Registers a callback function to be called when destructors for
  5988. * program scope global variables are complete and before the
  5989. * program is released.
  5990. *
  5991. * Wraps clSetProgramReleaseCallback().
  5992. *
  5993. * Each call to this function registers the specified user callback function
  5994. * on a callback stack associated with program. The registered user callback
  5995. * functions are called in the reverse order in which they were registered.
  5996. */
  5997. cl_int setReleaseCallback(
  5998. void (CL_CALLBACK * pfn_notify)(cl_program program, void * user_data),
  5999. void * user_data = NULL)
  6000. {
  6001. return detail::errHandler(
  6002. ::clSetProgramReleaseCallback(
  6003. object_,
  6004. pfn_notify,
  6005. user_data),
  6006. __SET_PROGRAM_RELEASE_CALLBACK_ERR);
  6007. }
  6008. /*! \brief Sets a SPIR-V specialization constant.
  6009. *
  6010. * Wraps clSetProgramSpecializationConstant().
  6011. */
  6012. template <typename T>
  6013. typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
  6014. setSpecializationConstant(cl_uint index, const T &value)
  6015. {
  6016. return detail::errHandler(
  6017. ::clSetProgramSpecializationConstant(
  6018. object_,
  6019. index,
  6020. sizeof(value),
  6021. &value),
  6022. __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
  6023. }
  6024. /*! \brief Sets a SPIR-V specialization constant.
  6025. *
  6026. * Wraps clSetProgramSpecializationConstant().
  6027. */
  6028. cl_int setSpecializationConstant(cl_uint index, size_type size, const void* value)
  6029. {
  6030. return detail::errHandler(
  6031. ::clSetProgramSpecializationConstant(
  6032. object_,
  6033. index,
  6034. size,
  6035. value),
  6036. __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
  6037. }
  6038. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
  6039. };
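/*! \par Example
* A minimal, non-normative sketch of building a program and inspecting per-device
* build logs, assuming the error-code (non-exception) style; `context` and the
* source string `src` are assumed to exist, and the build option shown is only
* valid on OpenCL 2.0 devices.
* \code
* cl_int err = CL_SUCCESS;
* cl::Program program(context, src);
* err = program.build("-cl-std=CL2.0");
* if (err != CL_SUCCESS) {
*     for (auto &log : program.getBuildInfo<CL_PROGRAM_BUILD_LOG>()) {
*         // log.first is the cl::Device, log.second its build log string
*     }
* }
* cl::vector<cl::Kernel> kernels;
* program.createKernels(&kernels);
* \endcode
*/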
  6040. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  6041. inline Program linkProgram(
  6042. Program input1,
  6043. Program input2,
  6044. const char* options = NULL,
  6045. void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
  6046. void* data = NULL,
  6047. cl_int* err = NULL)
  6048. {
  6049. cl_int error_local = CL_SUCCESS;
  6050. cl_program programs[2] = { input1(), input2() };
  6051. Context ctx = input1.getInfo<CL_PROGRAM_CONTEXT>(&error_local);
  6052. if(error_local!=CL_SUCCESS) {
  6053. detail::errHandler(error_local, __LINK_PROGRAM_ERR);
  6054. }
  6055. cl_program prog = ::clLinkProgram(
  6056. ctx(),
  6057. 0,
  6058. NULL,
  6059. options,
  6060. 2,
  6061. programs,
  6062. notifyFptr,
  6063. data,
  6064. &error_local);
6065. detail::errHandler(error_local, __LINK_PROGRAM_ERR);
  6066. if (err != NULL) {
  6067. *err = error_local;
  6068. }
  6069. return Program(prog);
  6070. }
  6071. inline Program linkProgram(
  6072. vector<Program> inputPrograms,
  6073. const char* options = NULL,
  6074. void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
  6075. void* data = NULL,
  6076. cl_int* err = NULL)
  6077. {
  6078. cl_int error_local = CL_SUCCESS;
  6079. vector<cl_program> programs(inputPrograms.size());
  6080. for (unsigned int i = 0; i < inputPrograms.size(); i++) {
  6081. programs[i] = inputPrograms[i]();
  6082. }
  6083. Context ctx;
  6084. if(inputPrograms.size() > 0) {
  6085. ctx = inputPrograms[0].getInfo<CL_PROGRAM_CONTEXT>(&error_local);
  6086. if(error_local!=CL_SUCCESS) {
  6087. detail::errHandler(error_local, __LINK_PROGRAM_ERR);
  6088. }
  6089. }
  6090. cl_program prog = ::clLinkProgram(
  6091. ctx(),
  6092. 0,
  6093. NULL,
  6094. options,
  6095. (cl_uint)inputPrograms.size(),
  6096. programs.data(),
  6097. notifyFptr,
  6098. data,
  6099. &error_local);
6100. detail::errHandler(error_local, __LINK_PROGRAM_ERR);
  6101. if (err != NULL) {
  6102. *err = error_local;
  6103. }
  6104. return Program(prog, false);
  6105. }
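/*! \par Example
* A minimal, non-normative sketch of separate compilation and linking; `context`
* and the source strings `srcA`/`srcB` are assumed to exist, and compile()
* requires CL_HPP_TARGET_OPENCL_VERSION >= 120.
* \code
* cl_int err = CL_SUCCESS;
* cl::Program partA(context, srcA);
* cl::Program partB(context, srcB);
* partA.compile();
* partB.compile();
* cl::Program linked = cl::linkProgram(partA, partB, nullptr, nullptr, nullptr, &err);
* \endcode
*/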
  6106. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  6107. // Template specialization for CL_PROGRAM_BINARIES
  6108. template <>
  6109. inline cl_int cl::Program::getInfo(cl_program_info name, vector<vector<unsigned char>>* param) const
  6110. {
  6111. if (name != CL_PROGRAM_BINARIES) {
  6112. return CL_INVALID_VALUE;
  6113. }
  6114. if (param) {
  6115. // Resize the parameter array appropriately for each allocation
  6116. // and pass down to the helper
  6117. vector<size_type> sizes = getInfo<CL_PROGRAM_BINARY_SIZES>();
  6118. size_type numBinaries = sizes.size();
  6119. // Resize the parameter array and constituent arrays
  6120. param->resize(numBinaries);
  6121. for (size_type i = 0; i < numBinaries; ++i) {
  6122. (*param)[i].resize(sizes[i]);
  6123. }
  6124. return detail::errHandler(
  6125. detail::getInfo(&::clGetProgramInfo, object_, name, param),
  6126. __GET_PROGRAM_INFO_ERR);
  6127. }
  6128. return CL_SUCCESS;
  6129. }
  6130. template<>
  6131. inline vector<vector<unsigned char>> cl::Program::getInfo<CL_PROGRAM_BINARIES>(cl_int* err) const
  6132. {
  6133. vector<vector<unsigned char>> binariesVectors;
  6134. cl_int result = getInfo(CL_PROGRAM_BINARIES, &binariesVectors);
  6135. if (err != NULL) {
  6136. *err = result;
  6137. }
  6138. return binariesVectors;
  6139. }
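/*! \par Example
* A minimal, non-normative sketch of retrieving program binaries, assuming the
* default Binaries typedef (CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY
* not defined); `program` is assumed to be a built cl::Program.
* \code
* cl_int err = CL_SUCCESS;
* cl::Program::Binaries bins = program.getInfo<CL_PROGRAM_BINARIES>(&err);
* // bins[i] is the binary for the i-th device reported by CL_PROGRAM_DEVICES and can
* // later be handed back to the Program(context, devices, binaries, ...) constructor.
* \endcode
*/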
  6140. #if CL_HPP_TARGET_OPENCL_VERSION >= 220
  6141. // Template specialization for clSetProgramSpecializationConstant
  6142. template <>
  6143. inline cl_int cl::Program::setSpecializationConstant(cl_uint index, const bool &value)
  6144. {
  6145. cl_uchar ucValue = value ? CL_UCHAR_MAX : 0;
  6146. return detail::errHandler(
  6147. ::clSetProgramSpecializationConstant(
  6148. object_,
  6149. index,
  6150. sizeof(ucValue),
  6151. &ucValue),
  6152. __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
  6153. }
  6154. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
  6155. inline Kernel::Kernel(const Program& program, const char* name, cl_int* err)
  6156. {
  6157. cl_int error;
  6158. object_ = ::clCreateKernel(program(), name, &error);
  6159. detail::errHandler(error, __CREATE_KERNEL_ERR);
  6160. if (err != NULL) {
  6161. *err = error;
  6162. }
  6163. }
  6164. enum class QueueProperties : cl_command_queue_properties
  6165. {
  6166. None = 0,
  6167. Profiling = CL_QUEUE_PROFILING_ENABLE,
  6168. OutOfOrder = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE,
  6169. };
  6170. inline QueueProperties operator|(QueueProperties lhs, QueueProperties rhs)
  6171. {
  6172. return static_cast<QueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
  6173. }
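/*! \par Example
* A minimal, non-normative sketch of combining QueueProperties flags when
* constructing a queue (the matching CommandQueue constructor appears below);
* `context` and `device` are assumed to exist.
* \code
* cl_int err = CL_SUCCESS;
* cl::CommandQueue queue(
*     context, device,
*     cl::QueueProperties::Profiling | cl::QueueProperties::OutOfOrder,
*     &err);
* \endcode
*/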
  6174. /*! \class CommandQueue
  6175. * \brief CommandQueue interface for cl_command_queue.
  6176. */
  6177. class CommandQueue : public detail::Wrapper<cl_command_queue>
  6178. {
  6179. private:
  6180. static std::once_flag default_initialized_;
  6181. static CommandQueue default_;
  6182. static cl_int default_error_;
  6183. /*! \brief Create the default command queue returned by @ref getDefault.
  6184. *
  6185. * It sets default_error_ to indicate success or failure. It does not throw
  6186. * @c cl::Error.
  6187. */
  6188. static void makeDefault()
  6189. {
  6190. /* We don't want to throw an error from this function, so we have to
  6191. * catch and set the error flag.
  6192. */
  6193. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  6194. try
  6195. #endif
  6196. {
6197. cl_int error;
  6198. Context context = Context::getDefault(&error);
  6199. if (error != CL_SUCCESS) {
  6200. default_error_ = error;
  6201. }
  6202. else {
  6203. Device device = Device::getDefault();
  6204. default_ = CommandQueue(context, device, 0, &default_error_);
  6205. }
  6206. }
  6207. #if defined(CL_HPP_ENABLE_EXCEPTIONS)
  6208. catch (cl::Error &e) {
  6209. default_error_ = e.err();
  6210. }
  6211. #endif
  6212. }
  6213. /*! \brief Create the default command queue.
  6214. *
  6215. * This sets @c default_. It does not throw
  6216. * @c cl::Error.
  6217. */
  6218. static void makeDefaultProvided(const CommandQueue &c) {
  6219. default_ = c;
  6220. }
  6221. public:
  6222. #ifdef CL_HPP_UNIT_TEST_ENABLE
  6223. /*! \brief Reset the default.
  6224. *
  6225. * This sets @c default_ to an empty value to support cleanup in
  6226. * the unit test framework.
  6227. * This function is not thread safe.
  6228. */
  6229. static void unitTestClearDefault() {
  6230. default_ = CommandQueue();
  6231. }
  6232. #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
  6233. /*!
6234. * \brief Constructs a CommandQueue on the first device of the default context, using the passed properties.
6235. * Will return a CL_INVALID_QUEUE_PROPERTIES error if CL_QUEUE_ON_DEVICE is specified.
  6236. */
  6237. CommandQueue(
  6238. cl_command_queue_properties properties,
  6239. cl_int* err = NULL)
  6240. {
  6241. cl_int error;
  6242. Context context = Context::getDefault(&error);
  6243. detail::errHandler(error, __CREATE_CONTEXT_ERR);
  6244. if (error != CL_SUCCESS) {
  6245. if (err != NULL) {
  6246. *err = error;
  6247. }
  6248. }
  6249. else {
  6250. Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
  6251. bool useWithProperties;
  6252. #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6253. // Run-time decision based on the actual platform
  6254. {
  6255. cl_uint version = detail::getContextPlatformVersion(context());
  6256. useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
  6257. }
  6258. #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
  6259. useWithProperties = true;
  6260. #else
  6261. useWithProperties = false;
  6262. #endif
  6263. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  6264. if (useWithProperties) {
  6265. cl_queue_properties queue_properties[] = {
  6266. CL_QUEUE_PROPERTIES, properties, 0 };
  6267. if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
  6268. object_ = ::clCreateCommandQueueWithProperties(
  6269. context(), device(), queue_properties, &error);
  6270. }
  6271. else {
  6272. error = CL_INVALID_QUEUE_PROPERTIES;
  6273. }
  6274. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  6275. if (err != NULL) {
  6276. *err = error;
  6277. }
  6278. }
  6279. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6280. #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6281. if (!useWithProperties) {
  6282. object_ = ::clCreateCommandQueue(
  6283. context(), device(), properties, &error);
  6284. detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
  6285. if (err != NULL) {
  6286. *err = error;
  6287. }
  6288. }
  6289. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6290. }
  6291. }
  6292. /*!
6293. * \brief Constructs a CommandQueue on the first device of the default context, using the passed properties.
6294. * Will return a CL_INVALID_QUEUE_PROPERTIES error if CL_QUEUE_ON_DEVICE is specified.
  6295. */
  6296. CommandQueue(
  6297. QueueProperties properties,
  6298. cl_int* err = NULL)
  6299. {
  6300. cl_int error;
  6301. Context context = Context::getDefault(&error);
  6302. detail::errHandler(error, __CREATE_CONTEXT_ERR);
  6303. if (error != CL_SUCCESS) {
  6304. if (err != NULL) {
  6305. *err = error;
  6306. }
  6307. }
  6308. else {
  6309. Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
  6310. bool useWithProperties;
  6311. #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6312. // Run-time decision based on the actual platform
  6313. {
  6314. cl_uint version = detail::getContextPlatformVersion(context());
  6315. useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
  6316. }
  6317. #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
  6318. useWithProperties = true;
  6319. #else
  6320. useWithProperties = false;
  6321. #endif
  6322. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  6323. if (useWithProperties) {
  6324. cl_queue_properties queue_properties[] = {
  6325. CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
  6326. object_ = ::clCreateCommandQueueWithProperties(
  6327. context(), device(), queue_properties, &error);
  6328. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  6329. if (err != NULL) {
  6330. *err = error;
  6331. }
  6332. }
  6333. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6334. #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6335. if (!useWithProperties) {
  6336. object_ = ::clCreateCommandQueue(
  6337. context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
  6338. detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
  6339. if (err != NULL) {
  6340. *err = error;
  6341. }
  6342. }
  6343. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6344. }
  6345. }
  6346. /*!
6347. * \brief Constructs a CommandQueue for an implementation-defined device in the given context.
6348. * Will return a CL_INVALID_QUEUE_PROPERTIES error if CL_QUEUE_ON_DEVICE is specified.
  6349. */
  6350. explicit CommandQueue(
  6351. const Context& context,
  6352. cl_command_queue_properties properties = 0,
  6353. cl_int* err = NULL)
  6354. {
  6355. cl_int error;
  6356. bool useWithProperties;
  6357. vector<cl::Device> devices;
  6358. error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
  6359. detail::errHandler(error, __CREATE_CONTEXT_ERR);
  6360. if (error != CL_SUCCESS)
  6361. {
  6362. if (err != NULL) {
  6363. *err = error;
  6364. }
  6365. return;
  6366. }
  6367. #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6368. // Run-time decision based on the actual platform
  6369. {
  6370. cl_uint version = detail::getContextPlatformVersion(context());
  6371. useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
  6372. }
  6373. #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
  6374. useWithProperties = true;
  6375. #else
  6376. useWithProperties = false;
  6377. #endif
  6378. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  6379. if (useWithProperties) {
  6380. cl_queue_properties queue_properties[] = {
  6381. CL_QUEUE_PROPERTIES, properties, 0 };
  6382. if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
  6383. object_ = ::clCreateCommandQueueWithProperties(
  6384. context(), devices[0](), queue_properties, &error);
  6385. }
  6386. else {
  6387. error = CL_INVALID_QUEUE_PROPERTIES;
  6388. }
  6389. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  6390. if (err != NULL) {
  6391. *err = error;
  6392. }
  6393. }
  6394. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6395. #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6396. if (!useWithProperties) {
  6397. object_ = ::clCreateCommandQueue(
  6398. context(), devices[0](), properties, &error);
  6399. detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
  6400. if (err != NULL) {
  6401. *err = error;
  6402. }
  6403. }
  6404. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6405. }
  6406. /*!
6407. * \brief Constructs a CommandQueue for an implementation-defined device in the given context.
6408. * Will return a CL_INVALID_QUEUE_PROPERTIES error if CL_QUEUE_ON_DEVICE is specified.
  6409. */
  6410. explicit CommandQueue(
  6411. const Context& context,
  6412. QueueProperties properties,
  6413. cl_int* err = NULL)
  6414. {
  6415. cl_int error;
  6416. bool useWithProperties;
  6417. vector<cl::Device> devices;
  6418. error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
  6419. detail::errHandler(error, __CREATE_CONTEXT_ERR);
  6420. if (error != CL_SUCCESS)
  6421. {
  6422. if (err != NULL) {
  6423. *err = error;
  6424. }
  6425. return;
  6426. }
  6427. #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6428. // Run-time decision based on the actual platform
  6429. {
  6430. cl_uint version = detail::getContextPlatformVersion(context());
  6431. useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
  6432. }
  6433. #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
  6434. useWithProperties = true;
  6435. #else
  6436. useWithProperties = false;
  6437. #endif
  6438. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  6439. if (useWithProperties) {
  6440. cl_queue_properties queue_properties[] = {
  6441. CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
  6442. object_ = ::clCreateCommandQueueWithProperties(
  6443. context(), devices[0](), queue_properties, &error);
  6444. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  6445. if (err != NULL) {
  6446. *err = error;
  6447. }
  6448. }
  6449. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6450. #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6451. if (!useWithProperties) {
  6452. object_ = ::clCreateCommandQueue(
  6453. context(), devices[0](), static_cast<cl_command_queue_properties>(properties), &error);
  6454. detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
  6455. if (err != NULL) {
  6456. *err = error;
  6457. }
  6458. }
  6459. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6460. }
  6461. /*!
6462. * \brief Constructs a CommandQueue for a passed device and context.
6463. * Will return a CL_INVALID_QUEUE_PROPERTIES error if CL_QUEUE_ON_DEVICE is specified.
  6464. */
  6465. CommandQueue(
  6466. const Context& context,
  6467. const Device& device,
  6468. cl_command_queue_properties properties = 0,
  6469. cl_int* err = NULL)
  6470. {
  6471. cl_int error;
  6472. bool useWithProperties;
  6473. #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6474. // Run-time decision based on the actual platform
  6475. {
  6476. cl_uint version = detail::getContextPlatformVersion(context());
  6477. useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
  6478. }
  6479. #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
  6480. useWithProperties = true;
  6481. #else
  6482. useWithProperties = false;
  6483. #endif
  6484. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  6485. if (useWithProperties) {
  6486. cl_queue_properties queue_properties[] = {
  6487. CL_QUEUE_PROPERTIES, properties, 0 };
  6488. object_ = ::clCreateCommandQueueWithProperties(
  6489. context(), device(), queue_properties, &error);
  6490. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  6491. if (err != NULL) {
  6492. *err = error;
  6493. }
  6494. }
  6495. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6496. #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6497. if (!useWithProperties) {
  6498. object_ = ::clCreateCommandQueue(
  6499. context(), device(), properties, &error);
  6500. detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
  6501. if (err != NULL) {
  6502. *err = error;
  6503. }
  6504. }
  6505. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6506. }
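/*
 * Usage sketch (illustrative only; assumes an already-initialized cl::Context `ctx`
 * and cl::Device `dev`): creating a profiling-enabled host command queue with this
 * constructor.
 *
 *   cl_int err = CL_SUCCESS;
 *   cl::CommandQueue queue(ctx, dev, CL_QUEUE_PROFILING_ENABLE, &err);
 *   if (err != CL_SUCCESS) {
 *     // handle the error
 *   }
 */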
  6507. /*!
6508. * \brief Constructs a CommandQueue for a passed device and context.
6509. * Will return a CL_INVALID_QUEUE_PROPERTIES error if CL_QUEUE_ON_DEVICE is specified.
  6510. */
  6511. CommandQueue(
  6512. const Context& context,
  6513. const Device& device,
  6514. QueueProperties properties,
  6515. cl_int* err = NULL)
  6516. {
  6517. cl_int error;
  6518. bool useWithProperties;
  6519. #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6520. // Run-time decision based on the actual platform
  6521. {
  6522. cl_uint version = detail::getContextPlatformVersion(context());
  6523. useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
  6524. }
  6525. #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
  6526. useWithProperties = true;
  6527. #else
  6528. useWithProperties = false;
  6529. #endif
  6530. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  6531. if (useWithProperties) {
  6532. cl_queue_properties queue_properties[] = {
  6533. CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
  6534. object_ = ::clCreateCommandQueueWithProperties(
  6535. context(), device(), queue_properties, &error);
  6536. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  6537. if (err != NULL) {
  6538. *err = error;
  6539. }
  6540. }
  6541. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6542. #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6543. if (!useWithProperties) {
  6544. object_ = ::clCreateCommandQueue(
  6545. context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
  6546. detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
  6547. if (err != NULL) {
  6548. *err = error;
  6549. }
  6550. }
  6551. #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
  6552. }
  6553. static CommandQueue getDefault(cl_int * err = NULL)
  6554. {
  6555. std::call_once(default_initialized_, makeDefault);
  6556. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  6557. detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  6558. #else // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6559. detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_ERR);
  6560. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
  6561. if (err != NULL) {
  6562. *err = default_error_;
  6563. }
  6564. return default_;
  6565. }
  6566. /**
  6567. * Modify the default command queue to be used by
  6568. * subsequent operations.
  6569. * Will only set the default if no default was previously created.
  6570. * @return updated default command queue.
  6571. * Should be compared to the passed value to ensure that it was updated.
  6572. */
  6573. static CommandQueue setDefault(const CommandQueue &default_queue)
  6574. {
  6575. std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_queue));
  6576. detail::errHandler(default_error_);
  6577. return default_;
  6578. }
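/*
 * Usage sketch (illustrative only; assumes cl::Context `ctx` and cl::Device `dev`
 * are already set up): installing an explicitly-created queue as the process-wide
 * default before any call to getDefault().
 *
 *   cl::CommandQueue myQueue(ctx, dev);
 *   cl::CommandQueue installed = cl::CommandQueue::setDefault(myQueue);
 *   // If a default already existed, `installed` is the pre-existing queue,
 *   // not `myQueue`.
 *   cl::CommandQueue q = cl::CommandQueue::getDefault();
 */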
  6579. CommandQueue() { }
6580. /*! \brief Constructor from cl_command_queue - takes ownership.
  6581. *
  6582. * \param retainObject will cause the constructor to retain its cl object.
  6583. * Defaults to false to maintain compatibility with
  6584. * earlier versions.
  6585. */
  6586. explicit CommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
  6587. detail::Wrapper<cl_type>(commandQueue, retainObject) { }
  6588. CommandQueue& operator = (const cl_command_queue& rhs)
  6589. {
  6590. detail::Wrapper<cl_type>::operator=(rhs);
  6591. return *this;
  6592. }
  6593. /*! \brief Copy constructor to forward copy to the superclass correctly.
  6594. * Required for MSVC.
  6595. */
  6596. CommandQueue(const CommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
  6597. /*! \brief Copy assignment to forward copy to the superclass correctly.
  6598. * Required for MSVC.
  6599. */
  6600. CommandQueue& operator = (const CommandQueue &queue)
  6601. {
  6602. detail::Wrapper<cl_type>::operator=(queue);
  6603. return *this;
  6604. }
  6605. /*! \brief Move constructor to forward move to the superclass correctly.
  6606. * Required for MSVC.
  6607. */
  6608. CommandQueue(CommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
  6609. /*! \brief Move assignment to forward move to the superclass correctly.
  6610. * Required for MSVC.
  6611. */
  6612. CommandQueue& operator = (CommandQueue &&queue)
  6613. {
  6614. detail::Wrapper<cl_type>::operator=(std::move(queue));
  6615. return *this;
  6616. }
  6617. template <typename T>
  6618. cl_int getInfo(cl_command_queue_info name, T* param) const
  6619. {
  6620. return detail::errHandler(
  6621. detail::getInfo(
  6622. &::clGetCommandQueueInfo, object_, name, param),
  6623. __GET_COMMAND_QUEUE_INFO_ERR);
  6624. }
  6625. template <cl_int name> typename
  6626. detail::param_traits<detail::cl_command_queue_info, name>::param_type
  6627. getInfo(cl_int* err = NULL) const
  6628. {
  6629. typename detail::param_traits<
  6630. detail::cl_command_queue_info, name>::param_type param;
  6631. cl_int result = getInfo(name, &param);
  6632. if (err != NULL) {
  6633. *err = result;
  6634. }
  6635. return param;
  6636. }
  6637. cl_int enqueueReadBuffer(
  6638. const Buffer& buffer,
  6639. cl_bool blocking,
  6640. size_type offset,
  6641. size_type size,
  6642. void* ptr,
  6643. const vector<Event>* events = NULL,
  6644. Event* event = NULL) const
  6645. {
  6646. cl_event tmp;
  6647. cl_int err = detail::errHandler(
  6648. ::clEnqueueReadBuffer(
  6649. object_, buffer(), blocking, offset, size,
  6650. ptr,
  6651. (events != NULL) ? (cl_uint) events->size() : 0,
  6652. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6653. (event != NULL) ? &tmp : NULL),
  6654. __ENQUEUE_READ_BUFFER_ERR);
  6655. if (event != NULL && err == CL_SUCCESS)
  6656. *event = tmp;
  6657. return err;
  6658. }
  6659. cl_int enqueueWriteBuffer(
  6660. const Buffer& buffer,
  6661. cl_bool blocking,
  6662. size_type offset,
  6663. size_type size,
  6664. const void* ptr,
  6665. const vector<Event>* events = NULL,
  6666. Event* event = NULL) const
  6667. {
  6668. cl_event tmp;
  6669. cl_int err = detail::errHandler(
  6670. ::clEnqueueWriteBuffer(
  6671. object_, buffer(), blocking, offset, size,
  6672. ptr,
  6673. (events != NULL) ? (cl_uint) events->size() : 0,
  6674. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6675. (event != NULL) ? &tmp : NULL),
  6676. __ENQUEUE_WRITE_BUFFER_ERR);
  6677. if (event != NULL && err == CL_SUCCESS)
  6678. *event = tmp;
  6679. return err;
  6680. }
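/*
 * Usage sketch (illustrative only; assumes a valid cl::CommandQueue `queue`, its
 * cl::Context `ctx`, and a populated std::vector<float> `host`): a blocking write
 * to a device buffer followed by a blocking read-back.
 *
 *   cl::Buffer buf(ctx, CL_MEM_READ_WRITE, host.size() * sizeof(float));
 *   queue.enqueueWriteBuffer(buf, CL_TRUE, 0, host.size() * sizeof(float), host.data());
 *   std::vector<float> back(host.size());
 *   queue.enqueueReadBuffer(buf, CL_TRUE, 0, back.size() * sizeof(float), back.data());
 */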
  6681. cl_int enqueueCopyBuffer(
  6682. const Buffer& src,
  6683. const Buffer& dst,
  6684. size_type src_offset,
  6685. size_type dst_offset,
  6686. size_type size,
  6687. const vector<Event>* events = NULL,
  6688. Event* event = NULL) const
  6689. {
  6690. cl_event tmp;
  6691. cl_int err = detail::errHandler(
  6692. ::clEnqueueCopyBuffer(
  6693. object_, src(), dst(), src_offset, dst_offset, size,
  6694. (events != NULL) ? (cl_uint) events->size() : 0,
  6695. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6696. (event != NULL) ? &tmp : NULL),
  6697. __ENQEUE_COPY_BUFFER_ERR);
  6698. if (event != NULL && err == CL_SUCCESS)
  6699. *event = tmp;
  6700. return err;
  6701. }
  6702. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  6703. cl_int enqueueReadBufferRect(
  6704. const Buffer& buffer,
  6705. cl_bool blocking,
  6706. const array<size_type, 3>& buffer_offset,
  6707. const array<size_type, 3>& host_offset,
  6708. const array<size_type, 3>& region,
  6709. size_type buffer_row_pitch,
  6710. size_type buffer_slice_pitch,
  6711. size_type host_row_pitch,
  6712. size_type host_slice_pitch,
  6713. void *ptr,
  6714. const vector<Event>* events = NULL,
  6715. Event* event = NULL) const
  6716. {
  6717. cl_event tmp;
  6718. cl_int err = detail::errHandler(
  6719. ::clEnqueueReadBufferRect(
  6720. object_,
  6721. buffer(),
  6722. blocking,
  6723. buffer_offset.data(),
  6724. host_offset.data(),
  6725. region.data(),
  6726. buffer_row_pitch,
  6727. buffer_slice_pitch,
  6728. host_row_pitch,
  6729. host_slice_pitch,
  6730. ptr,
  6731. (events != NULL) ? (cl_uint) events->size() : 0,
  6732. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6733. (event != NULL) ? &tmp : NULL),
  6734. __ENQUEUE_READ_BUFFER_RECT_ERR);
  6735. if (event != NULL && err == CL_SUCCESS)
  6736. *event = tmp;
  6737. return err;
  6738. }
  6739. cl_int enqueueWriteBufferRect(
  6740. const Buffer& buffer,
  6741. cl_bool blocking,
  6742. const array<size_type, 3>& buffer_offset,
  6743. const array<size_type, 3>& host_offset,
  6744. const array<size_type, 3>& region,
  6745. size_type buffer_row_pitch,
  6746. size_type buffer_slice_pitch,
  6747. size_type host_row_pitch,
  6748. size_type host_slice_pitch,
  6749. const void *ptr,
  6750. const vector<Event>* events = NULL,
  6751. Event* event = NULL) const
  6752. {
  6753. cl_event tmp;
  6754. cl_int err = detail::errHandler(
  6755. ::clEnqueueWriteBufferRect(
  6756. object_,
  6757. buffer(),
  6758. blocking,
  6759. buffer_offset.data(),
  6760. host_offset.data(),
  6761. region.data(),
  6762. buffer_row_pitch,
  6763. buffer_slice_pitch,
  6764. host_row_pitch,
  6765. host_slice_pitch,
  6766. ptr,
  6767. (events != NULL) ? (cl_uint) events->size() : 0,
  6768. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6769. (event != NULL) ? &tmp : NULL),
  6770. __ENQUEUE_WRITE_BUFFER_RECT_ERR);
  6771. if (event != NULL && err == CL_SUCCESS)
  6772. *event = tmp;
  6773. return err;
  6774. }
  6775. cl_int enqueueCopyBufferRect(
  6776. const Buffer& src,
  6777. const Buffer& dst,
  6778. const array<size_type, 3>& src_origin,
  6779. const array<size_type, 3>& dst_origin,
  6780. const array<size_type, 3>& region,
  6781. size_type src_row_pitch,
  6782. size_type src_slice_pitch,
  6783. size_type dst_row_pitch,
  6784. size_type dst_slice_pitch,
  6785. const vector<Event>* events = NULL,
  6786. Event* event = NULL) const
  6787. {
  6788. cl_event tmp;
  6789. cl_int err = detail::errHandler(
  6790. ::clEnqueueCopyBufferRect(
  6791. object_,
  6792. src(),
  6793. dst(),
  6794. src_origin.data(),
  6795. dst_origin.data(),
  6796. region.data(),
  6797. src_row_pitch,
  6798. src_slice_pitch,
  6799. dst_row_pitch,
  6800. dst_slice_pitch,
  6801. (events != NULL) ? (cl_uint) events->size() : 0,
  6802. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6803. (event != NULL) ? &tmp : NULL),
  6804. __ENQEUE_COPY_BUFFER_RECT_ERR);
  6805. if (event != NULL && err == CL_SUCCESS)
  6806. *event = tmp;
  6807. return err;
  6808. }
  6809. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
  6810. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  6811. /**
  6812. * Enqueue a command to fill a buffer object with a pattern
  6813. * of a given size. The pattern is specified as a vector type.
  6814. * \tparam PatternType The datatype of the pattern field.
  6815. * The pattern type must be an accepted OpenCL data type.
6816. * \param offset is the offset in bytes into the buffer at
6817. * which to start filling. This must be a multiple of
6818. * the pattern size.
6819. * \param size is the size in bytes of the region to fill.
6820. * This must be a multiple of the pattern size.
  6821. */
  6822. template<typename PatternType>
  6823. cl_int enqueueFillBuffer(
  6824. const Buffer& buffer,
  6825. PatternType pattern,
  6826. size_type offset,
  6827. size_type size,
  6828. const vector<Event>* events = NULL,
  6829. Event* event = NULL) const
  6830. {
  6831. cl_event tmp;
  6832. cl_int err = detail::errHandler(
  6833. ::clEnqueueFillBuffer(
  6834. object_,
  6835. buffer(),
  6836. static_cast<void*>(&pattern),
  6837. sizeof(PatternType),
  6838. offset,
  6839. size,
  6840. (events != NULL) ? (cl_uint) events->size() : 0,
  6841. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6842. (event != NULL) ? &tmp : NULL),
  6843. __ENQUEUE_FILL_BUFFER_ERR);
  6844. if (event != NULL && err == CL_SUCCESS)
  6845. *event = tmp;
  6846. return err;
  6847. }
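/*
 * Usage sketch (illustrative only; assumes `queue` and a cl::Buffer `buf` holding
 * `n` floats, on an OpenCL 1.2 or later platform): zero-filling the buffer and
 * waiting on the returned event.
 *
 *   cl::Event fillDone;
 *   queue.enqueueFillBuffer(buf, 0.0f, 0, n * sizeof(float), NULL, &fillDone);
 *   fillDone.wait();
 */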
  6848. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  6849. cl_int enqueueReadImage(
  6850. const Image& image,
  6851. cl_bool blocking,
  6852. const array<size_type, 3>& origin,
  6853. const array<size_type, 3>& region,
  6854. size_type row_pitch,
  6855. size_type slice_pitch,
  6856. void* ptr,
  6857. const vector<Event>* events = NULL,
  6858. Event* event = NULL) const
  6859. {
  6860. cl_event tmp;
  6861. cl_int err = detail::errHandler(
  6862. ::clEnqueueReadImage(
  6863. object_,
  6864. image(),
  6865. blocking,
  6866. origin.data(),
  6867. region.data(),
  6868. row_pitch,
  6869. slice_pitch,
  6870. ptr,
  6871. (events != NULL) ? (cl_uint) events->size() : 0,
  6872. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6873. (event != NULL) ? &tmp : NULL),
  6874. __ENQUEUE_READ_IMAGE_ERR);
  6875. if (event != NULL && err == CL_SUCCESS)
  6876. *event = tmp;
  6877. return err;
  6878. }
  6879. cl_int enqueueWriteImage(
  6880. const Image& image,
  6881. cl_bool blocking,
  6882. const array<size_type, 3>& origin,
  6883. const array<size_type, 3>& region,
  6884. size_type row_pitch,
  6885. size_type slice_pitch,
  6886. const void* ptr,
  6887. const vector<Event>* events = NULL,
  6888. Event* event = NULL) const
  6889. {
  6890. cl_event tmp;
  6891. cl_int err = detail::errHandler(
  6892. ::clEnqueueWriteImage(
  6893. object_,
  6894. image(),
  6895. blocking,
  6896. origin.data(),
  6897. region.data(),
  6898. row_pitch,
  6899. slice_pitch,
  6900. ptr,
  6901. (events != NULL) ? (cl_uint) events->size() : 0,
  6902. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6903. (event != NULL) ? &tmp : NULL),
  6904. __ENQUEUE_WRITE_IMAGE_ERR);
  6905. if (event != NULL && err == CL_SUCCESS)
  6906. *event = tmp;
  6907. return err;
  6908. }
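/*
 * Usage sketch (illustrative only; assumes `queue`, a 2-D cl::Image2D `img` of
 * `width` x `height` pixels, and a suitably sized host container `pixels`):
 * writing host pixels into the image and reading them back with blocking calls.
 *
 *   cl::array<cl::size_type, 3> origin = {0, 0, 0};
 *   cl::array<cl::size_type, 3> region = {width, height, 1};
 *   queue.enqueueWriteImage(img, CL_TRUE, origin, region, 0, 0, pixels.data());
 *   queue.enqueueReadImage(img, CL_TRUE, origin, region, 0, 0, pixels.data());
 */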
  6909. cl_int enqueueCopyImage(
  6910. const Image& src,
  6911. const Image& dst,
  6912. const array<size_type, 3>& src_origin,
  6913. const array<size_type, 3>& dst_origin,
  6914. const array<size_type, 3>& region,
  6915. const vector<Event>* events = NULL,
  6916. Event* event = NULL) const
  6917. {
  6918. cl_event tmp;
  6919. cl_int err = detail::errHandler(
  6920. ::clEnqueueCopyImage(
  6921. object_,
  6922. src(),
  6923. dst(),
  6924. src_origin.data(),
  6925. dst_origin.data(),
  6926. region.data(),
  6927. (events != NULL) ? (cl_uint) events->size() : 0,
  6928. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6929. (event != NULL) ? &tmp : NULL),
  6930. __ENQUEUE_COPY_IMAGE_ERR);
  6931. if (event != NULL && err == CL_SUCCESS)
  6932. *event = tmp;
  6933. return err;
  6934. }
  6935. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  6936. /**
  6937. * Enqueue a command to fill an image object with a specified color.
  6938. * \param fillColor is the color to use to fill the image.
  6939. * This is a four component RGBA floating-point color value if
  6940. * the image channel data type is not an unnormalized signed or
  6941. * unsigned data type.
  6942. */
  6943. cl_int enqueueFillImage(
  6944. const Image& image,
  6945. cl_float4 fillColor,
  6946. const array<size_type, 3>& origin,
  6947. const array<size_type, 3>& region,
  6948. const vector<Event>* events = NULL,
  6949. Event* event = NULL) const
  6950. {
  6951. cl_event tmp;
  6952. cl_int err = detail::errHandler(
  6953. ::clEnqueueFillImage(
  6954. object_,
  6955. image(),
  6956. static_cast<void*>(&fillColor),
  6957. origin.data(),
  6958. region.data(),
  6959. (events != NULL) ? (cl_uint) events->size() : 0,
  6960. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6961. (event != NULL) ? &tmp : NULL),
  6962. __ENQUEUE_FILL_IMAGE_ERR);
  6963. if (event != NULL && err == CL_SUCCESS)
  6964. *event = tmp;
  6965. return err;
  6966. }
  6967. /**
  6968. * Enqueue a command to fill an image object with a specified color.
  6969. * \param fillColor is the color to use to fill the image.
  6970. * This is a four component RGBA signed integer color value if
  6971. * the image channel data type is an unnormalized signed integer
  6972. * type.
  6973. */
  6974. cl_int enqueueFillImage(
  6975. const Image& image,
  6976. cl_int4 fillColor,
  6977. const array<size_type, 3>& origin,
  6978. const array<size_type, 3>& region,
  6979. const vector<Event>* events = NULL,
  6980. Event* event = NULL) const
  6981. {
  6982. cl_event tmp;
  6983. cl_int err = detail::errHandler(
  6984. ::clEnqueueFillImage(
  6985. object_,
  6986. image(),
  6987. static_cast<void*>(&fillColor),
  6988. origin.data(),
  6989. region.data(),
  6990. (events != NULL) ? (cl_uint) events->size() : 0,
  6991. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  6992. (event != NULL) ? &tmp : NULL),
  6993. __ENQUEUE_FILL_IMAGE_ERR);
  6994. if (event != NULL && err == CL_SUCCESS)
  6995. *event = tmp;
  6996. return err;
  6997. }
  6998. /**
  6999. * Enqueue a command to fill an image object with a specified color.
  7000. * \param fillColor is the color to use to fill the image.
  7001. * This is a four component RGBA unsigned integer color value if
  7002. * the image channel data type is an unnormalized unsigned integer
  7003. * type.
  7004. */
  7005. cl_int enqueueFillImage(
  7006. const Image& image,
  7007. cl_uint4 fillColor,
  7008. const array<size_type, 3>& origin,
  7009. const array<size_type, 3>& region,
  7010. const vector<Event>* events = NULL,
  7011. Event* event = NULL) const
  7012. {
  7013. cl_event tmp;
  7014. cl_int err = detail::errHandler(
  7015. ::clEnqueueFillImage(
  7016. object_,
  7017. image(),
  7018. static_cast<void*>(&fillColor),
  7019. origin.data(),
  7020. region.data(),
  7021. (events != NULL) ? (cl_uint) events->size() : 0,
  7022. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7023. (event != NULL) ? &tmp : NULL),
  7024. __ENQUEUE_FILL_IMAGE_ERR);
  7025. if (event != NULL && err == CL_SUCCESS)
  7026. *event = tmp;
  7027. return err;
  7028. }
  7029. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  7030. cl_int enqueueCopyImageToBuffer(
  7031. const Image& src,
  7032. const Buffer& dst,
  7033. const array<size_type, 3>& src_origin,
  7034. const array<size_type, 3>& region,
  7035. size_type dst_offset,
  7036. const vector<Event>* events = NULL,
  7037. Event* event = NULL) const
  7038. {
  7039. cl_event tmp;
  7040. cl_int err = detail::errHandler(
  7041. ::clEnqueueCopyImageToBuffer(
  7042. object_,
  7043. src(),
  7044. dst(),
  7045. src_origin.data(),
  7046. region.data(),
  7047. dst_offset,
  7048. (events != NULL) ? (cl_uint) events->size() : 0,
  7049. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7050. (event != NULL) ? &tmp : NULL),
  7051. __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR);
  7052. if (event != NULL && err == CL_SUCCESS)
  7053. *event = tmp;
  7054. return err;
  7055. }
  7056. cl_int enqueueCopyBufferToImage(
  7057. const Buffer& src,
  7058. const Image& dst,
  7059. size_type src_offset,
  7060. const array<size_type, 3>& dst_origin,
  7061. const array<size_type, 3>& region,
  7062. const vector<Event>* events = NULL,
  7063. Event* event = NULL) const
  7064. {
  7065. cl_event tmp;
  7066. cl_int err = detail::errHandler(
  7067. ::clEnqueueCopyBufferToImage(
  7068. object_,
  7069. src(),
  7070. dst(),
  7071. src_offset,
  7072. dst_origin.data(),
  7073. region.data(),
  7074. (events != NULL) ? (cl_uint) events->size() : 0,
  7075. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7076. (event != NULL) ? &tmp : NULL),
  7077. __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR);
  7078. if (event != NULL && err == CL_SUCCESS)
  7079. *event = tmp;
  7080. return err;
  7081. }
  7082. void* enqueueMapBuffer(
  7083. const Buffer& buffer,
  7084. cl_bool blocking,
  7085. cl_map_flags flags,
  7086. size_type offset,
  7087. size_type size,
  7088. const vector<Event>* events = NULL,
  7089. Event* event = NULL,
  7090. cl_int* err = NULL) const
  7091. {
  7092. cl_event tmp;
  7093. cl_int error;
  7094. void * result = ::clEnqueueMapBuffer(
  7095. object_, buffer(), blocking, flags, offset, size,
  7096. (events != NULL) ? (cl_uint) events->size() : 0,
  7097. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7098. (event != NULL) ? &tmp : NULL,
  7099. &error);
  7100. detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
  7101. if (err != NULL) {
  7102. *err = error;
  7103. }
  7104. if (event != NULL && error == CL_SUCCESS)
  7105. *event = tmp;
  7106. return result;
  7107. }
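/*
 * Usage sketch (illustrative only; assumes `queue` and a cl::Buffer `buf` of
 * `bytes` bytes): mapping the buffer for host writes, then unmapping it so the
 * device sees the update.
 *
 *   cl_int err = CL_SUCCESS;
 *   void* p = queue.enqueueMapBuffer(buf, CL_TRUE, CL_MAP_WRITE, 0, bytes, NULL, NULL, &err);
 *   if (err == CL_SUCCESS) {
 *     // ... fill the mapped region through p ...
 *     queue.enqueueUnmapMemObject(buf, p);
 *   }
 */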
  7108. void* enqueueMapImage(
  7109. const Image& buffer,
  7110. cl_bool blocking,
  7111. cl_map_flags flags,
  7112. const array<size_type, 3>& origin,
  7113. const array<size_type, 3>& region,
  7114. size_type * row_pitch,
  7115. size_type * slice_pitch,
  7116. const vector<Event>* events = NULL,
  7117. Event* event = NULL,
  7118. cl_int* err = NULL) const
  7119. {
  7120. cl_event tmp;
  7121. cl_int error;
  7122. void * result = ::clEnqueueMapImage(
  7123. object_, buffer(), blocking, flags,
  7124. origin.data(),
  7125. region.data(),
  7126. row_pitch, slice_pitch,
  7127. (events != NULL) ? (cl_uint) events->size() : 0,
  7128. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7129. (event != NULL) ? &tmp : NULL,
  7130. &error);
  7131. detail::errHandler(error, __ENQUEUE_MAP_IMAGE_ERR);
  7132. if (err != NULL) {
  7133. *err = error;
  7134. }
  7135. if (event != NULL && error == CL_SUCCESS)
  7136. *event = tmp;
  7137. return result;
  7138. }
  7139. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  7140. /**
  7141. * Enqueues a command that will allow the host to update a region of a coarse-grained SVM buffer.
  7142. * This variant takes a raw SVM pointer.
  7143. */
  7144. template<typename T>
  7145. cl_int enqueueMapSVM(
  7146. T* ptr,
  7147. cl_bool blocking,
  7148. cl_map_flags flags,
  7149. size_type size,
  7150. const vector<Event>* events = NULL,
  7151. Event* event = NULL) const
  7152. {
  7153. cl_event tmp;
  7154. cl_int err = detail::errHandler(::clEnqueueSVMMap(
  7155. object_, blocking, flags, static_cast<void*>(ptr), size,
  7156. (events != NULL) ? (cl_uint)events->size() : 0,
  7157. (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
  7158. (event != NULL) ? &tmp : NULL),
  7159. __ENQUEUE_MAP_BUFFER_ERR);
  7160. if (event != NULL && err == CL_SUCCESS)
  7161. *event = tmp;
  7162. return err;
  7163. }
  7164. /**
  7165. * Enqueues a command that will allow the host to update a region of a coarse-grained SVM buffer.
  7166. * This variant takes a cl::pointer instance.
  7167. */
  7168. template<typename T, class D>
  7169. cl_int enqueueMapSVM(
  7170. cl::pointer<T, D> &ptr,
  7171. cl_bool blocking,
  7172. cl_map_flags flags,
  7173. size_type size,
  7174. const vector<Event>* events = NULL,
  7175. Event* event = NULL) const
  7176. {
  7177. cl_event tmp;
  7178. cl_int err = detail::errHandler(::clEnqueueSVMMap(
  7179. object_, blocking, flags, static_cast<void*>(ptr.get()), size,
  7180. (events != NULL) ? (cl_uint)events->size() : 0,
  7181. (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
  7182. (event != NULL) ? &tmp : NULL),
  7183. __ENQUEUE_MAP_BUFFER_ERR);
  7184. if (event != NULL && err == CL_SUCCESS)
  7185. *event = tmp;
  7186. return err;
  7187. }
  7188. /**
  7189. * Enqueues a command that will allow the host to update a region of a coarse-grained SVM buffer.
  7190. * This variant takes a cl::vector instance.
  7191. */
  7192. template<typename T, class Alloc>
  7193. cl_int enqueueMapSVM(
  7194. cl::vector<T, Alloc> &container,
  7195. cl_bool blocking,
  7196. cl_map_flags flags,
  7197. const vector<Event>* events = NULL,
  7198. Event* event = NULL) const
  7199. {
  7200. cl_event tmp;
  7201. cl_int err = detail::errHandler(::clEnqueueSVMMap(
  7202. object_, blocking, flags, static_cast<void*>(container.data()), container.size(),
  7203. (events != NULL) ? (cl_uint)events->size() : 0,
  7204. (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
  7205. (event != NULL) ? &tmp : NULL),
  7206. __ENQUEUE_MAP_BUFFER_ERR);
  7207. if (event != NULL && err == CL_SUCCESS)
  7208. *event = tmp;
  7209. return err;
  7210. }
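/*
 * Usage sketch (illustrative only; requires an OpenCL 2.0 platform and assumes
 * `queue` plus a container built on the coarse-grained SVM allocator, e.g.
 * cl::vector<int, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>> v): mapping the
 * SVM region for host access and releasing it again.
 *
 *   queue.enqueueMapSVM(v, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
 *   // ... host reads/writes v ...
 *   queue.enqueueUnmapSVM(v);
 *   queue.finish();
 */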
  7211. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  7212. cl_int enqueueUnmapMemObject(
  7213. const Memory& memory,
  7214. void* mapped_ptr,
  7215. const vector<Event>* events = NULL,
  7216. Event* event = NULL) const
  7217. {
  7218. cl_event tmp;
  7219. cl_int err = detail::errHandler(
  7220. ::clEnqueueUnmapMemObject(
  7221. object_, memory(), mapped_ptr,
  7222. (events != NULL) ? (cl_uint) events->size() : 0,
  7223. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7224. (event != NULL) ? &tmp : NULL),
  7225. __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
  7226. if (event != NULL && err == CL_SUCCESS)
  7227. *event = tmp;
  7228. return err;
  7229. }
  7230. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  7231. /**
  7232. * Enqueues a command that will release a coarse-grained SVM buffer back to the OpenCL runtime.
  7233. * This variant takes a raw SVM pointer.
  7234. */
  7235. template<typename T>
  7236. cl_int enqueueUnmapSVM(
  7237. T* ptr,
  7238. const vector<Event>* events = NULL,
  7239. Event* event = NULL) const
  7240. {
  7241. cl_event tmp;
  7242. cl_int err = detail::errHandler(
  7243. ::clEnqueueSVMUnmap(
  7244. object_, static_cast<void*>(ptr),
  7245. (events != NULL) ? (cl_uint)events->size() : 0,
  7246. (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
  7247. (event != NULL) ? &tmp : NULL),
  7248. __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
  7249. if (event != NULL && err == CL_SUCCESS)
  7250. *event = tmp;
  7251. return err;
  7252. }
  7253. /**
  7254. * Enqueues a command that will release a coarse-grained SVM buffer back to the OpenCL runtime.
  7255. * This variant takes a cl::pointer instance.
  7256. */
  7257. template<typename T, class D>
  7258. cl_int enqueueUnmapSVM(
  7259. cl::pointer<T, D> &ptr,
  7260. const vector<Event>* events = NULL,
  7261. Event* event = NULL) const
  7262. {
  7263. cl_event tmp;
  7264. cl_int err = detail::errHandler(
  7265. ::clEnqueueSVMUnmap(
  7266. object_, static_cast<void*>(ptr.get()),
  7267. (events != NULL) ? (cl_uint)events->size() : 0,
  7268. (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
  7269. (event != NULL) ? &tmp : NULL),
  7270. __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
  7271. if (event != NULL && err == CL_SUCCESS)
  7272. *event = tmp;
  7273. return err;
  7274. }
  7275. /**
  7276. * Enqueues a command that will release a coarse-grained SVM buffer back to the OpenCL runtime.
  7277. * This variant takes a cl::vector instance.
  7278. */
  7279. template<typename T, class Alloc>
  7280. cl_int enqueueUnmapSVM(
  7281. cl::vector<T, Alloc> &container,
  7282. const vector<Event>* events = NULL,
  7283. Event* event = NULL) const
  7284. {
  7285. cl_event tmp;
  7286. cl_int err = detail::errHandler(
  7287. ::clEnqueueSVMUnmap(
  7288. object_, static_cast<void*>(container.data()),
  7289. (events != NULL) ? (cl_uint)events->size() : 0,
  7290. (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
  7291. (event != NULL) ? &tmp : NULL),
  7292. __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
  7293. if (event != NULL && err == CL_SUCCESS)
  7294. *event = tmp;
  7295. return err;
  7296. }
  7297. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  7298. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  7299. /**
  7300. * Enqueues a marker command which waits for either a list of events to complete,
  7301. * or all previously enqueued commands to complete.
  7302. *
  7303. * Enqueues a marker command which waits for either a list of events to complete,
  7304. * or if the list is empty it waits for all commands previously enqueued in command_queue
  7305. * to complete before it completes. This command returns an event which can be waited on,
7306. * i.e. this event can be waited on to ensure that all events either in the event_wait_list
  7307. * or all previously enqueued commands, queued before this command to command_queue,
  7308. * have completed.
  7309. */
  7310. cl_int enqueueMarkerWithWaitList(
  7311. const vector<Event> *events = 0,
  7312. Event *event = 0) const
  7313. {
  7314. cl_event tmp;
  7315. cl_int err = detail::errHandler(
  7316. ::clEnqueueMarkerWithWaitList(
  7317. object_,
  7318. (events != NULL) ? (cl_uint) events->size() : 0,
  7319. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7320. (event != NULL) ? &tmp : NULL),
  7321. __ENQUEUE_MARKER_WAIT_LIST_ERR);
  7322. if (event != NULL && err == CL_SUCCESS)
  7323. *event = tmp;
  7324. return err;
  7325. }
  7326. /**
  7327. * A synchronization point that enqueues a barrier operation.
  7328. *
  7329. * Enqueues a barrier command which waits for either a list of events to complete,
  7330. * or if the list is empty it waits for all commands previously enqueued in command_queue
  7331. * to complete before it completes. This command blocks command execution, that is, any
  7332. * following commands enqueued after it do not execute until it completes. This command
7333. * returns an event which can be waited on, i.e. this event can be waited on to ensure that
  7334. * all events either in the event_wait_list or all previously enqueued commands, queued
  7335. * before this command to command_queue, have completed.
  7336. */
  7337. cl_int enqueueBarrierWithWaitList(
  7338. const vector<Event> *events = 0,
  7339. Event *event = 0) const
  7340. {
  7341. cl_event tmp;
  7342. cl_int err = detail::errHandler(
  7343. ::clEnqueueBarrierWithWaitList(
  7344. object_,
  7345. (events != NULL) ? (cl_uint) events->size() : 0,
  7346. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7347. (event != NULL) ? &tmp : NULL),
  7348. __ENQUEUE_BARRIER_WAIT_LIST_ERR);
  7349. if (event != NULL && err == CL_SUCCESS)
  7350. *event = tmp;
  7351. return err;
  7352. }
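/*
 * Usage sketch (illustrative only; requires an OpenCL 1.2 platform and assumes
 * cl::Event objects `e1` and `e2` returned by earlier enqueue calls on `queue`):
 * inserting a barrier so that later commands wait for those two events.
 *
 *   cl::vector<cl::Event> deps;
 *   deps.push_back(e1);
 *   deps.push_back(e2);
 *   cl::Event barrierDone;
 *   queue.enqueueBarrierWithWaitList(&deps, &barrierDone);
 */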
  7353. /**
  7354. * Enqueues a command to indicate with which device a set of memory objects
  7355. * should be associated.
  7356. */
  7357. cl_int enqueueMigrateMemObjects(
  7358. const vector<Memory> &memObjects,
  7359. cl_mem_migration_flags flags,
  7360. const vector<Event>* events = NULL,
  7361. Event* event = NULL
  7362. ) const
  7363. {
  7364. cl_event tmp;
  7365. vector<cl_mem> localMemObjects(memObjects.size());
  7366. for( int i = 0; i < (int)memObjects.size(); ++i ) {
  7367. localMemObjects[i] = memObjects[i]();
  7368. }
  7369. cl_int err = detail::errHandler(
  7370. ::clEnqueueMigrateMemObjects(
  7371. object_,
  7372. (cl_uint)memObjects.size(),
  7373. localMemObjects.data(),
  7374. flags,
  7375. (events != NULL) ? (cl_uint) events->size() : 0,
  7376. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7377. (event != NULL) ? &tmp : NULL),
  7378. __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
  7379. if (event != NULL && err == CL_SUCCESS)
  7380. *event = tmp;
  7381. return err;
  7382. }
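/*
 * Usage sketch (illustrative only; requires an OpenCL 1.2 platform and assumes
 * `queue` and a cl::Buffer `buf`): hinting that the buffer's contents should be
 * migrated to the host before the host touches them.
 *
 *   cl::vector<cl::Memory> objs;
 *   objs.push_back(buf);
 *   queue.enqueueMigrateMemObjects(objs, CL_MIGRATE_MEM_OBJECT_HOST);
 */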
  7383. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  7384. #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  7385. /**
7386. * Enqueues a command that will allow the host to associate ranges within a set of
7387. * SVM allocations with a device.
7388. * @param sizes - The length, in bytes, to migrate from each pointer.
  7389. */
  7390. template<typename T>
  7391. cl_int enqueueMigrateSVM(
  7392. const cl::vector<T*> &svmRawPointers,
  7393. const cl::vector<size_type> &sizes,
  7394. cl_mem_migration_flags flags = 0,
  7395. const vector<Event>* events = NULL,
  7396. Event* event = NULL) const
  7397. {
  7398. cl_event tmp;
  7399. cl_int err = detail::errHandler(::clEnqueueSVMMigrateMem(
  7400. object_,
  7401. svmRawPointers.size(), static_cast<void**>(svmRawPointers.data()),
7402. sizes.data(), // per-pointer sizes; a zero entry migrates the whole allocation
  7403. flags,
  7404. (events != NULL) ? (cl_uint)events->size() : 0,
  7405. (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
  7406. (event != NULL) ? &tmp : NULL),
  7407. __ENQUEUE_MIGRATE_SVM_ERR);
  7408. if (event != NULL && err == CL_SUCCESS)
  7409. *event = tmp;
  7410. return err;
  7411. }
  7412. /**
7413. * Enqueues a command that will allow the host to associate a set of SVM allocations with
7414. * a device.
  7415. */
  7416. template<typename T>
  7417. cl_int enqueueMigrateSVM(
  7418. const cl::vector<T*> &svmRawPointers,
  7419. cl_mem_migration_flags flags = 0,
  7420. const vector<Event>* events = NULL,
  7421. Event* event = NULL) const
  7422. {
  7423. return enqueueMigrateSVM(svmRawPointers, cl::vector<size_type>(svmRawPointers.size()), flags, events, event);
  7424. }
  7425. /**
7426. * Enqueues a command that will allow the host to associate ranges within a set of
7427. * SVM allocations with a device.
7428. * @param sizes - The length, in bytes, to migrate from each pointer.
  7429. */
  7430. template<typename T, class D>
  7431. cl_int enqueueMigrateSVM(
  7432. const cl::vector<cl::pointer<T, D>> &svmPointers,
  7433. const cl::vector<size_type> &sizes,
  7434. cl_mem_migration_flags flags = 0,
  7435. const vector<Event>* events = NULL,
  7436. Event* event = NULL) const
  7437. {
  7438. cl::vector<void*> svmRawPointers;
  7439. svmRawPointers.reserve(svmPointers.size());
  7440. for (auto p : svmPointers) {
  7441. svmRawPointers.push_back(static_cast<void*>(p.get()));
  7442. }
  7443. return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
  7444. }
  7445. /**
7446. * Enqueues a command that will allow the host to associate a set of SVM allocations with
7447. * a device.
  7448. */
  7449. template<typename T, class D>
  7450. cl_int enqueueMigrateSVM(
  7451. const cl::vector<cl::pointer<T, D>> &svmPointers,
  7452. cl_mem_migration_flags flags = 0,
  7453. const vector<Event>* events = NULL,
  7454. Event* event = NULL) const
  7455. {
  7456. return enqueueMigrateSVM(svmPointers, cl::vector<size_type>(svmPointers.size()), flags, events, event);
  7457. }
  7458. /**
7459. * Enqueues a command that will allow the host to associate ranges within a set of
7460. * SVM allocations with a device.
7461. * @param sizes - The length, in bytes, to migrate from the beginning of each container.
  7462. */
  7463. template<typename T, class Alloc>
  7464. cl_int enqueueMigrateSVM(
  7465. const cl::vector<cl::vector<T, Alloc>> &svmContainers,
  7466. const cl::vector<size_type> &sizes,
  7467. cl_mem_migration_flags flags = 0,
  7468. const vector<Event>* events = NULL,
  7469. Event* event = NULL) const
  7470. {
  7471. cl::vector<void*> svmRawPointers;
  7472. svmRawPointers.reserve(svmContainers.size());
  7473. for (auto p : svmContainers) {
  7474. svmRawPointers.push_back(static_cast<void*>(p.data()));
  7475. }
  7476. return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
  7477. }
  7478. /**
7479. * Enqueues a command that will allow the host to associate a set of SVM allocations with
7480. * a device.
  7481. */
  7482. template<typename T, class Alloc>
  7483. cl_int enqueueMigrateSVM(
  7484. const cl::vector<cl::vector<T, Alloc>> &svmContainers,
  7485. cl_mem_migration_flags flags = 0,
  7486. const vector<Event>* events = NULL,
  7487. Event* event = NULL) const
  7488. {
  7489. return enqueueMigrateSVM(svmContainers, cl::vector<size_type>(svmContainers.size()), flags, events, event);
  7490. }
  7491. #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
  7492. cl_int enqueueNDRangeKernel(
  7493. const Kernel& kernel,
  7494. const NDRange& offset,
  7495. const NDRange& global,
  7496. const NDRange& local = NullRange,
  7497. const vector<Event>* events = NULL,
  7498. Event* event = NULL) const
  7499. {
  7500. cl_event tmp;
  7501. cl_int err = detail::errHandler(
  7502. ::clEnqueueNDRangeKernel(
  7503. object_, kernel(), (cl_uint) global.dimensions(),
  7504. offset.dimensions() != 0 ? (const size_type*) offset : NULL,
  7505. (const size_type*) global,
  7506. local.dimensions() != 0 ? (const size_type*) local : NULL,
  7507. (events != NULL) ? (cl_uint) events->size() : 0,
  7508. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7509. (event != NULL) ? &tmp : NULL),
  7510. __ENQUEUE_NDRANGE_KERNEL_ERR);
  7511. if (event != NULL && err == CL_SUCCESS)
  7512. *event = tmp;
  7513. return err;
  7514. }
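/*
 * Usage sketch (illustrative only; assumes a built cl::Kernel `kernel` with its
 * arguments already set, and that `globalSize` is a multiple of the work-group
 * size 64): launching the kernel over a 1-D range and waiting for completion.
 *
 *   cl::Event done;
 *   queue.enqueueNDRangeKernel(kernel, cl::NullRange, cl::NDRange(globalSize),
 *   cl::NDRange(64), NULL, &done);
 *   done.wait();
 */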
  7515. #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
  7516. CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int enqueueTask(
  7517. const Kernel& kernel,
  7518. const vector<Event>* events = NULL,
  7519. Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
  7520. {
  7521. cl_event tmp;
  7522. cl_int err = detail::errHandler(
  7523. ::clEnqueueTask(
  7524. object_, kernel(),
  7525. (events != NULL) ? (cl_uint) events->size() : 0,
  7526. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7527. (event != NULL) ? &tmp : NULL),
  7528. __ENQUEUE_TASK_ERR);
  7529. if (event != NULL && err == CL_SUCCESS)
  7530. *event = tmp;
  7531. return err;
  7532. }
  7533. #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
  7534. cl_int enqueueNativeKernel(
  7535. void (CL_CALLBACK *userFptr)(void *),
  7536. std::pair<void*, size_type> args,
  7537. const vector<Memory>* mem_objects = NULL,
  7538. const vector<const void*>* mem_locs = NULL,
  7539. const vector<Event>* events = NULL,
  7540. Event* event = NULL) const
  7541. {
  7542. size_type elements = 0;
  7543. if (mem_objects != NULL) {
  7544. elements = mem_objects->size();
  7545. }
  7546. vector<cl_mem> mems(elements);
  7547. for (unsigned int i = 0; i < elements; i++) {
  7548. mems[i] = ((*mem_objects)[i])();
  7549. }
  7550. cl_event tmp;
  7551. cl_int err = detail::errHandler(
  7552. ::clEnqueueNativeKernel(
  7553. object_, userFptr, args.first, args.second,
  7554. (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
  7555. mems.data(),
  7556. (mem_locs != NULL && mem_locs->size() > 0) ? (const void **) &mem_locs->front() : NULL,
  7557. (events != NULL) ? (cl_uint) events->size() : 0,
  7558. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7559. (event != NULL) ? &tmp : NULL),
  7560. __ENQUEUE_NATIVE_KERNEL);
  7561. if (event != NULL && err == CL_SUCCESS)
  7562. *event = tmp;
  7563. return err;
  7564. }
  7565. /**
7566. * APIs deprecated in OpenCL 1.2
  7567. */
  7568. #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  7569. CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
  7570. cl_int enqueueMarker(Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
  7571. {
  7572. cl_event tmp;
  7573. cl_int err = detail::errHandler(
  7574. ::clEnqueueMarker(
  7575. object_,
  7576. (event != NULL) ? &tmp : NULL),
  7577. __ENQUEUE_MARKER_ERR);
  7578. if (event != NULL && err == CL_SUCCESS)
  7579. *event = tmp;
  7580. return err;
  7581. }
  7582. CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
  7583. cl_int enqueueWaitForEvents(const vector<Event>& events) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
  7584. {
  7585. return detail::errHandler(
  7586. ::clEnqueueWaitForEvents(
  7587. object_,
  7588. (cl_uint) events.size(),
  7589. events.size() > 0 ? (const cl_event*) &events.front() : NULL),
  7590. __ENQUEUE_WAIT_FOR_EVENTS_ERR);
  7591. }
  7592. #endif // defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  7593. cl_int enqueueAcquireGLObjects(
  7594. const vector<Memory>* mem_objects = NULL,
  7595. const vector<Event>* events = NULL,
  7596. Event* event = NULL) const
  7597. {
  7598. cl_event tmp;
  7599. cl_int err = detail::errHandler(
  7600. ::clEnqueueAcquireGLObjects(
  7601. object_,
  7602. (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
  7603. (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
  7604. (events != NULL) ? (cl_uint) events->size() : 0,
  7605. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7606. (event != NULL) ? &tmp : NULL),
  7607. __ENQUEUE_ACQUIRE_GL_ERR);
  7608. if (event != NULL && err == CL_SUCCESS)
  7609. *event = tmp;
  7610. return err;
  7611. }
  7612. cl_int enqueueReleaseGLObjects(
  7613. const vector<Memory>* mem_objects = NULL,
  7614. const vector<Event>* events = NULL,
  7615. Event* event = NULL) const
  7616. {
  7617. cl_event tmp;
  7618. cl_int err = detail::errHandler(
  7619. ::clEnqueueReleaseGLObjects(
  7620. object_,
  7621. (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
  7622. (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
  7623. (events != NULL) ? (cl_uint) events->size() : 0,
  7624. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7625. (event != NULL) ? &tmp : NULL),
  7626. __ENQUEUE_RELEASE_GL_ERR);
  7627. if (event != NULL && err == CL_SUCCESS)
  7628. *event = tmp;
  7629. return err;
  7630. }
  7631. #if defined (CL_HPP_USE_DX_INTEROP)
  7632. typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueAcquireD3D10ObjectsKHR)(
  7633. cl_command_queue command_queue, cl_uint num_objects,
  7634. const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
  7635. const cl_event* event_wait_list, cl_event* event);
  7636. typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueReleaseD3D10ObjectsKHR)(
  7637. cl_command_queue command_queue, cl_uint num_objects,
  7638. const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
  7639. const cl_event* event_wait_list, cl_event* event);
  7640. cl_int enqueueAcquireD3D10Objects(
  7641. const vector<Memory>* mem_objects = NULL,
  7642. const vector<Event>* events = NULL,
  7643. Event* event = NULL) const
  7644. {
  7645. static PFN_clEnqueueAcquireD3D10ObjectsKHR pfn_clEnqueueAcquireD3D10ObjectsKHR = NULL;
  7646. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  7647. cl_context context = getInfo<CL_QUEUE_CONTEXT>();
  7648. cl::Device device(getInfo<CL_QUEUE_DEVICE>());
  7649. cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
  7650. CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueAcquireD3D10ObjectsKHR);
  7651. #endif
  7652. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  7653. CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueAcquireD3D10ObjectsKHR);
  7654. #endif
  7655. cl_event tmp;
  7656. cl_int err = detail::errHandler(
  7657. pfn_clEnqueueAcquireD3D10ObjectsKHR(
  7658. object_,
  7659. (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
  7660. (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
  7661. (events != NULL) ? (cl_uint) events->size() : 0,
7662. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7663. (event != NULL) ? &tmp : NULL),
  7664. __ENQUEUE_ACQUIRE_GL_ERR);
  7665. if (event != NULL && err == CL_SUCCESS)
  7666. *event = tmp;
  7667. return err;
  7668. }
  7669. cl_int enqueueReleaseD3D10Objects(
  7670. const vector<Memory>* mem_objects = NULL,
  7671. const vector<Event>* events = NULL,
  7672. Event* event = NULL) const
  7673. {
  7674. static PFN_clEnqueueReleaseD3D10ObjectsKHR pfn_clEnqueueReleaseD3D10ObjectsKHR = NULL;
  7675. #if CL_HPP_TARGET_OPENCL_VERSION >= 120
  7676. cl_context context = getInfo<CL_QUEUE_CONTEXT>();
  7677. cl::Device device(getInfo<CL_QUEUE_DEVICE>());
  7678. cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
  7679. CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueReleaseD3D10ObjectsKHR);
  7680. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
  7681. #if CL_HPP_TARGET_OPENCL_VERSION >= 110
  7682. CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueReleaseD3D10ObjectsKHR);
  7683. #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
  7684. cl_event tmp;
  7685. cl_int err = detail::errHandler(
  7686. pfn_clEnqueueReleaseD3D10ObjectsKHR(
  7687. object_,
  7688. (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
  7689. (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
  7690. (events != NULL) ? (cl_uint) events->size() : 0,
  7691. (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
  7692. (event != NULL) ? &tmp : NULL),
  7693. __ENQUEUE_RELEASE_GL_ERR);
  7694. if (event != NULL && err == CL_SUCCESS)
  7695. *event = tmp;
  7696. return err;
  7697. }
  7698. #endif
  7699. /**
7700. * APIs deprecated in OpenCL 1.2
  7701. */
  7702. #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
  7703. CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
  7704. cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
  7705. {
  7706. return detail::errHandler(
  7707. ::clEnqueueBarrier(object_),
  7708. __ENQUEUE_BARRIER_ERR);
  7709. }
  7710. #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
  7711. cl_int flush() const
  7712. {
  7713. return detail::errHandler(::clFlush(object_), __FLUSH_ERR);
  7714. }
  7715. cl_int finish() const
  7716. {
  7717. return detail::errHandler(::clFinish(object_), __FINISH_ERR);
  7718. }
  7719. }; // CommandQueue
  7720. CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag CommandQueue::default_initialized_;
  7721. CL_HPP_DEFINE_STATIC_MEMBER_ CommandQueue CommandQueue::default_;
  7722. CL_HPP_DEFINE_STATIC_MEMBER_ cl_int CommandQueue::default_error_ = CL_SUCCESS;
  7723. #if CL_HPP_TARGET_OPENCL_VERSION >= 200
  7724. enum class DeviceQueueProperties : cl_command_queue_properties
  7725. {
  7726. None = 0,
  7727. Profiling = CL_QUEUE_PROFILING_ENABLE,
  7728. };
  7729. inline DeviceQueueProperties operator|(DeviceQueueProperties lhs, DeviceQueueProperties rhs)
  7730. {
  7731. return static_cast<DeviceQueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
  7732. }
  7733. /*! \class DeviceCommandQueue
  7734. * \brief DeviceCommandQueue interface for device cl_command_queues.
  7735. */
  7736. class DeviceCommandQueue : public detail::Wrapper<cl_command_queue>
  7737. {
  7738. public:
  7739. /*!
  7740. * Trivial empty constructor to create a null queue.
  7741. */
  7742. DeviceCommandQueue() { }
  7743. /*!
7744. * Default-constructs a device command queue on the default context and device.
  7745. */
  7746. DeviceCommandQueue(DeviceQueueProperties properties, cl_int* err = NULL)
  7747. {
  7748. cl_int error;
  7749. cl::Context context = cl::Context::getDefault();
  7750. cl::Device device = cl::Device::getDefault();
  7751. cl_command_queue_properties mergedProperties =
  7752. CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
  7753. cl_queue_properties queue_properties[] = {
  7754. CL_QUEUE_PROPERTIES, mergedProperties, 0 };
  7755. object_ = ::clCreateCommandQueueWithProperties(
  7756. context(), device(), queue_properties, &error);
  7757. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  7758. if (err != NULL) {
  7759. *err = error;
  7760. }
  7761. }
  7762. /*!
  7763. * Create a device command queue for a specified device in the passed context.
  7764. */
  7765. DeviceCommandQueue(
  7766. const Context& context,
  7767. const Device& device,
  7768. DeviceQueueProperties properties = DeviceQueueProperties::None,
  7769. cl_int* err = NULL)
  7770. {
  7771. cl_int error;
  7772. cl_command_queue_properties mergedProperties =
  7773. CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
  7774. cl_queue_properties queue_properties[] = {
  7775. CL_QUEUE_PROPERTIES, mergedProperties, 0 };
  7776. object_ = ::clCreateCommandQueueWithProperties(
  7777. context(), device(), queue_properties, &error);
  7778. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  7779. if (err != NULL) {
  7780. *err = error;
  7781. }
  7782. }
  7783. /*!
  7784. * Create a device command queue for a specified device in the passed context.
  7785. */
  7786. DeviceCommandQueue(
  7787. const Context& context,
  7788. const Device& device,
  7789. cl_uint queueSize,
  7790. DeviceQueueProperties properties = DeviceQueueProperties::None,
  7791. cl_int* err = NULL)
  7792. {
  7793. cl_int error;
  7794. cl_command_queue_properties mergedProperties =
  7795. CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
  7796. cl_queue_properties queue_properties[] = {
  7797. CL_QUEUE_PROPERTIES, mergedProperties,
  7798. CL_QUEUE_SIZE, queueSize,
  7799. 0 };
  7800. object_ = ::clCreateCommandQueueWithProperties(
  7801. context(), device(), queue_properties, &error);
  7802. detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
  7803. if (err != NULL) {
  7804. *err = error;
  7805. }
  7806. }
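/*
 * Usage sketch (illustrative only; assumes cl::Context `ctx` and cl::Device `dev`
 * on an OpenCL 2.0 platform, and uses an arbitrary 16 KB queue size): creating an
 * on-device queue with an explicit size for kernels that enqueue child kernels.
 *
 *   cl_int err = CL_SUCCESS;
 *   cl::DeviceCommandQueue devQueue(ctx, dev, 16 * 1024, cl::DeviceQueueProperties::Profiling, &err);
 */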
  7807. /*! \brief Constructor from cl_command_queue - takes ownership.
  7808. *
  7809. * \param retainObject will cause the constructor to retain its cl object.
  7810. * Defaults to false to maintain compatibility with
  7811. * earlier versions.
  7812. */
  7813. explicit DeviceCommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
  7814. detail::Wrapper<cl_type>(commandQueue, retainObject) { }
  7815. DeviceCommandQueue& operator = (const cl_command_queue& rhs)
  7816. {
  7817. detail::Wrapper<cl_type>::operator=(rhs);
  7818. return *this;
  7819. }
  7820. /*! \brief Copy constructor to forward copy to the superclass correctly.
  7821. * Required for MSVC.
  7822. */
    DeviceCommandQueue(const DeviceCommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}

    /*! \brief Copy assignment to forward copy to the superclass correctly.
     * Required for MSVC.
     */
    DeviceCommandQueue& operator = (const DeviceCommandQueue &queue)
    {
        detail::Wrapper<cl_type>::operator=(queue);
        return *this;
    }

    /*! \brief Move constructor to forward move to the superclass correctly.
     * Required for MSVC.
     */
    DeviceCommandQueue(DeviceCommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}

    /*! \brief Move assignment to forward move to the superclass correctly.
     * Required for MSVC.
     */
    DeviceCommandQueue& operator = (DeviceCommandQueue &&queue)
    {
        detail::Wrapper<cl_type>::operator=(std::move(queue));
        return *this;
    }

    template <typename T>
    cl_int getInfo(cl_command_queue_info name, T* param) const
    {
        return detail::errHandler(
            detail::getInfo(
                &::clGetCommandQueueInfo, object_, name, param),
            __GET_COMMAND_QUEUE_INFO_ERR);
    }

    template <cl_int name> typename
    detail::param_traits<detail::cl_command_queue_info, name>::param_type
    getInfo(cl_int* err = NULL) const
    {
        typename detail::param_traits<
            detail::cl_command_queue_info, name>::param_type param;
        cl_int result = getInfo(name, &param);
        if (err != NULL) {
            *err = result;
        }
        return param;
    }

    /*!
     * Create a new default device command queue for the default device,
     * in the default context and of the default size.
     * If there is already a default queue for the default device this
     * function will return the pre-existing queue.
     */
    static DeviceCommandQueue makeDefault(
        cl_int *err = nullptr)
    {
        cl_int error;
        cl::Context context = cl::Context::getDefault();
        cl::Device device = cl::Device::getDefault();

        cl_command_queue_properties properties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, properties,
            0 };
        DeviceCommandQueue deviceQueue(
            ::clCreateCommandQueueWithProperties(
                context(), device(), queue_properties, &error));

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }

        return deviceQueue;
    }

    /*!
     * Create a new default device command queue for the specified device
     * and of the default size.
     * If there is already a default queue for the specified device this
     * function will return the pre-existing queue.
     */
    static DeviceCommandQueue makeDefault(
        const Context &context, const Device &device, cl_int *err = nullptr)
    {
        cl_int error;

        cl_command_queue_properties properties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, properties,
            0 };
        DeviceCommandQueue deviceQueue(
            ::clCreateCommandQueueWithProperties(
                context(), device(), queue_properties, &error));

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }

        return deviceQueue;
    }

    /*!
     * Create a new default device command queue for the specified device
     * and of the requested size in bytes.
     * If there is already a default queue for the specified device this
     * function will return the pre-existing queue.
     */
    static DeviceCommandQueue makeDefault(
        const Context &context, const Device &device, cl_uint queueSize, cl_int *err = nullptr)
    {
        cl_int error;

        cl_command_queue_properties properties =
            CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
        cl_queue_properties queue_properties[] = {
            CL_QUEUE_PROPERTIES, properties,
            CL_QUEUE_SIZE, queueSize,
            0 };
        DeviceCommandQueue deviceQueue(
            ::clCreateCommandQueueWithProperties(
                context(), device(), queue_properties, &error));

        detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
        if (err != NULL) {
            *err = error;
        }

        return deviceQueue;
    }

#if CL_HPP_TARGET_OPENCL_VERSION >= 210
    /*!
     * Modify the default device command queue used for subsequent kernel enqueues.
     * This may be called repeatedly to change the default command queue for a device,
     * to account for kernels that rely on the default.
     * @return updated default device command queue.
     */
    static DeviceCommandQueue updateDefault(const Context &context, const Device &device, const DeviceCommandQueue &default_queue, cl_int *err = nullptr)
    {
        cl_int error;
        error = clSetDefaultDeviceCommandQueue(context.get(), device.get(), default_queue.get());

        detail::errHandler(error, __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR);
        if (err != NULL) {
            *err = error;
        }
        return default_queue;
    }

    /*!
     * Return the current default device command queue for the specified host command queue.
     */
    static DeviceCommandQueue getDefault(const CommandQueue &queue, cl_int * err = NULL)
    {
        return queue.getInfo<CL_QUEUE_DEVICE_DEFAULT>(err);
    }

#endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
}; // DeviceCommandQueue

namespace detail
{
    // Specialization for device command queue
    template <>
    struct KernelArgumentHandler<cl::DeviceCommandQueue, void>
    {
        static size_type size(const cl::DeviceCommandQueue&) { return sizeof(cl_command_queue); }
        static const cl_command_queue* ptr(const cl::DeviceCommandQueue& value) { return &(value()); }
    };
} // namespace detail

#endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
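
/*
 * Usage sketch (illustrative only, not part of the API): creating and installing
 * an on-device queue so that kernels using device-side enqueue have a default
 * queue available. The 16 KB size and the error-handling style are assumptions
 * made for the example; exception-enabled builds would omit the explicit checks.
 *
 *     cl_int err = CL_SUCCESS;
 *     cl::Context context = cl::Context::getDefault(&err);
 *     cl::Device device = cl::Device::getDefault(&err);
 *
 *     // Create a default device command queue of an assumed 16 KB size.
 *     cl::DeviceCommandQueue deviceQueue =
 *         cl::DeviceCommandQueue::makeDefault(context, device, 16 * 1024, &err);
 */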
template< typename IteratorType >
Buffer::Buffer(
    const Context &context,
    IteratorType startIterator,
    IteratorType endIterator,
    bool readOnly,
    bool useHostPtr,
    cl_int* err)
{
    typedef typename std::iterator_traits<IteratorType>::value_type DataType;
    cl_int error;

    cl_mem_flags flags = 0;
    if( readOnly ) {
        flags |= CL_MEM_READ_ONLY;
    }
    else {
        flags |= CL_MEM_READ_WRITE;
    }
    if( useHostPtr ) {
        flags |= CL_MEM_USE_HOST_PTR;
    }

    size_type size = sizeof(DataType)*(endIterator - startIterator);

    if( useHostPtr ) {
        object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
    } else {
        object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
    }

    detail::errHandler(error, __CREATE_BUFFER_ERR);
    if (err != NULL) {
        *err = error;
    }

    if( !useHostPtr ) {
        CommandQueue queue(context, 0, &error);
        detail::errHandler(error, __CREATE_BUFFER_ERR);
        if (err != NULL) {
            *err = error;
        }

        error = cl::copy(queue, startIterator, endIterator, *this);
        detail::errHandler(error, __CREATE_BUFFER_ERR);
        if (err != NULL) {
            *err = error;
        }
    }
}

template< typename IteratorType >
Buffer::Buffer(
    const CommandQueue &queue,
    IteratorType startIterator,
    IteratorType endIterator,
    bool readOnly,
    bool useHostPtr,
    cl_int* err)
{
    typedef typename std::iterator_traits<IteratorType>::value_type DataType;
    cl_int error;

    cl_mem_flags flags = 0;
    if (readOnly) {
        flags |= CL_MEM_READ_ONLY;
    }
    else {
        flags |= CL_MEM_READ_WRITE;
    }
    if (useHostPtr) {
        flags |= CL_MEM_USE_HOST_PTR;
    }

    size_type size = sizeof(DataType)*(endIterator - startIterator);

    Context context = queue.getInfo<CL_QUEUE_CONTEXT>();

    if (useHostPtr) {
        object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
    }
    else {
        object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
    }

    detail::errHandler(error, __CREATE_BUFFER_ERR);
    if (err != NULL) {
        *err = error;
    }

    if (!useHostPtr) {
        error = cl::copy(queue, startIterator, endIterator, *this);
        detail::errHandler(error, __CREATE_BUFFER_ERR);
        if (err != NULL) {
            *err = error;
        }
    }
}
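
/*
 * Usage sketch (illustrative only): constructing a device buffer directly from
 * host iterators. The vector contents and size are arbitrary example values.
 *
 *     std::vector<float> hostData(1024, 1.0f);
 *     cl_int err = CL_SUCCESS;
 *
 *     // Arguments: readOnly = true, useHostPtr = false; the data is copied
 *     // to the device through a temporary queue on the given context.
 *     cl::Buffer input(cl::Context::getDefault(), hostData.begin(), hostData.end(),
 *                      true, false, &err);
 */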
inline cl_int enqueueReadBuffer(
    const Buffer& buffer,
    cl_bool blocking,
    size_type offset,
    size_type size,
    void* ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueReadBuffer(buffer, blocking, offset, size, ptr, events, event);
}

inline cl_int enqueueWriteBuffer(
    const Buffer& buffer,
    cl_bool blocking,
    size_type offset,
    size_type size,
    const void* ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueWriteBuffer(buffer, blocking, offset, size, ptr, events, event);
}
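
/*
 * Usage sketch (illustrative only): blocking read and write through the default
 * command queue. The buffer name and element count are assumptions for the example.
 *
 *     std::vector<int> host(256, 0);
 *     cl::Buffer dev(CL_MEM_READ_WRITE, 256 * sizeof(int));
 *
 *     cl_int err = cl::enqueueWriteBuffer(dev, CL_TRUE, 0, 256 * sizeof(int), host.data());
 *     // ... enqueue kernels operating on dev ...
 *     err = cl::enqueueReadBuffer(dev, CL_TRUE, 0, 256 * sizeof(int), host.data());
 */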
inline void* enqueueMapBuffer(
    const Buffer& buffer,
    cl_bool blocking,
    cl_map_flags flags,
    size_type offset,
    size_type size,
    const vector<Event>* events = NULL,
    Event* event = NULL,
    cl_int* err = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
    if (err != NULL) {
        *err = error;
    }

    void * result = ::clEnqueueMapBuffer(
        queue(), buffer(), blocking, flags, offset, size,
        (events != NULL) ? (cl_uint) events->size() : 0,
        (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
        (cl_event*) event,
        &error);

    detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
    if (err != NULL) {
        *err = error;
    }
    return result;
}

#if CL_HPP_TARGET_OPENCL_VERSION >= 200
/**
 * Enqueues to the default queue a command that will allow the host to
 * update a region of a coarse-grained SVM buffer.
 * This variant takes a raw SVM pointer.
 */
template<typename T>
inline cl_int enqueueMapSVM(
    T* ptr,
    cl_bool blocking,
    cl_map_flags flags,
    size_type size,
    const vector<Event>* events,
    Event* event)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS) {
        return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
    }

    return queue.enqueueMapSVM(
        ptr, blocking, flags, size, events, event);
}

/**
 * Enqueues to the default queue a command that will allow the host to
 * update a region of a coarse-grained SVM buffer.
 * This variant takes a cl::pointer instance.
 */
template<typename T, class D>
inline cl_int enqueueMapSVM(
    cl::pointer<T, D> ptr,
    cl_bool blocking,
    cl_map_flags flags,
    size_type size,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS) {
        return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
    }

    return queue.enqueueMapSVM(
        ptr, blocking, flags, size, events, event);
}

/**
 * Enqueues to the default queue a command that will allow the host to
 * update a region of a coarse-grained SVM buffer.
 * This variant takes a cl::vector instance.
 */
template<typename T, class Alloc>
inline cl_int enqueueMapSVM(
    cl::vector<T, Alloc> &container,
    cl_bool blocking,
    cl_map_flags flags,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS) {
        return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
    }

    return queue.enqueueMapSVM(
        container, blocking, flags, events, event);
}

#endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200

inline cl_int enqueueUnmapMemObject(
    const Memory& memory,
    void* mapped_ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
    if (error != CL_SUCCESS) {
        return error;
    }

    cl_event tmp;
    cl_int err = detail::errHandler(
        ::clEnqueueUnmapMemObject(
            queue(), memory(), mapped_ptr,
            (events != NULL) ? (cl_uint)events->size() : 0,
            (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
            (event != NULL) ? &tmp : NULL),
        __ENQUEUE_UNMAP_MEM_OBJECT_ERR);

    if (event != NULL && err == CL_SUCCESS)
        *event = tmp;

    return err;
}
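
/*
 * Usage sketch (illustrative only): mapping a buffer through the default queue,
 * touching it on the host, then unmapping it. The buffer and sizes are
 * assumptions made for the example.
 *
 *     cl::Buffer buf(CL_MEM_READ_WRITE, 64 * sizeof(float));
 *     cl_int err = CL_SUCCESS;
 *
 *     float* p = static_cast<float*>(
 *         cl::enqueueMapBuffer(buf, CL_TRUE, CL_MAP_WRITE, 0, 64 * sizeof(float),
 *                              NULL, NULL, &err));
 *     if (err == CL_SUCCESS) {
 *         p[0] = 42.0f;
 *         err = cl::enqueueUnmapMemObject(buf, p);
 *     }
 */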
#if CL_HPP_TARGET_OPENCL_VERSION >= 200
/**
 * Enqueues to the default queue a command that will release a coarse-grained
 * SVM buffer back to the OpenCL runtime.
 * This variant takes a raw SVM pointer.
 */
template<typename T>
inline cl_int enqueueUnmapSVM(
    T* ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS) {
        return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
    }

    return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
        __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
}

/**
 * Enqueues to the default queue a command that will release a coarse-grained
 * SVM buffer back to the OpenCL runtime.
 * This variant takes a cl::pointer instance.
 */
template<typename T, class D>
inline cl_int enqueueUnmapSVM(
    cl::pointer<T, D> &ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS) {
        return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
    }

    return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
        __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
}

/**
 * Enqueues to the default queue a command that will release a coarse-grained
 * SVM buffer back to the OpenCL runtime.
 * This variant takes a cl::vector instance.
 */
template<typename T, class Alloc>
inline cl_int enqueueUnmapSVM(
    cl::vector<T, Alloc> &container,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS) {
        return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
    }

    return detail::errHandler(queue.enqueueUnmapSVM(container, events, event),
        __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
}

#endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
inline cl_int enqueueCopyBuffer(
    const Buffer& src,
    const Buffer& dst,
    size_type src_offset,
    size_type dst_offset,
    size_type size,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueCopyBuffer(src, dst, src_offset, dst_offset, size, events, event);
}

/**
 * Blocking copy operation between iterators and a buffer.
 * Host to Device.
 * Uses default command queue.
 */
template< typename IteratorType >
inline cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS)
        return error;

    return cl::copy(queue, startIterator, endIterator, buffer);
}

/**
 * Blocking copy operation between iterators and a buffer.
 * Device to Host.
 * Uses default command queue.
 */
template< typename IteratorType >
inline cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);
    if (error != CL_SUCCESS)
        return error;

    return cl::copy(queue, buffer, startIterator, endIterator);
}

/**
 * Blocking copy operation between iterators and a buffer.
 * Host to Device.
 * Uses specified queue.
 */
template< typename IteratorType >
inline cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
{
    typedef typename std::iterator_traits<IteratorType>::value_type DataType;
    cl_int error;

    size_type length = endIterator-startIterator;
    size_type byteLength = length*sizeof(DataType);

    DataType *pointer =
        static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, byteLength, 0, 0, &error));
    // if exceptions enabled, enqueueMapBuffer will throw
    if( error != CL_SUCCESS ) {
        return error;
    }
#if defined(_MSC_VER)
    std::copy(
        startIterator,
        endIterator,
        stdext::checked_array_iterator<DataType*>(
            pointer, length));
#else
    std::copy(startIterator, endIterator, pointer);
#endif
    Event endEvent;
    error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
    // if exceptions enabled, enqueueUnmapMemObject will throw
    if( error != CL_SUCCESS ) {
        return error;
    }
    endEvent.wait();
    return CL_SUCCESS;
}

/**
 * Blocking copy operation between iterators and a buffer.
 * Device to Host.
 * Uses specified queue.
 */
template< typename IteratorType >
inline cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
{
    typedef typename std::iterator_traits<IteratorType>::value_type DataType;
    cl_int error;

    size_type length = endIterator-startIterator;
    size_type byteLength = length*sizeof(DataType);

    DataType *pointer =
        static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_READ, 0, byteLength, 0, 0, &error));
    // if exceptions enabled, enqueueMapBuffer will throw
    if( error != CL_SUCCESS ) {
        return error;
    }
    std::copy(pointer, pointer + length, startIterator);
    Event endEvent;
    error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
    // if exceptions enabled, enqueueUnmapMemObject will throw
    if( error != CL_SUCCESS ) {
        return error;
    }
    endEvent.wait();
    return CL_SUCCESS;
}
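
/*
 * Usage sketch (illustrative only): round-tripping a std::vector through a
 * device buffer with the blocking copy helpers on the default queue. Names
 * and sizes are assumptions for the example.
 *
 *     std::vector<float> host(512, 3.0f);
 *     cl::Buffer dev(CL_MEM_READ_WRITE, 512 * sizeof(float));
 *
 *     cl_int err = cl::copy(host.begin(), host.end(), dev);   // Host to Device
 *     // ... enqueue kernels operating on dev ...
 *     err = cl::copy(dev, host.begin(), host.end());          // Device to Host
 */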
#if CL_HPP_TARGET_OPENCL_VERSION >= 200
/**
 * Blocking SVM map operation - performs a blocking map underneath.
 */
template<typename T, class Alloc>
inline cl_int mapSVM(cl::vector<T, Alloc> &container)
{
    return enqueueMapSVM(container, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
}

/**
 * SVM unmap operation - releases the container's backing store to the runtime.
 */
template<typename T, class Alloc>
inline cl_int unmapSVM(cl::vector<T, Alloc> &container)
{
    return enqueueUnmapSVM(container);
}

#endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
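
/*
 * Usage sketch (illustrative only): a coarse-grained SVM vector must be mapped
 * before the host touches it and unmapped before kernels use it again. Assumes
 * the cl::SVMAllocator and cl::SVMTraitCoarse types declared earlier in this
 * header; the vector size is arbitrary.
 *
 *     cl::vector<int, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>> svmVec(1024);
 *
 *     cl_int err = cl::mapSVM(svmVec);    // make host access legal
 *     if (err == CL_SUCCESS) {
 *         svmVec[0] = 7;
 *         err = cl::unmapSVM(svmVec);     // hand the storage back to the device
 *     }
 */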
#if CL_HPP_TARGET_OPENCL_VERSION >= 110
inline cl_int enqueueReadBufferRect(
    const Buffer& buffer,
    cl_bool blocking,
    const array<size_type, 3>& buffer_offset,
    const array<size_type, 3>& host_offset,
    const array<size_type, 3>& region,
    size_type buffer_row_pitch,
    size_type buffer_slice_pitch,
    size_type host_row_pitch,
    size_type host_slice_pitch,
    void *ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueReadBufferRect(
        buffer,
        blocking,
        buffer_offset,
        host_offset,
        region,
        buffer_row_pitch,
        buffer_slice_pitch,
        host_row_pitch,
        host_slice_pitch,
        ptr,
        events,
        event);
}

inline cl_int enqueueWriteBufferRect(
    const Buffer& buffer,
    cl_bool blocking,
    const array<size_type, 3>& buffer_offset,
    const array<size_type, 3>& host_offset,
    const array<size_type, 3>& region,
    size_type buffer_row_pitch,
    size_type buffer_slice_pitch,
    size_type host_row_pitch,
    size_type host_slice_pitch,
    const void *ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueWriteBufferRect(
        buffer,
        blocking,
        buffer_offset,
        host_offset,
        region,
        buffer_row_pitch,
        buffer_slice_pitch,
        host_row_pitch,
        host_slice_pitch,
        ptr,
        events,
        event);
}

inline cl_int enqueueCopyBufferRect(
    const Buffer& src,
    const Buffer& dst,
    const array<size_type, 3>& src_origin,
    const array<size_type, 3>& dst_origin,
    const array<size_type, 3>& region,
    size_type src_row_pitch,
    size_type src_slice_pitch,
    size_type dst_row_pitch,
    size_type dst_slice_pitch,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueCopyBufferRect(
        src,
        dst,
        src_origin,
        dst_origin,
        region,
        src_row_pitch,
        src_slice_pitch,
        dst_row_pitch,
        dst_slice_pitch,
        events,
        event);
}
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
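
/*
 * Usage sketch (illustrative only): reading a 16x16 sub-rectangle of a 64x64
 * float array stored row-major in a buffer, via the default queue. Per the
 * OpenCL rect-copy rules, x components of offsets, regions and pitches are in
 * bytes while y and z components are in rows and slices. The buffer name is an
 * assumption for the example.
 *
 *     const size_type devicePitch = 64 * sizeof(float);
 *     const size_type hostPitch   = 16 * sizeof(float);
 *     std::vector<float> host(16 * 16);
 *
 *     cl_int err = cl::enqueueReadBufferRect(
 *         deviceBuffer, CL_TRUE,
 *         {8 * sizeof(float), 8, 0},     // buffer_offset: column 8, row 8
 *         {0, 0, 0},                     // host_offset
 *         {16 * sizeof(float), 16, 1},   // region: 16 floats wide, 16 rows, 1 slice
 *         devicePitch, 0, hostPitch, 0,
 *         host.data());
 */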
inline cl_int enqueueReadImage(
    const Image& image,
    cl_bool blocking,
    const array<size_type, 3>& origin,
    const array<size_type, 3>& region,
    size_type row_pitch,
    size_type slice_pitch,
    void* ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueReadImage(
        image,
        blocking,
        origin,
        region,
        row_pitch,
        slice_pitch,
        ptr,
        events,
        event);
}
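
/*
 * Usage sketch (illustrative only): blocking read of a whole 2D RGBA image
 * through the default queue. The image dimensions and format are assumptions
 * made for the example.
 *
 *     cl::Image2D img(cl::Context::getDefault(), CL_MEM_READ_WRITE,
 *                     cl::ImageFormat(CL_RGBA, CL_UNSIGNED_INT8), 128, 128);
 *     std::vector<cl_uchar> pixels(128 * 128 * 4);
 *
 *     cl_int err = cl::enqueueReadImage(
 *         img, CL_TRUE, {0, 0, 0}, {128, 128, 1}, 0, 0, pixels.data());
 */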
inline cl_int enqueueWriteImage(
    const Image& image,
    cl_bool blocking,
    const array<size_type, 3>& origin,
    const array<size_type, 3>& region,
    size_type row_pitch,
    size_type slice_pitch,
    const void* ptr,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueWriteImage(
        image,
        blocking,
        origin,
        region,
        row_pitch,
        slice_pitch,
        ptr,
        events,
        event);
}

inline cl_int enqueueCopyImage(
    const Image& src,
    const Image& dst,
    const array<size_type, 3>& src_origin,
    const array<size_type, 3>& dst_origin,
    const array<size_type, 3>& region,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueCopyImage(
        src,
        dst,
        src_origin,
        dst_origin,
        region,
        events,
        event);
}

inline cl_int enqueueCopyImageToBuffer(
    const Image& src,
    const Buffer& dst,
    const array<size_type, 3>& src_origin,
    const array<size_type, 3>& region,
    size_type dst_offset,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueCopyImageToBuffer(
        src,
        dst,
        src_origin,
        region,
        dst_offset,
        events,
        event);
}

inline cl_int enqueueCopyBufferToImage(
    const Buffer& src,
    const Image& dst,
    size_type src_offset,
    const array<size_type, 3>& dst_origin,
    const array<size_type, 3>& region,
    const vector<Event>* events = NULL,
    Event* event = NULL)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.enqueueCopyBufferToImage(
        src,
        dst,
        src_offset,
        dst_origin,
        region,
        events,
        event);
}

inline cl_int flush(void)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.flush();
}

inline cl_int finish(void)
{
    cl_int error;
    CommandQueue queue = CommandQueue::getDefault(&error);

    if (error != CL_SUCCESS) {
        return error;
    }

    return queue.finish();
}
class EnqueueArgs
{
private:
    CommandQueue queue_;
    const NDRange offset_;
    const NDRange global_;
    const NDRange local_;
    vector<Event> events_;

    template<typename... Ts>
    friend class KernelFunctor;

public:
    EnqueueArgs(NDRange global) :
        queue_(CommandQueue::getDefault()),
        offset_(NullRange),
        global_(global),
        local_(NullRange)
    {
    }

    EnqueueArgs(NDRange global, NDRange local) :
        queue_(CommandQueue::getDefault()),
        offset_(NullRange),
        global_(global),
        local_(local)
    {
    }

    EnqueueArgs(NDRange offset, NDRange global, NDRange local) :
        queue_(CommandQueue::getDefault()),
        offset_(offset),
        global_(global),
        local_(local)
    {
    }

    EnqueueArgs(Event e, NDRange global) :
        queue_(CommandQueue::getDefault()),
        offset_(NullRange),
        global_(global),
        local_(NullRange)
    {
        events_.push_back(e);
    }

    EnqueueArgs(Event e, NDRange global, NDRange local) :
        queue_(CommandQueue::getDefault()),
        offset_(NullRange),
        global_(global),
        local_(local)
    {
        events_.push_back(e);
    }

    EnqueueArgs(Event e, NDRange offset, NDRange global, NDRange local) :
        queue_(CommandQueue::getDefault()),
        offset_(offset),
        global_(global),
        local_(local)
    {
        events_.push_back(e);
    }

    EnqueueArgs(const vector<Event> &events, NDRange global) :
        queue_(CommandQueue::getDefault()),
        offset_(NullRange),
        global_(global),
        local_(NullRange),
        events_(events)
    {
    }

    EnqueueArgs(const vector<Event> &events, NDRange global, NDRange local) :
        queue_(CommandQueue::getDefault()),
        offset_(NullRange),
        global_(global),
        local_(local),
        events_(events)
    {
    }

    EnqueueArgs(const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
        queue_(CommandQueue::getDefault()),
        offset_(offset),
        global_(global),
        local_(local),
        events_(events)
    {
    }

    EnqueueArgs(CommandQueue &queue, NDRange global) :
        queue_(queue),
        offset_(NullRange),
        global_(global),
        local_(NullRange)
    {
    }

    EnqueueArgs(CommandQueue &queue, NDRange global, NDRange local) :
        queue_(queue),
        offset_(NullRange),
        global_(global),
        local_(local)
    {
    }

    EnqueueArgs(CommandQueue &queue, NDRange offset, NDRange global, NDRange local) :
        queue_(queue),
        offset_(offset),
        global_(global),
        local_(local)
    {
    }

    EnqueueArgs(CommandQueue &queue, Event e, NDRange global) :
        queue_(queue),
        offset_(NullRange),
        global_(global),
        local_(NullRange)
    {
        events_.push_back(e);
    }

    EnqueueArgs(CommandQueue &queue, Event e, NDRange global, NDRange local) :
        queue_(queue),
        offset_(NullRange),
        global_(global),
        local_(local)
    {
        events_.push_back(e);
    }

    EnqueueArgs(CommandQueue &queue, Event e, NDRange offset, NDRange global, NDRange local) :
        queue_(queue),
        offset_(offset),
        global_(global),
        local_(local)
    {
        events_.push_back(e);
    }

    EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global) :
        queue_(queue),
        offset_(NullRange),
        global_(global),
        local_(NullRange),
        events_(events)
    {
    }

    EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global, NDRange local) :
        queue_(queue),
        offset_(NullRange),
        global_(global),
        local_(local),
        events_(events)
    {
    }

    EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
        queue_(queue),
        offset_(offset),
        global_(global),
        local_(local),
        events_(events)
    {
    }
};
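
/*
 * Usage sketch (illustrative only): common ways of building launch parameters.
 * The ranges and the queue are example values.
 *
 *     // Global size only; offset defaults to NullRange, the local size is
 *     // chosen by the runtime, and the default command queue is used.
 *     cl::EnqueueArgs simple(cl::NDRange(1024));
 *
 *     // Explicit queue, global and local sizes.
 *     cl::CommandQueue q = cl::CommandQueue::getDefault();
 *     cl::EnqueueArgs tuned(q, cl::NDRange(1024, 1024), cl::NDRange(16, 16));
 */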
//----------------------------------------------------------------------------------------------

/**
 * Type safe kernel functor.
 *
 */
template<typename... Ts>
class KernelFunctor
{
private:
    Kernel kernel_;

    template<int index, typename T0, typename... T1s>
    void setArgs(T0&& t0, T1s&&... t1s)
    {
        kernel_.setArg(index, t0);
        setArgs<index + 1, T1s...>(std::forward<T1s>(t1s)...);
    }

    template<int index, typename T0>
    void setArgs(T0&& t0)
    {
        kernel_.setArg(index, t0);
    }

    template<int index>
    void setArgs()
    {
    }

public:
    KernelFunctor(Kernel kernel) : kernel_(kernel)
    {}

    KernelFunctor(
        const Program& program,
        const string name,
        cl_int * err = NULL) :
        kernel_(program, name.c_str(), err)
    {}

    //! \brief Return type of the functor
    typedef Event result_type;

    /**
     * Enqueue kernel.
     * @param args Launch parameters of the kernel.
     * @param ts... List of kernel arguments based on the template type of the functor.
     */
    Event operator() (
        const EnqueueArgs& args,
        Ts... ts)
    {
        Event event;
        setArgs<0>(std::forward<Ts>(ts)...);

        args.queue_.enqueueNDRangeKernel(
            kernel_,
            args.offset_,
            args.global_,
            args.local_,
            &args.events_,
            &event);

        return event;
    }

    /**
     * Enqueue kernel with support for error code.
     * @param args Launch parameters of the kernel.
     * @param ts... List of kernel arguments based on the template type of the functor.
     * @param error Out parameter returning the error code from the execution.
     */
    Event operator() (
        const EnqueueArgs& args,
        Ts... ts,
        cl_int &error)
    {
        Event event;
        setArgs<0>(std::forward<Ts>(ts)...);

        error = args.queue_.enqueueNDRangeKernel(
            kernel_,
            args.offset_,
            args.global_,
            args.local_,
            &args.events_,
            &event);

        return event;
    }

#if CL_HPP_TARGET_OPENCL_VERSION >= 200
    cl_int setSVMPointers(const vector<void*> &pointerList)
    {
        return kernel_.setSVMPointers(pointerList);
    }

    template<typename T0, typename... T1s>
    cl_int setSVMPointers(const T0 &t0, T1s &... ts)
    {
        return kernel_.setSVMPointers(t0, ts...);
    }
#endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200

    Kernel getKernel()
    {
        return kernel_;
    }
};
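
/*
 * Usage sketch (illustrative only): wrapping a kernel in a type-safe functor
 * and launching it. The program source, the kernel name "vadd" and the buffers
 * are assumptions made for the example.
 *
 *     cl::Program program(kernelSource, true);
 *     auto vadd = cl::KernelFunctor<cl::Buffer, cl::Buffer, cl::Buffer>(program, "vadd");
 *
 *     cl_int launchErr;
 *     cl::Event e = vadd(cl::EnqueueArgs(cl::NDRange(numElements)),
 *                        aBuf, bBuf, cBuf, launchErr);
 */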
namespace compatibility {
    /**
     * Backward compatibility class to ensure that cl.hpp code works with cl2.hpp.
     * Please use KernelFunctor directly.
     */
    template<typename... Ts>
    struct make_kernel
    {
        typedef KernelFunctor<Ts...> FunctorType;

        FunctorType functor_;

        make_kernel(
            const Program& program,
            const string name,
            cl_int * err = NULL) :
            functor_(FunctorType(program, name, err))
        {}

        make_kernel(
            const Kernel kernel) :
            functor_(FunctorType(kernel))
        {}

        //! \brief Return type of the functor
        typedef Event result_type;

        //! \brief Function signature of kernel functor with no event dependency.
        typedef Event type_(
            const EnqueueArgs&,
            Ts...);

        Event operator()(
            const EnqueueArgs& enqueueArgs,
            Ts... args)
        {
            return functor_(
                enqueueArgs, args...);
        }
    };
} // namespace compatibility
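
/*
 * Usage sketch (illustrative only): code written against cl.hpp's make_kernel
 * can keep its call sites by switching to the compatibility wrapper. The
 * program object and kernel name are assumptions for the example.
 *
 *     cl::compatibility::make_kernel<cl::Buffer, int> scale(program, "scale");
 *     scale(cl::EnqueueArgs(cl::NDRange(256)), dataBuf, 2);
 */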
//----------------------------------------------------------------------------------------------------------------------

#undef CL_HPP_ERR_STR_
#if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
#undef __GET_DEVICE_INFO_ERR
#undef __GET_PLATFORM_INFO_ERR
#undef __GET_DEVICE_IDS_ERR
#undef __GET_PLATFORM_IDS_ERR
#undef __GET_CONTEXT_INFO_ERR
#undef __GET_EVENT_INFO_ERR
#undef __GET_EVENT_PROFILE_INFO_ERR
#undef __GET_MEM_OBJECT_INFO_ERR
#undef __GET_IMAGE_INFO_ERR
#undef __GET_SAMPLER_INFO_ERR
#undef __GET_KERNEL_INFO_ERR
#undef __GET_KERNEL_ARG_INFO_ERR
#undef __GET_KERNEL_SUB_GROUP_INFO_ERR
#undef __GET_KERNEL_WORK_GROUP_INFO_ERR
#undef __GET_PROGRAM_INFO_ERR
#undef __GET_PROGRAM_BUILD_INFO_ERR
#undef __GET_COMMAND_QUEUE_INFO_ERR
#undef __CREATE_CONTEXT_ERR
#undef __CREATE_CONTEXT_FROM_TYPE_ERR
#undef __GET_SUPPORTED_IMAGE_FORMATS_ERR
#undef __CREATE_BUFFER_ERR
#undef __COPY_ERR
#undef __CREATE_SUBBUFFER_ERR
#undef __CREATE_GL_BUFFER_ERR
#undef __CREATE_GL_RENDER_BUFFER_ERR
#undef __GET_GL_OBJECT_INFO_ERR
#undef __CREATE_IMAGE_ERR
#undef __CREATE_GL_TEXTURE_ERR
#undef __IMAGE_DIMENSION_ERR
#undef __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR
#undef __CREATE_USER_EVENT_ERR
#undef __SET_USER_EVENT_STATUS_ERR
#undef __SET_EVENT_CALLBACK_ERR
#undef __WAIT_FOR_EVENTS_ERR
#undef __CREATE_KERNEL_ERR
#undef __SET_KERNEL_ARGS_ERR
#undef __CREATE_PROGRAM_WITH_SOURCE_ERR
#undef __CREATE_PROGRAM_WITH_IL_ERR
#undef __CREATE_PROGRAM_WITH_BINARY_ERR
#undef __CREATE_PROGRAM_WITH_IL_ERR
#undef __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR
#undef __BUILD_PROGRAM_ERR
#undef __COMPILE_PROGRAM_ERR
#undef __LINK_PROGRAM_ERR
#undef __CREATE_KERNELS_IN_PROGRAM_ERR
#undef __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR
#undef __CREATE_SAMPLER_WITH_PROPERTIES_ERR
#undef __SET_COMMAND_QUEUE_PROPERTY_ERR
#undef __ENQUEUE_READ_BUFFER_ERR
#undef __ENQUEUE_READ_BUFFER_RECT_ERR
#undef __ENQUEUE_WRITE_BUFFER_ERR
#undef __ENQUEUE_WRITE_BUFFER_RECT_ERR
#undef __ENQEUE_COPY_BUFFER_ERR
#undef __ENQEUE_COPY_BUFFER_RECT_ERR
#undef __ENQUEUE_FILL_BUFFER_ERR
#undef __ENQUEUE_READ_IMAGE_ERR
#undef __ENQUEUE_WRITE_IMAGE_ERR
#undef __ENQUEUE_COPY_IMAGE_ERR
#undef __ENQUEUE_FILL_IMAGE_ERR
#undef __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR
#undef __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR
#undef __ENQUEUE_MAP_BUFFER_ERR
#undef __ENQUEUE_MAP_IMAGE_ERR
#undef __ENQUEUE_UNMAP_MEM_OBJECT_ERR
#undef __ENQUEUE_NDRANGE_KERNEL_ERR
#undef __ENQUEUE_NATIVE_KERNEL
#undef __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR
#undef __ENQUEUE_MIGRATE_SVM_ERR
#undef __ENQUEUE_ACQUIRE_GL_ERR
#undef __ENQUEUE_RELEASE_GL_ERR
#undef __CREATE_PIPE_ERR
#undef __GET_PIPE_INFO_ERR
#undef __RETAIN_ERR
#undef __RELEASE_ERR
#undef __FLUSH_ERR
#undef __FINISH_ERR
#undef __VECTOR_CAPACITY_ERR
#undef __CREATE_SUB_DEVICES_ERR
#undef __CREATE_SUB_DEVICES_ERR
#undef __ENQUEUE_MARKER_ERR
#undef __ENQUEUE_WAIT_FOR_EVENTS_ERR
#undef __ENQUEUE_BARRIER_ERR
#undef __UNLOAD_COMPILER_ERR
#undef __CREATE_GL_TEXTURE_2D_ERR
#undef __CREATE_GL_TEXTURE_3D_ERR
#undef __CREATE_IMAGE2D_ERR
#undef __CREATE_IMAGE3D_ERR
#undef __CREATE_COMMAND_QUEUE_ERR
#undef __ENQUEUE_TASK_ERR
#undef __CREATE_SAMPLER_ERR
#undef __ENQUEUE_MARKER_WAIT_LIST_ERR
#undef __ENQUEUE_BARRIER_WAIT_LIST_ERR
#undef __CLONE_KERNEL_ERR
#undef __GET_HOST_TIMER_ERR
#undef __GET_DEVICE_AND_HOST_TIMER_ERR
#endif //CL_HPP_USER_OVERRIDE_ERROR_STRINGS

// Extensions
#undef CL_HPP_INIT_CL_EXT_FCN_PTR_
#undef CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_

#if defined(CL_HPP_USE_CL_DEVICE_FISSION)
#undef CL_HPP_PARAM_NAME_DEVICE_FISSION_
#endif // CL_HPP_USE_CL_DEVICE_FISSION

#undef CL_HPP_NOEXCEPT_
#undef CL_HPP_DEFINE_STATIC_MEMBER_

} // namespace cl

#endif // CL_HPP_