430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009 |
- /* Generated automatically by the program `genflags'
- from the machine description file `md'. */
- #ifndef GCC_INSN_FLAGS_H
- #define GCC_INSN_FLAGS_H
- #define HAVE_indirect_jump 1
- #define HAVE_jump 1
- #define HAVE_ccmpccsi 1
- #define HAVE_ccmpccdi 1
- #define HAVE_ccmpccfpsf (TARGET_FLOAT)
- #define HAVE_ccmpccfpdf (TARGET_FLOAT)
- #define HAVE_ccmpccfpesf (TARGET_FLOAT)
- #define HAVE_ccmpccfpedf (TARGET_FLOAT)
- #define HAVE_ccmpccsi_rev 1
- #define HAVE_ccmpccdi_rev 1
- #define HAVE_ccmpccfpsf_rev (TARGET_FLOAT)
- #define HAVE_ccmpccfpdf_rev (TARGET_FLOAT)
- #define HAVE_ccmpccfpesf_rev (TARGET_FLOAT)
- #define HAVE_ccmpccfpedf_rev (TARGET_FLOAT)
- #define HAVE_condjump 1
- #define HAVE_nop 1
- #define HAVE_prefetch 1
- #define HAVE_trap 1
- #define HAVE_simple_return 1
- #define HAVE_insv_immsi (UINTVAL (operands[1]) < GET_MODE_BITSIZE (SImode) \
- && UINTVAL (operands[1]) % 16 == 0)
- #define HAVE_insv_immdi (UINTVAL (operands[1]) < GET_MODE_BITSIZE (DImode) \
- && UINTVAL (operands[1]) % 16 == 0)
- #define HAVE_aarch64_movksi (aarch64_movk_shift (rtx_mode_t (operands[2], SImode), \
- rtx_mode_t (operands[3], SImode)) >= 0)
- #define HAVE_aarch64_movkdi (aarch64_movk_shift (rtx_mode_t (operands[2], DImode), \
- rtx_mode_t (operands[3], DImode)) >= 0)
- #define HAVE_load_pair_sw_sisi (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (SImode))))
- #define HAVE_load_pair_sw_sfsi (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (SFmode))))
- #define HAVE_load_pair_sw_sisf (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (SImode))))
- #define HAVE_load_pair_sw_sfsf (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (SFmode))))
- #define HAVE_load_pair_dw_didi (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DImode))))
- #define HAVE_load_pair_dw_didf (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DImode))))
- #define HAVE_load_pair_dw_dfdi (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pair_dw_dfdf (rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pair_dw_tftf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (TFmode))))
- #define HAVE_store_pair_sw_sisi (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (SImode))))
- #define HAVE_store_pair_sw_sfsi (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (SFmode))))
- #define HAVE_store_pair_sw_sisf (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (SImode))))
- #define HAVE_store_pair_sw_sfsf (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (SFmode))))
- #define HAVE_store_pair_dw_didi (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DImode))))
- #define HAVE_store_pair_dw_didf (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DImode))))
- #define HAVE_store_pair_dw_dfdi (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_store_pair_dw_dfdf (rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_store_pair_dw_tftf (TARGET_SIMD && \
- rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (TFmode))))
- #define HAVE_loadwb_pairsi_si ((INTVAL (operands[5]) == GET_MODE_SIZE (SImode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_loadwb_pairsi_di ((INTVAL (operands[5]) == GET_MODE_SIZE (SImode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_loadwb_pairdi_si ((INTVAL (operands[5]) == GET_MODE_SIZE (DImode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_loadwb_pairdi_di ((INTVAL (operands[5]) == GET_MODE_SIZE (DImode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_loadwb_pairsf_si ((INTVAL (operands[5]) == GET_MODE_SIZE (SFmode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_loadwb_pairdf_si ((INTVAL (operands[5]) == GET_MODE_SIZE (DFmode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_loadwb_pairsf_di ((INTVAL (operands[5]) == GET_MODE_SIZE (SFmode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_loadwb_pairdf_di ((INTVAL (operands[5]) == GET_MODE_SIZE (DFmode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_loadwb_pairti_si ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TImode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_loadwb_pairtf_si ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TFmode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_loadwb_pairti_di ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TImode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_loadwb_pairtf_di ((TARGET_SIMD && INTVAL (operands[5]) == GET_MODE_SIZE (TFmode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_storewb_pairsi_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SImode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_storewb_pairsi_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SImode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_storewb_pairdi_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DImode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_storewb_pairdi_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DImode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_storewb_pairsf_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SFmode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_storewb_pairdf_si ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DFmode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_storewb_pairsf_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (SFmode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_storewb_pairdf_di ((INTVAL (operands[5]) == INTVAL (operands[4]) + GET_MODE_SIZE (DFmode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_storewb_pairti_si ((TARGET_SIMD \
- && INTVAL (operands[5]) \
- == INTVAL (operands[4]) + GET_MODE_SIZE (TImode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_storewb_pairtf_si ((TARGET_SIMD \
- && INTVAL (operands[5]) \
- == INTVAL (operands[4]) + GET_MODE_SIZE (TFmode)) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_storewb_pairti_di ((TARGET_SIMD \
- && INTVAL (operands[5]) \
- == INTVAL (operands[4]) + GET_MODE_SIZE (TImode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_storewb_pairtf_di ((TARGET_SIMD \
- && INTVAL (operands[5]) \
- == INTVAL (operands[4]) + GET_MODE_SIZE (TFmode)) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_addsi3_compare0 1
- #define HAVE_adddi3_compare0 1
- #define HAVE_addsi3_compareC 1
- #define HAVE_adddi3_compareC 1
- #define HAVE_addsi3_compareV_imm 1
- #define HAVE_adddi3_compareV_imm 1
- #define HAVE_addsi3_compareV 1
- #define HAVE_adddi3_compareV 1
- #define HAVE_aarch64_subsi_compare0 1
- #define HAVE_aarch64_subdi_compare0 1
- #define HAVE_subsi3 1
- #define HAVE_subdi3 1
- #define HAVE_subvsi_insn 1
- #define HAVE_subvdi_insn 1
- #define HAVE_subvsi_imm 1
- #define HAVE_subvdi_imm 1
- #define HAVE_negvsi_insn 1
- #define HAVE_negvdi_insn 1
- #define HAVE_negvsi_cmp_only 1
- #define HAVE_negvdi_cmp_only 1
- #define HAVE_negdi_carryout 1
- #define HAVE_negvdi_carryinV 1
- #define HAVE_subsi3_compare1_imm (UINTVAL (operands[2]) == -UINTVAL (operands[3]))
- #define HAVE_subdi3_compare1_imm (UINTVAL (operands[2]) == -UINTVAL (operands[3]))
- #define HAVE_subsi3_compare1 1
- #define HAVE_subdi3_compare1 1
- #define HAVE_negsi2 1
- #define HAVE_negdi2 1
- #define HAVE_negsi2_compare0 1
- #define HAVE_negdi2_compare0 1
- #define HAVE_mulsi3 1
- #define HAVE_muldi3 1
- #define HAVE_maddsi 1
- #define HAVE_madddi 1
- #define HAVE_mulsidi3 1
- #define HAVE_umulsidi3 1
- #define HAVE_maddsidi4 1
- #define HAVE_umaddsidi4 1
- #define HAVE_msubsidi4 1
- #define HAVE_umsubsidi4 1
- #define HAVE_smuldi3_highpart 1
- #define HAVE_umuldi3_highpart 1
- #define HAVE_divsi3 1
- #define HAVE_udivsi3 1
- #define HAVE_divdi3 1
- #define HAVE_udivdi3 1
- #define HAVE_cmpsi 1
- #define HAVE_cmpdi 1
- #define HAVE_fcmpsf (TARGET_FLOAT)
- #define HAVE_fcmpdf (TARGET_FLOAT)
- #define HAVE_fcmpesf (TARGET_FLOAT)
- #define HAVE_fcmpedf (TARGET_FLOAT)
- #define HAVE_aarch64_cstoreqi 1
- #define HAVE_aarch64_cstorehi 1
- #define HAVE_aarch64_cstoresi 1
- #define HAVE_aarch64_cstoredi 1
- #define HAVE_cstoreqi_neg 1
- #define HAVE_cstorehi_neg 1
- #define HAVE_cstoresi_neg 1
- #define HAVE_cstoredi_neg 1
- #define HAVE_aarch64_crc32b (TARGET_CRC32)
- #define HAVE_aarch64_crc32h (TARGET_CRC32)
- #define HAVE_aarch64_crc32w (TARGET_CRC32)
- #define HAVE_aarch64_crc32x (TARGET_CRC32)
- #define HAVE_aarch64_crc32cb (TARGET_CRC32)
- #define HAVE_aarch64_crc32ch (TARGET_CRC32)
- #define HAVE_aarch64_crc32cw (TARGET_CRC32)
- #define HAVE_aarch64_crc32cx (TARGET_CRC32)
- #define HAVE_csinc3si_insn 1
- #define HAVE_csinc3di_insn 1
- #define HAVE_csneg3_uxtw_insn 1
- #define HAVE_csneg3si_insn 1
- #define HAVE_csneg3di_insn 1
- #define HAVE_aarch64_uqdecsi (TARGET_SVE)
- #define HAVE_aarch64_uqdecdi (TARGET_SVE)
- #define HAVE_andsi3 1
- #define HAVE_iorsi3 1
- #define HAVE_xorsi3 1
- #define HAVE_anddi3 1
- #define HAVE_iordi3 1
- #define HAVE_xordi3 1
- #define HAVE_one_cmplsi2 1
- #define HAVE_one_cmpldi2 1
- #define HAVE_and_one_cmpl_ashlsi3 1
- #define HAVE_ior_one_cmpl_ashlsi3 1
- #define HAVE_xor_one_cmpl_ashlsi3 1
- #define HAVE_and_one_cmpl_ashrsi3 1
- #define HAVE_ior_one_cmpl_ashrsi3 1
- #define HAVE_xor_one_cmpl_ashrsi3 1
- #define HAVE_and_one_cmpl_lshrsi3 1
- #define HAVE_ior_one_cmpl_lshrsi3 1
- #define HAVE_xor_one_cmpl_lshrsi3 1
- #define HAVE_and_one_cmpl_rotrsi3 1
- #define HAVE_ior_one_cmpl_rotrsi3 1
- #define HAVE_xor_one_cmpl_rotrsi3 1
- #define HAVE_and_one_cmpl_ashldi3 1
- #define HAVE_ior_one_cmpl_ashldi3 1
- #define HAVE_xor_one_cmpl_ashldi3 1
- #define HAVE_and_one_cmpl_ashrdi3 1
- #define HAVE_ior_one_cmpl_ashrdi3 1
- #define HAVE_xor_one_cmpl_ashrdi3 1
- #define HAVE_and_one_cmpl_lshrdi3 1
- #define HAVE_ior_one_cmpl_lshrdi3 1
- #define HAVE_xor_one_cmpl_lshrdi3 1
- #define HAVE_and_one_cmpl_rotrdi3 1
- #define HAVE_ior_one_cmpl_rotrdi3 1
- #define HAVE_xor_one_cmpl_rotrdi3 1
- #define HAVE_clzsi2 1
- #define HAVE_clzdi2 1
- #define HAVE_clrsbsi2 1
- #define HAVE_clrsbdi2 1
- #define HAVE_rbitsi2 1
- #define HAVE_rbitdi2 1
- #define HAVE_ctzsi2 1
- #define HAVE_ctzdi2 1
- #define HAVE_bswapsi2 1
- #define HAVE_bswapdi2 1
- #define HAVE_bswaphi2 1
- #define HAVE_rev16si2 (aarch_rev16_shleft_mask_imm_p (operands[3], SImode) \
- && aarch_rev16_shright_mask_imm_p (operands[2], SImode))
- #define HAVE_rev16di2 (aarch_rev16_shleft_mask_imm_p (operands[3], DImode) \
- && aarch_rev16_shright_mask_imm_p (operands[2], DImode))
- #define HAVE_rev16si2_alt (aarch_rev16_shleft_mask_imm_p (operands[3], SImode) \
- && aarch_rev16_shright_mask_imm_p (operands[2], SImode))
- #define HAVE_rev16di2_alt (aarch_rev16_shleft_mask_imm_p (operands[3], DImode) \
- && aarch_rev16_shright_mask_imm_p (operands[2], DImode))
- #define HAVE_btrunchf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_ceilhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_floorhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_frintnhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_nearbyinthf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_rinthf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_roundhf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_btruncsf2 (TARGET_FLOAT)
- #define HAVE_ceilsf2 (TARGET_FLOAT)
- #define HAVE_floorsf2 (TARGET_FLOAT)
- #define HAVE_frintnsf2 (TARGET_FLOAT)
- #define HAVE_nearbyintsf2 (TARGET_FLOAT)
- #define HAVE_rintsf2 (TARGET_FLOAT)
- #define HAVE_roundsf2 (TARGET_FLOAT)
- #define HAVE_btruncdf2 (TARGET_FLOAT)
- #define HAVE_ceildf2 (TARGET_FLOAT)
- #define HAVE_floordf2 (TARGET_FLOAT)
- #define HAVE_frintndf2 (TARGET_FLOAT)
- #define HAVE_nearbyintdf2 (TARGET_FLOAT)
- #define HAVE_rintdf2 (TARGET_FLOAT)
- #define HAVE_rounddf2 (TARGET_FLOAT)
- #define HAVE_lbtrunchfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lceilhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lfloorhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lroundhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lfrintnhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lbtruncuhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lceiluhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lflooruhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lrounduhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lfrintnuhfsi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lbtruncsfsi2 (TARGET_FLOAT)
- #define HAVE_lceilsfsi2 (TARGET_FLOAT)
- #define HAVE_lfloorsfsi2 (TARGET_FLOAT)
- #define HAVE_lroundsfsi2 (TARGET_FLOAT)
- #define HAVE_lfrintnsfsi2 (TARGET_FLOAT)
- #define HAVE_lbtruncusfsi2 (TARGET_FLOAT)
- #define HAVE_lceilusfsi2 (TARGET_FLOAT)
- #define HAVE_lfloorusfsi2 (TARGET_FLOAT)
- #define HAVE_lroundusfsi2 (TARGET_FLOAT)
- #define HAVE_lfrintnusfsi2 (TARGET_FLOAT)
- #define HAVE_lbtruncdfsi2 (TARGET_FLOAT)
- #define HAVE_lceildfsi2 (TARGET_FLOAT)
- #define HAVE_lfloordfsi2 (TARGET_FLOAT)
- #define HAVE_lrounddfsi2 (TARGET_FLOAT)
- #define HAVE_lfrintndfsi2 (TARGET_FLOAT)
- #define HAVE_lbtruncudfsi2 (TARGET_FLOAT)
- #define HAVE_lceiludfsi2 (TARGET_FLOAT)
- #define HAVE_lfloorudfsi2 (TARGET_FLOAT)
- #define HAVE_lroundudfsi2 (TARGET_FLOAT)
- #define HAVE_lfrintnudfsi2 (TARGET_FLOAT)
- #define HAVE_lbtrunchfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lceilhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lfloorhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lroundhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lfrintnhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lbtruncuhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lceiluhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lflooruhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lrounduhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lfrintnuhfdi2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_lbtruncsfdi2 (TARGET_FLOAT)
- #define HAVE_lceilsfdi2 (TARGET_FLOAT)
- #define HAVE_lfloorsfdi2 (TARGET_FLOAT)
- #define HAVE_lroundsfdi2 (TARGET_FLOAT)
- #define HAVE_lfrintnsfdi2 (TARGET_FLOAT)
- #define HAVE_lbtruncusfdi2 (TARGET_FLOAT)
- #define HAVE_lceilusfdi2 (TARGET_FLOAT)
- #define HAVE_lfloorusfdi2 (TARGET_FLOAT)
- #define HAVE_lroundusfdi2 (TARGET_FLOAT)
- #define HAVE_lfrintnusfdi2 (TARGET_FLOAT)
- #define HAVE_lbtruncdfdi2 (TARGET_FLOAT)
- #define HAVE_lceildfdi2 (TARGET_FLOAT)
- #define HAVE_lfloordfdi2 (TARGET_FLOAT)
- #define HAVE_lrounddfdi2 (TARGET_FLOAT)
- #define HAVE_lfrintndfdi2 (TARGET_FLOAT)
- #define HAVE_lbtruncudfdi2 (TARGET_FLOAT)
- #define HAVE_lceiludfdi2 (TARGET_FLOAT)
- #define HAVE_lfloorudfdi2 (TARGET_FLOAT)
- #define HAVE_lroundudfdi2 (TARGET_FLOAT)
- #define HAVE_lfrintnudfdi2 (TARGET_FLOAT)
- #define HAVE_extendsfdf2 (TARGET_FLOAT)
- #define HAVE_extendhfsf2 (TARGET_FLOAT)
- #define HAVE_extendhfdf2 (TARGET_FLOAT)
- #define HAVE_truncdfsf2 (TARGET_FLOAT)
- #define HAVE_truncsfhf2 (TARGET_FLOAT)
- #define HAVE_truncdfhf2 (TARGET_FLOAT)
- #define HAVE_fix_truncsfsi2 (TARGET_FLOAT)
- #define HAVE_fixuns_truncsfsi2 (TARGET_FLOAT)
- #define HAVE_fix_truncdfdi2 (TARGET_FLOAT)
- #define HAVE_fixuns_truncdfdi2 (TARGET_FLOAT)
- #define HAVE_fix_trunchfsi2 (TARGET_FP_F16INST)
- #define HAVE_fixuns_trunchfsi2 (TARGET_FP_F16INST)
- #define HAVE_fix_trunchfdi2 (TARGET_FP_F16INST)
- #define HAVE_fixuns_trunchfdi2 (TARGET_FP_F16INST)
- #define HAVE_fix_truncdfsi2 (TARGET_FLOAT)
- #define HAVE_fixuns_truncdfsi2 (TARGET_FLOAT)
- #define HAVE_fix_truncsfdi2 (TARGET_FLOAT)
- #define HAVE_fixuns_truncsfdi2 (TARGET_FLOAT)
- #define HAVE_floatsisf2 (TARGET_FLOAT)
- #define HAVE_floatunssisf2 (TARGET_FLOAT)
- #define HAVE_floatdidf2 (TARGET_FLOAT)
- #define HAVE_floatunsdidf2 (TARGET_FLOAT)
- #define HAVE_floatdisf2 (TARGET_FLOAT)
- #define HAVE_floatunsdisf2 (TARGET_FLOAT)
- #define HAVE_floatsidf2 (TARGET_FLOAT)
- #define HAVE_floatunssidf2 (TARGET_FLOAT)
- #define HAVE_aarch64_fp16_floatsihf2 (TARGET_FP_F16INST)
- #define HAVE_aarch64_fp16_floatunssihf2 (TARGET_FP_F16INST)
- #define HAVE_aarch64_fp16_floatdihf2 (TARGET_FP_F16INST)
- #define HAVE_aarch64_fp16_floatunsdihf2 (TARGET_FP_F16INST)
- #define HAVE_fcvtzssf3 1
- #define HAVE_fcvtzusf3 1
- #define HAVE_fcvtzsdf3 1
- #define HAVE_fcvtzudf3 1
- #define HAVE_scvtfsi3 1
- #define HAVE_ucvtfsi3 1
- #define HAVE_scvtfdi3 1
- #define HAVE_ucvtfdi3 1
- #define HAVE_fcvtzshfsi3 (TARGET_FP_F16INST)
- #define HAVE_fcvtzuhfsi3 (TARGET_FP_F16INST)
- #define HAVE_fcvtzshfdi3 (TARGET_FP_F16INST)
- #define HAVE_fcvtzuhfdi3 (TARGET_FP_F16INST)
- #define HAVE_scvtfsihf3 (TARGET_FP_F16INST)
- #define HAVE_ucvtfsihf3 (TARGET_FP_F16INST)
- #define HAVE_scvtfdihf3 (TARGET_FP_F16INST)
- #define HAVE_ucvtfdihf3 (TARGET_FP_F16INST)
- #define HAVE_fcvtzshf3 (TARGET_SIMD)
- #define HAVE_fcvtzuhf3 (TARGET_SIMD)
- #define HAVE_scvtfhi3 (TARGET_SIMD)
- #define HAVE_ucvtfhi3 (TARGET_SIMD)
- #define HAVE_addhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_addsf3 (TARGET_FLOAT)
- #define HAVE_adddf3 (TARGET_FLOAT)
- #define HAVE_subhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_subsf3 (TARGET_FLOAT)
- #define HAVE_subdf3 (TARGET_FLOAT)
- #define HAVE_mulhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_mulsf3 (TARGET_FLOAT)
- #define HAVE_muldf3 (TARGET_FLOAT)
- #define HAVE_neghf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_negsf2 (TARGET_FLOAT)
- #define HAVE_negdf2 (TARGET_FLOAT)
- #define HAVE_abshf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_abssf2 (TARGET_FLOAT)
- #define HAVE_absdf2 (TARGET_FLOAT)
- #define HAVE_smaxsf3 (TARGET_FLOAT)
- #define HAVE_smaxdf3 (TARGET_FLOAT)
- #define HAVE_sminsf3 (TARGET_FLOAT)
- #define HAVE_smindf3 (TARGET_FLOAT)
- #define HAVE_smax_nanhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_smin_nanhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_fmaxhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_fminhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_smax_nansf3 (TARGET_FLOAT)
- #define HAVE_smin_nansf3 (TARGET_FLOAT)
- #define HAVE_fmaxsf3 (TARGET_FLOAT)
- #define HAVE_fminsf3 (TARGET_FLOAT)
- #define HAVE_smax_nandf3 (TARGET_FLOAT)
- #define HAVE_smin_nandf3 (TARGET_FLOAT)
- #define HAVE_fmaxdf3 (TARGET_FLOAT)
- #define HAVE_fmindf3 (TARGET_FLOAT)
- #define HAVE_copysignsf3_insn (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_copysigndf3_insn (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_aarch64_movdi_tilow (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movdi_tflow (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movdi_tihigh (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movdi_tfhigh (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movtihigh_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movtfhigh_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movtilow_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movtflow_di (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_aarch64_movtilow_tilow (TARGET_FLOAT && (reload_completed || reload_in_progress))
- #define HAVE_add_losym_si (ptr_mode == SImode || Pmode == SImode)
- #define HAVE_add_losym_di (ptr_mode == DImode || Pmode == DImode)
- #define HAVE_ldr_got_small_si (ptr_mode == SImode)
- #define HAVE_ldr_got_small_di (ptr_mode == DImode)
- #define HAVE_ldr_got_small_sidi (TARGET_ILP32)
- #define HAVE_ldr_got_small_28k_si (ptr_mode == SImode)
- #define HAVE_ldr_got_small_28k_di (ptr_mode == DImode)
- #define HAVE_ldr_got_small_28k_sidi (TARGET_ILP32)
- #define HAVE_ldr_got_tiny_si (ptr_mode == SImode)
- #define HAVE_ldr_got_tiny_di (ptr_mode == DImode)
- #define HAVE_ldr_got_tiny_sidi (TARGET_ILP32)
- #define HAVE_aarch64_load_tp_hard 1
- #define HAVE_tlsie_small_si (ptr_mode == SImode)
- #define HAVE_tlsie_small_di (ptr_mode == DImode)
- #define HAVE_tlsie_small_sidi 1
- #define HAVE_tlsie_tiny_si (ptr_mode == SImode)
- #define HAVE_tlsie_tiny_di (ptr_mode == DImode)
- #define HAVE_tlsie_tiny_sidi 1
- #define HAVE_tlsle12_si (ptr_mode == SImode || Pmode == SImode)
- #define HAVE_tlsle12_di (ptr_mode == DImode || Pmode == DImode)
- #define HAVE_tlsle24_si (ptr_mode == SImode || Pmode == SImode)
- #define HAVE_tlsle24_di (ptr_mode == DImode || Pmode == DImode)
- #define HAVE_tlsle32_si (ptr_mode == SImode || Pmode == SImode)
- #define HAVE_tlsle32_di (ptr_mode == DImode || Pmode == DImode)
- #define HAVE_tlsle48_si (ptr_mode == SImode || Pmode == SImode)
- #define HAVE_tlsle48_di (ptr_mode == DImode || Pmode == DImode)
- #define HAVE_tlsdesc_small_advsimd_si ((TARGET_TLS_DESC && !TARGET_SVE) && (ptr_mode == SImode))
- #define HAVE_tlsdesc_small_advsimd_di ((TARGET_TLS_DESC && !TARGET_SVE) && (ptr_mode == DImode))
- #define HAVE_tlsdesc_small_sve_si ((TARGET_TLS_DESC && TARGET_SVE) && (ptr_mode == SImode))
- #define HAVE_tlsdesc_small_sve_di ((TARGET_TLS_DESC && TARGET_SVE) && (ptr_mode == DImode))
- #define HAVE_stack_tie 1
- #define HAVE_aarch64_fjcvtzs (TARGET_JSCVT)
- #define HAVE_paciasp 1
- #define HAVE_autiasp 1
- #define HAVE_pacibsp 1
- #define HAVE_autibsp 1
- #define HAVE_pacia1716 1
- #define HAVE_autia1716 1
- #define HAVE_pacib1716 1
- #define HAVE_autib1716 1
- #define HAVE_xpaclri 1
- #define HAVE_blockage 1
- #define HAVE_probe_stack_range 1
- #define HAVE_probe_sve_stack_clash_si ((TARGET_SVE) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_probe_sve_stack_clash_di ((TARGET_SVE) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_reg_stack_protect_address_si ((aarch64_stack_protector_guard != SSP_GLOBAL) && (ptr_mode == SImode))
- #define HAVE_reg_stack_protect_address_di ((aarch64_stack_protector_guard != SSP_GLOBAL) && (ptr_mode == DImode))
- #define HAVE_stack_protect_set_si (ptr_mode == SImode)
- #define HAVE_stack_protect_set_di (ptr_mode == DImode)
- #define HAVE_stack_protect_test_si (ptr_mode == SImode)
- #define HAVE_stack_protect_test_di (ptr_mode == DImode)
- #define HAVE_set_fpcr 1
- #define HAVE_get_fpcr 1
- #define HAVE_set_fpsr 1
- #define HAVE_get_fpsr 1
- #define HAVE_speculation_tracker 1
- #define HAVE_speculation_tracker_rev 1
- #define HAVE_bti_noarg 1
- #define HAVE_bti_c 1
- #define HAVE_bti_j 1
- #define HAVE_bti_jc 1
- #define HAVE_speculation_barrier 1
- #define HAVE_despeculate_simpleqi 1
- #define HAVE_despeculate_simplehi 1
- #define HAVE_despeculate_simplesi 1
- #define HAVE_despeculate_simpledi 1
- #define HAVE_despeculate_simpleti 1
- #define HAVE_aarch64_frint32zv2sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32xv2sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64zv2sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64xv2sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32zv4sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32xv4sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64zv4sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64xv4sf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V4SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32zv2df (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32xv2df (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64zv2df (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64xv2df (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (V2DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32zdf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32xdf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64zdf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64xdf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (DFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32zsf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint32xsf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64zsf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
- #define HAVE_aarch64_frint64xsf (TARGET_FRINT && TARGET_FLOAT \
- && !(VECTOR_MODE_P (SFmode) && !TARGET_SIMD))
- #define HAVE_tstart (TARGET_TME)
- #define HAVE_ttest (TARGET_TME)
- #define HAVE_tcommit (TARGET_TME)
- #define HAVE_tcancel (TARGET_TME && (UINTVAL (operands[0]) <= 65535))
- #define HAVE_aarch64_rndr (TARGET_RNG)
- #define HAVE_aarch64_rndrrs (TARGET_RNG)
- #define HAVE_irg (TARGET_MEMTAG)
- #define HAVE_gmi (TARGET_MEMTAG)
- #define HAVE_addg (TARGET_MEMTAG)
- #define HAVE_subp (TARGET_MEMTAG)
- #define HAVE_ldg (TARGET_MEMTAG)
- #define HAVE_stg (TARGET_MEMTAG)
- #define HAVE_aarch64_simd_dupv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv4hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv2sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv4bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_dupv8bf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_128v8qi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_64v16qi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_128v4hi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_64v8hi (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_128v2si (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_64v4si (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_128v4hf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_64v8hf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_128v2sf (TARGET_SIMD)
- #define HAVE_aarch64_dup_lane_to_64v4sf (TARGET_SIMD)
- #define HAVE_aarch64_store_lane0v8qi (TARGET_SIMD \
- && ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v16qi (TARGET_SIMD \
- && ENDIAN_LANE_N (16, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v4hi (TARGET_SIMD \
- && ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v8hi (TARGET_SIMD \
- && ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v2si (TARGET_SIMD \
- && ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v4si (TARGET_SIMD \
- && ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v2di (TARGET_SIMD \
- && ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v4hf (TARGET_SIMD \
- && ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v8hf (TARGET_SIMD \
- && ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v4bf (TARGET_SIMD \
- && ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v8bf (TARGET_SIMD \
- && ENDIAN_LANE_N (8, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v2sf (TARGET_SIMD \
- && ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v4sf (TARGET_SIMD \
- && ENDIAN_LANE_N (4, INTVAL (operands[2])) == 0)
- #define HAVE_aarch64_store_lane0v2df (TARGET_SIMD \
- && ENDIAN_LANE_N (2, INTVAL (operands[2])) == 0)
- #define HAVE_load_pairv8qiv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_load_pairv4hiv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_load_pairv4hfv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_load_pairv2siv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_load_pairv2sfv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_load_pairdfv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pairv8qiv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_load_pairv4hiv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_load_pairv4hfv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_load_pairv2siv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_load_pairv2sfv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_load_pairdfv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pairv8qiv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_load_pairv4hiv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_load_pairv4hfv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_load_pairv2siv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_load_pairv2sfv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_load_pairdfv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pairv8qiv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_load_pairv4hiv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_load_pairv4hfv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_load_pairv2siv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_load_pairv2sfv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_load_pairdfv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pairv8qiv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_load_pairv4hiv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_load_pairv4hfv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_load_pairv2siv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_load_pairv2sfv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_load_pairdfv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pairv8qidf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_load_pairv4hidf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_load_pairv4hfdf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_load_pairv2sidf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_load_pairv2sfdf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_load_pairdfdf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_vec_store_pairv8qiv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_vec_store_pairv4hiv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_vec_store_pairv4hfv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_vec_store_pairv2siv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_vec_store_pairv2sfv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_vec_store_pairdfv8qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_vec_store_pairv8qiv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_vec_store_pairv4hiv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_vec_store_pairv4hfv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_vec_store_pairv2siv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_vec_store_pairv2sfv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_vec_store_pairdfv4hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_vec_store_pairv8qiv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_vec_store_pairv4hiv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_vec_store_pairv4hfv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_vec_store_pairv2siv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_vec_store_pairv2sfv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_vec_store_pairdfv4hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_vec_store_pairv8qiv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_vec_store_pairv4hiv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_vec_store_pairv4hfv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_vec_store_pairv2siv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_vec_store_pairv2sfv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_vec_store_pairdfv2si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_vec_store_pairv8qiv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_vec_store_pairv4hiv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_vec_store_pairv4hfv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_vec_store_pairv2siv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_vec_store_pairv2sfv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_vec_store_pairdfv2sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_vec_store_pairv8qidf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_vec_store_pairv4hidf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_vec_store_pairv4hfdf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_vec_store_pairv2sidf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_vec_store_pairv2sfdf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_vec_store_pairdfdf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_load_pairv16qiv16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv16qiv8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv16qiv4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv16qiv2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv16qiv8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv16qiv8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv16qiv4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv16qiv2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_load_pairv8hiv16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv8hiv8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv8hiv4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv8hiv2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv8hiv8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv8hiv8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv8hiv4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv8hiv2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_load_pairv4siv16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv4siv8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv4siv4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv4siv2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv4siv8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv4siv8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv4siv4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv4siv2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_load_pairv2div16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv2div8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv2div4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv2div2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv2div8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv2div8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv2div4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv2div2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_load_pairv8hfv16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv8hfv8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv8hfv4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv8hfv2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv8hfv8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv8hfv8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv8hfv4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv8hfv2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_load_pairv4sfv16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv4sfv8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv4sfv4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv4sfv2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv4sfv8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv4sfv8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv4sfv4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv4sfv2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_load_pairv2dfv16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv2dfv8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv2dfv4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv2dfv2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv2dfv8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv2dfv8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv2dfv4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv2dfv2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_load_pairv8bfv16qi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_load_pairv8bfv8hi (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_load_pairv8bfv4si (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_load_pairv8bfv2di (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_load_pairv8bfv8hf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_load_pairv8bfv8bf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_load_pairv8bfv4sf (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_load_pairv8bfv2df (TARGET_SIMD \
- && rtx_equal_p (XEXP (operands[3], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv16qiv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv16qiv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv16qiv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv16qiv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv16qiv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv16qiv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv16qiv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv16qiv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V16QImode))))
- #define HAVE_vec_store_pairv8hiv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv8hiv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv8hiv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv8hiv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv8hiv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv8hiv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv8hiv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv8hiv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HImode))))
- #define HAVE_vec_store_pairv4siv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv4siv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv4siv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv4siv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv4siv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv4siv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv4siv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv4siv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SImode))))
- #define HAVE_vec_store_pairv2div16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv2div8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv2div4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv2div2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv2div8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv2div8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv2div4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv2div2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DImode))))
- #define HAVE_vec_store_pairv8hfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv8hfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv8hfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv8hfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv8hfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv8hfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv8hfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv8hfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8HFmode))))
- #define HAVE_vec_store_pairv4sfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv4sfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv4sfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv4sfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv4sfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv4sfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv4sfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv4sfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V4SFmode))))
- #define HAVE_vec_store_pairv2dfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv2dfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv2dfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv2dfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv2dfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv2dfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv2dfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv2dfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V2DFmode))))
- #define HAVE_vec_store_pairv8bfv16qi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv8bfv8hi (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv8bfv4si (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv8bfv2di (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv8bfv8hf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv8bfv8bf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv8bfv4sf (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_vec_store_pairv8bfv2df (TARGET_SIMD && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[0], 0), \
- GET_MODE_SIZE (V8BFmode))))
- #define HAVE_aarch64_simd_mov_from_v16qilow (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v8hilow (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v4silow (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v8hflow (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v8bflow (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v4sflow (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v16qihigh (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v8hihigh (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v4sihigh (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v8hfhigh (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v8bfhigh (TARGET_SIMD)
- #define HAVE_aarch64_simd_mov_from_v4sfhigh (TARGET_SIMD)
- #define HAVE_ornv8qi3 (TARGET_SIMD)
- #define HAVE_ornv16qi3 (TARGET_SIMD)
- #define HAVE_ornv4hi3 (TARGET_SIMD)
- #define HAVE_ornv8hi3 (TARGET_SIMD)
- #define HAVE_ornv2si3 (TARGET_SIMD)
- #define HAVE_ornv4si3 (TARGET_SIMD)
- #define HAVE_ornv2di3 (TARGET_SIMD)
- #define HAVE_bicv8qi3 (TARGET_SIMD)
- #define HAVE_bicv16qi3 (TARGET_SIMD)
- #define HAVE_bicv4hi3 (TARGET_SIMD)
- #define HAVE_bicv8hi3 (TARGET_SIMD)
- #define HAVE_bicv2si3 (TARGET_SIMD)
- #define HAVE_bicv4si3 (TARGET_SIMD)
- #define HAVE_bicv2di3 (TARGET_SIMD)
- #define HAVE_addv8qi3 (TARGET_SIMD)
- #define HAVE_addv16qi3 (TARGET_SIMD)
- #define HAVE_addv4hi3 (TARGET_SIMD)
- #define HAVE_addv8hi3 (TARGET_SIMD)
- #define HAVE_addv2si3 (TARGET_SIMD)
- #define HAVE_addv4si3 (TARGET_SIMD)
- #define HAVE_addv2di3 (TARGET_SIMD)
- #define HAVE_subv8qi3 (TARGET_SIMD)
- #define HAVE_subv16qi3 (TARGET_SIMD)
- #define HAVE_subv4hi3 (TARGET_SIMD)
- #define HAVE_subv8hi3 (TARGET_SIMD)
- #define HAVE_subv2si3 (TARGET_SIMD)
- #define HAVE_subv4si3 (TARGET_SIMD)
- #define HAVE_subv2di3 (TARGET_SIMD)
- #define HAVE_mulv8qi3 (TARGET_SIMD)
- #define HAVE_mulv16qi3 (TARGET_SIMD)
- #define HAVE_mulv4hi3 (TARGET_SIMD)
- #define HAVE_mulv8hi3 (TARGET_SIMD)
- #define HAVE_mulv2si3 (TARGET_SIMD)
- #define HAVE_mulv4si3 (TARGET_SIMD)
- #define HAVE_bswapv4hi2 (TARGET_SIMD)
- #define HAVE_bswapv8hi2 (TARGET_SIMD)
- #define HAVE_bswapv2si2 (TARGET_SIMD)
- #define HAVE_bswapv4si2 (TARGET_SIMD)
- #define HAVE_bswapv2di2 (TARGET_SIMD)
- #define HAVE_aarch64_rbitv8qi (TARGET_SIMD)
- #define HAVE_aarch64_rbitv16qi (TARGET_SIMD)
- #define HAVE_aarch64_fcadd90v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcadd270v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcadd90v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcadd270v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcadd90v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcadd270v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcadd90v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcadd270v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcadd90v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcadd270v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla0v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla90v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla180v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla270v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla0v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla90v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla180v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla270v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla0v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla90v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla180v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla270v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla0v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla90v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla180v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla270v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla0v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla90v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla180v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla270v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane0v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane90v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane180v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane270v4hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane0v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane90v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane180v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane270v8hf ((TARGET_COMPLEX) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fcmla_lane0v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane90v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane180v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane270v2sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane0v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane90v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane180v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane270v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane0v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane90v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane180v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_lane270v2df (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_laneq0v4hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_laneq90v4hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_laneq180v4hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmla_laneq270v4hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane0v8hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane90v8hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane180v8hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane270v8hf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane0v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane90v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane180v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_fcmlaq_lane270v4sf (TARGET_COMPLEX)
- #define HAVE_aarch64_sdotv8qi (TARGET_DOTPROD)
- #define HAVE_aarch64_udotv8qi (TARGET_DOTPROD)
- #define HAVE_aarch64_sdotv16qi (TARGET_DOTPROD)
- #define HAVE_aarch64_udotv16qi (TARGET_DOTPROD)
- #define HAVE_aarch64_usdotv8qi (TARGET_I8MM)
- #define HAVE_aarch64_usdotv16qi (TARGET_I8MM)
- #define HAVE_aarch64_sdot_lanev8qi (TARGET_DOTPROD)
- #define HAVE_aarch64_udot_lanev8qi (TARGET_DOTPROD)
- #define HAVE_aarch64_sdot_lanev16qi (TARGET_DOTPROD)
- #define HAVE_aarch64_udot_lanev16qi (TARGET_DOTPROD)
- #define HAVE_aarch64_sdot_laneqv8qi (TARGET_DOTPROD)
- #define HAVE_aarch64_udot_laneqv8qi (TARGET_DOTPROD)
- #define HAVE_aarch64_sdot_laneqv16qi (TARGET_DOTPROD)
- #define HAVE_aarch64_udot_laneqv16qi (TARGET_DOTPROD)
- #define HAVE_aarch64_usdot_lanev8qi (TARGET_I8MM)
- #define HAVE_aarch64_sudot_lanev8qi (TARGET_I8MM)
- #define HAVE_aarch64_usdot_lanev16qi (TARGET_I8MM)
- #define HAVE_aarch64_sudot_lanev16qi (TARGET_I8MM)
- #define HAVE_aarch64_usdot_laneqv8qi (TARGET_I8MM)
- #define HAVE_aarch64_sudot_laneqv8qi (TARGET_I8MM)
- #define HAVE_aarch64_usdot_laneqv16qi (TARGET_I8MM)
- #define HAVE_aarch64_sudot_laneqv16qi (TARGET_I8MM)
- #define HAVE_aarch64_rsqrtev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_rsqrtev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_rsqrtev2sf (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtev4sf (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtev2df (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_rsqrtesf (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtedf (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtsv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_rsqrtsv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_rsqrtsv2sf (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtsv4sf (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtsv2df (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtshf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_rsqrtssf (TARGET_SIMD)
- #define HAVE_aarch64_rsqrtsdf (TARGET_SIMD)
- #define HAVE_negv8qi2 (TARGET_SIMD)
- #define HAVE_negv16qi2 (TARGET_SIMD)
- #define HAVE_negv4hi2 (TARGET_SIMD)
- #define HAVE_negv8hi2 (TARGET_SIMD)
- #define HAVE_negv2si2 (TARGET_SIMD)
- #define HAVE_negv4si2 (TARGET_SIMD)
- #define HAVE_negv2di2 (TARGET_SIMD)
- #define HAVE_absv8qi2 (TARGET_SIMD)
- #define HAVE_absv16qi2 (TARGET_SIMD)
- #define HAVE_absv4hi2 (TARGET_SIMD)
- #define HAVE_absv8hi2 (TARGET_SIMD)
- #define HAVE_absv2si2 (TARGET_SIMD)
- #define HAVE_absv4si2 (TARGET_SIMD)
- #define HAVE_absv2di2 (TARGET_SIMD)
- #define HAVE_aarch64_absv8qi (TARGET_SIMD)
- #define HAVE_aarch64_absv16qi (TARGET_SIMD)
- #define HAVE_aarch64_absv4hi (TARGET_SIMD)
- #define HAVE_aarch64_absv8hi (TARGET_SIMD)
- #define HAVE_aarch64_absv2si (TARGET_SIMD)
- #define HAVE_aarch64_absv4si (TARGET_SIMD)
- #define HAVE_aarch64_absv2di (TARGET_SIMD)
- #define HAVE_aarch64_absdi (TARGET_SIMD)
- #define HAVE_aarch64_sabdv8qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdv8qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdv16qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdv16qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdv4hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdv4hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdv8hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdv8hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdv2si_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdv2si_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdv4si_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdv4si_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdl2v8qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdl2v8qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdl2v16qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdl2v16qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdl2v4hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdl2v4hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdl2v8hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdl2v8hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabdl2v4si_3 (TARGET_SIMD)
- #define HAVE_aarch64_uabdl2v4si_3 (TARGET_SIMD)
- #define HAVE_aarch64_sabalv8qi_4 (TARGET_SIMD)
- #define HAVE_aarch64_uabalv8qi_4 (TARGET_SIMD)
- #define HAVE_aarch64_sabalv16qi_4 (TARGET_SIMD)
- #define HAVE_aarch64_uabalv16qi_4 (TARGET_SIMD)
- #define HAVE_aarch64_sabalv4hi_4 (TARGET_SIMD)
- #define HAVE_aarch64_uabalv4hi_4 (TARGET_SIMD)
- #define HAVE_aarch64_sabalv8hi_4 (TARGET_SIMD)
- #define HAVE_aarch64_uabalv8hi_4 (TARGET_SIMD)
- #define HAVE_aarch64_sabalv4si_4 (TARGET_SIMD)
- #define HAVE_aarch64_uabalv4si_4 (TARGET_SIMD)
- #define HAVE_aarch64_sadalpv8qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uadalpv8qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sadalpv16qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uadalpv16qi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sadalpv4hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uadalpv4hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sadalpv8hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_uadalpv8hi_3 (TARGET_SIMD)
- #define HAVE_aarch64_sadalpv4si_3 (TARGET_SIMD)
- #define HAVE_aarch64_uadalpv4si_3 (TARGET_SIMD)
- #define HAVE_abav8qi_3 (TARGET_SIMD)
- #define HAVE_abav16qi_3 (TARGET_SIMD)
- #define HAVE_abav4hi_3 (TARGET_SIMD)
- #define HAVE_abav8hi_3 (TARGET_SIMD)
- #define HAVE_abav2si_3 (TARGET_SIMD)
- #define HAVE_abav4si_3 (TARGET_SIMD)
- #define HAVE_fabdv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fabdv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fabdv2sf3 (TARGET_SIMD)
- #define HAVE_fabdv4sf3 (TARGET_SIMD)
- #define HAVE_fabdv2df3 (TARGET_SIMD)
- #define HAVE_fabdhf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fabdsf3 (TARGET_SIMD)
- #define HAVE_fabddf3 (TARGET_SIMD)
- #define HAVE_andv8qi3 (TARGET_SIMD)
- #define HAVE_andv16qi3 (TARGET_SIMD)
- #define HAVE_andv4hi3 (TARGET_SIMD)
- #define HAVE_andv8hi3 (TARGET_SIMD)
- #define HAVE_andv2si3 (TARGET_SIMD)
- #define HAVE_andv4si3 (TARGET_SIMD)
- #define HAVE_andv2di3 (TARGET_SIMD)
- #define HAVE_iorv8qi3 (TARGET_SIMD)
- #define HAVE_iorv16qi3 (TARGET_SIMD)
- #define HAVE_iorv4hi3 (TARGET_SIMD)
- #define HAVE_iorv8hi3 (TARGET_SIMD)
- #define HAVE_iorv2si3 (TARGET_SIMD)
- #define HAVE_iorv4si3 (TARGET_SIMD)
- #define HAVE_iorv2di3 (TARGET_SIMD)
- #define HAVE_xorv8qi3 (TARGET_SIMD)
- #define HAVE_xorv16qi3 (TARGET_SIMD)
- #define HAVE_xorv4hi3 (TARGET_SIMD)
- #define HAVE_xorv8hi3 (TARGET_SIMD)
- #define HAVE_xorv2si3 (TARGET_SIMD)
- #define HAVE_xorv4si3 (TARGET_SIMD)
- #define HAVE_xorv2di3 (TARGET_SIMD)
- #define HAVE_one_cmplv8qi2 (TARGET_SIMD)
- #define HAVE_one_cmplv16qi2 (TARGET_SIMD)
- #define HAVE_one_cmplv4hi2 (TARGET_SIMD)
- #define HAVE_one_cmplv8hi2 (TARGET_SIMD)
- #define HAVE_one_cmplv2si2 (TARGET_SIMD)
- #define HAVE_one_cmplv4si2 (TARGET_SIMD)
- #define HAVE_one_cmplv2di2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv4hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv4bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv2sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_setv2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_lshrv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_lshrv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_lshrv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_lshrv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_lshrv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_lshrv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_lshrv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_ashrv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ashrv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ashrv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ashrv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ashrv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ashrv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ashrv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_imm_shlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_imm_shlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_imm_shlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_imm_shlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_imm_shlv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_imm_shlv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_imm_shlv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_sshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_sshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_sshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_sshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_sshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_sshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_sshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv8qi_unsigned (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv16qi_unsigned (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv4hi_unsigned (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv8hi_unsigned (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv2si_unsigned (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv4si_unsigned (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv2di_unsigned (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv8qi_signed (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv16qi_signed (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv4hi_signed (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv8hi_signed (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv2si_signed (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv4si_signed (TARGET_SIMD)
- #define HAVE_aarch64_simd_reg_shlv2di_signed (TARGET_SIMD)
- #define HAVE_vec_shr_v8qi (TARGET_SIMD)
- #define HAVE_vec_shr_v4hi (TARGET_SIMD)
- #define HAVE_vec_shr_v4hf (TARGET_SIMD)
- #define HAVE_vec_shr_v2si (TARGET_SIMD)
- #define HAVE_vec_shr_v2sf (TARGET_SIMD)
- #define HAVE_vec_shr_v4bf (TARGET_SIMD)
- #define HAVE_aarch64_mlav8qi (TARGET_SIMD)
- #define HAVE_aarch64_mlav16qi (TARGET_SIMD)
- #define HAVE_aarch64_mlav4hi (TARGET_SIMD)
- #define HAVE_aarch64_mlav8hi (TARGET_SIMD)
- #define HAVE_aarch64_mlav2si (TARGET_SIMD)
- #define HAVE_aarch64_mlav4si (TARGET_SIMD)
- #define HAVE_aarch64_mlsv8qi (TARGET_SIMD)
- #define HAVE_aarch64_mlsv16qi (TARGET_SIMD)
- #define HAVE_aarch64_mlsv4hi (TARGET_SIMD)
- #define HAVE_aarch64_mlsv8hi (TARGET_SIMD)
- #define HAVE_aarch64_mlsv2si (TARGET_SIMD)
- #define HAVE_aarch64_mlsv4si (TARGET_SIMD)
- #define HAVE_smaxv8qi3 (TARGET_SIMD)
- #define HAVE_sminv8qi3 (TARGET_SIMD)
- #define HAVE_umaxv8qi3 (TARGET_SIMD)
- #define HAVE_uminv8qi3 (TARGET_SIMD)
- #define HAVE_smaxv16qi3 (TARGET_SIMD)
- #define HAVE_sminv16qi3 (TARGET_SIMD)
- #define HAVE_umaxv16qi3 (TARGET_SIMD)
- #define HAVE_uminv16qi3 (TARGET_SIMD)
- #define HAVE_smaxv4hi3 (TARGET_SIMD)
- #define HAVE_sminv4hi3 (TARGET_SIMD)
- #define HAVE_umaxv4hi3 (TARGET_SIMD)
- #define HAVE_uminv4hi3 (TARGET_SIMD)
- #define HAVE_smaxv8hi3 (TARGET_SIMD)
- #define HAVE_sminv8hi3 (TARGET_SIMD)
- #define HAVE_umaxv8hi3 (TARGET_SIMD)
- #define HAVE_uminv8hi3 (TARGET_SIMD)
- #define HAVE_smaxv2si3 (TARGET_SIMD)
- #define HAVE_sminv2si3 (TARGET_SIMD)
- #define HAVE_umaxv2si3 (TARGET_SIMD)
- #define HAVE_uminv2si3 (TARGET_SIMD)
- #define HAVE_smaxv4si3 (TARGET_SIMD)
- #define HAVE_sminv4si3 (TARGET_SIMD)
- #define HAVE_umaxv4si3 (TARGET_SIMD)
- #define HAVE_uminv4si3 (TARGET_SIMD)
- #define HAVE_aarch64_umaxpv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uminpv8qi (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sminpv8qi (TARGET_SIMD)
- #define HAVE_aarch64_umaxpv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uminpv16qi (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sminpv16qi (TARGET_SIMD)
- #define HAVE_aarch64_umaxpv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uminpv4hi (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sminpv4hi (TARGET_SIMD)
- #define HAVE_aarch64_umaxpv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uminpv8hi (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sminpv8hi (TARGET_SIMD)
- #define HAVE_aarch64_umaxpv2si (TARGET_SIMD)
- #define HAVE_aarch64_uminpv2si (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv2si (TARGET_SIMD)
- #define HAVE_aarch64_sminpv2si (TARGET_SIMD)
- #define HAVE_aarch64_umaxpv4si (TARGET_SIMD)
- #define HAVE_aarch64_uminpv4si (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv4si (TARGET_SIMD)
- #define HAVE_aarch64_sminpv4si (TARGET_SIMD)
- #define HAVE_aarch64_smax_nanpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_smin_nanpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_smaxpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_sminpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_smax_nanpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_smin_nanpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_smaxpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_sminpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_smax_nanpv2sf (TARGET_SIMD)
- #define HAVE_aarch64_smin_nanpv2sf (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv2sf (TARGET_SIMD)
- #define HAVE_aarch64_sminpv2sf (TARGET_SIMD)
- #define HAVE_aarch64_smax_nanpv4sf (TARGET_SIMD)
- #define HAVE_aarch64_smin_nanpv4sf (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv4sf (TARGET_SIMD)
- #define HAVE_aarch64_sminpv4sf (TARGET_SIMD)
- #define HAVE_aarch64_smax_nanpv2df (TARGET_SIMD)
- #define HAVE_aarch64_smin_nanpv2df (TARGET_SIMD)
- #define HAVE_aarch64_smaxpv2df (TARGET_SIMD)
- #define HAVE_aarch64_sminpv2df (TARGET_SIMD)
- #define HAVE_move_lo_quad_internal_v16qi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_v8hi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_v4si (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_v2di (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_v8hf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_v8bf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_v4sf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_v2df (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v16qi (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v8hi (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v4si (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v2di (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v8hf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v8bf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v4sf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_move_lo_quad_internal_be_v2df (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v16qi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v8hi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v4si (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v2di (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v8hf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v8bf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v4sf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_v2df (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v16qi (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v8hi (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v4si (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v2di (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v8hf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v8bf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v4sf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_move_hi_quad_be_v2df (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_simd_vec_pack_trunc_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_pack_trunc_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_pack_trunc_v2di (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_v8hi (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_v4si (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_v2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_lo_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacku_lo_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_lo_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacku_lo_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_lo_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacku_lo_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_hi_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacku_hi_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_hi_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacku_hi_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_hi_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacku_hi_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_smult_lo_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_umult_lo_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_smult_lo_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_umult_lo_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_smult_lo_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_umult_lo_v4si (TARGET_SIMD)
- #define HAVE_aarch64_intrinsic_vec_smult_lo_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_intrinsic_vec_umult_lo_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_intrinsic_vec_smult_lo_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_intrinsic_vec_umult_lo_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_intrinsic_vec_smult_lo_v2si (TARGET_SIMD)
- #define HAVE_aarch64_intrinsic_vec_umult_lo_v2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_smult_hi_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_umult_hi_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_smult_hi_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_umult_hi_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_smult_hi_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_umult_hi_v4si (TARGET_SIMD)
- #define HAVE_aarch64_vec_smult_lane_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_umult_lane_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_smult_laneq_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_umult_laneq_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_smult_lane_v2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_umult_lane_v2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_smult_laneq_v2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_umult_laneq_v2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_smlal_lane_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_umlal_lane_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_smlal_laneq_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_umlal_laneq_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_smlal_lane_v2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_umlal_lane_v2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_smlal_laneq_v2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_umlal_laneq_v2si (TARGET_SIMD)
- #define HAVE_addv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_addv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_addv2sf3 (TARGET_SIMD)
- #define HAVE_addv4sf3 (TARGET_SIMD)
- #define HAVE_addv2df3 (TARGET_SIMD)
- #define HAVE_subv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_subv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_subv2sf3 (TARGET_SIMD)
- #define HAVE_subv4sf3 (TARGET_SIMD)
- #define HAVE_subv2df3 (TARGET_SIMD)
- #define HAVE_mulv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_mulv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_mulv2sf3 (TARGET_SIMD)
- #define HAVE_mulv4sf3 (TARGET_SIMD)
- #define HAVE_mulv2df3 (TARGET_SIMD)
- #define HAVE_negv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_negv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_negv2sf2 (TARGET_SIMD)
- #define HAVE_negv4sf2 (TARGET_SIMD)
- #define HAVE_negv2df2 (TARGET_SIMD)
- #define HAVE_absv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_absv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_absv2sf2 (TARGET_SIMD)
- #define HAVE_absv4sf2 (TARGET_SIMD)
- #define HAVE_absv2df2 (TARGET_SIMD)
- #define HAVE_fmav4hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fmav8hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fmav2sf4 (TARGET_SIMD)
- #define HAVE_fmav4sf4 (TARGET_SIMD)
- #define HAVE_fmav2df4 (TARGET_SIMD)
- #define HAVE_fnmav4hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fnmav8hf4 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fnmav2sf4 (TARGET_SIMD)
- #define HAVE_fnmav4sf4 (TARGET_SIMD)
- #define HAVE_fnmav2df4 (TARGET_SIMD)
- #define HAVE_btruncv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_ceilv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_floorv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_frintnv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_nearbyintv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_rintv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_roundv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_btruncv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_ceilv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_floorv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_frintnv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_nearbyintv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_rintv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_roundv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_btruncv2sf2 (TARGET_SIMD)
- #define HAVE_ceilv2sf2 (TARGET_SIMD)
- #define HAVE_floorv2sf2 (TARGET_SIMD)
- #define HAVE_frintnv2sf2 (TARGET_SIMD)
- #define HAVE_nearbyintv2sf2 (TARGET_SIMD)
- #define HAVE_rintv2sf2 (TARGET_SIMD)
- #define HAVE_roundv2sf2 (TARGET_SIMD)
- #define HAVE_btruncv4sf2 (TARGET_SIMD)
- #define HAVE_ceilv4sf2 (TARGET_SIMD)
- #define HAVE_floorv4sf2 (TARGET_SIMD)
- #define HAVE_frintnv4sf2 (TARGET_SIMD)
- #define HAVE_nearbyintv4sf2 (TARGET_SIMD)
- #define HAVE_rintv4sf2 (TARGET_SIMD)
- #define HAVE_roundv4sf2 (TARGET_SIMD)
- #define HAVE_btruncv2df2 (TARGET_SIMD)
- #define HAVE_ceilv2df2 (TARGET_SIMD)
- #define HAVE_floorv2df2 (TARGET_SIMD)
- #define HAVE_frintnv2df2 (TARGET_SIMD)
- #define HAVE_nearbyintv2df2 (TARGET_SIMD)
- #define HAVE_rintv2df2 (TARGET_SIMD)
- #define HAVE_roundv2df2 (TARGET_SIMD)
- #define HAVE_lbtruncv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lceilv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lfloorv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lroundv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lfrintnv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lbtruncuv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lceiluv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lflooruv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lrounduv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lfrintnuv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lbtruncv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lceilv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lfloorv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lroundv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lfrintnv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lbtruncuv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lceiluv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lflooruv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lrounduv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lfrintnuv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_lbtruncv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lceilv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lfloorv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lroundv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lfrintnv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lbtruncuv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lceiluv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lflooruv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lrounduv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lfrintnuv2sfv2si2 (TARGET_SIMD)
- #define HAVE_lbtruncv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lceilv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lfloorv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lroundv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lfrintnv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lbtruncuv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lceiluv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lflooruv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lrounduv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lfrintnuv4sfv4si2 (TARGET_SIMD)
- #define HAVE_lbtruncv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lceilv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lfloorv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lroundv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lfrintnv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lbtruncuv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lceiluv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lflooruv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lrounduv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lfrintnuv2dfv2di2 (TARGET_SIMD)
- #define HAVE_lbtrunchfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lceilhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lfloorhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lroundhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lfrintnhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lbtruncuhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lceiluhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lflooruhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lrounduhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_lfrintnuhfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_fix_trunchfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_fixuns_trunchfhi2 (TARGET_SIMD_F16INST)
- #define HAVE_floathihf2 (TARGET_SIMD_F16INST)
- #define HAVE_floatunshihf2 (TARGET_SIMD_F16INST)
- #define HAVE_floatv4hiv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_floatunsv4hiv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_floatv8hiv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_floatunsv8hiv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_floatv2siv2sf2 (TARGET_SIMD)
- #define HAVE_floatunsv2siv2sf2 (TARGET_SIMD)
- #define HAVE_floatv4siv4sf2 (TARGET_SIMD)
- #define HAVE_floatunsv4siv4sf2 (TARGET_SIMD)
- #define HAVE_floatv2div2df2 (TARGET_SIMD)
- #define HAVE_floatunsv2div2df2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_lo_v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_lo_v4sf (TARGET_SIMD)
- #define HAVE_fcvtzsv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fcvtzuv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fcvtzsv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fcvtzuv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fcvtzsv2sf3 (TARGET_SIMD)
- #define HAVE_fcvtzuv2sf3 (TARGET_SIMD)
- #define HAVE_fcvtzsv4sf3 (TARGET_SIMD)
- #define HAVE_fcvtzuv4sf3 (TARGET_SIMD)
- #define HAVE_fcvtzsv2df3 (TARGET_SIMD)
- #define HAVE_fcvtzuv2df3 (TARGET_SIMD)
- #define HAVE_scvtfv4hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_ucvtfv4hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_scvtfv8hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_ucvtfv8hi3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_scvtfv2si3 (TARGET_SIMD)
- #define HAVE_ucvtfv2si3 (TARGET_SIMD)
- #define HAVE_scvtfv4si3 (TARGET_SIMD)
- #define HAVE_ucvtfv4si3 (TARGET_SIMD)
- #define HAVE_scvtfv2di3 (TARGET_SIMD)
- #define HAVE_ucvtfv2di3 (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_hi_v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_vec_unpacks_hi_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_float_extend_lo_v2df (TARGET_SIMD)
- #define HAVE_aarch64_float_extend_lo_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_float_truncate_lo_v2sf (TARGET_SIMD)
- #define HAVE_aarch64_float_truncate_lo_v4hf (TARGET_SIMD)
- #define HAVE_aarch64_float_truncate_hi_v4sf_le (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_float_truncate_hi_v8hf_le (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_float_truncate_hi_v4sf_be (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_float_truncate_hi_v8hf_be (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_smaxv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_sminv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_smaxv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_sminv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_smaxv2sf3 (TARGET_SIMD)
- #define HAVE_sminv2sf3 (TARGET_SIMD)
- #define HAVE_smaxv4sf3 (TARGET_SIMD)
- #define HAVE_sminv4sf3 (TARGET_SIMD)
- #define HAVE_smaxv2df3 (TARGET_SIMD)
- #define HAVE_sminv2df3 (TARGET_SIMD)
- #define HAVE_smax_nanv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_smin_nanv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fmaxv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fminv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_smax_nanv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_smin_nanv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fmaxv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fminv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_smax_nanv2sf3 (TARGET_SIMD)
- #define HAVE_smin_nanv2sf3 (TARGET_SIMD)
- #define HAVE_fmaxv2sf3 (TARGET_SIMD)
- #define HAVE_fminv2sf3 (TARGET_SIMD)
- #define HAVE_smax_nanv4sf3 (TARGET_SIMD)
- #define HAVE_smin_nanv4sf3 (TARGET_SIMD)
- #define HAVE_fmaxv4sf3 (TARGET_SIMD)
- #define HAVE_fminv4sf3 (TARGET_SIMD)
- #define HAVE_smax_nanv2df3 (TARGET_SIMD)
- #define HAVE_smin_nanv2df3 (TARGET_SIMD)
- #define HAVE_fmaxv2df3 (TARGET_SIMD)
- #define HAVE_fminv2df3 (TARGET_SIMD)
- #define HAVE_aarch64_faddpv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_faddpv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_faddpv2sf (TARGET_SIMD)
- #define HAVE_aarch64_faddpv4sf (TARGET_SIMD)
- #define HAVE_aarch64_faddpv2df (TARGET_SIMD)
- #define HAVE_aarch64_reduc_plus_internalv8qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_plus_internalv16qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_plus_internalv4hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_plus_internalv8hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_plus_internalv4si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_plus_internalv2di (TARGET_SIMD)
- #define HAVE_aarch64_zero_extendsi_reduc_plus_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_zero_extenddi_reduc_plus_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_zero_extendsi_reduc_plus_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_zero_extenddi_reduc_plus_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_zero_extendsi_reduc_plus_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_zero_extenddi_reduc_plus_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_zero_extendsi_reduc_plus_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_zero_extenddi_reduc_plus_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_plus_internalv2si (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v2sf (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v2df (TARGET_SIMD)
- #define HAVE_clrsbv8qi2 (TARGET_SIMD)
- #define HAVE_clrsbv16qi2 (TARGET_SIMD)
- #define HAVE_clrsbv4hi2 (TARGET_SIMD)
- #define HAVE_clrsbv8hi2 (TARGET_SIMD)
- #define HAVE_clrsbv2si2 (TARGET_SIMD)
- #define HAVE_clrsbv4si2 (TARGET_SIMD)
- #define HAVE_clzv8qi2 (TARGET_SIMD)
- #define HAVE_clzv16qi2 (TARGET_SIMD)
- #define HAVE_clzv4hi2 (TARGET_SIMD)
- #define HAVE_clzv8hi2 (TARGET_SIMD)
- #define HAVE_clzv2si2 (TARGET_SIMD)
- #define HAVE_clzv4si2 (TARGET_SIMD)
- #define HAVE_popcountv8qi2 (TARGET_SIMD)
- #define HAVE_popcountv16qi2 (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umax_internalv8qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umin_internalv8qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv8qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv8qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umax_internalv16qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umin_internalv16qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv16qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv16qi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umax_internalv4hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umin_internalv4hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv4hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv4hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umax_internalv8hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umin_internalv8hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv8hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv8hi (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umax_internalv4si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umin_internalv4si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv4si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv4si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umax_internalv2si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_umin_internalv2si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv2si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv2si (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_nan_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smin_nan_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smax_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smin_internalv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smax_nan_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smin_nan_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smax_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smin_internalv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_reduc_smax_nan_internalv2sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_nan_internalv2sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv2sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv2sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_nan_internalv4sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_nan_internalv4sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv4sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv4sf (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_nan_internalv2df (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_nan_internalv2df (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smax_internalv2df (TARGET_SIMD)
- #define HAVE_aarch64_reduc_smin_internalv2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv8qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv4hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv2si_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv2di_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bsldi_internal (TARGET_SIMD)
- #define HAVE_aarch64_simd_bsldi_alt (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_get_lanev2df (TARGET_SIMD)
- #define HAVE_load_pair_lanesv8qi (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V8QImode))))
- #define HAVE_load_pair_lanesv4hi (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HImode))))
- #define HAVE_load_pair_lanesv4bf (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4BFmode))))
- #define HAVE_load_pair_lanesv4hf (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V4HFmode))))
- #define HAVE_load_pair_lanesv2si (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SImode))))
- #define HAVE_load_pair_lanesv2sf (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (V2SFmode))))
- #define HAVE_load_pair_lanesdi (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DImode))))
- #define HAVE_load_pair_lanesdf (TARGET_SIMD && !STRICT_ALIGNMENT \
- && rtx_equal_p (XEXP (operands[2], 0), \
- plus_constant (Pmode, \
- XEXP (operands[1], 0), \
- GET_MODE_SIZE (DFmode))))
- #define HAVE_store_pair_lanesv8qi (TARGET_SIMD)
- #define HAVE_store_pair_lanesv4hi (TARGET_SIMD)
- #define HAVE_store_pair_lanesv4bf (TARGET_SIMD)
- #define HAVE_store_pair_lanesv4hf (TARGET_SIMD)
- #define HAVE_store_pair_lanesv2si (TARGET_SIMD)
- #define HAVE_store_pair_lanesv2sf (TARGET_SIMD)
- #define HAVE_store_pair_lanesdi (TARGET_SIMD)
- #define HAVE_store_pair_lanesdf (TARGET_SIMD)
- #define HAVE_aarch64_combinezv8qi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinezv4hi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinezv4bf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinezv4hf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinezv2si (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinezv2sf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinezdi (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinezdf (TARGET_SIMD && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bev8qi (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bev4hi (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bev4bf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bev4hf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bev2si (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bev2sf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bedi (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_combinez_bedf (TARGET_SIMD && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_saddlv16qi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssublv16qi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv16qi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_usublv16qi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddlv8hi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssublv8hi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv8hi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_usublv8hi_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddlv4si_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssublv4si_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv4si_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_usublv4si_hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddlv16qi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssublv16qi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv16qi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_usublv16qi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddlv8hi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssublv8hi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv8hi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_usublv8hi_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddlv4si_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssublv4si_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv4si_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_usublv4si_lo_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ssublv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_usublv8qi (TARGET_SIMD)
- #define HAVE_aarch64_saddlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ssublv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_usublv4hi (TARGET_SIMD)
- #define HAVE_aarch64_saddlv2si (TARGET_SIMD)
- #define HAVE_aarch64_ssublv2si (TARGET_SIMD)
- #define HAVE_aarch64_uaddlv2si (TARGET_SIMD)
- #define HAVE_aarch64_usublv2si (TARGET_SIMD)
- #define HAVE_aarch64_ssubwv8qi (TARGET_SIMD)
- #define HAVE_aarch64_usubwv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ssubwv4hi (TARGET_SIMD)
- #define HAVE_aarch64_usubwv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ssubwv2si (TARGET_SIMD)
- #define HAVE_aarch64_usubwv2si (TARGET_SIMD)
- #define HAVE_aarch64_ssubwv16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_usubwv16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssubwv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_usubwv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssubwv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_usubwv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssubw2v16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_usubw2v16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssubw2v8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_usubw2v8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_ssubw2v4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_usubw2v4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddwv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uaddwv8qi (TARGET_SIMD)
- #define HAVE_aarch64_saddwv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uaddwv4hi (TARGET_SIMD)
- #define HAVE_aarch64_saddwv2si (TARGET_SIMD)
- #define HAVE_aarch64_uaddwv2si (TARGET_SIMD)
- #define HAVE_aarch64_saddwv16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddwv16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddwv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddwv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddwv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddwv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddw2v16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddw2v16qi_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddw2v8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddw2v8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_saddw2v4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_uaddw2v4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_shaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uhaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_srhaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_urhaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_shsubv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uhsubv8qi (TARGET_SIMD)
- #define HAVE_aarch64_shaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uhaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_srhaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_urhaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_shsubv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uhsubv16qi (TARGET_SIMD)
- #define HAVE_aarch64_shaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uhaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_srhaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_urhaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_shsubv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uhsubv4hi (TARGET_SIMD)
- #define HAVE_aarch64_shaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uhaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_srhaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_urhaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_shsubv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uhsubv8hi (TARGET_SIMD)
- #define HAVE_aarch64_shaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_uhaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_srhaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_urhaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_shsubv2si (TARGET_SIMD)
- #define HAVE_aarch64_uhsubv2si (TARGET_SIMD)
- #define HAVE_aarch64_shaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_uhaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_srhaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_urhaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_shsubv4si (TARGET_SIMD)
- #define HAVE_aarch64_uhsubv4si (TARGET_SIMD)
- #define HAVE_aarch64_addhnv8hi (TARGET_SIMD)
- #define HAVE_aarch64_raddhnv8hi (TARGET_SIMD)
- #define HAVE_aarch64_subhnv8hi (TARGET_SIMD)
- #define HAVE_aarch64_rsubhnv8hi (TARGET_SIMD)
- #define HAVE_aarch64_addhnv4si (TARGET_SIMD)
- #define HAVE_aarch64_raddhnv4si (TARGET_SIMD)
- #define HAVE_aarch64_subhnv4si (TARGET_SIMD)
- #define HAVE_aarch64_rsubhnv4si (TARGET_SIMD)
- #define HAVE_aarch64_addhnv2di (TARGET_SIMD)
- #define HAVE_aarch64_raddhnv2di (TARGET_SIMD)
- #define HAVE_aarch64_subhnv2di (TARGET_SIMD)
- #define HAVE_aarch64_rsubhnv2di (TARGET_SIMD)
- #define HAVE_aarch64_addhn2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_raddhn2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_subhn2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_rsubhn2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_addhn2v4si (TARGET_SIMD)
- #define HAVE_aarch64_raddhn2v4si (TARGET_SIMD)
- #define HAVE_aarch64_subhn2v4si (TARGET_SIMD)
- #define HAVE_aarch64_rsubhn2v4si (TARGET_SIMD)
- #define HAVE_aarch64_addhn2v2di (TARGET_SIMD)
- #define HAVE_aarch64_raddhn2v2di (TARGET_SIMD)
- #define HAVE_aarch64_subhn2v2di (TARGET_SIMD)
- #define HAVE_aarch64_rsubhn2v2di (TARGET_SIMD)
- #define HAVE_aarch64_pmulv8qi (TARGET_SIMD)
- #define HAVE_aarch64_pmulv16qi (TARGET_SIMD)
- #define HAVE_aarch64_fmulxv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fmulxv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fmulxv2sf (TARGET_SIMD)
- #define HAVE_aarch64_fmulxv4sf (TARGET_SIMD)
- #define HAVE_aarch64_fmulxv2df (TARGET_SIMD)
- #define HAVE_aarch64_fmulxhf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_fmulxsf (TARGET_SIMD)
- #define HAVE_aarch64_fmulxdf (TARGET_SIMD)
- #define HAVE_aarch64_sqaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uqaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uqaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uqaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_uqaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqsubv2si (TARGET_SIMD)
- #define HAVE_aarch64_uqsubv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqsubv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqsubv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqaddv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqaddv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqsubv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqsubv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqaddqi (TARGET_SIMD)
- #define HAVE_aarch64_uqaddqi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubqi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubqi (TARGET_SIMD)
- #define HAVE_aarch64_sqaddhi (TARGET_SIMD)
- #define HAVE_aarch64_uqaddhi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubhi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubhi (TARGET_SIMD)
- #define HAVE_aarch64_sqaddsi (TARGET_SIMD)
- #define HAVE_aarch64_uqaddsi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubsi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubsi (TARGET_SIMD)
- #define HAVE_aarch64_sqadddi (TARGET_SIMD)
- #define HAVE_aarch64_uqadddi (TARGET_SIMD)
- #define HAVE_aarch64_sqsubdi (TARGET_SIMD)
- #define HAVE_aarch64_uqsubdi (TARGET_SIMD)
- #define HAVE_aarch64_suqaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_usqaddv8qi (TARGET_SIMD)
- #define HAVE_aarch64_suqaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_usqaddv16qi (TARGET_SIMD)
- #define HAVE_aarch64_suqaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_usqaddv4hi (TARGET_SIMD)
- #define HAVE_aarch64_suqaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_usqaddv8hi (TARGET_SIMD)
- #define HAVE_aarch64_suqaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_usqaddv2si (TARGET_SIMD)
- #define HAVE_aarch64_suqaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_usqaddv4si (TARGET_SIMD)
- #define HAVE_aarch64_suqaddv2di (TARGET_SIMD)
- #define HAVE_aarch64_usqaddv2di (TARGET_SIMD)
- #define HAVE_aarch64_suqaddqi (TARGET_SIMD)
- #define HAVE_aarch64_usqaddqi (TARGET_SIMD)
- #define HAVE_aarch64_suqaddhi (TARGET_SIMD)
- #define HAVE_aarch64_usqaddhi (TARGET_SIMD)
- #define HAVE_aarch64_suqaddsi (TARGET_SIMD)
- #define HAVE_aarch64_usqaddsi (TARGET_SIMD)
- #define HAVE_aarch64_suqadddi (TARGET_SIMD)
- #define HAVE_aarch64_usqadddi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovunv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovunv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqmovunv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqmovunhi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovunsi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovundi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovnv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqmovnv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovnv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqmovnv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqmovnv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqmovnv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqmovnhi (TARGET_SIMD)
- #define HAVE_aarch64_uqmovnhi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovnsi (TARGET_SIMD)
- #define HAVE_aarch64_uqmovnsi (TARGET_SIMD)
- #define HAVE_aarch64_sqmovndi (TARGET_SIMD)
- #define HAVE_aarch64_uqmovndi (TARGET_SIMD)
- #define HAVE_aarch64_sqnegv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqabsv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqnegv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqabsv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqnegv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqabsv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqnegv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqabsv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqnegv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqabsv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqnegv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqabsv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqnegv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqabsv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqnegqi (TARGET_SIMD)
- #define HAVE_aarch64_sqabsqi (TARGET_SIMD)
- #define HAVE_aarch64_sqneghi (TARGET_SIMD)
- #define HAVE_aarch64_sqabshi (TARGET_SIMD)
- #define HAVE_aarch64_sqnegsi (TARGET_SIMD)
- #define HAVE_aarch64_sqabssi (TARGET_SIMD)
- #define HAVE_aarch64_sqnegdi (TARGET_SIMD)
- #define HAVE_aarch64_sqabsdi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulhv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulhv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulhv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulhv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulhv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulhv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulhv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulhv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulhhi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulhhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulhsi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulhsi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_laneqv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_laneqv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_laneqv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_laneqv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_laneqv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_laneqv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_laneqv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_laneqv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_lanehi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_lanehi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_lanesi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_lanesi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_laneqhi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_laneqhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmulh_laneqsi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmulh_laneqsi (TARGET_SIMD)
- #define HAVE_aarch64_sqrdmlahv4hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlshv4hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlahv8hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlshv8hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlahv2si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlshv2si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlahv4si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlshv4si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlahhi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlshhi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlahsi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlshsi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_lanev4hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_lanev4hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_lanev8hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_lanev8hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_lanev2si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_lanev2si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_lanev4si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_lanev4si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_lanehi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_lanehi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_lanesi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_lanesi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_laneqv4hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_laneqv4hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_laneqv8hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_laneqv8hi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_laneqv2si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_laneqv2si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_laneqv4si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_laneqv4si (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_laneqhi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_laneqhi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlah_laneqsi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqrdmlsh_laneqsi (TARGET_SIMD_RDMA)
- #define HAVE_aarch64_sqdmlalv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlslv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlalv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlslv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlalhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlslhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlalsi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlslsi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_laneqv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_laneqv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_laneqv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_laneqv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_lanehi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_lanehi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_lanesi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_lanesi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_laneqhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_laneqhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_laneqsi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_laneqsi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2v8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2v8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2v4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2v4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_lanev8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_lanev8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_lanev4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_lanev4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_laneqv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_laneqv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_laneqv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_laneqv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_nv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_nv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_nv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_nv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmullv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmullv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmullhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmullsi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_laneqv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_laneqv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_lanehi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_lanesi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_laneqhi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_laneqsi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2v8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2v4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_lanev8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_lanev4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_laneqv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_laneqv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_nv8hi_internal (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_nv4si_internal (TARGET_SIMD)
- #define HAVE_aarch64_sshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ushlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_srshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_urshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ushlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_srshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_urshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ushlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_srshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_urshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ushlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_srshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_urshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_ushlv2si (TARGET_SIMD)
- #define HAVE_aarch64_srshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_urshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_sshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_ushlv4si (TARGET_SIMD)
- #define HAVE_aarch64_srshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_urshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_sshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_ushlv2di (TARGET_SIMD)
- #define HAVE_aarch64_srshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_urshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_sshldi (TARGET_SIMD)
- #define HAVE_aarch64_ushldi (TARGET_SIMD)
- #define HAVE_aarch64_srshldi (TARGET_SIMD)
- #define HAVE_aarch64_urshldi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uqshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uqshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uqshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_uqshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqshlqi (TARGET_SIMD)
- #define HAVE_aarch64_uqshlqi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlqi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlqi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlhi (TARGET_SIMD)
- #define HAVE_aarch64_uqshlhi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlhi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlhi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlsi (TARGET_SIMD)
- #define HAVE_aarch64_uqshlsi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshlsi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshlsi (TARGET_SIMD)
- #define HAVE_aarch64_sqshldi (TARGET_SIMD)
- #define HAVE_aarch64_uqshldi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshldi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshldi (TARGET_SIMD)
- #define HAVE_aarch64_sshll_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ushll_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sshll_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ushll_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sshll_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_ushll_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_sshll2_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ushll2_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sshll2_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ushll2_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sshll2_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_ushll2_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_srshr_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_urshr_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_srshr_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_urshr_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_srshr_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_urshr_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_srshr_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_urshr_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_srshr_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_urshr_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_srshr_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_urshr_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_srshr_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_urshr_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_srshr_ndi (TARGET_SIMD)
- #define HAVE_aarch64_urshr_ndi (TARGET_SIMD)
- #define HAVE_aarch64_ssra_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_usra_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_srsra_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ursra_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ssra_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_usra_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_srsra_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ursra_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ssra_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_usra_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_srsra_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ursra_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ssra_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_usra_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_srsra_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ursra_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ssra_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_usra_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_srsra_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_ursra_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_ssra_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_usra_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_srsra_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_ursra_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_ssra_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_usra_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_srsra_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_ursra_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_ssra_ndi (TARGET_SIMD)
- #define HAVE_aarch64_usra_ndi (TARGET_SIMD)
- #define HAVE_aarch64_srsra_ndi (TARGET_SIMD)
- #define HAVE_aarch64_ursra_ndi (TARGET_SIMD)
- #define HAVE_aarch64_ssli_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_usli_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ssri_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_usri_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ssli_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_usli_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ssri_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_usri_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ssli_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_usli_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ssri_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_usri_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ssli_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_usli_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ssri_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_usri_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ssli_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_usli_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_ssri_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_usri_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_ssli_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_usli_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_ssri_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_usri_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_ssli_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_usli_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_ssri_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_usri_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_ssli_ndi (TARGET_SIMD)
- #define HAVE_aarch64_usli_ndi (TARGET_SIMD)
- #define HAVE_aarch64_ssri_ndi (TARGET_SIMD)
- #define HAVE_aarch64_usri_ndi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nv8qi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nv16qi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nv4hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nv2si (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nqi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nqi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nqi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nhi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nhi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nhi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_nsi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_nsi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_nsi (TARGET_SIMD)
- #define HAVE_aarch64_sqshlu_ndi (TARGET_SIMD)
- #define HAVE_aarch64_sqshl_ndi (TARGET_SIMD)
- #define HAVE_aarch64_uqshl_ndi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrun_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrun_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrn_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqshrn_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrn_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshrn_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrun_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrun_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqshrn_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqshrn_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrn_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_uqrshrn_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqshrun_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrun_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqshrn_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqshrn_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrn_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_uqrshrn_nv2di (TARGET_SIMD)
- #define HAVE_aarch64_sqshrun_nhi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrun_nhi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrn_nhi (TARGET_SIMD)
- #define HAVE_aarch64_uqshrn_nhi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrn_nhi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshrn_nhi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrun_nsi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrun_nsi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrn_nsi (TARGET_SIMD)
- #define HAVE_aarch64_uqshrn_nsi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrn_nsi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshrn_nsi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrun_ndi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrun_ndi (TARGET_SIMD)
- #define HAVE_aarch64_sqshrn_ndi (TARGET_SIMD)
- #define HAVE_aarch64_uqshrn_ndi (TARGET_SIMD)
- #define HAVE_aarch64_sqrshrn_ndi (TARGET_SIMD)
- #define HAVE_aarch64_uqrshrn_ndi (TARGET_SIMD)
- #define HAVE_aarch64_cmltv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmlev8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgev8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmltv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmlev16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgev16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmltv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmlev4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgev4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmltv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmlev8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgev8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmltv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmlev2si (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmgev2si (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmltv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmlev4si (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmgev4si (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmltv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmlev2di (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmgev2di (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmltdi (TARGET_SIMD)
- #define HAVE_aarch64_cmledi (TARGET_SIMD)
- #define HAVE_aarch64_cmeqdi (TARGET_SIMD)
- #define HAVE_aarch64_cmgedi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtdi (TARGET_SIMD)
- #define HAVE_aarch64_cmltuv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmleuv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgeuv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtuv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmltuv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmleuv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgeuv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtuv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmltuv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmleuv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgeuv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtuv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmltuv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmleuv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgeuv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtuv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmltuv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmleuv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmgeuv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmgtuv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmltuv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmleuv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmgeuv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmgtuv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmltuv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmleuv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmgeuv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmgtuv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmltudi (TARGET_SIMD)
- #define HAVE_aarch64_cmleudi (TARGET_SIMD)
- #define HAVE_aarch64_cmgeudi (TARGET_SIMD)
- #define HAVE_aarch64_cmgtudi (TARGET_SIMD)
- #define HAVE_aarch64_cmtstv8qi (TARGET_SIMD)
- #define HAVE_aarch64_cmtstv16qi (TARGET_SIMD)
- #define HAVE_aarch64_cmtstv4hi (TARGET_SIMD)
- #define HAVE_aarch64_cmtstv8hi (TARGET_SIMD)
- #define HAVE_aarch64_cmtstv2si (TARGET_SIMD)
- #define HAVE_aarch64_cmtstv4si (TARGET_SIMD)
- #define HAVE_aarch64_cmtstv2di (TARGET_SIMD)
- #define HAVE_aarch64_cmtstdi (TARGET_SIMD)
- #define HAVE_aarch64_cmltv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmlev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmeqv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmgev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmgtv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmltv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmlev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmeqv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmgev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmgtv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmltv2sf (TARGET_SIMD)
- #define HAVE_aarch64_cmlev2sf (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv2sf (TARGET_SIMD)
- #define HAVE_aarch64_cmgev2sf (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv2sf (TARGET_SIMD)
- #define HAVE_aarch64_cmltv4sf (TARGET_SIMD)
- #define HAVE_aarch64_cmlev4sf (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv4sf (TARGET_SIMD)
- #define HAVE_aarch64_cmgev4sf (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv4sf (TARGET_SIMD)
- #define HAVE_aarch64_cmltv2df (TARGET_SIMD)
- #define HAVE_aarch64_cmlev2df (TARGET_SIMD)
- #define HAVE_aarch64_cmeqv2df (TARGET_SIMD)
- #define HAVE_aarch64_cmgev2df (TARGET_SIMD)
- #define HAVE_aarch64_cmgtv2df (TARGET_SIMD)
- #define HAVE_aarch64_cmlthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmlehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmeqhf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmgehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmgthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_cmltsf (TARGET_SIMD)
- #define HAVE_aarch64_cmlesf (TARGET_SIMD)
- #define HAVE_aarch64_cmeqsf (TARGET_SIMD)
- #define HAVE_aarch64_cmgesf (TARGET_SIMD)
- #define HAVE_aarch64_cmgtsf (TARGET_SIMD)
- #define HAVE_aarch64_cmltdf (TARGET_SIMD)
- #define HAVE_aarch64_cmledf (TARGET_SIMD)
- #define HAVE_aarch64_cmeqdf (TARGET_SIMD)
- #define HAVE_aarch64_cmgedf (TARGET_SIMD)
- #define HAVE_aarch64_cmgtdf (TARGET_SIMD)
- #define HAVE_aarch64_facltv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_faclev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facgev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facgtv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facltv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_faclev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facgev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facgtv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facltv2sf (TARGET_SIMD)
- #define HAVE_aarch64_faclev2sf (TARGET_SIMD)
- #define HAVE_aarch64_facgev2sf (TARGET_SIMD)
- #define HAVE_aarch64_facgtv2sf (TARGET_SIMD)
- #define HAVE_aarch64_facltv4sf (TARGET_SIMD)
- #define HAVE_aarch64_faclev4sf (TARGET_SIMD)
- #define HAVE_aarch64_facgev4sf (TARGET_SIMD)
- #define HAVE_aarch64_facgtv4sf (TARGET_SIMD)
- #define HAVE_aarch64_facltv2df (TARGET_SIMD)
- #define HAVE_aarch64_faclev2df (TARGET_SIMD)
- #define HAVE_aarch64_facgev2df (TARGET_SIMD)
- #define HAVE_aarch64_facgtv2df (TARGET_SIMD)
- #define HAVE_aarch64_faclthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_faclehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facgehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facgthf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_facltsf (TARGET_SIMD)
- #define HAVE_aarch64_faclesf (TARGET_SIMD)
- #define HAVE_aarch64_facgesf (TARGET_SIMD)
- #define HAVE_aarch64_facgtsf (TARGET_SIMD)
- #define HAVE_aarch64_facltdf (TARGET_SIMD)
- #define HAVE_aarch64_facledf (TARGET_SIMD)
- #define HAVE_aarch64_facgedf (TARGET_SIMD)
- #define HAVE_aarch64_facgtdf (TARGET_SIMD)
- #define HAVE_aarch64_addpv8qi (TARGET_SIMD)
- #define HAVE_aarch64_addpv4hi (TARGET_SIMD)
- #define HAVE_aarch64_addpv2si (TARGET_SIMD)
- #define HAVE_aarch64_addpdi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv4bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv4hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv2sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rv2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rdi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld2rdf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesoi_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_st2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesoi_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3v8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv4bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv4hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv2sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rv2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rdi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld3rdf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesci_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_st3v8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesci_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4v8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv4bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv4hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv2sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rv2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rdi (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld4rdf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_vec_load_lanesxi_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_st4v8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_vec_store_lanesxi_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_rev_reglistoi (TARGET_SIMD)
- #define HAVE_aarch64_rev_reglistci (TARGET_SIMD)
- #define HAVE_aarch64_rev_reglistxi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_di (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x3_df (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_di (TARGET_SIMD)
- #define HAVE_aarch64_ld1_x4_df (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v2si (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v4si (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v2di (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_v2df (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_di (TARGET_SIMD)
- #define HAVE_aarch64_st1_x2_df (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v2si (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v4si (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v2di (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_v2df (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_di (TARGET_SIMD)
- #define HAVE_aarch64_st1_x3_df (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v2si (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v4si (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v2di (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_v2df (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_di (TARGET_SIMD)
- #define HAVE_aarch64_st1_x4_df (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v4hi (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v8hi (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v2si (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v4si (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v2di (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v4hf (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v8hf (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v4bf (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v8bf (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v2sf (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v4sf (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1v2df (TARGET_SIMD)
- #define HAVE_aarch64_be_ld1di (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v4hi (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v8hi (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v2si (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v4si (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v2di (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v4hf (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v8hf (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v4bf (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v8bf (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v2sf (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v4sf (TARGET_SIMD)
- #define HAVE_aarch64_be_st1v2df (TARGET_SIMD)
- #define HAVE_aarch64_be_st1di (TARGET_SIMD)
- #define HAVE_aarch64_ld2v8qi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4hi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4hf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld2v2si_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld2v2sf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4bf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld2di_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld2df_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3v8qi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4hi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4hf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3v2si_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3v2sf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4bf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3di_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld3df_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4v8qi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4hi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4hf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4v2si_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4v2sf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4bf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4di_dreg (TARGET_SIMD)
- #define HAVE_aarch64_ld4df_dreg (TARGET_SIMD)
- #define HAVE_aarch64_tbl1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_tbl1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_tbl2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_tbl3v8qi (TARGET_SIMD)
- #define HAVE_aarch64_tbl3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_tbx4v8qi (TARGET_SIMD)
- #define HAVE_aarch64_tbx4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbl3v8qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbl3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbx3v8qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbx3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbl4v8qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbl4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbx4v8qi (TARGET_SIMD)
- #define HAVE_aarch64_qtbx4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_combinev16qi (TARGET_SIMD)
- #define HAVE_aarch64_zip1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_zip2v8qi (TARGET_SIMD)
- #define HAVE_aarch64_trn1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_trn2v8qi (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v8qi (TARGET_SIMD)
- #define HAVE_aarch64_zip1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_zip2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_trn1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_trn2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_zip1v4hi (TARGET_SIMD)
- #define HAVE_aarch64_zip2v4hi (TARGET_SIMD)
- #define HAVE_aarch64_trn1v4hi (TARGET_SIMD)
- #define HAVE_aarch64_trn2v4hi (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v4hi (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v4hi (TARGET_SIMD)
- #define HAVE_aarch64_zip1v8hi (TARGET_SIMD)
- #define HAVE_aarch64_zip2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_trn1v8hi (TARGET_SIMD)
- #define HAVE_aarch64_trn2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v8hi (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_zip1v2si (TARGET_SIMD)
- #define HAVE_aarch64_zip2v2si (TARGET_SIMD)
- #define HAVE_aarch64_trn1v2si (TARGET_SIMD)
- #define HAVE_aarch64_trn2v2si (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v2si (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v2si (TARGET_SIMD)
- #define HAVE_aarch64_zip1v4si (TARGET_SIMD)
- #define HAVE_aarch64_zip2v4si (TARGET_SIMD)
- #define HAVE_aarch64_trn1v4si (TARGET_SIMD)
- #define HAVE_aarch64_trn2v4si (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v4si (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v4si (TARGET_SIMD)
- #define HAVE_aarch64_zip1v2di (TARGET_SIMD)
- #define HAVE_aarch64_zip2v2di (TARGET_SIMD)
- #define HAVE_aarch64_trn1v2di (TARGET_SIMD)
- #define HAVE_aarch64_trn2v2di (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v2di (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v2di (TARGET_SIMD)
- #define HAVE_aarch64_zip1v4hf (TARGET_SIMD)
- #define HAVE_aarch64_zip2v4hf (TARGET_SIMD)
- #define HAVE_aarch64_trn1v4hf (TARGET_SIMD)
- #define HAVE_aarch64_trn2v4hf (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v4hf (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v4hf (TARGET_SIMD)
- #define HAVE_aarch64_zip1v8hf (TARGET_SIMD)
- #define HAVE_aarch64_zip2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_trn1v8hf (TARGET_SIMD)
- #define HAVE_aarch64_trn2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v8hf (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_zip1v4bf (TARGET_SIMD)
- #define HAVE_aarch64_zip2v4bf (TARGET_SIMD)
- #define HAVE_aarch64_trn1v4bf (TARGET_SIMD)
- #define HAVE_aarch64_trn2v4bf (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v4bf (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v4bf (TARGET_SIMD)
- #define HAVE_aarch64_zip1v8bf (TARGET_SIMD)
- #define HAVE_aarch64_zip2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_trn1v8bf (TARGET_SIMD)
- #define HAVE_aarch64_trn2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v8bf (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_zip1v2sf (TARGET_SIMD)
- #define HAVE_aarch64_zip2v2sf (TARGET_SIMD)
- #define HAVE_aarch64_trn1v2sf (TARGET_SIMD)
- #define HAVE_aarch64_trn2v2sf (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v2sf (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v2sf (TARGET_SIMD)
- #define HAVE_aarch64_zip1v4sf (TARGET_SIMD)
- #define HAVE_aarch64_zip2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_trn1v4sf (TARGET_SIMD)
- #define HAVE_aarch64_trn2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v4sf (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_zip1v2df (TARGET_SIMD)
- #define HAVE_aarch64_zip2v2df (TARGET_SIMD)
- #define HAVE_aarch64_trn1v2df (TARGET_SIMD)
- #define HAVE_aarch64_trn2v2df (TARGET_SIMD)
- #define HAVE_aarch64_uzp1v2df (TARGET_SIMD)
- #define HAVE_aarch64_uzp2v2df (TARGET_SIMD)
- #define HAVE_aarch64_extv8qi (TARGET_SIMD)
- #define HAVE_aarch64_extv16qi (TARGET_SIMD)
- #define HAVE_aarch64_extv4hi (TARGET_SIMD)
- #define HAVE_aarch64_extv8hi (TARGET_SIMD)
- #define HAVE_aarch64_extv2si (TARGET_SIMD)
- #define HAVE_aarch64_extv4si (TARGET_SIMD)
- #define HAVE_aarch64_extv2di (TARGET_SIMD)
- #define HAVE_aarch64_extv4hf (TARGET_SIMD)
- #define HAVE_aarch64_extv8hf (TARGET_SIMD)
- #define HAVE_aarch64_extv4bf (TARGET_SIMD)
- #define HAVE_aarch64_extv8bf (TARGET_SIMD)
- #define HAVE_aarch64_extv2sf (TARGET_SIMD)
- #define HAVE_aarch64_extv4sf (TARGET_SIMD)
- #define HAVE_aarch64_extv2df (TARGET_SIMD)
- #define HAVE_aarch64_rev64v8qi (TARGET_SIMD)
- #define HAVE_aarch64_rev32v8qi (TARGET_SIMD)
- #define HAVE_aarch64_rev16v8qi (TARGET_SIMD)
- #define HAVE_aarch64_rev64v16qi (TARGET_SIMD)
- #define HAVE_aarch64_rev32v16qi (TARGET_SIMD)
- #define HAVE_aarch64_rev16v16qi (TARGET_SIMD)
- #define HAVE_aarch64_rev64v4hi (TARGET_SIMD)
- #define HAVE_aarch64_rev32v4hi (TARGET_SIMD)
- #define HAVE_aarch64_rev16v4hi (TARGET_SIMD)
- #define HAVE_aarch64_rev64v8hi (TARGET_SIMD)
- #define HAVE_aarch64_rev32v8hi (TARGET_SIMD)
- #define HAVE_aarch64_rev16v8hi (TARGET_SIMD)
- #define HAVE_aarch64_rev64v2si (TARGET_SIMD)
- #define HAVE_aarch64_rev32v2si (TARGET_SIMD)
- #define HAVE_aarch64_rev16v2si (TARGET_SIMD)
- #define HAVE_aarch64_rev64v4si (TARGET_SIMD)
- #define HAVE_aarch64_rev32v4si (TARGET_SIMD)
- #define HAVE_aarch64_rev16v4si (TARGET_SIMD)
- #define HAVE_aarch64_rev64v2di (TARGET_SIMD)
- #define HAVE_aarch64_rev32v2di (TARGET_SIMD)
- #define HAVE_aarch64_rev16v2di (TARGET_SIMD)
- #define HAVE_aarch64_rev64v4hf (TARGET_SIMD)
- #define HAVE_aarch64_rev32v4hf (TARGET_SIMD)
- #define HAVE_aarch64_rev16v4hf (TARGET_SIMD)
- #define HAVE_aarch64_rev64v8hf (TARGET_SIMD)
- #define HAVE_aarch64_rev32v8hf (TARGET_SIMD)
- #define HAVE_aarch64_rev16v8hf (TARGET_SIMD)
- #define HAVE_aarch64_rev64v4bf (TARGET_SIMD)
- #define HAVE_aarch64_rev32v4bf (TARGET_SIMD)
- #define HAVE_aarch64_rev16v4bf (TARGET_SIMD)
- #define HAVE_aarch64_rev64v8bf (TARGET_SIMD)
- #define HAVE_aarch64_rev32v8bf (TARGET_SIMD)
- #define HAVE_aarch64_rev16v8bf (TARGET_SIMD)
- #define HAVE_aarch64_rev64v2sf (TARGET_SIMD)
- #define HAVE_aarch64_rev32v2sf (TARGET_SIMD)
- #define HAVE_aarch64_rev16v2sf (TARGET_SIMD)
- #define HAVE_aarch64_rev64v4sf (TARGET_SIMD)
- #define HAVE_aarch64_rev32v4sf (TARGET_SIMD)
- #define HAVE_aarch64_rev16v4sf (TARGET_SIMD)
- #define HAVE_aarch64_rev64v2df (TARGET_SIMD)
- #define HAVE_aarch64_rev32v2df (TARGET_SIMD)
- #define HAVE_aarch64_rev16v2df (TARGET_SIMD)
- #define HAVE_aarch64_st2v8qi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st2v4hi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st2v4hf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st2v2si_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st2v2sf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st2v4bf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st2di_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st2df_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3v8qi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3v4hi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3v4hf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3v2si_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3v2sf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3v4bf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3di_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st3df_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4v8qi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4v4hi_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4v4hf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4v2si_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4v2sf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4v4bf_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4di_dreg (TARGET_SIMD)
- #define HAVE_aarch64_st4df_dreg (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v16qi_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v8hi_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v4si_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v2di_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v8hf_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v4sf_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v2df_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v8bf_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v8qi_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v4hi_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v4bf_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v4hf_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v2si_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1v2sf_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1di_x2 (TARGET_SIMD)
- #define HAVE_aarch64_simd_ld1df_x2 (TARGET_SIMD)
- #define HAVE_aarch64_frecpev4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_frecpev8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_frecpev2sf (TARGET_SIMD)
- #define HAVE_aarch64_frecpev4sf (TARGET_SIMD)
- #define HAVE_aarch64_frecpev2df (TARGET_SIMD)
- #define HAVE_aarch64_frecpehf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_frecpesf (TARGET_SIMD)
- #define HAVE_aarch64_frecpedf (TARGET_SIMD)
- #define HAVE_aarch64_frecpxhf ((TARGET_SIMD) && (AARCH64_ISA_F16))
- #define HAVE_aarch64_frecpxsf (TARGET_SIMD)
- #define HAVE_aarch64_frecpxdf (TARGET_SIMD)
- #define HAVE_aarch64_frecpsv4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_frecpsv8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_frecpsv2sf (TARGET_SIMD)
- #define HAVE_aarch64_frecpsv4sf (TARGET_SIMD)
- #define HAVE_aarch64_frecpsv2df (TARGET_SIMD)
- #define HAVE_aarch64_frecpshf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_aarch64_frecpssf (TARGET_SIMD)
- #define HAVE_aarch64_frecpsdf (TARGET_SIMD)
- #define HAVE_aarch64_urecpev2si (TARGET_SIMD)
- #define HAVE_aarch64_urecpev4si (TARGET_SIMD)
- #define HAVE_aarch64_crypto_aesev16qi (TARGET_SIMD && TARGET_AES)
- #define HAVE_aarch64_crypto_aesdv16qi (TARGET_SIMD && TARGET_AES)
- #define HAVE_aarch64_crypto_aesmcv16qi (TARGET_SIMD && TARGET_AES)
- #define HAVE_aarch64_crypto_aesimcv16qi (TARGET_SIMD && TARGET_AES)
- #define HAVE_aarch64_crypto_sha1hsi (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha1hv4si (TARGET_SIMD && TARGET_SHA2 && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_be_crypto_sha1hv4si (TARGET_SIMD && TARGET_SHA2 && BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_crypto_sha1su1v4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha1cv4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha1mv4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha1pv4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha1su0v4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha256hv4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha256h2v4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha256su0v4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha256su1v4si (TARGET_SIMD && TARGET_SHA2)
- #define HAVE_aarch64_crypto_sha512hqv2di (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_aarch64_crypto_sha512h2qv2di (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_aarch64_crypto_sha512su0qv2di (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_aarch64_crypto_sha512su1qv2di (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_eor3qv16qi4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_eor3qv8hi4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_eor3qv4si4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_eor3qv2di4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_aarch64_rax1qv2di (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_aarch64_xarqv2di (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_bcaxqv16qi4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_bcaxqv8hi4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_bcaxqv4si4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_bcaxqv2di4 (TARGET_SIMD && TARGET_SHA3)
- #define HAVE_aarch64_sm3ss1qv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm3tt1aqv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm3tt1bqv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm3tt2aqv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm3tt2bqv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm3partw1qv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm3partw2qv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm4eqv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_sm4ekeyqv4si (TARGET_SIMD && TARGET_SM4)
- #define HAVE_aarch64_simd_fmlal_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlalq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlsl_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlslq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlal_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlalq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlsl_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlslq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlal_lane_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlsl_lane_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlal_lane_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlsl_lane_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlalq_laneq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlslq_laneq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlalq_laneq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlslq_laneq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlal_laneq_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlsl_laneq_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlal_laneq_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlsl_laneq_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlalq_lane_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlslq_lane_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlalq_lane_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_simd_fmlslq_lane_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_crypto_pmulldi (TARGET_SIMD && TARGET_AES)
- #define HAVE_aarch64_crypto_pmullv2di (TARGET_SIMD && TARGET_AES)
- #define HAVE_extendv8qiv8hi2 (TARGET_SIMD)
- #define HAVE_zero_extendv8qiv8hi2 (TARGET_SIMD)
- #define HAVE_extendv4hiv4si2 (TARGET_SIMD)
- #define HAVE_zero_extendv4hiv4si2 (TARGET_SIMD)
- #define HAVE_extendv2siv2di2 (TARGET_SIMD)
- #define HAVE_zero_extendv2siv2di2 (TARGET_SIMD)
- #define HAVE_truncv8hiv8qi2 (TARGET_SIMD)
- #define HAVE_truncv4siv4hi2 (TARGET_SIMD)
- #define HAVE_truncv2div2si2 (TARGET_SIMD)
- #define HAVE_aarch64_bfdotv2sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfdotv4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfdot_lanev2sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfdot_laneqv2sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfdot_lanev4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfdot_laneqv4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfmmlaqv4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfmlalbv4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfmlaltv4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfmlalb_lanev4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfmlalt_lanev4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfmlalb_lane_qv4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfmlalt_lane_qv4sf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_simd_smmlav16qi (TARGET_I8MM)
- #define HAVE_aarch64_simd_ummlav16qi (TARGET_I8MM)
- #define HAVE_aarch64_simd_usmmlav16qi (TARGET_I8MM)
- #define HAVE_aarch64_bfcvtnv4bf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfcvtn_qv8bf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfcvtn2v8bf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfcvtbf (TARGET_BF16_FP)
- #define HAVE_aarch64_vbfcvtv4bf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_vbfcvtv8bf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_vbfcvt_highv8bf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_bfcvtsf (TARGET_BF16_FP)
- #define HAVE_aarch64_compare_and_swapqi 1
- #define HAVE_aarch64_compare_and_swaphi 1
- #define HAVE_aarch64_compare_and_swapsi 1
- #define HAVE_aarch64_compare_and_swapdi 1
- #define HAVE_aarch64_compare_and_swapti 1
- #define HAVE_aarch64_compare_and_swapqi_lse (TARGET_LSE)
- #define HAVE_aarch64_compare_and_swaphi_lse (TARGET_LSE)
- #define HAVE_aarch64_compare_and_swapsi_lse (TARGET_LSE)
- #define HAVE_aarch64_compare_and_swapdi_lse (TARGET_LSE)
- #define HAVE_aarch64_compare_and_swapti_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_exchangeqi 1
- #define HAVE_aarch64_atomic_exchangehi 1
- #define HAVE_aarch64_atomic_exchangesi 1
- #define HAVE_aarch64_atomic_exchangedi 1
- #define HAVE_aarch64_atomic_exchangeqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_exchangehi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_exchangesi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_exchangedi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_addqi 1
- #define HAVE_aarch64_atomic_subqi 1
- #define HAVE_aarch64_atomic_orqi 1
- #define HAVE_aarch64_atomic_xorqi 1
- #define HAVE_aarch64_atomic_andqi 1
- #define HAVE_aarch64_atomic_addhi 1
- #define HAVE_aarch64_atomic_subhi 1
- #define HAVE_aarch64_atomic_orhi 1
- #define HAVE_aarch64_atomic_xorhi 1
- #define HAVE_aarch64_atomic_andhi 1
- #define HAVE_aarch64_atomic_addsi 1
- #define HAVE_aarch64_atomic_subsi 1
- #define HAVE_aarch64_atomic_orsi 1
- #define HAVE_aarch64_atomic_xorsi 1
- #define HAVE_aarch64_atomic_andsi 1
- #define HAVE_aarch64_atomic_adddi 1
- #define HAVE_aarch64_atomic_subdi 1
- #define HAVE_aarch64_atomic_ordi 1
- #define HAVE_aarch64_atomic_xordi 1
- #define HAVE_aarch64_atomic_anddi 1
- #define HAVE_aarch64_atomic_iorqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_bicqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_xorqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_addqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_iorhi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_bichi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_xorhi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_addhi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_iorsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_bicsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_xorsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_addsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_iordi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_bicdi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_xordi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_adddi_lse (TARGET_LSE)
- #define HAVE_atomic_nandqi 1
- #define HAVE_atomic_nandhi 1
- #define HAVE_atomic_nandsi 1
- #define HAVE_atomic_nanddi 1
- #define HAVE_aarch64_atomic_fetch_addqi 1
- #define HAVE_aarch64_atomic_fetch_subqi 1
- #define HAVE_aarch64_atomic_fetch_orqi 1
- #define HAVE_aarch64_atomic_fetch_xorqi 1
- #define HAVE_aarch64_atomic_fetch_andqi 1
- #define HAVE_aarch64_atomic_fetch_addhi 1
- #define HAVE_aarch64_atomic_fetch_subhi 1
- #define HAVE_aarch64_atomic_fetch_orhi 1
- #define HAVE_aarch64_atomic_fetch_xorhi 1
- #define HAVE_aarch64_atomic_fetch_andhi 1
- #define HAVE_aarch64_atomic_fetch_addsi 1
- #define HAVE_aarch64_atomic_fetch_subsi 1
- #define HAVE_aarch64_atomic_fetch_orsi 1
- #define HAVE_aarch64_atomic_fetch_xorsi 1
- #define HAVE_aarch64_atomic_fetch_andsi 1
- #define HAVE_aarch64_atomic_fetch_adddi 1
- #define HAVE_aarch64_atomic_fetch_subdi 1
- #define HAVE_aarch64_atomic_fetch_ordi 1
- #define HAVE_aarch64_atomic_fetch_xordi 1
- #define HAVE_aarch64_atomic_fetch_anddi 1
- #define HAVE_aarch64_atomic_fetch_iorqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_bicqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_xorqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_addqi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_iorhi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_bichi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_xorhi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_addhi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_iorsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_bicsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_xorsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_addsi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_iordi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_bicdi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_xordi_lse (TARGET_LSE)
- #define HAVE_aarch64_atomic_fetch_adddi_lse (TARGET_LSE)
- #define HAVE_atomic_fetch_nandqi 1
- #define HAVE_atomic_fetch_nandhi 1
- #define HAVE_atomic_fetch_nandsi 1
- #define HAVE_atomic_fetch_nanddi 1
- #define HAVE_aarch64_atomic_add_fetchqi 1
- #define HAVE_aarch64_atomic_sub_fetchqi 1
- #define HAVE_aarch64_atomic_or_fetchqi 1
- #define HAVE_aarch64_atomic_xor_fetchqi 1
- #define HAVE_aarch64_atomic_and_fetchqi 1
- #define HAVE_aarch64_atomic_add_fetchhi 1
- #define HAVE_aarch64_atomic_sub_fetchhi 1
- #define HAVE_aarch64_atomic_or_fetchhi 1
- #define HAVE_aarch64_atomic_xor_fetchhi 1
- #define HAVE_aarch64_atomic_and_fetchhi 1
- #define HAVE_aarch64_atomic_add_fetchsi 1
- #define HAVE_aarch64_atomic_sub_fetchsi 1
- #define HAVE_aarch64_atomic_or_fetchsi 1
- #define HAVE_aarch64_atomic_xor_fetchsi 1
- #define HAVE_aarch64_atomic_and_fetchsi 1
- #define HAVE_aarch64_atomic_add_fetchdi 1
- #define HAVE_aarch64_atomic_sub_fetchdi 1
- #define HAVE_aarch64_atomic_or_fetchdi 1
- #define HAVE_aarch64_atomic_xor_fetchdi 1
- #define HAVE_aarch64_atomic_and_fetchdi 1
- #define HAVE_atomic_nand_fetchqi 1
- #define HAVE_atomic_nand_fetchhi 1
- #define HAVE_atomic_nand_fetchsi 1
- #define HAVE_atomic_nand_fetchdi 1
- #define HAVE_atomic_loadqi 1
- #define HAVE_atomic_loadhi 1
- #define HAVE_atomic_loadsi 1
- #define HAVE_atomic_loaddi 1
- #define HAVE_atomic_storeqi 1
- #define HAVE_atomic_storehi 1
- #define HAVE_atomic_storesi 1
- #define HAVE_atomic_storedi 1
- #define HAVE_aarch64_load_exclusiveqi 1
- #define HAVE_aarch64_load_exclusivehi 1
- #define HAVE_aarch64_load_exclusivesi 1
- #define HAVE_aarch64_load_exclusivedi 1
- #define HAVE_aarch64_load_exclusive_pair 1
- #define HAVE_aarch64_store_exclusiveqi 1
- #define HAVE_aarch64_store_exclusivehi 1
- #define HAVE_aarch64_store_exclusivesi 1
- #define HAVE_aarch64_store_exclusivedi 1
- #define HAVE_aarch64_store_exclusive_pair 1
- #define HAVE_aarch64_pred_movvnx16qi (TARGET_SVE \
- && (register_operand (operands[0], VNx16QImode) \
- || register_operand (operands[2], VNx16QImode)))
- #define HAVE_aarch64_pred_movvnx8qi (TARGET_SVE \
- && (register_operand (operands[0], VNx8QImode) \
- || register_operand (operands[2], VNx8QImode)))
- #define HAVE_aarch64_pred_movvnx4qi (TARGET_SVE \
- && (register_operand (operands[0], VNx4QImode) \
- || register_operand (operands[2], VNx4QImode)))
- #define HAVE_aarch64_pred_movvnx2qi (TARGET_SVE \
- && (register_operand (operands[0], VNx2QImode) \
- || register_operand (operands[2], VNx2QImode)))
- #define HAVE_aarch64_pred_movvnx8hi (TARGET_SVE \
- && (register_operand (operands[0], VNx8HImode) \
- || register_operand (operands[2], VNx8HImode)))
- #define HAVE_aarch64_pred_movvnx4hi (TARGET_SVE \
- && (register_operand (operands[0], VNx4HImode) \
- || register_operand (operands[2], VNx4HImode)))
- #define HAVE_aarch64_pred_movvnx2hi (TARGET_SVE \
- && (register_operand (operands[0], VNx2HImode) \
- || register_operand (operands[2], VNx2HImode)))
- #define HAVE_aarch64_pred_movvnx8hf (TARGET_SVE \
- && (register_operand (operands[0], VNx8HFmode) \
- || register_operand (operands[2], VNx8HFmode)))
- #define HAVE_aarch64_pred_movvnx4hf (TARGET_SVE \
- && (register_operand (operands[0], VNx4HFmode) \
- || register_operand (operands[2], VNx4HFmode)))
- #define HAVE_aarch64_pred_movvnx2hf (TARGET_SVE \
- && (register_operand (operands[0], VNx2HFmode) \
- || register_operand (operands[2], VNx2HFmode)))
- #define HAVE_aarch64_pred_movvnx8bf (TARGET_SVE \
- && (register_operand (operands[0], VNx8BFmode) \
- || register_operand (operands[2], VNx8BFmode)))
- #define HAVE_aarch64_pred_movvnx4si (TARGET_SVE \
- && (register_operand (operands[0], VNx4SImode) \
- || register_operand (operands[2], VNx4SImode)))
- #define HAVE_aarch64_pred_movvnx2si (TARGET_SVE \
- && (register_operand (operands[0], VNx2SImode) \
- || register_operand (operands[2], VNx2SImode)))
- #define HAVE_aarch64_pred_movvnx4sf (TARGET_SVE \
- && (register_operand (operands[0], VNx4SFmode) \
- || register_operand (operands[2], VNx4SFmode)))
- #define HAVE_aarch64_pred_movvnx2sf (TARGET_SVE \
- && (register_operand (operands[0], VNx2SFmode) \
- || register_operand (operands[2], VNx2SFmode)))
- #define HAVE_aarch64_pred_movvnx2di (TARGET_SVE \
- && (register_operand (operands[0], VNx2DImode) \
- || register_operand (operands[2], VNx2DImode)))
- #define HAVE_aarch64_pred_movvnx2df (TARGET_SVE \
- && (register_operand (operands[0], VNx2DFmode) \
- || register_operand (operands[2], VNx2DFmode)))
- #define HAVE_aarch64_pred_movvnx32qi (TARGET_SVE \
- && (register_operand (operands[0], VNx32QImode) \
- || register_operand (operands[2], VNx32QImode)))
- #define HAVE_aarch64_pred_movvnx16hi (TARGET_SVE \
- && (register_operand (operands[0], VNx16HImode) \
- || register_operand (operands[2], VNx16HImode)))
- #define HAVE_aarch64_pred_movvnx8si (TARGET_SVE \
- && (register_operand (operands[0], VNx8SImode) \
- || register_operand (operands[2], VNx8SImode)))
- #define HAVE_aarch64_pred_movvnx4di (TARGET_SVE \
- && (register_operand (operands[0], VNx4DImode) \
- || register_operand (operands[2], VNx4DImode)))
- #define HAVE_aarch64_pred_movvnx16bf (TARGET_SVE \
- && (register_operand (operands[0], VNx16BFmode) \
- || register_operand (operands[2], VNx16BFmode)))
- #define HAVE_aarch64_pred_movvnx16hf (TARGET_SVE \
- && (register_operand (operands[0], VNx16HFmode) \
- || register_operand (operands[2], VNx16HFmode)))
- #define HAVE_aarch64_pred_movvnx8sf (TARGET_SVE \
- && (register_operand (operands[0], VNx8SFmode) \
- || register_operand (operands[2], VNx8SFmode)))
- #define HAVE_aarch64_pred_movvnx4df (TARGET_SVE \
- && (register_operand (operands[0], VNx4DFmode) \
- || register_operand (operands[2], VNx4DFmode)))
- #define HAVE_aarch64_pred_movvnx48qi (TARGET_SVE \
- && (register_operand (operands[0], VNx48QImode) \
- || register_operand (operands[2], VNx48QImode)))
- #define HAVE_aarch64_pred_movvnx24hi (TARGET_SVE \
- && (register_operand (operands[0], VNx24HImode) \
- || register_operand (operands[2], VNx24HImode)))
- #define HAVE_aarch64_pred_movvnx12si (TARGET_SVE \
- && (register_operand (operands[0], VNx12SImode) \
- || register_operand (operands[2], VNx12SImode)))
- #define HAVE_aarch64_pred_movvnx6di (TARGET_SVE \
- && (register_operand (operands[0], VNx6DImode) \
- || register_operand (operands[2], VNx6DImode)))
- #define HAVE_aarch64_pred_movvnx24bf (TARGET_SVE \
- && (register_operand (operands[0], VNx24BFmode) \
- || register_operand (operands[2], VNx24BFmode)))
- #define HAVE_aarch64_pred_movvnx24hf (TARGET_SVE \
- && (register_operand (operands[0], VNx24HFmode) \
- || register_operand (operands[2], VNx24HFmode)))
- #define HAVE_aarch64_pred_movvnx12sf (TARGET_SVE \
- && (register_operand (operands[0], VNx12SFmode) \
- || register_operand (operands[2], VNx12SFmode)))
- #define HAVE_aarch64_pred_movvnx6df (TARGET_SVE \
- && (register_operand (operands[0], VNx6DFmode) \
- || register_operand (operands[2], VNx6DFmode)))
- #define HAVE_aarch64_pred_movvnx64qi (TARGET_SVE \
- && (register_operand (operands[0], VNx64QImode) \
- || register_operand (operands[2], VNx64QImode)))
- #define HAVE_aarch64_pred_movvnx32hi (TARGET_SVE \
- && (register_operand (operands[0], VNx32HImode) \
- || register_operand (operands[2], VNx32HImode)))
- #define HAVE_aarch64_pred_movvnx16si (TARGET_SVE \
- && (register_operand (operands[0], VNx16SImode) \
- || register_operand (operands[2], VNx16SImode)))
- #define HAVE_aarch64_pred_movvnx8di (TARGET_SVE \
- && (register_operand (operands[0], VNx8DImode) \
- || register_operand (operands[2], VNx8DImode)))
- #define HAVE_aarch64_pred_movvnx32bf (TARGET_SVE \
- && (register_operand (operands[0], VNx32BFmode) \
- || register_operand (operands[2], VNx32BFmode)))
- #define HAVE_aarch64_pred_movvnx32hf (TARGET_SVE \
- && (register_operand (operands[0], VNx32HFmode) \
- || register_operand (operands[2], VNx32HFmode)))
- #define HAVE_aarch64_pred_movvnx16sf (TARGET_SVE \
- && (register_operand (operands[0], VNx16SFmode) \
- || register_operand (operands[2], VNx16SFmode)))
- #define HAVE_aarch64_pred_movvnx8df (TARGET_SVE \
- && (register_operand (operands[0], VNx8DFmode) \
- || register_operand (operands[2], VNx8DFmode)))
- #define HAVE_aarch64_wrffr (TARGET_SVE)
- #define HAVE_aarch64_update_ffr_for_load (TARGET_SVE)
- #define HAVE_aarch64_copy_ffr_to_ffrt (TARGET_SVE)
- #define HAVE_aarch64_rdffr (TARGET_SVE)
- #define HAVE_aarch64_rdffr_z (TARGET_SVE)
- #define HAVE_aarch64_update_ffrt (TARGET_SVE)
- #define HAVE_maskloadvnx16qivnx16bi (TARGET_SVE)
- #define HAVE_maskloadvnx8qivnx8bi (TARGET_SVE)
- #define HAVE_maskloadvnx4qivnx4bi (TARGET_SVE)
- #define HAVE_maskloadvnx2qivnx2bi (TARGET_SVE)
- #define HAVE_maskloadvnx8hivnx8bi (TARGET_SVE)
- #define HAVE_maskloadvnx4hivnx4bi (TARGET_SVE)
- #define HAVE_maskloadvnx2hivnx2bi (TARGET_SVE)
- #define HAVE_maskloadvnx8hfvnx8bi (TARGET_SVE)
- #define HAVE_maskloadvnx4hfvnx4bi (TARGET_SVE)
- #define HAVE_maskloadvnx2hfvnx2bi (TARGET_SVE)
- #define HAVE_maskloadvnx8bfvnx8bi (TARGET_SVE)
- #define HAVE_maskloadvnx4sivnx4bi (TARGET_SVE)
- #define HAVE_maskloadvnx2sivnx2bi (TARGET_SVE)
- #define HAVE_maskloadvnx4sfvnx4bi (TARGET_SVE)
- #define HAVE_maskloadvnx2sfvnx2bi (TARGET_SVE)
- #define HAVE_maskloadvnx2divnx2bi (TARGET_SVE)
- #define HAVE_maskloadvnx2dfvnx2bi (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx32qivnx16qi (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx16hivnx8hi (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx8sivnx4si (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx4divnx2di (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx16bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx16hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx8sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx4dfvnx2df (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx48qivnx16qi (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx24hivnx8hi (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx12sivnx4si (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx6divnx2di (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx24bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx24hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx12sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx6dfvnx2df (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx64qivnx16qi (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx32hivnx8hi (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx16sivnx4si (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx8divnx2di (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx32bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx32hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx16sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_mask_load_lanesvnx8dfvnx2df (TARGET_SVE)
- #define HAVE_aarch64_load_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_load_zero_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_load_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_load_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_load_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_load_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_load_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_load_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_load_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_load_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_load_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_load_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_load_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_load_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_load_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_load_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_load_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_load_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_load_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_load_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_ldff1vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_ldff1vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_ldff1vnx4si (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx4si (TARGET_SVE)
- #define HAVE_aarch64_ldff1vnx2di (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx2di (TARGET_SVE)
- #define HAVE_aarch64_ldff1vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_ldff1vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_ldff1vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_ldff1vnx2df (TARGET_SVE)
- #define HAVE_aarch64_ldnf1vnx2df (TARGET_SVE)
- #define HAVE_aarch64_ldff1_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx8hivnx8qi (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_ldff1_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_ldff1_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_ldff1_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_ldff1_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_ldff1_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_ldff1_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_ldff1_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_ldff1_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_ldff1_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_ldnf1_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_ldff1_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_ldnf1_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_ldnt1vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_ldnt1vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_ldnt1vnx4si (TARGET_SVE)
- #define HAVE_aarch64_ldnt1vnx2di (TARGET_SVE)
- #define HAVE_aarch64_ldnt1vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_ldnt1vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_ldnt1vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_ldnt1vnx2df (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx4qivnx4si (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx4hivnx4si (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx4hfvnx4si (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx4sivnx4si (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx4sfvnx4si (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx2qivnx2di (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx2hivnx2di (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx2hfvnx2di (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx2sivnx2di (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx2sfvnx2di (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx2divnx2di (TARGET_SVE)
- #define HAVE_mask_gather_loadvnx2dfvnx2di (TARGET_SVE)
- #define HAVE_aarch64_gather_load_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx4hivnx4qi (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_aarch64_gather_load_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx4sivnx4qi (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_gather_load_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx4sivnx4hi (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_gather_load_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx2hivnx2qi (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_aarch64_gather_load_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx2sivnx2qi (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_aarch64_gather_load_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx2divnx2qi (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_gather_load_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx2sivnx2hi (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_aarch64_gather_load_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx2divnx2hi (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_gather_load_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_gather_load_zero_extendvnx2divnx2si (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_ldff1_gathervnx4si (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gathervnx4sf (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gathervnx2di (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gathervnx2df (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_extendvnx4sivnx4qi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_zero_extendvnx4sivnx4qi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_extendvnx4sivnx4hi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_zero_extendvnx4sivnx4hi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_extendvnx2divnx2qi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_zero_extendvnx2divnx2qi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_extendvnx2divnx2hi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_zero_extendvnx2divnx2hi (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_extendvnx2divnx2si (TARGET_SVE)
- #define HAVE_aarch64_ldff1_gather_zero_extendvnx2divnx2si (TARGET_SVE)
- #define HAVE_aarch64_sve_prefetchvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_prefetchvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_prefetchvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_prefetchvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx16qivnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx8hivnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx4sivnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx2divnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx16qivnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx8hivnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx4sivnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_gather_prefetchvnx2divnx2di (TARGET_SVE)
- #define HAVE_maskstorevnx16qivnx16bi (TARGET_SVE)
- #define HAVE_maskstorevnx8qivnx8bi (TARGET_SVE)
- #define HAVE_maskstorevnx4qivnx4bi (TARGET_SVE)
- #define HAVE_maskstorevnx2qivnx2bi (TARGET_SVE)
- #define HAVE_maskstorevnx8hivnx8bi (TARGET_SVE)
- #define HAVE_maskstorevnx4hivnx4bi (TARGET_SVE)
- #define HAVE_maskstorevnx2hivnx2bi (TARGET_SVE)
- #define HAVE_maskstorevnx8hfvnx8bi (TARGET_SVE)
- #define HAVE_maskstorevnx4hfvnx4bi (TARGET_SVE)
- #define HAVE_maskstorevnx2hfvnx2bi (TARGET_SVE)
- #define HAVE_maskstorevnx8bfvnx8bi (TARGET_SVE)
- #define HAVE_maskstorevnx4sivnx4bi (TARGET_SVE)
- #define HAVE_maskstorevnx2sivnx2bi (TARGET_SVE)
- #define HAVE_maskstorevnx4sfvnx4bi (TARGET_SVE)
- #define HAVE_maskstorevnx2sfvnx2bi (TARGET_SVE)
- #define HAVE_maskstorevnx2divnx2bi (TARGET_SVE)
- #define HAVE_maskstorevnx2dfvnx2bi (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx32qivnx16qi (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx16hivnx8hi (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx8sivnx4si (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx4divnx2di (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx16bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx16hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx8sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx4dfvnx2df (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx48qivnx16qi (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx24hivnx8hi (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx12sivnx4si (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx6divnx2di (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx24bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx24hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx12sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx6dfvnx2df (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx64qivnx16qi (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx32hivnx8hi (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx16sivnx4si (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx8divnx2di (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx32bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx32hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx16sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_mask_store_lanesvnx8dfvnx2df (TARGET_SVE)
- #define HAVE_aarch64_store_truncvnx8qivnx8hi (TARGET_SVE)
- #define HAVE_aarch64_store_truncvnx4qivnx4si (TARGET_SVE)
- #define HAVE_aarch64_store_truncvnx4hivnx4si (TARGET_SVE)
- #define HAVE_aarch64_store_truncvnx2qivnx2di (TARGET_SVE)
- #define HAVE_aarch64_store_truncvnx2hivnx2di (TARGET_SVE)
- #define HAVE_aarch64_store_truncvnx2sivnx2di (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx4si (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx2di (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_stnt1vnx2df (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx4qivnx4si (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx4hivnx4si (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx4hfvnx4si (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx4sivnx4si (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx4sfvnx4si (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx2qivnx2di (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx2hivnx2di (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx2hfvnx2di (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx2sivnx2di (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx2sfvnx2di (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx2divnx2di (TARGET_SVE)
- #define HAVE_mask_scatter_storevnx2dfvnx2di (TARGET_SVE)
- #define HAVE_aarch64_scatter_store_truncvnx4qivnx4si (TARGET_SVE)
- #define HAVE_aarch64_scatter_store_truncvnx4hivnx4si (TARGET_SVE)
- #define HAVE_aarch64_scatter_store_truncvnx2qivnx2di (TARGET_SVE)
- #define HAVE_aarch64_scatter_store_truncvnx2hivnx2di (TARGET_SVE)
- #define HAVE_aarch64_scatter_store_truncvnx2sivnx2di (TARGET_SVE)
- #define HAVE_aarch64_vec_duplicate_vqvnx16qi_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx8hi_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx4si_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx2di_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx8bf_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx8hf_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx4sf_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx2df_le (TARGET_SVE && !BYTES_BIG_ENDIAN)
- #define HAVE_aarch64_vec_duplicate_vqvnx16qi_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V16QImode) - 1))
- #define HAVE_aarch64_vec_duplicate_vqvnx8hi_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V8HImode) - 1))
- #define HAVE_aarch64_vec_duplicate_vqvnx4si_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V4SImode) - 1))
- #define HAVE_aarch64_vec_duplicate_vqvnx2di_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V2DImode) - 1))
- #define HAVE_aarch64_vec_duplicate_vqvnx8bf_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V8BFmode) - 1))
- #define HAVE_aarch64_vec_duplicate_vqvnx8hf_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V8HFmode) - 1))
- #define HAVE_aarch64_vec_duplicate_vqvnx4sf_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V4SFmode) - 1))
- #define HAVE_aarch64_vec_duplicate_vqvnx2df_be (TARGET_SVE \
- && BYTES_BIG_ENDIAN \
- && known_eq (INTVAL (XVECEXP (operands[2], 0, 0)), \
- GET_MODE_NUNITS (V2DFmode) - 1))
- #define HAVE_sve_ld1rvnx16qi (TARGET_SVE)
- #define HAVE_sve_ld1rvnx8qi (TARGET_SVE)
- #define HAVE_sve_ld1rvnx4qi (TARGET_SVE)
- #define HAVE_sve_ld1rvnx2qi (TARGET_SVE)
- #define HAVE_sve_ld1rvnx8hi (TARGET_SVE)
- #define HAVE_sve_ld1rvnx4hi (TARGET_SVE)
- #define HAVE_sve_ld1rvnx2hi (TARGET_SVE)
- #define HAVE_sve_ld1rvnx8hf (TARGET_SVE)
- #define HAVE_sve_ld1rvnx4hf (TARGET_SVE)
- #define HAVE_sve_ld1rvnx2hf (TARGET_SVE)
- #define HAVE_sve_ld1rvnx8bf (TARGET_SVE)
- #define HAVE_sve_ld1rvnx4si (TARGET_SVE)
- #define HAVE_sve_ld1rvnx2si (TARGET_SVE)
- #define HAVE_sve_ld1rvnx4sf (TARGET_SVE)
- #define HAVE_sve_ld1rvnx2sf (TARGET_SVE)
- #define HAVE_sve_ld1rvnx2di (TARGET_SVE)
- #define HAVE_sve_ld1rvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rqvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_ld1rovnx16qi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_ld1rovnx8hi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_ld1rovnx4si (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_ld1rovnx2di (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_ld1rovnx8bf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_ld1rovnx8hf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_ld1rovnx4sf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_ld1rovnx2df (TARGET_SVE_F64MM)
- #define HAVE_vec_shl_insert_vnx16qi (TARGET_SVE)
- #define HAVE_vec_shl_insert_vnx8hi (TARGET_SVE)
- #define HAVE_vec_shl_insert_vnx4si (TARGET_SVE)
- #define HAVE_vec_shl_insert_vnx2di (TARGET_SVE)
- #define HAVE_vec_shl_insert_vnx8bf (TARGET_SVE)
- #define HAVE_vec_shl_insert_vnx8hf (TARGET_SVE)
- #define HAVE_vec_shl_insert_vnx4sf (TARGET_SVE)
- #define HAVE_vec_shl_insert_vnx2df (TARGET_SVE)
- #define HAVE_vec_seriesvnx16qi (TARGET_SVE)
- #define HAVE_vec_seriesvnx8qi (TARGET_SVE)
- #define HAVE_vec_seriesvnx4qi (TARGET_SVE)
- #define HAVE_vec_seriesvnx2qi (TARGET_SVE)
- #define HAVE_vec_seriesvnx8hi (TARGET_SVE)
- #define HAVE_vec_seriesvnx4hi (TARGET_SVE)
- #define HAVE_vec_seriesvnx2hi (TARGET_SVE)
- #define HAVE_vec_seriesvnx4si (TARGET_SVE)
- #define HAVE_vec_seriesvnx2si (TARGET_SVE)
- #define HAVE_vec_seriesvnx2di (TARGET_SVE)
- #define HAVE_extract_after_last_vnx16qi (TARGET_SVE)
- #define HAVE_extract_last_vnx16qi (TARGET_SVE)
- #define HAVE_extract_after_last_vnx8hi (TARGET_SVE)
- #define HAVE_extract_last_vnx8hi (TARGET_SVE)
- #define HAVE_extract_after_last_vnx4si (TARGET_SVE)
- #define HAVE_extract_last_vnx4si (TARGET_SVE)
- #define HAVE_extract_after_last_vnx2di (TARGET_SVE)
- #define HAVE_extract_last_vnx2di (TARGET_SVE)
- #define HAVE_extract_after_last_vnx8bf (TARGET_SVE)
- #define HAVE_extract_last_vnx8bf (TARGET_SVE)
- #define HAVE_extract_after_last_vnx8hf (TARGET_SVE)
- #define HAVE_extract_last_vnx8hf (TARGET_SVE)
- #define HAVE_extract_after_last_vnx4sf (TARGET_SVE)
- #define HAVE_extract_last_vnx4sf (TARGET_SVE)
- #define HAVE_extract_after_last_vnx2df (TARGET_SVE)
- #define HAVE_extract_last_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_absvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_negvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_one_cmplvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_clrsbvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_clzvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_popcountvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_qabsvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_qnegvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_absvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_negvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_one_cmplvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_clrsbvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_clzvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_popcountvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_qabsvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_qnegvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_absvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_negvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_one_cmplvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_clrsbvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_clzvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_popcountvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_qabsvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_qnegvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_absvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_negvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_one_cmplvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_clrsbvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_clzvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_popcountvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_qabsvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_qnegvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_rbitvnx16qi (TARGET_SVE && 8 >= 8)
- #define HAVE_aarch64_pred_rbitvnx8hi (TARGET_SVE && 16 >= 8)
- #define HAVE_aarch64_pred_revbvnx8hi (TARGET_SVE && 16 >= 16)
- #define HAVE_aarch64_pred_rbitvnx4si (TARGET_SVE && 32 >= 8)
- #define HAVE_aarch64_pred_revbvnx4si (TARGET_SVE && 32 >= 16)
- #define HAVE_aarch64_pred_revhvnx4si (TARGET_SVE && 32 >= 32)
- #define HAVE_aarch64_pred_rbitvnx2di (TARGET_SVE && 64 >= 8)
- #define HAVE_aarch64_pred_revbvnx2di (TARGET_SVE && 64 >= 16)
- #define HAVE_aarch64_pred_revhvnx2di (TARGET_SVE && 64 >= 32)
- #define HAVE_aarch64_pred_revwvnx2di (TARGET_SVE && 64 >= 64)
- #define HAVE_cond_rbitvnx16qi (TARGET_SVE && 8 >= 8)
- #define HAVE_cond_rbitvnx8hi (TARGET_SVE && 16 >= 8)
- #define HAVE_cond_revbvnx8hi (TARGET_SVE && 16 >= 16)
- #define HAVE_cond_rbitvnx4si (TARGET_SVE && 32 >= 8)
- #define HAVE_cond_revbvnx4si (TARGET_SVE && 32 >= 16)
- #define HAVE_cond_revhvnx4si (TARGET_SVE && 32 >= 32)
- #define HAVE_cond_rbitvnx2di (TARGET_SVE && 64 >= 8)
- #define HAVE_cond_revbvnx2di (TARGET_SVE && 64 >= 16)
- #define HAVE_cond_revhvnx2di (TARGET_SVE && 64 >= 32)
- #define HAVE_cond_revwvnx2di (TARGET_SVE && 64 >= 64)
- #define HAVE_aarch64_pred_sxtvnx8hivnx8qi (TARGET_SVE \
- && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_pred_sxtvnx4sivnx4qi (TARGET_SVE \
- && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_pred_sxtvnx4sivnx4hi (TARGET_SVE \
- && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_pred_sxtvnx2divnx2qi (TARGET_SVE \
- && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_pred_sxtvnx2divnx2hi (TARGET_SVE \
- && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_pred_sxtvnx2divnx2si (TARGET_SVE \
- && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_cond_sxtvnx8hivnx8qi (TARGET_SVE \
- && (~0x81 & 0x81) == 0)
- #define HAVE_aarch64_cond_sxtvnx4sivnx4qi (TARGET_SVE \
- && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_cond_sxtvnx4sivnx4hi (TARGET_SVE \
- && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_cond_sxtvnx2divnx2qi (TARGET_SVE \
- && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_cond_sxtvnx2divnx2hi (TARGET_SVE \
- && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_cond_sxtvnx2divnx2si (TARGET_SVE \
- && (~0x27 & 0x24) == 0)
- #define HAVE_truncvnx8hivnx8qi2 (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_truncvnx4hivnx4qi2 (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_truncvnx2hivnx2qi2 (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_truncvnx4sivnx4qi2 (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_truncvnx4sivnx4hi2 (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_truncvnx2sivnx2qi2 (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_truncvnx2sivnx2hi2 (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_truncvnx2divnx2qi2 (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_truncvnx2divnx2hi2 (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_truncvnx2divnx2si2 (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_sve_fexpavnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_fexpavnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_fexpavnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_frecpevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_frsqrtevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_frecpevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_frsqrtevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_frecpevnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_frsqrtevnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_absvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_negvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_frecpxvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_roundvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_nearbyintvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_floorvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_frintnvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_ceilvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_rintvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_btruncvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_sqrtvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_absvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_negvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_frecpxvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_roundvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_nearbyintvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_floorvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_frintnvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_ceilvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_rintvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_btruncvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_sqrtvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_absvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_negvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_frecpxvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_roundvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_nearbyintvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_floorvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_frintnvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_ceilvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_rintvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_btruncvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_sqrtvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_mulvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_smaxvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_sminvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_umaxvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_uminvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_mulvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_smaxvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_sminvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_umaxvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_uminvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_mulvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_smaxvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_sminvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_umaxvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_uminvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_mulvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_smaxvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_sminvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_umaxvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_uminvnx2di (TARGET_SVE)
- #define HAVE_addvnx16qi3 (TARGET_SVE)
- #define HAVE_addvnx8qi3 (TARGET_SVE)
- #define HAVE_addvnx4qi3 (TARGET_SVE)
- #define HAVE_addvnx2qi3 (TARGET_SVE)
- #define HAVE_addvnx8hi3 (TARGET_SVE)
- #define HAVE_addvnx4hi3 (TARGET_SVE)
- #define HAVE_addvnx2hi3 (TARGET_SVE)
- #define HAVE_addvnx4si3 (TARGET_SVE)
- #define HAVE_addvnx2si3 (TARGET_SVE)
- #define HAVE_addvnx2di3 (TARGET_SVE)
- #define HAVE_subvnx16qi3 (TARGET_SVE)
- #define HAVE_subvnx8hi3 (TARGET_SVE)
- #define HAVE_subvnx4si3 (TARGET_SVE)
- #define HAVE_subvnx2di3 (TARGET_SVE)
- #define HAVE_aarch64_adrvnx4si (TARGET_SVE)
- #define HAVE_aarch64_adrvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_sabdvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_uabdvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_sabdvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_uabdvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_sabdvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_uabdvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_sabdvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_uabdvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_ssaddvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_sssubvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_ssaddvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_sssubvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_ssaddvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_sssubvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_ssaddvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_sssubvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_usaddvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_ussubvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_usaddvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_ussubvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_usaddvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_ussubvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_usaddvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_ussubvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_smulhvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_umulhvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_smulhvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_umulhvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_smulhvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_umulhvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_smulhvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_umulhvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_divvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_udivvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_divvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_udivvnx2di (TARGET_SVE)
- #define HAVE_andvnx16qi3 (TARGET_SVE)
- #define HAVE_iorvnx16qi3 (TARGET_SVE)
- #define HAVE_xorvnx16qi3 (TARGET_SVE)
- #define HAVE_andvnx8hi3 (TARGET_SVE)
- #define HAVE_iorvnx8hi3 (TARGET_SVE)
- #define HAVE_xorvnx8hi3 (TARGET_SVE)
- #define HAVE_andvnx4si3 (TARGET_SVE)
- #define HAVE_iorvnx4si3 (TARGET_SVE)
- #define HAVE_xorvnx4si3 (TARGET_SVE)
- #define HAVE_andvnx2di3 (TARGET_SVE)
- #define HAVE_iorvnx2di3 (TARGET_SVE)
- #define HAVE_xorvnx2di3 (TARGET_SVE)
- #define HAVE_aarch64_pred_ashlvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_ashrvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_lshrvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_ashlvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_ashrvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_lshrvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_ashlvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_ashrvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_lshrvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_ashlvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_ashrvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_lshrvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_lslvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_asrvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_lsrvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_lslvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_asrvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_lsrvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_lslvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_asrvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_lsrvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_ftsmulvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_ftsselvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_ftsmulvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_ftsselvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_ftsmulvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_ftsselvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fscalevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fscalevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fscalevnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_frecpsvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_frsqrtsvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_frecpsvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_frsqrtsvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_frecpsvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_frsqrtsvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_divvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_mulxvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_divvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_mulxvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_divvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_mulxvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_addvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_addvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_addvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_cadd90vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_cadd270vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_cadd90vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_cadd270vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_cadd90vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_cadd270vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_subvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_subvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_subvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_mulvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_mulvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_mulvnx2df (TARGET_SVE)
- #define HAVE_aarch64_mul_lane_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_mul_lane_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_mul_lane_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_smax_nanvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_smaxvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_smin_nanvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_sminvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_smax_nanvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_smaxvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_smin_nanvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_sminvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_smax_nanvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_smaxvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_smin_nanvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_sminvnx2df (TARGET_SVE)
- #define HAVE_andvnx16bi3 (TARGET_SVE)
- #define HAVE_andvnx8bi3 (TARGET_SVE)
- #define HAVE_andvnx4bi3 (TARGET_SVE)
- #define HAVE_andvnx2bi3 (TARGET_SVE)
- #define HAVE_aarch64_pred_andvnx16bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_iorvnx16bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_xorvnx16bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_andvnx8bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_iorvnx8bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_xorvnx8bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_andvnx4bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_iorvnx4bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_xorvnx4bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_andvnx2bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_iorvnx2bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_xorvnx2bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_bicvnx16bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_ornvnx16bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_bicvnx8bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_ornvnx8bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_bicvnx4bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_ornvnx4bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_bicvnx2bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_ornvnx2bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_norvnx16bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_nandvnx16bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_norvnx8bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_nandvnx8bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_norvnx4bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_nandvnx4bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_norvnx2bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_nandvnx2bi_z (TARGET_SVE)
- #define HAVE_aarch64_pred_fmavnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_fmavnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_fmavnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_fmavnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmavnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmavnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmavnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmavnx2di (TARGET_SVE)
- #define HAVE_sdot_prodvnx16qi (TARGET_SVE)
- #define HAVE_udot_prodvnx16qi (TARGET_SVE)
- #define HAVE_sdot_prodvnx8hi (TARGET_SVE)
- #define HAVE_udot_prodvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sdot_prod_lanevnx16qi (TARGET_SVE)
- #define HAVE_aarch64_udot_prod_lanevnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sdot_prod_lanevnx8hi (TARGET_SVE)
- #define HAVE_aarch64_udot_prod_lanevnx8hi (TARGET_SVE)
- #define HAVE_aarch64_usdot_prodvnx16qi (TARGET_SVE_I8MM)
- #define HAVE_aarch64_usdot_prod_lanevnx16qi (TARGET_SVE_I8MM)
- #define HAVE_aarch64_sudot_prod_lanevnx16qi (TARGET_SVE_I8MM)
- #define HAVE_aarch64_sve_add_smatmulvnx16qi (TARGET_SVE_I8MM)
- #define HAVE_aarch64_sve_add_umatmulvnx16qi (TARGET_SVE_I8MM)
- #define HAVE_aarch64_sve_add_usmatmulvnx16qi (TARGET_SVE_I8MM)
- #define HAVE_aarch64_pred_fmavnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmavnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmsvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fmsvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fmavnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmavnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmsvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fmsvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fmavnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmavnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fnmsvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fmsvnx2df (TARGET_SVE)
- #define HAVE_aarch64_fma_lane_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fnma_lane_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fma_lane_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_fnma_lane_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_fma_lane_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_fnma_lane_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmlavnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla90vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla180vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla270vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmlavnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla90vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla180vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla270vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmlavnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla90vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla180vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmla270vnx2df (TARGET_SVE)
- #define HAVE_aarch64_fcmla_lane_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fcmla90_lane_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fcmla180_lane_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fcmla270_lane_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fcmla_lane_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_fcmla90_lane_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_fcmla180_lane_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_fcmla270_lane_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_tmadvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_tmadvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_tmadvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_bfdotvnx4sf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_bfmlalbvnx4sf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_bfmlaltvnx4sf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_bfmmlavnx4sf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_bfdot_lanevnx4sf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_bfmlalb_lanevnx4sf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_bfmlalt_lanevnx4sf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_fmmlavnx4sf ((TARGET_SVE) && (TARGET_SVE_F32MM))
- #define HAVE_aarch64_sve_fmmlavnx2df ((TARGET_SVE) && (TARGET_SVE_F64MM))
- #define HAVE_aarch64_sel_dupvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sel_dupvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sel_dupvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sel_dupvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sel_dupvnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sel_dupvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sel_dupvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sel_dupvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpltvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplevnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpeqvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpnevnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgevnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgtvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplovnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplsvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphsvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphivnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpltvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplevnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpeqvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpnevnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgevnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgtvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplovnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplsvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphsvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphivnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpltvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplevnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpeqvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpnevnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgevnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgtvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplovnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplsvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphsvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphivnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpltvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplevnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpeqvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpnevnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgevnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgtvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplovnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplsvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphsvnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphivnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpeqvnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgevnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgtvnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphivnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphsvnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplevnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplovnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplsvnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpltvnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpnevnx16qi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpeqvnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgevnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgtvnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphivnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphsvnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplevnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplovnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplsvnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpltvnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpnevnx8hi_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpeqvnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgevnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpgtvnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphivnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmphsvnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplevnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplovnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmplsvnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpltvnx4si_wide (TARGET_SVE)
- #define HAVE_aarch64_pred_cmpnevnx4si_wide (TARGET_SVE)
- #define HAVE_while_lesivnx16bi (TARGET_SVE)
- #define HAVE_while_ultsivnx16bi (TARGET_SVE)
- #define HAVE_while_ulesivnx16bi (TARGET_SVE)
- #define HAVE_while_ltsivnx16bi (TARGET_SVE)
- #define HAVE_while_gesivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_lesivnx8bi (TARGET_SVE)
- #define HAVE_while_ultsivnx8bi (TARGET_SVE)
- #define HAVE_while_ulesivnx8bi (TARGET_SVE)
- #define HAVE_while_ltsivnx8bi (TARGET_SVE)
- #define HAVE_while_gesivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_lesivnx4bi (TARGET_SVE)
- #define HAVE_while_ultsivnx4bi (TARGET_SVE)
- #define HAVE_while_ulesivnx4bi (TARGET_SVE)
- #define HAVE_while_ltsivnx4bi (TARGET_SVE)
- #define HAVE_while_gesivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_lesivnx2bi (TARGET_SVE)
- #define HAVE_while_ultsivnx2bi (TARGET_SVE)
- #define HAVE_while_ulesivnx2bi (TARGET_SVE)
- #define HAVE_while_ltsivnx2bi (TARGET_SVE)
- #define HAVE_while_gesivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx16bi (TARGET_SVE)
- #define HAVE_while_ultdivnx16bi (TARGET_SVE)
- #define HAVE_while_uledivnx16bi (TARGET_SVE)
- #define HAVE_while_ltdivnx16bi (TARGET_SVE)
- #define HAVE_while_gedivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx16bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx8bi (TARGET_SVE)
- #define HAVE_while_ultdivnx8bi (TARGET_SVE)
- #define HAVE_while_uledivnx8bi (TARGET_SVE)
- #define HAVE_while_ltdivnx8bi (TARGET_SVE)
- #define HAVE_while_gedivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx8bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx4bi (TARGET_SVE)
- #define HAVE_while_ultdivnx4bi (TARGET_SVE)
- #define HAVE_while_uledivnx4bi (TARGET_SVE)
- #define HAVE_while_ltdivnx4bi (TARGET_SVE)
- #define HAVE_while_gedivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx4bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx2bi (TARGET_SVE)
- #define HAVE_while_ultdivnx2bi (TARGET_SVE)
- #define HAVE_while_uledivnx2bi (TARGET_SVE)
- #define HAVE_while_ltdivnx2bi (TARGET_SVE)
- #define HAVE_while_gedivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx2bi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_lesivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_ultsivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_ulesivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_ltsivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_gesivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_lesivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_ultsivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_ulesivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_ltsivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_gesivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_lesivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_ultsivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_ulesivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_ltsivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_gesivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_lesivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_ultsivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_ulesivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_ltsivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_gesivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugesivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrsivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_ultdivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_uledivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_ltdivnx16bi_ptest (TARGET_SVE)
- #define HAVE_while_gedivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx16bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_ultdivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_uledivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_ltdivnx8bi_ptest (TARGET_SVE)
- #define HAVE_while_gedivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx8bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_ultdivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_uledivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_ltdivnx4bi_ptest (TARGET_SVE)
- #define HAVE_while_gedivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx4bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ledivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_ultdivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_uledivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_ltdivnx2bi_ptest (TARGET_SVE)
- #define HAVE_while_gedivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_gtdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugtdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_ugedivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_rwdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_while_wrdivnx2bi_ptest ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_pred_fcmeqvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmgevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmgtvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmlevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmltvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmnevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmeqvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmgevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmgtvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmlevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmltvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmnevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmeqvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmgevnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmgtvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmlevnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmltvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmnevnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmuovnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmuovnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_fcmuovnx2df (TARGET_SVE)
- #define HAVE_vcond_mask_vnx16bivnx16bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx8bivnx8bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx4bivnx4bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx2bivnx2bi (TARGET_SVE)
- #define HAVE_aarch64_ptestvnx16bi (TARGET_SVE)
- #define HAVE_aarch64_ptestvnx8bi (TARGET_SVE)
- #define HAVE_aarch64_ptestvnx4bi (TARGET_SVE)
- #define HAVE_aarch64_ptestvnx2bi (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx16qi (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx16qi (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx8hi (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx8hi (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx4si (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx4si (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx2di (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx2di (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx8bf (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx8bf (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx8hf (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx8hf (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx4sf (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx4sf (TARGET_SVE)
- #define HAVE_fold_extract_after_last_vnx2df (TARGET_SVE)
- #define HAVE_fold_extract_last_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_after_last_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_fold_extract_vector_last_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_sadd_vnx16qi (TARGET_SVE && 32 >= 8)
- #define HAVE_aarch64_pred_reduc_uadd_vnx16qi (TARGET_SVE && 64 >= 8)
- #define HAVE_aarch64_pred_reduc_sadd_vnx8hi (TARGET_SVE && 32 >= 16)
- #define HAVE_aarch64_pred_reduc_uadd_vnx8hi (TARGET_SVE && 64 >= 16)
- #define HAVE_aarch64_pred_reduc_sadd_vnx4si (TARGET_SVE && 32 >= 32)
- #define HAVE_aarch64_pred_reduc_uadd_vnx4si (TARGET_SVE && 64 >= 32)
- #define HAVE_aarch64_pred_reduc_uadd_vnx2di (TARGET_SVE && 64 >= 64)
- #define HAVE_aarch64_pred_reduc_and_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_ior_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umax_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umin_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_xor_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_and_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_ior_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umax_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umin_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_xor_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_and_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_ior_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umax_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umin_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_xor_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_and_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_ior_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umax_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_umin_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_xor_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_plus_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_nan_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_nan_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_plus_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_nan_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_nan_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_plus_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_nan_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smax_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_nan_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_reduc_smin_vnx2df (TARGET_SVE)
- #define HAVE_mask_fold_left_plus_vnx8hf (TARGET_SVE)
- #define HAVE_mask_fold_left_plus_vnx4sf (TARGET_SVE)
- #define HAVE_mask_fold_left_plus_vnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_tblvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_compactvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_compactvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_compactvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_compactvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_dup_lanevnx16qi (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (QImode), 0, 63))
- #define HAVE_aarch64_sve_dup_lanevnx8hi (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (HImode), 0, 63))
- #define HAVE_aarch64_sve_dup_lanevnx4si (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (SImode), 0, 63))
- #define HAVE_aarch64_sve_dup_lanevnx2di (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (DImode), 0, 63))
- #define HAVE_aarch64_sve_dup_lanevnx8bf (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (BFmode), 0, 63))
- #define HAVE_aarch64_sve_dup_lanevnx8hf (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (HFmode), 0, 63))
- #define HAVE_aarch64_sve_dup_lanevnx4sf (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (SFmode), 0, 63))
- #define HAVE_aarch64_sve_dup_lanevnx2df (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[2]) * GET_MODE_SIZE (DFmode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx16qi (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (QImode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (QImode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx8hi (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (HImode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (HImode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx4si (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (SImode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (SImode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx2di (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (DImode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (DImode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx8bf (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (BFmode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (BFmode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx8hf (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (HFmode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (HFmode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx4sf (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (SFmode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (SFmode), 0, 63))
- #define HAVE_aarch64_sve_dupq_lanevnx2df (TARGET_SVE \
- && (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (DFmode)) % 16 == 0 \
- && IN_RANGE (INTVAL (XVECEXP (operands[2], 0, 0)) \
- * GET_MODE_SIZE (DFmode), 0, 63))
- #define HAVE_aarch64_sve_revvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_splicevnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1qvnx16qi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx16qi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx16qi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx16qi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx16qi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx16qi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip1qvnx8hi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx8hi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx8hi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx8hi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx8hi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx8hi (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip1qvnx4si (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx4si (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx4si (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx4si (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx4si (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx4si (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip1qvnx2di (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx2di (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx2di (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx2di (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx2di (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx2di (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip1qvnx8bf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx8bf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx8bf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx8bf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx8bf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx8bf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip1qvnx8hf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx8hf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx8hf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx8hf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx8hf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx8hf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip1qvnx4sf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx4sf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx4sf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx4sf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx4sf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx4sf (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip1qvnx2df (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_zip2qvnx2df (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn1qvnx2df (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_trn2qvnx2df (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp1qvnx2df (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_uzp2qvnx2df (TARGET_SVE_F64MM)
- #define HAVE_aarch64_sve_extvnx16qi (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (QImode), 0, 255))
- #define HAVE_aarch64_sve_extvnx8hi (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (HImode), 0, 255))
- #define HAVE_aarch64_sve_extvnx4si (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (SImode), 0, 255))
- #define HAVE_aarch64_sve_extvnx2di (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (DImode), 0, 255))
- #define HAVE_aarch64_sve_extvnx8bf (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (BFmode), 0, 255))
- #define HAVE_aarch64_sve_extvnx8hf (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (HFmode), 0, 255))
- #define HAVE_aarch64_sve_extvnx4sf (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (SFmode), 0, 255))
- #define HAVE_aarch64_sve_extvnx2df (TARGET_SVE \
- && IN_RANGE (INTVAL (operands[3]) * GET_MODE_SIZE (DFmode), 0, 255))
- #define HAVE_aarch64_sve_revvnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_revvnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip1vnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_zip2vnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1vnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn2vnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp1vnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_uzp2vnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1_convvnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1_convvnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1_convvnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_trn1_convvnx2bi (TARGET_SVE)
- #define HAVE_vec_pack_trunc_vnx8hi (TARGET_SVE)
- #define HAVE_vec_pack_trunc_vnx4si (TARGET_SVE)
- #define HAVE_vec_pack_trunc_vnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_sunpkhi_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_uunpkhi_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_sunpklo_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_uunpklo_vnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_sunpkhi_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_uunpkhi_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_sunpklo_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_uunpklo_vnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_sunpkhi_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_uunpkhi_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_sunpklo_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_uunpklo_vnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_fix_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
- #define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
- #define HAVE_aarch64_sve_fix_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
- #define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
- #define HAVE_aarch64_sve_fix_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
- #define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
- #define HAVE_aarch64_sve_fix_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
- #define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
- #define HAVE_aarch64_sve_fix_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
- #define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
- #define HAVE_aarch64_sve_fix_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
- #define HAVE_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
- #define HAVE_aarch64_sve_fix_trunc_truncvnx2dfvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_fixuns_trunc_truncvnx2dfvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_float_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
- #define HAVE_aarch64_sve_floatuns_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
- #define HAVE_aarch64_sve_float_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
- #define HAVE_aarch64_sve_floatuns_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
- #define HAVE_aarch64_sve_float_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
- #define HAVE_aarch64_sve_floatuns_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
- #define HAVE_aarch64_sve_float_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
- #define HAVE_aarch64_sve_floatuns_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
- #define HAVE_aarch64_sve_float_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
- #define HAVE_aarch64_sve_floatuns_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
- #define HAVE_aarch64_sve_float_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
- #define HAVE_aarch64_sve_floatuns_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
- #define HAVE_aarch64_sve_float_extendvnx4sivnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_floatuns_extendvnx4sivnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_fcvt_truncvnx4sfvnx8hf (TARGET_SVE && 32 > 16)
- #define HAVE_aarch64_sve_fcvt_truncvnx2dfvnx8hf (TARGET_SVE && 64 > 16)
- #define HAVE_aarch64_sve_fcvt_truncvnx2dfvnx4sf (TARGET_SVE && 64 > 32)
- #define HAVE_aarch64_sve_fcvt_truncvnx4sfvnx8bf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_cvtntvnx8bf (TARGET_SVE_BF16)
- #define HAVE_aarch64_sve_fcvt_nontruncvnx8hfvnx4sf (TARGET_SVE && 32 > 16)
- #define HAVE_aarch64_sve_fcvt_nontruncvnx8hfvnx2df (TARGET_SVE && 64 > 16)
- #define HAVE_aarch64_sve_fcvt_nontruncvnx4sfvnx2df (TARGET_SVE && 64 > 32)
- #define HAVE_vec_pack_trunc_vnx8bi (TARGET_SVE)
- #define HAVE_vec_pack_trunc_vnx4bi (TARGET_SVE)
- #define HAVE_vec_pack_trunc_vnx2bi (TARGET_SVE)
- #define HAVE_aarch64_sve_punpklo_vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_punpkhi_vnx16bi (TARGET_SVE)
- #define HAVE_aarch64_sve_punpklo_vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_punpkhi_vnx8bi (TARGET_SVE)
- #define HAVE_aarch64_sve_punpklo_vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_punpkhi_vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_brka (TARGET_SVE)
- #define HAVE_aarch64_brkb (TARGET_SVE)
- #define HAVE_aarch64_brkn (TARGET_SVE)
- #define HAVE_aarch64_brkpa (TARGET_SVE)
- #define HAVE_aarch64_brkpb (TARGET_SVE)
- #define HAVE_aarch64_sve_pfirstvnx16bi (TARGET_SVE && 8 >= 8)
- #define HAVE_aarch64_sve_pnextvnx16bi (TARGET_SVE && 64 >= 8)
- #define HAVE_aarch64_sve_pnextvnx8bi (TARGET_SVE && 64 >= 16)
- #define HAVE_aarch64_sve_pnextvnx4bi (TARGET_SVE && 64 >= 32)
- #define HAVE_aarch64_sve_pnextvnx2bi (TARGET_SVE && 64 >= 64)
- #define HAVE_aarch64_sve_cnt_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_incdi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincdi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincdi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincsi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincsi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_incvnx2di_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincvnx2di_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincvnx2di_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_incvnx4si_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincvnx4si_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincvnx4si_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_decdi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecdi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecdi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecsi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecsi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_decvnx2di_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecvnx2di_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecvnx2di_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_decvnx4si_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecvnx4si_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecvnx4si_pat (TARGET_SVE)
- #define HAVE_aarch64_pred_cntpvnx16bi (TARGET_SVE)
- #define HAVE_aarch64_pred_cntpvnx8bi (TARGET_SVE)
- #define HAVE_aarch64_pred_cntpvnx4bi (TARGET_SVE)
- #define HAVE_aarch64_pred_cntpvnx2bi (TARGET_SVE)
- #define HAVE_aarch64_gather_ldntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_gather_ldntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_gather_ldntvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_gather_ldntvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_gather_ldnt_extendvnx4sivnx4qi (TARGET_SVE2 \
- && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_gather_ldnt_zero_extendvnx4sivnx4qi (TARGET_SVE2 \
- && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_gather_ldnt_extendvnx2divnx2qi (TARGET_SVE2 \
- && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_gather_ldnt_zero_extendvnx2divnx2qi (TARGET_SVE2 \
- && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_gather_ldnt_extendvnx4sivnx4hi (TARGET_SVE2 \
- && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_gather_ldnt_zero_extendvnx4sivnx4hi (TARGET_SVE2 \
- && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_gather_ldnt_extendvnx2divnx2hi (TARGET_SVE2 \
- && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_gather_ldnt_zero_extendvnx2divnx2hi (TARGET_SVE2 \
- && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_gather_ldnt_extendvnx2divnx2si (TARGET_SVE2 \
- && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_gather_ldnt_zero_extendvnx2divnx2si (TARGET_SVE2 \
- && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_scatter_stntvnx4si (TARGET_SVE)
- #define HAVE_aarch64_scatter_stntvnx2di (TARGET_SVE)
- #define HAVE_aarch64_scatter_stntvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_scatter_stntvnx2df (TARGET_SVE)
- #define HAVE_aarch64_scatter_stnt_vnx4sivnx4qi (TARGET_SVE2 \
- && (~0x43 & 0x41) == 0)
- #define HAVE_aarch64_scatter_stnt_vnx2divnx2qi (TARGET_SVE2 \
- && (~0x27 & 0x21) == 0)
- #define HAVE_aarch64_scatter_stnt_vnx4sivnx4hi (TARGET_SVE2 \
- && (~0x43 & 0x42) == 0)
- #define HAVE_aarch64_scatter_stnt_vnx2divnx2hi (TARGET_SVE2 \
- && (~0x27 & 0x22) == 0)
- #define HAVE_aarch64_scatter_stnt_vnx2divnx2si (TARGET_SVE2 \
- && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_mul_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_mul_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_mul_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_suqaddvnx16qi_const (TARGET_SVE2)
- #define HAVE_aarch64_sve_suqaddvnx8hi_const (TARGET_SVE2)
- #define HAVE_aarch64_sve_suqaddvnx4si_const (TARGET_SVE2)
- #define HAVE_aarch64_sve_suqaddvnx2di_const (TARGET_SVE2)
- #define HAVE_aarch64_pred_shaddvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_shsubvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqrshlvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_srhaddvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_srshlvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhaddvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhsubvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqrshlvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_urhaddvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_urshlvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_shaddvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_shsubvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqrshlvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_srhaddvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_srshlvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhaddvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhsubvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqrshlvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_urhaddvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_urshlvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_shaddvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_shsubvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqrshlvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_srhaddvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_srshlvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhaddvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhsubvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqrshlvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_urhaddvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_urshlvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_shaddvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_shsubvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqrshlvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_srhaddvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_srshlvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhaddvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_uhsubvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqrshlvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_urhaddvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_urshlvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulhvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmulhvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulhvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmulhvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulhvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmulhvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulhvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmulhvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulh_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmulh_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulh_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmulh_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulh_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmulh_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqshlvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqshlvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqshlvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqshlvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqshlvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqshlvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_sqshlvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_uqshlvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_adclbvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_adcltvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_eorbtvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_eortbvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbclbvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbcltvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlahvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlshvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_adclbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_adcltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_eorbtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_eortbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbclbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbcltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlahvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlshvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_adclbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_adcltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_eorbtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_eortbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbclbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbcltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlahvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlshvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_adclbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_adcltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_eorbtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_eortbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbclbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sbcltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlahvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlshvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlah_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlsh_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlah_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlsh_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlah_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdmlsh_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_mul_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_mul_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_mul_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_mul_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_mul_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_mul_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_xarvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_xarvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_xarvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_xarvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_eor3vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_eor3vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_eor3vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_eor3vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_srshrvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_urshrvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_srshrvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_urshrvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_srshrvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_urshrvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_srshrvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_urshrvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_slivnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_srivnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_slivnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_srivnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_slivnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_srivnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_slivnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_srivnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddwbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddwtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubwbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubwtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddwbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddwtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubwbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubwtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddwbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddwtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubwbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubwtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddwbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddwtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubwbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubwtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddwbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddwtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubwbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubwtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddwbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddwtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubwbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubwtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sabdlbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sabdltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddlbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddlbtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_smullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_smulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssublbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssublbtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubltbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uabdlbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uabdltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddlbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_umullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_umulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_usublbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sabdlbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sabdltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddlbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddlbtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_smullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_smulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssublbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssublbtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubltbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uabdlbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uabdltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddlbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_umullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_umulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_usublbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sabdlbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sabdltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddlbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddlbtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_saddltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_smullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_smulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssublbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssublbtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ssubltbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uabdlbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uabdltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddlbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uaddltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_umullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_umulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_usublbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_usubltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_smullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_smullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_umullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_umullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_smullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_smullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqdmullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_umullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_umullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sshllbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sshlltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ushllbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_ushlltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sshllbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sshlltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ushllbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_ushlltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sshllbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sshlltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ushllbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_ushlltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_sabdlbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_sabdltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_uabdlbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_uabdltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_sabdlbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_sabdltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_uabdlbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_uabdltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_sabdlbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_sabdltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_uabdlbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_uabdltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_smullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_umullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullbtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullbtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullbtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qadd_sqdmullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_smullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sub_umullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullbtvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullbtvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmulltvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullbtvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullb_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullt_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullb_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_qsub_sqdmullt_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_fmlalbvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_sve_fmlaltvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_sve_fmlslbvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_sve_fmlsltvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_fmlalb_lane_vnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_fmlalt_lane_vnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_fmlslb_lane_vnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_fmlslt_lane_vnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtunbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqxtnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtunbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqxtnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtunbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqxtnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtuntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqxtntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtuntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqxtntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqxtuntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqxtntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_addhnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_raddhnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_rsubhnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_subhnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_addhnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_raddhnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_rsubhnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_subhnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_addhnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_raddhnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_rsubhnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_subhnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_addhntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_raddhntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_rsubhntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_subhntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_addhntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_raddhntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_rsubhntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_subhntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_addhntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_raddhntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_rsubhntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_subhntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_rshrnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_shrnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrunbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrunbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqrshrnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqshrnbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_rshrnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_shrnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrunbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrunbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqrshrnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqshrnbvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_rshrnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_shrnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrunbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrunbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqrshrnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqshrnbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_rshrntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_shrntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshruntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshruntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqrshrntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqshrntvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_rshrntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_shrntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshruntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshruntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqrshrntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqshrntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_rshrntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_shrntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshrntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrshruntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshrntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqshruntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqrshrntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_uqshrntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_addpvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_smaxpvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_sminpvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_umaxpvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uminpvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_addpvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_smaxpvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_sminpvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_umaxpvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_uminpvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_addpvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_smaxpvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_sminpvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_umaxpvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_uminpvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_addpvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_smaxpvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_sminpvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_umaxpvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_uminpvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_faddpvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fmaxpvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fmaxnmpvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fminpvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fminnmpvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_pred_faddpvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fmaxpvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fmaxnmpvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fminpvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fminnmpvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_faddpvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_pred_fmaxpvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_pred_fmaxnmpvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_pred_fminpvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_pred_fminnmpvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd90vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd270vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd90vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd270vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd90vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd270vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd90vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd270vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd90vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd270vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd90vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd270vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd90vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cadd270vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd90vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqcadd270vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmlavnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla90vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla180vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla270vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlahvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah90vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah180vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah270vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmlavnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla90vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla180vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla270vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlahvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah90vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah180vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah270vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmlavnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla90vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla180vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla270vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlahvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah90vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah180vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah270vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmlavnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla90vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla180vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cmla270vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlahvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah90vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah180vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_sqrdcmlah270vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_cmla_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_cmla90_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_cmla180_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_cmla270_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah90_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah180_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah270_lane_vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_cmla_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_cmla90_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_cmla180_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_cmla270_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah90_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah180_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sqrdcmlah270_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdotvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdot90vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdot180vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdot270vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdotvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdot90vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdot180vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_cdot270vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_cdot_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_cdot90_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_cdot180_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_cdot270_lane_vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_cdot_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_cdot90_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_cdot180_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_cdot270_lane_vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_pred_fcvtltvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fcvtltvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_sve_cvtntvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_sve_cvtntvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_fcvtxvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_sve2_cvtxntvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_pred_urecpevnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_ursqrtevnx4si (TARGET_SVE2)
- #define HAVE_aarch64_pred_flogbvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_pred_flogbvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_pred_flogbvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_sve2_pmulvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmullbvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmulltvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmullbvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmulltvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmullb_pairvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmullt_pairvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmullb_pairvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmullt_pairvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_pmullb_pairvnx2di ((TARGET_SVE2) && (TARGET_SVE2_AES))
- #define HAVE_aarch64_sve_pmullt_pairvnx2di ((TARGET_SVE2) && (TARGET_SVE2_AES))
- #define HAVE_aarch64_sve2_tbl2vnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbl2vnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbl2vnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbl2vnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbl2vnx8bf (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbl2vnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbl2vnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbl2vnx2df (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx8bf (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx8hf (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx4sf (TARGET_SVE2)
- #define HAVE_aarch64_sve2_tbxvnx2df (TARGET_SVE2)
- #define HAVE_aarch64_sve_bdepvnx16qi (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bextvnx16qi (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bgrpvnx16qi (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bdepvnx8hi (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bextvnx8hi (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bgrpvnx8hi (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bdepvnx4si (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bextvnx4si (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bgrpvnx4si (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bdepvnx2di (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bextvnx2di (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve_bgrpvnx2di (TARGET_SVE2_BITPERM)
- #define HAVE_aarch64_sve2_histcntvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_histcntvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_histsegvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_matchvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_nmatchvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_pred_matchvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_pred_nmatchvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_aese (TARGET_SVE2_AES)
- #define HAVE_aarch64_sve2_aesd (TARGET_SVE2_AES)
- #define HAVE_aarch64_sve2_aesmc (TARGET_SVE2_AES)
- #define HAVE_aarch64_sve2_aesimc (TARGET_SVE2_AES)
- #define HAVE_aarch64_sve2_rax1 (TARGET_SVE2_SHA3)
- #define HAVE_aarch64_sve2_sm4e (TARGET_SVE2_SM4)
- #define HAVE_aarch64_sve2_sm4ekey (TARGET_SVE2_SM4)
- #define HAVE_cbranchsi4 1
- #define HAVE_cbranchdi4 1
- #define HAVE_cbranchsf4 1
- #define HAVE_cbranchdf4 1
- #define HAVE_cbranchcc4 1
- #define HAVE_modsi3 1
- #define HAVE_moddi3 1
- #define HAVE_casesi 1
- #define HAVE_casesi_dispatch 1
- #define HAVE_prologue 1
- #define HAVE_epilogue 1
- #define HAVE_sibcall_epilogue 1
- #define HAVE_return (aarch64_use_return_insn_p ())
- #define HAVE_call 1
- #define HAVE_call_value 1
- #define HAVE_sibcall 1
- #define HAVE_sibcall_value 1
- #define HAVE_untyped_call 1
- #define HAVE_movqi 1
- #define HAVE_movhi 1
- #define HAVE_movsi 1
- #define HAVE_movdi 1
- #define HAVE_movti 1
- #define HAVE_movhf 1
- #define HAVE_movbf 1
- #define HAVE_movsf 1
- #define HAVE_movdf 1
- #define HAVE_movtf 1
- #define HAVE_cpymemdi (!STRICT_ALIGNMENT)
- #define HAVE_extendsidi2 1
- #define HAVE_zero_extendsidi2 1
- #define HAVE_extendqisi2 1
- #define HAVE_zero_extendqisi2 1
- #define HAVE_extendhisi2 1
- #define HAVE_zero_extendhisi2 1
- #define HAVE_extendqidi2 1
- #define HAVE_zero_extendqidi2 1
- #define HAVE_extendhidi2 1
- #define HAVE_zero_extendhidi2 1
- #define HAVE_extendqihi2 1
- #define HAVE_zero_extendqihi2 1
- #define HAVE_addsi3 1
- #define HAVE_adddi3 1
- #define HAVE_addvsi4 1
- #define HAVE_addvdi4 1
- #define HAVE_uaddvsi4 1
- #define HAVE_uaddvdi4 1
- #define HAVE_addti3 1
- #define HAVE_addvti4 1
- #define HAVE_uaddvti4 1
- #define HAVE_addsi3_carryin 1
- #define HAVE_adddi3_carryin 1
- #define HAVE_addsi3_carryinC 1
- #define HAVE_adddi3_carryinC 1
- #define HAVE_addsi3_carryinV 1
- #define HAVE_adddi3_carryinV 1
- #define HAVE_subvsi4 1
- #define HAVE_subvdi4 1
- #define HAVE_negvsi3 1
- #define HAVE_negvdi3 1
- #define HAVE_usubvsi4 1
- #define HAVE_usubvdi4 1
- #define HAVE_subti3 1
- #define HAVE_subvti4 1
- #define HAVE_usubvti4 1
- #define HAVE_negvti3 1
- #define HAVE_subsi3_carryin 1
- #define HAVE_subdi3_carryin 1
- #define HAVE_usubsi3_carryinC 1
- #define HAVE_usubdi3_carryinC 1
- #define HAVE_subsi3_carryinV 1
- #define HAVE_subdi3_carryinV 1
- #define HAVE_abssi2 1
- #define HAVE_absdi2 1
- #define HAVE_mulditi3 1
- #define HAVE_umulditi3 1
- #define HAVE_multi3 1
- #define HAVE_cstoresi4 1
- #define HAVE_cstoredi4 1
- #define HAVE_cstorecc4 1
- #define HAVE_cstoresf4 1
- #define HAVE_cstoredf4 1
- #define HAVE_cmovsi6 1
- #define HAVE_cmovdi6 1
- #define HAVE_cmovsf6 1
- #define HAVE_cmovdf6 1
- #define HAVE_movqicc 1
- #define HAVE_movhicc 1
- #define HAVE_movsicc 1
- #define HAVE_movdicc 1
- #define HAVE_movsfsicc 1
- #define HAVE_movdfsicc 1
- #define HAVE_movsfdicc 1
- #define HAVE_movdfdicc 1
- #define HAVE_movsfcc 1
- #define HAVE_movdfcc 1
- #define HAVE_negsicc 1
- #define HAVE_notsicc 1
- #define HAVE_negdicc 1
- #define HAVE_notdicc 1
- #define HAVE_umaxsi3 (TARGET_SVE)
- #define HAVE_umaxdi3 (TARGET_SVE)
- #define HAVE_ffssi2 1
- #define HAVE_ffsdi2 1
- #define HAVE_popcountsi2 (TARGET_SIMD)
- #define HAVE_popcountdi2 (TARGET_SIMD)
- #define HAVE_ashlsi3 1
- #define HAVE_ashrsi3 1
- #define HAVE_lshrsi3 1
- #define HAVE_ashldi3 1
- #define HAVE_ashrdi3 1
- #define HAVE_lshrdi3 1
- #define HAVE_ashlqi3 1
- #define HAVE_ashlhi3 1
- #define HAVE_rotrsi3 1
- #define HAVE_rotrdi3 1
- #define HAVE_rotlsi3 1
- #define HAVE_rotldi3 1
- #define HAVE_extv 1
- #define HAVE_extzv 1
- #define HAVE_insvsi 1
- #define HAVE_insvdi 1
- #define HAVE_fmahf4 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_fmasf4 (TARGET_FLOAT)
- #define HAVE_fmadf4 (TARGET_FLOAT)
- #define HAVE_fnmahf4 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_fnmasf4 (TARGET_FLOAT)
- #define HAVE_fnmadf4 (TARGET_FLOAT)
- #define HAVE_fmssf4 (TARGET_FLOAT)
- #define HAVE_fmsdf4 (TARGET_FLOAT)
- #define HAVE_fnmssf4 (TARGET_FLOAT)
- #define HAVE_fnmsdf4 (TARGET_FLOAT)
- #define HAVE_floatsihf2 (TARGET_FLOAT)
- #define HAVE_floatunssihf2 (TARGET_FLOAT)
- #define HAVE_floatdihf2 (TARGET_FLOAT && (TARGET_FP_F16INST || TARGET_SIMD))
- #define HAVE_floatunsdihf2 (TARGET_FLOAT && (TARGET_FP_F16INST || TARGET_SIMD))
- #define HAVE_divhf3 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_divsf3 (TARGET_FLOAT)
- #define HAVE_divdf3 (TARGET_FLOAT)
- #define HAVE_sqrthf2 ((TARGET_FLOAT) && (AARCH64_ISA_F16))
- #define HAVE_sqrtsf2 (TARGET_FLOAT)
- #define HAVE_sqrtdf2 (TARGET_FLOAT)
- #define HAVE_lrintsfsi2 (TARGET_FLOAT \
- && ((GET_MODE_BITSIZE (SFmode) <= LONG_TYPE_SIZE) \
- || !flag_trapping_math || flag_fp_int_builtin_inexact))
- #define HAVE_lrintdfsi2 (TARGET_FLOAT \
- && ((GET_MODE_BITSIZE (DFmode) <= LONG_TYPE_SIZE) \
- || !flag_trapping_math || flag_fp_int_builtin_inexact))
- #define HAVE_lrintsfdi2 (TARGET_FLOAT \
- && ((GET_MODE_BITSIZE (SFmode) <= LONG_TYPE_SIZE) \
- || !flag_trapping_math || flag_fp_int_builtin_inexact))
- #define HAVE_lrintdfdi2 (TARGET_FLOAT \
- && ((GET_MODE_BITSIZE (DFmode) <= LONG_TYPE_SIZE) \
- || !flag_trapping_math || flag_fp_int_builtin_inexact))
- #define HAVE_copysignsf3 (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_copysigndf3 (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_xorsignsf3 (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_xorsigndf3 (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_aarch64_reload_movcpsfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpsfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpdfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpdfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcptfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcptfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv8qisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv8qidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv16qisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv16qidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv4hisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv4hidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv8hisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv8hidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv2sisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv2sidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv4sisi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv4sidi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv2disi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv2didi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv2sfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv2sfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv4sfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv4sfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movcpv2dfsi ((TARGET_FLOAT) && (ptr_mode == SImode || Pmode == SImode))
- #define HAVE_aarch64_reload_movcpv2dfdi ((TARGET_FLOAT) && (ptr_mode == DImode || Pmode == DImode))
- #define HAVE_aarch64_reload_movti (TARGET_FLOAT)
- #define HAVE_aarch64_reload_movtf (TARGET_FLOAT)
- #define HAVE_add_losym 1
- #define HAVE_tlsgd_small_si (ptr_mode == SImode)
- #define HAVE_tlsgd_small_di (ptr_mode == DImode)
- #define HAVE_tlsdesc_small_si ((TARGET_TLS_DESC) && (ptr_mode == SImode))
- #define HAVE_tlsdesc_small_di ((TARGET_TLS_DESC) && (ptr_mode == DImode))
- #define HAVE_get_thread_pointerdi 1
- #define HAVE_stack_protect_set 1
- #define HAVE_stack_protect_combined_set 1
- #define HAVE_stack_protect_test 1
- #define HAVE_stack_protect_combined_test 1
- #define HAVE_doloop_end (optimize > 0 && flag_modulo_sched)
- #define HAVE_despeculate_copyqi 1
- #define HAVE_despeculate_copyhi 1
- #define HAVE_despeculate_copysi 1
- #define HAVE_despeculate_copydi 1
- #define HAVE_despeculate_copyti 1
- #define HAVE_movv8qi (TARGET_SIMD)
- #define HAVE_movv16qi (TARGET_SIMD)
- #define HAVE_movv4hi (TARGET_SIMD)
- #define HAVE_movv8hi (TARGET_SIMD)
- #define HAVE_movv2si (TARGET_SIMD)
- #define HAVE_movv4si (TARGET_SIMD)
- #define HAVE_movv2di (TARGET_SIMD)
- #define HAVE_movv4hf (TARGET_SIMD)
- #define HAVE_movv8hf (TARGET_SIMD)
- #define HAVE_movv4bf (TARGET_SIMD)
- #define HAVE_movv8bf (TARGET_SIMD)
- #define HAVE_movv2sf (TARGET_SIMD)
- #define HAVE_movv4sf (TARGET_SIMD)
- #define HAVE_movv2df (TARGET_SIMD)
- #define HAVE_movmisalignv8qi (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv16qi (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv4hi (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv8hi (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv2si (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv4si (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv2di (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv2sf (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv4sf (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_movmisalignv2df (TARGET_SIMD && !STRICT_ALIGNMENT)
- #define HAVE_aarch64_split_simd_movv16qi (TARGET_SIMD)
- #define HAVE_aarch64_split_simd_movv8hi (TARGET_SIMD)
- #define HAVE_aarch64_split_simd_movv4si (TARGET_SIMD)
- #define HAVE_aarch64_split_simd_movv2di (TARGET_SIMD)
- #define HAVE_aarch64_split_simd_movv8hf (TARGET_SIMD)
- #define HAVE_aarch64_split_simd_movv8bf (TARGET_SIMD)
- #define HAVE_aarch64_split_simd_movv4sf (TARGET_SIMD)
- #define HAVE_aarch64_split_simd_movv2df (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv16qi (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv8hi (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv4si (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv2di (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv8hf (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv8bf (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv4sf (TARGET_SIMD)
- #define HAVE_aarch64_get_halfv2df (TARGET_SIMD)
- #define HAVE_ctzv2si2 (TARGET_SIMD)
- #define HAVE_ctzv4si2 (TARGET_SIMD)
- #define HAVE_xorsignv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_xorsignv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_xorsignv2sf3 (TARGET_SIMD)
- #define HAVE_xorsignv4sf3 (TARGET_SIMD)
- #define HAVE_xorsignv2df3 (TARGET_SIMD)
- #define HAVE_sdot_prodv8qi (TARGET_DOTPROD)
- #define HAVE_udot_prodv8qi (TARGET_DOTPROD)
- #define HAVE_sdot_prodv16qi (TARGET_DOTPROD)
- #define HAVE_udot_prodv16qi (TARGET_DOTPROD)
- #define HAVE_copysignv4hf3 ((TARGET_FLOAT && TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_copysignv8hf3 ((TARGET_FLOAT && TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_copysignv2sf3 (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_copysignv4sf3 (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_copysignv2df3 (TARGET_FLOAT && TARGET_SIMD)
- #define HAVE_rsqrtv2sf2 (TARGET_SIMD)
- #define HAVE_rsqrtv4sf2 (TARGET_SIMD)
- #define HAVE_rsqrtv2df2 (TARGET_SIMD)
- #define HAVE_rsqrtsf2 (TARGET_SIMD)
- #define HAVE_rsqrtdf2 (TARGET_SIMD)
- #define HAVE_ssadv16qi (TARGET_SIMD)
- #define HAVE_usadv16qi (TARGET_SIMD)
- #define HAVE_signbitv2sf2 (TARGET_SIMD)
- #define HAVE_signbitv4sf2 (TARGET_SIMD)
- #define HAVE_ashlv8qi3 (TARGET_SIMD)
- #define HAVE_ashlv16qi3 (TARGET_SIMD)
- #define HAVE_ashlv4hi3 (TARGET_SIMD)
- #define HAVE_ashlv8hi3 (TARGET_SIMD)
- #define HAVE_ashlv2si3 (TARGET_SIMD)
- #define HAVE_ashlv4si3 (TARGET_SIMD)
- #define HAVE_ashlv2di3 (TARGET_SIMD)
- #define HAVE_lshrv8qi3 (TARGET_SIMD)
- #define HAVE_lshrv16qi3 (TARGET_SIMD)
- #define HAVE_lshrv4hi3 (TARGET_SIMD)
- #define HAVE_lshrv8hi3 (TARGET_SIMD)
- #define HAVE_lshrv2si3 (TARGET_SIMD)
- #define HAVE_lshrv4si3 (TARGET_SIMD)
- #define HAVE_lshrv2di3 (TARGET_SIMD)
- #define HAVE_ashrv8qi3 (TARGET_SIMD)
- #define HAVE_ashrv16qi3 (TARGET_SIMD)
- #define HAVE_ashrv4hi3 (TARGET_SIMD)
- #define HAVE_ashrv8hi3 (TARGET_SIMD)
- #define HAVE_ashrv2si3 (TARGET_SIMD)
- #define HAVE_ashrv4si3 (TARGET_SIMD)
- #define HAVE_ashrv2di3 (TARGET_SIMD)
- #define HAVE_vashlv8qi3 (TARGET_SIMD)
- #define HAVE_vashlv16qi3 (TARGET_SIMD)
- #define HAVE_vashlv4hi3 (TARGET_SIMD)
- #define HAVE_vashlv8hi3 (TARGET_SIMD)
- #define HAVE_vashlv2si3 (TARGET_SIMD)
- #define HAVE_vashlv4si3 (TARGET_SIMD)
- #define HAVE_vashlv2di3 (TARGET_SIMD)
- #define HAVE_vashrv8qi3 (TARGET_SIMD)
- #define HAVE_vashrv16qi3 (TARGET_SIMD)
- #define HAVE_vashrv4hi3 (TARGET_SIMD)
- #define HAVE_vashrv8hi3 (TARGET_SIMD)
- #define HAVE_vashrv2si3 (TARGET_SIMD)
- #define HAVE_vashrv4si3 (TARGET_SIMD)
- #define HAVE_aarch64_ashr_simddi (TARGET_SIMD)
- #define HAVE_vlshrv8qi3 (TARGET_SIMD)
- #define HAVE_vlshrv16qi3 (TARGET_SIMD)
- #define HAVE_vlshrv4hi3 (TARGET_SIMD)
- #define HAVE_vlshrv8hi3 (TARGET_SIMD)
- #define HAVE_vlshrv2si3 (TARGET_SIMD)
- #define HAVE_vlshrv4si3 (TARGET_SIMD)
- #define HAVE_aarch64_lshr_simddi (TARGET_SIMD)
- #define HAVE_vec_setv8qi (TARGET_SIMD)
- #define HAVE_vec_setv16qi (TARGET_SIMD)
- #define HAVE_vec_setv4hi (TARGET_SIMD)
- #define HAVE_vec_setv8hi (TARGET_SIMD)
- #define HAVE_vec_setv2si (TARGET_SIMD)
- #define HAVE_vec_setv4si (TARGET_SIMD)
- #define HAVE_vec_setv2di (TARGET_SIMD)
- #define HAVE_vec_setv4hf (TARGET_SIMD)
- #define HAVE_vec_setv8hf (TARGET_SIMD)
- #define HAVE_vec_setv4bf (TARGET_SIMD)
- #define HAVE_vec_setv8bf (TARGET_SIMD)
- #define HAVE_vec_setv2sf (TARGET_SIMD)
- #define HAVE_vec_setv4sf (TARGET_SIMD)
- #define HAVE_vec_setv2df (TARGET_SIMD)
- #define HAVE_smaxv2di3 (TARGET_SIMD)
- #define HAVE_sminv2di3 (TARGET_SIMD)
- #define HAVE_umaxv2di3 (TARGET_SIMD)
- #define HAVE_uminv2di3 (TARGET_SIMD)
- #define HAVE_move_lo_quad_v16qi (TARGET_SIMD)
- #define HAVE_move_lo_quad_v8hi (TARGET_SIMD)
- #define HAVE_move_lo_quad_v4si (TARGET_SIMD)
- #define HAVE_move_lo_quad_v2di (TARGET_SIMD)
- #define HAVE_move_lo_quad_v8hf (TARGET_SIMD)
- #define HAVE_move_lo_quad_v8bf (TARGET_SIMD)
- #define HAVE_move_lo_quad_v4sf (TARGET_SIMD)
- #define HAVE_move_lo_quad_v2df (TARGET_SIMD)
- #define HAVE_move_hi_quad_v16qi (TARGET_SIMD)
- #define HAVE_move_hi_quad_v8hi (TARGET_SIMD)
- #define HAVE_move_hi_quad_v4si (TARGET_SIMD)
- #define HAVE_move_hi_quad_v2di (TARGET_SIMD)
- #define HAVE_move_hi_quad_v8hf (TARGET_SIMD)
- #define HAVE_move_hi_quad_v8bf (TARGET_SIMD)
- #define HAVE_move_hi_quad_v4sf (TARGET_SIMD)
- #define HAVE_move_hi_quad_v2df (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_v4hi (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_v2si (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_di (TARGET_SIMD)
- #define HAVE_vec_unpacks_hi_v16qi (TARGET_SIMD)
- #define HAVE_vec_unpacku_hi_v16qi (TARGET_SIMD)
- #define HAVE_vec_unpacks_hi_v8hi (TARGET_SIMD)
- #define HAVE_vec_unpacku_hi_v8hi (TARGET_SIMD)
- #define HAVE_vec_unpacks_hi_v4si (TARGET_SIMD)
- #define HAVE_vec_unpacku_hi_v4si (TARGET_SIMD)
- #define HAVE_vec_unpacks_lo_v16qi (TARGET_SIMD)
- #define HAVE_vec_unpacku_lo_v16qi (TARGET_SIMD)
- #define HAVE_vec_unpacks_lo_v8hi (TARGET_SIMD)
- #define HAVE_vec_unpacku_lo_v8hi (TARGET_SIMD)
- #define HAVE_vec_unpacks_lo_v4si (TARGET_SIMD)
- #define HAVE_vec_unpacku_lo_v4si (TARGET_SIMD)
- #define HAVE_vec_widen_smult_lo_v16qi (TARGET_SIMD)
- #define HAVE_vec_widen_umult_lo_v16qi (TARGET_SIMD)
- #define HAVE_vec_widen_smult_lo_v8hi (TARGET_SIMD)
- #define HAVE_vec_widen_umult_lo_v8hi (TARGET_SIMD)
- #define HAVE_vec_widen_smult_lo_v4si (TARGET_SIMD)
- #define HAVE_vec_widen_umult_lo_v4si (TARGET_SIMD)
- #define HAVE_vec_widen_smult_hi_v16qi (TARGET_SIMD)
- #define HAVE_vec_widen_umult_hi_v16qi (TARGET_SIMD)
- #define HAVE_vec_widen_smult_hi_v8hi (TARGET_SIMD)
- #define HAVE_vec_widen_umult_hi_v8hi (TARGET_SIMD)
- #define HAVE_vec_widen_smult_hi_v4si (TARGET_SIMD)
- #define HAVE_vec_widen_umult_hi_v4si (TARGET_SIMD)
- #define HAVE_divv4hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_divv8hf3 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_divv2sf3 (TARGET_SIMD)
- #define HAVE_divv4sf3 (TARGET_SIMD)
- #define HAVE_divv2df3 (TARGET_SIMD)
- #define HAVE_fixv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fixunsv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fixv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fixunsv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fixv2sfv2si2 (TARGET_SIMD)
- #define HAVE_fixunsv2sfv2si2 (TARGET_SIMD)
- #define HAVE_fixv4sfv4si2 (TARGET_SIMD)
- #define HAVE_fixunsv4sfv4si2 (TARGET_SIMD)
- #define HAVE_fixv2dfv2di2 (TARGET_SIMD)
- #define HAVE_fixunsv2dfv2di2 (TARGET_SIMD)
- #define HAVE_fix_truncv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fixuns_truncv4hfv4hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fix_truncv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fixuns_truncv8hfv8hi2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_fix_truncv2sfv2si2 (TARGET_SIMD)
- #define HAVE_fixuns_truncv2sfv2si2 (TARGET_SIMD)
- #define HAVE_fix_truncv4sfv4si2 (TARGET_SIMD)
- #define HAVE_fixuns_truncv4sfv4si2 (TARGET_SIMD)
- #define HAVE_fix_truncv2dfv2di2 (TARGET_SIMD)
- #define HAVE_fixuns_truncv2dfv2di2 (TARGET_SIMD)
- #define HAVE_ftruncv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_ftruncv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_ftruncv2sf2 (TARGET_SIMD)
- #define HAVE_ftruncv4sf2 (TARGET_SIMD)
- #define HAVE_ftruncv2df2 (TARGET_SIMD)
- #define HAVE_vec_unpacks_lo_v8hf (TARGET_SIMD)
- #define HAVE_vec_unpacks_lo_v4sf (TARGET_SIMD)
- #define HAVE_vec_unpacks_hi_v8hf (TARGET_SIMD)
- #define HAVE_vec_unpacks_hi_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_float_truncate_hi_v4sf (TARGET_SIMD)
- #define HAVE_aarch64_float_truncate_hi_v8hf (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_v2df (TARGET_SIMD)
- #define HAVE_vec_pack_trunc_df (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v8qi (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v16qi (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v4hi (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v8hi (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v2si (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v4si (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v2di (TARGET_SIMD)
- #define HAVE_reduc_plus_scal_v4sf (TARGET_SIMD)
- #define HAVE_reduc_smax_nan_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smin_nan_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smax_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smin_scal_v4hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smax_nan_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smin_nan_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smax_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smin_scal_v8hf ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_reduc_smax_nan_scal_v2sf (TARGET_SIMD)
- #define HAVE_reduc_smin_nan_scal_v2sf (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v2sf (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v2sf (TARGET_SIMD)
- #define HAVE_reduc_smax_nan_scal_v4sf (TARGET_SIMD)
- #define HAVE_reduc_smin_nan_scal_v4sf (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v4sf (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v4sf (TARGET_SIMD)
- #define HAVE_reduc_smax_nan_scal_v2df (TARGET_SIMD)
- #define HAVE_reduc_smin_nan_scal_v2df (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v2df (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v2df (TARGET_SIMD)
- #define HAVE_reduc_umax_scal_v8qi (TARGET_SIMD)
- #define HAVE_reduc_umin_scal_v8qi (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v8qi (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v8qi (TARGET_SIMD)
- #define HAVE_reduc_umax_scal_v16qi (TARGET_SIMD)
- #define HAVE_reduc_umin_scal_v16qi (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v16qi (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v16qi (TARGET_SIMD)
- #define HAVE_reduc_umax_scal_v4hi (TARGET_SIMD)
- #define HAVE_reduc_umin_scal_v4hi (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v4hi (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v4hi (TARGET_SIMD)
- #define HAVE_reduc_umax_scal_v8hi (TARGET_SIMD)
- #define HAVE_reduc_umin_scal_v8hi (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v8hi (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v8hi (TARGET_SIMD)
- #define HAVE_reduc_umax_scal_v2si (TARGET_SIMD)
- #define HAVE_reduc_umin_scal_v2si (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v2si (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v2si (TARGET_SIMD)
- #define HAVE_reduc_umax_scal_v4si (TARGET_SIMD)
- #define HAVE_reduc_umin_scal_v4si (TARGET_SIMD)
- #define HAVE_reduc_smax_scal_v4si (TARGET_SIMD)
- #define HAVE_reduc_smin_scal_v4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv16qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv8hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv4si (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv4bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv8bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv2di (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv4hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv8hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv2sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv4sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_bslv2df (TARGET_SIMD)
- #define HAVE_aarch64_simd_bsldi (TARGET_SIMD)
- #define HAVE_aarch64_simd_bsldf (TARGET_SIMD)
- #define HAVE_vcond_mask_v8qiv8qi (TARGET_SIMD)
- #define HAVE_vcond_mask_v16qiv16qi (TARGET_SIMD)
- #define HAVE_vcond_mask_v4hiv4hi (TARGET_SIMD)
- #define HAVE_vcond_mask_v8hiv8hi (TARGET_SIMD)
- #define HAVE_vcond_mask_v2siv2si (TARGET_SIMD)
- #define HAVE_vcond_mask_v4siv4si (TARGET_SIMD)
- #define HAVE_vcond_mask_v2div2di (TARGET_SIMD)
- #define HAVE_vcond_mask_v2sfv2si (TARGET_SIMD)
- #define HAVE_vcond_mask_v4sfv4si (TARGET_SIMD)
- #define HAVE_vcond_mask_v2dfv2di (TARGET_SIMD)
- #define HAVE_vcond_mask_didi (TARGET_SIMD)
- #define HAVE_vec_cmpv8qiv8qi (TARGET_SIMD)
- #define HAVE_vec_cmpv16qiv16qi (TARGET_SIMD)
- #define HAVE_vec_cmpv4hiv4hi (TARGET_SIMD)
- #define HAVE_vec_cmpv8hiv8hi (TARGET_SIMD)
- #define HAVE_vec_cmpv2siv2si (TARGET_SIMD)
- #define HAVE_vec_cmpv4siv4si (TARGET_SIMD)
- #define HAVE_vec_cmpv2div2di (TARGET_SIMD)
- #define HAVE_vec_cmpdidi (TARGET_SIMD)
- #define HAVE_vec_cmpv2sfv2si (TARGET_SIMD)
- #define HAVE_vec_cmpv4sfv4si (TARGET_SIMD)
- #define HAVE_vec_cmpv2dfv2di (TARGET_SIMD)
- #define HAVE_vec_cmpuv8qiv8qi (TARGET_SIMD)
- #define HAVE_vec_cmpuv16qiv16qi (TARGET_SIMD)
- #define HAVE_vec_cmpuv4hiv4hi (TARGET_SIMD)
- #define HAVE_vec_cmpuv8hiv8hi (TARGET_SIMD)
- #define HAVE_vec_cmpuv2siv2si (TARGET_SIMD)
- #define HAVE_vec_cmpuv4siv4si (TARGET_SIMD)
- #define HAVE_vec_cmpuv2div2di (TARGET_SIMD)
- #define HAVE_vec_cmpudidi (TARGET_SIMD)
- #define HAVE_vcondv8qiv8qi (TARGET_SIMD)
- #define HAVE_vcondv16qiv16qi (TARGET_SIMD)
- #define HAVE_vcondv4hiv4hi (TARGET_SIMD)
- #define HAVE_vcondv8hiv8hi (TARGET_SIMD)
- #define HAVE_vcondv2siv2si (TARGET_SIMD)
- #define HAVE_vcondv4siv4si (TARGET_SIMD)
- #define HAVE_vcondv2div2di (TARGET_SIMD)
- #define HAVE_vcondv2sfv2sf (TARGET_SIMD)
- #define HAVE_vcondv4sfv4sf (TARGET_SIMD)
- #define HAVE_vcondv2dfv2df (TARGET_SIMD)
- #define HAVE_vconddidi (TARGET_SIMD)
- #define HAVE_vcondv2siv2sf (TARGET_SIMD)
- #define HAVE_vcondv2sfv2si (TARGET_SIMD)
- #define HAVE_vcondv4siv4sf (TARGET_SIMD)
- #define HAVE_vcondv4sfv4si (TARGET_SIMD)
- #define HAVE_vcondv2div2df (TARGET_SIMD)
- #define HAVE_vcondv2dfv2di (TARGET_SIMD)
- #define HAVE_vconduv8qiv8qi (TARGET_SIMD)
- #define HAVE_vconduv16qiv16qi (TARGET_SIMD)
- #define HAVE_vconduv4hiv4hi (TARGET_SIMD)
- #define HAVE_vconduv8hiv8hi (TARGET_SIMD)
- #define HAVE_vconduv2siv2si (TARGET_SIMD)
- #define HAVE_vconduv4siv4si (TARGET_SIMD)
- #define HAVE_vconduv2div2di (TARGET_SIMD)
- #define HAVE_vcondudidi (TARGET_SIMD)
- #define HAVE_vconduv2sfv2si (TARGET_SIMD)
- #define HAVE_vconduv4sfv4si (TARGET_SIMD)
- #define HAVE_vconduv2dfv2di (TARGET_SIMD)
- #define HAVE_aarch64_combinev8qi (TARGET_SIMD)
- #define HAVE_aarch64_combinev4hi (TARGET_SIMD)
- #define HAVE_aarch64_combinev4bf (TARGET_SIMD)
- #define HAVE_aarch64_combinev4hf (TARGET_SIMD)
- #define HAVE_aarch64_combinev2si (TARGET_SIMD)
- #define HAVE_aarch64_combinev2sf (TARGET_SIMD)
- #define HAVE_aarch64_combinedi (TARGET_SIMD)
- #define HAVE_aarch64_combinedf (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinev8qi (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinev4hi (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinev4bf (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinev4hf (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinev2si (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinev2sf (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinedi (TARGET_SIMD)
- #define HAVE_aarch64_simd_combinedf (TARGET_SIMD)
- #define HAVE_aarch64_saddl2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_saddl2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_saddl2v4si (TARGET_SIMD)
- #define HAVE_aarch64_uaddl2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_uaddl2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_uaddl2v4si (TARGET_SIMD)
- #define HAVE_aarch64_ssubl2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ssubl2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ssubl2v4si (TARGET_SIMD)
- #define HAVE_aarch64_usubl2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_usubl2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_usubl2v4si (TARGET_SIMD)
- #define HAVE_widen_ssumv16qi3 (TARGET_SIMD)
- #define HAVE_widen_ssumv8hi3 (TARGET_SIMD)
- #define HAVE_widen_ssumv4si3 (TARGET_SIMD)
- #define HAVE_widen_ssumv8qi3 (TARGET_SIMD)
- #define HAVE_widen_ssumv4hi3 (TARGET_SIMD)
- #define HAVE_widen_ssumv2si3 (TARGET_SIMD)
- #define HAVE_widen_usumv16qi3 (TARGET_SIMD)
- #define HAVE_widen_usumv8hi3 (TARGET_SIMD)
- #define HAVE_widen_usumv4si3 (TARGET_SIMD)
- #define HAVE_widen_usumv8qi3 (TARGET_SIMD)
- #define HAVE_widen_usumv4hi3 (TARGET_SIMD)
- #define HAVE_widen_usumv2si3 (TARGET_SIMD)
- #define HAVE_aarch64_saddw2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_saddw2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_saddw2v4si (TARGET_SIMD)
- #define HAVE_aarch64_uaddw2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_uaddw2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_uaddw2v4si (TARGET_SIMD)
- #define HAVE_aarch64_ssubw2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ssubw2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ssubw2v4si (TARGET_SIMD)
- #define HAVE_aarch64_usubw2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_usubw2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_usubw2v4si (TARGET_SIMD)
- #define HAVE_avgv8qi3_floor (TARGET_SIMD)
- #define HAVE_uavgv8qi3_floor (TARGET_SIMD)
- #define HAVE_avgv16qi3_floor (TARGET_SIMD)
- #define HAVE_uavgv16qi3_floor (TARGET_SIMD)
- #define HAVE_avgv4hi3_floor (TARGET_SIMD)
- #define HAVE_uavgv4hi3_floor (TARGET_SIMD)
- #define HAVE_avgv8hi3_floor (TARGET_SIMD)
- #define HAVE_uavgv8hi3_floor (TARGET_SIMD)
- #define HAVE_avgv2si3_floor (TARGET_SIMD)
- #define HAVE_uavgv2si3_floor (TARGET_SIMD)
- #define HAVE_avgv4si3_floor (TARGET_SIMD)
- #define HAVE_uavgv4si3_floor (TARGET_SIMD)
- #define HAVE_avgv8qi3_ceil (TARGET_SIMD)
- #define HAVE_uavgv8qi3_ceil (TARGET_SIMD)
- #define HAVE_avgv16qi3_ceil (TARGET_SIMD)
- #define HAVE_uavgv16qi3_ceil (TARGET_SIMD)
- #define HAVE_avgv4hi3_ceil (TARGET_SIMD)
- #define HAVE_uavgv4hi3_ceil (TARGET_SIMD)
- #define HAVE_avgv8hi3_ceil (TARGET_SIMD)
- #define HAVE_uavgv8hi3_ceil (TARGET_SIMD)
- #define HAVE_avgv2si3_ceil (TARGET_SIMD)
- #define HAVE_uavgv2si3_ceil (TARGET_SIMD)
- #define HAVE_avgv4si3_ceil (TARGET_SIMD)
- #define HAVE_uavgv4si3_ceil (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2v4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2v4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_laneqv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_laneqv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_laneqv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_laneqv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlal2_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmlsl2_nv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2v4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_laneqv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_laneqv4si (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_nv8hi (TARGET_SIMD)
- #define HAVE_aarch64_sqdmull2_nv4si (TARGET_SIMD)
- #define HAVE_sqrtv4hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_sqrtv8hf2 ((TARGET_SIMD) && (TARGET_SIMD_F16INST))
- #define HAVE_sqrtv2sf2 (TARGET_SIMD)
- #define HAVE_sqrtv4sf2 (TARGET_SIMD)
- #define HAVE_sqrtv2df2 (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv16qi (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv8hi (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv4si (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv2di (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv8hf (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv4sf (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv2df (TARGET_SIMD)
- #define HAVE_vec_load_lanesoiv8bf (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv16qi (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv8hi (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv4si (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv2di (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv8hf (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv4sf (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv2df (TARGET_SIMD)
- #define HAVE_vec_store_lanesoiv8bf (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv16qi (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv8hi (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv4si (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv2di (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv8hf (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv4sf (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv2df (TARGET_SIMD)
- #define HAVE_vec_load_lanesciv8bf (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv16qi (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv8hi (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv4si (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv2di (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv8hf (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv4sf (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv2df (TARGET_SIMD)
- #define HAVE_vec_store_lanesciv8bf (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv16qi (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv8hi (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv4si (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv2di (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv8hf (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv4sf (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv2df (TARGET_SIMD)
- #define HAVE_vec_load_lanesxiv8bf (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv16qi (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv8hi (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv4si (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv2di (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv8hf (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv4sf (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv2df (TARGET_SIMD)
- #define HAVE_vec_store_lanesxiv8bf (TARGET_SIMD)
- #define HAVE_movoi (TARGET_SIMD)
- #define HAVE_movci (TARGET_SIMD)
- #define HAVE_movxi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3di (TARGET_SIMD)
- #define HAVE_aarch64_ld1x3df (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4di (TARGET_SIMD)
- #define HAVE_aarch64_ld1x4df (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v2si (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v4si (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v2di (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st1x2v2df (TARGET_SIMD)
- #define HAVE_aarch64_st1x2di (TARGET_SIMD)
- #define HAVE_aarch64_st1x2df (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v2si (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v4si (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v2di (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st1x3v2df (TARGET_SIMD)
- #define HAVE_aarch64_st1x3di (TARGET_SIMD)
- #define HAVE_aarch64_st1x3df (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v2si (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v4si (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v2di (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st1x4v2df (TARGET_SIMD)
- #define HAVE_aarch64_st1x4di (TARGET_SIMD)
- #define HAVE_aarch64_st1x4df (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv2si (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv2si (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv2si (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv4si (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv4si (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv4si (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv2di (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv2di (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv2di (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld2rv2df (TARGET_SIMD)
- #define HAVE_aarch64_ld3rv2df (TARGET_SIMD)
- #define HAVE_aarch64_ld4rv2df (TARGET_SIMD)
- #define HAVE_aarch64_ld2rdi (TARGET_SIMD)
- #define HAVE_aarch64_ld3rdi (TARGET_SIMD)
- #define HAVE_aarch64_ld4rdi (TARGET_SIMD)
- #define HAVE_aarch64_ld2rdf (TARGET_SIMD)
- #define HAVE_aarch64_ld3rdf (TARGET_SIMD)
- #define HAVE_aarch64_ld4rdf (TARGET_SIMD)
- #define HAVE_aarch64_ld2v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld2v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld2v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld2di (TARGET_SIMD)
- #define HAVE_aarch64_ld2df (TARGET_SIMD)
- #define HAVE_aarch64_ld3v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld3v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld3v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld3di (TARGET_SIMD)
- #define HAVE_aarch64_ld3df (TARGET_SIMD)
- #define HAVE_aarch64_ld4v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld4v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld4v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld4di (TARGET_SIMD)
- #define HAVE_aarch64_ld4df (TARGET_SIMD)
- #define HAVE_aarch64_ld1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld1v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld1v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld1v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld3v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld4v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld2v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld3v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld4v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld3v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld4v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld3v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld4v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld2v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld3v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld4v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld3v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld4v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v4si (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v2di (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v2df (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v2si (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2v2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2di (TARGET_SIMD)
- #define HAVE_aarch64_ld1x2df (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_ld2_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_ld3_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_ld4_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoiv8qi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoiv4hi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoiv4bf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoiv4hf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoiv2si (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoiv2sf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoidi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregoidf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregciv8qi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregciv4hi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregciv4bf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregciv4hf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregciv2si (TARGET_SIMD)
- #define HAVE_aarch64_get_dregciv2sf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregcidi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregcidf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxiv8qi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxiv4hi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxiv4bf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxiv4hf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxiv2si (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxiv2sf (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxidi (TARGET_SIMD)
- #define HAVE_aarch64_get_dregxidf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv16qi (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv16qi (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv16qi (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv8hi (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv8hi (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv8hi (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv4si (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv4si (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv4si (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv2di (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv2di (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv2di (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv8hf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv8hf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv8hf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv4sf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv4sf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv4sf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv2df (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv2df (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv2df (TARGET_SIMD)
- #define HAVE_aarch64_get_qregoiv8bf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregciv8bf (TARGET_SIMD)
- #define HAVE_aarch64_get_qregxiv8bf (TARGET_SIMD)
- #define HAVE_vec_permv8qi (TARGET_SIMD)
- #define HAVE_vec_permv16qi (TARGET_SIMD)
- #define HAVE_aarch64_st2v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st2v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st2v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st2v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st2v2si (TARGET_SIMD)
- #define HAVE_aarch64_st2v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st2di (TARGET_SIMD)
- #define HAVE_aarch64_st2df (TARGET_SIMD)
- #define HAVE_aarch64_st3v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st3v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st3v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st3v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st3v2si (TARGET_SIMD)
- #define HAVE_aarch64_st3v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st3di (TARGET_SIMD)
- #define HAVE_aarch64_st3df (TARGET_SIMD)
- #define HAVE_aarch64_st4v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st4v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st4v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st4v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st4v2si (TARGET_SIMD)
- #define HAVE_aarch64_st4v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st4di (TARGET_SIMD)
- #define HAVE_aarch64_st4df (TARGET_SIMD)
- #define HAVE_aarch64_st2v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st3v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st4v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st2v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st3v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st4v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st2v4si (TARGET_SIMD)
- #define HAVE_aarch64_st3v4si (TARGET_SIMD)
- #define HAVE_aarch64_st4v4si (TARGET_SIMD)
- #define HAVE_aarch64_st2v2di (TARGET_SIMD)
- #define HAVE_aarch64_st3v2di (TARGET_SIMD)
- #define HAVE_aarch64_st4v2di (TARGET_SIMD)
- #define HAVE_aarch64_st2v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st3v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st4v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st2v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st3v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st4v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st2v2df (TARGET_SIMD)
- #define HAVE_aarch64_st3v2df (TARGET_SIMD)
- #define HAVE_aarch64_st4v2df (TARGET_SIMD)
- #define HAVE_aarch64_st2v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st3v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st4v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev8qi (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev16qi (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev4hi (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev8hi (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev2si (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev4si (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev4bf (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev8bf (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev2di (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev4hf (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev8hf (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev2sf (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev4sf (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanev2df (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanedi (TARGET_SIMD)
- #define HAVE_aarch64_st2_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_st3_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_st4_lanedf (TARGET_SIMD)
- #define HAVE_aarch64_st1v8qi (TARGET_SIMD)
- #define HAVE_aarch64_st1v16qi (TARGET_SIMD)
- #define HAVE_aarch64_st1v4hi (TARGET_SIMD)
- #define HAVE_aarch64_st1v8hi (TARGET_SIMD)
- #define HAVE_aarch64_st1v2si (TARGET_SIMD)
- #define HAVE_aarch64_st1v4si (TARGET_SIMD)
- #define HAVE_aarch64_st1v2di (TARGET_SIMD)
- #define HAVE_aarch64_st1v4hf (TARGET_SIMD)
- #define HAVE_aarch64_st1v8hf (TARGET_SIMD)
- #define HAVE_aarch64_st1v4bf (TARGET_SIMD)
- #define HAVE_aarch64_st1v8bf (TARGET_SIMD)
- #define HAVE_aarch64_st1v2sf (TARGET_SIMD)
- #define HAVE_aarch64_st1v4sf (TARGET_SIMD)
- #define HAVE_aarch64_st1v2df (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv16qi (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv16qi (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv16qi (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv8hi (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv8hi (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv8hi (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv4si (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv4si (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv4si (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv2di (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv2di (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv2di (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv8hf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv8hf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv8hf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv4sf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv4sf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv4sf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv2df (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv2df (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv2df (TARGET_SIMD)
- #define HAVE_aarch64_set_qregoiv8bf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregciv8bf (TARGET_SIMD)
- #define HAVE_aarch64_set_qregxiv8bf (TARGET_SIMD)
- #define HAVE_vec_initv8qiqi (TARGET_SIMD)
- #define HAVE_vec_initv16qiqi (TARGET_SIMD)
- #define HAVE_vec_initv4hihi (TARGET_SIMD)
- #define HAVE_vec_initv8hihi (TARGET_SIMD)
- #define HAVE_vec_initv2sisi (TARGET_SIMD)
- #define HAVE_vec_initv4sisi (TARGET_SIMD)
- #define HAVE_vec_initv2didi (TARGET_SIMD)
- #define HAVE_vec_initv4hfhf (TARGET_SIMD)
- #define HAVE_vec_initv8hfhf (TARGET_SIMD)
- #define HAVE_vec_initv4bfbf (TARGET_SIMD)
- #define HAVE_vec_initv8bfbf (TARGET_SIMD)
- #define HAVE_vec_initv2sfsf (TARGET_SIMD)
- #define HAVE_vec_initv4sfsf (TARGET_SIMD)
- #define HAVE_vec_initv2dfdf (TARGET_SIMD)
- #define HAVE_vec_initv16qiv8qi (TARGET_SIMD)
- #define HAVE_vec_initv8hiv4hi (TARGET_SIMD)
- #define HAVE_vec_initv4siv2si (TARGET_SIMD)
- #define HAVE_vec_initv8hfv4hf (TARGET_SIMD)
- #define HAVE_vec_initv4sfv2sf (TARGET_SIMD)
- #define HAVE_vec_initv8bfv4bf (TARGET_SIMD)
- #define HAVE_vec_extractv8qiqi (TARGET_SIMD)
- #define HAVE_vec_extractv16qiqi (TARGET_SIMD)
- #define HAVE_vec_extractv4hihi (TARGET_SIMD)
- #define HAVE_vec_extractv8hihi (TARGET_SIMD)
- #define HAVE_vec_extractv2sisi (TARGET_SIMD)
- #define HAVE_vec_extractv4sisi (TARGET_SIMD)
- #define HAVE_vec_extractv2didi (TARGET_SIMD)
- #define HAVE_vec_extractv4hfhf (TARGET_SIMD)
- #define HAVE_vec_extractv8hfhf (TARGET_SIMD)
- #define HAVE_vec_extractv4bfbf (TARGET_SIMD)
- #define HAVE_vec_extractv8bfbf (TARGET_SIMD)
- #define HAVE_vec_extractv2sfsf (TARGET_SIMD)
- #define HAVE_vec_extractv4sfsf (TARGET_SIMD)
- #define HAVE_vec_extractv2dfdf (TARGET_SIMD)
- #define HAVE_vec_extractv16qiv8qi (TARGET_SIMD)
- #define HAVE_vec_extractv8hiv4hi (TARGET_SIMD)
- #define HAVE_vec_extractv4siv2si (TARGET_SIMD)
- #define HAVE_vec_extractv8hfv4hf (TARGET_SIMD)
- #define HAVE_vec_extractv8bfv4bf (TARGET_SIMD)
- #define HAVE_vec_extractv4sfv2sf (TARGET_SIMD)
- #define HAVE_vec_extractv2dfv1df (TARGET_SIMD)
- #define HAVE_aarch64_fmlal_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlsl_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlalq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlslq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlal_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlsl_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlalq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlslq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlal_lane_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlsl_lane_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlal_lane_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlsl_lane_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlalq_laneq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlslq_laneq_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlalq_laneq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlslq_laneq_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlal_laneq_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlsl_laneq_lowv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlal_laneq_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlsl_laneq_highv2sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlalq_lane_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlslq_lane_lowv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlalq_lane_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_fmlslq_lane_highv4sf (TARGET_F16FML)
- #define HAVE_aarch64_vget_lo_halfv8bf (TARGET_BF16_SIMD)
- #define HAVE_aarch64_vget_hi_halfv8bf (TARGET_BF16_SIMD)
- #define HAVE_atomic_compare_and_swapqi 1
- #define HAVE_atomic_compare_and_swaphi 1
- #define HAVE_atomic_compare_and_swapsi 1
- #define HAVE_atomic_compare_and_swapdi 1
- #define HAVE_atomic_compare_and_swapti 1
- #define HAVE_atomic_exchangeqi 1
- #define HAVE_atomic_exchangehi 1
- #define HAVE_atomic_exchangesi 1
- #define HAVE_atomic_exchangedi 1
- #define HAVE_atomic_addqi 1
- #define HAVE_atomic_subqi 1
- #define HAVE_atomic_orqi 1
- #define HAVE_atomic_xorqi 1
- #define HAVE_atomic_andqi 1
- #define HAVE_atomic_addhi 1
- #define HAVE_atomic_subhi 1
- #define HAVE_atomic_orhi 1
- #define HAVE_atomic_xorhi 1
- #define HAVE_atomic_andhi 1
- #define HAVE_atomic_addsi 1
- #define HAVE_atomic_subsi 1
- #define HAVE_atomic_orsi 1
- #define HAVE_atomic_xorsi 1
- #define HAVE_atomic_andsi 1
- #define HAVE_atomic_adddi 1
- #define HAVE_atomic_subdi 1
- #define HAVE_atomic_ordi 1
- #define HAVE_atomic_xordi 1
- #define HAVE_atomic_anddi 1
- #define HAVE_atomic_fetch_addqi 1
- #define HAVE_atomic_fetch_subqi 1
- #define HAVE_atomic_fetch_orqi 1
- #define HAVE_atomic_fetch_xorqi 1
- #define HAVE_atomic_fetch_andqi 1
- #define HAVE_atomic_fetch_addhi 1
- #define HAVE_atomic_fetch_subhi 1
- #define HAVE_atomic_fetch_orhi 1
- #define HAVE_atomic_fetch_xorhi 1
- #define HAVE_atomic_fetch_andhi 1
- #define HAVE_atomic_fetch_addsi 1
- #define HAVE_atomic_fetch_subsi 1
- #define HAVE_atomic_fetch_orsi 1
- #define HAVE_atomic_fetch_xorsi 1
- #define HAVE_atomic_fetch_andsi 1
- #define HAVE_atomic_fetch_adddi 1
- #define HAVE_atomic_fetch_subdi 1
- #define HAVE_atomic_fetch_ordi 1
- #define HAVE_atomic_fetch_xordi 1
- #define HAVE_atomic_fetch_anddi 1
- #define HAVE_atomic_add_fetchqi 1
- #define HAVE_atomic_sub_fetchqi 1
- #define HAVE_atomic_or_fetchqi 1
- #define HAVE_atomic_xor_fetchqi 1
- #define HAVE_atomic_and_fetchqi 1
- #define HAVE_atomic_add_fetchhi 1
- #define HAVE_atomic_sub_fetchhi 1
- #define HAVE_atomic_or_fetchhi 1
- #define HAVE_atomic_xor_fetchhi 1
- #define HAVE_atomic_and_fetchhi 1
- #define HAVE_atomic_add_fetchsi 1
- #define HAVE_atomic_sub_fetchsi 1
- #define HAVE_atomic_or_fetchsi 1
- #define HAVE_atomic_xor_fetchsi 1
- #define HAVE_atomic_and_fetchsi 1
- #define HAVE_atomic_add_fetchdi 1
- #define HAVE_atomic_sub_fetchdi 1
- #define HAVE_atomic_or_fetchdi 1
- #define HAVE_atomic_xor_fetchdi 1
- #define HAVE_atomic_and_fetchdi 1
- #define HAVE_mem_thread_fence 1
- #define HAVE_dmb 1
- #define HAVE_movvnx16qi (TARGET_SVE)
- #define HAVE_movvnx8qi (TARGET_SVE)
- #define HAVE_movvnx4qi (TARGET_SVE)
- #define HAVE_movvnx2qi (TARGET_SVE)
- #define HAVE_movvnx8hi (TARGET_SVE)
- #define HAVE_movvnx4hi (TARGET_SVE)
- #define HAVE_movvnx2hi (TARGET_SVE)
- #define HAVE_movvnx8hf (TARGET_SVE)
- #define HAVE_movvnx4hf (TARGET_SVE)
- #define HAVE_movvnx2hf (TARGET_SVE)
- #define HAVE_movvnx8bf (TARGET_SVE)
- #define HAVE_movvnx4si (TARGET_SVE)
- #define HAVE_movvnx2si (TARGET_SVE)
- #define HAVE_movvnx4sf (TARGET_SVE)
- #define HAVE_movvnx2sf (TARGET_SVE)
- #define HAVE_movvnx2di (TARGET_SVE)
- #define HAVE_movvnx2df (TARGET_SVE)
- #define HAVE_movmisalignvnx16qi (TARGET_SVE)
- #define HAVE_movmisalignvnx8qi (TARGET_SVE)
- #define HAVE_movmisalignvnx4qi (TARGET_SVE)
- #define HAVE_movmisalignvnx2qi (TARGET_SVE)
- #define HAVE_movmisalignvnx8hi (TARGET_SVE)
- #define HAVE_movmisalignvnx4hi (TARGET_SVE)
- #define HAVE_movmisalignvnx2hi (TARGET_SVE)
- #define HAVE_movmisalignvnx8hf (TARGET_SVE)
- #define HAVE_movmisalignvnx4hf (TARGET_SVE)
- #define HAVE_movmisalignvnx2hf (TARGET_SVE)
- #define HAVE_movmisalignvnx8bf (TARGET_SVE)
- #define HAVE_movmisalignvnx4si (TARGET_SVE)
- #define HAVE_movmisalignvnx2si (TARGET_SVE)
- #define HAVE_movmisalignvnx4sf (TARGET_SVE)
- #define HAVE_movmisalignvnx2sf (TARGET_SVE)
- #define HAVE_movmisalignvnx2di (TARGET_SVE)
- #define HAVE_movmisalignvnx2df (TARGET_SVE)
- #define HAVE_aarch64_sve_reload_mem (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx8qi (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx4qi (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx2qi (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx4hi (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx2hi (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx4hf (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx2hf (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx8bf (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx4si (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx2si (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx2sf (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx2di (TARGET_SVE)
- #define HAVE_aarch64_sve_reinterpretvnx2df (TARGET_SVE)
- #define HAVE_movvnx32qi (TARGET_SVE)
- #define HAVE_movvnx16hi (TARGET_SVE)
- #define HAVE_movvnx8si (TARGET_SVE)
- #define HAVE_movvnx4di (TARGET_SVE)
- #define HAVE_movvnx16bf (TARGET_SVE)
- #define HAVE_movvnx16hf (TARGET_SVE)
- #define HAVE_movvnx8sf (TARGET_SVE)
- #define HAVE_movvnx4df (TARGET_SVE)
- #define HAVE_movvnx48qi (TARGET_SVE)
- #define HAVE_movvnx24hi (TARGET_SVE)
- #define HAVE_movvnx12si (TARGET_SVE)
- #define HAVE_movvnx6di (TARGET_SVE)
- #define HAVE_movvnx24bf (TARGET_SVE)
- #define HAVE_movvnx24hf (TARGET_SVE)
- #define HAVE_movvnx12sf (TARGET_SVE)
- #define HAVE_movvnx6df (TARGET_SVE)
- #define HAVE_movvnx64qi (TARGET_SVE)
- #define HAVE_movvnx32hi (TARGET_SVE)
- #define HAVE_movvnx16si (TARGET_SVE)
- #define HAVE_movvnx8di (TARGET_SVE)
- #define HAVE_movvnx32bf (TARGET_SVE)
- #define HAVE_movvnx32hf (TARGET_SVE)
- #define HAVE_movvnx16sf (TARGET_SVE)
- #define HAVE_movvnx8df (TARGET_SVE)
- #define HAVE_movvnx16bi (TARGET_SVE)
- #define HAVE_movvnx8bi (TARGET_SVE)
- #define HAVE_movvnx4bi (TARGET_SVE)
- #define HAVE_movvnx2bi (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx32qivnx16qi (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx16hivnx8hi (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx8sivnx4si (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx4divnx2di (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx16bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx16hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx8sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx4dfvnx2df (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx48qivnx16qi (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx24hivnx8hi (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx12sivnx4si (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx6divnx2di (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx24bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx24hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx12sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx6dfvnx2df (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx64qivnx16qi (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx32hivnx8hi (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx16sivnx4si (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx8divnx2di (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx32bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx32hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx16sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_load_lanesvnx8dfvnx2df (TARGET_SVE)
- #define HAVE_gather_loadvnx2qivnx2di (TARGET_SVE)
- #define HAVE_gather_loadvnx2hivnx2di (TARGET_SVE)
- #define HAVE_gather_loadvnx2hfvnx2di (TARGET_SVE)
- #define HAVE_gather_loadvnx2sivnx2di (TARGET_SVE)
- #define HAVE_gather_loadvnx2sfvnx2di (TARGET_SVE)
- #define HAVE_gather_loadvnx2divnx2di (TARGET_SVE)
- #define HAVE_gather_loadvnx2dfvnx2di (TARGET_SVE)
- #define HAVE_gather_loadvnx4qivnx4si (TARGET_SVE)
- #define HAVE_gather_loadvnx4hivnx4si (TARGET_SVE)
- #define HAVE_gather_loadvnx4hfvnx4si (TARGET_SVE)
- #define HAVE_gather_loadvnx4sivnx4si (TARGET_SVE)
- #define HAVE_gather_loadvnx4sfvnx4si (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx32qivnx16qi (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx16hivnx8hi (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx8sivnx4si (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx4divnx2di (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx16bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx16hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx8sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx4dfvnx2df (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx48qivnx16qi (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx24hivnx8hi (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx12sivnx4si (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx6divnx2di (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx24bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx24hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx12sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx6dfvnx2df (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx64qivnx16qi (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx32hivnx8hi (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx16sivnx4si (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx8divnx2di (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx32bfvnx8bf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx32hfvnx8hf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx16sfvnx4sf (TARGET_SVE)
- #define HAVE_vec_store_lanesvnx8dfvnx2df (TARGET_SVE)
- #define HAVE_scatter_storevnx2qivnx2di (TARGET_SVE)
- #define HAVE_scatter_storevnx2hivnx2di (TARGET_SVE)
- #define HAVE_scatter_storevnx2hfvnx2di (TARGET_SVE)
- #define HAVE_scatter_storevnx2sivnx2di (TARGET_SVE)
- #define HAVE_scatter_storevnx2sfvnx2di (TARGET_SVE)
- #define HAVE_scatter_storevnx2divnx2di (TARGET_SVE)
- #define HAVE_scatter_storevnx2dfvnx2di (TARGET_SVE)
- #define HAVE_scatter_storevnx4qivnx4si (TARGET_SVE)
- #define HAVE_scatter_storevnx4hivnx4si (TARGET_SVE)
- #define HAVE_scatter_storevnx4hfvnx4si (TARGET_SVE)
- #define HAVE_scatter_storevnx4sivnx4si (TARGET_SVE)
- #define HAVE_scatter_storevnx4sfvnx4si (TARGET_SVE)
- #define HAVE_vec_duplicatevnx16qi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx8qi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx4qi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2qi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx8hi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx4hi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2hi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx8hf (TARGET_SVE)
- #define HAVE_vec_duplicatevnx4hf (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2hf (TARGET_SVE)
- #define HAVE_vec_duplicatevnx8bf (TARGET_SVE)
- #define HAVE_vec_duplicatevnx4si (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2si (TARGET_SVE)
- #define HAVE_vec_duplicatevnx4sf (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2sf (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2di (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2df (TARGET_SVE)
- #define HAVE_vec_initvnx16qiqi (TARGET_SVE)
- #define HAVE_vec_initvnx8hihi (TARGET_SVE)
- #define HAVE_vec_initvnx4sisi (TARGET_SVE)
- #define HAVE_vec_initvnx2didi (TARGET_SVE)
- #define HAVE_vec_initvnx8bfbf (TARGET_SVE)
- #define HAVE_vec_initvnx8hfhf (TARGET_SVE)
- #define HAVE_vec_initvnx4sfsf (TARGET_SVE)
- #define HAVE_vec_initvnx2dfdf (TARGET_SVE)
- #define HAVE_vec_duplicatevnx16bi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx8bi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx4bi (TARGET_SVE)
- #define HAVE_vec_duplicatevnx2bi (TARGET_SVE)
- #define HAVE_vec_extractvnx16qiqi (TARGET_SVE)
- #define HAVE_vec_extractvnx8hihi (TARGET_SVE)
- #define HAVE_vec_extractvnx4sisi (TARGET_SVE)
- #define HAVE_vec_extractvnx2didi (TARGET_SVE)
- #define HAVE_vec_extractvnx8bfbf (TARGET_SVE)
- #define HAVE_vec_extractvnx8hfhf (TARGET_SVE)
- #define HAVE_vec_extractvnx4sfsf (TARGET_SVE)
- #define HAVE_vec_extractvnx2dfdf (TARGET_SVE)
- #define HAVE_vec_extractvnx16biqi (TARGET_SVE)
- #define HAVE_vec_extractvnx8bihi (TARGET_SVE)
- #define HAVE_vec_extractvnx4bisi (TARGET_SVE)
- #define HAVE_vec_extractvnx2bidi (TARGET_SVE)
- #define HAVE_absvnx16qi2 (TARGET_SVE)
- #define HAVE_negvnx16qi2 (TARGET_SVE)
- #define HAVE_one_cmplvnx16qi2 (TARGET_SVE)
- #define HAVE_clrsbvnx16qi2 (TARGET_SVE)
- #define HAVE_clzvnx16qi2 (TARGET_SVE)
- #define HAVE_popcountvnx16qi2 (TARGET_SVE)
- #define HAVE_qabsvnx16qi2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_qnegvnx16qi2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_absvnx8hi2 (TARGET_SVE)
- #define HAVE_negvnx8hi2 (TARGET_SVE)
- #define HAVE_one_cmplvnx8hi2 (TARGET_SVE)
- #define HAVE_clrsbvnx8hi2 (TARGET_SVE)
- #define HAVE_clzvnx8hi2 (TARGET_SVE)
- #define HAVE_popcountvnx8hi2 (TARGET_SVE)
- #define HAVE_qabsvnx8hi2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_qnegvnx8hi2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_absvnx4si2 (TARGET_SVE)
- #define HAVE_negvnx4si2 (TARGET_SVE)
- #define HAVE_one_cmplvnx4si2 (TARGET_SVE)
- #define HAVE_clrsbvnx4si2 (TARGET_SVE)
- #define HAVE_clzvnx4si2 (TARGET_SVE)
- #define HAVE_popcountvnx4si2 (TARGET_SVE)
- #define HAVE_qabsvnx4si2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_qnegvnx4si2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_absvnx2di2 (TARGET_SVE)
- #define HAVE_negvnx2di2 (TARGET_SVE)
- #define HAVE_one_cmplvnx2di2 (TARGET_SVE)
- #define HAVE_clrsbvnx2di2 (TARGET_SVE)
- #define HAVE_clzvnx2di2 (TARGET_SVE)
- #define HAVE_popcountvnx2di2 (TARGET_SVE)
- #define HAVE_qabsvnx2di2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_qnegvnx2di2 ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_absvnx16qi (TARGET_SVE)
- #define HAVE_cond_negvnx16qi (TARGET_SVE)
- #define HAVE_cond_one_cmplvnx16qi (TARGET_SVE)
- #define HAVE_cond_clrsbvnx16qi (TARGET_SVE)
- #define HAVE_cond_clzvnx16qi (TARGET_SVE)
- #define HAVE_cond_popcountvnx16qi (TARGET_SVE)
- #define HAVE_cond_qabsvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_qnegvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_absvnx8hi (TARGET_SVE)
- #define HAVE_cond_negvnx8hi (TARGET_SVE)
- #define HAVE_cond_one_cmplvnx8hi (TARGET_SVE)
- #define HAVE_cond_clrsbvnx8hi (TARGET_SVE)
- #define HAVE_cond_clzvnx8hi (TARGET_SVE)
- #define HAVE_cond_popcountvnx8hi (TARGET_SVE)
- #define HAVE_cond_qabsvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_qnegvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_absvnx4si (TARGET_SVE)
- #define HAVE_cond_negvnx4si (TARGET_SVE)
- #define HAVE_cond_one_cmplvnx4si (TARGET_SVE)
- #define HAVE_cond_clrsbvnx4si (TARGET_SVE)
- #define HAVE_cond_clzvnx4si (TARGET_SVE)
- #define HAVE_cond_popcountvnx4si (TARGET_SVE)
- #define HAVE_cond_qabsvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_qnegvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_absvnx2di (TARGET_SVE)
- #define HAVE_cond_negvnx2di (TARGET_SVE)
- #define HAVE_cond_one_cmplvnx2di (TARGET_SVE)
- #define HAVE_cond_clrsbvnx2di (TARGET_SVE)
- #define HAVE_cond_clzvnx2di (TARGET_SVE)
- #define HAVE_cond_popcountvnx2di (TARGET_SVE)
- #define HAVE_cond_qabsvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_qnegvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_extendvnx8qivnx8hi2 (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_zero_extendvnx8qivnx8hi2 (TARGET_SVE && (~0x81 & 0x81) == 0)
- #define HAVE_extendvnx4qivnx4hi2 (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_zero_extendvnx4qivnx4hi2 (TARGET_SVE && (~0x41 & 0x41) == 0)
- #define HAVE_extendvnx2qivnx2hi2 (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_zero_extendvnx2qivnx2hi2 (TARGET_SVE && (~0x21 & 0x21) == 0)
- #define HAVE_extendvnx4qivnx4si2 (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_zero_extendvnx4qivnx4si2 (TARGET_SVE && (~0x43 & 0x41) == 0)
- #define HAVE_extendvnx4hivnx4si2 (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_zero_extendvnx4hivnx4si2 (TARGET_SVE && (~0x43 & 0x42) == 0)
- #define HAVE_extendvnx2qivnx2si2 (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_zero_extendvnx2qivnx2si2 (TARGET_SVE && (~0x23 & 0x21) == 0)
- #define HAVE_extendvnx2hivnx2si2 (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_zero_extendvnx2hivnx2si2 (TARGET_SVE && (~0x23 & 0x22) == 0)
- #define HAVE_extendvnx2qivnx2di2 (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_zero_extendvnx2qivnx2di2 (TARGET_SVE && (~0x27 & 0x21) == 0)
- #define HAVE_extendvnx2hivnx2di2 (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_zero_extendvnx2hivnx2di2 (TARGET_SVE && (~0x27 & 0x22) == 0)
- #define HAVE_extendvnx2sivnx2di2 (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_zero_extendvnx2sivnx2di2 (TARGET_SVE && (~0x27 & 0x24) == 0)
- #define HAVE_aarch64_pred_cnotvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_pred_cnotvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_pred_cnotvnx4si (TARGET_SVE)
- #define HAVE_aarch64_pred_cnotvnx2di (TARGET_SVE)
- #define HAVE_cond_cnotvnx16qi (TARGET_SVE)
- #define HAVE_cond_cnotvnx8hi (TARGET_SVE)
- #define HAVE_cond_cnotvnx4si (TARGET_SVE)
- #define HAVE_cond_cnotvnx2di (TARGET_SVE)
- #define HAVE_absvnx8hf2 (TARGET_SVE)
- #define HAVE_negvnx8hf2 (TARGET_SVE)
- #define HAVE_frecpxvnx8hf2 (TARGET_SVE)
- #define HAVE_roundvnx8hf2 (TARGET_SVE)
- #define HAVE_nearbyintvnx8hf2 (TARGET_SVE)
- #define HAVE_floorvnx8hf2 (TARGET_SVE)
- #define HAVE_frintnvnx8hf2 (TARGET_SVE)
- #define HAVE_ceilvnx8hf2 (TARGET_SVE)
- #define HAVE_rintvnx8hf2 (TARGET_SVE)
- #define HAVE_btruncvnx8hf2 (TARGET_SVE)
- #define HAVE_absvnx4sf2 (TARGET_SVE)
- #define HAVE_negvnx4sf2 (TARGET_SVE)
- #define HAVE_frecpxvnx4sf2 (TARGET_SVE)
- #define HAVE_roundvnx4sf2 (TARGET_SVE)
- #define HAVE_nearbyintvnx4sf2 (TARGET_SVE)
- #define HAVE_floorvnx4sf2 (TARGET_SVE)
- #define HAVE_frintnvnx4sf2 (TARGET_SVE)
- #define HAVE_ceilvnx4sf2 (TARGET_SVE)
- #define HAVE_rintvnx4sf2 (TARGET_SVE)
- #define HAVE_btruncvnx4sf2 (TARGET_SVE)
- #define HAVE_absvnx2df2 (TARGET_SVE)
- #define HAVE_negvnx2df2 (TARGET_SVE)
- #define HAVE_frecpxvnx2df2 (TARGET_SVE)
- #define HAVE_roundvnx2df2 (TARGET_SVE)
- #define HAVE_nearbyintvnx2df2 (TARGET_SVE)
- #define HAVE_floorvnx2df2 (TARGET_SVE)
- #define HAVE_frintnvnx2df2 (TARGET_SVE)
- #define HAVE_ceilvnx2df2 (TARGET_SVE)
- #define HAVE_rintvnx2df2 (TARGET_SVE)
- #define HAVE_btruncvnx2df2 (TARGET_SVE)
- #define HAVE_cond_absvnx8hf (TARGET_SVE)
- #define HAVE_cond_negvnx8hf (TARGET_SVE)
- #define HAVE_cond_frecpxvnx8hf (TARGET_SVE)
- #define HAVE_cond_roundvnx8hf (TARGET_SVE)
- #define HAVE_cond_nearbyintvnx8hf (TARGET_SVE)
- #define HAVE_cond_floorvnx8hf (TARGET_SVE)
- #define HAVE_cond_frintnvnx8hf (TARGET_SVE)
- #define HAVE_cond_ceilvnx8hf (TARGET_SVE)
- #define HAVE_cond_rintvnx8hf (TARGET_SVE)
- #define HAVE_cond_btruncvnx8hf (TARGET_SVE)
- #define HAVE_cond_sqrtvnx8hf (TARGET_SVE)
- #define HAVE_cond_absvnx4sf (TARGET_SVE)
- #define HAVE_cond_negvnx4sf (TARGET_SVE)
- #define HAVE_cond_frecpxvnx4sf (TARGET_SVE)
- #define HAVE_cond_roundvnx4sf (TARGET_SVE)
- #define HAVE_cond_nearbyintvnx4sf (TARGET_SVE)
- #define HAVE_cond_floorvnx4sf (TARGET_SVE)
- #define HAVE_cond_frintnvnx4sf (TARGET_SVE)
- #define HAVE_cond_ceilvnx4sf (TARGET_SVE)
- #define HAVE_cond_rintvnx4sf (TARGET_SVE)
- #define HAVE_cond_btruncvnx4sf (TARGET_SVE)
- #define HAVE_cond_sqrtvnx4sf (TARGET_SVE)
- #define HAVE_cond_absvnx2df (TARGET_SVE)
- #define HAVE_cond_negvnx2df (TARGET_SVE)
- #define HAVE_cond_frecpxvnx2df (TARGET_SVE)
- #define HAVE_cond_roundvnx2df (TARGET_SVE)
- #define HAVE_cond_nearbyintvnx2df (TARGET_SVE)
- #define HAVE_cond_floorvnx2df (TARGET_SVE)
- #define HAVE_cond_frintnvnx2df (TARGET_SVE)
- #define HAVE_cond_ceilvnx2df (TARGET_SVE)
- #define HAVE_cond_rintvnx2df (TARGET_SVE)
- #define HAVE_cond_btruncvnx2df (TARGET_SVE)
- #define HAVE_cond_sqrtvnx2df (TARGET_SVE)
- #define HAVE_sqrtvnx8hf2 (TARGET_SVE)
- #define HAVE_sqrtvnx4sf2 (TARGET_SVE)
- #define HAVE_sqrtvnx2df2 (TARGET_SVE)
- #define HAVE_rsqrtvnx4sf2 (TARGET_SVE)
- #define HAVE_rsqrtvnx2df2 (TARGET_SVE)
- #define HAVE_aarch64_rsqrtevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_rsqrtevnx2df (TARGET_SVE)
- #define HAVE_aarch64_rsqrtsvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_rsqrtsvnx2df (TARGET_SVE)
- #define HAVE_one_cmplvnx16bi2 (TARGET_SVE)
- #define HAVE_one_cmplvnx8bi2 (TARGET_SVE)
- #define HAVE_one_cmplvnx4bi2 (TARGET_SVE)
- #define HAVE_one_cmplvnx2bi2 (TARGET_SVE)
- #define HAVE_mulvnx16qi3 (TARGET_SVE)
- #define HAVE_smaxvnx16qi3 (TARGET_SVE)
- #define HAVE_sminvnx16qi3 (TARGET_SVE)
- #define HAVE_umaxvnx16qi3 (TARGET_SVE)
- #define HAVE_uminvnx16qi3 (TARGET_SVE)
- #define HAVE_mulvnx8hi3 (TARGET_SVE)
- #define HAVE_smaxvnx8hi3 (TARGET_SVE)
- #define HAVE_sminvnx8hi3 (TARGET_SVE)
- #define HAVE_umaxvnx8hi3 (TARGET_SVE)
- #define HAVE_uminvnx8hi3 (TARGET_SVE)
- #define HAVE_mulvnx4si3 (TARGET_SVE)
- #define HAVE_smaxvnx4si3 (TARGET_SVE)
- #define HAVE_sminvnx4si3 (TARGET_SVE)
- #define HAVE_umaxvnx4si3 (TARGET_SVE)
- #define HAVE_uminvnx4si3 (TARGET_SVE)
- #define HAVE_mulvnx2di3 (TARGET_SVE)
- #define HAVE_smaxvnx2di3 (TARGET_SVE)
- #define HAVE_sminvnx2di3 (TARGET_SVE)
- #define HAVE_umaxvnx2di3 (TARGET_SVE)
- #define HAVE_uminvnx2di3 (TARGET_SVE)
- #define HAVE_cond_addvnx16qi (TARGET_SVE)
- #define HAVE_cond_subvnx16qi (TARGET_SVE)
- #define HAVE_cond_mulvnx16qi (TARGET_SVE)
- #define HAVE_cond_smaxvnx16qi (TARGET_SVE)
- #define HAVE_cond_umaxvnx16qi (TARGET_SVE)
- #define HAVE_cond_sminvnx16qi (TARGET_SVE)
- #define HAVE_cond_uminvnx16qi (TARGET_SVE)
- #define HAVE_cond_ashlvnx16qi (TARGET_SVE)
- #define HAVE_cond_ashrvnx16qi (TARGET_SVE)
- #define HAVE_cond_lshrvnx16qi (TARGET_SVE)
- #define HAVE_cond_andvnx16qi (TARGET_SVE)
- #define HAVE_cond_iorvnx16qi (TARGET_SVE)
- #define HAVE_cond_xorvnx16qi (TARGET_SVE)
- #define HAVE_cond_ssaddvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_usaddvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_sssubvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_ussubvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_addvnx8hi (TARGET_SVE)
- #define HAVE_cond_subvnx8hi (TARGET_SVE)
- #define HAVE_cond_mulvnx8hi (TARGET_SVE)
- #define HAVE_cond_smaxvnx8hi (TARGET_SVE)
- #define HAVE_cond_umaxvnx8hi (TARGET_SVE)
- #define HAVE_cond_sminvnx8hi (TARGET_SVE)
- #define HAVE_cond_uminvnx8hi (TARGET_SVE)
- #define HAVE_cond_ashlvnx8hi (TARGET_SVE)
- #define HAVE_cond_ashrvnx8hi (TARGET_SVE)
- #define HAVE_cond_lshrvnx8hi (TARGET_SVE)
- #define HAVE_cond_andvnx8hi (TARGET_SVE)
- #define HAVE_cond_iorvnx8hi (TARGET_SVE)
- #define HAVE_cond_xorvnx8hi (TARGET_SVE)
- #define HAVE_cond_ssaddvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_usaddvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_sssubvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_ussubvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_addvnx4si (TARGET_SVE)
- #define HAVE_cond_subvnx4si (TARGET_SVE)
- #define HAVE_cond_mulvnx4si (TARGET_SVE)
- #define HAVE_cond_smaxvnx4si (TARGET_SVE)
- #define HAVE_cond_umaxvnx4si (TARGET_SVE)
- #define HAVE_cond_sminvnx4si (TARGET_SVE)
- #define HAVE_cond_uminvnx4si (TARGET_SVE)
- #define HAVE_cond_ashlvnx4si (TARGET_SVE)
- #define HAVE_cond_ashrvnx4si (TARGET_SVE)
- #define HAVE_cond_lshrvnx4si (TARGET_SVE)
- #define HAVE_cond_andvnx4si (TARGET_SVE)
- #define HAVE_cond_iorvnx4si (TARGET_SVE)
- #define HAVE_cond_xorvnx4si (TARGET_SVE)
- #define HAVE_cond_ssaddvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_usaddvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_sssubvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_ussubvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_addvnx2di (TARGET_SVE)
- #define HAVE_cond_subvnx2di (TARGET_SVE)
- #define HAVE_cond_mulvnx2di (TARGET_SVE)
- #define HAVE_cond_smaxvnx2di (TARGET_SVE)
- #define HAVE_cond_umaxvnx2di (TARGET_SVE)
- #define HAVE_cond_sminvnx2di (TARGET_SVE)
- #define HAVE_cond_uminvnx2di (TARGET_SVE)
- #define HAVE_cond_ashlvnx2di (TARGET_SVE)
- #define HAVE_cond_ashrvnx2di (TARGET_SVE)
- #define HAVE_cond_lshrvnx2di (TARGET_SVE)
- #define HAVE_cond_andvnx2di (TARGET_SVE)
- #define HAVE_cond_iorvnx2di (TARGET_SVE)
- #define HAVE_cond_xorvnx2di (TARGET_SVE)
- #define HAVE_cond_ssaddvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_usaddvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_sssubvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_ussubvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_aarch64_adrvnx4si_shift (TARGET_SVE)
- #define HAVE_aarch64_adrvnx2di_shift (TARGET_SVE)
- #define HAVE_sabdvnx16qi_3 (TARGET_SVE)
- #define HAVE_uabdvnx16qi_3 (TARGET_SVE)
- #define HAVE_sabdvnx8hi_3 (TARGET_SVE)
- #define HAVE_uabdvnx8hi_3 (TARGET_SVE)
- #define HAVE_sabdvnx4si_3 (TARGET_SVE)
- #define HAVE_uabdvnx4si_3 (TARGET_SVE)
- #define HAVE_sabdvnx2di_3 (TARGET_SVE)
- #define HAVE_uabdvnx2di_3 (TARGET_SVE)
- #define HAVE_aarch64_cond_sabdvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_cond_uabdvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_cond_sabdvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_cond_uabdvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_cond_sabdvnx4si (TARGET_SVE)
- #define HAVE_aarch64_cond_uabdvnx4si (TARGET_SVE)
- #define HAVE_aarch64_cond_sabdvnx2di (TARGET_SVE)
- #define HAVE_aarch64_cond_uabdvnx2di (TARGET_SVE)
- #define HAVE_smulvnx16qi3_highpart (TARGET_SVE)
- #define HAVE_umulvnx16qi3_highpart (TARGET_SVE)
- #define HAVE_smulvnx8hi3_highpart (TARGET_SVE)
- #define HAVE_umulvnx8hi3_highpart (TARGET_SVE)
- #define HAVE_smulvnx4si3_highpart (TARGET_SVE)
- #define HAVE_umulvnx4si3_highpart (TARGET_SVE)
- #define HAVE_smulvnx2di3_highpart (TARGET_SVE)
- #define HAVE_umulvnx2di3_highpart (TARGET_SVE)
- #define HAVE_cond_smulhvnx16qi (TARGET_SVE)
- #define HAVE_cond_umulhvnx16qi (TARGET_SVE)
- #define HAVE_cond_smulhvnx8hi (TARGET_SVE)
- #define HAVE_cond_umulhvnx8hi (TARGET_SVE)
- #define HAVE_cond_smulhvnx4si (TARGET_SVE)
- #define HAVE_cond_umulhvnx4si (TARGET_SVE)
- #define HAVE_cond_smulhvnx2di (TARGET_SVE)
- #define HAVE_cond_umulhvnx2di (TARGET_SVE)
- #define HAVE_divvnx4si3 (TARGET_SVE)
- #define HAVE_udivvnx4si3 (TARGET_SVE)
- #define HAVE_divvnx2di3 (TARGET_SVE)
- #define HAVE_udivvnx2di3 (TARGET_SVE)
- #define HAVE_cond_divvnx4si (TARGET_SVE)
- #define HAVE_cond_udivvnx4si (TARGET_SVE)
- #define HAVE_cond_divvnx2di (TARGET_SVE)
- #define HAVE_cond_udivvnx2di (TARGET_SVE)
- #define HAVE_aarch64_bicvnx16qi (TARGET_SVE)
- #define HAVE_aarch64_bicvnx8hi (TARGET_SVE)
- #define HAVE_aarch64_bicvnx4si (TARGET_SVE)
- #define HAVE_aarch64_bicvnx2di (TARGET_SVE)
- #define HAVE_cond_bicvnx16qi (TARGET_SVE)
- #define HAVE_cond_bicvnx8hi (TARGET_SVE)
- #define HAVE_cond_bicvnx4si (TARGET_SVE)
- #define HAVE_cond_bicvnx2di (TARGET_SVE)
- #define HAVE_ashlvnx16qi3 (TARGET_SVE)
- #define HAVE_ashrvnx16qi3 (TARGET_SVE)
- #define HAVE_lshrvnx16qi3 (TARGET_SVE)
- #define HAVE_ashlvnx8hi3 (TARGET_SVE)
- #define HAVE_ashrvnx8hi3 (TARGET_SVE)
- #define HAVE_lshrvnx8hi3 (TARGET_SVE)
- #define HAVE_ashlvnx4si3 (TARGET_SVE)
- #define HAVE_ashrvnx4si3 (TARGET_SVE)
- #define HAVE_lshrvnx4si3 (TARGET_SVE)
- #define HAVE_ashlvnx2di3 (TARGET_SVE)
- #define HAVE_ashrvnx2di3 (TARGET_SVE)
- #define HAVE_lshrvnx2di3 (TARGET_SVE)
- #define HAVE_vashlvnx16qi3 (TARGET_SVE)
- #define HAVE_vashrvnx16qi3 (TARGET_SVE)
- #define HAVE_vlshrvnx16qi3 (TARGET_SVE)
- #define HAVE_vashlvnx8hi3 (TARGET_SVE)
- #define HAVE_vashrvnx8hi3 (TARGET_SVE)
- #define HAVE_vlshrvnx8hi3 (TARGET_SVE)
- #define HAVE_vashlvnx4si3 (TARGET_SVE)
- #define HAVE_vashrvnx4si3 (TARGET_SVE)
- #define HAVE_vlshrvnx4si3 (TARGET_SVE)
- #define HAVE_vashlvnx2di3 (TARGET_SVE)
- #define HAVE_vashrvnx2di3 (TARGET_SVE)
- #define HAVE_vlshrvnx2di3 (TARGET_SVE)
- #define HAVE_cond_lslvnx16qi (TARGET_SVE)
- #define HAVE_cond_asrvnx16qi (TARGET_SVE)
- #define HAVE_cond_lsrvnx16qi (TARGET_SVE)
- #define HAVE_cond_lslvnx8hi (TARGET_SVE)
- #define HAVE_cond_asrvnx8hi (TARGET_SVE)
- #define HAVE_cond_lsrvnx8hi (TARGET_SVE)
- #define HAVE_cond_lslvnx4si (TARGET_SVE)
- #define HAVE_cond_asrvnx4si (TARGET_SVE)
- #define HAVE_cond_lsrvnx4si (TARGET_SVE)
- #define HAVE_sdiv_pow2vnx16qi3 (TARGET_SVE)
- #define HAVE_sdiv_pow2vnx8hi3 (TARGET_SVE)
- #define HAVE_sdiv_pow2vnx4si3 (TARGET_SVE)
- #define HAVE_sdiv_pow2vnx2di3 (TARGET_SVE)
- #define HAVE_cond_asrdvnx16qi (TARGET_SVE)
- #define HAVE_cond_sqshluvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_srshrvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_urshrvnx16qi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_asrdvnx8hi (TARGET_SVE)
- #define HAVE_cond_sqshluvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_srshrvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_urshrvnx8hi ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_asrdvnx4si (TARGET_SVE)
- #define HAVE_cond_sqshluvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_srshrvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_urshrvnx4si ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_asrdvnx2di (TARGET_SVE)
- #define HAVE_cond_sqshluvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_srshrvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_urshrvnx2di ((TARGET_SVE) && (TARGET_SVE2))
- #define HAVE_cond_fscalevnx8hf (TARGET_SVE)
- #define HAVE_cond_fscalevnx4sf (TARGET_SVE)
- #define HAVE_cond_fscalevnx2df (TARGET_SVE)
- #define HAVE_addvnx8hf3 (TARGET_SVE)
- #define HAVE_smax_nanvnx8hf3 (TARGET_SVE)
- #define HAVE_smaxvnx8hf3 (TARGET_SVE)
- #define HAVE_smin_nanvnx8hf3 (TARGET_SVE)
- #define HAVE_sminvnx8hf3 (TARGET_SVE)
- #define HAVE_mulvnx8hf3 (TARGET_SVE)
- #define HAVE_mulxvnx8hf3 (TARGET_SVE)
- #define HAVE_subvnx8hf3 (TARGET_SVE)
- #define HAVE_addvnx4sf3 (TARGET_SVE)
- #define HAVE_smax_nanvnx4sf3 (TARGET_SVE)
- #define HAVE_smaxvnx4sf3 (TARGET_SVE)
- #define HAVE_smin_nanvnx4sf3 (TARGET_SVE)
- #define HAVE_sminvnx4sf3 (TARGET_SVE)
- #define HAVE_mulvnx4sf3 (TARGET_SVE)
- #define HAVE_mulxvnx4sf3 (TARGET_SVE)
- #define HAVE_subvnx4sf3 (TARGET_SVE)
- #define HAVE_addvnx2df3 (TARGET_SVE)
- #define HAVE_smax_nanvnx2df3 (TARGET_SVE)
- #define HAVE_smaxvnx2df3 (TARGET_SVE)
- #define HAVE_smin_nanvnx2df3 (TARGET_SVE)
- #define HAVE_sminvnx2df3 (TARGET_SVE)
- #define HAVE_mulvnx2df3 (TARGET_SVE)
- #define HAVE_mulxvnx2df3 (TARGET_SVE)
- #define HAVE_subvnx2df3 (TARGET_SVE)
- #define HAVE_cond_addvnx8hf (TARGET_SVE)
- #define HAVE_cond_divvnx8hf (TARGET_SVE)
- #define HAVE_cond_smax_nanvnx8hf (TARGET_SVE)
- #define HAVE_cond_smaxvnx8hf (TARGET_SVE)
- #define HAVE_cond_smin_nanvnx8hf (TARGET_SVE)
- #define HAVE_cond_sminvnx8hf (TARGET_SVE)
- #define HAVE_cond_mulvnx8hf (TARGET_SVE)
- #define HAVE_cond_mulxvnx8hf (TARGET_SVE)
- #define HAVE_cond_subvnx8hf (TARGET_SVE)
- #define HAVE_cond_addvnx4sf (TARGET_SVE)
- #define HAVE_cond_divvnx4sf (TARGET_SVE)
- #define HAVE_cond_smax_nanvnx4sf (TARGET_SVE)
- #define HAVE_cond_smaxvnx4sf (TARGET_SVE)
- #define HAVE_cond_smin_nanvnx4sf (TARGET_SVE)
- #define HAVE_cond_sminvnx4sf (TARGET_SVE)
- #define HAVE_cond_mulvnx4sf (TARGET_SVE)
- #define HAVE_cond_mulxvnx4sf (TARGET_SVE)
- #define HAVE_cond_subvnx4sf (TARGET_SVE)
- #define HAVE_cond_addvnx2df (TARGET_SVE)
- #define HAVE_cond_divvnx2df (TARGET_SVE)
- #define HAVE_cond_smax_nanvnx2df (TARGET_SVE)
- #define HAVE_cond_smaxvnx2df (TARGET_SVE)
- #define HAVE_cond_smin_nanvnx2df (TARGET_SVE)
- #define HAVE_cond_sminvnx2df (TARGET_SVE)
- #define HAVE_cond_mulvnx2df (TARGET_SVE)
- #define HAVE_cond_mulxvnx2df (TARGET_SVE)
- #define HAVE_cond_subvnx2df (TARGET_SVE)
- #define HAVE_cond_cadd90vnx8hf (TARGET_SVE)
- #define HAVE_cond_cadd270vnx8hf (TARGET_SVE)
- #define HAVE_cond_cadd90vnx4sf (TARGET_SVE)
- #define HAVE_cond_cadd270vnx4sf (TARGET_SVE)
- #define HAVE_cond_cadd90vnx2df (TARGET_SVE)
- #define HAVE_cond_cadd270vnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_abdvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_abdvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_abdvnx2df (TARGET_SVE)
- #define HAVE_aarch64_cond_abdvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_cond_abdvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_cond_abdvnx2df (TARGET_SVE)
- #define HAVE_divvnx8hf3 (TARGET_SVE)
- #define HAVE_divvnx4sf3 (TARGET_SVE)
- #define HAVE_divvnx2df3 (TARGET_SVE)
- #define HAVE_aarch64_frecpevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_frecpevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_frecpevnx2df (TARGET_SVE)
- #define HAVE_aarch64_frecpsvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_frecpsvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_frecpsvnx2df (TARGET_SVE)
- #define HAVE_copysignvnx8hf3 (TARGET_SVE)
- #define HAVE_copysignvnx4sf3 (TARGET_SVE)
- #define HAVE_copysignvnx2df3 (TARGET_SVE)
- #define HAVE_xorsignvnx8hf3 (TARGET_SVE)
- #define HAVE_xorsignvnx4sf3 (TARGET_SVE)
- #define HAVE_xorsignvnx2df3 (TARGET_SVE)
- #define HAVE_fmaxvnx8hf3 (TARGET_SVE)
- #define HAVE_fminvnx8hf3 (TARGET_SVE)
- #define HAVE_fmaxvnx4sf3 (TARGET_SVE)
- #define HAVE_fminvnx4sf3 (TARGET_SVE)
- #define HAVE_fmaxvnx2df3 (TARGET_SVE)
- #define HAVE_fminvnx2df3 (TARGET_SVE)
- #define HAVE_iorvnx16bi3 (TARGET_SVE)
- #define HAVE_xorvnx16bi3 (TARGET_SVE)
- #define HAVE_iorvnx8bi3 (TARGET_SVE)
- #define HAVE_xorvnx8bi3 (TARGET_SVE)
- #define HAVE_iorvnx4bi3 (TARGET_SVE)
- #define HAVE_xorvnx4bi3 (TARGET_SVE)
- #define HAVE_iorvnx2bi3 (TARGET_SVE)
- #define HAVE_xorvnx2bi3 (TARGET_SVE)
- #define HAVE_fmavnx16qi4 (TARGET_SVE)
- #define HAVE_fmavnx8hi4 (TARGET_SVE)
- #define HAVE_fmavnx4si4 (TARGET_SVE)
- #define HAVE_fmavnx2di4 (TARGET_SVE)
- #define HAVE_cond_fmavnx16qi (TARGET_SVE)
- #define HAVE_cond_fmavnx8hi (TARGET_SVE)
- #define HAVE_cond_fmavnx4si (TARGET_SVE)
- #define HAVE_cond_fmavnx2di (TARGET_SVE)
- #define HAVE_fnmavnx16qi4 (TARGET_SVE)
- #define HAVE_fnmavnx8hi4 (TARGET_SVE)
- #define HAVE_fnmavnx4si4 (TARGET_SVE)
- #define HAVE_fnmavnx2di4 (TARGET_SVE)
- #define HAVE_cond_fnmavnx16qi (TARGET_SVE)
- #define HAVE_cond_fnmavnx8hi (TARGET_SVE)
- #define HAVE_cond_fnmavnx4si (TARGET_SVE)
- #define HAVE_cond_fnmavnx2di (TARGET_SVE)
- #define HAVE_ssadvnx16qi (TARGET_SVE)
- #define HAVE_usadvnx16qi (TARGET_SVE)
- #define HAVE_ssadvnx8hi (TARGET_SVE)
- #define HAVE_usadvnx8hi (TARGET_SVE)
- #define HAVE_fmavnx8hf4 (TARGET_SVE)
- #define HAVE_fnmavnx8hf4 (TARGET_SVE)
- #define HAVE_fnmsvnx8hf4 (TARGET_SVE)
- #define HAVE_fmsvnx8hf4 (TARGET_SVE)
- #define HAVE_fmavnx4sf4 (TARGET_SVE)
- #define HAVE_fnmavnx4sf4 (TARGET_SVE)
- #define HAVE_fnmsvnx4sf4 (TARGET_SVE)
- #define HAVE_fmsvnx4sf4 (TARGET_SVE)
- #define HAVE_fmavnx2df4 (TARGET_SVE)
- #define HAVE_fnmavnx2df4 (TARGET_SVE)
- #define HAVE_fnmsvnx2df4 (TARGET_SVE)
- #define HAVE_fmsvnx2df4 (TARGET_SVE)
- #define HAVE_cond_fmavnx8hf (TARGET_SVE)
- #define HAVE_cond_fnmavnx8hf (TARGET_SVE)
- #define HAVE_cond_fnmsvnx8hf (TARGET_SVE)
- #define HAVE_cond_fmsvnx8hf (TARGET_SVE)
- #define HAVE_cond_fmavnx4sf (TARGET_SVE)
- #define HAVE_cond_fnmavnx4sf (TARGET_SVE)
- #define HAVE_cond_fnmsvnx4sf (TARGET_SVE)
- #define HAVE_cond_fmsvnx4sf (TARGET_SVE)
- #define HAVE_cond_fmavnx2df (TARGET_SVE)
- #define HAVE_cond_fnmavnx2df (TARGET_SVE)
- #define HAVE_cond_fnmsvnx2df (TARGET_SVE)
- #define HAVE_cond_fmsvnx2df (TARGET_SVE)
- #define HAVE_cond_fcmlavnx8hf (TARGET_SVE)
- #define HAVE_cond_fcmla90vnx8hf (TARGET_SVE)
- #define HAVE_cond_fcmla180vnx8hf (TARGET_SVE)
- #define HAVE_cond_fcmla270vnx8hf (TARGET_SVE)
- #define HAVE_cond_fcmlavnx4sf (TARGET_SVE)
- #define HAVE_cond_fcmla90vnx4sf (TARGET_SVE)
- #define HAVE_cond_fcmla180vnx4sf (TARGET_SVE)
- #define HAVE_cond_fcmla270vnx4sf (TARGET_SVE)
- #define HAVE_cond_fcmlavnx2df (TARGET_SVE)
- #define HAVE_cond_fcmla90vnx2df (TARGET_SVE)
- #define HAVE_cond_fcmla180vnx2df (TARGET_SVE)
- #define HAVE_cond_fcmla270vnx2df (TARGET_SVE)
- #define HAVE_vcond_mask_vnx16qivnx16bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx8hivnx8bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx4sivnx4bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx2divnx2bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx8bfvnx8bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx8hfvnx8bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx4sfvnx4bi (TARGET_SVE)
- #define HAVE_vcond_mask_vnx2dfvnx2bi (TARGET_SVE)
- #define HAVE_vcondvnx16qivnx16qi (TARGET_SVE)
- #define HAVE_vcondvnx8hivnx8hi (TARGET_SVE)
- #define HAVE_vcondvnx4sivnx4si (TARGET_SVE)
- #define HAVE_vcondvnx2divnx2di (TARGET_SVE)
- #define HAVE_vcondvnx8bfvnx8hi (TARGET_SVE)
- #define HAVE_vcondvnx8hfvnx8hi (TARGET_SVE)
- #define HAVE_vcondvnx4sfvnx4si (TARGET_SVE)
- #define HAVE_vcondvnx2dfvnx2di (TARGET_SVE)
- #define HAVE_vconduvnx16qivnx16qi (TARGET_SVE)
- #define HAVE_vconduvnx8hivnx8hi (TARGET_SVE)
- #define HAVE_vconduvnx4sivnx4si (TARGET_SVE)
- #define HAVE_vconduvnx2divnx2di (TARGET_SVE)
- #define HAVE_vconduvnx8bfvnx8hi (TARGET_SVE)
- #define HAVE_vconduvnx8hfvnx8hi (TARGET_SVE)
- #define HAVE_vconduvnx4sfvnx4si (TARGET_SVE)
- #define HAVE_vconduvnx2dfvnx2di (TARGET_SVE)
- #define HAVE_vcondvnx8hivnx8hf (TARGET_SVE)
- #define HAVE_vcondvnx4sivnx4sf (TARGET_SVE)
- #define HAVE_vcondvnx2divnx2df (TARGET_SVE)
- #define HAVE_vcondvnx8bfvnx8hf (TARGET_SVE)
- #define HAVE_vcondvnx8hfvnx8hf (TARGET_SVE)
- #define HAVE_vcondvnx4sfvnx4sf (TARGET_SVE)
- #define HAVE_vcondvnx2dfvnx2df (TARGET_SVE)
- #define HAVE_vec_cmpvnx16qivnx16bi (TARGET_SVE)
- #define HAVE_vec_cmpvnx8hivnx8bi (TARGET_SVE)
- #define HAVE_vec_cmpvnx4sivnx4bi (TARGET_SVE)
- #define HAVE_vec_cmpvnx2divnx2bi (TARGET_SVE)
- #define HAVE_vec_cmpuvnx16qivnx16bi (TARGET_SVE)
- #define HAVE_vec_cmpuvnx8hivnx8bi (TARGET_SVE)
- #define HAVE_vec_cmpuvnx4sivnx4bi (TARGET_SVE)
- #define HAVE_vec_cmpuvnx2divnx2bi (TARGET_SVE)
- #define HAVE_vec_cmpvnx8hfvnx8bi (TARGET_SVE)
- #define HAVE_vec_cmpvnx4sfvnx4bi (TARGET_SVE)
- #define HAVE_vec_cmpvnx2dfvnx2bi (TARGET_SVE)
- #define HAVE_aarch64_pred_facgevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_facgtvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_faclevnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_facltvnx8hf (TARGET_SVE)
- #define HAVE_aarch64_pred_facgevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_facgtvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_faclevnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_facltvnx4sf (TARGET_SVE)
- #define HAVE_aarch64_pred_facgevnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_facgtvnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_faclevnx2df (TARGET_SVE)
- #define HAVE_aarch64_pred_facltvnx2df (TARGET_SVE)
- #define HAVE_cbranchvnx16bi4 1
- #define HAVE_cbranchvnx8bi4 1
- #define HAVE_cbranchvnx4bi4 1
- #define HAVE_cbranchvnx2bi4 1
- #define HAVE_reduc_plus_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_plus_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_plus_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_plus_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_and_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_ior_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_smax_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_smin_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_umax_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_umin_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_xor_scal_vnx16qi (TARGET_SVE)
- #define HAVE_reduc_and_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_ior_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_smax_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_smin_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_umax_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_umin_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_xor_scal_vnx8hi (TARGET_SVE)
- #define HAVE_reduc_and_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_ior_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_smax_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_smin_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_umax_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_umin_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_xor_scal_vnx4si (TARGET_SVE)
- #define HAVE_reduc_and_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_ior_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_smax_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_smin_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_umax_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_umin_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_xor_scal_vnx2di (TARGET_SVE)
- #define HAVE_reduc_plus_scal_vnx8hf (TARGET_SVE)
- #define HAVE_reduc_smax_nan_scal_vnx8hf (TARGET_SVE)
- #define HAVE_reduc_smax_scal_vnx8hf (TARGET_SVE)
- #define HAVE_reduc_smin_nan_scal_vnx8hf (TARGET_SVE)
- #define HAVE_reduc_smin_scal_vnx8hf (TARGET_SVE)
- #define HAVE_reduc_plus_scal_vnx4sf (TARGET_SVE)
- #define HAVE_reduc_smax_nan_scal_vnx4sf (TARGET_SVE)
- #define HAVE_reduc_smax_scal_vnx4sf (TARGET_SVE)
- #define HAVE_reduc_smin_nan_scal_vnx4sf (TARGET_SVE)
- #define HAVE_reduc_smin_scal_vnx4sf (TARGET_SVE)
- #define HAVE_reduc_plus_scal_vnx2df (TARGET_SVE)
- #define HAVE_reduc_smax_nan_scal_vnx2df (TARGET_SVE)
- #define HAVE_reduc_smax_scal_vnx2df (TARGET_SVE)
- #define HAVE_reduc_smin_nan_scal_vnx2df (TARGET_SVE)
- #define HAVE_reduc_smin_scal_vnx2df (TARGET_SVE)
- #define HAVE_fold_left_plus_vnx8hf (TARGET_SVE)
- #define HAVE_fold_left_plus_vnx4sf (TARGET_SVE)
- #define HAVE_fold_left_plus_vnx2df (TARGET_SVE)
- #define HAVE_vec_permvnx16qi (TARGET_SVE && GET_MODE_NUNITS (VNx16QImode).is_constant ())
- #define HAVE_vec_permvnx8hi (TARGET_SVE && GET_MODE_NUNITS (VNx8HImode).is_constant ())
- #define HAVE_vec_permvnx4si (TARGET_SVE && GET_MODE_NUNITS (VNx4SImode).is_constant ())
- #define HAVE_vec_permvnx2di (TARGET_SVE && GET_MODE_NUNITS (VNx2DImode).is_constant ())
- #define HAVE_vec_permvnx8bf (TARGET_SVE && GET_MODE_NUNITS (VNx8BFmode).is_constant ())
- #define HAVE_vec_permvnx8hf (TARGET_SVE && GET_MODE_NUNITS (VNx8HFmode).is_constant ())
- #define HAVE_vec_permvnx4sf (TARGET_SVE && GET_MODE_NUNITS (VNx4SFmode).is_constant ())
- #define HAVE_vec_permvnx2df (TARGET_SVE && GET_MODE_NUNITS (VNx2DFmode).is_constant ())
- #define HAVE_vec_unpacks_hi_vnx16qi (TARGET_SVE)
- #define HAVE_vec_unpacku_hi_vnx16qi (TARGET_SVE)
- #define HAVE_vec_unpacks_lo_vnx16qi (TARGET_SVE)
- #define HAVE_vec_unpacku_lo_vnx16qi (TARGET_SVE)
- #define HAVE_vec_unpacks_hi_vnx8hi (TARGET_SVE)
- #define HAVE_vec_unpacku_hi_vnx8hi (TARGET_SVE)
- #define HAVE_vec_unpacks_lo_vnx8hi (TARGET_SVE)
- #define HAVE_vec_unpacku_lo_vnx8hi (TARGET_SVE)
- #define HAVE_vec_unpacks_hi_vnx4si (TARGET_SVE)
- #define HAVE_vec_unpacku_hi_vnx4si (TARGET_SVE)
- #define HAVE_vec_unpacks_lo_vnx4si (TARGET_SVE)
- #define HAVE_vec_unpacku_lo_vnx4si (TARGET_SVE)
- #define HAVE_fix_truncvnx8hfvnx8hi2 (TARGET_SVE)
- #define HAVE_fixuns_truncvnx8hfvnx8hi2 (TARGET_SVE)
- #define HAVE_fix_truncvnx4sfvnx4si2 (TARGET_SVE)
- #define HAVE_fixuns_truncvnx4sfvnx4si2 (TARGET_SVE)
- #define HAVE_fix_truncvnx2dfvnx2di2 (TARGET_SVE)
- #define HAVE_fixuns_truncvnx2dfvnx2di2 (TARGET_SVE)
- #define HAVE_cond_fix_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
- #define HAVE_cond_fixuns_trunc_nontruncvnx8hfvnx8hi (TARGET_SVE && 16 >= 16)
- #define HAVE_cond_fix_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
- #define HAVE_cond_fixuns_trunc_nontruncvnx8hfvnx4si (TARGET_SVE && 32 >= 16)
- #define HAVE_cond_fix_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
- #define HAVE_cond_fixuns_trunc_nontruncvnx4sfvnx4si (TARGET_SVE && 32 >= 32)
- #define HAVE_cond_fix_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
- #define HAVE_cond_fixuns_trunc_nontruncvnx8hfvnx2di (TARGET_SVE && 64 >= 16)
- #define HAVE_cond_fix_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
- #define HAVE_cond_fixuns_trunc_nontruncvnx4sfvnx2di (TARGET_SVE && 64 >= 32)
- #define HAVE_cond_fix_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
- #define HAVE_cond_fixuns_trunc_nontruncvnx2dfvnx2di (TARGET_SVE && 64 >= 64)
- #define HAVE_cond_fix_trunc_truncvnx2dfvnx4si (TARGET_SVE)
- #define HAVE_cond_fixuns_trunc_truncvnx2dfvnx4si (TARGET_SVE)
- #define HAVE_vec_pack_sfix_trunc_vnx2df (TARGET_SVE)
- #define HAVE_vec_pack_ufix_trunc_vnx2df (TARGET_SVE)
- #define HAVE_floatvnx8hivnx8hf2 (TARGET_SVE)
- #define HAVE_floatunsvnx8hivnx8hf2 (TARGET_SVE)
- #define HAVE_floatvnx4sivnx4sf2 (TARGET_SVE)
- #define HAVE_floatunsvnx4sivnx4sf2 (TARGET_SVE)
- #define HAVE_floatvnx2divnx2df2 (TARGET_SVE)
- #define HAVE_floatunsvnx2divnx2df2 (TARGET_SVE)
- #define HAVE_cond_float_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
- #define HAVE_cond_floatuns_nonextendvnx8hivnx8hf (TARGET_SVE && 16 >= 16)
- #define HAVE_cond_float_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
- #define HAVE_cond_floatuns_nonextendvnx4sivnx8hf (TARGET_SVE && 32 >= 16)
- #define HAVE_cond_float_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
- #define HAVE_cond_floatuns_nonextendvnx4sivnx4sf (TARGET_SVE && 32 >= 32)
- #define HAVE_cond_float_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
- #define HAVE_cond_floatuns_nonextendvnx2divnx8hf (TARGET_SVE && 64 >= 16)
- #define HAVE_cond_float_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
- #define HAVE_cond_floatuns_nonextendvnx2divnx4sf (TARGET_SVE && 64 >= 32)
- #define HAVE_cond_float_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
- #define HAVE_cond_floatuns_nonextendvnx2divnx2df (TARGET_SVE && 64 >= 64)
- #define HAVE_cond_float_extendvnx4sivnx2df (TARGET_SVE)
- #define HAVE_cond_floatuns_extendvnx4sivnx2df (TARGET_SVE)
- #define HAVE_vec_unpacks_float_lo_vnx4si (TARGET_SVE)
- #define HAVE_vec_unpacks_float_hi_vnx4si (TARGET_SVE)
- #define HAVE_vec_unpacku_float_lo_vnx4si (TARGET_SVE)
- #define HAVE_vec_unpacku_float_hi_vnx4si (TARGET_SVE)
- #define HAVE_vec_pack_trunc_vnx4sf (TARGET_SVE)
- #define HAVE_vec_pack_trunc_vnx2df (TARGET_SVE)
- #define HAVE_cond_fcvt_truncvnx4sfvnx8hf (TARGET_SVE && 32 > 16)
- #define HAVE_cond_fcvt_truncvnx2dfvnx8hf (TARGET_SVE && 64 > 16)
- #define HAVE_cond_fcvt_truncvnx2dfvnx4sf (TARGET_SVE && 64 > 32)
- #define HAVE_cond_fcvt_truncvnx4sfvnx8bf (TARGET_SVE_BF16)
- #define HAVE_vec_unpacks_lo_vnx8hf (TARGET_SVE)
- #define HAVE_vec_unpacks_hi_vnx8hf (TARGET_SVE)
- #define HAVE_vec_unpacks_lo_vnx4sf (TARGET_SVE)
- #define HAVE_vec_unpacks_hi_vnx4sf (TARGET_SVE)
- #define HAVE_cond_fcvt_nontruncvnx8hfvnx4sf (TARGET_SVE && 32 > 16)
- #define HAVE_cond_fcvt_nontruncvnx8hfvnx2df (TARGET_SVE && 64 > 16)
- #define HAVE_cond_fcvt_nontruncvnx4sfvnx2df (TARGET_SVE && 64 > 32)
- #define HAVE_vec_unpacks_hi_vnx16bi (TARGET_SVE)
- #define HAVE_vec_unpacku_hi_vnx16bi (TARGET_SVE)
- #define HAVE_vec_unpacks_lo_vnx16bi (TARGET_SVE)
- #define HAVE_vec_unpacku_lo_vnx16bi (TARGET_SVE)
- #define HAVE_vec_unpacks_hi_vnx8bi (TARGET_SVE)
- #define HAVE_vec_unpacku_hi_vnx8bi (TARGET_SVE)
- #define HAVE_vec_unpacks_lo_vnx8bi (TARGET_SVE)
- #define HAVE_vec_unpacku_lo_vnx8bi (TARGET_SVE)
- #define HAVE_vec_unpacks_hi_vnx4bi (TARGET_SVE)
- #define HAVE_vec_unpacku_hi_vnx4bi (TARGET_SVE)
- #define HAVE_vec_unpacks_lo_vnx4bi (TARGET_SVE)
- #define HAVE_vec_unpacku_lo_vnx4bi (TARGET_SVE)
- #define HAVE_aarch64_sve_incvnx8hi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincvnx8hi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincvnx8hi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_decvnx8hi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecvnx8hi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecvnx8hi_pat (TARGET_SVE)
- #define HAVE_aarch64_sve_incdivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincdivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincdivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_incdivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincdivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincdivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_incdivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincdivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincdivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_incdivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincdivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincdivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincsivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincsivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincsivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincsivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincsivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincsivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincsivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincsivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_incvnx2di_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincvnx2di_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincvnx2di_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_incvnx4si_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincvnx4si_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincvnx4si_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_incvnx8hi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqincvnx8hi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqincvnx8hi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_decdivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecdivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecdivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_decdivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecdivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecdivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_decdivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecdivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecdivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_decdivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecdivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecdivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecsivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecsivnx16bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecsivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecsivnx8bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecsivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecsivnx4bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecsivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecsivnx2bi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_decvnx2di_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecvnx2di_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecvnx2di_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_decvnx4si_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecvnx4si_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecvnx4si_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_decvnx8hi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_sqdecvnx8hi_cntp (TARGET_SVE)
- #define HAVE_aarch64_sve_uqdecvnx8hi_cntp (TARGET_SVE)
- #define HAVE_smulhsvnx16qi3 (TARGET_SVE2)
- #define HAVE_umulhsvnx16qi3 (TARGET_SVE2)
- #define HAVE_smulhrsvnx16qi3 (TARGET_SVE2)
- #define HAVE_umulhrsvnx16qi3 (TARGET_SVE2)
- #define HAVE_smulhsvnx8hi3 (TARGET_SVE2)
- #define HAVE_umulhsvnx8hi3 (TARGET_SVE2)
- #define HAVE_smulhrsvnx8hi3 (TARGET_SVE2)
- #define HAVE_umulhrsvnx8hi3 (TARGET_SVE2)
- #define HAVE_smulhsvnx4si3 (TARGET_SVE2)
- #define HAVE_umulhsvnx4si3 (TARGET_SVE2)
- #define HAVE_smulhrsvnx4si3 (TARGET_SVE2)
- #define HAVE_umulhrsvnx4si3 (TARGET_SVE2)
- #define HAVE_avgvnx16qi3_floor (TARGET_SVE2)
- #define HAVE_uavgvnx16qi3_floor (TARGET_SVE2)
- #define HAVE_avgvnx8hi3_floor (TARGET_SVE2)
- #define HAVE_uavgvnx8hi3_floor (TARGET_SVE2)
- #define HAVE_avgvnx4si3_floor (TARGET_SVE2)
- #define HAVE_uavgvnx4si3_floor (TARGET_SVE2)
- #define HAVE_avgvnx2di3_floor (TARGET_SVE2)
- #define HAVE_uavgvnx2di3_floor (TARGET_SVE2)
- #define HAVE_avgvnx16qi3_ceil (TARGET_SVE2)
- #define HAVE_uavgvnx16qi3_ceil (TARGET_SVE2)
- #define HAVE_avgvnx8hi3_ceil (TARGET_SVE2)
- #define HAVE_uavgvnx8hi3_ceil (TARGET_SVE2)
- #define HAVE_avgvnx4si3_ceil (TARGET_SVE2)
- #define HAVE_uavgvnx4si3_ceil (TARGET_SVE2)
- #define HAVE_avgvnx2di3_ceil (TARGET_SVE2)
- #define HAVE_uavgvnx2di3_ceil (TARGET_SVE2)
- #define HAVE_cond_shaddvnx16qi (TARGET_SVE2)
- #define HAVE_cond_shsubvnx16qi (TARGET_SVE2)
- #define HAVE_cond_sqrshlvnx16qi (TARGET_SVE2)
- #define HAVE_cond_srhaddvnx16qi (TARGET_SVE2)
- #define HAVE_cond_srshlvnx16qi (TARGET_SVE2)
- #define HAVE_cond_suqaddvnx16qi (TARGET_SVE2)
- #define HAVE_cond_uhaddvnx16qi (TARGET_SVE2)
- #define HAVE_cond_uhsubvnx16qi (TARGET_SVE2)
- #define HAVE_cond_uqrshlvnx16qi (TARGET_SVE2)
- #define HAVE_cond_urhaddvnx16qi (TARGET_SVE2)
- #define HAVE_cond_urshlvnx16qi (TARGET_SVE2)
- #define HAVE_cond_usqaddvnx16qi (TARGET_SVE2)
- #define HAVE_cond_shaddvnx8hi (TARGET_SVE2)
- #define HAVE_cond_shsubvnx8hi (TARGET_SVE2)
- #define HAVE_cond_sqrshlvnx8hi (TARGET_SVE2)
- #define HAVE_cond_srhaddvnx8hi (TARGET_SVE2)
- #define HAVE_cond_srshlvnx8hi (TARGET_SVE2)
- #define HAVE_cond_suqaddvnx8hi (TARGET_SVE2)
- #define HAVE_cond_uhaddvnx8hi (TARGET_SVE2)
- #define HAVE_cond_uhsubvnx8hi (TARGET_SVE2)
- #define HAVE_cond_uqrshlvnx8hi (TARGET_SVE2)
- #define HAVE_cond_urhaddvnx8hi (TARGET_SVE2)
- #define HAVE_cond_urshlvnx8hi (TARGET_SVE2)
- #define HAVE_cond_usqaddvnx8hi (TARGET_SVE2)
- #define HAVE_cond_shaddvnx4si (TARGET_SVE2)
- #define HAVE_cond_shsubvnx4si (TARGET_SVE2)
- #define HAVE_cond_sqrshlvnx4si (TARGET_SVE2)
- #define HAVE_cond_srhaddvnx4si (TARGET_SVE2)
- #define HAVE_cond_srshlvnx4si (TARGET_SVE2)
- #define HAVE_cond_suqaddvnx4si (TARGET_SVE2)
- #define HAVE_cond_uhaddvnx4si (TARGET_SVE2)
- #define HAVE_cond_uhsubvnx4si (TARGET_SVE2)
- #define HAVE_cond_uqrshlvnx4si (TARGET_SVE2)
- #define HAVE_cond_urhaddvnx4si (TARGET_SVE2)
- #define HAVE_cond_urshlvnx4si (TARGET_SVE2)
- #define HAVE_cond_usqaddvnx4si (TARGET_SVE2)
- #define HAVE_cond_shaddvnx2di (TARGET_SVE2)
- #define HAVE_cond_shsubvnx2di (TARGET_SVE2)
- #define HAVE_cond_sqrshlvnx2di (TARGET_SVE2)
- #define HAVE_cond_srhaddvnx2di (TARGET_SVE2)
- #define HAVE_cond_srshlvnx2di (TARGET_SVE2)
- #define HAVE_cond_suqaddvnx2di (TARGET_SVE2)
- #define HAVE_cond_uhaddvnx2di (TARGET_SVE2)
- #define HAVE_cond_uhsubvnx2di (TARGET_SVE2)
- #define HAVE_cond_uqrshlvnx2di (TARGET_SVE2)
- #define HAVE_cond_urhaddvnx2di (TARGET_SVE2)
- #define HAVE_cond_urshlvnx2di (TARGET_SVE2)
- #define HAVE_cond_usqaddvnx2di (TARGET_SVE2)
- #define HAVE_cond_sqshlvnx16qi (TARGET_SVE2)
- #define HAVE_cond_uqshlvnx16qi (TARGET_SVE2)
- #define HAVE_cond_sqshlvnx8hi (TARGET_SVE2)
- #define HAVE_cond_uqshlvnx8hi (TARGET_SVE2)
- #define HAVE_cond_sqshlvnx4si (TARGET_SVE2)
- #define HAVE_cond_uqshlvnx4si (TARGET_SVE2)
- #define HAVE_cond_sqshlvnx2di (TARGET_SVE2)
- #define HAVE_cond_uqshlvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bcaxvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bcaxvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bcaxvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bcaxvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bslvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bslvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bslvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bslvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_nbslvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_nbslvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_nbslvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_nbslvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl1nvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl1nvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl1nvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl1nvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl2nvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl2nvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl2nvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_bsl2nvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_asrvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_lsrvnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_asrvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_lsrvnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_asrvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_lsrvnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_asrvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve_add_lsrvnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_sabavnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_uabavnx16qi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_sabavnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_uabavnx8hi (TARGET_SVE2)
- #define HAVE_aarch64_sve2_sabavnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_uabavnx4si (TARGET_SVE2)
- #define HAVE_aarch64_sve2_sabavnx2di (TARGET_SVE2)
- #define HAVE_aarch64_sve2_uabavnx2di (TARGET_SVE2)
- #define HAVE_cond_sadalpvnx8hi (TARGET_SVE2)
- #define HAVE_cond_uadalpvnx8hi (TARGET_SVE2)
- #define HAVE_cond_sadalpvnx4si (TARGET_SVE2)
- #define HAVE_cond_uadalpvnx4si (TARGET_SVE2)
- #define HAVE_cond_sadalpvnx2di (TARGET_SVE2)
- #define HAVE_cond_uadalpvnx2di (TARGET_SVE2)
- #define HAVE_cond_fcvtltvnx4sf (TARGET_SVE2)
- #define HAVE_cond_fcvtltvnx2df (TARGET_SVE2)
- #define HAVE_cond_fcvtxvnx4sf (TARGET_SVE2)
- #define HAVE_cond_urecpevnx4si (TARGET_SVE2)
- #define HAVE_cond_ursqrtevnx4si (TARGET_SVE2)
- #define HAVE_cond_flogbvnx8hf (TARGET_SVE2)
- #define HAVE_cond_flogbvnx4sf (TARGET_SVE2)
- #define HAVE_cond_flogbvnx2df (TARGET_SVE2)
- #define HAVE_check_raw_ptrssi (TARGET_SVE2)
- #define HAVE_check_war_ptrssi (TARGET_SVE2)
- #define HAVE_check_raw_ptrsdi (TARGET_SVE2)
- #define HAVE_check_war_ptrsdi (TARGET_SVE2)
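For context, a minimal sketch of how these generated interfaces are conventionally consumed elsewhere in GCC (illustrative C, not part of the patch above): each named pattern in the machine description yields a HAVE_<pattern> macro whose expansion is the pattern's condition (a constant 1, or a target-flag test such as TARGET_SVE as seen throughout the hunk), plus a matching gen_<pattern> emitter like the prototypes removed below. Callers test the macro before calling the emitter. The helper function here is hypothetical; the pattern names are taken from this hunk.

    /* Hypothetical helper, assuming the usual GCC includes (rtl.h,
       insn-flags.h) are in scope.  HAVE_negvnx4si2 expands to
       (TARGET_SVE), so the `if' is a run-time check of the target
       flags, not a preprocessor test.  */
    static void
    emit_sve_negate (rtx dest, rtx src)
    {
      if (HAVE_negvnx4si2)
        /* gen_negvnx4si2 builds and returns the RTL for the named
           pattern; emit_insn adds it to the instruction stream.  */
        emit_insn (gen_negvnx4si2 (dest, src));
    }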
- extern rtx gen_indirect_jump (rtx);
- extern rtx gen_jump (rtx);
- extern rtx gen_ccmpccsi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccdi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpsf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpdf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpesf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpedf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccsi_rev (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccdi_rev (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpsf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpdf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpesf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ccmpccfpedf_rev (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_condjump (rtx, rtx, rtx);
- extern rtx gen_nop (void);
- extern rtx gen_prefetch (rtx, rtx, rtx);
- extern rtx gen_trap (void);
- extern rtx gen_simple_return (void);
- extern rtx gen_insv_immsi (rtx, rtx, rtx);
- extern rtx gen_insv_immdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_movksi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_movkdi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_sw_sisi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_sw_sfsi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_sw_sisf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_sw_sfsf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_dw_didi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_dw_didf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_dw_dfdi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_dw_dfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pair_dw_tftf (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_sw_sisi (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_sw_sfsi (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_sw_sisf (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_sw_sfsf (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_dw_didi (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_dw_didf (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_dw_dfdi (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_dw_dfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_store_pair_dw_tftf (rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairsi_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairsi_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairdi_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairdi_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairsf_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairdf_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairsf_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairdf_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairti_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairtf_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairti_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_loadwb_pairtf_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairsi_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairsi_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairdi_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairdi_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairsf_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairdf_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairsf_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairdf_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairti_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairtf_si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairti_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_storewb_pairtf_di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_addsi3_compare0 (rtx, rtx, rtx);
- extern rtx gen_adddi3_compare0 (rtx, rtx, rtx);
- extern rtx gen_addsi3_compareC (rtx, rtx, rtx);
- extern rtx gen_adddi3_compareC (rtx, rtx, rtx);
- extern rtx gen_addsi3_compareV_imm (rtx, rtx, rtx);
- extern rtx gen_adddi3_compareV_imm (rtx, rtx, rtx);
- extern rtx gen_addsi3_compareV (rtx, rtx, rtx);
- extern rtx gen_adddi3_compareV (rtx, rtx, rtx);
- extern rtx gen_aarch64_subsi_compare0 (rtx, rtx);
- extern rtx gen_aarch64_subdi_compare0 (rtx, rtx);
- extern rtx gen_subsi3 (rtx, rtx, rtx);
- extern rtx gen_subdi3 (rtx, rtx, rtx);
- extern rtx gen_subvsi_insn (rtx, rtx, rtx);
- extern rtx gen_subvdi_insn (rtx, rtx, rtx);
- extern rtx gen_subvsi_imm (rtx, rtx, rtx);
- extern rtx gen_subvdi_imm (rtx, rtx, rtx);
- extern rtx gen_negvsi_insn (rtx, rtx);
- extern rtx gen_negvdi_insn (rtx, rtx);
- extern rtx gen_negvsi_cmp_only (rtx);
- extern rtx gen_negvdi_cmp_only (rtx);
- extern rtx gen_negdi_carryout (rtx, rtx);
- extern rtx gen_negvdi_carryinV (rtx, rtx);
- extern rtx gen_subsi3_compare1_imm (rtx, rtx, rtx, rtx);
- extern rtx gen_subdi3_compare1_imm (rtx, rtx, rtx, rtx);
- extern rtx gen_subsi3_compare1 (rtx, rtx, rtx);
- extern rtx gen_subdi3_compare1 (rtx, rtx, rtx);
- extern rtx gen_negsi2 (rtx, rtx);
- extern rtx gen_negdi2 (rtx, rtx);
- extern rtx gen_negsi2_compare0 (rtx, rtx);
- extern rtx gen_negdi2_compare0 (rtx, rtx);
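- /* Multiply, multiply-accumulate and divide patterns.  */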
- extern rtx gen_mulsi3 (rtx, rtx, rtx);
- extern rtx gen_muldi3 (rtx, rtx, rtx);
- extern rtx gen_maddsi (rtx, rtx, rtx, rtx);
- extern rtx gen_madddi (rtx, rtx, rtx, rtx);
- extern rtx gen_mulsidi3 (rtx, rtx, rtx);
- extern rtx gen_umulsidi3 (rtx, rtx, rtx);
- extern rtx gen_maddsidi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_umaddsidi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_msubsidi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_umsubsidi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_smuldi3_highpart (rtx, rtx, rtx);
- extern rtx gen_umuldi3_highpart (rtx, rtx, rtx);
- extern rtx gen_divsi3 (rtx, rtx, rtx);
- extern rtx gen_udivsi3 (rtx, rtx, rtx);
- extern rtx gen_divdi3 (rtx, rtx, rtx);
- extern rtx gen_udivdi3 (rtx, rtx, rtx);
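- /* Integer/floating-point compare and conditional-store patterns.  */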
- extern rtx gen_cmpsi (rtx, rtx);
- extern rtx gen_cmpdi (rtx, rtx);
- extern rtx gen_fcmpsf (rtx, rtx);
- extern rtx gen_fcmpdf (rtx, rtx);
- extern rtx gen_fcmpesf (rtx, rtx);
- extern rtx gen_fcmpedf (rtx, rtx);
- extern rtx gen_aarch64_cstoreqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cstorehi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cstoresi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cstoredi (rtx, rtx, rtx);
- extern rtx gen_cstoreqi_neg (rtx, rtx, rtx);
- extern rtx gen_cstorehi_neg (rtx, rtx, rtx);
- extern rtx gen_cstoresi_neg (rtx, rtx, rtx);
- extern rtx gen_cstoredi_neg (rtx, rtx, rtx);
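- /* CRC32 extension patterns.  */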
- extern rtx gen_aarch64_crc32b (rtx, rtx, rtx);
- extern rtx gen_aarch64_crc32h (rtx, rtx, rtx);
- extern rtx gen_aarch64_crc32w (rtx, rtx, rtx);
- extern rtx gen_aarch64_crc32x (rtx, rtx, rtx);
- extern rtx gen_aarch64_crc32cb (rtx, rtx, rtx);
- extern rtx gen_aarch64_crc32ch (rtx, rtx, rtx);
- extern rtx gen_aarch64_crc32cw (rtx, rtx, rtx);
- extern rtx gen_aarch64_crc32cx (rtx, rtx, rtx);
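- /* Conditional increment/negate (CSINC/CSNEG) and saturating decrement
-    patterns.  */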
- extern rtx gen_csinc3si_insn (rtx, rtx, rtx, rtx);
- extern rtx gen_csinc3di_insn (rtx, rtx, rtx, rtx);
- extern rtx gen_csneg3_uxtw_insn (rtx, rtx, rtx, rtx);
- extern rtx gen_csneg3si_insn (rtx, rtx, rtx, rtx);
- extern rtx gen_csneg3di_insn (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uqdecsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqdecdi (rtx, rtx, rtx);
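- /* Bitwise logical and shifted-complement patterns.  */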
- extern rtx gen_andsi3 (rtx, rtx, rtx);
- extern rtx gen_iorsi3 (rtx, rtx, rtx);
- extern rtx gen_xorsi3 (rtx, rtx, rtx);
- extern rtx gen_anddi3 (rtx, rtx, rtx);
- extern rtx gen_iordi3 (rtx, rtx, rtx);
- extern rtx gen_xordi3 (rtx, rtx, rtx);
- extern rtx gen_one_cmplsi2 (rtx, rtx);
- extern rtx gen_one_cmpldi2 (rtx, rtx);
- extern rtx gen_and_one_cmpl_ashlsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_ashlsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_ashlsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_and_one_cmpl_ashrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_ashrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_ashrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_and_one_cmpl_lshrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_lshrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_lshrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_and_one_cmpl_rotrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_rotrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_rotrsi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_and_one_cmpl_ashldi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_ashldi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_ashldi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_and_one_cmpl_ashrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_ashrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_ashrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_and_one_cmpl_lshrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_lshrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_lshrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_and_one_cmpl_rotrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_ior_one_cmpl_rotrdi3 (rtx, rtx, rtx, rtx);
- extern rtx gen_xor_one_cmpl_rotrdi3 (rtx, rtx, rtx, rtx);
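- /* Bit counting, bit reversal and byte-swap patterns.  */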
- extern rtx gen_clzsi2 (rtx, rtx);
- extern rtx gen_clzdi2 (rtx, rtx);
- extern rtx gen_clrsbsi2 (rtx, rtx);
- extern rtx gen_clrsbdi2 (rtx, rtx);
- extern rtx gen_rbitsi2 (rtx, rtx);
- extern rtx gen_rbitdi2 (rtx, rtx);
- extern rtx gen_ctzsi2 (rtx, rtx);
- extern rtx gen_ctzdi2 (rtx, rtx);
- extern rtx gen_bswapsi2 (rtx, rtx);
- extern rtx gen_bswapdi2 (rtx, rtx);
- extern rtx gen_bswaphi2 (rtx, rtx);
- extern rtx gen_rev16si2 (rtx, rtx, rtx, rtx);
- extern rtx gen_rev16di2 (rtx, rtx, rtx, rtx);
- extern rtx gen_rev16si2_alt (rtx, rtx, rtx, rtx);
- extern rtx gen_rev16di2_alt (rtx, rtx, rtx, rtx);
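- /* Scalar floating-point rounding (FRINT*) patterns.  */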
- extern rtx gen_btrunchf2 (rtx, rtx);
- extern rtx gen_ceilhf2 (rtx, rtx);
- extern rtx gen_floorhf2 (rtx, rtx);
- extern rtx gen_frintnhf2 (rtx, rtx);
- extern rtx gen_nearbyinthf2 (rtx, rtx);
- extern rtx gen_rinthf2 (rtx, rtx);
- extern rtx gen_roundhf2 (rtx, rtx);
- extern rtx gen_btruncsf2 (rtx, rtx);
- extern rtx gen_ceilsf2 (rtx, rtx);
- extern rtx gen_floorsf2 (rtx, rtx);
- extern rtx gen_frintnsf2 (rtx, rtx);
- extern rtx gen_nearbyintsf2 (rtx, rtx);
- extern rtx gen_rintsf2 (rtx, rtx);
- extern rtx gen_roundsf2 (rtx, rtx);
- extern rtx gen_btruncdf2 (rtx, rtx);
- extern rtx gen_ceildf2 (rtx, rtx);
- extern rtx gen_floordf2 (rtx, rtx);
- extern rtx gen_frintndf2 (rtx, rtx);
- extern rtx gen_nearbyintdf2 (rtx, rtx);
- extern rtx gen_rintdf2 (rtx, rtx);
- extern rtx gen_rounddf2 (rtx, rtx);
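- /* Round-and-convert floating-point to integer patterns.  */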
- extern rtx gen_lbtrunchfsi2 (rtx, rtx);
- extern rtx gen_lceilhfsi2 (rtx, rtx);
- extern rtx gen_lfloorhfsi2 (rtx, rtx);
- extern rtx gen_lroundhfsi2 (rtx, rtx);
- extern rtx gen_lfrintnhfsi2 (rtx, rtx);
- extern rtx gen_lbtruncuhfsi2 (rtx, rtx);
- extern rtx gen_lceiluhfsi2 (rtx, rtx);
- extern rtx gen_lflooruhfsi2 (rtx, rtx);
- extern rtx gen_lrounduhfsi2 (rtx, rtx);
- extern rtx gen_lfrintnuhfsi2 (rtx, rtx);
- extern rtx gen_lbtruncsfsi2 (rtx, rtx);
- extern rtx gen_lceilsfsi2 (rtx, rtx);
- extern rtx gen_lfloorsfsi2 (rtx, rtx);
- extern rtx gen_lroundsfsi2 (rtx, rtx);
- extern rtx gen_lfrintnsfsi2 (rtx, rtx);
- extern rtx gen_lbtruncusfsi2 (rtx, rtx);
- extern rtx gen_lceilusfsi2 (rtx, rtx);
- extern rtx gen_lfloorusfsi2 (rtx, rtx);
- extern rtx gen_lroundusfsi2 (rtx, rtx);
- extern rtx gen_lfrintnusfsi2 (rtx, rtx);
- extern rtx gen_lbtruncdfsi2 (rtx, rtx);
- extern rtx gen_lceildfsi2 (rtx, rtx);
- extern rtx gen_lfloordfsi2 (rtx, rtx);
- extern rtx gen_lrounddfsi2 (rtx, rtx);
- extern rtx gen_lfrintndfsi2 (rtx, rtx);
- extern rtx gen_lbtruncudfsi2 (rtx, rtx);
- extern rtx gen_lceiludfsi2 (rtx, rtx);
- extern rtx gen_lfloorudfsi2 (rtx, rtx);
- extern rtx gen_lroundudfsi2 (rtx, rtx);
- extern rtx gen_lfrintnudfsi2 (rtx, rtx);
- extern rtx gen_lbtrunchfdi2 (rtx, rtx);
- extern rtx gen_lceilhfdi2 (rtx, rtx);
- extern rtx gen_lfloorhfdi2 (rtx, rtx);
- extern rtx gen_lroundhfdi2 (rtx, rtx);
- extern rtx gen_lfrintnhfdi2 (rtx, rtx);
- extern rtx gen_lbtruncuhfdi2 (rtx, rtx);
- extern rtx gen_lceiluhfdi2 (rtx, rtx);
- extern rtx gen_lflooruhfdi2 (rtx, rtx);
- extern rtx gen_lrounduhfdi2 (rtx, rtx);
- extern rtx gen_lfrintnuhfdi2 (rtx, rtx);
- extern rtx gen_lbtruncsfdi2 (rtx, rtx);
- extern rtx gen_lceilsfdi2 (rtx, rtx);
- extern rtx gen_lfloorsfdi2 (rtx, rtx);
- extern rtx gen_lroundsfdi2 (rtx, rtx);
- extern rtx gen_lfrintnsfdi2 (rtx, rtx);
- extern rtx gen_lbtruncusfdi2 (rtx, rtx);
- extern rtx gen_lceilusfdi2 (rtx, rtx);
- extern rtx gen_lfloorusfdi2 (rtx, rtx);
- extern rtx gen_lroundusfdi2 (rtx, rtx);
- extern rtx gen_lfrintnusfdi2 (rtx, rtx);
- extern rtx gen_lbtruncdfdi2 (rtx, rtx);
- extern rtx gen_lceildfdi2 (rtx, rtx);
- extern rtx gen_lfloordfdi2 (rtx, rtx);
- extern rtx gen_lrounddfdi2 (rtx, rtx);
- extern rtx gen_lfrintndfdi2 (rtx, rtx);
- extern rtx gen_lbtruncudfdi2 (rtx, rtx);
- extern rtx gen_lceiludfdi2 (rtx, rtx);
- extern rtx gen_lfloorudfdi2 (rtx, rtx);
- extern rtx gen_lroundudfdi2 (rtx, rtx);
- extern rtx gen_lfrintnudfdi2 (rtx, rtx);
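- /* Floating-point extension, truncation and int<->FP conversion
-    patterns.  */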
- extern rtx gen_extendsfdf2 (rtx, rtx);
- extern rtx gen_extendhfsf2 (rtx, rtx);
- extern rtx gen_extendhfdf2 (rtx, rtx);
- extern rtx gen_truncdfsf2 (rtx, rtx);
- extern rtx gen_truncsfhf2 (rtx, rtx);
- extern rtx gen_truncdfhf2 (rtx, rtx);
- extern rtx gen_fix_truncsfsi2 (rtx, rtx);
- extern rtx gen_fixuns_truncsfsi2 (rtx, rtx);
- extern rtx gen_fix_truncdfdi2 (rtx, rtx);
- extern rtx gen_fixuns_truncdfdi2 (rtx, rtx);
- extern rtx gen_fix_trunchfsi2 (rtx, rtx);
- extern rtx gen_fixuns_trunchfsi2 (rtx, rtx);
- extern rtx gen_fix_trunchfdi2 (rtx, rtx);
- extern rtx gen_fixuns_trunchfdi2 (rtx, rtx);
- extern rtx gen_fix_truncdfsi2 (rtx, rtx);
- extern rtx gen_fixuns_truncdfsi2 (rtx, rtx);
- extern rtx gen_fix_truncsfdi2 (rtx, rtx);
- extern rtx gen_fixuns_truncsfdi2 (rtx, rtx);
- extern rtx gen_floatsisf2 (rtx, rtx);
- extern rtx gen_floatunssisf2 (rtx, rtx);
- extern rtx gen_floatdidf2 (rtx, rtx);
- extern rtx gen_floatunsdidf2 (rtx, rtx);
- extern rtx gen_floatdisf2 (rtx, rtx);
- extern rtx gen_floatunsdisf2 (rtx, rtx);
- extern rtx gen_floatsidf2 (rtx, rtx);
- extern rtx gen_floatunssidf2 (rtx, rtx);
- extern rtx gen_aarch64_fp16_floatsihf2 (rtx, rtx);
- extern rtx gen_aarch64_fp16_floatunssihf2 (rtx, rtx);
- extern rtx gen_aarch64_fp16_floatdihf2 (rtx, rtx);
- extern rtx gen_aarch64_fp16_floatunsdihf2 (rtx, rtx);
- extern rtx gen_fcvtzssf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzusf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzsdf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzudf3 (rtx, rtx, rtx);
- extern rtx gen_scvtfsi3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfsi3 (rtx, rtx, rtx);
- extern rtx gen_scvtfdi3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfdi3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzshfsi3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuhfsi3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzshfdi3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuhfdi3 (rtx, rtx, rtx);
- extern rtx gen_scvtfsihf3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfsihf3 (rtx, rtx, rtx);
- extern rtx gen_scvtfdihf3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfdihf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzshf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuhf3 (rtx, rtx, rtx);
- extern rtx gen_scvtfhi3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfhi3 (rtx, rtx, rtx);
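- /* Scalar floating-point arithmetic, min/max and copysign patterns.  */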
- extern rtx gen_addhf3 (rtx, rtx, rtx);
- extern rtx gen_addsf3 (rtx, rtx, rtx);
- extern rtx gen_adddf3 (rtx, rtx, rtx);
- extern rtx gen_subhf3 (rtx, rtx, rtx);
- extern rtx gen_subsf3 (rtx, rtx, rtx);
- extern rtx gen_subdf3 (rtx, rtx, rtx);
- extern rtx gen_mulhf3 (rtx, rtx, rtx);
- extern rtx gen_mulsf3 (rtx, rtx, rtx);
- extern rtx gen_muldf3 (rtx, rtx, rtx);
- extern rtx gen_neghf2 (rtx, rtx);
- extern rtx gen_negsf2 (rtx, rtx);
- extern rtx gen_negdf2 (rtx, rtx);
- extern rtx gen_abshf2 (rtx, rtx);
- extern rtx gen_abssf2 (rtx, rtx);
- extern rtx gen_absdf2 (rtx, rtx);
- extern rtx gen_smaxsf3 (rtx, rtx, rtx);
- extern rtx gen_smaxdf3 (rtx, rtx, rtx);
- extern rtx gen_sminsf3 (rtx, rtx, rtx);
- extern rtx gen_smindf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanhf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanhf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxhf3 (rtx, rtx, rtx);
- extern rtx gen_fminhf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nansf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nansf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxsf3 (rtx, rtx, rtx);
- extern rtx gen_fminsf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nandf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nandf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxdf3 (rtx, rtx, rtx);
- extern rtx gen_fmindf3 (rtx, rtx, rtx);
- extern rtx gen_copysignsf3_insn (rtx, rtx, rtx, rtx);
- extern rtx gen_copysigndf3_insn (rtx, rtx, rtx, rtx);
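- /* 128-bit (TImode/TFmode) high/low part move patterns.  */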
- extern rtx gen_aarch64_movdi_tilow (rtx, rtx);
- extern rtx gen_aarch64_movdi_tflow (rtx, rtx);
- extern rtx gen_aarch64_movdi_tihigh (rtx, rtx);
- extern rtx gen_aarch64_movdi_tfhigh (rtx, rtx);
- extern rtx gen_aarch64_movtihigh_di (rtx, rtx);
- extern rtx gen_aarch64_movtfhigh_di (rtx, rtx);
- extern rtx gen_aarch64_movtilow_di (rtx, rtx);
- extern rtx gen_aarch64_movtflow_di (rtx, rtx);
- extern rtx gen_aarch64_movtilow_tilow (rtx, rtx);
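- /* PC-relative addressing, GOT and TLS access patterns.  */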
- extern rtx gen_add_losym_si (rtx, rtx, rtx);
- extern rtx gen_add_losym_di (rtx, rtx, rtx);
- extern rtx gen_ldr_got_small_si (rtx, rtx, rtx);
- extern rtx gen_ldr_got_small_di (rtx, rtx, rtx);
- extern rtx gen_ldr_got_small_sidi (rtx, rtx, rtx);
- extern rtx gen_ldr_got_small_28k_si (rtx, rtx, rtx);
- extern rtx gen_ldr_got_small_28k_di (rtx, rtx, rtx);
- extern rtx gen_ldr_got_small_28k_sidi (rtx, rtx, rtx);
- extern rtx gen_ldr_got_tiny_si (rtx, rtx);
- extern rtx gen_ldr_got_tiny_di (rtx, rtx);
- extern rtx gen_ldr_got_tiny_sidi (rtx, rtx);
- extern rtx gen_aarch64_load_tp_hard (rtx);
- extern rtx gen_tlsie_small_si (rtx, rtx);
- extern rtx gen_tlsie_small_di (rtx, rtx);
- extern rtx gen_tlsie_small_sidi (rtx, rtx);
- extern rtx gen_tlsie_tiny_si (rtx, rtx, rtx);
- extern rtx gen_tlsie_tiny_di (rtx, rtx, rtx);
- extern rtx gen_tlsie_tiny_sidi (rtx, rtx, rtx);
- extern rtx gen_tlsle12_si (rtx, rtx, rtx);
- extern rtx gen_tlsle12_di (rtx, rtx, rtx);
- extern rtx gen_tlsle24_si (rtx, rtx, rtx);
- extern rtx gen_tlsle24_di (rtx, rtx, rtx);
- extern rtx gen_tlsle32_si (rtx, rtx);
- extern rtx gen_tlsle32_di (rtx, rtx);
- extern rtx gen_tlsle48_si (rtx, rtx);
- extern rtx gen_tlsle48_di (rtx, rtx);
- extern rtx gen_tlsdesc_small_advsimd_si (rtx);
- extern rtx gen_tlsdesc_small_advsimd_di (rtx);
- extern rtx gen_tlsdesc_small_sve_si (rtx, rtx);
- extern rtx gen_tlsdesc_small_sve_di (rtx, rtx);
- extern rtx gen_stack_tie (rtx, rtx);
- extern rtx gen_aarch64_fjcvtzs (rtx, rtx);
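- /* Pointer authentication (PAC/AUT/XPAC) patterns.  */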
- extern rtx gen_paciasp (void);
- extern rtx gen_autiasp (void);
- extern rtx gen_pacibsp (void);
- extern rtx gen_autibsp (void);
- extern rtx gen_pacia1716 (void);
- extern rtx gen_autia1716 (void);
- extern rtx gen_pacib1716 (void);
- extern rtx gen_autib1716 (void);
- extern rtx gen_xpaclri (void);
- extern rtx gen_blockage (void);
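- /* Stack probing and stack-protector patterns.  */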
- extern rtx gen_probe_stack_range (rtx, rtx, rtx);
- extern rtx gen_probe_sve_stack_clash_si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_probe_sve_stack_clash_di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_reg_stack_protect_address_si (rtx, rtx);
- extern rtx gen_reg_stack_protect_address_di (rtx, rtx);
- extern rtx gen_stack_protect_set_si (rtx, rtx);
- extern rtx gen_stack_protect_set_di (rtx, rtx);
- extern rtx gen_stack_protect_test_si (rtx, rtx);
- extern rtx gen_stack_protect_test_di (rtx, rtx);
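- /* FPCR/FPSR access patterns.  */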
- extern rtx gen_set_fpcr (rtx);
- extern rtx gen_get_fpcr (rtx);
- extern rtx gen_set_fpsr (rtx);
- extern rtx gen_get_fpsr (rtx);
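- /* Speculation tracking, BTI and despeculation patterns.  */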
- extern rtx gen_speculation_tracker (rtx);
- extern rtx gen_speculation_tracker_rev (rtx);
- extern rtx gen_bti_noarg (void);
- extern rtx gen_bti_c (void);
- extern rtx gen_bti_j (void);
- extern rtx gen_bti_jc (void);
- extern rtx gen_speculation_barrier (void);
- extern rtx gen_despeculate_simpleqi (rtx, rtx, rtx);
- extern rtx gen_despeculate_simplehi (rtx, rtx, rtx);
- extern rtx gen_despeculate_simplesi (rtx, rtx, rtx);
- extern rtx gen_despeculate_simpledi (rtx, rtx, rtx);
- extern rtx gen_despeculate_simpleti (rtx, rtx, rtx);
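- /* FRINT32/FRINT64 rounding patterns (Armv8.5-A).  */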
- extern rtx gen_aarch64_frint32zv2sf (rtx, rtx);
- extern rtx gen_aarch64_frint32xv2sf (rtx, rtx);
- extern rtx gen_aarch64_frint64zv2sf (rtx, rtx);
- extern rtx gen_aarch64_frint64xv2sf (rtx, rtx);
- extern rtx gen_aarch64_frint32zv4sf (rtx, rtx);
- extern rtx gen_aarch64_frint32xv4sf (rtx, rtx);
- extern rtx gen_aarch64_frint64zv4sf (rtx, rtx);
- extern rtx gen_aarch64_frint64xv4sf (rtx, rtx);
- extern rtx gen_aarch64_frint32zv2df (rtx, rtx);
- extern rtx gen_aarch64_frint32xv2df (rtx, rtx);
- extern rtx gen_aarch64_frint64zv2df (rtx, rtx);
- extern rtx gen_aarch64_frint64xv2df (rtx, rtx);
- extern rtx gen_aarch64_frint32zdf (rtx, rtx);
- extern rtx gen_aarch64_frint32xdf (rtx, rtx);
- extern rtx gen_aarch64_frint64zdf (rtx, rtx);
- extern rtx gen_aarch64_frint64xdf (rtx, rtx);
- extern rtx gen_aarch64_frint32zsf (rtx, rtx);
- extern rtx gen_aarch64_frint32xsf (rtx, rtx);
- extern rtx gen_aarch64_frint64zsf (rtx, rtx);
- extern rtx gen_aarch64_frint64xsf (rtx, rtx);
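- /* Transactional Memory Extension (TME) patterns.  */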
- extern rtx gen_tstart (rtx);
- extern rtx gen_ttest (rtx);
- extern rtx gen_tcommit (void);
- extern rtx gen_tcancel (rtx);
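- /* Random number generation (RNDR/RNDRRS) patterns.  */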
- extern rtx gen_aarch64_rndr (rtx);
- extern rtx gen_aarch64_rndrrs (rtx);
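- /* Memory Tagging Extension (MTE) patterns.  */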
- extern rtx gen_irg (rtx, rtx, rtx);
- extern rtx gen_gmi (rtx, rtx, rtx);
- extern rtx gen_addg (rtx, rtx, rtx, rtx);
- extern rtx gen_subp (rtx, rtx, rtx);
- extern rtx gen_ldg (rtx, rtx, rtx);
- extern rtx gen_stg (rtx, rtx, rtx);
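- /* Advanced SIMD duplicate, lane and store-lane patterns.  */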
- extern rtx gen_aarch64_simd_dupv8qi (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv4hi (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv2si (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv4si (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv2di (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv4hf (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv2sf (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv2df (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv4bf (rtx, rtx);
- extern rtx gen_aarch64_simd_dupv8bf (rtx, rtx);
- extern rtx gen_aarch64_dup_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lanev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_128v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_64v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_128v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_64v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_128v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_64v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_128v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_64v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_128v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_dup_lane_to_64v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_lane0v2df (rtx, rtx, rtx);
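- /* Advanced SIMD register-pair load/store patterns.  */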
- extern rtx gen_load_pairv8qiv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hiv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hfv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2siv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2sfv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairdfv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8qiv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hiv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hfv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2siv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2sfv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairdfv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8qiv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hiv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hfv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2siv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2sfv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairdfv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8qiv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hiv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2siv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2sfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairdfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8qiv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hiv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hfv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2siv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2sfv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairdfv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8qidf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hidf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4hfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2sidf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2sfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairdfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8qiv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hiv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hfv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2siv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2sfv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairdfv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8qiv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hiv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hfv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2siv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2sfv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairdfv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8qiv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hiv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hfv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2siv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2sfv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairdfv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8qiv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hiv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2siv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2sfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairdfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8qiv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hiv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hfv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2siv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2sfv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairdfv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8qidf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hidf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4hfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2sidf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2sfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairdfdf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv16qiv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hiv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4siv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2div2df (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8hfv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv4sfv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv2dfv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_load_pairv8bfv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv16qiv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hiv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4siv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2div2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8hfv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv4sfv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv2dfv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_pairv8bfv2df (rtx, rtx, rtx, rtx);
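- /* Vector high/low half move patterns.  */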
- extern rtx gen_aarch64_simd_mov_from_v16qilow (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v8hilow (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v4silow (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v8hflow (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v8bflow (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v4sflow (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v16qihigh (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v8hihigh (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v4sihigh (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v8hfhigh (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v8bfhigh (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_mov_from_v4sfhigh (rtx, rtx, rtx);
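- /* Vector bitwise ORN/BIC, integer arithmetic, byte-swap and bit-reverse
-    patterns.  */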
- extern rtx gen_ornv8qi3 (rtx, rtx, rtx);
- extern rtx gen_ornv16qi3 (rtx, rtx, rtx);
- extern rtx gen_ornv4hi3 (rtx, rtx, rtx);
- extern rtx gen_ornv8hi3 (rtx, rtx, rtx);
- extern rtx gen_ornv2si3 (rtx, rtx, rtx);
- extern rtx gen_ornv4si3 (rtx, rtx, rtx);
- extern rtx gen_ornv2di3 (rtx, rtx, rtx);
- extern rtx gen_bicv8qi3 (rtx, rtx, rtx);
- extern rtx gen_bicv16qi3 (rtx, rtx, rtx);
- extern rtx gen_bicv4hi3 (rtx, rtx, rtx);
- extern rtx gen_bicv8hi3 (rtx, rtx, rtx);
- extern rtx gen_bicv2si3 (rtx, rtx, rtx);
- extern rtx gen_bicv4si3 (rtx, rtx, rtx);
- extern rtx gen_bicv2di3 (rtx, rtx, rtx);
- extern rtx gen_addv8qi3 (rtx, rtx, rtx);
- extern rtx gen_addv16qi3 (rtx, rtx, rtx);
- extern rtx gen_addv4hi3 (rtx, rtx, rtx);
- extern rtx gen_addv8hi3 (rtx, rtx, rtx);
- extern rtx gen_addv2si3 (rtx, rtx, rtx);
- extern rtx gen_addv4si3 (rtx, rtx, rtx);
- extern rtx gen_addv2di3 (rtx, rtx, rtx);
- extern rtx gen_subv8qi3 (rtx, rtx, rtx);
- extern rtx gen_subv16qi3 (rtx, rtx, rtx);
- extern rtx gen_subv4hi3 (rtx, rtx, rtx);
- extern rtx gen_subv8hi3 (rtx, rtx, rtx);
- extern rtx gen_subv2si3 (rtx, rtx, rtx);
- extern rtx gen_subv4si3 (rtx, rtx, rtx);
- extern rtx gen_subv2di3 (rtx, rtx, rtx);
- extern rtx gen_mulv8qi3 (rtx, rtx, rtx);
- extern rtx gen_mulv16qi3 (rtx, rtx, rtx);
- extern rtx gen_mulv4hi3 (rtx, rtx, rtx);
- extern rtx gen_mulv8hi3 (rtx, rtx, rtx);
- extern rtx gen_mulv2si3 (rtx, rtx, rtx);
- extern rtx gen_mulv4si3 (rtx, rtx, rtx);
- extern rtx gen_bswapv4hi2 (rtx, rtx);
- extern rtx gen_bswapv8hi2 (rtx, rtx);
- extern rtx gen_bswapv2si2 (rtx, rtx);
- extern rtx gen_bswapv4si2 (rtx, rtx);
- extern rtx gen_bswapv2di2 (rtx, rtx);
- extern rtx gen_aarch64_rbitv8qi (rtx, rtx);
- extern rtx gen_aarch64_rbitv16qi (rtx, rtx);
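- /* Complex arithmetic (FCADD/FCMLA) patterns.  */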
- extern rtx gen_aarch64_fcadd90v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd270v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd90v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd270v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd90v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd270v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd90v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd270v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd90v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcadd270v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla0v4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla90v4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla180v4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla270v4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla0v8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla90v8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla180v8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla270v8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla0v2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla90v2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla180v2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla270v2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla0v4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla90v4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla180v4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla270v4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla0v2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla90v2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla180v2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla270v2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane0v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane90v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane180v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane270v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane0v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane90v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane180v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane270v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane0v2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane90v2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane180v2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane270v2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane0v4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane90v4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane180v4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane270v4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane0v2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane90v2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane180v2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane270v2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_laneq0v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_laneq90v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_laneq180v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_laneq270v4hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane0v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane90v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane180v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane270v8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane0v4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane90v4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane180v4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmlaq_lane270v4sf (rtx, rtx, rtx, rtx, rtx);
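- /* Dot product (SDOT/UDOT/USDOT/SUDOT) patterns.  */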
- extern rtx gen_aarch64_sdotv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udotv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sdotv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udotv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdotv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdotv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sdot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sdot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sdot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sdot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sudot_lanev8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sudot_lanev16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sudot_laneqv8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sudot_laneqv16qi (rtx, rtx, rtx, rtx, rtx);
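- /* Reciprocal square-root estimate and step patterns.  */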
- extern rtx gen_aarch64_rsqrtev4hf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtev8hf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtev2sf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtev4sf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtev2df (rtx, rtx);
- extern rtx gen_aarch64_rsqrtehf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtesf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtedf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtsv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtsv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtsv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtsv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtsv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtshf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtssf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtsdf (rtx, rtx, rtx);
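- /* Vector negate, absolute value and absolute-difference
-    (ABD/ABDL2/ABAL/ADALP/ABA/FABD) patterns.  */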
- extern rtx gen_negv8qi2 (rtx, rtx);
- extern rtx gen_negv16qi2 (rtx, rtx);
- extern rtx gen_negv4hi2 (rtx, rtx);
- extern rtx gen_negv8hi2 (rtx, rtx);
- extern rtx gen_negv2si2 (rtx, rtx);
- extern rtx gen_negv4si2 (rtx, rtx);
- extern rtx gen_negv2di2 (rtx, rtx);
- extern rtx gen_absv8qi2 (rtx, rtx);
- extern rtx gen_absv16qi2 (rtx, rtx);
- extern rtx gen_absv4hi2 (rtx, rtx);
- extern rtx gen_absv8hi2 (rtx, rtx);
- extern rtx gen_absv2si2 (rtx, rtx);
- extern rtx gen_absv4si2 (rtx, rtx);
- extern rtx gen_absv2di2 (rtx, rtx);
- extern rtx gen_aarch64_absv8qi (rtx, rtx);
- extern rtx gen_aarch64_absv16qi (rtx, rtx);
- extern rtx gen_aarch64_absv4hi (rtx, rtx);
- extern rtx gen_aarch64_absv8hi (rtx, rtx);
- extern rtx gen_aarch64_absv2si (rtx, rtx);
- extern rtx gen_aarch64_absv4si (rtx, rtx);
- extern rtx gen_aarch64_absv2di (rtx, rtx);
- extern rtx gen_aarch64_absdi (rtx, rtx);
- extern rtx gen_aarch64_sabdv8qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdv8qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdv16qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdv16qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdv4hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdv4hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdv8hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdv8hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdv2si_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdv2si_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdv4si_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdv4si_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdl2v8qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdl2v8qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdl2v16qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdl2v16qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdl2v4hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdl2v4hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdl2v8hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdl2v8hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabdl2v4si_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uabdl2v4si_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sabalv8qi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uabalv8qi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sabalv16qi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uabalv16qi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sabalv4hi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uabalv4hi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sabalv8hi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uabalv8hi_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sabalv4si_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uabalv4si_4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sadalpv8qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uadalpv8qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sadalpv16qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uadalpv16qi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sadalpv4hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uadalpv4hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sadalpv8hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uadalpv8hi_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sadalpv4si_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_uadalpv4si_3 (rtx, rtx, rtx);
- extern rtx gen_abav8qi_3 (rtx, rtx, rtx, rtx);
- extern rtx gen_abav16qi_3 (rtx, rtx, rtx, rtx);
- extern rtx gen_abav4hi_3 (rtx, rtx, rtx, rtx);
- extern rtx gen_abav8hi_3 (rtx, rtx, rtx, rtx);
- extern rtx gen_abav2si_3 (rtx, rtx, rtx, rtx);
- extern rtx gen_abav4si_3 (rtx, rtx, rtx, rtx);
- extern rtx gen_fabdv4hf3 (rtx, rtx, rtx);
- extern rtx gen_fabdv8hf3 (rtx, rtx, rtx);
- extern rtx gen_fabdv2sf3 (rtx, rtx, rtx);
- extern rtx gen_fabdv4sf3 (rtx, rtx, rtx);
- extern rtx gen_fabdv2df3 (rtx, rtx, rtx);
- extern rtx gen_fabdhf3 (rtx, rtx, rtx);
- extern rtx gen_fabdsf3 (rtx, rtx, rtx);
- extern rtx gen_fabddf3 (rtx, rtx, rtx);
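- /* Vector logical (AND/ORR/EOR/NOT) patterns.  */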
- extern rtx gen_andv8qi3 (rtx, rtx, rtx);
- extern rtx gen_andv16qi3 (rtx, rtx, rtx);
- extern rtx gen_andv4hi3 (rtx, rtx, rtx);
- extern rtx gen_andv8hi3 (rtx, rtx, rtx);
- extern rtx gen_andv2si3 (rtx, rtx, rtx);
- extern rtx gen_andv4si3 (rtx, rtx, rtx);
- extern rtx gen_andv2di3 (rtx, rtx, rtx);
- extern rtx gen_iorv8qi3 (rtx, rtx, rtx);
- extern rtx gen_iorv16qi3 (rtx, rtx, rtx);
- extern rtx gen_iorv4hi3 (rtx, rtx, rtx);
- extern rtx gen_iorv8hi3 (rtx, rtx, rtx);
- extern rtx gen_iorv2si3 (rtx, rtx, rtx);
- extern rtx gen_iorv4si3 (rtx, rtx, rtx);
- extern rtx gen_iorv2di3 (rtx, rtx, rtx);
- extern rtx gen_xorv8qi3 (rtx, rtx, rtx);
- extern rtx gen_xorv16qi3 (rtx, rtx, rtx);
- extern rtx gen_xorv4hi3 (rtx, rtx, rtx);
- extern rtx gen_xorv8hi3 (rtx, rtx, rtx);
- extern rtx gen_xorv2si3 (rtx, rtx, rtx);
- extern rtx gen_xorv4si3 (rtx, rtx, rtx);
- extern rtx gen_xorv2di3 (rtx, rtx, rtx);
- extern rtx gen_one_cmplv8qi2 (rtx, rtx);
- extern rtx gen_one_cmplv16qi2 (rtx, rtx);
- extern rtx gen_one_cmplv4hi2 (rtx, rtx);
- extern rtx gen_one_cmplv8hi2 (rtx, rtx);
- extern rtx gen_one_cmplv2si2 (rtx, rtx);
- extern rtx gen_one_cmplv4si2 (rtx, rtx);
- extern rtx gen_one_cmplv2di2 (rtx, rtx);
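- /* Vector element insert and vector shift patterns.  */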
- extern rtx gen_aarch64_simd_vec_setv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_setv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_lshrv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_lshrv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_lshrv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_lshrv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_lshrv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_lshrv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_lshrv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ashrv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ashrv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ashrv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ashrv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ashrv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ashrv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ashrv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_imm_shlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_imm_shlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_imm_shlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_imm_shlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_imm_shlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_imm_shlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_imm_shlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_sshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_sshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_sshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_sshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_sshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_sshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_sshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv8qi_unsigned (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv16qi_unsigned (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv4hi_unsigned (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv8hi_unsigned (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv2si_unsigned (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv4si_unsigned (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv2di_unsigned (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv8qi_signed (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv16qi_signed (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv4hi_signed (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv8hi_signed (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv2si_signed (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv4si_signed (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_reg_shlv2di_signed (rtx, rtx, rtx);
- extern rtx gen_vec_shr_v8qi (rtx, rtx, rtx);
- extern rtx gen_vec_shr_v4hi (rtx, rtx, rtx);
- extern rtx gen_vec_shr_v4hf (rtx, rtx, rtx);
- extern rtx gen_vec_shr_v2si (rtx, rtx, rtx);
- extern rtx gen_vec_shr_v2sf (rtx, rtx, rtx);
- extern rtx gen_vec_shr_v4bf (rtx, rtx, rtx);
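- /* Vector multiply-accumulate and (pairwise) min/max patterns.  */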
- extern rtx gen_aarch64_mlav8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlav16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlav4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlav8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlav2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlav4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlsv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlsv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlsv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlsv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlsv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mlsv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_smaxv8qi3 (rtx, rtx, rtx);
- extern rtx gen_sminv8qi3 (rtx, rtx, rtx);
- extern rtx gen_umaxv8qi3 (rtx, rtx, rtx);
- extern rtx gen_uminv8qi3 (rtx, rtx, rtx);
- extern rtx gen_smaxv16qi3 (rtx, rtx, rtx);
- extern rtx gen_sminv16qi3 (rtx, rtx, rtx);
- extern rtx gen_umaxv16qi3 (rtx, rtx, rtx);
- extern rtx gen_uminv16qi3 (rtx, rtx, rtx);
- extern rtx gen_smaxv4hi3 (rtx, rtx, rtx);
- extern rtx gen_sminv4hi3 (rtx, rtx, rtx);
- extern rtx gen_umaxv4hi3 (rtx, rtx, rtx);
- extern rtx gen_uminv4hi3 (rtx, rtx, rtx);
- extern rtx gen_smaxv8hi3 (rtx, rtx, rtx);
- extern rtx gen_sminv8hi3 (rtx, rtx, rtx);
- extern rtx gen_umaxv8hi3 (rtx, rtx, rtx);
- extern rtx gen_uminv8hi3 (rtx, rtx, rtx);
- extern rtx gen_smaxv2si3 (rtx, rtx, rtx);
- extern rtx gen_sminv2si3 (rtx, rtx, rtx);
- extern rtx gen_umaxv2si3 (rtx, rtx, rtx);
- extern rtx gen_uminv2si3 (rtx, rtx, rtx);
- extern rtx gen_smaxv4si3 (rtx, rtx, rtx);
- extern rtx gen_sminv4si3 (rtx, rtx, rtx);
- extern rtx gen_umaxv4si3 (rtx, rtx, rtx);
- extern rtx gen_uminv4si3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_umaxpv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uminpv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_umaxpv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uminpv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_umaxpv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uminpv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_umaxpv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uminpv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_umaxpv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uminpv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_umaxpv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uminpv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_smax_nanpv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smin_nanpv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smax_nanpv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smin_nanpv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smax_nanpv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smin_nanpv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smax_nanpv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smin_nanpv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_smax_nanpv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_smin_nanpv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_smaxpv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sminpv2df (rtx, rtx, rtx);
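- /* Quad-word half moves, pack/unpack and widening multiply patterns.  */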
- extern rtx gen_move_lo_quad_internal_v16qi (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_v8hi (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_v4si (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_v2di (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_v8hf (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_v8bf (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_v4sf (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_v2df (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v16qi (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v8hi (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v4si (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v2di (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v8hf (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v8bf (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v4sf (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_internal_be_v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_move_hi_quad_be_v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_pack_trunc_v8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_vec_pack_trunc_v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_vec_pack_trunc_v2di (rtx, rtx);
- extern rtx gen_vec_pack_trunc_v8hi (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_v4si (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_lo_v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacku_lo_v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_lo_v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacku_lo_v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_lo_v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacku_lo_v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_hi_v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacku_hi_v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_hi_v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacku_hi_v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_hi_v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacku_hi_v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_smult_lo_v16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_umult_lo_v16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_smult_lo_v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_umult_lo_v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_smult_lo_v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_umult_lo_v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_intrinsic_vec_smult_lo_v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_intrinsic_vec_umult_lo_v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_intrinsic_vec_smult_lo_v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_intrinsic_vec_umult_lo_v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_intrinsic_vec_smult_lo_v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_intrinsic_vec_umult_lo_v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_smult_hi_v16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_umult_hi_v16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_smult_hi_v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_umult_hi_v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_smult_hi_v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_umult_hi_v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smult_lane_v4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umult_lane_v4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smult_laneq_v4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umult_laneq_v4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smult_lane_v2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umult_lane_v2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smult_laneq_v2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umult_laneq_v2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smlal_lane_v4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umlal_lane_v4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smlal_laneq_v4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umlal_laneq_v4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smlal_lane_v2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umlal_lane_v2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_smlal_laneq_v2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_umlal_laneq_v2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_addv4hf3 (rtx, rtx, rtx);
- extern rtx gen_addv8hf3 (rtx, rtx, rtx);
- extern rtx gen_addv2sf3 (rtx, rtx, rtx);
- extern rtx gen_addv4sf3 (rtx, rtx, rtx);
- extern rtx gen_addv2df3 (rtx, rtx, rtx);
- extern rtx gen_subv4hf3 (rtx, rtx, rtx);
- extern rtx gen_subv8hf3 (rtx, rtx, rtx);
- extern rtx gen_subv2sf3 (rtx, rtx, rtx);
- extern rtx gen_subv4sf3 (rtx, rtx, rtx);
- extern rtx gen_subv2df3 (rtx, rtx, rtx);
- extern rtx gen_mulv4hf3 (rtx, rtx, rtx);
- extern rtx gen_mulv8hf3 (rtx, rtx, rtx);
- extern rtx gen_mulv2sf3 (rtx, rtx, rtx);
- extern rtx gen_mulv4sf3 (rtx, rtx, rtx);
- extern rtx gen_mulv2df3 (rtx, rtx, rtx);
- extern rtx gen_negv4hf2 (rtx, rtx);
- extern rtx gen_negv8hf2 (rtx, rtx);
- extern rtx gen_negv2sf2 (rtx, rtx);
- extern rtx gen_negv4sf2 (rtx, rtx);
- extern rtx gen_negv2df2 (rtx, rtx);
- extern rtx gen_absv4hf2 (rtx, rtx);
- extern rtx gen_absv8hf2 (rtx, rtx);
- extern rtx gen_absv2sf2 (rtx, rtx);
- extern rtx gen_absv4sf2 (rtx, rtx);
- extern rtx gen_absv2df2 (rtx, rtx);
- extern rtx gen_fmav4hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmav8hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmav2sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmav4sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmav2df4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmav4hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmav8hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmav2sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmav4sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmav2df4 (rtx, rtx, rtx, rtx);
- extern rtx gen_btruncv4hf2 (rtx, rtx);
- extern rtx gen_ceilv4hf2 (rtx, rtx);
- extern rtx gen_floorv4hf2 (rtx, rtx);
- extern rtx gen_frintnv4hf2 (rtx, rtx);
- extern rtx gen_nearbyintv4hf2 (rtx, rtx);
- extern rtx gen_rintv4hf2 (rtx, rtx);
- extern rtx gen_roundv4hf2 (rtx, rtx);
- extern rtx gen_btruncv8hf2 (rtx, rtx);
- extern rtx gen_ceilv8hf2 (rtx, rtx);
- extern rtx gen_floorv8hf2 (rtx, rtx);
- extern rtx gen_frintnv8hf2 (rtx, rtx);
- extern rtx gen_nearbyintv8hf2 (rtx, rtx);
- extern rtx gen_rintv8hf2 (rtx, rtx);
- extern rtx gen_roundv8hf2 (rtx, rtx);
- extern rtx gen_btruncv2sf2 (rtx, rtx);
- extern rtx gen_ceilv2sf2 (rtx, rtx);
- extern rtx gen_floorv2sf2 (rtx, rtx);
- extern rtx gen_frintnv2sf2 (rtx, rtx);
- extern rtx gen_nearbyintv2sf2 (rtx, rtx);
- extern rtx gen_rintv2sf2 (rtx, rtx);
- extern rtx gen_roundv2sf2 (rtx, rtx);
- extern rtx gen_btruncv4sf2 (rtx, rtx);
- extern rtx gen_ceilv4sf2 (rtx, rtx);
- extern rtx gen_floorv4sf2 (rtx, rtx);
- extern rtx gen_frintnv4sf2 (rtx, rtx);
- extern rtx gen_nearbyintv4sf2 (rtx, rtx);
- extern rtx gen_rintv4sf2 (rtx, rtx);
- extern rtx gen_roundv4sf2 (rtx, rtx);
- extern rtx gen_btruncv2df2 (rtx, rtx);
- extern rtx gen_ceilv2df2 (rtx, rtx);
- extern rtx gen_floorv2df2 (rtx, rtx);
- extern rtx gen_frintnv2df2 (rtx, rtx);
- extern rtx gen_nearbyintv2df2 (rtx, rtx);
- extern rtx gen_rintv2df2 (rtx, rtx);
- extern rtx gen_roundv2df2 (rtx, rtx);
- extern rtx gen_lbtruncv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lceilv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lfloorv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lroundv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lfrintnv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lbtruncuv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lceiluv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lflooruv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lrounduv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lfrintnuv4hfv4hi2 (rtx, rtx);
- extern rtx gen_lbtruncv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lceilv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lfloorv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lroundv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lfrintnv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lbtruncuv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lceiluv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lflooruv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lrounduv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lfrintnuv8hfv8hi2 (rtx, rtx);
- extern rtx gen_lbtruncv2sfv2si2 (rtx, rtx);
- extern rtx gen_lceilv2sfv2si2 (rtx, rtx);
- extern rtx gen_lfloorv2sfv2si2 (rtx, rtx);
- extern rtx gen_lroundv2sfv2si2 (rtx, rtx);
- extern rtx gen_lfrintnv2sfv2si2 (rtx, rtx);
- extern rtx gen_lbtruncuv2sfv2si2 (rtx, rtx);
- extern rtx gen_lceiluv2sfv2si2 (rtx, rtx);
- extern rtx gen_lflooruv2sfv2si2 (rtx, rtx);
- extern rtx gen_lrounduv2sfv2si2 (rtx, rtx);
- extern rtx gen_lfrintnuv2sfv2si2 (rtx, rtx);
- extern rtx gen_lbtruncv4sfv4si2 (rtx, rtx);
- extern rtx gen_lceilv4sfv4si2 (rtx, rtx);
- extern rtx gen_lfloorv4sfv4si2 (rtx, rtx);
- extern rtx gen_lroundv4sfv4si2 (rtx, rtx);
- extern rtx gen_lfrintnv4sfv4si2 (rtx, rtx);
- extern rtx gen_lbtruncuv4sfv4si2 (rtx, rtx);
- extern rtx gen_lceiluv4sfv4si2 (rtx, rtx);
- extern rtx gen_lflooruv4sfv4si2 (rtx, rtx);
- extern rtx gen_lrounduv4sfv4si2 (rtx, rtx);
- extern rtx gen_lfrintnuv4sfv4si2 (rtx, rtx);
- extern rtx gen_lbtruncv2dfv2di2 (rtx, rtx);
- extern rtx gen_lceilv2dfv2di2 (rtx, rtx);
- extern rtx gen_lfloorv2dfv2di2 (rtx, rtx);
- extern rtx gen_lroundv2dfv2di2 (rtx, rtx);
- extern rtx gen_lfrintnv2dfv2di2 (rtx, rtx);
- extern rtx gen_lbtruncuv2dfv2di2 (rtx, rtx);
- extern rtx gen_lceiluv2dfv2di2 (rtx, rtx);
- extern rtx gen_lflooruv2dfv2di2 (rtx, rtx);
- extern rtx gen_lrounduv2dfv2di2 (rtx, rtx);
- extern rtx gen_lfrintnuv2dfv2di2 (rtx, rtx);
- extern rtx gen_lbtrunchfhi2 (rtx, rtx);
- extern rtx gen_lceilhfhi2 (rtx, rtx);
- extern rtx gen_lfloorhfhi2 (rtx, rtx);
- extern rtx gen_lroundhfhi2 (rtx, rtx);
- extern rtx gen_lfrintnhfhi2 (rtx, rtx);
- extern rtx gen_lbtruncuhfhi2 (rtx, rtx);
- extern rtx gen_lceiluhfhi2 (rtx, rtx);
- extern rtx gen_lflooruhfhi2 (rtx, rtx);
- extern rtx gen_lrounduhfhi2 (rtx, rtx);
- extern rtx gen_lfrintnuhfhi2 (rtx, rtx);
- extern rtx gen_fix_trunchfhi2 (rtx, rtx);
- extern rtx gen_fixuns_trunchfhi2 (rtx, rtx);
- extern rtx gen_floathihf2 (rtx, rtx);
- extern rtx gen_floatunshihf2 (rtx, rtx);
- extern rtx gen_floatv4hiv4hf2 (rtx, rtx);
- extern rtx gen_floatunsv4hiv4hf2 (rtx, rtx);
- extern rtx gen_floatv8hiv8hf2 (rtx, rtx);
- extern rtx gen_floatunsv8hiv8hf2 (rtx, rtx);
- extern rtx gen_floatv2siv2sf2 (rtx, rtx);
- extern rtx gen_floatunsv2siv2sf2 (rtx, rtx);
- extern rtx gen_floatv4siv4sf2 (rtx, rtx);
- extern rtx gen_floatunsv4siv4sf2 (rtx, rtx);
- extern rtx gen_floatv2div2df2 (rtx, rtx);
- extern rtx gen_floatunsv2div2df2 (rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_lo_v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_lo_v4sf (rtx, rtx, rtx);
- extern rtx gen_fcvtzsv4hf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuv4hf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzsv8hf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuv8hf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzsv2sf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuv2sf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzsv4sf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuv4sf3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzsv2df3 (rtx, rtx, rtx);
- extern rtx gen_fcvtzuv2df3 (rtx, rtx, rtx);
- extern rtx gen_scvtfv4hi3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfv4hi3 (rtx, rtx, rtx);
- extern rtx gen_scvtfv8hi3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfv8hi3 (rtx, rtx, rtx);
- extern rtx gen_scvtfv2si3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfv2si3 (rtx, rtx, rtx);
- extern rtx gen_scvtfv4si3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfv4si3 (rtx, rtx, rtx);
- extern rtx gen_scvtfv2di3 (rtx, rtx, rtx);
- extern rtx gen_ucvtfv2di3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_hi_v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_vec_unpacks_hi_v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_float_extend_lo_v2df (rtx, rtx);
- extern rtx gen_aarch64_float_extend_lo_v4sf (rtx, rtx);
- extern rtx gen_aarch64_float_truncate_lo_v2sf (rtx, rtx);
- extern rtx gen_aarch64_float_truncate_lo_v4hf (rtx, rtx);
- extern rtx gen_aarch64_float_truncate_hi_v4sf_le (rtx, rtx, rtx);
- extern rtx gen_aarch64_float_truncate_hi_v8hf_le (rtx, rtx, rtx);
- extern rtx gen_aarch64_float_truncate_hi_v4sf_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_float_truncate_hi_v8hf_be (rtx, rtx, rtx);
- extern rtx gen_smaxv4hf3 (rtx, rtx, rtx);
- extern rtx gen_sminv4hf3 (rtx, rtx, rtx);
- extern rtx gen_smaxv8hf3 (rtx, rtx, rtx);
- extern rtx gen_sminv8hf3 (rtx, rtx, rtx);
- extern rtx gen_smaxv2sf3 (rtx, rtx, rtx);
- extern rtx gen_sminv2sf3 (rtx, rtx, rtx);
- extern rtx gen_smaxv4sf3 (rtx, rtx, rtx);
- extern rtx gen_sminv4sf3 (rtx, rtx, rtx);
- extern rtx gen_smaxv2df3 (rtx, rtx, rtx);
- extern rtx gen_sminv2df3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanv4hf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanv4hf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxv4hf3 (rtx, rtx, rtx);
- extern rtx gen_fminv4hf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanv8hf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanv8hf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxv8hf3 (rtx, rtx, rtx);
- extern rtx gen_fminv8hf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanv2sf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanv2sf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxv2sf3 (rtx, rtx, rtx);
- extern rtx gen_fminv2sf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanv4sf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanv4sf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxv4sf3 (rtx, rtx, rtx);
- extern rtx gen_fminv4sf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanv2df3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanv2df3 (rtx, rtx, rtx);
- extern rtx gen_fmaxv2df3 (rtx, rtx, rtx);
- extern rtx gen_fminv2df3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_faddpv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faddpv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faddpv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faddpv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faddpv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_reduc_plus_internalv8qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_plus_internalv16qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_plus_internalv4hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_plus_internalv8hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_plus_internalv4si (rtx, rtx);
- extern rtx gen_aarch64_reduc_plus_internalv2di (rtx, rtx);
- extern rtx gen_aarch64_zero_extendsi_reduc_plus_v8qi (rtx, rtx);
- extern rtx gen_aarch64_zero_extenddi_reduc_plus_v8qi (rtx, rtx);
- extern rtx gen_aarch64_zero_extendsi_reduc_plus_v16qi (rtx, rtx);
- extern rtx gen_aarch64_zero_extenddi_reduc_plus_v16qi (rtx, rtx);
- extern rtx gen_aarch64_zero_extendsi_reduc_plus_v4hi (rtx, rtx);
- extern rtx gen_aarch64_zero_extenddi_reduc_plus_v4hi (rtx, rtx);
- extern rtx gen_aarch64_zero_extendsi_reduc_plus_v8hi (rtx, rtx);
- extern rtx gen_aarch64_zero_extenddi_reduc_plus_v8hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_plus_internalv2si (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v2sf (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v2df (rtx, rtx);
- extern rtx gen_clrsbv8qi2 (rtx, rtx);
- extern rtx gen_clrsbv16qi2 (rtx, rtx);
- extern rtx gen_clrsbv4hi2 (rtx, rtx);
- extern rtx gen_clrsbv8hi2 (rtx, rtx);
- extern rtx gen_clrsbv2si2 (rtx, rtx);
- extern rtx gen_clrsbv4si2 (rtx, rtx);
- extern rtx gen_clzv8qi2 (rtx, rtx);
- extern rtx gen_clzv16qi2 (rtx, rtx);
- extern rtx gen_clzv4hi2 (rtx, rtx);
- extern rtx gen_clzv8hi2 (rtx, rtx);
- extern rtx gen_clzv2si2 (rtx, rtx);
- extern rtx gen_clzv4si2 (rtx, rtx);
- extern rtx gen_popcountv8qi2 (rtx, rtx);
- extern rtx gen_popcountv16qi2 (rtx, rtx);
- extern rtx gen_aarch64_reduc_umax_internalv8qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umin_internalv8qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv8qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv8qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umax_internalv16qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umin_internalv16qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv16qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv16qi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umax_internalv4hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umin_internalv4hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv4hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv4hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umax_internalv8hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umin_internalv8hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv8hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv8hi (rtx, rtx);
- extern rtx gen_aarch64_reduc_umax_internalv4si (rtx, rtx);
- extern rtx gen_aarch64_reduc_umin_internalv4si (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv4si (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv4si (rtx, rtx);
- extern rtx gen_aarch64_reduc_umax_internalv2si (rtx, rtx);
- extern rtx gen_aarch64_reduc_umin_internalv2si (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv2si (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv2si (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_nan_internalv4hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_nan_internalv4hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv4hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv4hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_nan_internalv8hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_nan_internalv8hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv8hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv8hf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_nan_internalv2sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_nan_internalv2sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv2sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv2sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_nan_internalv4sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_nan_internalv4sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv4sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv4sf (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_nan_internalv2df (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_nan_internalv2df (rtx, rtx);
- extern rtx gen_aarch64_reduc_smax_internalv2df (rtx, rtx);
- extern rtx gen_aarch64_reduc_smin_internalv2df (rtx, rtx);
- extern rtx gen_aarch64_simd_bslv8qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv4hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv2si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv2di_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bsldi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bsldi_alt (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_lanev2df (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesv8qi (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesv4hi (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesv4bf (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesv4hf (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesv2si (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesv2sf (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesdi (rtx, rtx, rtx);
- extern rtx gen_load_pair_lanesdf (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesv8qi (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesv4hi (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesv4bf (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesv4hf (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesv2si (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesv2sf (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesdi (rtx, rtx, rtx);
- extern rtx gen_store_pair_lanesdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezv4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinezdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinez_bedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv16qi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv16qi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv16qi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv16qi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv8hi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv8hi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv8hi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv8hi_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv4si_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv4si_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv4si_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv4si_hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv16qi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv16qi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv16qi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv16qi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv8hi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv8hi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv8hi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv8hi_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv4si_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv4si_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv4si_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv4si_lo_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssublv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_usublv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubwv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubwv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubwv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubwv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubwv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubwv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubwv16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usubwv16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubwv8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usubwv8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubwv4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usubwv4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubw2v16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usubw2v16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubw2v8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usubw2v8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubw2v4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usubw2v4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddwv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddwv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddwv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddwv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddwv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddwv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddwv16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddwv16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddwv8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddwv8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddwv4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddwv4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddw2v16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddw2v16qi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddw2v8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddw2v8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_saddw2v4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddw2v4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_shaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srhaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urhaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shsubv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhsubv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srhaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urhaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shsubv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhsubv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srhaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urhaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shsubv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhsubv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srhaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urhaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shsubv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhsubv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_shaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_srhaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_urhaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_shsubv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhsubv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_shaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_srhaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_urhaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_shsubv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uhsubv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_addhnv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_raddhnv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_subhnv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsubhnv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_addhnv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_raddhnv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_subhnv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsubhnv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_addhnv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_raddhnv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_subhnv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsubhnv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_addhn2v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_raddhn2v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_subhn2v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_rsubhn2v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_addhn2v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_raddhn2v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_subhn2v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_rsubhn2v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_addhn2v2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_raddhn2v2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_subhn2v2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_rsubhn2v2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pmulv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pmulv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxhf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxsf (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmulxdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqaddsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqaddsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqadddi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqadddi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqsubdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqsubdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqaddsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqaddsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_suqadddi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usqadddi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqmovunv8hi (rtx, rtx);
- extern rtx gen_aarch64_sqmovunv4si (rtx, rtx);
- extern rtx gen_aarch64_sqmovunv2di (rtx, rtx);
- extern rtx gen_aarch64_sqmovunhi (rtx, rtx);
- extern rtx gen_aarch64_sqmovunsi (rtx, rtx);
- extern rtx gen_aarch64_sqmovundi (rtx, rtx);
- extern rtx gen_aarch64_sqmovnv8hi (rtx, rtx);
- extern rtx gen_aarch64_uqmovnv8hi (rtx, rtx);
- extern rtx gen_aarch64_sqmovnv4si (rtx, rtx);
- extern rtx gen_aarch64_uqmovnv4si (rtx, rtx);
- extern rtx gen_aarch64_sqmovnv2di (rtx, rtx);
- extern rtx gen_aarch64_uqmovnv2di (rtx, rtx);
- extern rtx gen_aarch64_sqmovnhi (rtx, rtx);
- extern rtx gen_aarch64_uqmovnhi (rtx, rtx);
- extern rtx gen_aarch64_sqmovnsi (rtx, rtx);
- extern rtx gen_aarch64_uqmovnsi (rtx, rtx);
- extern rtx gen_aarch64_sqmovndi (rtx, rtx);
- extern rtx gen_aarch64_uqmovndi (rtx, rtx);
- extern rtx gen_aarch64_sqnegv8qi (rtx, rtx);
- extern rtx gen_aarch64_sqabsv8qi (rtx, rtx);
- extern rtx gen_aarch64_sqnegv16qi (rtx, rtx);
- extern rtx gen_aarch64_sqabsv16qi (rtx, rtx);
- extern rtx gen_aarch64_sqnegv4hi (rtx, rtx);
- extern rtx gen_aarch64_sqabsv4hi (rtx, rtx);
- extern rtx gen_aarch64_sqnegv8hi (rtx, rtx);
- extern rtx gen_aarch64_sqabsv8hi (rtx, rtx);
- extern rtx gen_aarch64_sqnegv2si (rtx, rtx);
- extern rtx gen_aarch64_sqabsv2si (rtx, rtx);
- extern rtx gen_aarch64_sqnegv4si (rtx, rtx);
- extern rtx gen_aarch64_sqabsv4si (rtx, rtx);
- extern rtx gen_aarch64_sqnegv2di (rtx, rtx);
- extern rtx gen_aarch64_sqabsv2di (rtx, rtx);
- extern rtx gen_aarch64_sqnegqi (rtx, rtx);
- extern rtx gen_aarch64_sqabsqi (rtx, rtx);
- extern rtx gen_aarch64_sqneghi (rtx, rtx);
- extern rtx gen_aarch64_sqabshi (rtx, rtx);
- extern rtx gen_aarch64_sqnegsi (rtx, rtx);
- extern rtx gen_aarch64_sqabssi (rtx, rtx);
- extern rtx gen_aarch64_sqnegdi (rtx, rtx);
- extern rtx gen_aarch64_sqabsdi (rtx, rtx);
- extern rtx gen_aarch64_sqdmulhv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulhv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulhv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulhv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulhv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulhv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulhv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulhv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulhhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulhhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulhsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulhsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_laneqv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_laneqv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_laneqv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_laneqv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_laneqv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_laneqv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_laneqv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_laneqv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_lanehi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_lanehi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_lanesi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_lanesi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_laneqhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_laneqhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmulh_laneqsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmulh_laneqsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlahv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlshv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlahv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlshv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlahv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlshv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlahv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlshv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlahhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlshhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlahsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlshsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_lanev4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_lanev4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_lanev8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_lanev8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_lanev2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_lanev2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_lanev4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_lanev4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_lanehi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_lanehi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_lanesi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_lanesi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_laneqv2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_laneqv2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_laneqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_laneqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_laneqhi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_laneqhi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlah_laneqsi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdmlsh_laneqsi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlalv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlslv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlalv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlslv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlalhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlslhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlalsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlslsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_lanev4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_lanev4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_lanev2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_lanev2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_laneqv4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_laneqv2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_laneqv2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_lanehi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_lanehi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_lanesi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_lanesi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_laneqhi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_laneqhi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_laneqsi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_laneqsi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2v8hi_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2v8hi_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2v4si_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2v4si_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_lanev8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_lanev8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_lanev4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_lanev4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_laneqv8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_laneqv8hi_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_laneqv4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_laneqv4si_internal (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_nv8hi_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_nv8hi_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_nv4si_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_nv4si_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmullv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmullv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmullhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmullsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_laneqv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_laneqv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_lanehi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_lanesi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_laneqhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_laneqsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2v8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2v4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_lanev8hi_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_lanev4si_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_laneqv8hi_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_laneqv4si_internal (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_nv8hi_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_nv4si_internal (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshlsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshlsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshlsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshll_nv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushll_nv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshll_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushll_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshll_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushll_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshll2_nv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushll2_nv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshll2_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushll2_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sshll2_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ushll2_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_nv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_nv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_nv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_nv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_srshr_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_urshr_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssra_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usra_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_srsra_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ursra_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_nv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_nv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_nv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_nv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_nv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssli_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usli_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ssri_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usri_ndi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshlu_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshl_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshl_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrun_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrun_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrn_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshrn_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrn_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshrn_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrun_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrun_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrn_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshrn_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrn_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshrn_nv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrun_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrun_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrn_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshrn_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrn_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshrn_nv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrun_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrun_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrn_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshrn_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrn_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshrn_nhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrun_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrun_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrn_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshrn_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrn_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshrn_nsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrun_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrun_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqshrn_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqshrn_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrshrn_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uqrshrn_ndi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmledi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltuv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleuv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeuv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtuv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltuv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleuv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeuv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtuv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltuv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleuv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeuv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtuv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltuv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleuv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeuv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtuv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltuv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleuv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeuv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtuv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltuv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleuv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeuv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtuv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltuv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleuv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeuv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtuv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltudi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmleudi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgeudi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtudi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstv8qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstv16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstv4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstv8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstv2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstv2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmtstdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlthf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlehf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqhf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgehf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgthf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltsf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmlesf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqsf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgesf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtsf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmltdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmledf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmeqdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_cmgtdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facltv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgtv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facltv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgtv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facltv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgtv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facltv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgtv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facltv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgtv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclthf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclehf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgehf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgthf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facltsf (rtx, rtx, rtx);
- extern rtx gen_aarch64_faclesf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgesf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgtsf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facltdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facledf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_facgtdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_addpv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_addpv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_addpv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_addpdi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v2di (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v2df (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2v8bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv8qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv4hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv2si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv4si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv4bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv8bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv2di (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv4hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv2sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rv2df (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rdi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld2rdf (rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanev2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesoi_lanedf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_st2v16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_st2v8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_st2v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_st2v2di (rtx, rtx);
- extern rtx gen_aarch64_simd_st2v8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_st2v4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_st2v2df (rtx, rtx);
- extern rtx gen_aarch64_simd_st2v8bf (rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesoi_lanedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v2di (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v2df (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3v8bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv8qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv4hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv2si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv4si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv4bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv8bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv2di (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv4hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv2sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rv2df (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rdi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld3rdf (rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanev2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesci_lanedf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_st3v16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_st3v8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_st3v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_st3v2di (rtx, rtx);
- extern rtx gen_aarch64_simd_st3v8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_st3v4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_st3v2df (rtx, rtx);
- extern rtx gen_aarch64_simd_st3v8bf (rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesci_lanedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v2di (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v2df (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4v8bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv8qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv4hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv2si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv4si (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv4bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv8bf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv2di (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv4hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv2sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rv2df (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rdi (rtx, rtx);
- extern rtx gen_aarch64_simd_ld4rdf (rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanev2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_load_lanesxi_lanedf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_st4v16qi (rtx, rtx);
- extern rtx gen_aarch64_simd_st4v8hi (rtx, rtx);
- extern rtx gen_aarch64_simd_st4v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_st4v2di (rtx, rtx);
- extern rtx gen_aarch64_simd_st4v8hf (rtx, rtx);
- extern rtx gen_aarch64_simd_st4v4sf (rtx, rtx);
- extern rtx gen_aarch64_simd_st4v2df (rtx, rtx);
- extern rtx gen_aarch64_simd_st4v8bf (rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_store_lanesxi_lanedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rev_reglistoi (rtx, rtx, rtx);
- extern rtx gen_aarch64_rev_reglistci (rtx, rtx, rtx);
- extern rtx gen_aarch64_rev_reglistxi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v2si (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v4si (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v2di (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_v2df (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_di (rtx, rtx);
- extern rtx gen_aarch64_ld1_x3_df (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v2si (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v4si (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v2di (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_v2df (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_di (rtx, rtx);
- extern rtx gen_aarch64_ld1_x4_df (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v8qi (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v16qi (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v4hi (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v8hi (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v2si (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v4si (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v4bf (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v8bf (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v2di (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v4hf (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v8hf (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v2sf (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v4sf (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_v2df (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_di (rtx, rtx);
- extern rtx gen_aarch64_st1_x2_df (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v8qi (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v16qi (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v4hi (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v8hi (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v2si (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v4si (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v4bf (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v8bf (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v2di (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v4hf (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v8hf (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v2sf (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v4sf (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_v2df (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_di (rtx, rtx);
- extern rtx gen_aarch64_st1_x3_df (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v8qi (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v16qi (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v4hi (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v8hi (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v2si (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v4si (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v4bf (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v8bf (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v2di (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v4hf (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v8hf (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v2sf (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v4sf (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_v2df (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_di (rtx, rtx);
- extern rtx gen_aarch64_st1_x4_df (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v8qi (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v16qi (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v4hi (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v8hi (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v2si (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v4si (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v2di (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v4hf (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v8hf (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v4bf (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v8bf (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v2sf (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v4sf (rtx, rtx);
- extern rtx gen_aarch64_be_ld1v2df (rtx, rtx);
- extern rtx gen_aarch64_be_ld1di (rtx, rtx);
- extern rtx gen_aarch64_be_st1v8qi (rtx, rtx);
- extern rtx gen_aarch64_be_st1v16qi (rtx, rtx);
- extern rtx gen_aarch64_be_st1v4hi (rtx, rtx);
- extern rtx gen_aarch64_be_st1v8hi (rtx, rtx);
- extern rtx gen_aarch64_be_st1v2si (rtx, rtx);
- extern rtx gen_aarch64_be_st1v4si (rtx, rtx);
- extern rtx gen_aarch64_be_st1v2di (rtx, rtx);
- extern rtx gen_aarch64_be_st1v4hf (rtx, rtx);
- extern rtx gen_aarch64_be_st1v8hf (rtx, rtx);
- extern rtx gen_aarch64_be_st1v4bf (rtx, rtx);
- extern rtx gen_aarch64_be_st1v8bf (rtx, rtx);
- extern rtx gen_aarch64_be_st1v2sf (rtx, rtx);
- extern rtx gen_aarch64_be_st1v4sf (rtx, rtx);
- extern rtx gen_aarch64_be_st1v2df (rtx, rtx);
- extern rtx gen_aarch64_be_st1di (rtx, rtx);
- extern rtx gen_aarch64_ld2v8qi_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld2v4hi_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld2v4hf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld2v2si_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld2v2sf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld2v4bf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld2di_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld2df_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3v8qi_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3v4hi_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3v4hf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3v2si_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3v2sf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3v4bf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3di_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld3df_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4v8qi_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4v4hi_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4v4hf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4v2si_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4v2sf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4v4bf_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4di_dreg (rtx, rtx);
- extern rtx gen_aarch64_ld4df_dreg (rtx, rtx);
- extern rtx gen_aarch64_tbl1v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_tbl1v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_tbl2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_tbl3v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_tbl3v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_tbx4v8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_tbx4v16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbl3v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbl3v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbx3v8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbx3v16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbl4v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbl4v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbx4v8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_qtbx4v16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_combinev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip1v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_zip2v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn1v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_trn2v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp1v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_uzp2v2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_extv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_extv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_rev64v8qi (rtx, rtx);
- extern rtx gen_aarch64_rev32v8qi (rtx, rtx);
- extern rtx gen_aarch64_rev16v8qi (rtx, rtx);
- extern rtx gen_aarch64_rev64v16qi (rtx, rtx);
- extern rtx gen_aarch64_rev32v16qi (rtx, rtx);
- extern rtx gen_aarch64_rev16v16qi (rtx, rtx);
- extern rtx gen_aarch64_rev64v4hi (rtx, rtx);
- extern rtx gen_aarch64_rev32v4hi (rtx, rtx);
- extern rtx gen_aarch64_rev16v4hi (rtx, rtx);
- extern rtx gen_aarch64_rev64v8hi (rtx, rtx);
- extern rtx gen_aarch64_rev32v8hi (rtx, rtx);
- extern rtx gen_aarch64_rev16v8hi (rtx, rtx);
- extern rtx gen_aarch64_rev64v2si (rtx, rtx);
- extern rtx gen_aarch64_rev32v2si (rtx, rtx);
- extern rtx gen_aarch64_rev16v2si (rtx, rtx);
- extern rtx gen_aarch64_rev64v4si (rtx, rtx);
- extern rtx gen_aarch64_rev32v4si (rtx, rtx);
- extern rtx gen_aarch64_rev16v4si (rtx, rtx);
- extern rtx gen_aarch64_rev64v2di (rtx, rtx);
- extern rtx gen_aarch64_rev32v2di (rtx, rtx);
- extern rtx gen_aarch64_rev16v2di (rtx, rtx);
- extern rtx gen_aarch64_rev64v4hf (rtx, rtx);
- extern rtx gen_aarch64_rev32v4hf (rtx, rtx);
- extern rtx gen_aarch64_rev16v4hf (rtx, rtx);
- extern rtx gen_aarch64_rev64v8hf (rtx, rtx);
- extern rtx gen_aarch64_rev32v8hf (rtx, rtx);
- extern rtx gen_aarch64_rev16v8hf (rtx, rtx);
- extern rtx gen_aarch64_rev64v4bf (rtx, rtx);
- extern rtx gen_aarch64_rev32v4bf (rtx, rtx);
- extern rtx gen_aarch64_rev16v4bf (rtx, rtx);
- extern rtx gen_aarch64_rev64v8bf (rtx, rtx);
- extern rtx gen_aarch64_rev32v8bf (rtx, rtx);
- extern rtx gen_aarch64_rev16v8bf (rtx, rtx);
- extern rtx gen_aarch64_rev64v2sf (rtx, rtx);
- extern rtx gen_aarch64_rev32v2sf (rtx, rtx);
- extern rtx gen_aarch64_rev16v2sf (rtx, rtx);
- extern rtx gen_aarch64_rev64v4sf (rtx, rtx);
- extern rtx gen_aarch64_rev32v4sf (rtx, rtx);
- extern rtx gen_aarch64_rev16v4sf (rtx, rtx);
- extern rtx gen_aarch64_rev64v2df (rtx, rtx);
- extern rtx gen_aarch64_rev32v2df (rtx, rtx);
- extern rtx gen_aarch64_rev16v2df (rtx, rtx);
- extern rtx gen_aarch64_st2v8qi_dreg (rtx, rtx);
- extern rtx gen_aarch64_st2v4hi_dreg (rtx, rtx);
- extern rtx gen_aarch64_st2v4hf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st2v2si_dreg (rtx, rtx);
- extern rtx gen_aarch64_st2v2sf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st2v4bf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st2di_dreg (rtx, rtx);
- extern rtx gen_aarch64_st2df_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3v8qi_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3v4hi_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3v4hf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3v2si_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3v2sf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3v4bf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3di_dreg (rtx, rtx);
- extern rtx gen_aarch64_st3df_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4v8qi_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4v4hi_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4v4hf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4v2si_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4v2sf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4v4bf_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4di_dreg (rtx, rtx);
- extern rtx gen_aarch64_st4df_dreg (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v16qi_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v8hi_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v4si_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v2di_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v8hf_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v4sf_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v2df_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v8bf_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v8qi_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v4hi_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v4bf_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v4hf_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v2si_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1v2sf_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1di_x2 (rtx, rtx);
- extern rtx gen_aarch64_simd_ld1df_x2 (rtx, rtx);
- extern rtx gen_aarch64_frecpev4hf (rtx, rtx);
- extern rtx gen_aarch64_frecpev8hf (rtx, rtx);
- extern rtx gen_aarch64_frecpev2sf (rtx, rtx);
- extern rtx gen_aarch64_frecpev4sf (rtx, rtx);
- extern rtx gen_aarch64_frecpev2df (rtx, rtx);
- extern rtx gen_aarch64_frecpehf (rtx, rtx);
- extern rtx gen_aarch64_frecpesf (rtx, rtx);
- extern rtx gen_aarch64_frecpedf (rtx, rtx);
- extern rtx gen_aarch64_frecpxhf (rtx, rtx);
- extern rtx gen_aarch64_frecpxsf (rtx, rtx);
- extern rtx gen_aarch64_frecpxdf (rtx, rtx);
- extern rtx gen_aarch64_frecpsv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpsv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpsv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpsv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpsv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpshf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpssf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpsdf (rtx, rtx, rtx);
- extern rtx gen_aarch64_urecpev2si (rtx, rtx);
- extern rtx gen_aarch64_urecpev4si (rtx, rtx);
- extern rtx gen_aarch64_crypto_aesev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_aesdv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_aesmcv16qi (rtx, rtx);
- extern rtx gen_aarch64_crypto_aesimcv16qi (rtx, rtx);
- extern rtx gen_aarch64_crypto_sha1hsi (rtx, rtx);
- extern rtx gen_aarch64_crypto_sha1hv4si (rtx, rtx);
- extern rtx gen_aarch64_be_crypto_sha1hv4si (rtx, rtx);
- extern rtx gen_aarch64_crypto_sha1su1v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha1cv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha1mv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha1pv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha1su0v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha256hv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha256h2v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha256su0v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha256su1v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha512hqv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha512h2qv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha512su0qv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_sha512su1qv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_eor3qv16qi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_eor3qv8hi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_eor3qv4si4 (rtx, rtx, rtx, rtx);
- extern rtx gen_eor3qv2di4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_rax1qv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_xarqv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_bcaxqv16qi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_bcaxqv8hi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_bcaxqv4si4 (rtx, rtx, rtx, rtx);
- extern rtx gen_bcaxqv2di4 (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm3ss1qv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm3tt1aqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm3tt1bqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm3tt2aqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm3tt2bqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm3partw1qv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm3partw2qv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sm4eqv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sm4ekeyqv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlal_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlalq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlsl_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlslq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlal_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlalq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlsl_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlslq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlal_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlsl_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlal_lane_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlsl_lane_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlalq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlslq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlalq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlslq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlal_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlsl_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlal_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlsl_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlalq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlslq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlalq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_fmlslq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_pmulldi (rtx, rtx, rtx);
- extern rtx gen_aarch64_crypto_pmullv2di (rtx, rtx, rtx);
- extern rtx gen_extendv8qiv8hi2 (rtx, rtx);
- extern rtx gen_zero_extendv8qiv8hi2 (rtx, rtx);
- extern rtx gen_extendv4hiv4si2 (rtx, rtx);
- extern rtx gen_zero_extendv4hiv4si2 (rtx, rtx);
- extern rtx gen_extendv2siv2di2 (rtx, rtx);
- extern rtx gen_zero_extendv2siv2di2 (rtx, rtx);
- extern rtx gen_truncv8hiv8qi2 (rtx, rtx);
- extern rtx gen_truncv4siv4hi2 (rtx, rtx);
- extern rtx gen_truncv2div2si2 (rtx, rtx);
- extern rtx gen_aarch64_bfdotv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfdotv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfdot_lanev2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfdot_laneqv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfdot_lanev4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfdot_laneqv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfmmlaqv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfmlalbv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfmlaltv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfmlalb_lanev4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfmlalt_lanev4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfmlalb_lane_qv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfmlalt_lane_qv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_smmlav16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_ummlav16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_usmmlav16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bfcvtnv4bf (rtx, rtx);
- extern rtx gen_aarch64_bfcvtn_qv8bf (rtx, rtx);
- extern rtx gen_aarch64_bfcvtn2v8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_bfcvtbf (rtx, rtx);
- extern rtx gen_aarch64_vbfcvtv4bf (rtx, rtx);
- extern rtx gen_aarch64_vbfcvtv8bf (rtx, rtx);
- extern rtx gen_aarch64_vbfcvt_highv8bf (rtx, rtx);
- extern rtx gen_aarch64_bfcvtsf (rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapqi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swaphi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapti (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapqi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swaphi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapsi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapdi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_compare_and_swapti_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangeqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangehi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangesi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangeqi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangehi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangesi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_exchangedi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_addqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_subqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_orqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xorqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_andqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_addhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_subhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_orhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xorhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_andhi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_addsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_subsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_orsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xorsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_andsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_adddi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_subdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_ordi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xordi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_anddi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_iorqi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_bicqi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xorqi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_addqi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_iorhi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_bichi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xorhi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_addhi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_iorsi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_bicsi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xorsi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_addsi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_iordi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_bicdi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xordi_lse (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_adddi_lse (rtx, rtx, rtx);
- extern rtx gen_atomic_nandqi (rtx, rtx, rtx);
- extern rtx gen_atomic_nandhi (rtx, rtx, rtx);
- extern rtx gen_atomic_nandsi (rtx, rtx, rtx);
- extern rtx gen_atomic_nanddi (rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_addqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_subqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_orqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xorqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_andqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_addhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_subhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_orhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xorhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_andhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_addsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_subsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_orsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xorsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_andsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_adddi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_subdi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_ordi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xordi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_anddi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_iorqi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_bicqi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xorqi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_addqi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_iorhi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_bichi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xorhi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_addhi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_iorsi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_bicsi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xorsi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_addsi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_iordi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_bicdi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_xordi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_fetch_adddi_lse (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_nandqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_nandhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_nandsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_nanddi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_add_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_sub_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_or_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xor_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_and_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_add_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_sub_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_or_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xor_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_and_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_add_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_sub_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_or_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xor_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_and_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_add_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_sub_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_or_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_xor_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_atomic_and_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_nand_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_nand_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_nand_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_nand_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_loadqi (rtx, rtx, rtx);
- extern rtx gen_atomic_loadhi (rtx, rtx, rtx);
- extern rtx gen_atomic_loadsi (rtx, rtx, rtx);
- extern rtx gen_atomic_loaddi (rtx, rtx, rtx);
- extern rtx gen_atomic_storeqi (rtx, rtx, rtx);
- extern rtx gen_atomic_storehi (rtx, rtx, rtx);
- extern rtx gen_atomic_storesi (rtx, rtx, rtx);
- extern rtx gen_atomic_storedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_load_exclusiveqi (rtx, rtx, rtx);
- extern rtx gen_aarch64_load_exclusivehi (rtx, rtx, rtx);
- extern rtx gen_aarch64_load_exclusivesi (rtx, rtx, rtx);
- extern rtx gen_aarch64_load_exclusivedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_load_exclusive_pair (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_store_exclusiveqi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_store_exclusivehi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_store_exclusivesi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_store_exclusivedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_store_exclusive_pair (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx4qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx2qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx2hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx2hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx32qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx16hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx4di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx16bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx16hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx4df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx48qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx24hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx12si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx6di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx24bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx24hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx12sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx6df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx64qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx32hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx16si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx32bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx32hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx16sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_movvnx8df (rtx, rtx, rtx);
- extern rtx gen_aarch64_wrffr (rtx);
- extern rtx gen_aarch64_update_ffr_for_load (void);
- extern rtx gen_aarch64_copy_ffr_to_ffrt (void);
- extern rtx gen_aarch64_rdffr (rtx);
- extern rtx gen_aarch64_rdffr_z (rtx, rtx);
- extern rtx gen_aarch64_update_ffrt (void);
- extern rtx gen_maskloadvnx16qivnx16bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx8qivnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx4qivnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx2qivnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx8hivnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx4hivnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx2hivnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx8hfvnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx4hfvnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx2hfvnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx8bfvnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx4sivnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx2sivnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx4sfvnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx2sfvnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx2divnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskloadvnx2dfvnx2bi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx32qivnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx16hivnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx8sivnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx4divnx2di (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx16bfvnx8bf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx16hfvnx8hf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx8sfvnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx4dfvnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx48qivnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx24hivnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx12sivnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx6divnx2di (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx24bfvnx8bf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx24hfvnx8hf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx12sfvnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx6dfvnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx64qivnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx32hivnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx16sivnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx8divnx2di (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx32bfvnx8bf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx32hfvnx8hf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx16sfvnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_load_lanesvnx8dfvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_load_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_load_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_load_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_load_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_load_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_load_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
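Next come gen_aarch64_ldff1* and gen_aarch64_ldnf1*, the first-faulting (LDFF1) and non-faulting (LDNF1) loads: LDFF1 may fault only on the first active element and LDNF1 not at all, and in both cases elements that could not be loaded are flagged in the first-fault register (FFR), which lets speculative vector loops probe memory safely. A hedged sketch of emitting one, with the operand roles (destination, governing predicate, source memory) assumed from the three-rtx signature rather than documented here:

/* Sketch: first-faulting load of one byte per lane.  `dest' should be a
   VNx16QI register, `pred' a VNx16BI predicate and `mem' the source MEM;
   the names are illustrative placeholders.  */
static void
emit_ldff1_b (rtx dest, rtx pred, rtx mem)
{
  emit_insn (gen_aarch64_ldff1vnx16qi (dest, pred, mem));
}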
- extern rtx gen_aarch64_ldff1vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx8hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2hivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2sivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2sivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_ldff1_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldff1_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldff1_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_ldnf1_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_ldnf1_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_ldff1_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnf1_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx);
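The gen_aarch64_ldnt1* group below (mirrored later by gen_aarch64_stnt1*) wraps LDNT1/STNT1, the non-temporal contiguous accesses that hint the loaded or stored data will not be reused soon. Because the generators are mode-suffixed, backend code that has only a machine_mode in hand must dispatch to the right one; a sketch of that dispatch (GCC's parameterized `@' patterns generate equivalent mode-polymorphic helpers, so a hand-written switch like this is only illustrative):

/* Sketch: pick the LDNT1 generator matching MODE.  Operand roles assumed
   to be (destination, predicate, memory).  */
static rtx
gen_ldnt1_for_mode (machine_mode mode, rtx dest, rtx pred, rtx mem)
{
  switch (mode)
    {
    case E_VNx16QImode: return gen_aarch64_ldnt1vnx16qi (dest, pred, mem);
    case E_VNx8HImode:  return gen_aarch64_ldnt1vnx8hi (dest, pred, mem);
    case E_VNx4SImode:  return gen_aarch64_ldnt1vnx4si (dest, pred, mem);
    case E_VNx2DImode:  return gen_aarch64_ldnt1vnx2di (dest, pred, mem);
    default: gcc_unreachable ();
    }
}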
- extern rtx gen_aarch64_ldnt1vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnt1vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnt1vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnt1vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnt1vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnt1vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnt1vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_ldnt1vnx2df (rtx, rtx, rtx);
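gen_mask_gather_load* below instantiates the vectorizer's mask_gather_load optab on the SVE LD1 gather forms, one variant per data mode and offset mode (VNx4SI offsets for 32-bit containers, VNx2DI for 64-bit). Going by the optab's documentation, the six operands are the destination, a scalar base address, a vector of offsets, a constant choosing sign- versus zero-extension of the offsets, a constant scale, and the governing mask; the pattern in aarch64-sve.md remains the authoritative interface:

/* Sketch: gather 32-bit elements from base + (sign-extended offsets * 4).
   Operand order assumed from the mask_gather_load optab documentation.  */
static void
emit_gather_w (rtx dest, rtx base, rtx offsets, rtx mask)
{
  rtx sign_p = const0_rtx;               /* 0 selects sign-extension.  */
  rtx scale = gen_int_mode (4, Pmode);   /* Element size in bytes.     */
  emit_insn (gen_mask_gather_loadvnx4sivnx4si (dest, base, offsets,
                                               sign_p, scale, mask));
}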
- extern rtx gen_mask_gather_loadvnx4qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx4hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx2qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx2hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx2sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_gather_loadvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx4hivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_gather_load_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- static inline rtx gen_aarch64_gather_load_zero_extendvnx4hivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_zero_extendvnx4hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- extern rtx gen_aarch64_gather_load_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx2hivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx2sivnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_gather_load_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- static inline rtx gen_aarch64_gather_load_zero_extendvnx2hivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_zero_extendvnx2hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- extern rtx gen_aarch64_gather_load_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx2sivnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_gather_load_extendvnx2hivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- static inline rtx gen_aarch64_gather_load_zero_extendvnx2hivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_zero_extendvnx2hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- static inline rtx gen_aarch64_gather_load_extendvnx2sivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- static inline rtx gen_aarch64_gather_load_zero_extendvnx2sivnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_load_zero_extendvnx2sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e), rtx ARG_UNUSED (f), rtx ARG_UNUSED (g))
- {
- return 0;
- }
- extern rtx gen_aarch64_gather_load_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_load_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gathervnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gathervnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gathervnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gathervnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ldff1_gather_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_prefetchvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_prefetchvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_prefetchvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_prefetchvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx16qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx8hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx2divnx4si (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx16qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx8hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx4sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_gather_prefetchvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
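The gen_maskstore* block below implements the maskstore optab as predicated ST1 stores: only lanes selected by the predicate are written, which is what lets fully-masked loops store partial vectors at loop tails without touching inactive memory. The operand order here follows the documented optab convention (memory destination, source vector, mask):

/* Sketch: store only the active lanes of a VNx4SI vector.  */
static void
emit_masked_store_w (rtx mem, rtx value, rtx mask)
{
  emit_insn (gen_maskstorevnx4sivnx4bi (mem, value, mask));
}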
- extern rtx gen_maskstorevnx16qivnx16bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx8qivnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx4qivnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx2qivnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx8hivnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx4hivnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx2hivnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx8hfvnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx4hfvnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx2hfvnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx8bfvnx8bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx4sivnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx2sivnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx4sfvnx4bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx2sfvnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx2divnx2bi (rtx, rtx, rtx);
- extern rtx gen_maskstorevnx2dfvnx2bi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx32qivnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx16hivnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx8sivnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx4divnx2di (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx16bfvnx8bf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx16hfvnx8hf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx8sfvnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx4dfvnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx48qivnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx24hivnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx12sivnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx6divnx2di (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx24bfvnx8bf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx24hfvnx8hf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx12sfvnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx6dfvnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx64qivnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx32hivnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx16sivnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx8divnx2di (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx32bfvnx8bf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx32hfvnx8hf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx16sfvnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_mask_store_lanesvnx8dfvnx2df (rtx, rtx, rtx);
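gen_aarch64_store_trunc* below is the mirror image of the extending loads earlier in the list: each element is narrowed on the way out (ST1B/ST1H/ST1W of wider elements), so for example a VNx8HI vector can be stored as bytes without a separate pack step. The operand roles in this sketch (memory, predicate, source vector) are an assumption read off the three-rtx signature, not a documented contract:

/* Sketch: truncating store of halfword lanes as bytes.  */
static void
emit_trunc_store_b (rtx mem, rtx pred, rtx value)
{
  emit_insn (gen_aarch64_store_truncvnx8qivnx8hi (mem, pred, value));
}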
- extern rtx gen_aarch64_store_truncvnx8qivnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_truncvnx4qivnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_truncvnx4hivnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_truncvnx2qivnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_truncvnx2hivnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_store_truncvnx2sivnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_stnt1vnx2df (rtx, rtx, rtx);
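gen_mask_scatter_store* below is the store-side counterpart of the gathers above (the mask_scatter_store optab on ST1 scatter forms). Its operand list mirrors the gather one, with the stored vector taking the place of the destination; again this follows the generic optab documentation rather than anything stated in this header:

/* Sketch: scatter doubleword lanes to base + (offsets * 8).  Operand
   order assumed from the mask_scatter_store optab documentation.  */
static void
emit_scatter_d (rtx base, rtx offsets, rtx value, rtx mask)
{
  emit_insn (gen_mask_scatter_storevnx2divnx2di (base, offsets,
                                                 const0_rtx,
                                                 gen_int_mode (8, Pmode),
                                                 value, mask));
}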
- extern rtx gen_mask_scatter_storevnx4qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx4hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx2qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx2hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx2sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_mask_scatter_storevnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_store_truncvnx4qivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_store_truncvnx4hivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_store_truncvnx2qivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_store_truncvnx2hivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_store_truncvnx2sivnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx16qi_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx8hi_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx4si_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx2di_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx8bf_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx8hf_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx4sf_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx2df_le (rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx16qi_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx8hi_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx4si_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx2di_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx8bf_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx8hf_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx4sf_be (rtx, rtx, rtx);
- extern rtx gen_aarch64_vec_duplicate_vqvnx2df_be (rtx, rtx, rtx);
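The three replicate-load groups that follow cover broadcasting at increasing granularity: sve_ld1r broadcasts a single element to every active lane (LD1R), aarch64_sve_ld1rq replicates a 128-bit quadword across the vector (LD1RQ), and aarch64_sve_ld1ro replicates a 256-bit octaword (LD1RO, from the F64MM extension). A sketch for the quadword case, with (destination, predicate, memory) assumed from the signature:

/* Sketch: replicate a 128-bit block of four floats across the vector.  */
static void
emit_ld1rq_s (rtx dest, rtx pred, rtx mem)
{
  emit_insn (gen_aarch64_sve_ld1rqvnx4sf (dest, pred, mem));
}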
- extern rtx gen_sve_ld1rvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx4qi (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx2hf (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx2si (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_sve_ld1rvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rqvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ld1rovnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx2di (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx8bf (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_shl_insert_vnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx16qi (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx8qi (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx4qi (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx2qi (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx4hi (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx2hi (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx2si (rtx, rtx, rtx);
- extern rtx gen_vec_seriesvnx2di (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx4si (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx4si (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx2di (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx2di (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx8bf (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx8bf (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_extract_after_last_vnx2df (rtx, rtx, rtx);
- extern rtx gen_extract_last_vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_absvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_negvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_one_cmplvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clrsbvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clzvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_popcountvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qabsvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qnegvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_absvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_negvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_one_cmplvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clrsbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clzvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_popcountvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qabsvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qnegvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_absvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_negvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_one_cmplvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clrsbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clzvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_popcountvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qabsvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qnegvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_absvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_negvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_one_cmplvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clrsbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_clzvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_popcountvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qabsvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_qnegvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_rbitvnx16qi (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_revbvnx16qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_revbvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
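/* Annotation (not part of the generated file): pairs like the one above
   follow the convention of GCC's generated insn-flags.h.  A named pattern
   that exists for a given mode gets an extern gen_* declaration (defined
   in insn-emit.c), while a pattern that is not defined for that mode
   combination gets a static inline stub returning 0, so that mode-generic
   callers guarded by the corresponding HAVE_* macro still compile.  */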
- static inline rtx gen_aarch64_pred_revhvnx16qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_revhvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_revwvnx16qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_revwvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_rbitvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_revbvnx8hi (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_revhvnx8hi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_revhvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_revwvnx8hi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_revwvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_rbitvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_revbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_revhvnx4si (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_revwvnx4si (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_revwvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_rbitvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_revbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_revhvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_revwvnx2di (rtx, rtx, rtx);
- extern rtx gen_cond_rbitvnx16qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_revbvnx16qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_revbvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_revhvnx16qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_revhvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_revwvnx16qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_revwvnx16qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_rbitvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_revbvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_revhvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_revhvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_revwvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_revwvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_rbitvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_revbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_revhvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_revwvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_revwvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_rbitvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_revbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_revhvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_revwvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sxtvnx8hivnx8qi (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_sxtvnx8hivnx4qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx8hivnx2qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx8hivnx4hi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx8hivnx2hi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx8hivnx2si (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx4sivnx8qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_sxtvnx4sivnx4qi (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_sxtvnx4sivnx2qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_sxtvnx4sivnx4hi (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_sxtvnx4sivnx2hi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx4sivnx2si (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx2divnx8qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- static inline rtx gen_aarch64_pred_sxtvnx2divnx4qi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_sxtvnx2divnx2qi (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_sxtvnx2divnx4hi (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_sxtvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_sxtvnx2divnx2hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sxtvnx2divnx2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_sxtvnx8hivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_cond_sxtvnx8hivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx8hivnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx8hivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx8hivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx8hivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx8hivnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx8hivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx8hivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx8hivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx8hivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_cond_sxtvnx4sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_cond_sxtvnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_cond_sxtvnx4sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_cond_sxtvnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_aarch64_cond_sxtvnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_cond_sxtvnx2divnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_cond_sxtvnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_cond_sxtvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_aarch64_cond_sxtvnx2divnx2hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_sxtvnx2divnx2si (rtx, rtx, rtx, rtx);
- extern rtx gen_truncvnx8hivnx8qi2 (rtx, rtx);
- static inline rtx gen_truncvnx8hivnx4qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx8hivnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx8hivnx2qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx8hivnx2qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx8hivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx8hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx8hivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx8hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx8hivnx2si2 (rtx, rtx);
- static inline rtx
- gen_truncvnx8hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx4hivnx8qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4hivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx4hivnx4qi2 (rtx, rtx);
- static inline rtx gen_truncvnx4hivnx2qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4hivnx2qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx4hivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx4hivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx4hivnx2si2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2hivnx8qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2hivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2hivnx4qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2hivnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx2hivnx2qi2 (rtx, rtx);
- static inline rtx gen_truncvnx2hivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2hivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2hivnx2si2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx4sivnx8qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4sivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx4sivnx4qi2 (rtx, rtx);
- static inline rtx gen_truncvnx4sivnx2qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4sivnx2qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx4sivnx4hi2 (rtx, rtx);
- static inline rtx gen_truncvnx4sivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4sivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx4sivnx2si2 (rtx, rtx);
- static inline rtx
- gen_truncvnx4sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2sivnx8qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2sivnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2sivnx4qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2sivnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx2sivnx2qi2 (rtx, rtx);
- static inline rtx gen_truncvnx2sivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2sivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx2sivnx2hi2 (rtx, rtx);
- static inline rtx gen_truncvnx2sivnx2si2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2divnx8qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2divnx8qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_truncvnx2divnx4qi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2divnx4qi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx2divnx2qi2 (rtx, rtx);
- static inline rtx gen_truncvnx2divnx4hi2 (rtx, rtx);
- static inline rtx
- gen_truncvnx2divnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_truncvnx2divnx2hi2 (rtx, rtx);
- extern rtx gen_truncvnx2divnx2si2 (rtx, rtx);
- extern rtx gen_aarch64_sve_fexpavnx8hf (rtx, rtx);
- extern rtx gen_aarch64_sve_fexpavnx4sf (rtx, rtx);
- extern rtx gen_aarch64_sve_fexpavnx2df (rtx, rtx);
- extern rtx gen_aarch64_sve_frecpevnx8hf (rtx, rtx);
- extern rtx gen_aarch64_sve_frsqrtevnx8hf (rtx, rtx);
- extern rtx gen_aarch64_sve_frecpevnx4sf (rtx, rtx);
- extern rtx gen_aarch64_sve_frsqrtevnx4sf (rtx, rtx);
- extern rtx gen_aarch64_sve_frecpevnx2df (rtx, rtx);
- extern rtx gen_aarch64_sve_frsqrtevnx2df (rtx, rtx);
- extern rtx gen_aarch64_pred_absvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_negvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_frecpxvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_roundvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nearbyintvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_floorvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_frintnvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ceilvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_rintvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_btruncvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqrtvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_absvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_negvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_frecpxvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_roundvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nearbyintvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_floorvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_frintnvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ceilvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_rintvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_btruncvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqrtvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_absvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_negvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_frecpxvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_roundvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nearbyintvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_floorvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_frintnvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ceilvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_rintvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_btruncvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqrtvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_addvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_addvnx8qi3 (rtx, rtx, rtx);
- extern rtx gen_addvnx4qi3 (rtx, rtx, rtx);
- extern rtx gen_addvnx2qi3 (rtx, rtx, rtx);
- extern rtx gen_addvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_addvnx4hi3 (rtx, rtx, rtx);
- extern rtx gen_addvnx2hi3 (rtx, rtx, rtx);
- extern rtx gen_addvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_addvnx2si3 (rtx, rtx, rtx);
- extern rtx gen_addvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_subvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_subvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_subvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_subvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_adrvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_adrvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sabdvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uabdvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sabdvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uabdvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sabdvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uabdvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sabdvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uabdvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssaddvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sssubvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssaddvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sssubvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssaddvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sssubvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssaddvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sssubvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usaddvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ussubvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usaddvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ussubvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usaddvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ussubvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usaddvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ussubvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smulhvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umulhvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smulhvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umulhvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smulhvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umulhvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smulhvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umulhvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_divvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_udivvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_divvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_udivvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_andvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_andvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_andvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_andvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashlvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashrvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_lshrvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashlvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashrvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_lshrvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashlvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashrvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_lshrvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashlvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ashrvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_lshrvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_lslvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_asrvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_lsrvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_lslvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_asrvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_lsrvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_lslvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_asrvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_lsrvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ftsmulvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ftsselvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ftsmulvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ftsselvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ftsmulvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ftsselvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fscalevnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fscalevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fscalevnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_frecpsvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_frsqrtsvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_frecpsvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_frsqrtsvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_frecpsvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_frsqrtsvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_divvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulxvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_divvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulxvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_divvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulxvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_addvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_addvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_addvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cadd90vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cadd270vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cadd90vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cadd270vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cadd90vnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cadd270vnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_subvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_subvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_subvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_mulvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mul_lane_vnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mul_lane_vnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mul_lane_vnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smax_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smin_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smax_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smin_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smax_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smin_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_andvnx16bi3 (rtx, rtx, rtx);
- extern rtx gen_andvnx8bi3 (rtx, rtx, rtx);
- extern rtx gen_andvnx4bi3 (rtx, rtx, rtx);
- extern rtx gen_andvnx2bi3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_andvnx16bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_iorvnx16bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_xorvnx16bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_andvnx8bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_iorvnx8bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_xorvnx8bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_andvnx4bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_iorvnx4bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_xorvnx4bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_andvnx2bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_iorvnx2bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_xorvnx2bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_bicvnx16bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ornvnx16bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_bicvnx8bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ornvnx8bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_bicvnx4bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ornvnx4bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_bicvnx2bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ornvnx2bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_norvnx16bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nandvnx16bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_norvnx8bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nandvnx8bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_norvnx4bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nandvnx4bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_norvnx2bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nandvnx2bi_z (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmavnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmavnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmavnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmavnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmavnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmavnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmavnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmavnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_sdot_prodvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_udot_prodvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_sdot_prodvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_udot_prodvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sdot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sdot_prod_lanevnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_udot_prod_lanevnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdot_prodvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_usdot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sudot_prod_lanevnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smatmulvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umatmulvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_usmatmulvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fnmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fma_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fnma_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fma_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fnma_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fma_lane_vnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fnma_lane_vnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmlavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla90vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla180vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla270vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmlavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla90vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla180vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla270vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmlavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla90vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla180vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmla270vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla90_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla180_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla270_lane_vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla90_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla180_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fcmla270_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tmadvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tmadvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tmadvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bfdotvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bfmlalbvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bfmlaltvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bfmmlavnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bfdot_lanevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bfmlalb_lanevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bfmlalt_lanevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fmmlavnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fmmlavnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sel_dupvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpltvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplevnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpeqvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpnevnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgevnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgtvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplovnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplsvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphsvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphivnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpltvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplevnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpeqvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpnevnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgevnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgtvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplovnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplsvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphsvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphivnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpltvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplevnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpeqvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpnevnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgevnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgtvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplovnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplsvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphsvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphivnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpltvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplevnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpeqvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpnevnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgevnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgtvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplovnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplsvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphsvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphivnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpeqvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgevnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgtvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphivnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphsvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplevnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplovnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplsvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpltvnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpnevnx16qi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpeqvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgevnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgtvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphivnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphsvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplevnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplovnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplsvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpltvnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpnevnx8hi_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpeqvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgevnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpgtvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphivnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmphsvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplevnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplovnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmplsvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpltvnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cmpnevnx4si_wide (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_lesivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_gesivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_lesivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_gesivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_lesivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_gesivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_lesivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_gesivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ledivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_uledivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_gedivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx16bi (rtx, rtx, rtx);
- extern rtx gen_while_ledivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_uledivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_gedivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx8bi (rtx, rtx, rtx);
- extern rtx gen_while_ledivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_uledivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_gedivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx4bi (rtx, rtx, rtx);
- extern rtx gen_while_ledivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_uledivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_gedivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx2bi (rtx, rtx, rtx);
- extern rtx gen_while_lesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_lesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_lesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_lesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ulesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugesivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrsivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ledivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_uledivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gedivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx16bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ledivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_uledivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gedivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx8bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ledivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_uledivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gedivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx4bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ledivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ultdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_uledivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ltdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gedivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_gtdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugtdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_ugedivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_rwdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_while_wrdivnx2bi_ptest (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmeqvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmgevnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmgtvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmlevnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmltvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmnevnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmeqvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmgevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmgtvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmlevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmltvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmnevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmeqvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmgevnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmgtvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmlevnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmltvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmnevnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmuovnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmuovnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcmuovnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx16bivnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx8bivnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx4bivnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx2bivnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ptestvnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ptestvnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ptestvnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ptestvnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_after_last_vnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_fold_extract_last_vnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_after_last_vnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fold_extract_vector_last_vnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_sadd_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_uadd_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_sadd_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_uadd_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_sadd_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_uadd_vnx4si (rtx, rtx, rtx);
- static inline rtx gen_aarch64_pred_reduc_sadd_vnx2di (rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_pred_reduc_sadd_vnx2di(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c))
- {
- return 0;
- }
- extern rtx gen_aarch64_pred_reduc_uadd_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_and_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_ior_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umax_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umin_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_xor_vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_and_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_ior_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umax_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umin_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_xor_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_and_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_ior_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umax_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umin_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_xor_vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_and_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_ior_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umax_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_umin_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_xor_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_plus_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_nan_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_nan_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_plus_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_nan_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_nan_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_plus_vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_nan_vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smax_vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_nan_vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_reduc_smin_vnx2df (rtx, rtx, rtx);
- extern rtx gen_mask_fold_left_plus_vnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_mask_fold_left_plus_vnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_mask_fold_left_plus_vnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_tblvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_compactvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_compactvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_compactvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_compactvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dup_lanevnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_dupq_lanevnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx16qi (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx2di (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx8bf (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx8hf (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx4sf (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx2df (rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_splicevnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1qvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2qvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1qvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2qvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1qvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2qvnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_extvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx16bi (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx8bi (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx4bi (rtx, rtx);
- extern rtx gen_aarch64_sve_revvnx2bi (rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx16bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx16bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx16bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx16bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx16bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx16bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx8bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx8bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx8bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx8bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx8bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx8bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx4bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx4bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx4bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx4bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx4bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx4bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip1vnx2bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_zip2vnx2bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1vnx2bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn2vnx2bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp1vnx2bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uzp2vnx2bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1_convvnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1_convvnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1_convvnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_trn1_convvnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx8hi (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx4si (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sunpkhi_vnx16qi (rtx, rtx);
- extern rtx gen_aarch64_sve_uunpkhi_vnx16qi (rtx, rtx);
- extern rtx gen_aarch64_sve_sunpklo_vnx16qi (rtx, rtx);
- extern rtx gen_aarch64_sve_uunpklo_vnx16qi (rtx, rtx);
- extern rtx gen_aarch64_sve_sunpkhi_vnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_uunpkhi_vnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_sunpklo_vnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_uunpklo_vnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_sunpkhi_vnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_uunpkhi_vnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_sunpklo_vnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_uunpklo_vnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
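The conversion generators carry a <source-mode><destination-mode> suffix, so fix_trunc_nontruncvnx8hfvnx8hi converts VNx8HF floats to VNx8HI integers. Judging from which pairs are extern and which are stubs, the "_nontrunc" float-to-int patterns are enabled only when the integer elements are at least as wide as the float elements; narrowing pairs such as VNx4SF to VNx8HI are stubbed, and the separate "_trunc" variant (fix_trunc_truncvnx2dfvnx4si further down) handles the narrowing case. A sketch of that gate, assuming GCC's machmode.h helpers and a hypothetical predicate name:

/* Illustration only: mirrors the apparent enablement rule for the
   "_nontrunc" float->int generators above.  VNx4SF -> VNx8HI fails
   the test (32-bit floats, 16-bit ints), matching its stub.  */
static bool
nontrunc_fix_pair_available_p (machine_mode float_mode, machine_mode int_mode)
{
  return GET_MODE_UNIT_BITSIZE (int_mode)
         >= GET_MODE_UNIT_BITSIZE (float_mode);
}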
- extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fix_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fixuns_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fix_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fixuns_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_float_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_floatuns_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_float_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_float_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_floatuns_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_floatuns_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_float_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_float_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_floatuns_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_floatuns_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_sve_float_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_floatuns_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_float_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_floatuns_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_float_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_float_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_sve_floatuns_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_floatuns_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_sve_float_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_floatuns_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_float_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_floatuns_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_float_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_floatuns_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_float_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_floatuns_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fcvt_truncvnx4sfvnx8hf (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_fcvt_truncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fcvt_truncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_sve_fcvt_truncvnx2dfvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fcvt_truncvnx2dfvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fcvt_truncvnx4sfvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cvtntvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fcvt_nontruncvnx8hfvnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_fcvt_nontruncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_fcvt_nontruncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
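The same pruning explains the two vnx4sfvnx4sf stubs above: the FCVT "_trunc" and "_nontrunc" float-to-float patterns require the destination elements to be strictly narrower and strictly wider respectively, so the identity pair VNx4SF to VNx4SF is excluded on both sides and genflags emits a stub for each.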
- extern rtx gen_aarch64_sve_fcvt_nontruncvnx8hfvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fcvt_nontruncvnx4sfvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx8bi (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx4bi (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx2bi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_punpklo_vnx16bi (rtx, rtx);
- extern rtx gen_aarch64_sve_punpkhi_vnx16bi (rtx, rtx);
- extern rtx gen_aarch64_sve_punpklo_vnx8bi (rtx, rtx);
- extern rtx gen_aarch64_sve_punpkhi_vnx8bi (rtx, rtx);
- extern rtx gen_aarch64_sve_punpklo_vnx4bi (rtx, rtx);
- extern rtx gen_aarch64_sve_punpkhi_vnx4bi (rtx, rtx);
- extern rtx gen_aarch64_brka (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_brkb (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_brkn (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_brkpa (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_brkpb (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pfirstvnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pnextvnx16bi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_pfirstvnx8bi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_pfirstvnx8bi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_sve_pnextvnx8bi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_pfirstvnx4bi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_pfirstvnx4bi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_sve_pnextvnx4bi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_sve_pfirstvnx2bi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_sve_pfirstvnx2bi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_sve_pnextvnx2bi (rtx, rtx, rtx, rtx);
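The pfirst/pnext asymmetry above is architectural: SVE's PFIRST instruction exists only in a .B form, so just the VNx16BI generator is real and the half-, quarter- and eighth-width predicate modes get stubs, while PNEXT has .B/.H/.S/.D forms and all four of its generators stay extern.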
- extern rtx gen_aarch64_sve_cnt_pat (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incdi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincdi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincdi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincsi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincsi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decdi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecdi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecdi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecsi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecsi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecvnx2di_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecvnx4si_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cntpvnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cntpvnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cntpvnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cntpvnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldntvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldntvnx2df (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx8qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_zero_extendvnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_extendvnx2divnx8qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx8qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_zero_extendvnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- extern rtx gen_aarch64_gather_ldnt_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx4qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_gather_ldnt_extendvnx2divnx4qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx4qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_zero_extendvnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2qi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- extern rtx gen_aarch64_gather_ldnt_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx2qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldnt_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx4hi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_gather_ldnt_extendvnx2divnx4hi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx4hi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_zero_extendvnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2hi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- extern rtx gen_aarch64_gather_ldnt_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx2hi (rtx, rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_gather_ldnt_extendvnx4sivnx2si (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2si (rtx, rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_gather_ldnt_zero_extendvnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d), rtx ARG_UNUSED (e))
- {
-   return 0;
- }
- extern rtx gen_aarch64_gather_ldnt_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_gather_ldnt_zero_extendvnx2divnx2si (rtx, rtx, rtx, rtx, rtx);
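For the extending non-temporal gathers above, the suffix is <destination-mode><memory-element-mode>, and the extern/stub split tracks whether the memory mode has the same scalable element count as the destination, since an extending load is lane-for-lane: VNx4SI pairs with VNx4QI and VNx4HI, VNx2DI with VNx2QI, VNx2HI and VNx2SI, and mismatched counts such as VNx4SI with VNx8QI are stubbed. The truncating scatter stores that follow are pruned the same way. A sketch of that pairing rule, again with GCC's machmode.h helpers and a hypothetical predicate name:

/* Illustration only: same number of (scalable) lanes, strictly
   narrower memory elements, matching the extern/stub split above.  */
static bool
ldnt_extend_pair_available_p (machine_mode vec_mode, machine_mode mem_mode)
{
  return known_eq (GET_MODE_NUNITS (vec_mode), GET_MODE_NUNITS (mem_mode))
         && GET_MODE_UNIT_SIZE (mem_mode) < GET_MODE_UNIT_SIZE (vec_mode);
}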
- extern rtx gen_aarch64_scatter_stntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_stntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_stntvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_stntvnx2df (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_scatter_stnt_vnx4sivnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_scatter_stnt_vnx2divnx8qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_scatter_stnt_vnx2divnx8qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_scatter_stnt_vnx4sivnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_scatter_stnt_vnx2divnx4qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_scatter_stnt_vnx2divnx4qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx2qi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_scatter_stnt_vnx4sivnx2qi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_scatter_stnt_vnx2divnx2qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_scatter_stnt_vnx4sivnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_scatter_stnt_vnx2divnx4hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_scatter_stnt_vnx2divnx4hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_scatter_stnt_vnx4sivnx2hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_scatter_stnt_vnx2divnx2hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_aarch64_scatter_stnt_vnx4sivnx2si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_aarch64_scatter_stnt_vnx4sivnx2si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
-   return 0;
- }
- extern rtx gen_aarch64_scatter_stnt_vnx2divnx2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mul_lane_vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mul_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_mul_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_suqaddvnx16qi_const (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_suqaddvnx8hi_const (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_suqaddvnx4si_const (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_suqaddvnx2di_const (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shaddvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shsubvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqrshlvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srhaddvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srshlvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhaddvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhsubvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqrshlvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urhaddvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urshlvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shaddvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shsubvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqrshlvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srhaddvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srshlvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhaddvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhsubvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqrshlvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urhaddvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urshlvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shaddvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shsubvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqrshlvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srhaddvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srshlvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhaddvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhsubvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqrshlvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urhaddvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urshlvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shaddvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_shsubvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqrshlvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srhaddvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_srshlvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhaddvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uhsubvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqrshlvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urhaddvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urshlvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulhvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmulhvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulhvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmulhvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulhvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmulhvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulhvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmulhvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulh_lane_vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmulh_lane_vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulh_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmulh_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulh_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmulh_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqshlvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqshlvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqshlvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqshlvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqshlvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqshlvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sqshlvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uqshlvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adclbvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adcltvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eorbtvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eortbvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbclbvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbcltvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlahvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlshvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adclbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adcltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eorbtvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eortbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbclbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbcltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlahvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlshvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adclbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adcltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eorbtvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eortbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbclbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbcltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlahvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlshvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adclbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_adcltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eorbtvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_eortbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbclbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sbcltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlahvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlshvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlah_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlsh_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlah_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlsh_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlah_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdmlsh_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_mul_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_mul_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_mul_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_mul_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_mul_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_mul_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_xarvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_xarvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_xarvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_xarvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_eor3vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_eor3vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_eor3vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_eor3vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_srshrvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_urshrvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_srshrvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_urshrvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_srshrvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_urshrvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_srshrvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_urshrvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_slivnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_srivnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_slivnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_srivnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_slivnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_srivnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_slivnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_srivnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddwbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddwtvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubwbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubwtvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddwbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddwtvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubwbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubwtvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddwbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddwtvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubwbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubwtvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddwbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddwtvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubwbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubwtvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddwbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddwtvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubwbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubwtvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddwbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddwtvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubwbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubwtvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sabdlbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sabdltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddlbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddlbtvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smullbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smulltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmullbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssublbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssublbtvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubltbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uabdlbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uabdltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddlbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umullbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umulltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usublbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sabdlbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sabdltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddlbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddlbtvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smullbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smulltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmullbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssublbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssublbtvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubltbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uabdlbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uabdltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddlbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umullbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umulltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usublbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sabdlbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sabdltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddlbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddlbtvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_saddltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smullbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smulltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmullbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmulltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssublbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssublbtvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ssubltbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uabdlbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uabdltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddlbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uaddltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umullbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umulltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usublbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_usubltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smullb_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smullt_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmullb_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmullt_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umullb_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umullt_lane_vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smullb_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_smullt_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmullb_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdmullt_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umullb_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_umullt_lane_vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sshllbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sshlltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ushllbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ushlltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sshllbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sshlltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ushllbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ushlltvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sshllbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sshlltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ushllbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_ushlltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_sabdlbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_sabdltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smullbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smulltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_uabdlbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_uabdltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umullbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umulltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_sabdlbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_sabdltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smullbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smulltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_uabdlbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_uabdltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umullbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umulltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_sabdlbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_sabdltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smullbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smulltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_uabdlbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_uabdltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umullbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umulltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_smullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_umullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullbtvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmulltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullbtvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmulltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullbtvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmulltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qadd_sqdmullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smullbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smulltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umullbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umulltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smullbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smulltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umullbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umulltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smullbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smulltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umullbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umulltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_smullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sub_umullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullbtvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmulltvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullbtvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmulltvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullbtvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmulltvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullb_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullt_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullb_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_qsub_sqdmullt_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fmlalbvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fmlaltvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fmlslbvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_fmlsltvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalb_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalt_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslb_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslt_lane_vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtnbvnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtunbvnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_uqxtnbvnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtnbvnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtunbvnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_uqxtnbvnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtnbvnx2di (rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtunbvnx2di (rtx, rtx);
- extern rtx gen_aarch64_sve_uqxtnbvnx2di (rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtntvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtuntvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqxtntvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtntvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtuntvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqxtntvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtntvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqxtuntvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqxtntvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_addhnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_raddhnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rsubhnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_subhnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_addhnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_raddhnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rsubhnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_subhnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_addhnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_raddhnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rsubhnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_subhnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_addhntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_raddhntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rsubhntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_subhntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_addhntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_raddhntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rsubhntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_subhntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_addhntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_raddhntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rsubhntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_subhntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rshrnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_shrnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrunbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrunbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqrshrnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqshrnbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rshrnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_shrnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrunbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrunbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqrshrnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqshrnbvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rshrnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_shrnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrunbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrunbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqrshrnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqshrnbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rshrntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_shrntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshruntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshruntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqrshrntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqshrntvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rshrntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_shrntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshruntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshruntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqrshrntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqshrntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_rshrntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_shrntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshrntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrshruntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshrntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqshruntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqrshrntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqshrntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_addpvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxpvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminpvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxpvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminpvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_addpvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxpvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminpvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxpvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminpvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_addpvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxpvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminpvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxpvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminpvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_addpvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_smaxpvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_sminpvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_umaxpvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_uminpvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_faddpvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmaxpvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmaxnmpvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fminpvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fminnmpvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_faddpvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmaxpvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmaxnmpvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fminpvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fminnmpvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_faddpvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmaxpvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fmaxnmpvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fminpvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fminnmpvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd90vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd270vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd90vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd270vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd90vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd270vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd90vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd270vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd90vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd270vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd90vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd270vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd90vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cadd270vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd90vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqcadd270vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmlavnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla90vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla180vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla270vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlahvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah90vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah180vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah270vnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmlavnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla90vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla180vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla270vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlahvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah90vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah180vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah270vnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmlavnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla90vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla180vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla270vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlahvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah90vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah180vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah270vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmlavnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla90vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla180vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cmla270vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlahvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah90vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah180vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqrdcmlah270vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla90_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla180_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla270_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah90_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah180_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah270_lane_vnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla90_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla180_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cmla270_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah90_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah180_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqrdcmlah270_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdotvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdot90vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdot180vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdot270vnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdotvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdot90vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdot180vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cdot270vnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot90_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot180_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot270_lane_vnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot90_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot180_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cdot270_lane_vnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcvtltvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcvtltvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cvtntvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_cvtntvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_fcvtxvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_cvtxntvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_urecpevnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_ursqrtevnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_flogbvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_flogbvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_flogbvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_pmulvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullbvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmulltvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullbvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmulltvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullb_pairvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullt_pairvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullb_pairvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullt_pairvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullb_pairvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_pmullt_pairvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbl2vnx2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_tbxvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bdepvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bextvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bgrpvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bdepvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bextvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bgrpvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bdepvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bextvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bgrpvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bdepvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bextvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_bgrpvnx2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_histcntvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_histcntvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_histsegvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_matchvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nmatchvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_matchvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_nmatchvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_aese (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_aesd (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_aesmc (rtx, rtx);
- extern rtx gen_aarch64_sve2_aesimc (rtx, rtx);
- extern rtx gen_aarch64_sve2_rax1 (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_sm4e (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_sm4ekey (rtx, rtx, rtx);
- extern rtx gen_cbranchsi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchdi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchsf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchdf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchcc4 (rtx, rtx, rtx, rtx);
- extern rtx gen_modsi3 (rtx, rtx, rtx);
- extern rtx gen_moddi3 (rtx, rtx, rtx);
- extern rtx gen_casesi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_casesi_dispatch (rtx, rtx);
- extern rtx gen_prologue (void);
- extern rtx gen_epilogue (void);
- extern rtx gen_sibcall_epilogue (void);
- extern rtx gen_return (void);
- extern rtx gen_call (rtx, rtx, rtx);
- extern rtx gen_call_value (rtx, rtx, rtx, rtx);
- extern rtx gen_sibcall (rtx, rtx, rtx);
- extern rtx gen_sibcall_value (rtx, rtx, rtx, rtx);
- extern rtx gen_untyped_call (rtx, rtx, rtx);
- extern rtx gen_movqi (rtx, rtx);
- extern rtx gen_movhi (rtx, rtx);
- extern rtx gen_movsi (rtx, rtx);
- extern rtx gen_movdi (rtx, rtx);
- extern rtx gen_movti (rtx, rtx);
- extern rtx gen_movhf (rtx, rtx);
- extern rtx gen_movbf (rtx, rtx);
- extern rtx gen_movsf (rtx, rtx);
- extern rtx gen_movdf (rtx, rtx);
- extern rtx gen_movtf (rtx, rtx);
- extern rtx gen_cpymemdi (rtx, rtx, rtx, rtx);
- extern rtx gen_extendsidi2 (rtx, rtx);
- extern rtx gen_zero_extendsidi2 (rtx, rtx);
- extern rtx gen_extendqisi2 (rtx, rtx);
- extern rtx gen_zero_extendqisi2 (rtx, rtx);
- extern rtx gen_extendhisi2 (rtx, rtx);
- extern rtx gen_zero_extendhisi2 (rtx, rtx);
- extern rtx gen_extendqidi2 (rtx, rtx);
- extern rtx gen_zero_extendqidi2 (rtx, rtx);
- extern rtx gen_extendhidi2 (rtx, rtx);
- extern rtx gen_zero_extendhidi2 (rtx, rtx);
- extern rtx gen_extendqihi2 (rtx, rtx);
- extern rtx gen_zero_extendqihi2 (rtx, rtx);
- extern rtx gen_addsi3 (rtx, rtx, rtx);
- extern rtx gen_adddi3 (rtx, rtx, rtx);
- extern rtx gen_addvsi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_addvdi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_uaddvsi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_uaddvdi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_addti3 (rtx, rtx, rtx);
- extern rtx gen_addvti4 (rtx, rtx, rtx, rtx);
- extern rtx gen_uaddvti4 (rtx, rtx, rtx, rtx);
- extern rtx gen_addsi3_carryin (rtx, rtx, rtx);
- extern rtx gen_adddi3_carryin (rtx, rtx, rtx);
- extern rtx gen_addsi3_carryinC (rtx, rtx, rtx);
- extern rtx gen_adddi3_carryinC (rtx, rtx, rtx);
- extern rtx gen_addsi3_carryinV (rtx, rtx, rtx);
- extern rtx gen_adddi3_carryinV (rtx, rtx, rtx);
- extern rtx gen_subvsi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_subvdi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_negvsi3 (rtx, rtx, rtx);
- extern rtx gen_negvdi3 (rtx, rtx, rtx);
- extern rtx gen_usubvsi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_usubvdi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_subti3 (rtx, rtx, rtx);
- extern rtx gen_subvti4 (rtx, rtx, rtx, rtx);
- extern rtx gen_usubvti4 (rtx, rtx, rtx, rtx);
- extern rtx gen_negvti3 (rtx, rtx, rtx);
- extern rtx gen_subsi3_carryin (rtx, rtx, rtx);
- extern rtx gen_subdi3_carryin (rtx, rtx, rtx);
- extern rtx gen_usubsi3_carryinC (rtx, rtx, rtx);
- extern rtx gen_usubdi3_carryinC (rtx, rtx, rtx);
- extern rtx gen_subsi3_carryinV (rtx, rtx, rtx);
- extern rtx gen_subdi3_carryinV (rtx, rtx, rtx);
- extern rtx gen_abssi2 (rtx, rtx);
- extern rtx gen_absdi2 (rtx, rtx);
- extern rtx gen_mulditi3 (rtx, rtx, rtx);
- extern rtx gen_umulditi3 (rtx, rtx, rtx);
- extern rtx gen_multi3 (rtx, rtx, rtx);
- extern rtx gen_cstoresi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cstoredi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cstorecc4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cstoresf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cstoredf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cmovsi6 (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cmovdi6 (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cmovsf6 (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cmovdf6 (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_movqicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movhicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movsicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movdicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movsfsicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movdfsicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movsfdicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movdfdicc (rtx, rtx, rtx, rtx);
- extern rtx gen_movsfcc (rtx, rtx, rtx, rtx);
- extern rtx gen_movdfcc (rtx, rtx, rtx, rtx);
- extern rtx gen_negsicc (rtx, rtx, rtx, rtx);
- extern rtx gen_notsicc (rtx, rtx, rtx, rtx);
- extern rtx gen_negdicc (rtx, rtx, rtx, rtx);
- extern rtx gen_notdicc (rtx, rtx, rtx, rtx);
- extern rtx gen_umaxsi3 (rtx, rtx, rtx);
- extern rtx gen_umaxdi3 (rtx, rtx, rtx);
- extern rtx gen_ffssi2 (rtx, rtx);
- extern rtx gen_ffsdi2 (rtx, rtx);
- extern rtx gen_popcountsi2 (rtx, rtx);
- extern rtx gen_popcountdi2 (rtx, rtx);
- extern rtx gen_ashlsi3 (rtx, rtx, rtx);
- extern rtx gen_ashrsi3 (rtx, rtx, rtx);
- extern rtx gen_lshrsi3 (rtx, rtx, rtx);
- extern rtx gen_ashldi3 (rtx, rtx, rtx);
- extern rtx gen_ashrdi3 (rtx, rtx, rtx);
- extern rtx gen_lshrdi3 (rtx, rtx, rtx);
- extern rtx gen_ashlqi3 (rtx, rtx, rtx);
- extern rtx gen_ashlhi3 (rtx, rtx, rtx);
- extern rtx gen_rotrsi3 (rtx, rtx, rtx);
- extern rtx gen_rotrdi3 (rtx, rtx, rtx);
- extern rtx gen_rotlsi3 (rtx, rtx, rtx);
- extern rtx gen_rotldi3 (rtx, rtx, rtx);
- extern rtx gen_extv (rtx, rtx, rtx, rtx);
- extern rtx gen_extzv (rtx, rtx, rtx, rtx);
- extern rtx gen_insvsi (rtx, rtx, rtx, rtx);
- extern rtx gen_insvdi (rtx, rtx, rtx, rtx);
- extern rtx gen_fmahf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmasf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmadf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmahf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmasf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmadf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmssf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmsdf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmssf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmsdf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_floatsihf2 (rtx, rtx);
- extern rtx gen_floatunssihf2 (rtx, rtx);
- extern rtx gen_floatdihf2 (rtx, rtx);
- extern rtx gen_floatunsdihf2 (rtx, rtx);
- extern rtx gen_divhf3 (rtx, rtx, rtx);
- extern rtx gen_divsf3 (rtx, rtx, rtx);
- extern rtx gen_divdf3 (rtx, rtx, rtx);
- extern rtx gen_sqrthf2 (rtx, rtx);
- extern rtx gen_sqrtsf2 (rtx, rtx);
- extern rtx gen_sqrtdf2 (rtx, rtx);
- extern rtx gen_lrintsfsi2 (rtx, rtx);
- extern rtx gen_lrintdfsi2 (rtx, rtx);
- extern rtx gen_lrintsfdi2 (rtx, rtx);
- extern rtx gen_lrintdfdi2 (rtx, rtx);
- extern rtx gen_copysignsf3 (rtx, rtx, rtx);
- extern rtx gen_copysigndf3 (rtx, rtx, rtx);
- extern rtx gen_xorsignsf3 (rtx, rtx, rtx);
- extern rtx gen_xorsigndf3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpsfsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpsfdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpdfsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpdfdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcptfsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcptfdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv8qisi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv8qidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv16qisi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv16qidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv4hisi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv4hidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv8hisi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv8hidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2sisi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2sidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv4sisi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv4sidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2disi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2didi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2sfsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2sfdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv4sfsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv4sfdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2dfsi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movcpv2dfdi (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movti (rtx, rtx, rtx);
- extern rtx gen_aarch64_reload_movtf (rtx, rtx, rtx);
- extern rtx gen_add_losym (rtx, rtx, rtx);
- extern rtx gen_tlsgd_small_si (rtx, rtx);
- extern rtx gen_tlsgd_small_di (rtx, rtx);
- extern rtx gen_tlsdesc_small_si (rtx);
- extern rtx gen_tlsdesc_small_di (rtx);
- extern rtx gen_get_thread_pointerdi (rtx);
- extern rtx gen_stack_protect_set (rtx, rtx);
- extern rtx gen_stack_protect_combined_set (rtx, rtx);
- extern rtx gen_stack_protect_test (rtx, rtx, rtx);
- extern rtx gen_stack_protect_combined_test (rtx, rtx, rtx);
- extern rtx gen_doloop_end (rtx, rtx);
- extern rtx gen_despeculate_copyqi (rtx, rtx, rtx);
- extern rtx gen_despeculate_copyhi (rtx, rtx, rtx);
- extern rtx gen_despeculate_copysi (rtx, rtx, rtx);
- extern rtx gen_despeculate_copydi (rtx, rtx, rtx);
- extern rtx gen_despeculate_copyti (rtx, rtx, rtx);
- extern rtx gen_movv8qi (rtx, rtx);
- extern rtx gen_movv16qi (rtx, rtx);
- extern rtx gen_movv4hi (rtx, rtx);
- extern rtx gen_movv8hi (rtx, rtx);
- extern rtx gen_movv2si (rtx, rtx);
- extern rtx gen_movv4si (rtx, rtx);
- extern rtx gen_movv2di (rtx, rtx);
- extern rtx gen_movv4hf (rtx, rtx);
- extern rtx gen_movv8hf (rtx, rtx);
- extern rtx gen_movv4bf (rtx, rtx);
- extern rtx gen_movv8bf (rtx, rtx);
- extern rtx gen_movv2sf (rtx, rtx);
- extern rtx gen_movv4sf (rtx, rtx);
- extern rtx gen_movv2df (rtx, rtx);
- extern rtx gen_movmisalignv8qi (rtx, rtx);
- extern rtx gen_movmisalignv16qi (rtx, rtx);
- extern rtx gen_movmisalignv4hi (rtx, rtx);
- extern rtx gen_movmisalignv8hi (rtx, rtx);
- extern rtx gen_movmisalignv2si (rtx, rtx);
- extern rtx gen_movmisalignv4si (rtx, rtx);
- extern rtx gen_movmisalignv2di (rtx, rtx);
- extern rtx gen_movmisalignv2sf (rtx, rtx);
- extern rtx gen_movmisalignv4sf (rtx, rtx);
- extern rtx gen_movmisalignv2df (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv16qi (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv8hi (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv4si (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv2di (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv8hf (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv8bf (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv4sf (rtx, rtx);
- extern rtx gen_aarch64_split_simd_movv2df (rtx, rtx);
- extern rtx gen_aarch64_get_halfv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_halfv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_halfv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_halfv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_halfv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_halfv8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_halfv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_halfv2df (rtx, rtx, rtx);
- extern rtx gen_ctzv2si2 (rtx, rtx);
- extern rtx gen_ctzv4si2 (rtx, rtx);
- extern rtx gen_xorsignv4hf3 (rtx, rtx, rtx);
- extern rtx gen_xorsignv8hf3 (rtx, rtx, rtx);
- extern rtx gen_xorsignv2sf3 (rtx, rtx, rtx);
- extern rtx gen_xorsignv4sf3 (rtx, rtx, rtx);
- extern rtx gen_xorsignv2df3 (rtx, rtx, rtx);
- extern rtx gen_sdot_prodv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_udot_prodv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_sdot_prodv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_udot_prodv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_copysignv4hf3 (rtx, rtx, rtx);
- extern rtx gen_copysignv8hf3 (rtx, rtx, rtx);
- extern rtx gen_copysignv2sf3 (rtx, rtx, rtx);
- extern rtx gen_copysignv4sf3 (rtx, rtx, rtx);
- extern rtx gen_copysignv2df3 (rtx, rtx, rtx);
- extern rtx gen_rsqrtv2sf2 (rtx, rtx);
- extern rtx gen_rsqrtv4sf2 (rtx, rtx);
- extern rtx gen_rsqrtv2df2 (rtx, rtx);
- extern rtx gen_rsqrtsf2 (rtx, rtx);
- extern rtx gen_rsqrtdf2 (rtx, rtx);
- extern rtx gen_ssadv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_usadv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_signbitv2sf2 (rtx, rtx);
- extern rtx gen_signbitv4sf2 (rtx, rtx);
- extern rtx gen_ashlv8qi3 (rtx, rtx, rtx);
- extern rtx gen_ashlv16qi3 (rtx, rtx, rtx);
- extern rtx gen_ashlv4hi3 (rtx, rtx, rtx);
- extern rtx gen_ashlv8hi3 (rtx, rtx, rtx);
- extern rtx gen_ashlv2si3 (rtx, rtx, rtx);
- extern rtx gen_ashlv4si3 (rtx, rtx, rtx);
- extern rtx gen_ashlv2di3 (rtx, rtx, rtx);
- extern rtx gen_lshrv8qi3 (rtx, rtx, rtx);
- extern rtx gen_lshrv16qi3 (rtx, rtx, rtx);
- extern rtx gen_lshrv4hi3 (rtx, rtx, rtx);
- extern rtx gen_lshrv8hi3 (rtx, rtx, rtx);
- extern rtx gen_lshrv2si3 (rtx, rtx, rtx);
- extern rtx gen_lshrv4si3 (rtx, rtx, rtx);
- extern rtx gen_lshrv2di3 (rtx, rtx, rtx);
- extern rtx gen_ashrv8qi3 (rtx, rtx, rtx);
- extern rtx gen_ashrv16qi3 (rtx, rtx, rtx);
- extern rtx gen_ashrv4hi3 (rtx, rtx, rtx);
- extern rtx gen_ashrv8hi3 (rtx, rtx, rtx);
- extern rtx gen_ashrv2si3 (rtx, rtx, rtx);
- extern rtx gen_ashrv4si3 (rtx, rtx, rtx);
- extern rtx gen_ashrv2di3 (rtx, rtx, rtx);
- extern rtx gen_vashlv8qi3 (rtx, rtx, rtx);
- extern rtx gen_vashlv16qi3 (rtx, rtx, rtx);
- extern rtx gen_vashlv4hi3 (rtx, rtx, rtx);
- extern rtx gen_vashlv8hi3 (rtx, rtx, rtx);
- extern rtx gen_vashlv2si3 (rtx, rtx, rtx);
- extern rtx gen_vashlv4si3 (rtx, rtx, rtx);
- extern rtx gen_vashlv2di3 (rtx, rtx, rtx);
- extern rtx gen_vashrv8qi3 (rtx, rtx, rtx);
- extern rtx gen_vashrv16qi3 (rtx, rtx, rtx);
- extern rtx gen_vashrv4hi3 (rtx, rtx, rtx);
- extern rtx gen_vashrv8hi3 (rtx, rtx, rtx);
- extern rtx gen_vashrv2si3 (rtx, rtx, rtx);
- extern rtx gen_vashrv4si3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_ashr_simddi (rtx, rtx, rtx);
- extern rtx gen_vlshrv8qi3 (rtx, rtx, rtx);
- extern rtx gen_vlshrv16qi3 (rtx, rtx, rtx);
- extern rtx gen_vlshrv4hi3 (rtx, rtx, rtx);
- extern rtx gen_vlshrv8hi3 (rtx, rtx, rtx);
- extern rtx gen_vlshrv2si3 (rtx, rtx, rtx);
- extern rtx gen_vlshrv4si3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_lshr_simddi (rtx, rtx, rtx);
- extern rtx gen_vec_setv8qi (rtx, rtx, rtx);
- extern rtx gen_vec_setv16qi (rtx, rtx, rtx);
- extern rtx gen_vec_setv4hi (rtx, rtx, rtx);
- extern rtx gen_vec_setv8hi (rtx, rtx, rtx);
- extern rtx gen_vec_setv2si (rtx, rtx, rtx);
- extern rtx gen_vec_setv4si (rtx, rtx, rtx);
- extern rtx gen_vec_setv2di (rtx, rtx, rtx);
- extern rtx gen_vec_setv4hf (rtx, rtx, rtx);
- extern rtx gen_vec_setv8hf (rtx, rtx, rtx);
- extern rtx gen_vec_setv4bf (rtx, rtx, rtx);
- extern rtx gen_vec_setv8bf (rtx, rtx, rtx);
- extern rtx gen_vec_setv2sf (rtx, rtx, rtx);
- extern rtx gen_vec_setv4sf (rtx, rtx, rtx);
- extern rtx gen_vec_setv2df (rtx, rtx, rtx);
- extern rtx gen_smaxv2di3 (rtx, rtx, rtx);
- extern rtx gen_sminv2di3 (rtx, rtx, rtx);
- extern rtx gen_umaxv2di3 (rtx, rtx, rtx);
- extern rtx gen_uminv2di3 (rtx, rtx, rtx);
- extern rtx gen_move_lo_quad_v16qi (rtx, rtx);
- extern rtx gen_move_lo_quad_v8hi (rtx, rtx);
- extern rtx gen_move_lo_quad_v4si (rtx, rtx);
- extern rtx gen_move_lo_quad_v2di (rtx, rtx);
- extern rtx gen_move_lo_quad_v8hf (rtx, rtx);
- extern rtx gen_move_lo_quad_v8bf (rtx, rtx);
- extern rtx gen_move_lo_quad_v4sf (rtx, rtx);
- extern rtx gen_move_lo_quad_v2df (rtx, rtx);
- extern rtx gen_move_hi_quad_v16qi (rtx, rtx);
- extern rtx gen_move_hi_quad_v8hi (rtx, rtx);
- extern rtx gen_move_hi_quad_v4si (rtx, rtx);
- extern rtx gen_move_hi_quad_v2di (rtx, rtx);
- extern rtx gen_move_hi_quad_v8hf (rtx, rtx);
- extern rtx gen_move_hi_quad_v8bf (rtx, rtx);
- extern rtx gen_move_hi_quad_v4sf (rtx, rtx);
- extern rtx gen_move_hi_quad_v2df (rtx, rtx);
- extern rtx gen_vec_pack_trunc_v4hi (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_v2si (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_di (rtx, rtx, rtx);
- extern rtx gen_vec_unpacks_hi_v16qi (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_v16qi (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_v8hi (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_v8hi (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_v4si (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_v4si (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_v16qi (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_v16qi (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_v8hi (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_v8hi (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_v4si (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_v4si (rtx, rtx);
- extern rtx gen_vec_widen_smult_lo_v16qi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_umult_lo_v16qi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_smult_lo_v8hi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_umult_lo_v8hi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_smult_lo_v4si (rtx, rtx, rtx);
- extern rtx gen_vec_widen_umult_lo_v4si (rtx, rtx, rtx);
- extern rtx gen_vec_widen_smult_hi_v16qi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_umult_hi_v16qi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_smult_hi_v8hi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_umult_hi_v8hi (rtx, rtx, rtx);
- extern rtx gen_vec_widen_smult_hi_v4si (rtx, rtx, rtx);
- extern rtx gen_vec_widen_umult_hi_v4si (rtx, rtx, rtx);
- extern rtx gen_divv4hf3 (rtx, rtx, rtx);
- extern rtx gen_divv8hf3 (rtx, rtx, rtx);
- extern rtx gen_divv2sf3 (rtx, rtx, rtx);
- extern rtx gen_divv4sf3 (rtx, rtx, rtx);
- extern rtx gen_divv2df3 (rtx, rtx, rtx);
- extern rtx gen_fixv4hfv4hi2 (rtx, rtx);
- extern rtx gen_fixunsv4hfv4hi2 (rtx, rtx);
- extern rtx gen_fixv8hfv8hi2 (rtx, rtx);
- extern rtx gen_fixunsv8hfv8hi2 (rtx, rtx);
- extern rtx gen_fixv2sfv2si2 (rtx, rtx);
- extern rtx gen_fixunsv2sfv2si2 (rtx, rtx);
- extern rtx gen_fixv4sfv4si2 (rtx, rtx);
- extern rtx gen_fixunsv4sfv4si2 (rtx, rtx);
- extern rtx gen_fixv2dfv2di2 (rtx, rtx);
- extern rtx gen_fixunsv2dfv2di2 (rtx, rtx);
- extern rtx gen_fix_truncv4hfv4hi2 (rtx, rtx);
- extern rtx gen_fixuns_truncv4hfv4hi2 (rtx, rtx);
- extern rtx gen_fix_truncv8hfv8hi2 (rtx, rtx);
- extern rtx gen_fixuns_truncv8hfv8hi2 (rtx, rtx);
- extern rtx gen_fix_truncv2sfv2si2 (rtx, rtx);
- extern rtx gen_fixuns_truncv2sfv2si2 (rtx, rtx);
- extern rtx gen_fix_truncv4sfv4si2 (rtx, rtx);
- extern rtx gen_fixuns_truncv4sfv4si2 (rtx, rtx);
- extern rtx gen_fix_truncv2dfv2di2 (rtx, rtx);
- extern rtx gen_fixuns_truncv2dfv2di2 (rtx, rtx);
- extern rtx gen_ftruncv4hf2 (rtx, rtx);
- extern rtx gen_ftruncv8hf2 (rtx, rtx);
- extern rtx gen_ftruncv2sf2 (rtx, rtx);
- extern rtx gen_ftruncv4sf2 (rtx, rtx);
- extern rtx gen_ftruncv2df2 (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_v8hf (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_v4sf (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_v8hf (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_v4sf (rtx, rtx);
- extern rtx gen_aarch64_float_truncate_hi_v4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_float_truncate_hi_v8hf (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_v2df (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_df (rtx, rtx, rtx);
- extern rtx gen_reduc_plus_scal_v8qi (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v16qi (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v4hi (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v8hi (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v2si (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v4si (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v2di (rtx, rtx);
- extern rtx gen_reduc_plus_scal_v4sf (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_v4hf (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_v4hf (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v4hf (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v4hf (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_v8hf (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_v8hf (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v8hf (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v8hf (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_v2sf (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_v2sf (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v2sf (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v2sf (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_v4sf (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_v4sf (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v4sf (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v4sf (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_v2df (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_v2df (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v2df (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v2df (rtx, rtx);
- extern rtx gen_reduc_umax_scal_v8qi (rtx, rtx);
- extern rtx gen_reduc_umin_scal_v8qi (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v8qi (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v8qi (rtx, rtx);
- extern rtx gen_reduc_umax_scal_v16qi (rtx, rtx);
- extern rtx gen_reduc_umin_scal_v16qi (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v16qi (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v16qi (rtx, rtx);
- extern rtx gen_reduc_umax_scal_v4hi (rtx, rtx);
- extern rtx gen_reduc_umin_scal_v4hi (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v4hi (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v4hi (rtx, rtx);
- extern rtx gen_reduc_umax_scal_v8hi (rtx, rtx);
- extern rtx gen_reduc_umin_scal_v8hi (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v8hi (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v8hi (rtx, rtx);
- extern rtx gen_reduc_umax_scal_v2si (rtx, rtx);
- extern rtx gen_reduc_umin_scal_v2si (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v2si (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v2si (rtx, rtx);
- extern rtx gen_reduc_umax_scal_v4si (rtx, rtx);
- extern rtx gen_reduc_umin_scal_v4si (rtx, rtx);
- extern rtx gen_reduc_smax_scal_v4si (rtx, rtx);
- extern rtx gen_reduc_smin_scal_v4si (rtx, rtx);
- extern rtx gen_aarch64_simd_bslv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bslv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bsldi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_bsldf (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v8qiv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v16qiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v4hiv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v8hiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v2siv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v4siv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v2div2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v2sfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v4sfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_v2dfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_didi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv8qiv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv16qiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv4hiv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv8hiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv2siv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv4siv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv2div2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpdidi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv2sfv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv4sfv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpv2dfv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuv8qiv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuv16qiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuv4hiv4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuv8hiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuv2siv2si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuv4siv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuv2div2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpudidi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv8qiv8qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv4hiv4hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2siv2si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2sfv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv4sfv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2dfv2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconddidi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2siv2sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2sfv2si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv4siv4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2div2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv8qiv8qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv16qiv16qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv4hiv4hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv8hiv8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv2siv2si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv4siv4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv2div2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondudidi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv2sfv2si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv4sfv4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduv2dfv2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_combinev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_combinedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_simd_combinedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddl2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddl2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddl2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddl2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddl2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddl2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubl2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubl2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubl2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubl2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubl2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubl2v4si (rtx, rtx, rtx);
- extern rtx gen_widen_ssumv16qi3 (rtx, rtx, rtx);
- extern rtx gen_widen_ssumv8hi3 (rtx, rtx, rtx);
- extern rtx gen_widen_ssumv4si3 (rtx, rtx, rtx);
- extern rtx gen_widen_ssumv8qi3 (rtx, rtx, rtx);
- extern rtx gen_widen_ssumv4hi3 (rtx, rtx, rtx);
- extern rtx gen_widen_ssumv2si3 (rtx, rtx, rtx);
- extern rtx gen_widen_usumv16qi3 (rtx, rtx, rtx);
- extern rtx gen_widen_usumv8hi3 (rtx, rtx, rtx);
- extern rtx gen_widen_usumv4si3 (rtx, rtx, rtx);
- extern rtx gen_widen_usumv8qi3 (rtx, rtx, rtx);
- extern rtx gen_widen_usumv4hi3 (rtx, rtx, rtx);
- extern rtx gen_widen_usumv2si3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddw2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddw2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_saddw2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddw2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddw2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_uaddw2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubw2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubw2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_ssubw2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubw2v16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubw2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_usubw2v4si (rtx, rtx, rtx);
- extern rtx gen_avgv8qi3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgv8qi3_floor (rtx, rtx, rtx);
- extern rtx gen_avgv16qi3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgv16qi3_floor (rtx, rtx, rtx);
- extern rtx gen_avgv4hi3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgv4hi3_floor (rtx, rtx, rtx);
- extern rtx gen_avgv8hi3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgv8hi3_floor (rtx, rtx, rtx);
- extern rtx gen_avgv2si3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgv2si3_floor (rtx, rtx, rtx);
- extern rtx gen_avgv4si3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgv4si3_floor (rtx, rtx, rtx);
- extern rtx gen_avgv8qi3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgv8qi3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgv16qi3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgv16qi3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgv4hi3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgv4hi3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgv8hi3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgv8hi3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgv2si3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgv2si3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgv4si3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgv4si3_ceil (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2v8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2v4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_lanev8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_lanev4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_laneqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_lanev8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_lanev4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_laneqv8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_laneqv4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlal2_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_nv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmlsl2_nv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2v8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2v4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_laneqv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_laneqv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_nv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_sqdmull2_nv4si (rtx, rtx, rtx);
- extern rtx gen_sqrtv4hf2 (rtx, rtx);
- extern rtx gen_sqrtv8hf2 (rtx, rtx);
- extern rtx gen_sqrtv2sf2 (rtx, rtx);
- extern rtx gen_sqrtv4sf2 (rtx, rtx);
- extern rtx gen_sqrtv2df2 (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv16qi (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv8hi (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv4si (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv2di (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv8hf (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv4sf (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv2df (rtx, rtx);
- extern rtx gen_vec_load_lanesoiv8bf (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv16qi (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv8hi (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv4si (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv2di (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv8hf (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv4sf (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv2df (rtx, rtx);
- extern rtx gen_vec_store_lanesoiv8bf (rtx, rtx);
- extern rtx gen_vec_load_lanesciv16qi (rtx, rtx);
- extern rtx gen_vec_load_lanesciv8hi (rtx, rtx);
- extern rtx gen_vec_load_lanesciv4si (rtx, rtx);
- extern rtx gen_vec_load_lanesciv2di (rtx, rtx);
- extern rtx gen_vec_load_lanesciv8hf (rtx, rtx);
- extern rtx gen_vec_load_lanesciv4sf (rtx, rtx);
- extern rtx gen_vec_load_lanesciv2df (rtx, rtx);
- extern rtx gen_vec_load_lanesciv8bf (rtx, rtx);
- extern rtx gen_vec_store_lanesciv16qi (rtx, rtx);
- extern rtx gen_vec_store_lanesciv8hi (rtx, rtx);
- extern rtx gen_vec_store_lanesciv4si (rtx, rtx);
- extern rtx gen_vec_store_lanesciv2di (rtx, rtx);
- extern rtx gen_vec_store_lanesciv8hf (rtx, rtx);
- extern rtx gen_vec_store_lanesciv4sf (rtx, rtx);
- extern rtx gen_vec_store_lanesciv2df (rtx, rtx);
- extern rtx gen_vec_store_lanesciv8bf (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv16qi (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv8hi (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv4si (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv2di (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv8hf (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv4sf (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv2df (rtx, rtx);
- extern rtx gen_vec_load_lanesxiv8bf (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv16qi (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv8hi (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv4si (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv2di (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv8hf (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv4sf (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv2df (rtx, rtx);
- extern rtx gen_vec_store_lanesxiv8bf (rtx, rtx);
- extern rtx gen_movoi (rtx, rtx);
- extern rtx gen_movci (rtx, rtx);
- extern rtx gen_movxi (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v2si (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v4si (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v2di (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld1x3v2df (rtx, rtx);
- extern rtx gen_aarch64_ld1x3di (rtx, rtx);
- extern rtx gen_aarch64_ld1x3df (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v2si (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v4si (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v2di (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld1x4v2df (rtx, rtx);
- extern rtx gen_aarch64_ld1x4di (rtx, rtx);
- extern rtx gen_aarch64_ld1x4df (rtx, rtx);
- extern rtx gen_aarch64_st1x2v8qi (rtx, rtx);
- extern rtx gen_aarch64_st1x2v16qi (rtx, rtx);
- extern rtx gen_aarch64_st1x2v4hi (rtx, rtx);
- extern rtx gen_aarch64_st1x2v8hi (rtx, rtx);
- extern rtx gen_aarch64_st1x2v2si (rtx, rtx);
- extern rtx gen_aarch64_st1x2v4si (rtx, rtx);
- extern rtx gen_aarch64_st1x2v4bf (rtx, rtx);
- extern rtx gen_aarch64_st1x2v8bf (rtx, rtx);
- extern rtx gen_aarch64_st1x2v2di (rtx, rtx);
- extern rtx gen_aarch64_st1x2v4hf (rtx, rtx);
- extern rtx gen_aarch64_st1x2v8hf (rtx, rtx);
- extern rtx gen_aarch64_st1x2v2sf (rtx, rtx);
- extern rtx gen_aarch64_st1x2v4sf (rtx, rtx);
- extern rtx gen_aarch64_st1x2v2df (rtx, rtx);
- extern rtx gen_aarch64_st1x2di (rtx, rtx);
- extern rtx gen_aarch64_st1x2df (rtx, rtx);
- extern rtx gen_aarch64_st1x3v8qi (rtx, rtx);
- extern rtx gen_aarch64_st1x3v16qi (rtx, rtx);
- extern rtx gen_aarch64_st1x3v4hi (rtx, rtx);
- extern rtx gen_aarch64_st1x3v8hi (rtx, rtx);
- extern rtx gen_aarch64_st1x3v2si (rtx, rtx);
- extern rtx gen_aarch64_st1x3v4si (rtx, rtx);
- extern rtx gen_aarch64_st1x3v4bf (rtx, rtx);
- extern rtx gen_aarch64_st1x3v8bf (rtx, rtx);
- extern rtx gen_aarch64_st1x3v2di (rtx, rtx);
- extern rtx gen_aarch64_st1x3v4hf (rtx, rtx);
- extern rtx gen_aarch64_st1x3v8hf (rtx, rtx);
- extern rtx gen_aarch64_st1x3v2sf (rtx, rtx);
- extern rtx gen_aarch64_st1x3v4sf (rtx, rtx);
- extern rtx gen_aarch64_st1x3v2df (rtx, rtx);
- extern rtx gen_aarch64_st1x3di (rtx, rtx);
- extern rtx gen_aarch64_st1x3df (rtx, rtx);
- extern rtx gen_aarch64_st1x4v8qi (rtx, rtx);
- extern rtx gen_aarch64_st1x4v16qi (rtx, rtx);
- extern rtx gen_aarch64_st1x4v4hi (rtx, rtx);
- extern rtx gen_aarch64_st1x4v8hi (rtx, rtx);
- extern rtx gen_aarch64_st1x4v2si (rtx, rtx);
- extern rtx gen_aarch64_st1x4v4si (rtx, rtx);
- extern rtx gen_aarch64_st1x4v4bf (rtx, rtx);
- extern rtx gen_aarch64_st1x4v8bf (rtx, rtx);
- extern rtx gen_aarch64_st1x4v2di (rtx, rtx);
- extern rtx gen_aarch64_st1x4v4hf (rtx, rtx);
- extern rtx gen_aarch64_st1x4v8hf (rtx, rtx);
- extern rtx gen_aarch64_st1x4v2sf (rtx, rtx);
- extern rtx gen_aarch64_st1x4v4sf (rtx, rtx);
- extern rtx gen_aarch64_st1x4v2df (rtx, rtx);
- extern rtx gen_aarch64_st1x4di (rtx, rtx);
- extern rtx gen_aarch64_st1x4df (rtx, rtx);
- extern rtx gen_aarch64_ld2rv8qi (rtx, rtx);
- extern rtx gen_aarch64_ld3rv8qi (rtx, rtx);
- extern rtx gen_aarch64_ld4rv8qi (rtx, rtx);
- extern rtx gen_aarch64_ld2rv16qi (rtx, rtx);
- extern rtx gen_aarch64_ld3rv16qi (rtx, rtx);
- extern rtx gen_aarch64_ld4rv16qi (rtx, rtx);
- extern rtx gen_aarch64_ld2rv4hi (rtx, rtx);
- extern rtx gen_aarch64_ld3rv4hi (rtx, rtx);
- extern rtx gen_aarch64_ld4rv4hi (rtx, rtx);
- extern rtx gen_aarch64_ld2rv8hi (rtx, rtx);
- extern rtx gen_aarch64_ld3rv8hi (rtx, rtx);
- extern rtx gen_aarch64_ld4rv8hi (rtx, rtx);
- extern rtx gen_aarch64_ld2rv2si (rtx, rtx);
- extern rtx gen_aarch64_ld3rv2si (rtx, rtx);
- extern rtx gen_aarch64_ld4rv2si (rtx, rtx);
- extern rtx gen_aarch64_ld2rv4si (rtx, rtx);
- extern rtx gen_aarch64_ld3rv4si (rtx, rtx);
- extern rtx gen_aarch64_ld4rv4si (rtx, rtx);
- extern rtx gen_aarch64_ld2rv4bf (rtx, rtx);
- extern rtx gen_aarch64_ld3rv4bf (rtx, rtx);
- extern rtx gen_aarch64_ld4rv4bf (rtx, rtx);
- extern rtx gen_aarch64_ld2rv8bf (rtx, rtx);
- extern rtx gen_aarch64_ld3rv8bf (rtx, rtx);
- extern rtx gen_aarch64_ld4rv8bf (rtx, rtx);
- extern rtx gen_aarch64_ld2rv2di (rtx, rtx);
- extern rtx gen_aarch64_ld3rv2di (rtx, rtx);
- extern rtx gen_aarch64_ld4rv2di (rtx, rtx);
- extern rtx gen_aarch64_ld2rv4hf (rtx, rtx);
- extern rtx gen_aarch64_ld3rv4hf (rtx, rtx);
- extern rtx gen_aarch64_ld4rv4hf (rtx, rtx);
- extern rtx gen_aarch64_ld2rv8hf (rtx, rtx);
- extern rtx gen_aarch64_ld3rv8hf (rtx, rtx);
- extern rtx gen_aarch64_ld4rv8hf (rtx, rtx);
- extern rtx gen_aarch64_ld2rv2sf (rtx, rtx);
- extern rtx gen_aarch64_ld3rv2sf (rtx, rtx);
- extern rtx gen_aarch64_ld4rv2sf (rtx, rtx);
- extern rtx gen_aarch64_ld2rv4sf (rtx, rtx);
- extern rtx gen_aarch64_ld3rv4sf (rtx, rtx);
- extern rtx gen_aarch64_ld4rv4sf (rtx, rtx);
- extern rtx gen_aarch64_ld2rv2df (rtx, rtx);
- extern rtx gen_aarch64_ld3rv2df (rtx, rtx);
- extern rtx gen_aarch64_ld4rv2df (rtx, rtx);
- extern rtx gen_aarch64_ld2rdi (rtx, rtx);
- extern rtx gen_aarch64_ld3rdi (rtx, rtx);
- extern rtx gen_aarch64_ld4rdi (rtx, rtx);
- extern rtx gen_aarch64_ld2rdf (rtx, rtx);
- extern rtx gen_aarch64_ld3rdf (rtx, rtx);
- extern rtx gen_aarch64_ld4rdf (rtx, rtx);
- extern rtx gen_aarch64_ld2v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld2v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld2v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld2v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld2v2si (rtx, rtx);
- extern rtx gen_aarch64_ld2v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld2di (rtx, rtx);
- extern rtx gen_aarch64_ld2df (rtx, rtx);
- extern rtx gen_aarch64_ld3v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld3v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld3v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld3v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld3v2si (rtx, rtx);
- extern rtx gen_aarch64_ld3v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld3di (rtx, rtx);
- extern rtx gen_aarch64_ld3df (rtx, rtx);
- extern rtx gen_aarch64_ld4v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld4v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld4v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld4v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld4v2si (rtx, rtx);
- extern rtx gen_aarch64_ld4v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld4di (rtx, rtx);
- extern rtx gen_aarch64_ld4df (rtx, rtx);
- extern rtx gen_aarch64_ld1v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld1v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld1v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld1v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld1v2si (rtx, rtx);
- extern rtx gen_aarch64_ld1v4si (rtx, rtx);
- extern rtx gen_aarch64_ld1v2di (rtx, rtx);
- extern rtx gen_aarch64_ld1v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld1v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld1v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld1v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld1v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld1v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld1v2df (rtx, rtx);
- extern rtx gen_aarch64_ld2v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld3v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld4v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld2v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld3v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld4v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld2v4si (rtx, rtx);
- extern rtx gen_aarch64_ld3v4si (rtx, rtx);
- extern rtx gen_aarch64_ld4v4si (rtx, rtx);
- extern rtx gen_aarch64_ld2v2di (rtx, rtx);
- extern rtx gen_aarch64_ld3v2di (rtx, rtx);
- extern rtx gen_aarch64_ld4v2di (rtx, rtx);
- extern rtx gen_aarch64_ld2v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld3v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld4v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld2v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld3v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld4v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld2v2df (rtx, rtx);
- extern rtx gen_aarch64_ld3v2df (rtx, rtx);
- extern rtx gen_aarch64_ld4v2df (rtx, rtx);
- extern rtx gen_aarch64_ld2v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld3v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld4v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v16qi (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v8hi (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v4si (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v2di (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v8hf (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v4sf (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v2df (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v8bf (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v8qi (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v4hi (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v4bf (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v4hf (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v2si (rtx, rtx);
- extern rtx gen_aarch64_ld1x2v2sf (rtx, rtx);
- extern rtx gen_aarch64_ld1x2di (rtx, rtx);
- extern rtx gen_aarch64_ld1x2df (rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev4hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev2si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev4bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev4hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanev2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanev2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanev2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanedi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld2_lanedf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld3_lanedf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_ld4_lanedf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoiv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoiv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoiv4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoiv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoiv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoiv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregoidf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregciv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregciv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregciv4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregciv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregciv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregciv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregcidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregcidf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxiv8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxiv4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxiv4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxiv4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxiv2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxiv2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxidi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_dregxidf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregoiv8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregciv8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_get_qregxiv8bf (rtx, rtx, rtx);
- extern rtx gen_vec_permv8qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_st2v8qi (rtx, rtx);
- extern rtx gen_aarch64_st2v4hi (rtx, rtx);
- extern rtx gen_aarch64_st2v4bf (rtx, rtx);
- extern rtx gen_aarch64_st2v4hf (rtx, rtx);
- extern rtx gen_aarch64_st2v2si (rtx, rtx);
- extern rtx gen_aarch64_st2v2sf (rtx, rtx);
- extern rtx gen_aarch64_st2di (rtx, rtx);
- extern rtx gen_aarch64_st2df (rtx, rtx);
- extern rtx gen_aarch64_st3v8qi (rtx, rtx);
- extern rtx gen_aarch64_st3v4hi (rtx, rtx);
- extern rtx gen_aarch64_st3v4bf (rtx, rtx);
- extern rtx gen_aarch64_st3v4hf (rtx, rtx);
- extern rtx gen_aarch64_st3v2si (rtx, rtx);
- extern rtx gen_aarch64_st3v2sf (rtx, rtx);
- extern rtx gen_aarch64_st3di (rtx, rtx);
- extern rtx gen_aarch64_st3df (rtx, rtx);
- extern rtx gen_aarch64_st4v8qi (rtx, rtx);
- extern rtx gen_aarch64_st4v4hi (rtx, rtx);
- extern rtx gen_aarch64_st4v4bf (rtx, rtx);
- extern rtx gen_aarch64_st4v4hf (rtx, rtx);
- extern rtx gen_aarch64_st4v2si (rtx, rtx);
- extern rtx gen_aarch64_st4v2sf (rtx, rtx);
- extern rtx gen_aarch64_st4di (rtx, rtx);
- extern rtx gen_aarch64_st4df (rtx, rtx);
- extern rtx gen_aarch64_st2v16qi (rtx, rtx);
- extern rtx gen_aarch64_st3v16qi (rtx, rtx);
- extern rtx gen_aarch64_st4v16qi (rtx, rtx);
- extern rtx gen_aarch64_st2v8hi (rtx, rtx);
- extern rtx gen_aarch64_st3v8hi (rtx, rtx);
- extern rtx gen_aarch64_st4v8hi (rtx, rtx);
- extern rtx gen_aarch64_st2v4si (rtx, rtx);
- extern rtx gen_aarch64_st3v4si (rtx, rtx);
- extern rtx gen_aarch64_st4v4si (rtx, rtx);
- extern rtx gen_aarch64_st2v2di (rtx, rtx);
- extern rtx gen_aarch64_st3v2di (rtx, rtx);
- extern rtx gen_aarch64_st4v2di (rtx, rtx);
- extern rtx gen_aarch64_st2v8hf (rtx, rtx);
- extern rtx gen_aarch64_st3v8hf (rtx, rtx);
- extern rtx gen_aarch64_st4v8hf (rtx, rtx);
- extern rtx gen_aarch64_st2v4sf (rtx, rtx);
- extern rtx gen_aarch64_st3v4sf (rtx, rtx);
- extern rtx gen_aarch64_st4v4sf (rtx, rtx);
- extern rtx gen_aarch64_st2v2df (rtx, rtx);
- extern rtx gen_aarch64_st3v2df (rtx, rtx);
- extern rtx gen_aarch64_st4v2df (rtx, rtx);
- extern rtx gen_aarch64_st2v8bf (rtx, rtx);
- extern rtx gen_aarch64_st3v8bf (rtx, rtx);
- extern rtx gen_aarch64_st4v8bf (rtx, rtx);
- extern rtx gen_aarch64_st2_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev8qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev4hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev2si (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev4bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev8bf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev2di (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev4hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev2sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanev2df (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanedi (rtx, rtx, rtx);
- extern rtx gen_aarch64_st2_lanedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st3_lanedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st4_lanedf (rtx, rtx, rtx);
- extern rtx gen_aarch64_st1v8qi (rtx, rtx);
- extern rtx gen_aarch64_st1v16qi (rtx, rtx);
- extern rtx gen_aarch64_st1v4hi (rtx, rtx);
- extern rtx gen_aarch64_st1v8hi (rtx, rtx);
- extern rtx gen_aarch64_st1v2si (rtx, rtx);
- extern rtx gen_aarch64_st1v4si (rtx, rtx);
- extern rtx gen_aarch64_st1v2di (rtx, rtx);
- extern rtx gen_aarch64_st1v4hf (rtx, rtx);
- extern rtx gen_aarch64_st1v8hf (rtx, rtx);
- extern rtx gen_aarch64_st1v4bf (rtx, rtx);
- extern rtx gen_aarch64_st1v8bf (rtx, rtx);
- extern rtx gen_aarch64_st1v2sf (rtx, rtx);
- extern rtx gen_aarch64_st1v4sf (rtx, rtx);
- extern rtx gen_aarch64_st1v2df (rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv2df (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregoiv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregciv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_set_qregxiv8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_initv8qiqi (rtx, rtx);
- extern rtx gen_vec_initv16qiqi (rtx, rtx);
- extern rtx gen_vec_initv4hihi (rtx, rtx);
- extern rtx gen_vec_initv8hihi (rtx, rtx);
- extern rtx gen_vec_initv2sisi (rtx, rtx);
- extern rtx gen_vec_initv4sisi (rtx, rtx);
- extern rtx gen_vec_initv2didi (rtx, rtx);
- extern rtx gen_vec_initv4hfhf (rtx, rtx);
- extern rtx gen_vec_initv8hfhf (rtx, rtx);
- extern rtx gen_vec_initv4bfbf (rtx, rtx);
- extern rtx gen_vec_initv8bfbf (rtx, rtx);
- extern rtx gen_vec_initv2sfsf (rtx, rtx);
- extern rtx gen_vec_initv4sfsf (rtx, rtx);
- extern rtx gen_vec_initv2dfdf (rtx, rtx);
- extern rtx gen_vec_initv16qiv8qi (rtx, rtx);
- extern rtx gen_vec_initv8hiv4hi (rtx, rtx);
- extern rtx gen_vec_initv4siv2si (rtx, rtx);
- extern rtx gen_vec_initv8hfv4hf (rtx, rtx);
- extern rtx gen_vec_initv4sfv2sf (rtx, rtx);
- extern rtx gen_vec_initv8bfv4bf (rtx, rtx);
- extern rtx gen_vec_extractv8qiqi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv16qiqi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv4hihi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv8hihi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv2sisi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv4sisi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv2didi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv4hfhf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv8hfhf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv4bfbf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv8bfbf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv2sfsf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv4sfsf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv2dfdf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv16qiv8qi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv8hiv4hi (rtx, rtx, rtx);
- extern rtx gen_vec_extractv4siv2si (rtx, rtx, rtx);
- extern rtx gen_vec_extractv8hfv4hf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv8bfv4bf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv4sfv2sf (rtx, rtx, rtx);
- extern rtx gen_vec_extractv2dfv1df (rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlal_lowv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlsl_lowv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalq_lowv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslq_lowv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlal_highv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlsl_highv2sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalq_highv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslq_highv4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlal_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlsl_lane_lowv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlal_lane_highv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlsl_lane_highv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslq_laneq_lowv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslq_laneq_highv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlal_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlsl_laneq_lowv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlal_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlsl_laneq_highv2sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslq_lane_lowv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlalq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_fmlslq_lane_highv4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_vget_lo_halfv8bf (rtx, rtx);
- extern rtx gen_aarch64_vget_hi_halfv8bf (rtx, rtx);
- extern rtx gen_atomic_compare_and_swapqi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_compare_and_swaphi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_compare_and_swapsi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_compare_and_swapdi (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_compare_and_swapti (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_exchangeqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_exchangehi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_exchangesi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_exchangedi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_addqi (rtx, rtx, rtx);
- extern rtx gen_atomic_subqi (rtx, rtx, rtx);
- extern rtx gen_atomic_orqi (rtx, rtx, rtx);
- extern rtx gen_atomic_xorqi (rtx, rtx, rtx);
- extern rtx gen_atomic_andqi (rtx, rtx, rtx);
- extern rtx gen_atomic_addhi (rtx, rtx, rtx);
- extern rtx gen_atomic_subhi (rtx, rtx, rtx);
- extern rtx gen_atomic_orhi (rtx, rtx, rtx);
- extern rtx gen_atomic_xorhi (rtx, rtx, rtx);
- extern rtx gen_atomic_andhi (rtx, rtx, rtx);
- extern rtx gen_atomic_addsi (rtx, rtx, rtx);
- extern rtx gen_atomic_subsi (rtx, rtx, rtx);
- extern rtx gen_atomic_orsi (rtx, rtx, rtx);
- extern rtx gen_atomic_xorsi (rtx, rtx, rtx);
- extern rtx gen_atomic_andsi (rtx, rtx, rtx);
- extern rtx gen_atomic_adddi (rtx, rtx, rtx);
- extern rtx gen_atomic_subdi (rtx, rtx, rtx);
- extern rtx gen_atomic_ordi (rtx, rtx, rtx);
- extern rtx gen_atomic_xordi (rtx, rtx, rtx);
- extern rtx gen_atomic_anddi (rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_addqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_subqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_orqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_xorqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_andqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_addhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_subhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_orhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_xorhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_andhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_addsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_subsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_orsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_xorsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_andsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_adddi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_subdi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_ordi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_xordi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_fetch_anddi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_add_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_sub_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_or_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_xor_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_and_fetchqi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_add_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_sub_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_or_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_xor_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_and_fetchhi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_add_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_sub_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_or_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_xor_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_and_fetchsi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_add_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_sub_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_or_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_xor_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_atomic_and_fetchdi (rtx, rtx, rtx, rtx);
- extern rtx gen_mem_thread_fence (rtx);
- extern rtx gen_dmb (rtx);
- extern rtx gen_movvnx16qi (rtx, rtx);
- extern rtx gen_movvnx8qi (rtx, rtx);
- extern rtx gen_movvnx4qi (rtx, rtx);
- extern rtx gen_movvnx2qi (rtx, rtx);
- extern rtx gen_movvnx8hi (rtx, rtx);
- extern rtx gen_movvnx4hi (rtx, rtx);
- extern rtx gen_movvnx2hi (rtx, rtx);
- extern rtx gen_movvnx8hf (rtx, rtx);
- extern rtx gen_movvnx4hf (rtx, rtx);
- extern rtx gen_movvnx2hf (rtx, rtx);
- extern rtx gen_movvnx8bf (rtx, rtx);
- extern rtx gen_movvnx4si (rtx, rtx);
- extern rtx gen_movvnx2si (rtx, rtx);
- extern rtx gen_movvnx4sf (rtx, rtx);
- extern rtx gen_movvnx2sf (rtx, rtx);
- extern rtx gen_movvnx2di (rtx, rtx);
- extern rtx gen_movvnx2df (rtx, rtx);
- extern rtx gen_movmisalignvnx16qi (rtx, rtx);
- extern rtx gen_movmisalignvnx8qi (rtx, rtx);
- extern rtx gen_movmisalignvnx4qi (rtx, rtx);
- extern rtx gen_movmisalignvnx2qi (rtx, rtx);
- extern rtx gen_movmisalignvnx8hi (rtx, rtx);
- extern rtx gen_movmisalignvnx4hi (rtx, rtx);
- extern rtx gen_movmisalignvnx2hi (rtx, rtx);
- extern rtx gen_movmisalignvnx8hf (rtx, rtx);
- extern rtx gen_movmisalignvnx4hf (rtx, rtx);
- extern rtx gen_movmisalignvnx2hf (rtx, rtx);
- extern rtx gen_movmisalignvnx8bf (rtx, rtx);
- extern rtx gen_movmisalignvnx4si (rtx, rtx);
- extern rtx gen_movmisalignvnx2si (rtx, rtx);
- extern rtx gen_movmisalignvnx4sf (rtx, rtx);
- extern rtx gen_movmisalignvnx2sf (rtx, rtx);
- extern rtx gen_movmisalignvnx2di (rtx, rtx);
- extern rtx gen_movmisalignvnx2df (rtx, rtx);
- extern rtx gen_aarch64_sve_reload_mem (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx16qi (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx8qi (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx4qi (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx2qi (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx8hi (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx4hi (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx2hi (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx8hf (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx4hf (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx2hf (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx8bf (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx4si (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx2si (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx4sf (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx2sf (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx2di (rtx, rtx);
- extern rtx gen_aarch64_sve_reinterpretvnx2df (rtx, rtx);
- extern rtx gen_movvnx32qi (rtx, rtx);
- extern rtx gen_movvnx16hi (rtx, rtx);
- extern rtx gen_movvnx8si (rtx, rtx);
- extern rtx gen_movvnx4di (rtx, rtx);
- extern rtx gen_movvnx16bf (rtx, rtx);
- extern rtx gen_movvnx16hf (rtx, rtx);
- extern rtx gen_movvnx8sf (rtx, rtx);
- extern rtx gen_movvnx4df (rtx, rtx);
- extern rtx gen_movvnx48qi (rtx, rtx);
- extern rtx gen_movvnx24hi (rtx, rtx);
- extern rtx gen_movvnx12si (rtx, rtx);
- extern rtx gen_movvnx6di (rtx, rtx);
- extern rtx gen_movvnx24bf (rtx, rtx);
- extern rtx gen_movvnx24hf (rtx, rtx);
- extern rtx gen_movvnx12sf (rtx, rtx);
- extern rtx gen_movvnx6df (rtx, rtx);
- extern rtx gen_movvnx64qi (rtx, rtx);
- extern rtx gen_movvnx32hi (rtx, rtx);
- extern rtx gen_movvnx16si (rtx, rtx);
- extern rtx gen_movvnx8di (rtx, rtx);
- extern rtx gen_movvnx32bf (rtx, rtx);
- extern rtx gen_movvnx32hf (rtx, rtx);
- extern rtx gen_movvnx16sf (rtx, rtx);
- extern rtx gen_movvnx8df (rtx, rtx);
- extern rtx gen_movvnx16bi (rtx, rtx);
- extern rtx gen_movvnx8bi (rtx, rtx);
- extern rtx gen_movvnx4bi (rtx, rtx);
- extern rtx gen_movvnx2bi (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx32qivnx16qi (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx16hivnx8hi (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx8sivnx4si (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx4divnx2di (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx16bfvnx8bf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx16hfvnx8hf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx8sfvnx4sf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx4dfvnx2df (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx48qivnx16qi (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx24hivnx8hi (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx12sivnx4si (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx6divnx2di (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx24bfvnx8bf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx24hfvnx8hf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx12sfvnx4sf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx6dfvnx2df (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx64qivnx16qi (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx32hivnx8hi (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx16sivnx4si (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx8divnx2di (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx32bfvnx8bf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx32hfvnx8hf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx16sfvnx4sf (rtx, rtx);
- extern rtx gen_vec_load_lanesvnx8dfvnx2df (rtx, rtx);
- extern rtx gen_gather_loadvnx2qivnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx2hivnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx2sivnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx2divnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx4qivnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx4hivnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_gather_loadvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vec_store_lanesvnx32qivnx16qi (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx16hivnx8hi (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx8sivnx4si (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx4divnx2di (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx16bfvnx8bf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx16hfvnx8hf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx8sfvnx4sf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx4dfvnx2df (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx48qivnx16qi (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx24hivnx8hi (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx12sivnx4si (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx6divnx2di (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx24bfvnx8bf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx24hfvnx8hf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx12sfvnx4sf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx6dfvnx2df (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx64qivnx16qi (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx32hivnx8hi (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx16sivnx4si (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx8divnx2di (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx32bfvnx8bf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx32hfvnx8hf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx16sfvnx4sf (rtx, rtx);
- extern rtx gen_vec_store_lanesvnx8dfvnx2df (rtx, rtx);
- extern rtx gen_scatter_storevnx2qivnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx2hivnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx2hfvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx2sivnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx2sfvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx2divnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx4qivnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx4hivnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx4hfvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx4sivnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_scatter_storevnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vec_duplicatevnx16qi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx8qi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx4qi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2qi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx8hi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx4hi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2hi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx8hf (rtx, rtx);
- extern rtx gen_vec_duplicatevnx4hf (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2hf (rtx, rtx);
- extern rtx gen_vec_duplicatevnx8bf (rtx, rtx);
- extern rtx gen_vec_duplicatevnx4si (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2si (rtx, rtx);
- extern rtx gen_vec_duplicatevnx4sf (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2sf (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2di (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2df (rtx, rtx);
- extern rtx gen_vec_initvnx16qiqi (rtx, rtx);
- extern rtx gen_vec_initvnx8hihi (rtx, rtx);
- extern rtx gen_vec_initvnx4sisi (rtx, rtx);
- extern rtx gen_vec_initvnx2didi (rtx, rtx);
- extern rtx gen_vec_initvnx8bfbf (rtx, rtx);
- extern rtx gen_vec_initvnx8hfhf (rtx, rtx);
- extern rtx gen_vec_initvnx4sfsf (rtx, rtx);
- extern rtx gen_vec_initvnx2dfdf (rtx, rtx);
- extern rtx gen_vec_duplicatevnx16bi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx8bi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx4bi (rtx, rtx);
- extern rtx gen_vec_duplicatevnx2bi (rtx, rtx);
- extern rtx gen_vec_extractvnx16qiqi (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx8hihi (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx4sisi (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx2didi (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx8bfbf (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx8hfhf (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx4sfsf (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx2dfdf (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx16biqi (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx8bihi (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx4bisi (rtx, rtx, rtx);
- extern rtx gen_vec_extractvnx2bidi (rtx, rtx, rtx);
- extern rtx gen_absvnx16qi2 (rtx, rtx);
- extern rtx gen_negvnx16qi2 (rtx, rtx);
- extern rtx gen_one_cmplvnx16qi2 (rtx, rtx);
- extern rtx gen_clrsbvnx16qi2 (rtx, rtx);
- extern rtx gen_clzvnx16qi2 (rtx, rtx);
- extern rtx gen_popcountvnx16qi2 (rtx, rtx);
- extern rtx gen_qabsvnx16qi2 (rtx, rtx);
- extern rtx gen_qnegvnx16qi2 (rtx, rtx);
- extern rtx gen_absvnx8hi2 (rtx, rtx);
- extern rtx gen_negvnx8hi2 (rtx, rtx);
- extern rtx gen_one_cmplvnx8hi2 (rtx, rtx);
- extern rtx gen_clrsbvnx8hi2 (rtx, rtx);
- extern rtx gen_clzvnx8hi2 (rtx, rtx);
- extern rtx gen_popcountvnx8hi2 (rtx, rtx);
- extern rtx gen_qabsvnx8hi2 (rtx, rtx);
- extern rtx gen_qnegvnx8hi2 (rtx, rtx);
- extern rtx gen_absvnx4si2 (rtx, rtx);
- extern rtx gen_negvnx4si2 (rtx, rtx);
- extern rtx gen_one_cmplvnx4si2 (rtx, rtx);
- extern rtx gen_clrsbvnx4si2 (rtx, rtx);
- extern rtx gen_clzvnx4si2 (rtx, rtx);
- extern rtx gen_popcountvnx4si2 (rtx, rtx);
- extern rtx gen_qabsvnx4si2 (rtx, rtx);
- extern rtx gen_qnegvnx4si2 (rtx, rtx);
- extern rtx gen_absvnx2di2 (rtx, rtx);
- extern rtx gen_negvnx2di2 (rtx, rtx);
- extern rtx gen_one_cmplvnx2di2 (rtx, rtx);
- extern rtx gen_clrsbvnx2di2 (rtx, rtx);
- extern rtx gen_clzvnx2di2 (rtx, rtx);
- extern rtx gen_popcountvnx2di2 (rtx, rtx);
- extern rtx gen_qabsvnx2di2 (rtx, rtx);
- extern rtx gen_qnegvnx2di2 (rtx, rtx);
- extern rtx gen_cond_absvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_negvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_one_cmplvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clrsbvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clzvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_popcountvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qabsvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qnegvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_absvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_negvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_one_cmplvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clrsbvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clzvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_popcountvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qabsvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qnegvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_absvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_negvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_one_cmplvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clrsbvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clzvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_popcountvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qabsvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qnegvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_absvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_negvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_one_cmplvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clrsbvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_clzvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_popcountvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qabsvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_qnegvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_extendvnx8qivnx8hi2 (rtx, rtx);
- extern rtx gen_zero_extendvnx8qivnx8hi2 (rtx, rtx);
- static inline rtx gen_extendvnx4qivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4qivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2qivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2qivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2qivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx4hivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4hivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2hivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2hivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2hivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2sivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2sivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2sivnx8hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2sivnx8hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx8qivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx8qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx8qivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx8qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx4qivnx4hi2 (rtx, rtx);
- extern rtx gen_zero_extendvnx4qivnx4hi2 (rtx, rtx);
- static inline rtx gen_extendvnx2qivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2qivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2qivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx4hivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4hivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2hivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2hivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2hivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2sivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2sivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2sivnx4hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2sivnx4hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx8qivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx8qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx8qivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx8qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx4qivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4qivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4qivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx2qivnx2hi2 (rtx, rtx);
- extern rtx gen_zero_extendvnx2qivnx2hi2 (rtx, rtx);
- static inline rtx gen_extendvnx4hivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4hivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2hivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2hivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2hivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2sivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2sivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2sivnx2hi2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2sivnx2hi2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx8qivnx4si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx8qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx8qivnx4si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx8qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx4qivnx4si2 (rtx, rtx);
- extern rtx gen_zero_extendvnx4qivnx4si2 (rtx, rtx);
- static inline rtx gen_extendvnx2qivnx4si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2qivnx4si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2qivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx4hivnx4si2 (rtx, rtx);
- extern rtx gen_zero_extendvnx4hivnx4si2 (rtx, rtx);
- static inline rtx gen_extendvnx2hivnx4si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2hivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2hivnx4si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2hivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx2sivnx4si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2sivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2sivnx4si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2sivnx4si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx8qivnx2si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx8qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx8qivnx2si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx8qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx4qivnx2si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4qivnx2si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4qivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx2qivnx2si2 (rtx, rtx);
- extern rtx gen_zero_extendvnx2qivnx2si2 (rtx, rtx);
- static inline rtx gen_extendvnx4hivnx2si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4hivnx2si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4hivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx2hivnx2si2 (rtx, rtx);
- extern rtx gen_zero_extendvnx2hivnx2si2 (rtx, rtx);
- static inline rtx gen_extendvnx2sivnx2si2 (rtx, rtx);
- static inline rtx
- gen_extendvnx2sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx2sivnx2si2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx2sivnx2si2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx8qivnx2di2 (rtx, rtx);
- static inline rtx
- gen_extendvnx8qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx8qivnx2di2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx8qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_extendvnx4qivnx2di2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4qivnx2di2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4qivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx2qivnx2di2 (rtx, rtx);
- extern rtx gen_zero_extendvnx2qivnx2di2 (rtx, rtx);
- static inline rtx gen_extendvnx4hivnx2di2 (rtx, rtx);
- static inline rtx
- gen_extendvnx4hivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- static inline rtx gen_zero_extendvnx4hivnx2di2 (rtx, rtx);
- static inline rtx
- gen_zero_extendvnx4hivnx2di2(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b))
- {
- return 0;
- }
- extern rtx gen_extendvnx2hivnx2di2 (rtx, rtx);
- extern rtx gen_zero_extendvnx2hivnx2di2 (rtx, rtx);
- extern rtx gen_extendvnx2sivnx2di2 (rtx, rtx);
- extern rtx gen_zero_extendvnx2sivnx2di2 (rtx, rtx);
- extern rtx gen_aarch64_pred_cnotvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cnotvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cnotvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_cnotvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cnotvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cnotvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cnotvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cnotvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_absvnx8hf2 (rtx, rtx);
- extern rtx gen_negvnx8hf2 (rtx, rtx);
- extern rtx gen_frecpxvnx8hf2 (rtx, rtx);
- extern rtx gen_roundvnx8hf2 (rtx, rtx);
- extern rtx gen_nearbyintvnx8hf2 (rtx, rtx);
- extern rtx gen_floorvnx8hf2 (rtx, rtx);
- extern rtx gen_frintnvnx8hf2 (rtx, rtx);
- extern rtx gen_ceilvnx8hf2 (rtx, rtx);
- extern rtx gen_rintvnx8hf2 (rtx, rtx);
- extern rtx gen_btruncvnx8hf2 (rtx, rtx);
- extern rtx gen_absvnx4sf2 (rtx, rtx);
- extern rtx gen_negvnx4sf2 (rtx, rtx);
- extern rtx gen_frecpxvnx4sf2 (rtx, rtx);
- extern rtx gen_roundvnx4sf2 (rtx, rtx);
- extern rtx gen_nearbyintvnx4sf2 (rtx, rtx);
- extern rtx gen_floorvnx4sf2 (rtx, rtx);
- extern rtx gen_frintnvnx4sf2 (rtx, rtx);
- extern rtx gen_ceilvnx4sf2 (rtx, rtx);
- extern rtx gen_rintvnx4sf2 (rtx, rtx);
- extern rtx gen_btruncvnx4sf2 (rtx, rtx);
- extern rtx gen_absvnx2df2 (rtx, rtx);
- extern rtx gen_negvnx2df2 (rtx, rtx);
- extern rtx gen_frecpxvnx2df2 (rtx, rtx);
- extern rtx gen_roundvnx2df2 (rtx, rtx);
- extern rtx gen_nearbyintvnx2df2 (rtx, rtx);
- extern rtx gen_floorvnx2df2 (rtx, rtx);
- extern rtx gen_frintnvnx2df2 (rtx, rtx);
- extern rtx gen_ceilvnx2df2 (rtx, rtx);
- extern rtx gen_rintvnx2df2 (rtx, rtx);
- extern rtx gen_btruncvnx2df2 (rtx, rtx);
- extern rtx gen_cond_absvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_negvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_frecpxvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_roundvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_nearbyintvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floorvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_frintnvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ceilvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_rintvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_btruncvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqrtvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_absvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_negvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_frecpxvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_roundvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_nearbyintvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floorvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_frintnvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ceilvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_rintvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_btruncvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqrtvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_absvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_negvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_frecpxvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_roundvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_nearbyintvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floorvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_frintnvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ceilvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_rintvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_btruncvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqrtvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_sqrtvnx8hf2 (rtx, rtx);
- extern rtx gen_sqrtvnx4sf2 (rtx, rtx);
- extern rtx gen_sqrtvnx2df2 (rtx, rtx);
- extern rtx gen_rsqrtvnx4sf2 (rtx, rtx);
- extern rtx gen_rsqrtvnx2df2 (rtx, rtx);
- extern rtx gen_aarch64_rsqrtevnx4sf (rtx, rtx);
- extern rtx gen_aarch64_rsqrtevnx2df (rtx, rtx);
- extern rtx gen_aarch64_rsqrtsvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_rsqrtsvnx2df (rtx, rtx, rtx);
- extern rtx gen_one_cmplvnx16bi2 (rtx, rtx);
- extern rtx gen_one_cmplvnx8bi2 (rtx, rtx);
- extern rtx gen_one_cmplvnx4bi2 (rtx, rtx);
- extern rtx gen_one_cmplvnx2bi2 (rtx, rtx);
- extern rtx gen_mulvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_smaxvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_sminvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_umaxvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_uminvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_mulvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_smaxvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_sminvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_umaxvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_uminvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_mulvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_smaxvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_sminvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_umaxvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_uminvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_mulvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_smaxvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_sminvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_umaxvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_uminvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_cond_addvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_subvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smaxvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umaxvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sminvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uminvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashlvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashrvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lshrvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_andvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_iorvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_xorvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ssaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sssubvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ussubvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_addvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_subvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smaxvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umaxvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sminvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uminvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashlvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashrvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lshrvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_andvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_iorvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_xorvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ssaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sssubvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ussubvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_addvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_subvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smaxvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umaxvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sminvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uminvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashlvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashrvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lshrvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_andvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_iorvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_xorvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ssaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sssubvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ussubvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_addvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_subvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smaxvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umaxvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sminvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uminvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashlvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ashrvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lshrvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_andvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_iorvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_xorvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ssaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sssubvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ussubvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_adrvnx4si_shift (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_adrvnx2di_shift (rtx, rtx, rtx, rtx);
- extern rtx gen_sabdvnx16qi_3 (rtx, rtx, rtx);
- extern rtx gen_uabdvnx16qi_3 (rtx, rtx, rtx);
- extern rtx gen_sabdvnx8hi_3 (rtx, rtx, rtx);
- extern rtx gen_uabdvnx8hi_3 (rtx, rtx, rtx);
- extern rtx gen_sabdvnx4si_3 (rtx, rtx, rtx);
- extern rtx gen_uabdvnx4si_3 (rtx, rtx, rtx);
- extern rtx gen_sabdvnx2di_3 (rtx, rtx, rtx);
- extern rtx gen_uabdvnx2di_3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_sabdvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_uabdvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_sabdvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_uabdvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_sabdvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_uabdvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_sabdvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_uabdvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_smulvnx16qi3_highpart (rtx, rtx, rtx);
- extern rtx gen_umulvnx16qi3_highpart (rtx, rtx, rtx);
- extern rtx gen_smulvnx8hi3_highpart (rtx, rtx, rtx);
- extern rtx gen_umulvnx8hi3_highpart (rtx, rtx, rtx);
- extern rtx gen_smulvnx4si3_highpart (rtx, rtx, rtx);
- extern rtx gen_umulvnx4si3_highpart (rtx, rtx, rtx);
- extern rtx gen_smulvnx2di3_highpart (rtx, rtx, rtx);
- extern rtx gen_umulvnx2di3_highpart (rtx, rtx, rtx);
- extern rtx gen_cond_smulhvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umulhvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smulhvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umulhvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smulhvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umulhvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smulhvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_umulhvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_divvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_udivvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_divvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_udivvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_cond_divvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_udivvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_divvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_udivvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_bicvnx16qi (rtx, rtx, rtx);
- extern rtx gen_aarch64_bicvnx8hi (rtx, rtx, rtx);
- extern rtx gen_aarch64_bicvnx4si (rtx, rtx, rtx);
- extern rtx gen_aarch64_bicvnx2di (rtx, rtx, rtx);
- extern rtx gen_cond_bicvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_bicvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_bicvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_bicvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ashlvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_ashrvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_lshrvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_ashlvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_ashrvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_lshrvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_ashlvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_ashrvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_lshrvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_ashlvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_ashrvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_lshrvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_vashlvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_vashrvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_vlshrvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_vashlvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_vashrvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_vlshrvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_vashlvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_vashrvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_vlshrvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_vashlvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_vashrvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_vlshrvnx2di3 (rtx, rtx, rtx);
- extern rtx gen_cond_lslvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_asrvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lsrvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lslvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_asrvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lsrvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lslvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_asrvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_lsrvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_sdiv_pow2vnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_sdiv_pow2vnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_sdiv_pow2vnx4si3 (rtx, rtx, rtx);
- extern rtx gen_sdiv_pow2vnx2di3 (rtx, rtx, rtx);
- extern rtx gen_cond_asrdvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshluvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshrvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshrvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_asrdvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshluvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshrvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshrvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_asrdvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshluvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshrvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshrvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_asrdvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshluvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshrvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshrvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fscalevnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fscalevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fscalevnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_addvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_smaxvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_sminvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_mulvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_mulxvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_subvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_addvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_smaxvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_sminvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_mulvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_mulxvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_subvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_addvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_smax_nanvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_smaxvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_smin_nanvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_sminvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_mulvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_mulxvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_subvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_cond_addvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_divvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smax_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smaxvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smin_nanvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sminvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulxvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_subvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_addvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_divvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smax_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smaxvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smin_nanvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sminvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulxvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_subvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_addvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_divvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smax_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smaxvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_smin_nanvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sminvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_mulxvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_subvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cadd90vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cadd270vnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cadd90vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cadd270vnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cadd90vnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_cadd270vnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_abdvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_abdvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_abdvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_abdvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_abdvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_cond_abdvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_divvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_divvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_divvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpevnx8hf (rtx, rtx);
- extern rtx gen_aarch64_frecpevnx4sf (rtx, rtx);
- extern rtx gen_aarch64_frecpevnx2df (rtx, rtx);
- extern rtx gen_aarch64_frecpsvnx8hf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpsvnx4sf (rtx, rtx, rtx);
- extern rtx gen_aarch64_frecpsvnx2df (rtx, rtx, rtx);
- extern rtx gen_copysignvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_copysignvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_copysignvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_xorsignvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_xorsignvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_xorsignvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_fmaxvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_fminvnx8hf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_fminvnx4sf3 (rtx, rtx, rtx);
- extern rtx gen_fmaxvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_fminvnx2df3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx16bi3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx16bi3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx8bi3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx8bi3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx4bi3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx4bi3 (rtx, rtx, rtx);
- extern rtx gen_iorvnx2bi3 (rtx, rtx, rtx);
- extern rtx gen_xorvnx2bi3 (rtx, rtx, rtx);
- extern rtx gen_fmavnx16qi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmavnx8hi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmavnx4si4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmavnx2di4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmavnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmavnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmavnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmavnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_fnmavnx16qi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmavnx8hi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmavnx4si4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmavnx2di4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmavnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmavnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmavnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmavnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_ssadvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_usadvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_ssadvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_usadvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_fmavnx8hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmavnx8hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmsvnx8hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmsvnx8hf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmavnx4sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmavnx4sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmsvnx4sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmsvnx4sf4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmavnx2df4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmavnx2df4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fnmsvnx2df4 (rtx, rtx, rtx, rtx);
- extern rtx gen_fmsvnx2df4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmsvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmsvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fnmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fmsvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmlavnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla90vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla180vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla270vnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmlavnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla90vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla180vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla270vnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmlavnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla90vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla180vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcmla270vnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx16qivnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx8hivnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx4sivnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx2divnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx8bfvnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx8hfvnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx4sfvnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcond_mask_vnx2dfvnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx16qivnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx8hivnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx8bfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx8hfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx16qivnx16qi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx8hivnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx4sivnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx2divnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx8bfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx8hfvnx8hi (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx4sfvnx4si (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vconduvnx2dfvnx2di (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx8hivnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx4sivnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx2divnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx8bfvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx8hfvnx8hf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx4sfvnx4sf (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vcondvnx2dfvnx2df (rtx, rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpvnx16qivnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpvnx8hivnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpvnx4sivnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpvnx2divnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuvnx16qivnx16bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuvnx8hivnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuvnx4sivnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpuvnx2divnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpvnx8hfvnx8bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpvnx4sfvnx4bi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_cmpvnx2dfvnx2bi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facgevnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facgtvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_faclevnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facltvnx8hf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facgevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facgtvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_faclevnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facltvnx4sf (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facgevnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facgtvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_faclevnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_pred_facltvnx2df (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchvnx16bi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchvnx8bi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchvnx4bi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_cbranchvnx2bi4 (rtx, rtx, rtx, rtx);
- extern rtx gen_reduc_plus_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_plus_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_plus_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_plus_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_and_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_ior_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_smax_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_smin_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_umax_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_umin_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_xor_scal_vnx16qi (rtx, rtx);
- extern rtx gen_reduc_and_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_ior_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_smax_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_smin_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_umax_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_umin_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_xor_scal_vnx8hi (rtx, rtx);
- extern rtx gen_reduc_and_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_ior_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_smax_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_smin_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_umax_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_umin_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_xor_scal_vnx4si (rtx, rtx);
- extern rtx gen_reduc_and_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_ior_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_smax_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_smin_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_umax_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_umin_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_xor_scal_vnx2di (rtx, rtx);
- extern rtx gen_reduc_plus_scal_vnx8hf (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_vnx8hf (rtx, rtx);
- extern rtx gen_reduc_smax_scal_vnx8hf (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_vnx8hf (rtx, rtx);
- extern rtx gen_reduc_smin_scal_vnx8hf (rtx, rtx);
- extern rtx gen_reduc_plus_scal_vnx4sf (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_vnx4sf (rtx, rtx);
- extern rtx gen_reduc_smax_scal_vnx4sf (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_vnx4sf (rtx, rtx);
- extern rtx gen_reduc_smin_scal_vnx4sf (rtx, rtx);
- extern rtx gen_reduc_plus_scal_vnx2df (rtx, rtx);
- extern rtx gen_reduc_smax_nan_scal_vnx2df (rtx, rtx);
- extern rtx gen_reduc_smax_scal_vnx2df (rtx, rtx);
- extern rtx gen_reduc_smin_nan_scal_vnx2df (rtx, rtx);
- extern rtx gen_reduc_smin_scal_vnx2df (rtx, rtx);
- extern rtx gen_fold_left_plus_vnx8hf (rtx, rtx, rtx);
- extern rtx gen_fold_left_plus_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_fold_left_plus_vnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_permvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_permvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx16qi (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_vnx16qi (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx16qi (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_vnx16qi (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx8hi (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_vnx8hi (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx8hi (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_vnx8hi (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx4si (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_vnx4si (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx4si (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_vnx4si (rtx, rtx);
- extern rtx gen_fix_truncvnx8hfvnx8hi2 (rtx, rtx);
- extern rtx gen_fixuns_truncvnx8hfvnx8hi2 (rtx, rtx);
- extern rtx gen_fix_truncvnx4sfvnx4si2 (rtx, rtx);
- extern rtx gen_fixuns_truncvnx4sfvnx4si2 (rtx, rtx);
- extern rtx gen_fix_truncvnx2dfvnx2di2 (rtx, rtx);
- extern rtx gen_fixuns_truncvnx2dfvnx2di2 (rtx, rtx);
- extern rtx gen_cond_fix_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fixuns_trunc_nontruncvnx8hfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_fix_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fix_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_fixuns_trunc_nontruncvnx4sfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fixuns_trunc_nontruncvnx4sfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_fix_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fix_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_fixuns_trunc_nontruncvnx2dfvnx8hi (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fixuns_trunc_nontruncvnx2dfvnx8hi(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_fix_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fixuns_trunc_nontruncvnx8hfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fix_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fixuns_trunc_nontruncvnx4sfvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_fix_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fix_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_fixuns_trunc_nontruncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fixuns_trunc_nontruncvnx2dfvnx4si(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_fix_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fixuns_trunc_nontruncvnx8hfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fix_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fixuns_trunc_nontruncvnx4sfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fix_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fixuns_trunc_nontruncvnx2dfvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fix_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fixuns_trunc_truncvnx2dfvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_pack_sfix_trunc_vnx2df (rtx, rtx, rtx);
- extern rtx gen_vec_pack_ufix_trunc_vnx2df (rtx, rtx, rtx);
- extern rtx gen_floatvnx8hivnx8hf2 (rtx, rtx);
- extern rtx gen_floatunsvnx8hivnx8hf2 (rtx, rtx);
- extern rtx gen_floatvnx4sivnx4sf2 (rtx, rtx);
- extern rtx gen_floatunsvnx4sivnx4sf2 (rtx, rtx);
- extern rtx gen_floatvnx2divnx2df2 (rtx, rtx);
- extern rtx gen_floatunsvnx2divnx2df2 (rtx, rtx);
- extern rtx gen_cond_float_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floatuns_nonextendvnx8hivnx8hf (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_float_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_float_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_floatuns_nonextendvnx8hivnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_floatuns_nonextendvnx8hivnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_float_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_float_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_floatuns_nonextendvnx8hivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_floatuns_nonextendvnx8hivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_float_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floatuns_nonextendvnx4sivnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_float_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floatuns_nonextendvnx4sivnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_float_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_float_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- static inline rtx gen_cond_floatuns_nonextendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_floatuns_nonextendvnx4sivnx2df(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_float_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floatuns_nonextendvnx2divnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_float_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floatuns_nonextendvnx2divnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_float_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floatuns_nonextendvnx2divnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_float_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_floatuns_extendvnx4sivnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_unpacks_float_lo_vnx4si (rtx, rtx);
- extern rtx gen_vec_unpacks_float_hi_vnx4si (rtx, rtx);
- extern rtx gen_vec_unpacku_float_lo_vnx4si (rtx, rtx);
- extern rtx gen_vec_unpacku_float_hi_vnx4si (rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx4sf (rtx, rtx, rtx);
- extern rtx gen_vec_pack_trunc_vnx2df (rtx, rtx, rtx);
- extern rtx gen_cond_fcvt_truncvnx4sfvnx8hf (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_fcvt_truncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fcvt_truncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_fcvt_truncvnx2dfvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcvt_truncvnx2dfvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcvt_truncvnx4sfvnx8bf (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx8hf (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx8hf (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx4sf (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx4sf (rtx, rtx);
- extern rtx gen_cond_fcvt_nontruncvnx8hfvnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx gen_cond_fcvt_nontruncvnx4sfvnx4sf (rtx, rtx, rtx, rtx);
- static inline rtx
- gen_cond_fcvt_nontruncvnx4sfvnx4sf(rtx ARG_UNUSED (a), rtx ARG_UNUSED (b), rtx ARG_UNUSED (c), rtx ARG_UNUSED (d))
- {
- return 0;
- }
- extern rtx gen_cond_fcvt_nontruncvnx8hfvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcvt_nontruncvnx4sfvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx16bi (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_vnx16bi (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx16bi (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_vnx16bi (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx8bi (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_vnx8bi (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx8bi (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_vnx8bi (rtx, rtx);
- extern rtx gen_vec_unpacks_hi_vnx4bi (rtx, rtx);
- extern rtx gen_vec_unpacku_hi_vnx4bi (rtx, rtx);
- extern rtx gen_vec_unpacks_lo_vnx4bi (rtx, rtx);
- extern rtx gen_vec_unpacku_lo_vnx4bi (rtx, rtx);
- extern rtx gen_aarch64_sve_incvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecvnx8hi_pat (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incdivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincdivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincdivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incdivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincdivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincdivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incdivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincdivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincdivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incdivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincdivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincdivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincsivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincsivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincsivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincsivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincsivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincsivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincsivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincsivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incvnx2di_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincvnx2di_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincvnx2di_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incvnx4si_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincvnx4si_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincvnx4si_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_incvnx8hi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqincvnx8hi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqincvnx8hi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decdivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecdivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecdivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decdivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecdivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecdivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decdivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecdivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecdivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decdivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecdivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecdivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecsivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecsivnx16bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecsivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecsivnx8bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecsivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecsivnx4bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecsivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecsivnx2bi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decvnx2di_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecvnx2di_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecvnx2di_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decvnx4si_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecvnx4si_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecvnx4si_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_decvnx8hi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_sqdecvnx8hi_cntp (rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_uqdecvnx8hi_cntp (rtx, rtx, rtx);
- extern rtx gen_smulhsvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_umulhsvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_smulhrsvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_umulhrsvnx16qi3 (rtx, rtx, rtx);
- extern rtx gen_smulhsvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_umulhsvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_smulhrsvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_umulhrsvnx8hi3 (rtx, rtx, rtx);
- extern rtx gen_smulhsvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_umulhsvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_smulhrsvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_umulhrsvnx4si3 (rtx, rtx, rtx);
- extern rtx gen_avgvnx16qi3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgvnx16qi3_floor (rtx, rtx, rtx);
- extern rtx gen_avgvnx8hi3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgvnx8hi3_floor (rtx, rtx, rtx);
- extern rtx gen_avgvnx4si3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgvnx4si3_floor (rtx, rtx, rtx);
- extern rtx gen_avgvnx2di3_floor (rtx, rtx, rtx);
- extern rtx gen_uavgvnx2di3_floor (rtx, rtx, rtx);
- extern rtx gen_avgvnx16qi3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgvnx16qi3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgvnx8hi3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgvnx8hi3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgvnx4si3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgvnx4si3_ceil (rtx, rtx, rtx);
- extern rtx gen_avgvnx2di3_ceil (rtx, rtx, rtx);
- extern rtx gen_uavgvnx2di3_ceil (rtx, rtx, rtx);
- extern rtx gen_cond_shaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_shsubvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqrshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srhaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_suqaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhsubvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqrshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urhaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usqaddvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_shaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_shsubvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqrshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srhaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_suqaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhsubvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqrshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urhaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usqaddvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_shaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_shsubvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqrshlvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srhaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshlvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_suqaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhsubvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqrshlvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urhaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshlvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usqaddvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_shaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_shsubvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqrshlvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srhaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_srshlvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_suqaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uhsubvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqrshlvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urhaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urshlvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_usqaddvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqshlvnx16qi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqshlvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshlvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqshlvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sqshlvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uqshlvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bcaxvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bcaxvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bcaxvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bcaxvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bslvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bslvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bslvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bslvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_nbslvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_nbslvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_nbslvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_nbslvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl1nvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl1nvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl1nvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl1nvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl2nvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl2nvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl2nvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_bsl2nvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_asrvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_lsrvnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_asrvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_lsrvnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_asrvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_lsrvnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_asrvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve_add_lsrvnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_sabavnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_uabavnx16qi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_sabavnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_uabavnx8hi (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_sabavnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_uabavnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_sabavnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_aarch64_sve2_uabavnx2di (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sadalpvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uadalpvnx8hi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sadalpvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uadalpvnx4si (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_sadalpvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_uadalpvnx2di (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcvtltvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcvtltvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_fcvtxvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_urecpevnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_ursqrtevnx4si (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_flogbvnx8hf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_flogbvnx4sf (rtx, rtx, rtx, rtx);
- extern rtx gen_cond_flogbvnx2df (rtx, rtx, rtx, rtx);
- extern rtx gen_check_raw_ptrssi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_check_war_ptrssi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_check_raw_ptrsdi (rtx, rtx, rtx, rtx, rtx);
- extern rtx gen_check_war_ptrsdi (rtx, rtx, rtx, rtx, rtx);
- #endif /* GCC_INSN_FLAGS_H */
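
Note: the declarations deleted above are the expander entry points that GCC's genflags/genemit tools generate into insn-flags.h. A minimal usage sketch follows, assuming GCC's usual conventions only (it is not part of the patch): `dest`, `op1`, and `op2` are placeholder rtx operands, and `HAVE_addvnx4sf3` is the availability macro conventionally emitted earlier in this same header alongside `gen_addvnx4sf3`.

    /* Sketch only: middle-end code conventionally guards on the pattern's
       HAVE_ macro, then emits the rtx returned by the generated expander.  */
    if (HAVE_addvnx4sf3)
      emit_insn (gen_addvnx4sf3 (dest, op1, op2));

For mode combinations with no real pattern (for example gen_cond_fix_trunc_nontruncvnx4sfvnx8hi above), the header instead supplies a static inline stub that ignores its arguments and returns 0, so a caller holding such a gen_ function can alternatively test the returned rtx for null before emitting it.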