aarch64-opc.c 204 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895
  1. /* aarch64-opc.c -- AArch64 opcode support.
  2. Copyright (C) 2009-2022 Free Software Foundation, Inc.
  3. Contributed by ARM Ltd.
  4. This file is part of the GNU opcodes library.
  5. This library is free software; you can redistribute it and/or modify
  6. it under the terms of the GNU General Public License as published by
  7. the Free Software Foundation; either version 3, or (at your option)
  8. any later version.
  9. It is distributed in the hope that it will be useful, but WITHOUT
  10. ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
  11. or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
  12. License for more details.
  13. You should have received a copy of the GNU General Public License
  14. along with this program; see the file COPYING3. If not,
  15. see <http://www.gnu.org/licenses/>. */
  16. #include "sysdep.h"
  17. #include <assert.h>
  18. #include <stdlib.h>
  19. #include <stdio.h>
  20. #include <stdint.h>
  21. #include <stdarg.h>
  22. #include <inttypes.h>
  23. #include "opintl.h"
  24. #include "libiberty.h"
  25. #include "aarch64-opc.h"
#ifdef DEBUG_AARCH64
/* Non-zero enables debug dumping in this library.  NOTE(review): declared
   as int but initialized with a bool literal — presumably toggled by
   debugging consumers of the opcodes library; confirm against opintl/users.  */
int debug_dump = false;
#endif /* DEBUG_AARCH64 */
  29. /* The enumeration strings associated with each value of a 5-bit SVE
  30. pattern operand. A null entry indicates a reserved meaning. */
  31. const char *const aarch64_sve_pattern_array[32] = {
  32. /* 0-7. */
  33. "pow2",
  34. "vl1",
  35. "vl2",
  36. "vl3",
  37. "vl4",
  38. "vl5",
  39. "vl6",
  40. "vl7",
  41. /* 8-15. */
  42. "vl8",
  43. "vl16",
  44. "vl32",
  45. "vl64",
  46. "vl128",
  47. "vl256",
  48. 0,
  49. 0,
  50. /* 16-23. */
  51. 0,
  52. 0,
  53. 0,
  54. 0,
  55. 0,
  56. 0,
  57. 0,
  58. 0,
  59. /* 24-31. */
  60. 0,
  61. 0,
  62. 0,
  63. 0,
  64. 0,
  65. "mul4",
  66. "mul3",
  67. "all"
  68. };
  69. /* The enumeration strings associated with each value of a 4-bit SVE
  70. prefetch operand. A null entry indicates a reserved meaning. */
  71. const char *const aarch64_sve_prfop_array[16] = {
  72. /* 0-7. */
  73. "pldl1keep",
  74. "pldl1strm",
  75. "pldl2keep",
  76. "pldl2strm",
  77. "pldl3keep",
  78. "pldl3strm",
  79. 0,
  80. 0,
  81. /* 8-15. */
  82. "pstl1keep",
  83. "pstl1strm",
  84. "pstl2keep",
  85. "pstl2strm",
  86. "pstl3keep",
  87. "pstl3strm",
  88. 0,
  89. 0
  90. };
  91. /* Helper functions to determine which operand to be used to encode/decode
  92. the size:Q fields for AdvSIMD instructions. */
  93. static inline bool
  94. vector_qualifier_p (enum aarch64_opnd_qualifier qualifier)
  95. {
  96. return (qualifier >= AARCH64_OPND_QLF_V_8B
  97. && qualifier <= AARCH64_OPND_QLF_V_1Q);
  98. }
  99. static inline bool
  100. fp_qualifier_p (enum aarch64_opnd_qualifier qualifier)
  101. {
  102. return (qualifier >= AARCH64_OPND_QLF_S_B
  103. && qualifier <= AARCH64_OPND_QLF_S_Q);
  104. }
  105. enum data_pattern
  106. {
  107. DP_UNKNOWN,
  108. DP_VECTOR_3SAME,
  109. DP_VECTOR_LONG,
  110. DP_VECTOR_WIDE,
  111. DP_VECTOR_ACROSS_LANES,
  112. };
  113. static const char significant_operand_index [] =
  114. {
  115. 0, /* DP_UNKNOWN, by default using operand 0. */
  116. 0, /* DP_VECTOR_3SAME */
  117. 1, /* DP_VECTOR_LONG */
  118. 2, /* DP_VECTOR_WIDE */
  119. 1, /* DP_VECTOR_ACROSS_LANES */
  120. };
/* Given a sequence of qualifiers in QUALIFIERS, determine and return
   the data pattern.
   N.B. QUALIFIERS is a possible sequence of qualifiers each of which
   corresponds to one of a sequence of operands.
   The checks below are tried in order and the first match wins, so
   the ordering is significant: 3SAME is recognized before LONG/WIDE.
   Returns DP_UNKNOWN when no pattern matches.  */
static enum data_pattern
get_data_pattern (const aarch64_opnd_qualifier_seq_t qualifiers)
{
  if (vector_qualifier_p (qualifiers[0]))
    {
      /* All three operands are vectors of the same arrangement and
	 element size, e.g. v.4s, v.4s, v.4s
	 or v.4h, v.4h, v.h[3].  */
      if (qualifiers[0] == qualifiers[1]
	  && vector_qualifier_p (qualifiers[2])
	  && (aarch64_get_qualifier_esize (qualifiers[0])
	      == aarch64_get_qualifier_esize (qualifiers[1]))
	  && (aarch64_get_qualifier_esize (qualifiers[0])
	      == aarch64_get_qualifier_esize (qualifiers[2])))
	return DP_VECTOR_3SAME;
      /* Destination elements are twice the size of the (first) source
	 elements, e.g. v.8h, v.8b, v.8b.
	 or v.4s, v.4h, v.h[2].
	 or v.8h, v.16b.
	 The esize != 0 check guards against qualifiers with no defined
	 element size.  */
      if (vector_qualifier_p (qualifiers[1])
	  && aarch64_get_qualifier_esize (qualifiers[0]) != 0
	  && (aarch64_get_qualifier_esize (qualifiers[0])
	      == aarch64_get_qualifier_esize (qualifiers[1]) << 1))
	return DP_VECTOR_LONG;
      /* First two operands share the wide arrangement while the third
	 has elements half their size, e.g. v.8h, v.8h, v.8b.  */
      if (qualifiers[0] == qualifiers[1]
	  && vector_qualifier_p (qualifiers[2])
	  && aarch64_get_qualifier_esize (qualifiers[0]) != 0
	  && (aarch64_get_qualifier_esize (qualifiers[0])
	      == aarch64_get_qualifier_esize (qualifiers[2]) << 1)
	  && (aarch64_get_qualifier_esize (qualifiers[0])
	      == aarch64_get_qualifier_esize (qualifiers[1])))
	return DP_VECTOR_WIDE;
    }
  else if (fp_qualifier_p (qualifiers[0]))
    {
      /* Scalar destination reduced from a vector source, with no third
	 operand, e.g. SADDLV <V><d>, <Vn>.<T>.  */
      if (vector_qualifier_p (qualifiers[1])
	  && qualifiers[2] == AARCH64_OPND_QLF_NIL)
	return DP_VECTOR_ACROSS_LANES;
    }
  return DP_UNKNOWN;
}
  166. /* Select the operand to do the encoding/decoding of the 'size:Q' fields in
  167. the AdvSIMD instructions. */
/* N.B. it is possible to do some optimization that doesn't call
   get_data_pattern each time when we need to select an operand.  We can
   either cache the calculated result or statically generate the data;
   however, it is not obvious that the optimization would bring a
   significant benefit.  */
  173. int
  174. aarch64_select_operand_for_sizeq_field_coding (const aarch64_opcode *opcode)
  175. {
  176. return
  177. significant_operand_index [get_data_pattern (opcode->qualifiers_list[0])];
  178. }
  179. /* Instruction bit-fields.
  180. + Keep synced with 'enum aarch64_field_kind'. */
  181. const aarch64_field fields[] =
  182. {
  183. { 0, 0 }, /* NIL. */
  184. { 0, 4 }, /* cond2: condition in truly conditional-executed inst. */
  185. { 0, 4 }, /* nzcv: flag bit specifier, encoded in the "nzcv" field. */
  186. { 5, 5 }, /* defgh: d:e:f:g:h bits in AdvSIMD modified immediate. */
  187. { 16, 3 }, /* abc: a:b:c bits in AdvSIMD modified immediate. */
  188. { 5, 19 }, /* imm19: e.g. in CBZ. */
  189. { 5, 19 }, /* immhi: e.g. in ADRP. */
  190. { 29, 2 }, /* immlo: e.g. in ADRP. */
  191. { 22, 2 }, /* size: in most AdvSIMD and floating-point instructions. */
  192. { 10, 2 }, /* vldst_size: size field in the AdvSIMD load/store inst. */
  193. { 29, 1 }, /* op: in AdvSIMD modified immediate instructions. */
  194. { 30, 1 }, /* Q: in most AdvSIMD instructions. */
  195. { 0, 5 }, /* Rt: in load/store instructions. */
  196. { 0, 5 }, /* Rd: in many integer instructions. */
  197. { 5, 5 }, /* Rn: in many integer instructions. */
  198. { 10, 5 }, /* Rt2: in load/store pair instructions. */
  199. { 10, 5 }, /* Ra: in fp instructions. */
  200. { 5, 3 }, /* op2: in the system instructions. */
  201. { 8, 4 }, /* CRm: in the system instructions. */
  202. { 12, 4 }, /* CRn: in the system instructions. */
  203. { 16, 3 }, /* op1: in the system instructions. */
  204. { 19, 2 }, /* op0: in the system instructions. */
  205. { 10, 3 }, /* imm3: in add/sub extended reg instructions. */
  206. { 12, 4 }, /* cond: condition flags as a source operand. */
  207. { 12, 4 }, /* opcode: in advsimd load/store instructions. */
  208. { 12, 4 }, /* cmode: in advsimd modified immediate instructions. */
  209. { 13, 3 }, /* asisdlso_opcode: opcode in advsimd ld/st single element. */
  210. { 13, 2 }, /* len: in advsimd tbl/tbx instructions. */
  211. { 16, 5 }, /* Rm: in ld/st reg offset and some integer inst. */
  212. { 16, 5 }, /* Rs: in load/store exclusive instructions. */
  213. { 13, 3 }, /* option: in ld/st reg offset + add/sub extended reg inst. */
  214. { 12, 1 }, /* S: in load/store reg offset instructions. */
  215. { 21, 2 }, /* hw: in move wide constant instructions. */
  216. { 22, 2 }, /* opc: in load/store reg offset instructions. */
  217. { 23, 1 }, /* opc1: in load/store reg offset instructions. */
  218. { 22, 2 }, /* shift: in add/sub reg/imm shifted instructions. */
  219. { 22, 2 }, /* type: floating point type field in fp data inst. */
  220. { 30, 2 }, /* ldst_size: size field in ld/st reg offset inst. */
  221. { 10, 6 }, /* imm6: in add/sub reg shifted instructions. */
  222. { 15, 6 }, /* imm6_2: in rmif instructions. */
  223. { 11, 4 }, /* imm4: in advsimd ext and advsimd ins instructions. */
  224. { 0, 4 }, /* imm4_2: in rmif instructions. */
  225. { 10, 4 }, /* imm4_3: in adddg/subg instructions. */
  226. { 5, 4 }, /* imm4_5: in SME instructions. */
  227. { 16, 5 }, /* imm5: in conditional compare (immediate) instructions. */
  228. { 15, 7 }, /* imm7: in load/store pair pre/post index instructions. */
  229. { 13, 8 }, /* imm8: in floating-point scalar move immediate inst. */
  230. { 12, 9 }, /* imm9: in load/store pre/post index instructions. */
  231. { 10, 12 }, /* imm12: in ld/st unsigned imm or add/sub shifted inst. */
  232. { 5, 14 }, /* imm14: in test bit and branch instructions. */
  233. { 5, 16 }, /* imm16: in exception instructions. */
  234. { 0, 16 }, /* imm16_2: in udf instruction. */
  235. { 0, 26 }, /* imm26: in unconditional branch instructions. */
  236. { 10, 6 }, /* imms: in bitfield and logical immediate instructions. */
  237. { 16, 6 }, /* immr: in bitfield and logical immediate instructions. */
  238. { 16, 3 }, /* immb: in advsimd shift by immediate instructions. */
  239. { 19, 4 }, /* immh: in advsimd shift by immediate instructions. */
  240. { 22, 1 }, /* S: in LDRAA and LDRAB instructions. */
  241. { 22, 1 }, /* N: in logical (immediate) instructions. */
  242. { 11, 1 }, /* index: in ld/st inst deciding the pre/post-index. */
  243. { 24, 1 }, /* index2: in ld/st pair inst deciding the pre/post-index. */
  244. { 31, 1 }, /* sf: in integer data processing instructions. */
  245. { 30, 1 }, /* lse_size: in LSE extension atomic instructions. */
  246. { 11, 1 }, /* H: in advsimd scalar x indexed element instructions. */
  247. { 21, 1 }, /* L: in advsimd scalar x indexed element instructions. */
  248. { 20, 1 }, /* M: in advsimd scalar x indexed element instructions. */
  249. { 31, 1 }, /* b5: in the test bit and branch instructions. */
  250. { 19, 5 }, /* b40: in the test bit and branch instructions. */
  251. { 10, 6 }, /* scale: in the fixed-point scalar to fp converting inst. */
  252. { 4, 1 }, /* SVE_M_4: Merge/zero select, bit 4. */
  253. { 14, 1 }, /* SVE_M_14: Merge/zero select, bit 14. */
  254. { 16, 1 }, /* SVE_M_16: Merge/zero select, bit 16. */
  255. { 17, 1 }, /* SVE_N: SVE equivalent of N. */
  256. { 0, 4 }, /* SVE_Pd: p0-p15, bits [3,0]. */
  257. { 10, 3 }, /* SVE_Pg3: p0-p7, bits [12,10]. */
  258. { 5, 4 }, /* SVE_Pg4_5: p0-p15, bits [8,5]. */
  259. { 10, 4 }, /* SVE_Pg4_10: p0-p15, bits [13,10]. */
  260. { 16, 4 }, /* SVE_Pg4_16: p0-p15, bits [19,16]. */
  261. { 16, 4 }, /* SVE_Pm: p0-p15, bits [19,16]. */
  262. { 5, 4 }, /* SVE_Pn: p0-p15, bits [8,5]. */
  263. { 0, 4 }, /* SVE_Pt: p0-p15, bits [3,0]. */
  264. { 5, 5 }, /* SVE_Rm: SVE alternative position for Rm. */
  265. { 16, 5 }, /* SVE_Rn: SVE alternative position for Rn. */
  266. { 0, 5 }, /* SVE_Vd: Scalar SIMD&FP register, bits [4,0]. */
  267. { 5, 5 }, /* SVE_Vm: Scalar SIMD&FP register, bits [9,5]. */
  268. { 5, 5 }, /* SVE_Vn: Scalar SIMD&FP register, bits [9,5]. */
  269. { 5, 5 }, /* SVE_Za_5: SVE vector register, bits [9,5]. */
  270. { 16, 5 }, /* SVE_Za_16: SVE vector register, bits [20,16]. */
  271. { 0, 5 }, /* SVE_Zd: SVE vector register. bits [4,0]. */
  272. { 5, 5 }, /* SVE_Zm_5: SVE vector register, bits [9,5]. */
  273. { 16, 5 }, /* SVE_Zm_16: SVE vector register, bits [20,16]. */
  274. { 5, 5 }, /* SVE_Zn: SVE vector register, bits [9,5]. */
  275. { 0, 5 }, /* SVE_Zt: SVE vector register, bits [4,0]. */
  276. { 5, 1 }, /* SVE_i1: single-bit immediate. */
  277. { 22, 1 }, /* SVE_i3h: high bit of 3-bit immediate. */
  278. { 11, 1 }, /* SVE_i3l: low bit of 3-bit immediate. */
  279. { 19, 2 }, /* SVE_i3h2: two high bits of 3bit immediate, bits [20,19]. */
  280. { 20, 1 }, /* SVE_i2h: high bit of 2bit immediate, bits. */
  281. { 16, 3 }, /* SVE_imm3: 3-bit immediate field. */
  282. { 16, 4 }, /* SVE_imm4: 4-bit immediate field. */
  283. { 5, 5 }, /* SVE_imm5: 5-bit immediate field. */
  284. { 16, 5 }, /* SVE_imm5b: secondary 5-bit immediate field. */
  285. { 16, 6 }, /* SVE_imm6: 6-bit immediate field. */
  286. { 14, 7 }, /* SVE_imm7: 7-bit immediate field. */
  287. { 5, 8 }, /* SVE_imm8: 8-bit immediate field. */
  288. { 5, 9 }, /* SVE_imm9: 9-bit immediate field. */
  289. { 11, 6 }, /* SVE_immr: SVE equivalent of immr. */
  290. { 5, 6 }, /* SVE_imms: SVE equivalent of imms. */
  291. { 10, 2 }, /* SVE_msz: 2-bit shift amount for ADR. */
  292. { 5, 5 }, /* SVE_pattern: vector pattern enumeration. */
  293. { 0, 4 }, /* SVE_prfop: prefetch operation for SVE PRF[BHWD]. */
  294. { 16, 1 }, /* SVE_rot1: 1-bit rotation amount. */
  295. { 10, 2 }, /* SVE_rot2: 2-bit rotation amount. */
  296. { 10, 1 }, /* SVE_rot3: 1-bit rotation amount at bit 10. */
  297. { 22, 1 }, /* SVE_sz: 1-bit element size select. */
  298. { 17, 2 }, /* SVE_size: 2-bit element size, bits [18,17]. */
  299. { 30, 1 }, /* SVE_sz2: 1-bit element size select. */
  300. { 16, 4 }, /* SVE_tsz: triangular size select. */
  301. { 22, 2 }, /* SVE_tszh: triangular size select high, bits [23,22]. */
  302. { 8, 2 }, /* SVE_tszl_8: triangular size select low, bits [9,8]. */
  303. { 19, 2 }, /* SVE_tszl_19: triangular size select low, bits [20,19]. */
  304. { 14, 1 }, /* SVE_xs_14: UXTW/SXTW select (bit 14). */
  305. { 22, 1 }, /* SVE_xs_22: UXTW/SXTW select (bit 22). */
  306. { 0, 2 }, /* SME ZAda tile ZA0-ZA3. */
  307. { 0, 3 }, /* SME ZAda tile ZA0-ZA7. */
  308. { 22, 2 }, /* SME_size_10: size<1>, size<0> class field, [23:22]. */
  309. { 16, 1 }, /* SME_Q: Q class bit, bit 16. */
  310. { 15, 1 }, /* SME_V: (horizontal / vertical tiles), bit 15. */
  311. { 13, 2 }, /* SME_Rv: vector select register W12-W15, bits [14:13]. */
  312. { 13, 3 }, /* SME Pm second source scalable predicate register P0-P7. */
  313. { 0, 8 }, /* SME_zero_mask: list of up to 8 tile names separated by commas [7:0]. */
  314. { 16, 2 }, /* SME_Rm: index base register W12-W15 [17:16]. */
  315. { 23, 1 }, /* SME_i1: immediate field, bit 23. */
  316. { 22, 1 }, /* SME_tszh: immediate and qualifier field, bit 22. */
  317. { 18, 3 }, /* SME_tshl: immediate and qualifier field, bits [20:18]. */
  318. { 11, 2 }, /* rotate1: FCMLA immediate rotate. */
  319. { 13, 2 }, /* rotate2: Indexed element FCMLA immediate rotate. */
  320. { 12, 1 }, /* rotate3: FCADD immediate rotate. */
  321. { 12, 2 }, /* SM3: Indexed element SM3 2 bits index immediate. */
  322. { 22, 1 }, /* sz: 1-bit element size select. */
  323. { 10, 2 }, /* CRm_dsb_nxs: 2-bit imm. encoded in CRm<3:2>. */
  324. };
  325. enum aarch64_operand_class
  326. aarch64_get_operand_class (enum aarch64_opnd type)
  327. {
  328. return aarch64_operands[type].op_class;
  329. }
  330. const char *
  331. aarch64_get_operand_name (enum aarch64_opnd type)
  332. {
  333. return aarch64_operands[type].name;
  334. }
  335. /* Get operand description string.
  336. This is usually for the diagnosis purpose. */
  337. const char *
  338. aarch64_get_operand_desc (enum aarch64_opnd type)
  339. {
  340. return aarch64_operands[type].desc;
  341. }
/* Table of all conditional affixes.  Indexed by the 4-bit condition code;
   each entry lists the accepted spellings of the condition together with
   its encoding value.  Conditions with opposite sense differ only in
   bit 0 of the value (see get_inverted_cond).  */
const aarch64_cond aarch64_conds[16] =
{
  {{"eq", "none"}, 0x0},
  {{"ne", "any"}, 0x1},
  {{"cs", "hs", "nlast"}, 0x2},
  {{"cc", "lo", "ul", "last"}, 0x3},
  {{"mi", "first"}, 0x4},
  {{"pl", "nfrst"}, 0x5},
  {{"vs"}, 0x6},
  {{"vc"}, 0x7},
  {{"hi", "pmore"}, 0x8},
  {{"ls", "plast"}, 0x9},
  {{"ge", "tcont"}, 0xa},
  {{"lt", "tstop"}, 0xb},
  {{"gt"}, 0xc},
  {{"le"}, 0xd},
  {{"al"}, 0xe},
  {{"nv"}, 0xf},
};
  362. const aarch64_cond *
  363. get_cond_from_value (aarch64_insn value)
  364. {
  365. assert (value < 16);
  366. return &aarch64_conds[(unsigned int) value];
  367. }
  368. const aarch64_cond *
  369. get_inverted_cond (const aarch64_cond *cond)
  370. {
  371. return &aarch64_conds[cond->value ^ 0x1];
  372. }
/* Table describing the operand extension/shifting operators; indexed by
   enum aarch64_modifier_kind.

   The value column provides the most common values for encoding modifiers,
   which enables table-driven encoding/decoding for the modifiers.

   The shift operators (ror/asr/lsr/lsl) and the extend operators
   (uxtb..sxtx) each form a contiguous run, so a kind can be recovered
   arithmetically from an encoded value (see
   aarch64_get_operand_modifier_from_value).  */
const struct aarch64_name_value_pair aarch64_operand_modifiers [] =
{
    {"none", 0x0},
    {"msl", 0x0},
    {"ror", 0x3},
    {"asr", 0x2},
    {"lsr", 0x1},
    {"lsl", 0x0},
    {"uxtb", 0x0},
    {"uxth", 0x1},
    {"uxtw", 0x2},
    {"uxtx", 0x3},
    {"sxtb", 0x4},
    {"sxth", 0x5},
    {"sxtw", 0x6},
    {"sxtx", 0x7},
    {"mul", 0x0},
    {"mul vl", 0x0},
    {NULL, 0},
};
  397. enum aarch64_modifier_kind
  398. aarch64_get_operand_modifier (const struct aarch64_name_value_pair *desc)
  399. {
  400. return desc - aarch64_operand_modifiers;
  401. }
  402. aarch64_insn
  403. aarch64_get_operand_modifier_value (enum aarch64_modifier_kind kind)
  404. {
  405. return aarch64_operand_modifiers[kind].value;
  406. }
  407. enum aarch64_modifier_kind
  408. aarch64_get_operand_modifier_from_value (aarch64_insn value,
  409. bool extend_p)
  410. {
  411. if (extend_p)
  412. return AARCH64_MOD_UXTB + value;
  413. else
  414. return AARCH64_MOD_LSL - value;
  415. }
  416. bool
  417. aarch64_extend_operator_p (enum aarch64_modifier_kind kind)
  418. {
  419. return kind > AARCH64_MOD_LSL && kind <= AARCH64_MOD_SXTX;
  420. }
  421. static inline bool
  422. aarch64_shift_operator_p (enum aarch64_modifier_kind kind)
  423. {
  424. return kind >= AARCH64_MOD_ROR && kind <= AARCH64_MOD_LSL;
  425. }
/* Names for the 4-bit memory barrier option field, indexed by the option
   value.  Values with no mnemonic are spelled as plain immediates
   ("#0xNN").  */
const struct aarch64_name_value_pair aarch64_barrier_options[16] =
{
  { "#0x00", 0x0 },
  { "oshld", 0x1 },
  { "oshst", 0x2 },
  { "osh", 0x3 },
  { "#0x04", 0x4 },
  { "nshld", 0x5 },
  { "nshst", 0x6 },
  { "nsh", 0x7 },
  { "#0x08", 0x8 },
  { "ishld", 0x9 },
  { "ishst", 0xa },
  { "ish", 0xb },
  { "#0x0c", 0xc },
  { "ld", 0xd },
  { "st", 0xe },
  { "sy", 0xf },
};
/* Barrier option names for the DSB nXS variant, indexed by CRm<3:2>.
   The value column is the full immediate (16 + (CRm<3:2> << 2)).  */
const struct aarch64_name_value_pair aarch64_barrier_dsb_nxs_options[4] =
{                       /*  CRm<3:2>  #imm  */
  { "oshnxs", 16 },     /*    00       16   */
  { "nshnxs", 20 },     /*    01       20   */
  { "ishnxs", 24 },     /*    10       24   */
  { "synxs", 28 },      /*    11       28   */
};
/* Table describing the operands supported by the aliases of the HINT
   instruction.

   The name column is the operand that is accepted for the alias.  The value
   column is the hint number of the alias.  The list of operands is
   terminated by NULL in the name column.  */
const struct aarch64_name_value_pair aarch64_hint_options[] =
{
  /* BTI.  This is also the F_DEFAULT entry for AARCH64_OPND_BTI_TARGET.
     The " " name is never printed (HINT_OPD_F_NOPRINT).  */
  { " ", HINT_ENCODE (HINT_OPD_F_NOPRINT, 0x20) },
  { "csync", HINT_OPD_CSYNC },	/* PSB CSYNC.  */
  { "c", HINT_OPD_C },		/* BTI C.  */
  { "j", HINT_OPD_J },		/* BTI J.  */
  { "jc", HINT_OPD_JC },	/* BTI JC.  */
  { NULL, HINT_OPD_NULL },
};
/* Encode one PRFM prefetch operation:
   op -> op: load = 0 instruction = 1 store = 2
   l -> level: 1-3
   t -> temporal: temporal (retained) = 0 non-temporal (streaming) = 1  */
#define B(op,l,t) (((op) << 3) | (((l) - 1) << 1) | (t))

/* Table of PRFM prefetch operation names, indexed by the 5-bit prfop
   value.  NULL entries have no architected name.  */
const struct aarch64_name_value_pair aarch64_prfops[32] =
{
  { "pldl1keep", B(0, 1, 0) },
  { "pldl1strm", B(0, 1, 1) },
  { "pldl2keep", B(0, 2, 0) },
  { "pldl2strm", B(0, 2, 1) },
  { "pldl3keep", B(0, 3, 0) },
  { "pldl3strm", B(0, 3, 1) },
  { NULL, 0x06 },
  { NULL, 0x07 },
  { "plil1keep", B(1, 1, 0) },
  { "plil1strm", B(1, 1, 1) },
  { "plil2keep", B(1, 2, 0) },
  { "plil2strm", B(1, 2, 1) },
  { "plil3keep", B(1, 3, 0) },
  { "plil3strm", B(1, 3, 1) },
  { NULL, 0x0e },
  { NULL, 0x0f },
  { "pstl1keep", B(2, 1, 0) },
  { "pstl1strm", B(2, 1, 1) },
  { "pstl2keep", B(2, 2, 0) },
  { "pstl2strm", B(2, 2, 1) },
  { "pstl3keep", B(2, 3, 0) },
  { "pstl3strm", B(2, 3, 1) },
  { NULL, 0x16 },
  { NULL, 0x17 },
  { NULL, 0x18 },
  { NULL, 0x19 },
  { NULL, 0x1a },
  { NULL, 0x1b },
  { NULL, 0x1c },
  { NULL, 0x1d },
  { NULL, 0x1e },
  { NULL, 0x1f },
};
#undef B
  507. /* Utilities on value constraint. */
  508. static inline int
  509. value_in_range_p (int64_t value, int low, int high)
  510. {
  511. return (value >= low && value <= high) ? 1 : 0;
  512. }
  513. /* Return true if VALUE is a multiple of ALIGN. */
  514. static inline int
  515. value_aligned_p (int64_t value, int align)
  516. {
  517. return (value % align) == 0;
  518. }
  519. /* A signed value fits in a field. */
  520. static inline int
  521. value_fit_signed_field_p (int64_t value, unsigned width)
  522. {
  523. assert (width < 32);
  524. if (width < sizeof (value) * 8)
  525. {
  526. int64_t lim = (uint64_t) 1 << (width - 1);
  527. if (value >= -lim && value < lim)
  528. return 1;
  529. }
  530. return 0;
  531. }
  532. /* An unsigned value fits in a field. */
  533. static inline int
  534. value_fit_unsigned_field_p (int64_t value, unsigned width)
  535. {
  536. assert (width < 32);
  537. if (width < sizeof (value) * 8)
  538. {
  539. int64_t lim = (uint64_t) 1 << width;
  540. if (value >= 0 && value < lim)
  541. return 1;
  542. }
  543. return 0;
  544. }
  545. /* Return 1 if OPERAND is SP or WSP. */
  546. int
  547. aarch64_stack_pointer_p (const aarch64_opnd_info *operand)
  548. {
  549. return ((aarch64_get_operand_class (operand->type)
  550. == AARCH64_OPND_CLASS_INT_REG)
  551. && operand_maybe_stack_pointer (aarch64_operands + operand->type)
  552. && operand->reg.regno == 31);
  553. }
  554. /* Return 1 if OPERAND is XZR or WZP. */
  555. int
  556. aarch64_zero_register_p (const aarch64_opnd_info *operand)
  557. {
  558. return ((aarch64_get_operand_class (operand->type)
  559. == AARCH64_OPND_CLASS_INT_REG)
  560. && !operand_maybe_stack_pointer (aarch64_operands + operand->type)
  561. && operand->reg.regno == 31);
  562. }
/* Return true if the operand *OPERAND that has the operand code
   OPERAND->TYPE and been qualified by OPERAND->QUALIFIER can be also
   qualified by the qualifier TARGET.  */
static inline int
operand_also_qualified_p (const struct aarch64_opnd_info *operand,
			  aarch64_opnd_qualifier_t target)
{
  switch (operand->qualifier)
    {
    /* A W/X register numbered 31 can also be read as WSP/SP.  */
    case AARCH64_OPND_QLF_W:
      if (target == AARCH64_OPND_QLF_WSP && aarch64_stack_pointer_p (operand))
	return 1;
      break;
    case AARCH64_OPND_QLF_X:
      if (target == AARCH64_OPND_QLF_SP && aarch64_stack_pointer_p (operand))
	return 1;
      break;
    /* Conversely, WSP/SP can also be read as W/X when the operand type
       admits the stack pointer at all.  */
    case AARCH64_OPND_QLF_WSP:
      if (target == AARCH64_OPND_QLF_W
	  && operand_maybe_stack_pointer (aarch64_operands + operand->type))
	return 1;
      break;
    case AARCH64_OPND_QLF_SP:
      if (target == AARCH64_OPND_QLF_X
	  && operand_maybe_stack_pointer (aarch64_operands + operand->type))
	return 1;
      break;
    default:
      break;
    }
  return 0;
}
  595. /* Given qualifier sequence list QSEQ_LIST and the known qualifier KNOWN_QLF
  596. for operand KNOWN_IDX, return the expected qualifier for operand IDX.
  597. Return NIL if more than one expected qualifiers are found. */
  598. aarch64_opnd_qualifier_t
  599. aarch64_get_expected_qualifier (const aarch64_opnd_qualifier_seq_t *qseq_list,
  600. int idx,
  601. const aarch64_opnd_qualifier_t known_qlf,
  602. int known_idx)
  603. {
  604. int i, saved_i;
  605. /* Special case.
  606. When the known qualifier is NIL, we have to assume that there is only
  607. one qualifier sequence in the *QSEQ_LIST and return the corresponding
  608. qualifier directly. One scenario is that for instruction
  609. PRFM <prfop>, [<Xn|SP>, #:lo12:<symbol>]
  610. which has only one possible valid qualifier sequence
  611. NIL, S_D
  612. the caller may pass NIL in KNOWN_QLF to obtain S_D so that it can
  613. determine the correct relocation type (i.e. LDST64_LO12) for PRFM.
  614. Because the qualifier NIL has dual roles in the qualifier sequence:
  615. it can mean no qualifier for the operand, or the qualifer sequence is
  616. not in use (when all qualifiers in the sequence are NILs), we have to
  617. handle this special case here. */
  618. if (known_qlf == AARCH64_OPND_NIL)
  619. {
  620. assert (qseq_list[0][known_idx] == AARCH64_OPND_NIL);
  621. return qseq_list[0][idx];
  622. }
  623. for (i = 0, saved_i = -1; i < AARCH64_MAX_QLF_SEQ_NUM; ++i)
  624. {
  625. if (qseq_list[i][known_idx] == known_qlf)
  626. {
  627. if (saved_i != -1)
  628. /* More than one sequences are found to have KNOWN_QLF at
  629. KNOWN_IDX. */
  630. return AARCH64_OPND_NIL;
  631. saved_i = i;
  632. }
  633. }
  634. return qseq_list[saved_i][idx];
  635. }
  636. enum operand_qualifier_kind
  637. {
  638. OQK_NIL,
  639. OQK_OPD_VARIANT,
  640. OQK_VALUE_IN_RANGE,
  641. OQK_MISC,
  642. };
  643. /* Operand qualifier description. */
  644. struct operand_qualifier_data
  645. {
  646. /* The usage of the three data fields depends on the qualifier kind. */
  647. int data0;
  648. int data1;
  649. int data2;
  650. /* Description. */
  651. const char *desc;
  652. /* Kind. */
  653. enum operand_qualifier_kind kind;
  654. };
  655. /* Indexed by the operand qualifier enumerators. */
  656. struct operand_qualifier_data aarch64_opnd_qualifiers[] =
  657. {
  658. {0, 0, 0, "NIL", OQK_NIL},
  659. /* Operand variant qualifiers.
  660. First 3 fields:
  661. element size, number of elements and common value for encoding. */
  662. {4, 1, 0x0, "w", OQK_OPD_VARIANT},
  663. {8, 1, 0x1, "x", OQK_OPD_VARIANT},
  664. {4, 1, 0x0, "wsp", OQK_OPD_VARIANT},
  665. {8, 1, 0x1, "sp", OQK_OPD_VARIANT},
  666. {1, 1, 0x0, "b", OQK_OPD_VARIANT},
  667. {2, 1, 0x1, "h", OQK_OPD_VARIANT},
  668. {4, 1, 0x2, "s", OQK_OPD_VARIANT},
  669. {8, 1, 0x3, "d", OQK_OPD_VARIANT},
  670. {16, 1, 0x4, "q", OQK_OPD_VARIANT},
  671. {4, 1, 0x0, "4b", OQK_OPD_VARIANT},
  672. {4, 1, 0x0, "2h", OQK_OPD_VARIANT},
  673. {1, 4, 0x0, "4b", OQK_OPD_VARIANT},
  674. {1, 8, 0x0, "8b", OQK_OPD_VARIANT},
  675. {1, 16, 0x1, "16b", OQK_OPD_VARIANT},
  676. {2, 2, 0x0, "2h", OQK_OPD_VARIANT},
  677. {2, 4, 0x2, "4h", OQK_OPD_VARIANT},
  678. {2, 8, 0x3, "8h", OQK_OPD_VARIANT},
  679. {4, 2, 0x4, "2s", OQK_OPD_VARIANT},
  680. {4, 4, 0x5, "4s", OQK_OPD_VARIANT},
  681. {8, 1, 0x6, "1d", OQK_OPD_VARIANT},
  682. {8, 2, 0x7, "2d", OQK_OPD_VARIANT},
  683. {16, 1, 0x8, "1q", OQK_OPD_VARIANT},
  684. {0, 0, 0, "z", OQK_OPD_VARIANT},
  685. {0, 0, 0, "m", OQK_OPD_VARIANT},
  686. /* Qualifier for scaled immediate for Tag granule (stg,st2g,etc). */
  687. {16, 0, 0, "tag", OQK_OPD_VARIANT},
  688. /* Qualifiers constraining the value range.
  689. First 3 fields:
  690. Lower bound, higher bound, unused. */
  691. {0, 15, 0, "CR", OQK_VALUE_IN_RANGE},
  692. {0, 7, 0, "imm_0_7" , OQK_VALUE_IN_RANGE},
  693. {0, 15, 0, "imm_0_15", OQK_VALUE_IN_RANGE},
  694. {0, 31, 0, "imm_0_31", OQK_VALUE_IN_RANGE},
  695. {0, 63, 0, "imm_0_63", OQK_VALUE_IN_RANGE},
  696. {1, 32, 0, "imm_1_32", OQK_VALUE_IN_RANGE},
  697. {1, 64, 0, "imm_1_64", OQK_VALUE_IN_RANGE},
  698. /* Qualifiers for miscellaneous purpose.
  699. First 3 fields:
  700. unused, unused and unused. */
  701. {0, 0, 0, "lsl", 0},
  702. {0, 0, 0, "msl", 0},
  703. {0, 0, 0, "retrieving", 0},
  704. };
  705. static inline bool
  706. operand_variant_qualifier_p (aarch64_opnd_qualifier_t qualifier)
  707. {
  708. return aarch64_opnd_qualifiers[qualifier].kind == OQK_OPD_VARIANT;
  709. }
  710. static inline bool
  711. qualifier_value_in_range_constraint_p (aarch64_opnd_qualifier_t qualifier)
  712. {
  713. return aarch64_opnd_qualifiers[qualifier].kind == OQK_VALUE_IN_RANGE;
  714. }
  715. const char*
  716. aarch64_get_qualifier_name (aarch64_opnd_qualifier_t qualifier)
  717. {
  718. return aarch64_opnd_qualifiers[qualifier].desc;
  719. }
  720. /* Given an operand qualifier, return the expected data element size
  721. of a qualified operand. */
  722. unsigned char
  723. aarch64_get_qualifier_esize (aarch64_opnd_qualifier_t qualifier)
  724. {
  725. assert (operand_variant_qualifier_p (qualifier));
  726. return aarch64_opnd_qualifiers[qualifier].data0;
  727. }
  728. unsigned char
  729. aarch64_get_qualifier_nelem (aarch64_opnd_qualifier_t qualifier)
  730. {
  731. assert (operand_variant_qualifier_p (qualifier));
  732. return aarch64_opnd_qualifiers[qualifier].data1;
  733. }
  734. aarch64_insn
  735. aarch64_get_qualifier_standard_value (aarch64_opnd_qualifier_t qualifier)
  736. {
  737. assert (operand_variant_qualifier_p (qualifier));
  738. return aarch64_opnd_qualifiers[qualifier].data2;
  739. }
  740. static int
  741. get_lower_bound (aarch64_opnd_qualifier_t qualifier)
  742. {
  743. assert (qualifier_value_in_range_constraint_p (qualifier));
  744. return aarch64_opnd_qualifiers[qualifier].data0;
  745. }
  746. static int
  747. get_upper_bound (aarch64_opnd_qualifier_t qualifier)
  748. {
  749. assert (qualifier_value_in_range_constraint_p (qualifier));
  750. return aarch64_opnd_qualifiers[qualifier].data1;
  751. }
#ifdef DEBUG_AARCH64

/* printf-style debug trace helper; prefixes the message with "#### " and
   appends a newline.  */
void
aarch64_verbose (const char *str, ...)
{
  va_list ap;
  va_start (ap, str);
  printf ("#### ");
  vprintf (str, ap);
  printf ("\n");
  va_end (ap);
}

/* Print the AARCH64_MAX_OPND_NUM qualifiers starting at QUALIFIER as one
   comma-separated trace line.  */
static inline void
dump_qualifier_sequence (const aarch64_opnd_qualifier_t *qualifier)
{
  int i;
  printf ("#### \t");
  for (i = 0; i < AARCH64_MAX_OPND_NUM; ++i, ++qualifier)
    printf ("%s,", aarch64_get_qualifier_name (*qualifier));
  printf ("\n");
}

/* Trace the qualifiers currently set on the operands OPND against the
   candidate qualifier sequence QUALIFIER being matched.  */
static void
dump_match_qualifiers (const struct aarch64_opnd_info *opnd,
		       const aarch64_opnd_qualifier_t *qualifier)
{
  int i;
  aarch64_opnd_qualifier_t curr[AARCH64_MAX_OPND_NUM];
  aarch64_verbose ("dump_match_qualifiers:");
  for (i = 0; i < AARCH64_MAX_OPND_NUM; ++i)
    curr[i] = opnd[i].qualifier;
  dump_qualifier_sequence (curr);
  aarch64_verbose ("against");
  dump_qualifier_sequence (qualifier);
}
#endif /* DEBUG_AARCH64 */
  786. /* This function checks if the given instruction INSN is a destructive
  787. instruction based on the usage of the registers. It does not recognize
  788. unary destructive instructions. */
  789. bool
  790. aarch64_is_destructive_by_operands (const aarch64_opcode *opcode)
  791. {
  792. int i = 0;
  793. const enum aarch64_opnd *opnds = opcode->operands;
  794. if (opnds[0] == AARCH64_OPND_NIL)
  795. return false;
  796. while (opnds[++i] != AARCH64_OPND_NIL)
  797. if (opnds[i] == opnds[0])
  798. return true;
  799. return false;
  800. }
  801. /* TODO improve this, we can have an extra field at the runtime to
  802. store the number of operands rather than calculating it every time. */
  803. int
  804. aarch64_num_of_operands (const aarch64_opcode *opcode)
  805. {
  806. int i = 0;
  807. const enum aarch64_opnd *opnds = opcode->operands;
  808. while (opnds[i++] != AARCH64_OPND_NIL)
  809. ;
  810. --i;
  811. assert (i >= 0 && i <= AARCH64_MAX_OPND_NUM);
  812. return i;
  813. }
/* Find the best matched qualifier sequence in *QUALIFIERS_LIST for INST.
   If succeeds, fill the found sequence in *RET, return 1; otherwise return 0.

   N.B. on the entry, it is very likely that only some operands in *INST
   have had their qualifiers been established.

   If STOP_AT is not -1, the function will only try to match
   the qualifier sequence for operands before and including the operand
   of index STOP_AT; and on success *RET will only be filled with the first
   (STOP_AT+1) qualifiers.

   A couple examples of the matching algorithm:

   X,W,NIL should match
   X,W,NIL

   NIL,NIL should match
   X  ,NIL

   Apart from serving the main encoding routine, this can also be called
   during or after the operand decoding.  */
int
aarch64_find_best_match (const aarch64_inst *inst,
			 const aarch64_opnd_qualifier_seq_t *qualifiers_list,
			 int stop_at, aarch64_opnd_qualifier_t *ret)
{
  int found = 0;
  int i, num_opnds;
  const aarch64_opnd_qualifier_t *qualifiers;

  num_opnds = aarch64_num_of_operands (inst->opcode);
  if (num_opnds == 0)
    {
      /* N.B. *RET is not filled in this case.  */
      DEBUG_TRACE ("SUCCEED: no operand");
      return 1;
    }

  /* Clamp STOP_AT to the last operand index.  */
  if (stop_at < 0 || stop_at >= num_opnds)
    stop_at = num_opnds - 1;

  /* For each pattern.  */
  for (i = 0; i < AARCH64_MAX_QLF_SEQ_NUM; ++i, ++qualifiers_list)
    {
      int j;
      qualifiers = *qualifiers_list;

      /* Start as positive; any mismatch below clears FOUND.  */
      found = 1;

      DEBUG_TRACE ("%d", i);
#ifdef DEBUG_AARCH64
      if (debug_dump)
	dump_match_qualifiers (inst->operands, qualifiers);
#endif

      /* Most opcodes has much fewer patterns in the list.
	 First NIL qualifier indicates the end in the list.  */
      if (empty_qualifier_sequence_p (qualifiers))
	{
	  /* An empty sequence only matches as the very first (and only)
	     candidate.  */
	  DEBUG_TRACE_IF (i == 0, "SUCCEED: empty qualifier list");
	  if (i)
	    found = 0;
	  break;
	}

      for (j = 0; j < num_opnds && j <= stop_at; ++j, ++qualifiers)
	{
	  if (inst->operands[j].qualifier == AARCH64_OPND_QLF_NIL)
	    {
	      /* Either the operand does not have qualifier, or the qualifier
		 for the operand needs to be deduced from the qualifier
		 sequence.
		 In the latter case, any constraint checking related with
		 the obtained qualifier should be done later in
		 operand_general_constraint_met_p.  */
	      continue;
	    }
	  else if (*qualifiers != inst->operands[j].qualifier)
	    {
	      /* Unless the target qualifier can also qualify the operand
		 (which has already had a non-nil qualifier), non-equal
		 qualifiers are generally un-matched.  */
	      if (operand_also_qualified_p (inst->operands + j, *qualifiers))
		continue;
	      else
		{
		  found = 0;
		  break;
		}
	    }
	  else
	    continue;	/* Equal qualifiers are certainly matched.  */
	}

      /* Qualifiers established.  */
      if (found == 1)
	break;
    }

  if (found == 1)
    {
      /* Fill the result in *RET; unmatched trailing slots are set to
	 NIL.  */
      int j;
      qualifiers = *qualifiers_list;

      DEBUG_TRACE ("complete qualifiers using list %d", i);
#ifdef DEBUG_AARCH64
      if (debug_dump)
	dump_qualifier_sequence (qualifiers);
#endif

      for (j = 0; j <= stop_at; ++j, ++qualifiers)
	ret[j] = *qualifiers;
      for (; j < AARCH64_MAX_OPND_NUM; ++j)
	ret[j] = AARCH64_OPND_QLF_NIL;

      DEBUG_TRACE ("SUCCESS");
      return 1;
    }

  DEBUG_TRACE ("FAIL");
  return 0;
}
  918. /* Operand qualifier matching and resolving.
  919. Return 1 if the operand qualifier(s) in *INST match one of the qualifier
  920. sequences in INST->OPCODE->qualifiers_list; otherwise return 0.
  921. if UPDATE_P, update the qualifier(s) in *INST after the matching
  922. succeeds. */
  923. static int
  924. match_operands_qualifier (aarch64_inst *inst, bool update_p)
  925. {
  926. int i, nops;
  927. aarch64_opnd_qualifier_seq_t qualifiers;
  928. if (!aarch64_find_best_match (inst, inst->opcode->qualifiers_list, -1,
  929. qualifiers))
  930. {
  931. DEBUG_TRACE ("matching FAIL");
  932. return 0;
  933. }
  934. if (inst->opcode->flags & F_STRICT)
  935. {
  936. /* Require an exact qualifier match, even for NIL qualifiers. */
  937. nops = aarch64_num_of_operands (inst->opcode);
  938. for (i = 0; i < nops; ++i)
  939. if (inst->operands[i].qualifier != qualifiers[i])
  940. return false;
  941. }
  942. /* Update the qualifiers. */
  943. if (update_p)
  944. for (i = 0; i < AARCH64_MAX_OPND_NUM; ++i)
  945. {
  946. if (inst->opcode->operands[i] == AARCH64_OPND_NIL)
  947. break;
  948. DEBUG_TRACE_IF (inst->operands[i].qualifier != qualifiers[i],
  949. "update %s with %s for operand %d",
  950. aarch64_get_qualifier_name (inst->operands[i].qualifier),
  951. aarch64_get_qualifier_name (qualifiers[i]), i);
  952. inst->operands[i].qualifier = qualifiers[i];
  953. }
  954. DEBUG_TRACE ("matching SUCCESS");
  955. return 1;
  956. }
  957. /* Return TRUE if VALUE is a wide constant that can be moved into a general
  958. register by MOVZ.
  959. IS32 indicates whether value is a 32-bit immediate or not.
  960. If SHIFT_AMOUNT is not NULL, on the return of TRUE, the logical left shift
  961. amount will be returned in *SHIFT_AMOUNT. */
  962. bool
  963. aarch64_wide_constant_p (uint64_t value, int is32, unsigned int *shift_amount)
  964. {
  965. int amount;
  966. DEBUG_TRACE ("enter with 0x%" PRIx64 "(%" PRIi64 ")", value, value);
  967. if (is32)
  968. {
  969. /* Allow all zeros or all ones in top 32-bits, so that
  970. 32-bit constant expressions like ~0x80000000 are
  971. permitted. */
  972. if (value >> 32 != 0 && value >> 32 != 0xffffffff)
  973. /* Immediate out of range. */
  974. return false;
  975. value &= 0xffffffff;
  976. }
  977. /* first, try movz then movn */
  978. amount = -1;
  979. if ((value & ((uint64_t) 0xffff << 0)) == value)
  980. amount = 0;
  981. else if ((value & ((uint64_t) 0xffff << 16)) == value)
  982. amount = 16;
  983. else if (!is32 && (value & ((uint64_t) 0xffff << 32)) == value)
  984. amount = 32;
  985. else if (!is32 && (value & ((uint64_t) 0xffff << 48)) == value)
  986. amount = 48;
  987. if (amount == -1)
  988. {
  989. DEBUG_TRACE ("exit false with 0x%" PRIx64 "(%" PRIi64 ")", value, value);
  990. return false;
  991. }
  992. if (shift_amount != NULL)
  993. *shift_amount = amount;
  994. DEBUG_TRACE ("exit true with amount %d", amount);
  995. return true;
  996. }
/* Build the accepted values for immediate logical SIMD instructions.

   The standard encodings of the immediate value are:
     N      imms     immr         SIMD size  R             S
     1      ssssss   rrrrrr       64         UInt(rrrrrr)  UInt(ssssss)
     0      0sssss   0rrrrr       32         UInt(rrrrr)   UInt(sssss)
     0      10ssss   00rrrr       16         UInt(rrrr)    UInt(ssss)
     0      110sss   000rrr       8          UInt(rrr)     UInt(sss)
     0      1110ss   0000rr       4          UInt(rr)      UInt(ss)
     0      11110s   00000r       2          UInt(r)       UInt(s)
   where all-ones value of S is reserved.

   Let's call E the SIMD size.

   The immediate value is: S+1 bits '1' rotated to the right by R.

   The total of valid encodings is 64*63 + 32*31 + ... + 2*1 = 5334
   (remember S != E - 1).  */

#define TOTAL_IMM_NB 5334

/* One valid logical-immediate bit pattern together with its standard
   13-bit (N:immr:imms) encoding.  */
typedef struct
{
  uint64_t imm;
  aarch64_insn encoding;
} simd_imm_encoding;

/* All valid logical immediates; filled once and sorted by IMM by
   build_immediate_table.  */
static simd_imm_encoding simd_immediates[TOTAL_IMM_NB];
  1018. static int
  1019. simd_imm_encoding_cmp(const void *i1, const void *i2)
  1020. {
  1021. const simd_imm_encoding *imm1 = (const simd_imm_encoding *)i1;
  1022. const simd_imm_encoding *imm2 = (const simd_imm_encoding *)i2;
  1023. if (imm1->imm < imm2->imm)
  1024. return -1;
  1025. if (imm1->imm > imm2->imm)
  1026. return +1;
  1027. return 0;
  1028. }
  1029. /* immediate bitfield standard encoding
  1030. imm13<12> imm13<5:0> imm13<11:6> SIMD size R S
  1031. 1 ssssss rrrrrr 64 rrrrrr ssssss
  1032. 0 0sssss 0rrrrr 32 rrrrr sssss
  1033. 0 10ssss 00rrrr 16 rrrr ssss
  1034. 0 110sss 000rrr 8 rrr sss
  1035. 0 1110ss 0000rr 4 rr ss
  1036. 0 11110s 00000r 2 r s */
  1037. static inline int
  1038. encode_immediate_bitfield (int is64, uint32_t s, uint32_t r)
  1039. {
  1040. return (is64 << 12) | (r << 6) | s;
  1041. }
/* Populate simd_immediates with every valid logical immediate (one entry
   per element size E in {2,4,8,16,32,64}, run length S+1 and rotation R)
   and sort the table by immediate value so it can be binary-searched.  */
static void
build_immediate_table (void)
{
  uint32_t log_e, e, s, r, s_mask;
  uint64_t mask, imm;
  int nb_imms;
  int is64;

  nb_imms = 0;
  for (log_e = 1; log_e <= 6; log_e++)
    {
      /* Get element size.  */
      e = 1u << log_e;
      if (log_e == 6)
	{
	  is64 = 1;
	  mask = 0xffffffffffffffffull;
	  s_mask = 0;
	}
      else
	{
	  is64 = 0;
	  mask = (1ull << e) - 1;
	  /* The leading "size marker" bits merged into the S field:
	     log_e  s_mask
	     1     ((1 << 4) - 1) << 2 = 111100
	     2     ((1 << 3) - 1) << 3 = 111000
	     3     ((1 << 2) - 1) << 4 = 110000
	     4     ((1 << 1) - 1) << 5 = 100000
	     5     ((1 << 0) - 1) << 6 = 000000 */
	  s_mask = ((1u << (5 - log_e)) - 1) << (log_e + 1);
	}
      for (s = 0; s < e - 1; s++)
	for (r = 0; r < e; r++)
	  {
	    /* s+1 consecutive bits to 1 (s < 63) */
	    imm = (1ull << (s + 1)) - 1;
	    /* rotate right by r */
	    if (r != 0)
	      imm = (imm >> r) | ((imm << (e - r)) & mask);
	    /* replicate the constant depending on SIMD size */
	    switch (log_e)
	      {
	      case 1: imm = (imm << 2) | imm;
		/* Fall through.  */
	      case 2: imm = (imm << 4) | imm;
		/* Fall through.  */
	      case 3: imm = (imm << 8) | imm;
		/* Fall through.  */
	      case 4: imm = (imm << 16) | imm;
		/* Fall through.  */
	      case 5: imm = (imm << 32) | imm;
		/* Fall through.  */
	      case 6: break;
	      default: abort ();
	      }
	    simd_immediates[nb_imms].imm = imm;
	    simd_immediates[nb_imms].encoding =
	      encode_immediate_bitfield(is64, s | s_mask, r);
	    nb_imms++;
	  }
    }
  assert (nb_imms == TOTAL_IMM_NB);
  qsort(simd_immediates, nb_imms,
	sizeof(simd_immediates[0]), simd_imm_encoding_cmp);
}
/* Return TRUE if VALUE is a valid logical immediate, i.e. bitmask, that can
   be accepted by logical (immediate) instructions
   e.g. ORR <Xd|SP>, <Xn>, #<imm>.

   ESIZE is the number of bytes in the decoded immediate value.
   If ENCODING is not NULL, on the return of TRUE, the standard encoding for
   VALUE will be returned in *ENCODING.  */
bool
aarch64_logical_immediate_p (uint64_t value, int esize, aarch64_insn *encoding)
{
  simd_imm_encoding imm_enc;
  const simd_imm_encoding *imm_encoding;
  static bool initialized = false;
  uint64_t upper;
  int i;

  DEBUG_TRACE ("enter with 0x%" PRIx64 "(%" PRIi64 "), esize: %d", value,
	       value, esize);

  /* Build the sorted immediate table on first use.  */
  if (!initialized)
    {
      build_immediate_table ();
      initialized = true;
    }

  /* Allow all zeros or all ones in top bits, so that
     constant expressions like ~1 are permitted.
     UPPER is a mask of the bits above the ESIZE-byte immediate; shifting
     twice by esize*4 (instead of once by esize*8) avoids the undefined
     64-bit shift when esize == 8.  */
  upper = (uint64_t) -1 << (esize * 4) << (esize * 4);
  if ((value & ~upper) != value && (value | upper) != value)
    return false;

  /* Replicate to a full 64-bit value.  */
  value &= ~upper;
  for (i = esize * 8; i < 64; i *= 2)
    value |= (value << i);

  /* Look the replicated value up in the sorted table.  */
  imm_enc.imm = value;
  imm_encoding = (const simd_imm_encoding *)
    bsearch(&imm_enc, simd_immediates, TOTAL_IMM_NB,
            sizeof(simd_immediates[0]), simd_imm_encoding_cmp);
  if (imm_encoding == NULL)
    {
      DEBUG_TRACE ("exit with false");
      return false;
    }
  if (encoding != NULL)
    *encoding = imm_encoding->encoding;
  DEBUG_TRACE ("exit with true");
  return true;
}
  1150. /* If 64-bit immediate IMM is in the format of
  1151. "aaaaaaaabbbbbbbbccccccccddddddddeeeeeeeeffffffffgggggggghhhhhhhh",
  1152. where a, b, c, d, e, f, g and h are independently 0 or 1, return an integer
  1153. of value "abcdefgh". Otherwise return -1. */
  1154. int
  1155. aarch64_shrink_expanded_imm8 (uint64_t imm)
  1156. {
  1157. int i, ret;
  1158. uint32_t byte;
  1159. ret = 0;
  1160. for (i = 0; i < 8; i++)
  1161. {
  1162. byte = (imm >> (8 * i)) & 0xff;
  1163. if (byte == 0xff)
  1164. ret |= 1 << i;
  1165. else if (byte != 0x00)
  1166. return -1;
  1167. }
  1168. return ret;
  1169. }
  1170. /* Utility inline functions for operand_general_constraint_met_p. */
  1171. static inline void
  1172. set_error (aarch64_operand_error *mismatch_detail,
  1173. enum aarch64_operand_error_kind kind, int idx,
  1174. const char* error)
  1175. {
  1176. if (mismatch_detail == NULL)
  1177. return;
  1178. mismatch_detail->kind = kind;
  1179. mismatch_detail->index = idx;
  1180. mismatch_detail->error = error;
  1181. }
  1182. static inline void
  1183. set_syntax_error (aarch64_operand_error *mismatch_detail, int idx,
  1184. const char* error)
  1185. {
  1186. if (mismatch_detail == NULL)
  1187. return;
  1188. set_error (mismatch_detail, AARCH64_OPDE_SYNTAX_ERROR, idx, error);
  1189. }
  1190. static inline void
  1191. set_out_of_range_error (aarch64_operand_error *mismatch_detail,
  1192. int idx, int lower_bound, int upper_bound,
  1193. const char* error)
  1194. {
  1195. if (mismatch_detail == NULL)
  1196. return;
  1197. set_error (mismatch_detail, AARCH64_OPDE_OUT_OF_RANGE, idx, error);
  1198. mismatch_detail->data[0].i = lower_bound;
  1199. mismatch_detail->data[1].i = upper_bound;
  1200. }
  1201. static inline void
  1202. set_imm_out_of_range_error (aarch64_operand_error *mismatch_detail,
  1203. int idx, int lower_bound, int upper_bound)
  1204. {
  1205. if (mismatch_detail == NULL)
  1206. return;
  1207. set_out_of_range_error (mismatch_detail, idx, lower_bound, upper_bound,
  1208. _("immediate value"));
  1209. }
  1210. static inline void
  1211. set_offset_out_of_range_error (aarch64_operand_error *mismatch_detail,
  1212. int idx, int lower_bound, int upper_bound)
  1213. {
  1214. if (mismatch_detail == NULL)
  1215. return;
  1216. set_out_of_range_error (mismatch_detail, idx, lower_bound, upper_bound,
  1217. _("immediate offset"));
  1218. }
  1219. static inline void
  1220. set_regno_out_of_range_error (aarch64_operand_error *mismatch_detail,
  1221. int idx, int lower_bound, int upper_bound)
  1222. {
  1223. if (mismatch_detail == NULL)
  1224. return;
  1225. set_out_of_range_error (mismatch_detail, idx, lower_bound, upper_bound,
  1226. _("register number"));
  1227. }
  1228. static inline void
  1229. set_elem_idx_out_of_range_error (aarch64_operand_error *mismatch_detail,
  1230. int idx, int lower_bound, int upper_bound)
  1231. {
  1232. if (mismatch_detail == NULL)
  1233. return;
  1234. set_out_of_range_error (mismatch_detail, idx, lower_bound, upper_bound,
  1235. _("register element index"));
  1236. }
  1237. static inline void
  1238. set_sft_amount_out_of_range_error (aarch64_operand_error *mismatch_detail,
  1239. int idx, int lower_bound, int upper_bound)
  1240. {
  1241. if (mismatch_detail == NULL)
  1242. return;
  1243. set_out_of_range_error (mismatch_detail, idx, lower_bound, upper_bound,
  1244. _("shift amount"));
  1245. }
  1246. /* Report that the MUL modifier in operand IDX should be in the range
  1247. [LOWER_BOUND, UPPER_BOUND]. */
  1248. static inline void
  1249. set_multiplier_out_of_range_error (aarch64_operand_error *mismatch_detail,
  1250. int idx, int lower_bound, int upper_bound)
  1251. {
  1252. if (mismatch_detail == NULL)
  1253. return;
  1254. set_out_of_range_error (mismatch_detail, idx, lower_bound, upper_bound,
  1255. _("multiplier"));
  1256. }
  1257. static inline void
  1258. set_unaligned_error (aarch64_operand_error *mismatch_detail, int idx,
  1259. int alignment)
  1260. {
  1261. if (mismatch_detail == NULL)
  1262. return;
  1263. set_error (mismatch_detail, AARCH64_OPDE_UNALIGNED, idx, NULL);
  1264. mismatch_detail->data[0].i = alignment;
  1265. }
  1266. static inline void
  1267. set_reg_list_error (aarch64_operand_error *mismatch_detail, int idx,
  1268. int expected_num)
  1269. {
  1270. if (mismatch_detail == NULL)
  1271. return;
  1272. set_error (mismatch_detail, AARCH64_OPDE_REG_LIST, idx, NULL);
  1273. mismatch_detail->data[0].i = expected_num;
  1274. }
  1275. static inline void
  1276. set_other_error (aarch64_operand_error *mismatch_detail, int idx,
  1277. const char* error)
  1278. {
  1279. if (mismatch_detail == NULL)
  1280. return;
  1281. set_error (mismatch_detail, AARCH64_OPDE_OTHER_ERROR, idx, error);
  1282. }
  1283. /* General constraint checking based on operand code.
  1284. Return 1 if OPNDS[IDX] meets the general constraint of operand code TYPE
  1285. as the IDXth operand of opcode OPCODE. Otherwise return 0.
  1286. This function has to be called after the qualifiers for all operands
  1287. have been resolved.
  1288. Mismatching error message is returned in *MISMATCH_DETAIL upon request,
  1289. i.e. when MISMATCH_DETAIL is non-NULL. This avoids the generation
  1290. of error message during the disassembling where error message is not
  1291. wanted. We avoid the dynamic construction of strings of error messages
  1292. here (i.e. in libopcodes), as it is costly and complicated; instead, we
  1293. use a combination of error code, static string and some integer data to
  1294. represent an error. */
  1295. static int
  1296. operand_general_constraint_met_p (const aarch64_opnd_info *opnds, int idx,
  1297. enum aarch64_opnd type,
  1298. const aarch64_opcode *opcode,
  1299. aarch64_operand_error *mismatch_detail)
  1300. {
  1301. unsigned num, modifiers, shift;
  1302. unsigned char size;
  1303. int64_t imm, min_value, max_value;
  1304. uint64_t uvalue, mask;
  1305. const aarch64_opnd_info *opnd = opnds + idx;
  1306. aarch64_opnd_qualifier_t qualifier = opnd->qualifier;
  1307. int i;
  1308. assert (opcode->operands[idx] == opnd->type && opnd->type == type);
  1309. switch (aarch64_operands[type].op_class)
  1310. {
  1311. case AARCH64_OPND_CLASS_INT_REG:
  1312. /* Check pair reg constraints for cas* instructions. */
  1313. if (type == AARCH64_OPND_PAIRREG)
  1314. {
  1315. assert (idx == 1 || idx == 3);
  1316. if (opnds[idx - 1].reg.regno % 2 != 0)
  1317. {
  1318. set_syntax_error (mismatch_detail, idx - 1,
  1319. _("reg pair must start from even reg"));
  1320. return 0;
  1321. }
  1322. if (opnds[idx].reg.regno != opnds[idx - 1].reg.regno + 1)
  1323. {
  1324. set_syntax_error (mismatch_detail, idx,
  1325. _("reg pair must be contiguous"));
  1326. return 0;
  1327. }
  1328. break;
  1329. }
  1330. /* <Xt> may be optional in some IC and TLBI instructions. */
  1331. if (type == AARCH64_OPND_Rt_SYS)
  1332. {
  1333. assert (idx == 1 && (aarch64_get_operand_class (opnds[0].type)
  1334. == AARCH64_OPND_CLASS_SYSTEM));
  1335. if (opnds[1].present
  1336. && !aarch64_sys_ins_reg_has_xt (opnds[0].sysins_op))
  1337. {
  1338. set_other_error (mismatch_detail, idx, _("extraneous register"));
  1339. return 0;
  1340. }
  1341. if (!opnds[1].present
  1342. && aarch64_sys_ins_reg_has_xt (opnds[0].sysins_op))
  1343. {
  1344. set_other_error (mismatch_detail, idx, _("missing register"));
  1345. return 0;
  1346. }
  1347. }
  1348. switch (qualifier)
  1349. {
  1350. case AARCH64_OPND_QLF_WSP:
  1351. case AARCH64_OPND_QLF_SP:
  1352. if (!aarch64_stack_pointer_p (opnd))
  1353. {
  1354. set_other_error (mismatch_detail, idx,
  1355. _("stack pointer register expected"));
  1356. return 0;
  1357. }
  1358. break;
  1359. default:
  1360. break;
  1361. }
  1362. break;
  1363. case AARCH64_OPND_CLASS_SVE_REG:
  1364. switch (type)
  1365. {
  1366. case AARCH64_OPND_SVE_Zm3_INDEX:
  1367. case AARCH64_OPND_SVE_Zm3_22_INDEX:
  1368. case AARCH64_OPND_SVE_Zm3_11_INDEX:
  1369. case AARCH64_OPND_SVE_Zm4_11_INDEX:
  1370. case AARCH64_OPND_SVE_Zm4_INDEX:
  1371. size = get_operand_fields_width (get_operand_from_code (type));
  1372. shift = get_operand_specific_data (&aarch64_operands[type]);
  1373. mask = (1 << shift) - 1;
  1374. if (opnd->reg.regno > mask)
  1375. {
  1376. assert (mask == 7 || mask == 15);
  1377. set_other_error (mismatch_detail, idx,
  1378. mask == 15
  1379. ? _("z0-z15 expected")
  1380. : _("z0-z7 expected"));
  1381. return 0;
  1382. }
  1383. mask = (1u << (size - shift)) - 1;
  1384. if (!value_in_range_p (opnd->reglane.index, 0, mask))
  1385. {
  1386. set_elem_idx_out_of_range_error (mismatch_detail, idx, 0, mask);
  1387. return 0;
  1388. }
  1389. break;
  1390. case AARCH64_OPND_SVE_Zn_INDEX:
  1391. size = aarch64_get_qualifier_esize (opnd->qualifier);
  1392. if (!value_in_range_p (opnd->reglane.index, 0, 64 / size - 1))
  1393. {
  1394. set_elem_idx_out_of_range_error (mismatch_detail, idx,
  1395. 0, 64 / size - 1);
  1396. return 0;
  1397. }
  1398. break;
  1399. case AARCH64_OPND_SVE_ZnxN:
  1400. case AARCH64_OPND_SVE_ZtxN:
  1401. if (opnd->reglist.num_regs != get_opcode_dependent_value (opcode))
  1402. {
  1403. set_other_error (mismatch_detail, idx,
  1404. _("invalid register list"));
  1405. return 0;
  1406. }
  1407. break;
  1408. default:
  1409. break;
  1410. }
  1411. break;
  1412. case AARCH64_OPND_CLASS_PRED_REG:
  1413. if (opnd->reg.regno >= 8
  1414. && get_operand_fields_width (get_operand_from_code (type)) == 3)
  1415. {
  1416. set_other_error (mismatch_detail, idx, _("p0-p7 expected"));
  1417. return 0;
  1418. }
  1419. break;
  1420. case AARCH64_OPND_CLASS_COND:
  1421. if (type == AARCH64_OPND_COND1
  1422. && (opnds[idx].cond->value & 0xe) == 0xe)
  1423. {
  1424. /* Not allow AL or NV. */
  1425. set_syntax_error (mismatch_detail, idx, NULL);
  1426. }
  1427. break;
  1428. case AARCH64_OPND_CLASS_ADDRESS:
  1429. /* Check writeback. */
  1430. switch (opcode->iclass)
  1431. {
  1432. case ldst_pos:
  1433. case ldst_unscaled:
  1434. case ldstnapair_offs:
  1435. case ldstpair_off:
  1436. case ldst_unpriv:
  1437. if (opnd->addr.writeback == 1)
  1438. {
  1439. set_syntax_error (mismatch_detail, idx,
  1440. _("unexpected address writeback"));
  1441. return 0;
  1442. }
  1443. break;
  1444. case ldst_imm10:
  1445. if (opnd->addr.writeback == 1 && opnd->addr.preind != 1)
  1446. {
  1447. set_syntax_error (mismatch_detail, idx,
  1448. _("unexpected address writeback"));
  1449. return 0;
  1450. }
  1451. break;
  1452. case ldst_imm9:
  1453. case ldstpair_indexed:
  1454. case asisdlsep:
  1455. case asisdlsop:
  1456. if (opnd->addr.writeback == 0)
  1457. {
  1458. set_syntax_error (mismatch_detail, idx,
  1459. _("address writeback expected"));
  1460. return 0;
  1461. }
  1462. break;
  1463. default:
  1464. assert (opnd->addr.writeback == 0);
  1465. break;
  1466. }
  1467. switch (type)
  1468. {
  1469. case AARCH64_OPND_ADDR_SIMM7:
  1470. /* Scaled signed 7 bits immediate offset. */
  1471. /* Get the size of the data element that is accessed, which may be
  1472. different from that of the source register size,
  1473. e.g. in strb/ldrb. */
  1474. size = aarch64_get_qualifier_esize (opnd->qualifier);
  1475. if (!value_in_range_p (opnd->addr.offset.imm, -64 * size, 63 * size))
  1476. {
  1477. set_offset_out_of_range_error (mismatch_detail, idx,
  1478. -64 * size, 63 * size);
  1479. return 0;
  1480. }
  1481. if (!value_aligned_p (opnd->addr.offset.imm, size))
  1482. {
  1483. set_unaligned_error (mismatch_detail, idx, size);
  1484. return 0;
  1485. }
  1486. break;
  1487. case AARCH64_OPND_ADDR_OFFSET:
  1488. case AARCH64_OPND_ADDR_SIMM9:
  1489. /* Unscaled signed 9 bits immediate offset. */
  1490. if (!value_in_range_p (opnd->addr.offset.imm, -256, 255))
  1491. {
  1492. set_offset_out_of_range_error (mismatch_detail, idx, -256, 255);
  1493. return 0;
  1494. }
  1495. break;
  1496. case AARCH64_OPND_ADDR_SIMM9_2:
  1497. /* Unscaled signed 9 bits immediate offset, which has to be negative
  1498. or unaligned. */
  1499. size = aarch64_get_qualifier_esize (qualifier);
  1500. if ((value_in_range_p (opnd->addr.offset.imm, 0, 255)
  1501. && !value_aligned_p (opnd->addr.offset.imm, size))
  1502. || value_in_range_p (opnd->addr.offset.imm, -256, -1))
  1503. return 1;
  1504. set_other_error (mismatch_detail, idx,
  1505. _("negative or unaligned offset expected"));
  1506. return 0;
  1507. case AARCH64_OPND_ADDR_SIMM10:
  1508. /* Scaled signed 10 bits immediate offset. */
  1509. if (!value_in_range_p (opnd->addr.offset.imm, -4096, 4088))
  1510. {
  1511. set_offset_out_of_range_error (mismatch_detail, idx, -4096, 4088);
  1512. return 0;
  1513. }
  1514. if (!value_aligned_p (opnd->addr.offset.imm, 8))
  1515. {
  1516. set_unaligned_error (mismatch_detail, idx, 8);
  1517. return 0;
  1518. }
  1519. break;
  1520. case AARCH64_OPND_ADDR_SIMM11:
  1521. /* Signed 11 bits immediate offset (multiple of 16). */
  1522. if (!value_in_range_p (opnd->addr.offset.imm, -1024, 1008))
  1523. {
  1524. set_offset_out_of_range_error (mismatch_detail, idx, -1024, 1008);
  1525. return 0;
  1526. }
  1527. if (!value_aligned_p (opnd->addr.offset.imm, 16))
  1528. {
  1529. set_unaligned_error (mismatch_detail, idx, 16);
  1530. return 0;
  1531. }
  1532. break;
  1533. case AARCH64_OPND_ADDR_SIMM13:
  1534. /* Signed 13 bits immediate offset (multiple of 16). */
  1535. if (!value_in_range_p (opnd->addr.offset.imm, -4096, 4080))
  1536. {
  1537. set_offset_out_of_range_error (mismatch_detail, idx, -4096, 4080);
  1538. return 0;
  1539. }
  1540. if (!value_aligned_p (opnd->addr.offset.imm, 16))
  1541. {
  1542. set_unaligned_error (mismatch_detail, idx, 16);
  1543. return 0;
  1544. }
  1545. break;
  1546. case AARCH64_OPND_SIMD_ADDR_POST:
  1547. /* AdvSIMD load/store multiple structures, post-index. */
  1548. assert (idx == 1);
  1549. if (opnd->addr.offset.is_reg)
  1550. {
  1551. if (value_in_range_p (opnd->addr.offset.regno, 0, 30))
  1552. return 1;
  1553. else
  1554. {
  1555. set_other_error (mismatch_detail, idx,
  1556. _("invalid register offset"));
  1557. return 0;
  1558. }
  1559. }
  1560. else
  1561. {
  1562. const aarch64_opnd_info *prev = &opnds[idx-1];
  1563. unsigned num_bytes; /* total number of bytes transferred. */
  1564. /* The opcode dependent area stores the number of elements in
  1565. each structure to be loaded/stored. */
  1566. int is_ld1r = get_opcode_dependent_value (opcode) == 1;
  1567. if (opcode->operands[0] == AARCH64_OPND_LVt_AL)
  1568. /* Special handling of loading single structure to all lane. */
  1569. num_bytes = (is_ld1r ? 1 : prev->reglist.num_regs)
  1570. * aarch64_get_qualifier_esize (prev->qualifier);
  1571. else
  1572. num_bytes = prev->reglist.num_regs
  1573. * aarch64_get_qualifier_esize (prev->qualifier)
  1574. * aarch64_get_qualifier_nelem (prev->qualifier);
  1575. if ((int) num_bytes != opnd->addr.offset.imm)
  1576. {
  1577. set_other_error (mismatch_detail, idx,
  1578. _("invalid post-increment amount"));
  1579. return 0;
  1580. }
  1581. }
  1582. break;
  1583. case AARCH64_OPND_ADDR_REGOFF:
  1584. /* Get the size of the data element that is accessed, which may be
  1585. different from that of the source register size,
  1586. e.g. in strb/ldrb. */
  1587. size = aarch64_get_qualifier_esize (opnd->qualifier);
  1588. /* It is either no shift or shift by the binary logarithm of SIZE. */
  1589. if (opnd->shifter.amount != 0
  1590. && opnd->shifter.amount != (int)get_logsz (size))
  1591. {
  1592. set_other_error (mismatch_detail, idx,
  1593. _("invalid shift amount"));
  1594. return 0;
  1595. }
  1596. /* Only UXTW, LSL, SXTW and SXTX are the accepted extending
  1597. operators. */
  1598. switch (opnd->shifter.kind)
  1599. {
  1600. case AARCH64_MOD_UXTW:
  1601. case AARCH64_MOD_LSL:
  1602. case AARCH64_MOD_SXTW:
  1603. case AARCH64_MOD_SXTX: break;
  1604. default:
  1605. set_other_error (mismatch_detail, idx,
  1606. _("invalid extend/shift operator"));
  1607. return 0;
  1608. }
  1609. break;
  1610. case AARCH64_OPND_ADDR_UIMM12:
  1611. imm = opnd->addr.offset.imm;
  1612. /* Get the size of the data element that is accessed, which may be
  1613. different from that of the source register size,
  1614. e.g. in strb/ldrb. */
  1615. size = aarch64_get_qualifier_esize (qualifier);
  1616. if (!value_in_range_p (opnd->addr.offset.imm, 0, 4095 * size))
  1617. {
  1618. set_offset_out_of_range_error (mismatch_detail, idx,
  1619. 0, 4095 * size);
  1620. return 0;
  1621. }
  1622. if (!value_aligned_p (opnd->addr.offset.imm, size))
  1623. {
  1624. set_unaligned_error (mismatch_detail, idx, size);
  1625. return 0;
  1626. }
  1627. break;
  1628. case AARCH64_OPND_ADDR_PCREL14:
  1629. case AARCH64_OPND_ADDR_PCREL19:
  1630. case AARCH64_OPND_ADDR_PCREL21:
  1631. case AARCH64_OPND_ADDR_PCREL26:
  1632. imm = opnd->imm.value;
  1633. if (operand_need_shift_by_two (get_operand_from_code (type)))
  1634. {
  1635. /* The offset value in a PC-relative branch instruction is alway
  1636. 4-byte aligned and is encoded without the lowest 2 bits. */
  1637. if (!value_aligned_p (imm, 4))
  1638. {
  1639. set_unaligned_error (mismatch_detail, idx, 4);
  1640. return 0;
  1641. }
  1642. /* Right shift by 2 so that we can carry out the following check
  1643. canonically. */
  1644. imm >>= 2;
  1645. }
  1646. size = get_operand_fields_width (get_operand_from_code (type));
  1647. if (!value_fit_signed_field_p (imm, size))
  1648. {
  1649. set_other_error (mismatch_detail, idx,
  1650. _("immediate out of range"));
  1651. return 0;
  1652. }
  1653. break;
  1654. case AARCH64_OPND_SME_ADDR_RI_U4xVL:
  1655. if (!value_in_range_p (opnd->addr.offset.imm, 0, 15))
  1656. {
  1657. set_offset_out_of_range_error (mismatch_detail, idx, 0, 15);
  1658. return 0;
  1659. }
  1660. break;
  1661. case AARCH64_OPND_SVE_ADDR_RI_S4xVL:
  1662. case AARCH64_OPND_SVE_ADDR_RI_S4x2xVL:
  1663. case AARCH64_OPND_SVE_ADDR_RI_S4x3xVL:
  1664. case AARCH64_OPND_SVE_ADDR_RI_S4x4xVL:
  1665. min_value = -8;
  1666. max_value = 7;
  1667. sve_imm_offset_vl:
  1668. assert (!opnd->addr.offset.is_reg);
  1669. assert (opnd->addr.preind);
  1670. num = 1 + get_operand_specific_data (&aarch64_operands[type]);
  1671. min_value *= num;
  1672. max_value *= num;
  1673. if ((opnd->addr.offset.imm != 0 && !opnd->shifter.operator_present)
  1674. || (opnd->shifter.operator_present
  1675. && opnd->shifter.kind != AARCH64_MOD_MUL_VL))
  1676. {
  1677. set_other_error (mismatch_detail, idx,
  1678. _("invalid addressing mode"));
  1679. return 0;
  1680. }
  1681. if (!value_in_range_p (opnd->addr.offset.imm, min_value, max_value))
  1682. {
  1683. set_offset_out_of_range_error (mismatch_detail, idx,
  1684. min_value, max_value);
  1685. return 0;
  1686. }
  1687. if (!value_aligned_p (opnd->addr.offset.imm, num))
  1688. {
  1689. set_unaligned_error (mismatch_detail, idx, num);
  1690. return 0;
  1691. }
  1692. break;
  1693. case AARCH64_OPND_SVE_ADDR_RI_S6xVL:
  1694. min_value = -32;
  1695. max_value = 31;
  1696. goto sve_imm_offset_vl;
  1697. case AARCH64_OPND_SVE_ADDR_RI_S9xVL:
  1698. min_value = -256;
  1699. max_value = 255;
  1700. goto sve_imm_offset_vl;
  1701. case AARCH64_OPND_SVE_ADDR_RI_U6:
  1702. case AARCH64_OPND_SVE_ADDR_RI_U6x2:
  1703. case AARCH64_OPND_SVE_ADDR_RI_U6x4:
  1704. case AARCH64_OPND_SVE_ADDR_RI_U6x8:
  1705. min_value = 0;
  1706. max_value = 63;
  1707. sve_imm_offset:
  1708. assert (!opnd->addr.offset.is_reg);
  1709. assert (opnd->addr.preind);
  1710. num = 1 << get_operand_specific_data (&aarch64_operands[type]);
  1711. min_value *= num;
  1712. max_value *= num;
  1713. if (opnd->shifter.operator_present
  1714. || opnd->shifter.amount_present)
  1715. {
  1716. set_other_error (mismatch_detail, idx,
  1717. _("invalid addressing mode"));
  1718. return 0;
  1719. }
  1720. if (!value_in_range_p (opnd->addr.offset.imm, min_value, max_value))
  1721. {
  1722. set_offset_out_of_range_error (mismatch_detail, idx,
  1723. min_value, max_value);
  1724. return 0;
  1725. }
  1726. if (!value_aligned_p (opnd->addr.offset.imm, num))
  1727. {
  1728. set_unaligned_error (mismatch_detail, idx, num);
  1729. return 0;
  1730. }
  1731. break;
  1732. case AARCH64_OPND_SVE_ADDR_RI_S4x16:
  1733. case AARCH64_OPND_SVE_ADDR_RI_S4x32:
  1734. min_value = -8;
  1735. max_value = 7;
  1736. goto sve_imm_offset;
  1737. case AARCH64_OPND_SVE_ADDR_ZX:
  1738. /* Everything is already ensured by parse_operands or
  1739. aarch64_ext_sve_addr_rr_lsl (because this is a very specific
  1740. argument type). */
  1741. assert (opnd->addr.offset.is_reg);
  1742. assert (opnd->addr.preind);
  1743. assert ((aarch64_operands[type].flags & OPD_F_NO_ZR) == 0);
  1744. assert (opnd->shifter.kind == AARCH64_MOD_LSL);
  1745. assert (opnd->shifter.operator_present == 0);
  1746. break;
  1747. case AARCH64_OPND_SVE_ADDR_R:
  1748. case AARCH64_OPND_SVE_ADDR_RR:
  1749. case AARCH64_OPND_SVE_ADDR_RR_LSL1:
  1750. case AARCH64_OPND_SVE_ADDR_RR_LSL2:
  1751. case AARCH64_OPND_SVE_ADDR_RR_LSL3:
  1752. case AARCH64_OPND_SVE_ADDR_RR_LSL4:
  1753. case AARCH64_OPND_SVE_ADDR_RX:
  1754. case AARCH64_OPND_SVE_ADDR_RX_LSL1:
  1755. case AARCH64_OPND_SVE_ADDR_RX_LSL2:
  1756. case AARCH64_OPND_SVE_ADDR_RX_LSL3:
  1757. case AARCH64_OPND_SVE_ADDR_RZ:
  1758. case AARCH64_OPND_SVE_ADDR_RZ_LSL1:
  1759. case AARCH64_OPND_SVE_ADDR_RZ_LSL2:
  1760. case AARCH64_OPND_SVE_ADDR_RZ_LSL3:
  1761. modifiers = 1 << AARCH64_MOD_LSL;
  1762. sve_rr_operand:
  1763. assert (opnd->addr.offset.is_reg);
  1764. assert (opnd->addr.preind);
  1765. if ((aarch64_operands[type].flags & OPD_F_NO_ZR) != 0
  1766. && opnd->addr.offset.regno == 31)
  1767. {
  1768. set_other_error (mismatch_detail, idx,
  1769. _("index register xzr is not allowed"));
  1770. return 0;
  1771. }
  1772. if (((1 << opnd->shifter.kind) & modifiers) == 0
  1773. || (opnd->shifter.amount
  1774. != get_operand_specific_data (&aarch64_operands[type])))
  1775. {
  1776. set_other_error (mismatch_detail, idx,
  1777. _("invalid addressing mode"));
  1778. return 0;
  1779. }
  1780. break;
  1781. case AARCH64_OPND_SVE_ADDR_RZ_XTW_14:
  1782. case AARCH64_OPND_SVE_ADDR_RZ_XTW_22:
  1783. case AARCH64_OPND_SVE_ADDR_RZ_XTW1_14:
  1784. case AARCH64_OPND_SVE_ADDR_RZ_XTW1_22:
  1785. case AARCH64_OPND_SVE_ADDR_RZ_XTW2_14:
  1786. case AARCH64_OPND_SVE_ADDR_RZ_XTW2_22:
  1787. case AARCH64_OPND_SVE_ADDR_RZ_XTW3_14:
  1788. case AARCH64_OPND_SVE_ADDR_RZ_XTW3_22:
  1789. modifiers = (1 << AARCH64_MOD_SXTW) | (1 << AARCH64_MOD_UXTW);
  1790. goto sve_rr_operand;
  1791. case AARCH64_OPND_SVE_ADDR_ZI_U5:
  1792. case AARCH64_OPND_SVE_ADDR_ZI_U5x2:
  1793. case AARCH64_OPND_SVE_ADDR_ZI_U5x4:
  1794. case AARCH64_OPND_SVE_ADDR_ZI_U5x8:
  1795. min_value = 0;
  1796. max_value = 31;
  1797. goto sve_imm_offset;
  1798. case AARCH64_OPND_SVE_ADDR_ZZ_LSL:
  1799. modifiers = 1 << AARCH64_MOD_LSL;
  1800. sve_zz_operand:
  1801. assert (opnd->addr.offset.is_reg);
  1802. assert (opnd->addr.preind);
  1803. if (((1 << opnd->shifter.kind) & modifiers) == 0
  1804. || opnd->shifter.amount < 0
  1805. || opnd->shifter.amount > 3)
  1806. {
  1807. set_other_error (mismatch_detail, idx,
  1808. _("invalid addressing mode"));
  1809. return 0;
  1810. }
  1811. break;
  1812. case AARCH64_OPND_SVE_ADDR_ZZ_SXTW:
  1813. modifiers = (1 << AARCH64_MOD_SXTW);
  1814. goto sve_zz_operand;
  1815. case AARCH64_OPND_SVE_ADDR_ZZ_UXTW:
  1816. modifiers = 1 << AARCH64_MOD_UXTW;
  1817. goto sve_zz_operand;
  1818. default:
  1819. break;
  1820. }
  1821. break;
  1822. case AARCH64_OPND_CLASS_SIMD_REGLIST:
  1823. if (type == AARCH64_OPND_LEt)
  1824. {
  1825. /* Get the upper bound for the element index. */
  1826. num = 16 / aarch64_get_qualifier_esize (qualifier) - 1;
  1827. if (!value_in_range_p (opnd->reglist.index, 0, num))
  1828. {
  1829. set_elem_idx_out_of_range_error (mismatch_detail, idx, 0, num);
  1830. return 0;
  1831. }
  1832. }
  1833. /* The opcode dependent area stores the number of elements in
  1834. each structure to be loaded/stored. */
  1835. num = get_opcode_dependent_value (opcode);
  1836. switch (type)
  1837. {
  1838. case AARCH64_OPND_LVt:
  1839. assert (num >= 1 && num <= 4);
  1840. /* Unless LD1/ST1, the number of registers should be equal to that
  1841. of the structure elements. */
  1842. if (num != 1 && opnd->reglist.num_regs != num)
  1843. {
  1844. set_reg_list_error (mismatch_detail, idx, num);
  1845. return 0;
  1846. }
  1847. break;
  1848. case AARCH64_OPND_LVt_AL:
  1849. case AARCH64_OPND_LEt:
  1850. assert (num >= 1 && num <= 4);
  1851. /* The number of registers should be equal to that of the structure
  1852. elements. */
  1853. if (opnd->reglist.num_regs != num)
  1854. {
  1855. set_reg_list_error (mismatch_detail, idx, num);
  1856. return 0;
  1857. }
  1858. break;
  1859. default:
  1860. break;
  1861. }
  1862. break;
  1863. case AARCH64_OPND_CLASS_IMMEDIATE:
  1864. /* Constraint check on immediate operand. */
  1865. imm = opnd->imm.value;
  1866. /* E.g. imm_0_31 constrains value to be 0..31. */
  1867. if (qualifier_value_in_range_constraint_p (qualifier)
  1868. && !value_in_range_p (imm, get_lower_bound (qualifier),
  1869. get_upper_bound (qualifier)))
  1870. {
  1871. set_imm_out_of_range_error (mismatch_detail, idx,
  1872. get_lower_bound (qualifier),
  1873. get_upper_bound (qualifier));
  1874. return 0;
  1875. }
  1876. switch (type)
  1877. {
  1878. case AARCH64_OPND_AIMM:
  1879. if (opnd->shifter.kind != AARCH64_MOD_LSL)
  1880. {
  1881. set_other_error (mismatch_detail, idx,
  1882. _("invalid shift operator"));
  1883. return 0;
  1884. }
  1885. if (opnd->shifter.amount != 0 && opnd->shifter.amount != 12)
  1886. {
  1887. set_other_error (mismatch_detail, idx,
  1888. _("shift amount must be 0 or 12"));
  1889. return 0;
  1890. }
  1891. if (!value_fit_unsigned_field_p (opnd->imm.value, 12))
  1892. {
  1893. set_other_error (mismatch_detail, idx,
  1894. _("immediate out of range"));
  1895. return 0;
  1896. }
  1897. break;
  1898. case AARCH64_OPND_HALF:
  1899. assert (idx == 1 && opnds[0].type == AARCH64_OPND_Rd);
  1900. if (opnd->shifter.kind != AARCH64_MOD_LSL)
  1901. {
  1902. set_other_error (mismatch_detail, idx,
  1903. _("invalid shift operator"));
  1904. return 0;
  1905. }
  1906. size = aarch64_get_qualifier_esize (opnds[0].qualifier);
  1907. if (!value_aligned_p (opnd->shifter.amount, 16))
  1908. {
  1909. set_other_error (mismatch_detail, idx,
  1910. _("shift amount must be a multiple of 16"));
  1911. return 0;
  1912. }
  1913. if (!value_in_range_p (opnd->shifter.amount, 0, size * 8 - 16))
  1914. {
  1915. set_sft_amount_out_of_range_error (mismatch_detail, idx,
  1916. 0, size * 8 - 16);
  1917. return 0;
  1918. }
  1919. if (opnd->imm.value < 0)
  1920. {
  1921. set_other_error (mismatch_detail, idx,
  1922. _("negative immediate value not allowed"));
  1923. return 0;
  1924. }
  1925. if (!value_fit_unsigned_field_p (opnd->imm.value, 16))
  1926. {
  1927. set_other_error (mismatch_detail, idx,
  1928. _("immediate out of range"));
  1929. return 0;
  1930. }
  1931. break;
  1932. case AARCH64_OPND_IMM_MOV:
  1933. {
  1934. int esize = aarch64_get_qualifier_esize (opnds[0].qualifier);
  1935. imm = opnd->imm.value;
  1936. assert (idx == 1);
  1937. switch (opcode->op)
  1938. {
  1939. case OP_MOV_IMM_WIDEN:
  1940. imm = ~imm;
  1941. /* Fall through. */
  1942. case OP_MOV_IMM_WIDE:
  1943. if (!aarch64_wide_constant_p (imm, esize == 4, NULL))
  1944. {
  1945. set_other_error (mismatch_detail, idx,
  1946. _("immediate out of range"));
  1947. return 0;
  1948. }
  1949. break;
  1950. case OP_MOV_IMM_LOG:
  1951. if (!aarch64_logical_immediate_p (imm, esize, NULL))
  1952. {
  1953. set_other_error (mismatch_detail, idx,
  1954. _("immediate out of range"));
  1955. return 0;
  1956. }
  1957. break;
  1958. default:
  1959. assert (0);
  1960. return 0;
  1961. }
  1962. }
  1963. break;
  1964. case AARCH64_OPND_NZCV:
  1965. case AARCH64_OPND_CCMP_IMM:
  1966. case AARCH64_OPND_EXCEPTION:
  1967. case AARCH64_OPND_UNDEFINED:
  1968. case AARCH64_OPND_TME_UIMM16:
  1969. case AARCH64_OPND_UIMM4:
  1970. case AARCH64_OPND_UIMM4_ADDG:
  1971. case AARCH64_OPND_UIMM7:
  1972. case AARCH64_OPND_UIMM3_OP1:
  1973. case AARCH64_OPND_UIMM3_OP2:
  1974. case AARCH64_OPND_SVE_UIMM3:
  1975. case AARCH64_OPND_SVE_UIMM7:
  1976. case AARCH64_OPND_SVE_UIMM8:
  1977. case AARCH64_OPND_SVE_UIMM8_53:
  1978. size = get_operand_fields_width (get_operand_from_code (type));
  1979. assert (size < 32);
  1980. if (!value_fit_unsigned_field_p (opnd->imm.value, size))
  1981. {
  1982. set_imm_out_of_range_error (mismatch_detail, idx, 0,
  1983. (1u << size) - 1);
  1984. return 0;
  1985. }
  1986. break;
  1987. case AARCH64_OPND_UIMM10:
  1988. /* Scaled unsigned 10 bits immediate offset. */
  1989. if (!value_in_range_p (opnd->imm.value, 0, 1008))
  1990. {
  1991. set_imm_out_of_range_error (mismatch_detail, idx, 0, 1008);
  1992. return 0;
  1993. }
  1994. if (!value_aligned_p (opnd->imm.value, 16))
  1995. {
  1996. set_unaligned_error (mismatch_detail, idx, 16);
  1997. return 0;
  1998. }
  1999. break;
  2000. case AARCH64_OPND_SIMM5:
  2001. case AARCH64_OPND_SVE_SIMM5:
  2002. case AARCH64_OPND_SVE_SIMM5B:
  2003. case AARCH64_OPND_SVE_SIMM6:
  2004. case AARCH64_OPND_SVE_SIMM8:
  2005. size = get_operand_fields_width (get_operand_from_code (type));
  2006. assert (size < 32);
  2007. if (!value_fit_signed_field_p (opnd->imm.value, size))
  2008. {
  2009. set_imm_out_of_range_error (mismatch_detail, idx,
  2010. -(1 << (size - 1)),
  2011. (1 << (size - 1)) - 1);
  2012. return 0;
  2013. }
  2014. break;
  2015. case AARCH64_OPND_WIDTH:
  2016. assert (idx > 1 && opnds[idx-1].type == AARCH64_OPND_IMM
  2017. && opnds[0].type == AARCH64_OPND_Rd);
  2018. size = get_upper_bound (qualifier);
  2019. if (opnd->imm.value + opnds[idx-1].imm.value > size)
  2020. /* lsb+width <= reg.size */
  2021. {
  2022. set_imm_out_of_range_error (mismatch_detail, idx, 1,
  2023. size - opnds[idx-1].imm.value);
  2024. return 0;
  2025. }
  2026. break;
  2027. case AARCH64_OPND_LIMM:
  2028. case AARCH64_OPND_SVE_LIMM:
  2029. {
  2030. int esize = aarch64_get_qualifier_esize (opnds[0].qualifier);
  2031. uint64_t uimm = opnd->imm.value;
  2032. if (opcode->op == OP_BIC)
  2033. uimm = ~uimm;
  2034. if (!aarch64_logical_immediate_p (uimm, esize, NULL))
  2035. {
  2036. set_other_error (mismatch_detail, idx,
  2037. _("immediate out of range"));
  2038. return 0;
  2039. }
  2040. }
  2041. break;
  2042. case AARCH64_OPND_IMM0:
  2043. case AARCH64_OPND_FPIMM0:
  2044. if (opnd->imm.value != 0)
  2045. {
  2046. set_other_error (mismatch_detail, idx,
  2047. _("immediate zero expected"));
  2048. return 0;
  2049. }
  2050. break;
  2051. case AARCH64_OPND_IMM_ROT1:
  2052. case AARCH64_OPND_IMM_ROT2:
  2053. case AARCH64_OPND_SVE_IMM_ROT2:
  2054. if (opnd->imm.value != 0
  2055. && opnd->imm.value != 90
  2056. && opnd->imm.value != 180
  2057. && opnd->imm.value != 270)
  2058. {
  2059. set_other_error (mismatch_detail, idx,
  2060. _("rotate expected to be 0, 90, 180 or 270"));
  2061. return 0;
  2062. }
  2063. break;
  2064. case AARCH64_OPND_IMM_ROT3:
  2065. case AARCH64_OPND_SVE_IMM_ROT1:
  2066. case AARCH64_OPND_SVE_IMM_ROT3:
  2067. if (opnd->imm.value != 90 && opnd->imm.value != 270)
  2068. {
  2069. set_other_error (mismatch_detail, idx,
  2070. _("rotate expected to be 90 or 270"));
  2071. return 0;
  2072. }
  2073. break;
  2074. case AARCH64_OPND_SHLL_IMM:
  2075. assert (idx == 2);
  2076. size = 8 * aarch64_get_qualifier_esize (opnds[idx - 1].qualifier);
  2077. if (opnd->imm.value != size)
  2078. {
  2079. set_other_error (mismatch_detail, idx,
  2080. _("invalid shift amount"));
  2081. return 0;
  2082. }
  2083. break;
  2084. case AARCH64_OPND_IMM_VLSL:
  2085. size = aarch64_get_qualifier_esize (qualifier);
  2086. if (!value_in_range_p (opnd->imm.value, 0, size * 8 - 1))
  2087. {
  2088. set_imm_out_of_range_error (mismatch_detail, idx, 0,
  2089. size * 8 - 1);
  2090. return 0;
  2091. }
  2092. break;
  2093. case AARCH64_OPND_IMM_VLSR:
  2094. size = aarch64_get_qualifier_esize (qualifier);
  2095. if (!value_in_range_p (opnd->imm.value, 1, size * 8))
  2096. {
  2097. set_imm_out_of_range_error (mismatch_detail, idx, 1, size * 8);
  2098. return 0;
  2099. }
  2100. break;
  2101. case AARCH64_OPND_SIMD_IMM:
  2102. case AARCH64_OPND_SIMD_IMM_SFT:
  2103. /* Qualifier check. */
  2104. switch (qualifier)
  2105. {
  2106. case AARCH64_OPND_QLF_LSL:
  2107. if (opnd->shifter.kind != AARCH64_MOD_LSL)
  2108. {
  2109. set_other_error (mismatch_detail, idx,
  2110. _("invalid shift operator"));
  2111. return 0;
  2112. }
  2113. break;
  2114. case AARCH64_OPND_QLF_MSL:
  2115. if (opnd->shifter.kind != AARCH64_MOD_MSL)
  2116. {
  2117. set_other_error (mismatch_detail, idx,
  2118. _("invalid shift operator"));
  2119. return 0;
  2120. }
  2121. break;
  2122. case AARCH64_OPND_QLF_NIL:
  2123. if (opnd->shifter.kind != AARCH64_MOD_NONE)
  2124. {
  2125. set_other_error (mismatch_detail, idx,
  2126. _("shift is not permitted"));
  2127. return 0;
  2128. }
  2129. break;
  2130. default:
  2131. assert (0);
  2132. return 0;
  2133. }
  2134. /* Is the immediate valid? */
  2135. assert (idx == 1);
  2136. if (aarch64_get_qualifier_esize (opnds[0].qualifier) != 8)
  2137. {
  2138. /* uimm8 or simm8 */
  2139. if (!value_in_range_p (opnd->imm.value, -128, 255))
  2140. {
  2141. set_imm_out_of_range_error (mismatch_detail, idx, -128, 255);
  2142. return 0;
  2143. }
  2144. }
  2145. else if (aarch64_shrink_expanded_imm8 (opnd->imm.value) < 0)
  2146. {
  2147. /* uimm64 is not
  2148. 'aaaaaaaabbbbbbbbccccccccddddddddeeeeeeee
  2149. ffffffffgggggggghhhhhhhh'. */
  2150. set_other_error (mismatch_detail, idx,
  2151. _("invalid value for immediate"));
  2152. return 0;
  2153. }
  2154. /* Is the shift amount valid? */
  2155. switch (opnd->shifter.kind)
  2156. {
  2157. case AARCH64_MOD_LSL:
  2158. size = aarch64_get_qualifier_esize (opnds[0].qualifier);
  2159. if (!value_in_range_p (opnd->shifter.amount, 0, (size - 1) * 8))
  2160. {
  2161. set_sft_amount_out_of_range_error (mismatch_detail, idx, 0,
  2162. (size - 1) * 8);
  2163. return 0;
  2164. }
  2165. if (!value_aligned_p (opnd->shifter.amount, 8))
  2166. {
  2167. set_unaligned_error (mismatch_detail, idx, 8);
  2168. return 0;
  2169. }
  2170. break;
  2171. case AARCH64_MOD_MSL:
  2172. /* Only 8 and 16 are valid shift amount. */
  2173. if (opnd->shifter.amount != 8 && opnd->shifter.amount != 16)
  2174. {
  2175. set_other_error (mismatch_detail, idx,
  2176. _("shift amount must be 0 or 16"));
  2177. return 0;
  2178. }
  2179. break;
  2180. default:
  2181. if (opnd->shifter.kind != AARCH64_MOD_NONE)
  2182. {
  2183. set_other_error (mismatch_detail, idx,
  2184. _("invalid shift operator"));
  2185. return 0;
  2186. }
  2187. break;
  2188. }
  2189. break;
  2190. case AARCH64_OPND_FPIMM:
  2191. case AARCH64_OPND_SIMD_FPIMM:
  2192. case AARCH64_OPND_SVE_FPIMM8:
  2193. if (opnd->imm.is_fp == 0)
  2194. {
  2195. set_other_error (mismatch_detail, idx,
  2196. _("floating-point immediate expected"));
  2197. return 0;
  2198. }
  2199. /* The value is expected to be an 8-bit floating-point constant with
  2200. sign, 3-bit exponent and normalized 4 bits of precision, encoded
  2201. in "a:b:c:d:e:f:g:h" or FLD_imm8 (depending on the type of the
  2202. instruction). */
  2203. if (!value_in_range_p (opnd->imm.value, 0, 255))
  2204. {
  2205. set_other_error (mismatch_detail, idx,
  2206. _("immediate out of range"));
  2207. return 0;
  2208. }
  2209. if (opnd->shifter.kind != AARCH64_MOD_NONE)
  2210. {
  2211. set_other_error (mismatch_detail, idx,
  2212. _("invalid shift operator"));
  2213. return 0;
  2214. }
  2215. break;
  2216. case AARCH64_OPND_SVE_AIMM:
  2217. min_value = 0;
  2218. sve_aimm:
  2219. assert (opnd->shifter.kind == AARCH64_MOD_LSL);
  2220. size = aarch64_get_qualifier_esize (opnds[0].qualifier);
  2221. mask = ~((uint64_t) -1 << (size * 4) << (size * 4));
  2222. uvalue = opnd->imm.value;
  2223. shift = opnd->shifter.amount;
  2224. if (size == 1)
  2225. {
  2226. if (shift != 0)
  2227. {
  2228. set_other_error (mismatch_detail, idx,
  2229. _("no shift amount allowed for"
  2230. " 8-bit constants"));
  2231. return 0;
  2232. }
  2233. }
  2234. else
  2235. {
  2236. if (shift != 0 && shift != 8)
  2237. {
  2238. set_other_error (mismatch_detail, idx,
  2239. _("shift amount must be 0 or 8"));
  2240. return 0;
  2241. }
  2242. if (shift == 0 && (uvalue & 0xff) == 0)
  2243. {
  2244. shift = 8;
  2245. uvalue = (int64_t) uvalue / 256;
  2246. }
  2247. }
  2248. mask >>= shift;
  2249. if ((uvalue & mask) != uvalue && (uvalue | ~mask) != uvalue)
  2250. {
  2251. set_other_error (mismatch_detail, idx,
  2252. _("immediate too big for element size"));
  2253. return 0;
  2254. }
  2255. uvalue = (uvalue - min_value) & mask;
  2256. if (uvalue > 0xff)
  2257. {
  2258. set_other_error (mismatch_detail, idx,
  2259. _("invalid arithmetic immediate"));
  2260. return 0;
  2261. }
  2262. break;
  2263. case AARCH64_OPND_SVE_ASIMM:
  2264. min_value = -128;
  2265. goto sve_aimm;
  2266. case AARCH64_OPND_SVE_I1_HALF_ONE:
  2267. assert (opnd->imm.is_fp);
  2268. if (opnd->imm.value != 0x3f000000 && opnd->imm.value != 0x3f800000)
  2269. {
  2270. set_other_error (mismatch_detail, idx,
  2271. _("floating-point value must be 0.5 or 1.0"));
  2272. return 0;
  2273. }
  2274. break;
  2275. case AARCH64_OPND_SVE_I1_HALF_TWO:
  2276. assert (opnd->imm.is_fp);
  2277. if (opnd->imm.value != 0x3f000000 && opnd->imm.value != 0x40000000)
  2278. {
  2279. set_other_error (mismatch_detail, idx,
  2280. _("floating-point value must be 0.5 or 2.0"));
  2281. return 0;
  2282. }
  2283. break;
  2284. case AARCH64_OPND_SVE_I1_ZERO_ONE:
  2285. assert (opnd->imm.is_fp);
  2286. if (opnd->imm.value != 0 && opnd->imm.value != 0x3f800000)
  2287. {
  2288. set_other_error (mismatch_detail, idx,
  2289. _("floating-point value must be 0.0 or 1.0"));
  2290. return 0;
  2291. }
  2292. break;
  2293. case AARCH64_OPND_SVE_INV_LIMM:
  2294. {
  2295. int esize = aarch64_get_qualifier_esize (opnds[0].qualifier);
  2296. uint64_t uimm = ~opnd->imm.value;
  2297. if (!aarch64_logical_immediate_p (uimm, esize, NULL))
  2298. {
  2299. set_other_error (mismatch_detail, idx,
  2300. _("immediate out of range"));
  2301. return 0;
  2302. }
  2303. }
  2304. break;
  2305. case AARCH64_OPND_SVE_LIMM_MOV:
  2306. {
  2307. int esize = aarch64_get_qualifier_esize (opnds[0].qualifier);
  2308. uint64_t uimm = opnd->imm.value;
  2309. if (!aarch64_logical_immediate_p (uimm, esize, NULL))
  2310. {
  2311. set_other_error (mismatch_detail, idx,
  2312. _("immediate out of range"));
  2313. return 0;
  2314. }
  2315. if (!aarch64_sve_dupm_mov_immediate_p (uimm, esize))
  2316. {
  2317. set_other_error (mismatch_detail, idx,
  2318. _("invalid replicated MOV immediate"));
  2319. return 0;
  2320. }
  2321. }
  2322. break;
  2323. case AARCH64_OPND_SVE_PATTERN_SCALED:
  2324. assert (opnd->shifter.kind == AARCH64_MOD_MUL);
  2325. if (!value_in_range_p (opnd->shifter.amount, 1, 16))
  2326. {
  2327. set_multiplier_out_of_range_error (mismatch_detail, idx, 1, 16);
  2328. return 0;
  2329. }
  2330. break;
  2331. case AARCH64_OPND_SVE_SHLIMM_PRED:
  2332. case AARCH64_OPND_SVE_SHLIMM_UNPRED:
  2333. case AARCH64_OPND_SVE_SHLIMM_UNPRED_22:
  2334. size = aarch64_get_qualifier_esize (opnds[idx - 1].qualifier);
  2335. if (!value_in_range_p (opnd->imm.value, 0, 8 * size - 1))
  2336. {
  2337. set_imm_out_of_range_error (mismatch_detail, idx,
  2338. 0, 8 * size - 1);
  2339. return 0;
  2340. }
  2341. break;
  2342. case AARCH64_OPND_SVE_SHRIMM_PRED:
  2343. case AARCH64_OPND_SVE_SHRIMM_UNPRED:
  2344. case AARCH64_OPND_SVE_SHRIMM_UNPRED_22:
  2345. num = (type == AARCH64_OPND_SVE_SHRIMM_UNPRED_22) ? 2 : 1;
  2346. size = aarch64_get_qualifier_esize (opnds[idx - num].qualifier);
  2347. if (!value_in_range_p (opnd->imm.value, 1, 8 * size))
  2348. {
  2349. set_imm_out_of_range_error (mismatch_detail, idx, 1, 8*size);
  2350. return 0;
  2351. }
  2352. break;
  2353. default:
  2354. break;
  2355. }
  2356. break;
  2357. case AARCH64_OPND_CLASS_SYSTEM:
  2358. switch (type)
  2359. {
  2360. case AARCH64_OPND_PSTATEFIELD:
  2361. for (i = 0; aarch64_pstatefields[i].name; ++i)
  2362. if (aarch64_pstatefields[i].value == opnd->pstatefield)
  2363. break;
  2364. assert (aarch64_pstatefields[i].name);
  2365. assert (idx == 0 && opnds[1].type == AARCH64_OPND_UIMM4);
  2366. max_value = F_GET_REG_MAX_VALUE (aarch64_pstatefields[i].flags);
  2367. if (opnds[1].imm.value < 0 || opnds[1].imm.value > max_value)
  2368. {
  2369. set_imm_out_of_range_error (mismatch_detail, 1, 0, max_value);
  2370. return 0;
  2371. }
  2372. break;
  2373. default:
  2374. break;
  2375. }
  2376. break;
  2377. case AARCH64_OPND_CLASS_SIMD_ELEMENT:
  2378. /* Get the upper bound for the element index. */
  2379. if (opcode->op == OP_FCMLA_ELEM)
  2380. /* FCMLA index range depends on the vector size of other operands
  2381. and is halfed because complex numbers take two elements. */
  2382. num = aarch64_get_qualifier_nelem (opnds[0].qualifier)
  2383. * aarch64_get_qualifier_esize (opnds[0].qualifier) / 2;
  2384. else
  2385. num = 16;
  2386. num = num / aarch64_get_qualifier_esize (qualifier) - 1;
  2387. assert (aarch64_get_qualifier_nelem (qualifier) == 1);
  2388. /* Index out-of-range. */
  2389. if (!value_in_range_p (opnd->reglane.index, 0, num))
  2390. {
  2391. set_elem_idx_out_of_range_error (mismatch_detail, idx, 0, num);
  2392. return 0;
  2393. }
  2394. /* SMLAL<Q> <Vd>.<Ta>, <Vn>.<Tb>, <Vm>.<Ts>[<index>].
  2395. <Vm> Is the vector register (V0-V31) or (V0-V15), whose
  2396. number is encoded in "size:M:Rm":
  2397. size <Vm>
  2398. 00 RESERVED
  2399. 01 0:Rm
  2400. 10 M:Rm
  2401. 11 RESERVED */
  2402. if (type == AARCH64_OPND_Em16 && qualifier == AARCH64_OPND_QLF_S_H
  2403. && !value_in_range_p (opnd->reglane.regno, 0, 15))
  2404. {
  2405. set_regno_out_of_range_error (mismatch_detail, idx, 0, 15);
  2406. return 0;
  2407. }
  2408. break;
  2409. case AARCH64_OPND_CLASS_MODIFIED_REG:
  2410. assert (idx == 1 || idx == 2);
  2411. switch (type)
  2412. {
  2413. case AARCH64_OPND_Rm_EXT:
  2414. if (!aarch64_extend_operator_p (opnd->shifter.kind)
  2415. && opnd->shifter.kind != AARCH64_MOD_LSL)
  2416. {
  2417. set_other_error (mismatch_detail, idx,
  2418. _("extend operator expected"));
  2419. return 0;
  2420. }
  2421. /* It is not optional unless at least one of "Rd" or "Rn" is '11111'
  2422. (i.e. SP), in which case it defaults to LSL. The LSL alias is
  2423. only valid when "Rd" or "Rn" is '11111', and is preferred in that
  2424. case. */
  2425. if (!aarch64_stack_pointer_p (opnds + 0)
  2426. && (idx != 2 || !aarch64_stack_pointer_p (opnds + 1)))
  2427. {
  2428. if (!opnd->shifter.operator_present)
  2429. {
  2430. set_other_error (mismatch_detail, idx,
  2431. _("missing extend operator"));
  2432. return 0;
  2433. }
  2434. else if (opnd->shifter.kind == AARCH64_MOD_LSL)
  2435. {
  2436. set_other_error (mismatch_detail, idx,
  2437. _("'LSL' operator not allowed"));
  2438. return 0;
  2439. }
  2440. }
  2441. assert (opnd->shifter.operator_present /* Default to LSL. */
  2442. || opnd->shifter.kind == AARCH64_MOD_LSL);
  2443. if (!value_in_range_p (opnd->shifter.amount, 0, 4))
  2444. {
  2445. set_sft_amount_out_of_range_error (mismatch_detail, idx, 0, 4);
  2446. return 0;
  2447. }
  2448. /* In the 64-bit form, the final register operand is written as Wm
  2449. for all but the (possibly omitted) UXTX/LSL and SXTX
  2450. operators.
  2451. N.B. GAS allows X register to be used with any operator as a
  2452. programming convenience. */
  2453. if (qualifier == AARCH64_OPND_QLF_X
  2454. && opnd->shifter.kind != AARCH64_MOD_LSL
  2455. && opnd->shifter.kind != AARCH64_MOD_UXTX
  2456. && opnd->shifter.kind != AARCH64_MOD_SXTX)
  2457. {
  2458. set_other_error (mismatch_detail, idx, _("W register expected"));
  2459. return 0;
  2460. }
  2461. break;
  2462. case AARCH64_OPND_Rm_SFT:
  2463. /* ROR is not available to the shifted register operand in
  2464. arithmetic instructions. */
  2465. if (!aarch64_shift_operator_p (opnd->shifter.kind))
  2466. {
  2467. set_other_error (mismatch_detail, idx,
  2468. _("shift operator expected"));
  2469. return 0;
  2470. }
  2471. if (opnd->shifter.kind == AARCH64_MOD_ROR
  2472. && opcode->iclass != log_shift)
  2473. {
  2474. set_other_error (mismatch_detail, idx,
  2475. _("'ROR' operator not allowed"));
  2476. return 0;
  2477. }
  2478. num = qualifier == AARCH64_OPND_QLF_W ? 31 : 63;
  2479. if (!value_in_range_p (opnd->shifter.amount, 0, num))
  2480. {
  2481. set_sft_amount_out_of_range_error (mismatch_detail, idx, 0, num);
  2482. return 0;
  2483. }
  2484. break;
  2485. default:
  2486. break;
  2487. }
  2488. break;
  2489. default:
  2490. break;
  2491. }
  2492. return 1;
  2493. }
/* Main entrypoint for the operand constraint checking.

   Return 1 if operands of *INST meet the constraint applied by the operand
   codes and operand qualifiers; otherwise return 0 and if MISMATCH_DETAIL is
   not NULL, return the detail of the error in *MISMATCH_DETAIL.  N.B. when
   adding more constraint checking, make sure MISMATCH_DETAIL->KIND is set
   with a proper error kind rather than AARCH64_OPDE_NIL (GAS asserts non-NIL
   error kind when it is notified that an instruction does not pass the check).

   Un-determined operand qualifiers may get established during the process.  */

int
aarch64_match_operands_constraint (aarch64_inst *inst,
				   aarch64_operand_error *mismatch_detail)
{
  int i;

  DEBUG_TRACE ("enter");

  /* TIED_OPERAND > 0 means operand 0 and operand TIED_OPERAND must agree;
     this is checked before the qualifier matching below so that a tying
     error takes priority over a (less helpful) INVALID_VARIANT error.  */
  i = inst->opcode->tied_operand;

  if (i > 0)
    {
      /* Check for tied_operands with specific opcode iclass.  */
      switch (inst->opcode->iclass)
	{
	/* For SME LDR and STR instructions #imm must have the same numerical
	   value for both operands.  */
	case sme_ldr:
	case sme_str:
	  assert (inst->operands[0].type == AARCH64_OPND_SME_ZA_array);
	  assert (inst->operands[1].type == AARCH64_OPND_SME_ADDR_RI_U4xVL);
	  /* The ZA-array vector index and the address offset immediate
	     are distinct fields but must encode the same value.  */
	  if (inst->operands[0].za_tile_vector.index.imm
	      != inst->operands[1].addr.offset.imm)
	    {
	      if (mismatch_detail)
		{
		  mismatch_detail->kind = AARCH64_OPDE_UNTIED_IMMS;
		  mismatch_detail->index = i;
		}
	      return 0;
	    }
	  break;

	default:
	  /* Check for cases where a source register needs to be the same as the
	     destination register.  Do this before matching qualifiers since if
	     an instruction has both invalid tying and invalid qualifiers,
	     the error about qualifiers would suggest several alternative
	     instructions that also have invalid tying.  */
	  if (inst->operands[0].reg.regno
	      != inst->operands[i].reg.regno)
	    {
	      if (mismatch_detail)
		{
		  mismatch_detail->kind = AARCH64_OPDE_UNTIED_OPERAND;
		  mismatch_detail->index = i;
		  mismatch_detail->error = NULL;
		}
	      return 0;
	    }
	  break;
	}
    }

  /* Match operands' qualifier.
     *INST has already had qualifier establish for some, if not all, of
     its operands; we need to find out whether these established
     qualifiers match one of the qualifier sequence in
     INST->OPCODE->QUALIFIERS_LIST.  If yes, we will assign each operand
     with the corresponding qualifier in such a sequence.
     Only basic operand constraint checking is done here; the more thorough
     constraint checking will carried out by operand_general_constraint_met_p,
     which has be to called after this in order to get all of the operands'
     qualifiers established.  */
  if (match_operands_qualifier (inst, true /* update_p */) == 0)
    {
      DEBUG_TRACE ("FAIL on operand qualifier matching");
      if (mismatch_detail)
	{
	  /* Return an error type to indicate that it is the qualifier
	     matching failure; we don't care about which operand as there
	     are enough information in the opcode table to reproduce it.  */
	  mismatch_detail->kind = AARCH64_OPDE_INVALID_VARIANT;
	  mismatch_detail->index = -1;
	  mismatch_detail->error = NULL;
	}
      return 0;
    }

  /* Match operands' constraint.  */
  for (i = 0; i < AARCH64_MAX_OPND_NUM; ++i)
    {
      enum aarch64_opnd type = inst->opcode->operands[i];
      /* AARCH64_OPND_NIL terminates the operand list.  */
      if (type == AARCH64_OPND_NIL)
	break;
      if (inst->operands[i].skip)
	{
	  DEBUG_TRACE ("skip the incomplete operand %d", i);
	  continue;
	}
      if (operand_general_constraint_met_p (inst->operands, i, type,
					    inst->opcode, mismatch_detail) == 0)
	{
	  DEBUG_TRACE ("FAIL on operand %d", i);
	  return 0;
	}
    }

  DEBUG_TRACE ("PASS");
  return 1;
}
  2597. /* Replace INST->OPCODE with OPCODE and return the replaced OPCODE.
  2598. Also updates the TYPE of each INST->OPERANDS with the corresponding
  2599. value of OPCODE->OPERANDS.
  2600. Note that some operand qualifiers may need to be manually cleared by
  2601. the caller before it further calls the aarch64_opcode_encode; by
  2602. doing this, it helps the qualifier matching facilities work
  2603. properly. */
  2604. const aarch64_opcode*
  2605. aarch64_replace_opcode (aarch64_inst *inst, const aarch64_opcode *opcode)
  2606. {
  2607. int i;
  2608. const aarch64_opcode *old = inst->opcode;
  2609. inst->opcode = opcode;
  2610. /* Update the operand types. */
  2611. for (i = 0; i < AARCH64_MAX_OPND_NUM; ++i)
  2612. {
  2613. inst->operands[i].type = opcode->operands[i];
  2614. if (opcode->operands[i] == AARCH64_OPND_NIL)
  2615. break;
  2616. }
  2617. DEBUG_TRACE ("replace %s with %s", old->name, opcode->name);
  2618. return old;
  2619. }
  2620. int
  2621. aarch64_operand_index (const enum aarch64_opnd *operands, enum aarch64_opnd operand)
  2622. {
  2623. int i;
  2624. for (i = 0; i < AARCH64_MAX_OPND_NUM; ++i)
  2625. if (operands[i] == operand)
  2626. return i;
  2627. else if (operands[i] == AARCH64_OPND_NIL)
  2628. break;
  2629. return -1;
  2630. }
/* R0...R30, followed by FOR31.  R is a macro producing the name for
   register number 0..30; FOR31 is the literal string used for register
   number 31 (either the stack pointer or the zero register, depending
   on the bank).  */
#define BANK(R, FOR31) \
  { R (0), R (1), R (2), R (3), R (4), R (5), R (6), R (7), \
    R (8), R (9), R (10), R (11), R (12), R (13), R (14), R (15), \
    R (16), R (17), R (18), R (19), R (20), R (21), R (22), R (23), \
    R (24), R (25), R (26), R (27), R (28), R (29), R (30), FOR31 }

/* Integer register names, indexed as [has_zr][is_64][regno]
   (see get_int_reg_name):
   [0][0] 32-bit integer regs with sp   Wn
   [0][1] 64-bit integer regs with sp   Xn  sf=1
   [1][0] 32-bit integer regs with #0   Wn
   [1][1] 64-bit integer regs with #0   Xn  sf=1  */
static const char *int_reg[2][2][32] = {
#define R32(X) "w" #X
#define R64(X) "x" #X
  { BANK (R32, "wsp"), BANK (R64, "sp") },
  { BANK (R32, "wzr"), BANK (R64, "xzr") }
#undef R64
#undef R32
};

/* Names of the SVE vector registers, first with .S suffixes,
   then with .D suffixes (indexed by get_addr_sve_reg_name).  */
static const char *sve_reg[2][32] = {
#define ZS(X) "z" #X ".s"
#define ZD(X) "z" #X ".d"
  BANK (ZS, ZS (31)), BANK (ZD, ZD (31))
#undef ZD
#undef ZS
};
#undef BANK
  2659. /* Return the integer register name.
  2660. if SP_REG_P is not 0, R31 is an SP reg, other R31 is the zero reg. */
  2661. static inline const char *
  2662. get_int_reg_name (int regno, aarch64_opnd_qualifier_t qualifier, int sp_reg_p)
  2663. {
  2664. const int has_zr = sp_reg_p ? 0 : 1;
  2665. const int is_64 = aarch64_get_qualifier_esize (qualifier) == 4 ? 0 : 1;
  2666. return int_reg[has_zr][is_64][regno];
  2667. }
  2668. /* Like get_int_reg_name, but IS_64 is always 1. */
  2669. static inline const char *
  2670. get_64bit_int_reg_name (int regno, int sp_reg_p)
  2671. {
  2672. const int has_zr = sp_reg_p ? 0 : 1;
  2673. return int_reg[has_zr][1][regno];
  2674. }
  2675. /* Get the name of the integer offset register in OPND, using the shift type
  2676. to decide whether it's a word or doubleword. */
  2677. static inline const char *
  2678. get_offset_int_reg_name (const aarch64_opnd_info *opnd)
  2679. {
  2680. switch (opnd->shifter.kind)
  2681. {
  2682. case AARCH64_MOD_UXTW:
  2683. case AARCH64_MOD_SXTW:
  2684. return get_int_reg_name (opnd->addr.offset.regno, AARCH64_OPND_QLF_W, 0);
  2685. case AARCH64_MOD_LSL:
  2686. case AARCH64_MOD_SXTX:
  2687. return get_int_reg_name (opnd->addr.offset.regno, AARCH64_OPND_QLF_X, 0);
  2688. default:
  2689. abort ();
  2690. }
  2691. }
  2692. /* Get the name of the SVE vector offset register in OPND, using the operand
  2693. qualifier to decide whether the suffix should be .S or .D. */
  2694. static inline const char *
  2695. get_addr_sve_reg_name (int regno, aarch64_opnd_qualifier_t qualifier)
  2696. {
  2697. assert (qualifier == AARCH64_OPND_QLF_S_S
  2698. || qualifier == AARCH64_OPND_QLF_S_D);
  2699. return sve_reg[qualifier == AARCH64_OPND_QLF_S_D][regno];
  2700. }
/* Types for expanding an encoded 8-bit value to a floating-point value.
   Each union reinterprets the bit pattern produced by expand_fp_imm as
   the corresponding IEEE value without violating strict aliasing.  */

typedef union
{
  uint64_t i;
  double d;
} double_conv_t;

typedef union
{
  uint32_t i;
  float f;
} single_conv_t;

/* Half-precision values are expanded to single precision before use
   (see expand_fp_imm), hence the 32-bit members.  */
typedef union
{
  uint32_t i;
  float f;
} half_conv_t;
  2717. /* IMM8 is an 8-bit floating-point constant with sign, 3-bit exponent and
  2718. normalized 4 bits of precision, encoded in "a:b:c:d:e:f:g:h" or FLD_imm8
  2719. (depending on the type of the instruction). IMM8 will be expanded to a
  2720. single-precision floating-point value (SIZE == 4) or a double-precision
  2721. floating-point value (SIZE == 8). A half-precision floating-point value
  2722. (SIZE == 2) is expanded to a single-precision floating-point value. The
  2723. expanded value is returned. */
  2724. static uint64_t
  2725. expand_fp_imm (int size, uint32_t imm8)
  2726. {
  2727. uint64_t imm = 0;
  2728. uint32_t imm8_7, imm8_6_0, imm8_6, imm8_6_repl4;
  2729. imm8_7 = (imm8 >> 7) & 0x01; /* imm8<7> */
  2730. imm8_6_0 = imm8 & 0x7f; /* imm8<6:0> */
  2731. imm8_6 = imm8_6_0 >> 6; /* imm8<6> */
  2732. imm8_6_repl4 = (imm8_6 << 3) | (imm8_6 << 2)
  2733. | (imm8_6 << 1) | imm8_6; /* Replicate(imm8<6>,4) */
  2734. if (size == 8)
  2735. {
  2736. imm = (imm8_7 << (63-32)) /* imm8<7> */
  2737. | ((imm8_6 ^ 1) << (62-32)) /* NOT(imm8<6) */
  2738. | (imm8_6_repl4 << (58-32)) | (imm8_6 << (57-32))
  2739. | (imm8_6 << (56-32)) | (imm8_6 << (55-32)) /* Replicate(imm8<6>,7) */
  2740. | (imm8_6_0 << (48-32)); /* imm8<6>:imm8<5:0> */
  2741. imm <<= 32;
  2742. }
  2743. else if (size == 4 || size == 2)
  2744. {
  2745. imm = (imm8_7 << 31) /* imm8<7> */
  2746. | ((imm8_6 ^ 1) << 30) /* NOT(imm8<6>) */
  2747. | (imm8_6_repl4 << 26) /* Replicate(imm8<6>,4) */
  2748. | (imm8_6_0 << 19); /* imm8<6>:imm8<5:0> */
  2749. }
  2750. else
  2751. {
  2752. /* An unsupported size. */
  2753. assert (0);
  2754. }
  2755. return imm;
  2756. }
  2757. /* Produce the string representation of the register list operand *OPND
  2758. in the buffer pointed by BUF of size SIZE. PREFIX is the part of
  2759. the register name that comes before the register number, such as "v". */
  2760. static void
  2761. print_register_list (char *buf, size_t size, const aarch64_opnd_info *opnd,
  2762. const char *prefix)
  2763. {
  2764. const int num_regs = opnd->reglist.num_regs;
  2765. const int first_reg = opnd->reglist.first_regno;
  2766. const int last_reg = (first_reg + num_regs - 1) & 0x1f;
  2767. const char *qlf_name = aarch64_get_qualifier_name (opnd->qualifier);
  2768. char tb[8]; /* Temporary buffer. */
  2769. assert (opnd->type != AARCH64_OPND_LEt || opnd->reglist.has_index);
  2770. assert (num_regs >= 1 && num_regs <= 4);
  2771. /* Prepare the index if any. */
  2772. if (opnd->reglist.has_index)
  2773. /* PR 21096: The %100 is to silence a warning about possible truncation. */
  2774. snprintf (tb, 8, "[%" PRIi64 "]", (opnd->reglist.index % 100));
  2775. else
  2776. tb[0] = '\0';
  2777. /* The hyphenated form is preferred for disassembly if there are
  2778. more than two registers in the list, and the register numbers
  2779. are monotonically increasing in increments of one. */
  2780. if (num_regs > 2 && last_reg > first_reg)
  2781. snprintf (buf, size, "{%s%d.%s-%s%d.%s}%s", prefix, first_reg, qlf_name,
  2782. prefix, last_reg, qlf_name, tb);
  2783. else
  2784. {
  2785. const int reg0 = first_reg;
  2786. const int reg1 = (first_reg + 1) & 0x1f;
  2787. const int reg2 = (first_reg + 2) & 0x1f;
  2788. const int reg3 = (first_reg + 3) & 0x1f;
  2789. switch (num_regs)
  2790. {
  2791. case 1:
  2792. snprintf (buf, size, "{%s%d.%s}%s", prefix, reg0, qlf_name, tb);
  2793. break;
  2794. case 2:
  2795. snprintf (buf, size, "{%s%d.%s, %s%d.%s}%s", prefix, reg0, qlf_name,
  2796. prefix, reg1, qlf_name, tb);
  2797. break;
  2798. case 3:
  2799. snprintf (buf, size, "{%s%d.%s, %s%d.%s, %s%d.%s}%s",
  2800. prefix, reg0, qlf_name, prefix, reg1, qlf_name,
  2801. prefix, reg2, qlf_name, tb);
  2802. break;
  2803. case 4:
  2804. snprintf (buf, size, "{%s%d.%s, %s%d.%s, %s%d.%s, %s%d.%s}%s",
  2805. prefix, reg0, qlf_name, prefix, reg1, qlf_name,
  2806. prefix, reg2, qlf_name, prefix, reg3, qlf_name, tb);
  2807. break;
  2808. }
  2809. }
  2810. }
  2811. /* Print the register+immediate address in OPND to BUF, which has SIZE
  2812. characters. BASE is the name of the base register. */
  2813. static void
  2814. print_immediate_offset_address (char *buf, size_t size,
  2815. const aarch64_opnd_info *opnd,
  2816. const char *base)
  2817. {
  2818. if (opnd->addr.writeback)
  2819. {
  2820. if (opnd->addr.preind)
  2821. {
  2822. if (opnd->type == AARCH64_OPND_ADDR_SIMM10 && !opnd->addr.offset.imm)
  2823. snprintf (buf, size, "[%s]!", base);
  2824. else
  2825. snprintf (buf, size, "[%s, #%d]!", base, opnd->addr.offset.imm);
  2826. }
  2827. else
  2828. snprintf (buf, size, "[%s], #%d", base, opnd->addr.offset.imm);
  2829. }
  2830. else
  2831. {
  2832. if (opnd->shifter.operator_present)
  2833. {
  2834. assert (opnd->shifter.kind == AARCH64_MOD_MUL_VL);
  2835. snprintf (buf, size, "[%s, #%d, mul vl]",
  2836. base, opnd->addr.offset.imm);
  2837. }
  2838. else if (opnd->addr.offset.imm)
  2839. snprintf (buf, size, "[%s, #%d]", base, opnd->addr.offset.imm);
  2840. else
  2841. snprintf (buf, size, "[%s]", base);
  2842. }
  2843. }
  2844. /* Produce the string representation of the register offset address operand
  2845. *OPND in the buffer pointed by BUF of size SIZE. BASE and OFFSET are
  2846. the names of the base and offset registers. */
  2847. static void
  2848. print_register_offset_address (char *buf, size_t size,
  2849. const aarch64_opnd_info *opnd,
  2850. const char *base, const char *offset)
  2851. {
  2852. char tb[16]; /* Temporary buffer. */
  2853. bool print_extend_p = true;
  2854. bool print_amount_p = true;
  2855. const char *shift_name = aarch64_operand_modifiers[opnd->shifter.kind].name;
  2856. if (!opnd->shifter.amount && (opnd->qualifier != AARCH64_OPND_QLF_S_B
  2857. || !opnd->shifter.amount_present))
  2858. {
  2859. /* Not print the shift/extend amount when the amount is zero and
  2860. when it is not the special case of 8-bit load/store instruction. */
  2861. print_amount_p = false;
  2862. /* Likewise, no need to print the shift operator LSL in such a
  2863. situation. */
  2864. if (opnd->shifter.kind == AARCH64_MOD_LSL)
  2865. print_extend_p = false;
  2866. }
  2867. /* Prepare for the extend/shift. */
  2868. if (print_extend_p)
  2869. {
  2870. if (print_amount_p)
  2871. snprintf (tb, sizeof (tb), ", %s #%" PRIi64, shift_name,
  2872. /* PR 21096: The %100 is to silence a warning about possible truncation. */
  2873. (opnd->shifter.amount % 100));
  2874. else
  2875. snprintf (tb, sizeof (tb), ", %s", shift_name);
  2876. }
  2877. else
  2878. tb[0] = '\0';
  2879. snprintf (buf, size, "[%s, %s%s]", base, offset, tb);
  2880. }
  2881. /* Print ZA tiles from imm8 in ZERO instruction.
  2882. The preferred disassembly of this instruction uses the shortest list of tile
  2883. names that represent the encoded immediate mask.
  2884. For example:
  2885. * An all-ones immediate is disassembled as {ZA}.
  2886. * An all-zeros immediate is disassembled as an empty list { }.
  2887. */
  2888. static void
  2889. print_sme_za_list(char *buf, size_t size, int mask)
  2890. {
  2891. const char* zan[] = { "za", "za0.h", "za1.h", "za0.s",
  2892. "za1.s", "za2.s", "za3.s", "za0.d",
  2893. "za1.d", "za2.d", "za3.d", "za4.d",
  2894. "za5.d", "za6.d", "za7.d", " " };
  2895. const int zan_v[] = { 0xff, 0x55, 0xaa, 0x11,
  2896. 0x22, 0x44, 0x88, 0x01,
  2897. 0x02, 0x04, 0x08, 0x10,
  2898. 0x20, 0x40, 0x80, 0x00 };
  2899. int i, k;
  2900. const int ZAN_SIZE = sizeof(zan) / sizeof(zan[0]);
  2901. k = snprintf (buf, size, "{");
  2902. for (i = 0; i < ZAN_SIZE; i++)
  2903. {
  2904. if ((mask & zan_v[i]) == zan_v[i])
  2905. {
  2906. mask &= ~zan_v[i];
  2907. if (k > 1)
  2908. k += snprintf (buf + k, size - k, ", %s", zan[i]);
  2909. else
  2910. k += snprintf (buf + k, size - k, "%s", zan[i]);
  2911. }
  2912. if (mask == 0)
  2913. break;
  2914. }
  2915. snprintf (buf + k, size - k, "}");
  2916. }
  2917. /* Generate the string representation of the operand OPNDS[IDX] for OPCODE
  2918. in *BUF. The caller should pass in the maximum size of *BUF in SIZE.
  2919. PC, PCREL_P and ADDRESS are used to pass in and return information about
  2920. the PC-relative address calculation, where the PC value is passed in
  2921. PC. If the operand is pc-relative related, *PCREL_P (if PCREL_P non-NULL)
  2922. will return 1 and *ADDRESS (if ADDRESS non-NULL) will return the
  2923. calculated address; otherwise, *PCREL_P (if PCREL_P non-NULL) returns 0.
  2924. The function serves both the disassembler and the assembler diagnostics
  2925. issuer, which is the reason why it lives in this file. */
  2926. void
  2927. aarch64_print_operand (char *buf, size_t size, bfd_vma pc,
  2928. const aarch64_opcode *opcode,
  2929. const aarch64_opnd_info *opnds, int idx, int *pcrel_p,
  2930. bfd_vma *address, char** notes,
  2931. aarch64_feature_set features)
  2932. {
  2933. unsigned int i, num_conds;
  2934. const char *name = NULL;
  2935. const aarch64_opnd_info *opnd = opnds + idx;
  2936. enum aarch64_modifier_kind kind;
  2937. uint64_t addr, enum_value;
  2938. buf[0] = '\0';
  2939. if (pcrel_p)
  2940. *pcrel_p = 0;
  2941. switch (opnd->type)
  2942. {
  2943. case AARCH64_OPND_Rd:
  2944. case AARCH64_OPND_Rn:
  2945. case AARCH64_OPND_Rm:
  2946. case AARCH64_OPND_Rt:
  2947. case AARCH64_OPND_Rt2:
  2948. case AARCH64_OPND_Rs:
  2949. case AARCH64_OPND_Ra:
  2950. case AARCH64_OPND_Rt_LS64:
  2951. case AARCH64_OPND_Rt_SYS:
  2952. case AARCH64_OPND_PAIRREG:
  2953. case AARCH64_OPND_SVE_Rm:
  2954. /* The optional-ness of <Xt> in e.g. IC <ic_op>{, <Xt>} is determined by
  2955. the <ic_op>, therefore we use opnd->present to override the
  2956. generic optional-ness information. */
  2957. if (opnd->type == AARCH64_OPND_Rt_SYS)
  2958. {
  2959. if (!opnd->present)
  2960. break;
  2961. }
  2962. /* Omit the operand, e.g. RET. */
  2963. else if (optional_operand_p (opcode, idx)
  2964. && (opnd->reg.regno
  2965. == get_optional_operand_default_value (opcode)))
  2966. break;
  2967. assert (opnd->qualifier == AARCH64_OPND_QLF_W
  2968. || opnd->qualifier == AARCH64_OPND_QLF_X);
  2969. snprintf (buf, size, "%s",
  2970. get_int_reg_name (opnd->reg.regno, opnd->qualifier, 0));
  2971. break;
  2972. case AARCH64_OPND_Rd_SP:
  2973. case AARCH64_OPND_Rn_SP:
  2974. case AARCH64_OPND_Rt_SP:
  2975. case AARCH64_OPND_SVE_Rn_SP:
  2976. case AARCH64_OPND_Rm_SP:
  2977. assert (opnd->qualifier == AARCH64_OPND_QLF_W
  2978. || opnd->qualifier == AARCH64_OPND_QLF_WSP
  2979. || opnd->qualifier == AARCH64_OPND_QLF_X
  2980. || opnd->qualifier == AARCH64_OPND_QLF_SP);
  2981. snprintf (buf, size, "%s",
  2982. get_int_reg_name (opnd->reg.regno, opnd->qualifier, 1));
  2983. break;
  2984. case AARCH64_OPND_Rm_EXT:
  2985. kind = opnd->shifter.kind;
  2986. assert (idx == 1 || idx == 2);
  2987. if ((aarch64_stack_pointer_p (opnds)
  2988. || (idx == 2 && aarch64_stack_pointer_p (opnds + 1)))
  2989. && ((opnd->qualifier == AARCH64_OPND_QLF_W
  2990. && opnds[0].qualifier == AARCH64_OPND_QLF_W
  2991. && kind == AARCH64_MOD_UXTW)
  2992. || (opnd->qualifier == AARCH64_OPND_QLF_X
  2993. && kind == AARCH64_MOD_UXTX)))
  2994. {
  2995. /* 'LSL' is the preferred form in this case. */
  2996. kind = AARCH64_MOD_LSL;
  2997. if (opnd->shifter.amount == 0)
  2998. {
  2999. /* Shifter omitted. */
  3000. snprintf (buf, size, "%s",
  3001. get_int_reg_name (opnd->reg.regno, opnd->qualifier, 0));
  3002. break;
  3003. }
  3004. }
  3005. if (opnd->shifter.amount)
  3006. snprintf (buf, size, "%s, %s #%" PRIi64,
  3007. get_int_reg_name (opnd->reg.regno, opnd->qualifier, 0),
  3008. aarch64_operand_modifiers[kind].name,
  3009. opnd->shifter.amount);
  3010. else
  3011. snprintf (buf, size, "%s, %s",
  3012. get_int_reg_name (opnd->reg.regno, opnd->qualifier, 0),
  3013. aarch64_operand_modifiers[kind].name);
  3014. break;
  3015. case AARCH64_OPND_Rm_SFT:
  3016. assert (opnd->qualifier == AARCH64_OPND_QLF_W
  3017. || opnd->qualifier == AARCH64_OPND_QLF_X);
  3018. if (opnd->shifter.amount == 0 && opnd->shifter.kind == AARCH64_MOD_LSL)
  3019. snprintf (buf, size, "%s",
  3020. get_int_reg_name (opnd->reg.regno, opnd->qualifier, 0));
  3021. else
  3022. snprintf (buf, size, "%s, %s #%" PRIi64,
  3023. get_int_reg_name (opnd->reg.regno, opnd->qualifier, 0),
  3024. aarch64_operand_modifiers[opnd->shifter.kind].name,
  3025. opnd->shifter.amount);
  3026. break;
  3027. case AARCH64_OPND_Fd:
  3028. case AARCH64_OPND_Fn:
  3029. case AARCH64_OPND_Fm:
  3030. case AARCH64_OPND_Fa:
  3031. case AARCH64_OPND_Ft:
  3032. case AARCH64_OPND_Ft2:
  3033. case AARCH64_OPND_Sd:
  3034. case AARCH64_OPND_Sn:
  3035. case AARCH64_OPND_Sm:
  3036. case AARCH64_OPND_SVE_VZn:
  3037. case AARCH64_OPND_SVE_Vd:
  3038. case AARCH64_OPND_SVE_Vm:
  3039. case AARCH64_OPND_SVE_Vn:
  3040. snprintf (buf, size, "%s%d", aarch64_get_qualifier_name (opnd->qualifier),
  3041. opnd->reg.regno);
  3042. break;
  3043. case AARCH64_OPND_Va:
  3044. case AARCH64_OPND_Vd:
  3045. case AARCH64_OPND_Vn:
  3046. case AARCH64_OPND_Vm:
  3047. snprintf (buf, size, "v%d.%s", opnd->reg.regno,
  3048. aarch64_get_qualifier_name (opnd->qualifier));
  3049. break;
  3050. case AARCH64_OPND_Ed:
  3051. case AARCH64_OPND_En:
  3052. case AARCH64_OPND_Em:
  3053. case AARCH64_OPND_Em16:
  3054. case AARCH64_OPND_SM3_IMM2:
  3055. snprintf (buf, size, "v%d.%s[%" PRIi64 "]", opnd->reglane.regno,
  3056. aarch64_get_qualifier_name (opnd->qualifier),
  3057. opnd->reglane.index);
  3058. break;
  3059. case AARCH64_OPND_VdD1:
  3060. case AARCH64_OPND_VnD1:
  3061. snprintf (buf, size, "v%d.d[1]", opnd->reg.regno);
  3062. break;
  3063. case AARCH64_OPND_LVn:
  3064. case AARCH64_OPND_LVt:
  3065. case AARCH64_OPND_LVt_AL:
  3066. case AARCH64_OPND_LEt:
  3067. print_register_list (buf, size, opnd, "v");
  3068. break;
  3069. case AARCH64_OPND_SVE_Pd:
  3070. case AARCH64_OPND_SVE_Pg3:
  3071. case AARCH64_OPND_SVE_Pg4_5:
  3072. case AARCH64_OPND_SVE_Pg4_10:
  3073. case AARCH64_OPND_SVE_Pg4_16:
  3074. case AARCH64_OPND_SVE_Pm:
  3075. case AARCH64_OPND_SVE_Pn:
  3076. case AARCH64_OPND_SVE_Pt:
  3077. case AARCH64_OPND_SME_Pm:
  3078. if (opnd->qualifier == AARCH64_OPND_QLF_NIL)
  3079. snprintf (buf, size, "p%d", opnd->reg.regno);
  3080. else if (opnd->qualifier == AARCH64_OPND_QLF_P_Z
  3081. || opnd->qualifier == AARCH64_OPND_QLF_P_M)
  3082. snprintf (buf, size, "p%d/%s", opnd->reg.regno,
  3083. aarch64_get_qualifier_name (opnd->qualifier));
  3084. else
  3085. snprintf (buf, size, "p%d.%s", opnd->reg.regno,
  3086. aarch64_get_qualifier_name (opnd->qualifier));
  3087. break;
  3088. case AARCH64_OPND_SVE_Za_5:
  3089. case AARCH64_OPND_SVE_Za_16:
  3090. case AARCH64_OPND_SVE_Zd:
  3091. case AARCH64_OPND_SVE_Zm_5:
  3092. case AARCH64_OPND_SVE_Zm_16:
  3093. case AARCH64_OPND_SVE_Zn:
  3094. case AARCH64_OPND_SVE_Zt:
  3095. if (opnd->qualifier == AARCH64_OPND_QLF_NIL)
  3096. snprintf (buf, size, "z%d", opnd->reg.regno);
  3097. else
  3098. snprintf (buf, size, "z%d.%s", opnd->reg.regno,
  3099. aarch64_get_qualifier_name (opnd->qualifier));
  3100. break;
  3101. case AARCH64_OPND_SVE_ZnxN:
  3102. case AARCH64_OPND_SVE_ZtxN:
  3103. print_register_list (buf, size, opnd, "z");
  3104. break;
  3105. case AARCH64_OPND_SVE_Zm3_INDEX:
  3106. case AARCH64_OPND_SVE_Zm3_22_INDEX:
  3107. case AARCH64_OPND_SVE_Zm3_11_INDEX:
  3108. case AARCH64_OPND_SVE_Zm4_11_INDEX:
  3109. case AARCH64_OPND_SVE_Zm4_INDEX:
  3110. case AARCH64_OPND_SVE_Zn_INDEX:
  3111. snprintf (buf, size, "z%d.%s[%" PRIi64 "]", opnd->reglane.regno,
  3112. aarch64_get_qualifier_name (opnd->qualifier),
  3113. opnd->reglane.index);
  3114. break;
  3115. case AARCH64_OPND_SME_ZAda_2b:
  3116. case AARCH64_OPND_SME_ZAda_3b:
  3117. snprintf (buf, size, "za%d.%s", opnd->reg.regno,
  3118. aarch64_get_qualifier_name (opnd->qualifier));
  3119. break;
  3120. case AARCH64_OPND_SME_ZA_HV_idx_src:
  3121. case AARCH64_OPND_SME_ZA_HV_idx_dest:
  3122. case AARCH64_OPND_SME_ZA_HV_idx_ldstr:
  3123. snprintf (buf, size, "%sza%d%c.%s[w%d, %d]%s",
  3124. opnd->type == AARCH64_OPND_SME_ZA_HV_idx_ldstr ? "{" : "",
  3125. opnd->za_tile_vector.regno,
  3126. opnd->za_tile_vector.v == 1 ? 'v' : 'h',
  3127. aarch64_get_qualifier_name (opnd->qualifier),
  3128. opnd->za_tile_vector.index.regno,
  3129. opnd->za_tile_vector.index.imm,
  3130. opnd->type == AARCH64_OPND_SME_ZA_HV_idx_ldstr ? "}" : "");
  3131. break;
  3132. case AARCH64_OPND_SME_list_of_64bit_tiles:
  3133. print_sme_za_list (buf, size, opnd->reg.regno);
  3134. break;
  3135. case AARCH64_OPND_SME_ZA_array:
  3136. snprintf (buf, size, "za[w%d, %d]",
  3137. opnd->za_tile_vector.index.regno,
  3138. opnd->za_tile_vector.index.imm);
  3139. break;
  3140. case AARCH64_OPND_SME_SM_ZA:
  3141. snprintf (buf, size, "%s", opnd->reg.regno == 's' ? "sm" : "za");
  3142. break;
  3143. case AARCH64_OPND_SME_PnT_Wm_imm:
  3144. snprintf (buf, size, "p%d.%s[w%d, %d]",
  3145. opnd->za_tile_vector.regno,
  3146. aarch64_get_qualifier_name (opnd->qualifier),
  3147. opnd->za_tile_vector.index.regno,
  3148. opnd->za_tile_vector.index.imm);
  3149. break;
  3150. case AARCH64_OPND_CRn:
  3151. case AARCH64_OPND_CRm:
  3152. snprintf (buf, size, "C%" PRIi64, opnd->imm.value);
  3153. break;
  3154. case AARCH64_OPND_IDX:
  3155. case AARCH64_OPND_MASK:
  3156. case AARCH64_OPND_IMM:
  3157. case AARCH64_OPND_IMM_2:
  3158. case AARCH64_OPND_WIDTH:
  3159. case AARCH64_OPND_UIMM3_OP1:
  3160. case AARCH64_OPND_UIMM3_OP2:
  3161. case AARCH64_OPND_BIT_NUM:
  3162. case AARCH64_OPND_IMM_VLSL:
  3163. case AARCH64_OPND_IMM_VLSR:
  3164. case AARCH64_OPND_SHLL_IMM:
  3165. case AARCH64_OPND_IMM0:
  3166. case AARCH64_OPND_IMMR:
  3167. case AARCH64_OPND_IMMS:
  3168. case AARCH64_OPND_UNDEFINED:
  3169. case AARCH64_OPND_FBITS:
  3170. case AARCH64_OPND_TME_UIMM16:
  3171. case AARCH64_OPND_SIMM5:
  3172. case AARCH64_OPND_SVE_SHLIMM_PRED:
  3173. case AARCH64_OPND_SVE_SHLIMM_UNPRED:
  3174. case AARCH64_OPND_SVE_SHLIMM_UNPRED_22:
  3175. case AARCH64_OPND_SVE_SHRIMM_PRED:
  3176. case AARCH64_OPND_SVE_SHRIMM_UNPRED:
  3177. case AARCH64_OPND_SVE_SHRIMM_UNPRED_22:
  3178. case AARCH64_OPND_SVE_SIMM5:
  3179. case AARCH64_OPND_SVE_SIMM5B:
  3180. case AARCH64_OPND_SVE_SIMM6:
  3181. case AARCH64_OPND_SVE_SIMM8:
  3182. case AARCH64_OPND_SVE_UIMM3:
  3183. case AARCH64_OPND_SVE_UIMM7:
  3184. case AARCH64_OPND_SVE_UIMM8:
  3185. case AARCH64_OPND_SVE_UIMM8_53:
  3186. case AARCH64_OPND_IMM_ROT1:
  3187. case AARCH64_OPND_IMM_ROT2:
  3188. case AARCH64_OPND_IMM_ROT3:
  3189. case AARCH64_OPND_SVE_IMM_ROT1:
  3190. case AARCH64_OPND_SVE_IMM_ROT2:
  3191. case AARCH64_OPND_SVE_IMM_ROT3:
  3192. snprintf (buf, size, "#%" PRIi64, opnd->imm.value);
  3193. break;
  3194. case AARCH64_OPND_SVE_I1_HALF_ONE:
  3195. case AARCH64_OPND_SVE_I1_HALF_TWO:
  3196. case AARCH64_OPND_SVE_I1_ZERO_ONE:
  3197. {
  3198. single_conv_t c;
  3199. c.i = opnd->imm.value;
  3200. snprintf (buf, size, "#%.1f", c.f);
  3201. break;
  3202. }
  3203. case AARCH64_OPND_SVE_PATTERN:
  3204. if (optional_operand_p (opcode, idx)
  3205. && opnd->imm.value == get_optional_operand_default_value (opcode))
  3206. break;
  3207. enum_value = opnd->imm.value;
  3208. assert (enum_value < ARRAY_SIZE (aarch64_sve_pattern_array));
  3209. if (aarch64_sve_pattern_array[enum_value])
  3210. snprintf (buf, size, "%s", aarch64_sve_pattern_array[enum_value]);
  3211. else
  3212. snprintf (buf, size, "#%" PRIi64, opnd->imm.value);
  3213. break;
  3214. case AARCH64_OPND_SVE_PATTERN_SCALED:
  3215. if (optional_operand_p (opcode, idx)
  3216. && !opnd->shifter.operator_present
  3217. && opnd->imm.value == get_optional_operand_default_value (opcode))
  3218. break;
  3219. enum_value = opnd->imm.value;
  3220. assert (enum_value < ARRAY_SIZE (aarch64_sve_pattern_array));
  3221. if (aarch64_sve_pattern_array[opnd->imm.value])
  3222. snprintf (buf, size, "%s", aarch64_sve_pattern_array[opnd->imm.value]);
  3223. else
  3224. snprintf (buf, size, "#%" PRIi64, opnd->imm.value);
  3225. if (opnd->shifter.operator_present)
  3226. {
  3227. size_t len = strlen (buf);
  3228. snprintf (buf + len, size - len, ", %s #%" PRIi64,
  3229. aarch64_operand_modifiers[opnd->shifter.kind].name,
  3230. opnd->shifter.amount);
  3231. }
  3232. break;
  3233. case AARCH64_OPND_SVE_PRFOP:
  3234. enum_value = opnd->imm.value;
  3235. assert (enum_value < ARRAY_SIZE (aarch64_sve_prfop_array));
  3236. if (aarch64_sve_prfop_array[enum_value])
  3237. snprintf (buf, size, "%s", aarch64_sve_prfop_array[enum_value]);
  3238. else
  3239. snprintf (buf, size, "#%" PRIi64, opnd->imm.value);
  3240. break;
  3241. case AARCH64_OPND_IMM_MOV:
  3242. switch (aarch64_get_qualifier_esize (opnds[0].qualifier))
  3243. {
  3244. case 4: /* e.g. MOV Wd, #<imm32>. */
  3245. {
  3246. int imm32 = opnd->imm.value;
  3247. snprintf (buf, size, "#0x%-20x\t// #%d", imm32, imm32);
  3248. }
  3249. break;
  3250. case 8: /* e.g. MOV Xd, #<imm64>. */
  3251. snprintf (buf, size, "#0x%-20" PRIx64 "\t// #%" PRIi64,
  3252. opnd->imm.value, opnd->imm.value);
  3253. break;
  3254. default:
  3255. snprintf (buf, size, "<invalid>");
  3256. break;
  3257. }
  3258. break;
  3259. case AARCH64_OPND_FPIMM0:
  3260. snprintf (buf, size, "#0.0");
  3261. break;
  3262. case AARCH64_OPND_LIMM:
  3263. case AARCH64_OPND_AIMM:
  3264. case AARCH64_OPND_HALF:
  3265. case AARCH64_OPND_SVE_INV_LIMM:
  3266. case AARCH64_OPND_SVE_LIMM:
  3267. case AARCH64_OPND_SVE_LIMM_MOV:
  3268. if (opnd->shifter.amount)
  3269. snprintf (buf, size, "#0x%" PRIx64 ", lsl #%" PRIi64, opnd->imm.value,
  3270. opnd->shifter.amount);
  3271. else
  3272. snprintf (buf, size, "#0x%" PRIx64, opnd->imm.value);
  3273. break;
  3274. case AARCH64_OPND_SIMD_IMM:
  3275. case AARCH64_OPND_SIMD_IMM_SFT:
  3276. if ((! opnd->shifter.amount && opnd->shifter.kind == AARCH64_MOD_LSL)
  3277. || opnd->shifter.kind == AARCH64_MOD_NONE)
  3278. snprintf (buf, size, "#0x%" PRIx64, opnd->imm.value);
  3279. else
  3280. snprintf (buf, size, "#0x%" PRIx64 ", %s #%" PRIi64, opnd->imm.value,
  3281. aarch64_operand_modifiers[opnd->shifter.kind].name,
  3282. opnd->shifter.amount);
  3283. break;
  3284. case AARCH64_OPND_SVE_AIMM:
  3285. case AARCH64_OPND_SVE_ASIMM:
  3286. if (opnd->shifter.amount)
  3287. snprintf (buf, size, "#%" PRIi64 ", lsl #%" PRIi64, opnd->imm.value,
  3288. opnd->shifter.amount);
  3289. else
  3290. snprintf (buf, size, "#%" PRIi64, opnd->imm.value);
  3291. break;
  3292. case AARCH64_OPND_FPIMM:
  3293. case AARCH64_OPND_SIMD_FPIMM:
  3294. case AARCH64_OPND_SVE_FPIMM8:
  3295. switch (aarch64_get_qualifier_esize (opnds[0].qualifier))
  3296. {
  3297. case 2: /* e.g. FMOV <Hd>, #<imm>. */
  3298. {
  3299. half_conv_t c;
  3300. c.i = expand_fp_imm (2, opnd->imm.value);
  3301. snprintf (buf, size, "#%.18e", c.f);
  3302. }
  3303. break;
  3304. case 4: /* e.g. FMOV <Vd>.4S, #<imm>. */
  3305. {
  3306. single_conv_t c;
  3307. c.i = expand_fp_imm (4, opnd->imm.value);
  3308. snprintf (buf, size, "#%.18e", c.f);
  3309. }
  3310. break;
  3311. case 8: /* e.g. FMOV <Sd>, #<imm>. */
  3312. {
  3313. double_conv_t c;
  3314. c.i = expand_fp_imm (8, opnd->imm.value);
  3315. snprintf (buf, size, "#%.18e", c.d);
  3316. }
  3317. break;
  3318. default:
  3319. snprintf (buf, size, "<invalid>");
  3320. break;
  3321. }
  3322. break;
  3323. case AARCH64_OPND_CCMP_IMM:
  3324. case AARCH64_OPND_NZCV:
  3325. case AARCH64_OPND_EXCEPTION:
  3326. case AARCH64_OPND_UIMM4:
  3327. case AARCH64_OPND_UIMM4_ADDG:
  3328. case AARCH64_OPND_UIMM7:
  3329. case AARCH64_OPND_UIMM10:
  3330. if (optional_operand_p (opcode, idx)
  3331. && (opnd->imm.value ==
  3332. (int64_t) get_optional_operand_default_value (opcode)))
  3333. /* Omit the operand, e.g. DCPS1. */
  3334. break;
  3335. snprintf (buf, size, "#0x%x", (unsigned int)opnd->imm.value);
  3336. break;
  3337. case AARCH64_OPND_COND:
  3338. case AARCH64_OPND_COND1:
  3339. snprintf (buf, size, "%s", opnd->cond->names[0]);
  3340. num_conds = ARRAY_SIZE (opnd->cond->names);
  3341. for (i = 1; i < num_conds && opnd->cond->names[i]; ++i)
  3342. {
  3343. size_t len = strlen (buf);
  3344. if (i == 1)
  3345. snprintf (buf + len, size - len, " // %s = %s",
  3346. opnd->cond->names[0], opnd->cond->names[i]);
  3347. else
  3348. snprintf (buf + len, size - len, ", %s",
  3349. opnd->cond->names[i]);
  3350. }
  3351. break;
  3352. case AARCH64_OPND_ADDR_ADRP:
  3353. addr = ((pc + AARCH64_PCREL_OFFSET) & ~(uint64_t)0xfff)
  3354. + opnd->imm.value;
  3355. if (pcrel_p)
  3356. *pcrel_p = 1;
  3357. if (address)
  3358. *address = addr;
  3359. /* This is not necessary during the disassembling, as print_address_func
  3360. in the disassemble_info will take care of the printing. But some
  3361. other callers may be still interested in getting the string in *STR,
  3362. so here we do snprintf regardless. */
  3363. snprintf (buf, size, "#0x%" PRIx64, addr);
  3364. break;
  3365. case AARCH64_OPND_ADDR_PCREL14:
  3366. case AARCH64_OPND_ADDR_PCREL19:
  3367. case AARCH64_OPND_ADDR_PCREL21:
  3368. case AARCH64_OPND_ADDR_PCREL26:
  3369. addr = pc + AARCH64_PCREL_OFFSET + opnd->imm.value;
  3370. if (pcrel_p)
  3371. *pcrel_p = 1;
  3372. if (address)
  3373. *address = addr;
  3374. /* This is not necessary during the disassembling, as print_address_func
  3375. in the disassemble_info will take care of the printing. But some
  3376. other callers may be still interested in getting the string in *STR,
  3377. so here we do snprintf regardless. */
  3378. snprintf (buf, size, "#0x%" PRIx64, addr);
  3379. break;
  3380. case AARCH64_OPND_ADDR_SIMPLE:
  3381. case AARCH64_OPND_SIMD_ADDR_SIMPLE:
  3382. case AARCH64_OPND_SIMD_ADDR_POST:
  3383. name = get_64bit_int_reg_name (opnd->addr.base_regno, 1);
  3384. if (opnd->type == AARCH64_OPND_SIMD_ADDR_POST)
  3385. {
  3386. if (opnd->addr.offset.is_reg)
  3387. snprintf (buf, size, "[%s], x%d", name, opnd->addr.offset.regno);
  3388. else
  3389. snprintf (buf, size, "[%s], #%d", name, opnd->addr.offset.imm);
  3390. }
  3391. else
  3392. snprintf (buf, size, "[%s]", name);
  3393. break;
  3394. case AARCH64_OPND_ADDR_REGOFF:
  3395. case AARCH64_OPND_SVE_ADDR_R:
  3396. case AARCH64_OPND_SVE_ADDR_RR:
  3397. case AARCH64_OPND_SVE_ADDR_RR_LSL1:
  3398. case AARCH64_OPND_SVE_ADDR_RR_LSL2:
  3399. case AARCH64_OPND_SVE_ADDR_RR_LSL3:
  3400. case AARCH64_OPND_SVE_ADDR_RR_LSL4:
  3401. case AARCH64_OPND_SVE_ADDR_RX:
  3402. case AARCH64_OPND_SVE_ADDR_RX_LSL1:
  3403. case AARCH64_OPND_SVE_ADDR_RX_LSL2:
  3404. case AARCH64_OPND_SVE_ADDR_RX_LSL3:
  3405. print_register_offset_address
  3406. (buf, size, opnd, get_64bit_int_reg_name (opnd->addr.base_regno, 1),
  3407. get_offset_int_reg_name (opnd));
  3408. break;
  3409. case AARCH64_OPND_SVE_ADDR_ZX:
  3410. print_register_offset_address
  3411. (buf, size, opnd,
  3412. get_addr_sve_reg_name (opnd->addr.base_regno, opnd->qualifier),
  3413. get_64bit_int_reg_name (opnd->addr.offset.regno, 0));
  3414. break;
  3415. case AARCH64_OPND_SVE_ADDR_RZ:
  3416. case AARCH64_OPND_SVE_ADDR_RZ_LSL1:
  3417. case AARCH64_OPND_SVE_ADDR_RZ_LSL2:
  3418. case AARCH64_OPND_SVE_ADDR_RZ_LSL3:
  3419. case AARCH64_OPND_SVE_ADDR_RZ_XTW_14:
  3420. case AARCH64_OPND_SVE_ADDR_RZ_XTW_22:
  3421. case AARCH64_OPND_SVE_ADDR_RZ_XTW1_14:
  3422. case AARCH64_OPND_SVE_ADDR_RZ_XTW1_22:
  3423. case AARCH64_OPND_SVE_ADDR_RZ_XTW2_14:
  3424. case AARCH64_OPND_SVE_ADDR_RZ_XTW2_22:
  3425. case AARCH64_OPND_SVE_ADDR_RZ_XTW3_14:
  3426. case AARCH64_OPND_SVE_ADDR_RZ_XTW3_22:
  3427. print_register_offset_address
  3428. (buf, size, opnd, get_64bit_int_reg_name (opnd->addr.base_regno, 1),
  3429. get_addr_sve_reg_name (opnd->addr.offset.regno, opnd->qualifier));
  3430. break;
  3431. case AARCH64_OPND_ADDR_SIMM7:
  3432. case AARCH64_OPND_ADDR_SIMM9:
  3433. case AARCH64_OPND_ADDR_SIMM9_2:
  3434. case AARCH64_OPND_ADDR_SIMM10:
  3435. case AARCH64_OPND_ADDR_SIMM11:
  3436. case AARCH64_OPND_ADDR_SIMM13:
  3437. case AARCH64_OPND_ADDR_OFFSET:
  3438. case AARCH64_OPND_SME_ADDR_RI_U4xVL:
  3439. case AARCH64_OPND_SVE_ADDR_RI_S4x16:
  3440. case AARCH64_OPND_SVE_ADDR_RI_S4x32:
  3441. case AARCH64_OPND_SVE_ADDR_RI_S4xVL:
  3442. case AARCH64_OPND_SVE_ADDR_RI_S4x2xVL:
  3443. case AARCH64_OPND_SVE_ADDR_RI_S4x3xVL:
  3444. case AARCH64_OPND_SVE_ADDR_RI_S4x4xVL:
  3445. case AARCH64_OPND_SVE_ADDR_RI_S6xVL:
  3446. case AARCH64_OPND_SVE_ADDR_RI_S9xVL:
  3447. case AARCH64_OPND_SVE_ADDR_RI_U6:
  3448. case AARCH64_OPND_SVE_ADDR_RI_U6x2:
  3449. case AARCH64_OPND_SVE_ADDR_RI_U6x4:
  3450. case AARCH64_OPND_SVE_ADDR_RI_U6x8:
  3451. print_immediate_offset_address
  3452. (buf, size, opnd, get_64bit_int_reg_name (opnd->addr.base_regno, 1));
  3453. break;
  3454. case AARCH64_OPND_SVE_ADDR_ZI_U5:
  3455. case AARCH64_OPND_SVE_ADDR_ZI_U5x2:
  3456. case AARCH64_OPND_SVE_ADDR_ZI_U5x4:
  3457. case AARCH64_OPND_SVE_ADDR_ZI_U5x8:
  3458. print_immediate_offset_address
  3459. (buf, size, opnd,
  3460. get_addr_sve_reg_name (opnd->addr.base_regno, opnd->qualifier));
  3461. break;
  3462. case AARCH64_OPND_SVE_ADDR_ZZ_LSL:
  3463. case AARCH64_OPND_SVE_ADDR_ZZ_SXTW:
  3464. case AARCH64_OPND_SVE_ADDR_ZZ_UXTW:
  3465. print_register_offset_address
  3466. (buf, size, opnd,
  3467. get_addr_sve_reg_name (opnd->addr.base_regno, opnd->qualifier),
  3468. get_addr_sve_reg_name (opnd->addr.offset.regno, opnd->qualifier));
  3469. break;
  3470. case AARCH64_OPND_ADDR_UIMM12:
  3471. name = get_64bit_int_reg_name (opnd->addr.base_regno, 1);
  3472. if (opnd->addr.offset.imm)
  3473. snprintf (buf, size, "[%s, #%d]", name, opnd->addr.offset.imm);
  3474. else
  3475. snprintf (buf, size, "[%s]", name);
  3476. break;
  3477. case AARCH64_OPND_SYSREG:
  3478. for (i = 0; aarch64_sys_regs[i].name; ++i)
  3479. {
  3480. const aarch64_sys_reg *sr = aarch64_sys_regs + i;
  3481. bool exact_match
  3482. = (!(sr->flags & (F_REG_READ | F_REG_WRITE))
  3483. || (sr->flags & opnd->sysreg.flags) == opnd->sysreg.flags)
  3484. && AARCH64_CPU_HAS_FEATURE (features, sr->features);
  3485. /* Try and find an exact match, But if that fails, return the first
  3486. partial match that was found. */
  3487. if (aarch64_sys_regs[i].value == opnd->sysreg.value
  3488. && ! aarch64_sys_reg_deprecated_p (aarch64_sys_regs[i].flags)
  3489. && (name == NULL || exact_match))
  3490. {
  3491. name = aarch64_sys_regs[i].name;
  3492. if (exact_match)
  3493. {
  3494. if (notes)
  3495. *notes = NULL;
  3496. break;
  3497. }
  3498. /* If we didn't match exactly, that means the presense of a flag
  3499. indicates what we didn't want for this instruction. e.g. If
  3500. F_REG_READ is there, that means we were looking for a write
  3501. register. See aarch64_ext_sysreg. */
  3502. if (aarch64_sys_regs[i].flags & F_REG_WRITE)
  3503. *notes = _("reading from a write-only register");
  3504. else if (aarch64_sys_regs[i].flags & F_REG_READ)
  3505. *notes = _("writing to a read-only register");
  3506. }
  3507. }
  3508. if (name)
  3509. snprintf (buf, size, "%s", name);
  3510. else
  3511. {
  3512. /* Implementation defined system register. */
  3513. unsigned int value = opnd->sysreg.value;
  3514. snprintf (buf, size, "s%u_%u_c%u_c%u_%u", (value >> 14) & 0x3,
  3515. (value >> 11) & 0x7, (value >> 7) & 0xf, (value >> 3) & 0xf,
  3516. value & 0x7);
  3517. }
  3518. break;
  3519. case AARCH64_OPND_PSTATEFIELD:
  3520. for (i = 0; aarch64_pstatefields[i].name; ++i)
  3521. if (aarch64_pstatefields[i].value == opnd->pstatefield)
  3522. {
  3523. /* PSTATEFIELD name is encoded partially in CRm[3:1] for SVCRSM,
  3524. SVCRZA and SVCRSMZA. */
  3525. uint32_t flags = aarch64_pstatefields[i].flags;
  3526. if (flags & F_REG_IN_CRM
  3527. && (PSTATE_DECODE_CRM (opnd->sysreg.flags)
  3528. != PSTATE_DECODE_CRM (flags)))
  3529. continue;
  3530. break;
  3531. }
  3532. assert (aarch64_pstatefields[i].name);
  3533. snprintf (buf, size, "%s", aarch64_pstatefields[i].name);
  3534. break;
  3535. case AARCH64_OPND_SYSREG_AT:
  3536. case AARCH64_OPND_SYSREG_DC:
  3537. case AARCH64_OPND_SYSREG_IC:
  3538. case AARCH64_OPND_SYSREG_TLBI:
  3539. case AARCH64_OPND_SYSREG_SR:
  3540. snprintf (buf, size, "%s", opnd->sysins_op->name);
  3541. break;
  3542. case AARCH64_OPND_BARRIER:
  3543. case AARCH64_OPND_BARRIER_DSB_NXS:
  3544. snprintf (buf, size, "%s", opnd->barrier->name);
  3545. break;
  3546. case AARCH64_OPND_BARRIER_ISB:
  3547. /* Operand can be omitted, e.g. in DCPS1. */
  3548. if (! optional_operand_p (opcode, idx)
  3549. || (opnd->barrier->value
  3550. != get_optional_operand_default_value (opcode)))
  3551. snprintf (buf, size, "#0x%x", opnd->barrier->value);
  3552. break;
  3553. case AARCH64_OPND_PRFOP:
  3554. if (opnd->prfop->name != NULL)
  3555. snprintf (buf, size, "%s", opnd->prfop->name);
  3556. else
  3557. snprintf (buf, size, "#0x%02x", opnd->prfop->value);
  3558. break;
  3559. case AARCH64_OPND_BARRIER_PSB:
  3560. snprintf (buf, size, "csync");
  3561. break;
  3562. case AARCH64_OPND_BTI_TARGET:
  3563. if ((HINT_FLAG (opnd->hint_option->value) & HINT_OPD_F_NOPRINT) == 0)
  3564. snprintf (buf, size, "%s", opnd->hint_option->name);
  3565. break;
  3566. case AARCH64_OPND_MOPS_ADDR_Rd:
  3567. case AARCH64_OPND_MOPS_ADDR_Rs:
  3568. snprintf (buf, size, "[%s]!",
  3569. get_int_reg_name (opnd->reg.regno, AARCH64_OPND_QLF_X, 0));
  3570. break;
  3571. case AARCH64_OPND_MOPS_WB_Rn:
  3572. snprintf (buf, size, "%s!",
  3573. get_int_reg_name (opnd->reg.regno, AARCH64_OPND_QLF_X, 0));
  3574. break;
  3575. default:
  3576. snprintf (buf, size, "<invalid>");
  3577. break;
  3578. }
  3579. }
/* Pack a system-register encoding (op0, op1, CRn, CRm, op2) into a single
   value.  After the final >> 5 the fields sit at: op0 bits [15:14],
   op1 [13:11], CRn [10:7], CRm [6:3], op2 [2:0] — the layout decoded by
   the implementation-defined "s<op0>_<op1>_c<crn>_c<crm>_<op2>" printing
   in aarch64_print_operand.  */
#define CPENC(op0,op1,crn,crm,op2) \
  ((((op0) << 19) | ((op1) << 16) | ((crn) << 12) | ((crm) << 8) | ((op2) << 5)) >> 5)

/* for 3.9.3 Instructions for Accessing Special Purpose Registers */
#define CPEN_(op1,crm,op2) CPENC(3,(op1),4,(crm),(op2))

/* for 3.9.10 System Instructions */
#define CPENS(op1,crn,crm,op2) CPENC(1,(op1),(crn),(crm),(op2))

/* Readable aliases for the CRn/CRm numbers used in the tables below.  */
#define C0  0
#define C1  1
#define C2  2
#define C3  3
#define C4  4
#define C5  5
#define C6  6
#define C7  7
#define C8  8
#define C9  9
#define C10 10
#define C11 11
#define C12 12
#define C13 13
#define C14 14
#define C15 15

/* Build an aarch64_sys_reg table initializer.  */
#define SYSREG(name, encoding, flags, features) \
  { name, encoding, flags, features }

/* Register available in the base architecture (no feature gate).  */
#define SR_CORE(n,e,f) SYSREG (n,e,f,0)

/* Register gated on a single architecture extension FEAT.  */
#define SR_FEAT(n,e,f,feat) \
  SYSREG ((n), (e), (f) | F_ARCHEXT, AARCH64_FEATURE_##feat)

/* Register gated on either of two architecture extensions.  */
#define SR_FEAT2(n,e,f,fe1,fe2) \
  SYSREG ((n), (e), (f) | F_ARCHEXT, \
	  AARCH64_FEATURE_##fe1 | AARCH64_FEATURE_##fe2)

#define SR_V8_1_A(n,e,f) SR_FEAT2(n,e,f,V8_A,V8_1)
#define SR_V8_4_A(n,e,f) SR_FEAT2(n,e,f,V8_A,V8_4)

/* Per-architecture-version convenience wrappers.  */
#define SR_V8_A(n,e,f)	  SR_FEAT (n,e,f,V8_A)
#define SR_V8_R(n,e,f)	  SR_FEAT (n,e,f,V8_R)
#define SR_V8_1(n,e,f)	  SR_FEAT (n,e,f,V8_1)
#define SR_V8_2(n,e,f)	  SR_FEAT (n,e,f,V8_2)
#define SR_V8_3(n,e,f)	  SR_FEAT (n,e,f,V8_3)
#define SR_V8_4(n,e,f)	  SR_FEAT (n,e,f,V8_4)
#define SR_V8_6(n,e,f)	  SR_FEAT (n,e,f,V8_6)
#define SR_V8_7(n,e,f)	  SR_FEAT (n,e,f,V8_7)
#define SR_V8_8(n,e,f)	  SR_FEAT (n,e,f,V8_8)
/* Has no separate libopcodes feature flag, but separated out for clarity.  */
#define SR_GIC(n,e,f)	  SR_CORE (n,e,f)
/* Has no separate libopcodes feature flag, but separated out for clarity.  */
#define SR_AMU(n,e,f)	  SR_FEAT (n,e,f,V8_4)
#define SR_LOR(n,e,f)	  SR_FEAT (n,e,f,LOR)
#define SR_PAN(n,e,f)	  SR_FEAT (n,e,f,PAN)
#define SR_RAS(n,e,f)	  SR_FEAT (n,e,f,RAS)
#define SR_RNG(n,e,f)	  SR_FEAT (n,e,f,RNG)
#define SR_SME(n,e,f)	  SR_FEAT (n,e,f,SME)
#define SR_SSBS(n,e,f)	  SR_FEAT (n,e,f,SSBS)
#define SR_SVE(n,e,f)	  SR_FEAT (n,e,f,SVE)
#define SR_ID_PFR2(n,e,f) SR_FEAT (n,e,f,ID_PFR2)
#define SR_PROFILE(n,e,f) SR_FEAT (n,e,f,PROFILE)
#define SR_MEMTAG(n,e,f)  SR_FEAT (n,e,f,MEMTAG)
#define SR_SCXTNUM(n,e,f) SR_FEAT (n,e,f,SCXTNUM)

/* Expand F (X, n) for exception levels n = 1..15.  */
#define SR_EXPAND_ELx(f,x) \
  f (x, 1),  \
  f (x, 2),  \
  f (x, 3),  \
  f (x, 4),  \
  f (x, 5),  \
  f (x, 6),  \
  f (x, 7),  \
  f (x, 8),  \
  f (x, 9),  \
  f (x, 10), \
  f (x, 11), \
  f (x, 12), \
  f (x, 13), \
  f (x, 14), \
  f (x, 15),

/* Expand F for the EL1 and EL2 variants.  */
#define SR_EXPAND_EL12(f) \
  SR_EXPAND_ELx (f,1) \
  SR_EXPAND_ELx (f,2)
/* TODO: there is one more issue that needs to be resolved:
   1. handle cpu-implementation-defined system registers.

   Note that the F_REG_{READ,WRITE} flags mean read-only and write-only
   respectively.  If neither of these are set then the register is read-write.  */
  3659. const aarch64_sys_reg aarch64_sys_regs [] =
  3660. {
  3661. SR_CORE ("spsr_el1", CPEN_ (0,C0,0), 0), /* = spsr_svc. */
  3662. SR_V8_1 ("spsr_el12", CPEN_ (5,C0,0), 0),
  3663. SR_CORE ("elr_el1", CPEN_ (0,C0,1), 0),
  3664. SR_V8_1 ("elr_el12", CPEN_ (5,C0,1), 0),
  3665. SR_CORE ("sp_el0", CPEN_ (0,C1,0), 0),
  3666. SR_CORE ("spsel", CPEN_ (0,C2,0), 0),
  3667. SR_CORE ("daif", CPEN_ (3,C2,1), 0),
  3668. SR_CORE ("currentel", CPEN_ (0,C2,2), F_REG_READ),
  3669. SR_PAN ("pan", CPEN_ (0,C2,3), 0),
  3670. SR_V8_2 ("uao", CPEN_ (0,C2,4), 0),
  3671. SR_CORE ("nzcv", CPEN_ (3,C2,0), 0),
  3672. SR_SSBS ("ssbs", CPEN_ (3,C2,6), 0),
  3673. SR_CORE ("fpcr", CPEN_ (3,C4,0), 0),
  3674. SR_CORE ("fpsr", CPEN_ (3,C4,1), 0),
  3675. SR_CORE ("dspsr_el0", CPEN_ (3,C5,0), 0),
  3676. SR_CORE ("dlr_el0", CPEN_ (3,C5,1), 0),
  3677. SR_CORE ("spsr_el2", CPEN_ (4,C0,0), 0), /* = spsr_hyp. */
  3678. SR_CORE ("elr_el2", CPEN_ (4,C0,1), 0),
  3679. SR_CORE ("sp_el1", CPEN_ (4,C1,0), 0),
  3680. SR_CORE ("spsr_irq", CPEN_ (4,C3,0), 0),
  3681. SR_CORE ("spsr_abt", CPEN_ (4,C3,1), 0),
  3682. SR_CORE ("spsr_und", CPEN_ (4,C3,2), 0),
  3683. SR_CORE ("spsr_fiq", CPEN_ (4,C3,3), 0),
  3684. SR_CORE ("spsr_el3", CPEN_ (6,C0,0), 0),
  3685. SR_CORE ("elr_el3", CPEN_ (6,C0,1), 0),
  3686. SR_CORE ("sp_el2", CPEN_ (6,C1,0), 0),
  3687. SR_CORE ("spsr_svc", CPEN_ (0,C0,0), F_DEPRECATED), /* = spsr_el1. */
  3688. SR_CORE ("spsr_hyp", CPEN_ (4,C0,0), F_DEPRECATED), /* = spsr_el2. */
  3689. SR_CORE ("midr_el1", CPENC (3,0,C0,C0,0), F_REG_READ),
  3690. SR_CORE ("ctr_el0", CPENC (3,3,C0,C0,1), F_REG_READ),
  3691. SR_CORE ("mpidr_el1", CPENC (3,0,C0,C0,5), F_REG_READ),
  3692. SR_CORE ("revidr_el1", CPENC (3,0,C0,C0,6), F_REG_READ),
  3693. SR_CORE ("aidr_el1", CPENC (3,1,C0,C0,7), F_REG_READ),
  3694. SR_CORE ("dczid_el0", CPENC (3,3,C0,C0,7), F_REG_READ),
  3695. SR_CORE ("id_dfr0_el1", CPENC (3,0,C0,C1,2), F_REG_READ),
  3696. SR_CORE ("id_dfr1_el1", CPENC (3,0,C0,C3,5), F_REG_READ),
  3697. SR_CORE ("id_pfr0_el1", CPENC (3,0,C0,C1,0), F_REG_READ),
  3698. SR_CORE ("id_pfr1_el1", CPENC (3,0,C0,C1,1), F_REG_READ),
  3699. SR_ID_PFR2 ("id_pfr2_el1", CPENC (3,0,C0,C3,4), F_REG_READ),
  3700. SR_CORE ("id_afr0_el1", CPENC (3,0,C0,C1,3), F_REG_READ),
  3701. SR_CORE ("id_mmfr0_el1", CPENC (3,0,C0,C1,4), F_REG_READ),
  3702. SR_CORE ("id_mmfr1_el1", CPENC (3,0,C0,C1,5), F_REG_READ),
  3703. SR_CORE ("id_mmfr2_el1", CPENC (3,0,C0,C1,6), F_REG_READ),
  3704. SR_CORE ("id_mmfr3_el1", CPENC (3,0,C0,C1,7), F_REG_READ),
  3705. SR_CORE ("id_mmfr4_el1", CPENC (3,0,C0,C2,6), F_REG_READ),
  3706. SR_CORE ("id_mmfr5_el1", CPENC (3,0,C0,C3,6), F_REG_READ),
  3707. SR_CORE ("id_isar0_el1", CPENC (3,0,C0,C2,0), F_REG_READ),
  3708. SR_CORE ("id_isar1_el1", CPENC (3,0,C0,C2,1), F_REG_READ),
  3709. SR_CORE ("id_isar2_el1", CPENC (3,0,C0,C2,2), F_REG_READ),
  3710. SR_CORE ("id_isar3_el1", CPENC (3,0,C0,C2,3), F_REG_READ),
  3711. SR_CORE ("id_isar4_el1", CPENC (3,0,C0,C2,4), F_REG_READ),
  3712. SR_CORE ("id_isar5_el1", CPENC (3,0,C0,C2,5), F_REG_READ),
  3713. SR_CORE ("id_isar6_el1", CPENC (3,0,C0,C2,7), F_REG_READ),
  3714. SR_CORE ("mvfr0_el1", CPENC (3,0,C0,C3,0), F_REG_READ),
  3715. SR_CORE ("mvfr1_el1", CPENC (3,0,C0,C3,1), F_REG_READ),
  3716. SR_CORE ("mvfr2_el1", CPENC (3,0,C0,C3,2), F_REG_READ),
  3717. SR_CORE ("ccsidr_el1", CPENC (3,1,C0,C0,0), F_REG_READ),
  3718. SR_V8_3 ("ccsidr2_el1", CPENC (3,1,C0,C0,2), F_REG_READ),
  3719. SR_CORE ("id_aa64pfr0_el1", CPENC (3,0,C0,C4,0), F_REG_READ),
  3720. SR_CORE ("id_aa64pfr1_el1", CPENC (3,0,C0,C4,1), F_REG_READ),
  3721. SR_CORE ("id_aa64dfr0_el1", CPENC (3,0,C0,C5,0), F_REG_READ),
  3722. SR_CORE ("id_aa64dfr1_el1", CPENC (3,0,C0,C5,1), F_REG_READ),
  3723. SR_CORE ("id_aa64isar0_el1", CPENC (3,0,C0,C6,0), F_REG_READ),
  3724. SR_CORE ("id_aa64isar1_el1", CPENC (3,0,C0,C6,1), F_REG_READ),
  3725. SR_CORE ("id_aa64isar2_el1", CPENC (3,0,C0,C6,2), F_REG_READ),
  3726. SR_CORE ("id_aa64mmfr0_el1", CPENC (3,0,C0,C7,0), F_REG_READ),
  3727. SR_CORE ("id_aa64mmfr1_el1", CPENC (3,0,C0,C7,1), F_REG_READ),
  3728. SR_CORE ("id_aa64mmfr2_el1", CPENC (3,0,C0,C7,2), F_REG_READ),
  3729. SR_CORE ("id_aa64afr0_el1", CPENC (3,0,C0,C5,4), F_REG_READ),
  3730. SR_CORE ("id_aa64afr1_el1", CPENC (3,0,C0,C5,5), F_REG_READ),
  3731. SR_SVE ("id_aa64zfr0_el1", CPENC (3,0,C0,C4,4), F_REG_READ),
  3732. SR_CORE ("clidr_el1", CPENC (3,1,C0,C0,1), F_REG_READ),
  3733. SR_CORE ("csselr_el1", CPENC (3,2,C0,C0,0), 0),
  3734. SR_CORE ("vpidr_el2", CPENC (3,4,C0,C0,0), 0),
  3735. SR_CORE ("vmpidr_el2", CPENC (3,4,C0,C0,5), 0),
  3736. SR_CORE ("sctlr_el1", CPENC (3,0,C1,C0,0), 0),
  3737. SR_CORE ("sctlr_el2", CPENC (3,4,C1,C0,0), 0),
  3738. SR_CORE ("sctlr_el3", CPENC (3,6,C1,C0,0), 0),
  3739. SR_V8_1 ("sctlr_el12", CPENC (3,5,C1,C0,0), 0),
  3740. SR_CORE ("actlr_el1", CPENC (3,0,C1,C0,1), 0),
  3741. SR_CORE ("actlr_el2", CPENC (3,4,C1,C0,1), 0),
  3742. SR_CORE ("actlr_el3", CPENC (3,6,C1,C0,1), 0),
  3743. SR_CORE ("cpacr_el1", CPENC (3,0,C1,C0,2), 0),
  3744. SR_V8_1 ("cpacr_el12", CPENC (3,5,C1,C0,2), 0),
  3745. SR_CORE ("cptr_el2", CPENC (3,4,C1,C1,2), 0),
  3746. SR_CORE ("cptr_el3", CPENC (3,6,C1,C1,2), 0),
  3747. SR_CORE ("scr_el3", CPENC (3,6,C1,C1,0), 0),
  3748. SR_CORE ("hcr_el2", CPENC (3,4,C1,C1,0), 0),
  3749. SR_CORE ("mdcr_el2", CPENC (3,4,C1,C1,1), 0),
  3750. SR_CORE ("mdcr_el3", CPENC (3,6,C1,C3,1), 0),
  3751. SR_CORE ("hstr_el2", CPENC (3,4,C1,C1,3), 0),
  3752. SR_CORE ("hacr_el2", CPENC (3,4,C1,C1,7), 0),
  3753. SR_SVE ("zcr_el1", CPENC (3,0,C1,C2,0), 0),
  3754. SR_SVE ("zcr_el12", CPENC (3,5,C1,C2,0), 0),
  3755. SR_SVE ("zcr_el2", CPENC (3,4,C1,C2,0), 0),
  3756. SR_SVE ("zcr_el3", CPENC (3,6,C1,C2,0), 0),
  3757. SR_CORE ("ttbr0_el1", CPENC (3,0,C2,C0,0), 0),
  3758. SR_CORE ("ttbr1_el1", CPENC (3,0,C2,C0,1), 0),
  3759. SR_V8_A ("ttbr0_el2", CPENC (3,4,C2,C0,0), 0),
  3760. SR_V8_1_A ("ttbr1_el2", CPENC (3,4,C2,C0,1), 0),
  3761. SR_CORE ("ttbr0_el3", CPENC (3,6,C2,C0,0), 0),
  3762. SR_V8_1 ("ttbr0_el12", CPENC (3,5,C2,C0,0), 0),
  3763. SR_V8_1 ("ttbr1_el12", CPENC (3,5,C2,C0,1), 0),
  3764. SR_V8_A ("vttbr_el2", CPENC (3,4,C2,C1,0), 0),
  3765. SR_CORE ("tcr_el1", CPENC (3,0,C2,C0,2), 0),
  3766. SR_CORE ("tcr_el2", CPENC (3,4,C2,C0,2), 0),
  3767. SR_CORE ("tcr_el3", CPENC (3,6,C2,C0,2), 0),
  3768. SR_V8_1 ("tcr_el12", CPENC (3,5,C2,C0,2), 0),
  3769. SR_CORE ("vtcr_el2", CPENC (3,4,C2,C1,2), 0),
  3770. SR_V8_3 ("apiakeylo_el1", CPENC (3,0,C2,C1,0), 0),
  3771. SR_V8_3 ("apiakeyhi_el1", CPENC (3,0,C2,C1,1), 0),
  3772. SR_V8_3 ("apibkeylo_el1", CPENC (3,0,C2,C1,2), 0),
  3773. SR_V8_3 ("apibkeyhi_el1", CPENC (3,0,C2,C1,3), 0),
  3774. SR_V8_3 ("apdakeylo_el1", CPENC (3,0,C2,C2,0), 0),
  3775. SR_V8_3 ("apdakeyhi_el1", CPENC (3,0,C2,C2,1), 0),
  3776. SR_V8_3 ("apdbkeylo_el1", CPENC (3,0,C2,C2,2), 0),
  3777. SR_V8_3 ("apdbkeyhi_el1", CPENC (3,0,C2,C2,3), 0),
  3778. SR_V8_3 ("apgakeylo_el1", CPENC (3,0,C2,C3,0), 0),
  3779. SR_V8_3 ("apgakeyhi_el1", CPENC (3,0,C2,C3,1), 0),
  3780. SR_CORE ("afsr0_el1", CPENC (3,0,C5,C1,0), 0),
  3781. SR_CORE ("afsr1_el1", CPENC (3,0,C5,C1,1), 0),
  3782. SR_CORE ("afsr0_el2", CPENC (3,4,C5,C1,0), 0),
  3783. SR_CORE ("afsr1_el2", CPENC (3,4,C5,C1,1), 0),
  3784. SR_CORE ("afsr0_el3", CPENC (3,6,C5,C1,0), 0),
  3785. SR_V8_1 ("afsr0_el12", CPENC (3,5,C5,C1,0), 0),
  3786. SR_CORE ("afsr1_el3", CPENC (3,6,C5,C1,1), 0),
  3787. SR_V8_1 ("afsr1_el12", CPENC (3,5,C5,C1,1), 0),
  3788. SR_CORE ("esr_el1", CPENC (3,0,C5,C2,0), 0),
  3789. SR_CORE ("esr_el2", CPENC (3,4,C5,C2,0), 0),
  3790. SR_CORE ("esr_el3", CPENC (3,6,C5,C2,0), 0),
  3791. SR_V8_1 ("esr_el12", CPENC (3,5,C5,C2,0), 0),
  3792. SR_RAS ("vsesr_el2", CPENC (3,4,C5,C2,3), 0),
  3793. SR_CORE ("fpexc32_el2", CPENC (3,4,C5,C3,0), 0),
  3794. SR_RAS ("erridr_el1", CPENC (3,0,C5,C3,0), F_REG_READ),
  3795. SR_RAS ("errselr_el1", CPENC (3,0,C5,C3,1), 0),
  3796. SR_RAS ("erxfr_el1", CPENC (3,0,C5,C4,0), F_REG_READ),
  3797. SR_RAS ("erxctlr_el1", CPENC (3,0,C5,C4,1), 0),
  3798. SR_RAS ("erxstatus_el1", CPENC (3,0,C5,C4,2), 0),
  3799. SR_RAS ("erxaddr_el1", CPENC (3,0,C5,C4,3), 0),
  3800. SR_RAS ("erxmisc0_el1", CPENC (3,0,C5,C5,0), 0),
  3801. SR_RAS ("erxmisc1_el1", CPENC (3,0,C5,C5,1), 0),
  3802. SR_RAS ("erxmisc2_el1", CPENC (3,0,C5,C5,2), 0),
  3803. SR_RAS ("erxmisc3_el1", CPENC (3,0,C5,C5,3), 0),
  3804. SR_RAS ("erxpfgcdn_el1", CPENC (3,0,C5,C4,6), 0),
  3805. SR_RAS ("erxpfgctl_el1", CPENC (3,0,C5,C4,5), 0),
  3806. SR_RAS ("erxpfgf_el1", CPENC (3,0,C5,C4,4), F_REG_READ),
  3807. SR_CORE ("far_el1", CPENC (3,0,C6,C0,0), 0),
  3808. SR_CORE ("far_el2", CPENC (3,4,C6,C0,0), 0),
  3809. SR_CORE ("far_el3", CPENC (3,6,C6,C0,0), 0),
  3810. SR_V8_1 ("far_el12", CPENC (3,5,C6,C0,0), 0),
  3811. SR_CORE ("hpfar_el2", CPENC (3,4,C6,C0,4), 0),
  3812. SR_CORE ("par_el1", CPENC (3,0,C7,C4,0), 0),
  3813. SR_CORE ("mair_el1", CPENC (3,0,C10,C2,0), 0),
  3814. SR_CORE ("mair_el2", CPENC (3,4,C10,C2,0), 0),
  3815. SR_CORE ("mair_el3", CPENC (3,6,C10,C2,0), 0),
  3816. SR_V8_1 ("mair_el12", CPENC (3,5,C10,C2,0), 0),
  3817. SR_CORE ("amair_el1", CPENC (3,0,C10,C3,0), 0),
  3818. SR_CORE ("amair_el2", CPENC (3,4,C10,C3,0), 0),
  3819. SR_CORE ("amair_el3", CPENC (3,6,C10,C3,0), 0),
  3820. SR_V8_1 ("amair_el12", CPENC (3,5,C10,C3,0), 0),
  3821. SR_CORE ("vbar_el1", CPENC (3,0,C12,C0,0), 0),
  3822. SR_CORE ("vbar_el2", CPENC (3,4,C12,C0,0), 0),
  3823. SR_CORE ("vbar_el3", CPENC (3,6,C12,C0,0), 0),
  3824. SR_V8_1 ("vbar_el12", CPENC (3,5,C12,C0,0), 0),
  3825. SR_CORE ("rvbar_el1", CPENC (3,0,C12,C0,1), F_REG_READ),
  3826. SR_CORE ("rvbar_el2", CPENC (3,4,C12,C0,1), F_REG_READ),
  3827. SR_CORE ("rvbar_el3", CPENC (3,6,C12,C0,1), F_REG_READ),
  3828. SR_CORE ("rmr_el1", CPENC (3,0,C12,C0,2), 0),
  3829. SR_CORE ("rmr_el2", CPENC (3,4,C12,C0,2), 0),
  3830. SR_CORE ("rmr_el3", CPENC (3,6,C12,C0,2), 0),
  3831. SR_CORE ("isr_el1", CPENC (3,0,C12,C1,0), F_REG_READ),
  3832. SR_RAS ("disr_el1", CPENC (3,0,C12,C1,1), 0),
  3833. SR_RAS ("vdisr_el2", CPENC (3,4,C12,C1,1), 0),
  3834. SR_CORE ("contextidr_el1", CPENC (3,0,C13,C0,1), 0),
  3835. SR_V8_1 ("contextidr_el2", CPENC (3,4,C13,C0,1), 0),
  3836. SR_V8_1 ("contextidr_el12", CPENC (3,5,C13,C0,1), 0),
  3837. SR_RNG ("rndr", CPENC (3,3,C2,C4,0), F_REG_READ),
  3838. SR_RNG ("rndrrs", CPENC (3,3,C2,C4,1), F_REG_READ),
  3839. SR_MEMTAG ("tco", CPENC (3,3,C4,C2,7), 0),
  3840. SR_MEMTAG ("tfsre0_el1", CPENC (3,0,C5,C6,1), 0),
  3841. SR_MEMTAG ("tfsr_el1", CPENC (3,0,C5,C6,0), 0),
  3842. SR_MEMTAG ("tfsr_el2", CPENC (3,4,C5,C6,0), 0),
  3843. SR_MEMTAG ("tfsr_el3", CPENC (3,6,C5,C6,0), 0),
  3844. SR_MEMTAG ("tfsr_el12", CPENC (3,5,C5,C6,0), 0),
  3845. SR_MEMTAG ("rgsr_el1", CPENC (3,0,C1,C0,5), 0),
  3846. SR_MEMTAG ("gcr_el1", CPENC (3,0,C1,C0,6), 0),
  3847. SR_MEMTAG ("gmid_el1", CPENC (3,1,C0,C0,4), F_REG_READ),
  3848. SR_CORE ("tpidr_el0", CPENC (3,3,C13,C0,2), 0),
  3849. SR_CORE ("tpidrro_el0", CPENC (3,3,C13,C0,3), 0),
  3850. SR_CORE ("tpidr_el1", CPENC (3,0,C13,C0,4), 0),
  3851. SR_CORE ("tpidr_el2", CPENC (3,4,C13,C0,2), 0),
  3852. SR_CORE ("tpidr_el3", CPENC (3,6,C13,C0,2), 0),
  3853. SR_SCXTNUM ("scxtnum_el0", CPENC (3,3,C13,C0,7), 0),
  3854. SR_SCXTNUM ("scxtnum_el1", CPENC (3,0,C13,C0,7), 0),
  3855. SR_SCXTNUM ("scxtnum_el2", CPENC (3,4,C13,C0,7), 0),
  3856. SR_SCXTNUM ("scxtnum_el12", CPENC (3,5,C13,C0,7), 0),
  3857. SR_SCXTNUM ("scxtnum_el3", CPENC (3,6,C13,C0,7), 0),
  3858. SR_CORE ("teecr32_el1", CPENC (2,2,C0, C0,0), 0), /* See section 3.9.7.1. */
  3859. SR_CORE ("cntfrq_el0", CPENC (3,3,C14,C0,0), 0),
  3860. SR_CORE ("cntpct_el0", CPENC (3,3,C14,C0,1), F_REG_READ),
  3861. SR_CORE ("cntvct_el0", CPENC (3,3,C14,C0,2), F_REG_READ),
  3862. SR_CORE ("cntvoff_el2", CPENC (3,4,C14,C0,3), 0),
  3863. SR_CORE ("cntkctl_el1", CPENC (3,0,C14,C1,0), 0),
  3864. SR_V8_1 ("cntkctl_el12", CPENC (3,5,C14,C1,0), 0),
  3865. SR_CORE ("cnthctl_el2", CPENC (3,4,C14,C1,0), 0),
  3866. SR_CORE ("cntp_tval_el0", CPENC (3,3,C14,C2,0), 0),
  3867. SR_V8_1 ("cntp_tval_el02", CPENC (3,5,C14,C2,0), 0),
  3868. SR_CORE ("cntp_ctl_el0", CPENC (3,3,C14,C2,1), 0),
  3869. SR_V8_1 ("cntp_ctl_el02", CPENC (3,5,C14,C2,1), 0),
  3870. SR_CORE ("cntp_cval_el0", CPENC (3,3,C14,C2,2), 0),
  3871. SR_V8_1 ("cntp_cval_el02", CPENC (3,5,C14,C2,2), 0),
  3872. SR_CORE ("cntv_tval_el0", CPENC (3,3,C14,C3,0), 0),
  3873. SR_V8_1 ("cntv_tval_el02", CPENC (3,5,C14,C3,0), 0),
  3874. SR_CORE ("cntv_ctl_el0", CPENC (3,3,C14,C3,1), 0),
  3875. SR_V8_1 ("cntv_ctl_el02", CPENC (3,5,C14,C3,1), 0),
  3876. SR_CORE ("cntv_cval_el0", CPENC (3,3,C14,C3,2), 0),
  3877. SR_V8_1 ("cntv_cval_el02", CPENC (3,5,C14,C3,2), 0),
  3878. SR_CORE ("cnthp_tval_el2", CPENC (3,4,C14,C2,0), 0),
  3879. SR_CORE ("cnthp_ctl_el2", CPENC (3,4,C14,C2,1), 0),
  3880. SR_CORE ("cnthp_cval_el2", CPENC (3,4,C14,C2,2), 0),
  3881. SR_CORE ("cntps_tval_el1", CPENC (3,7,C14,C2,0), 0),
  3882. SR_CORE ("cntps_ctl_el1", CPENC (3,7,C14,C2,1), 0),
  3883. SR_CORE ("cntps_cval_el1", CPENC (3,7,C14,C2,2), 0),
  3884. SR_V8_1 ("cnthv_tval_el2", CPENC (3,4,C14,C3,0), 0),
  3885. SR_V8_1 ("cnthv_ctl_el2", CPENC (3,4,C14,C3,1), 0),
  3886. SR_V8_1 ("cnthv_cval_el2", CPENC (3,4,C14,C3,2), 0),
  3887. SR_CORE ("dacr32_el2", CPENC (3,4,C3,C0,0), 0),
  3888. SR_CORE ("ifsr32_el2", CPENC (3,4,C5,C0,1), 0),
  3889. SR_CORE ("teehbr32_el1", CPENC (2,2,C1,C0,0), 0),
  3890. SR_CORE ("sder32_el3", CPENC (3,6,C1,C1,1), 0),
  3891. SR_CORE ("mdscr_el1", CPENC (2,0,C0,C2,2), 0),
  3892. SR_CORE ("mdccsr_el0", CPENC (2,3,C0,C1,0), F_REG_READ),
  3893. SR_CORE ("mdccint_el1", CPENC (2,0,C0,C2,0), 0),
  3894. SR_CORE ("dbgdtr_el0", CPENC (2,3,C0,C4,0), 0),
  3895. SR_CORE ("dbgdtrrx_el0", CPENC (2,3,C0,C5,0), F_REG_READ),
  3896. SR_CORE ("dbgdtrtx_el0", CPENC (2,3,C0,C5,0), F_REG_WRITE),
  3897. SR_CORE ("osdtrrx_el1", CPENC (2,0,C0,C0,2), 0),
  3898. SR_CORE ("osdtrtx_el1", CPENC (2,0,C0,C3,2), 0),
  3899. SR_CORE ("oseccr_el1", CPENC (2,0,C0,C6,2), 0),
  3900. SR_CORE ("dbgvcr32_el2", CPENC (2,4,C0,C7,0), 0),
  3901. SR_CORE ("dbgbvr0_el1", CPENC (2,0,C0,C0,4), 0),
  3902. SR_CORE ("dbgbvr1_el1", CPENC (2,0,C0,C1,4), 0),
  3903. SR_CORE ("dbgbvr2_el1", CPENC (2,0,C0,C2,4), 0),
  3904. SR_CORE ("dbgbvr3_el1", CPENC (2,0,C0,C3,4), 0),
  3905. SR_CORE ("dbgbvr4_el1", CPENC (2,0,C0,C4,4), 0),
  3906. SR_CORE ("dbgbvr5_el1", CPENC (2,0,C0,C5,4), 0),
  3907. SR_CORE ("dbgbvr6_el1", CPENC (2,0,C0,C6,4), 0),
  3908. SR_CORE ("dbgbvr7_el1", CPENC (2,0,C0,C7,4), 0),
  3909. SR_CORE ("dbgbvr8_el1", CPENC (2,0,C0,C8,4), 0),
  3910. SR_CORE ("dbgbvr9_el1", CPENC (2,0,C0,C9,4), 0),
  3911. SR_CORE ("dbgbvr10_el1", CPENC (2,0,C0,C10,4), 0),
  3912. SR_CORE ("dbgbvr11_el1", CPENC (2,0,C0,C11,4), 0),
  3913. SR_CORE ("dbgbvr12_el1", CPENC (2,0,C0,C12,4), 0),
  3914. SR_CORE ("dbgbvr13_el1", CPENC (2,0,C0,C13,4), 0),
  3915. SR_CORE ("dbgbvr14_el1", CPENC (2,0,C0,C14,4), 0),
  3916. SR_CORE ("dbgbvr15_el1", CPENC (2,0,C0,C15,4), 0),
  3917. SR_CORE ("dbgbcr0_el1", CPENC (2,0,C0,C0,5), 0),
  3918. SR_CORE ("dbgbcr1_el1", CPENC (2,0,C0,C1,5), 0),
  3919. SR_CORE ("dbgbcr2_el1", CPENC (2,0,C0,C2,5), 0),
  3920. SR_CORE ("dbgbcr3_el1", CPENC (2,0,C0,C3,5), 0),
  3921. SR_CORE ("dbgbcr4_el1", CPENC (2,0,C0,C4,5), 0),
  3922. SR_CORE ("dbgbcr5_el1", CPENC (2,0,C0,C5,5), 0),
  3923. SR_CORE ("dbgbcr6_el1", CPENC (2,0,C0,C6,5), 0),
  3924. SR_CORE ("dbgbcr7_el1", CPENC (2,0,C0,C7,5), 0),
  3925. SR_CORE ("dbgbcr8_el1", CPENC (2,0,C0,C8,5), 0),
  3926. SR_CORE ("dbgbcr9_el1", CPENC (2,0,C0,C9,5), 0),
  3927. SR_CORE ("dbgbcr10_el1", CPENC (2,0,C0,C10,5), 0),
  3928. SR_CORE ("dbgbcr11_el1", CPENC (2,0,C0,C11,5), 0),
  3929. SR_CORE ("dbgbcr12_el1", CPENC (2,0,C0,C12,5), 0),
  3930. SR_CORE ("dbgbcr13_el1", CPENC (2,0,C0,C13,5), 0),
  3931. SR_CORE ("dbgbcr14_el1", CPENC (2,0,C0,C14,5), 0),
  3932. SR_CORE ("dbgbcr15_el1", CPENC (2,0,C0,C15,5), 0),
  3933. SR_CORE ("dbgwvr0_el1", CPENC (2,0,C0,C0,6), 0),
  3934. SR_CORE ("dbgwvr1_el1", CPENC (2,0,C0,C1,6), 0),
  3935. SR_CORE ("dbgwvr2_el1", CPENC (2,0,C0,C2,6), 0),
  3936. SR_CORE ("dbgwvr3_el1", CPENC (2,0,C0,C3,6), 0),
  3937. SR_CORE ("dbgwvr4_el1", CPENC (2,0,C0,C4,6), 0),
  3938. SR_CORE ("dbgwvr5_el1", CPENC (2,0,C0,C5,6), 0),
  3939. SR_CORE ("dbgwvr6_el1", CPENC (2,0,C0,C6,6), 0),
  3940. SR_CORE ("dbgwvr7_el1", CPENC (2,0,C0,C7,6), 0),
  3941. SR_CORE ("dbgwvr8_el1", CPENC (2,0,C0,C8,6), 0),
  3942. SR_CORE ("dbgwvr9_el1", CPENC (2,0,C0,C9,6), 0),
  3943. SR_CORE ("dbgwvr10_el1", CPENC (2,0,C0,C10,6), 0),
  3944. SR_CORE ("dbgwvr11_el1", CPENC (2,0,C0,C11,6), 0),
  3945. SR_CORE ("dbgwvr12_el1", CPENC (2,0,C0,C12,6), 0),
  3946. SR_CORE ("dbgwvr13_el1", CPENC (2,0,C0,C13,6), 0),
  3947. SR_CORE ("dbgwvr14_el1", CPENC (2,0,C0,C14,6), 0),
  3948. SR_CORE ("dbgwvr15_el1", CPENC (2,0,C0,C15,6), 0),
  3949. SR_CORE ("dbgwcr0_el1", CPENC (2,0,C0,C0,7), 0),
  3950. SR_CORE ("dbgwcr1_el1", CPENC (2,0,C0,C1,7), 0),
  3951. SR_CORE ("dbgwcr2_el1", CPENC (2,0,C0,C2,7), 0),
  3952. SR_CORE ("dbgwcr3_el1", CPENC (2,0,C0,C3,7), 0),
  3953. SR_CORE ("dbgwcr4_el1", CPENC (2,0,C0,C4,7), 0),
  3954. SR_CORE ("dbgwcr5_el1", CPENC (2,0,C0,C5,7), 0),
  3955. SR_CORE ("dbgwcr6_el1", CPENC (2,0,C0,C6,7), 0),
  3956. SR_CORE ("dbgwcr7_el1", CPENC (2,0,C0,C7,7), 0),
  3957. SR_CORE ("dbgwcr8_el1", CPENC (2,0,C0,C8,7), 0),
  3958. SR_CORE ("dbgwcr9_el1", CPENC (2,0,C0,C9,7), 0),
  3959. SR_CORE ("dbgwcr10_el1", CPENC (2,0,C0,C10,7), 0),
  3960. SR_CORE ("dbgwcr11_el1", CPENC (2,0,C0,C11,7), 0),
  3961. SR_CORE ("dbgwcr12_el1", CPENC (2,0,C0,C12,7), 0),
  3962. SR_CORE ("dbgwcr13_el1", CPENC (2,0,C0,C13,7), 0),
  3963. SR_CORE ("dbgwcr14_el1", CPENC (2,0,C0,C14,7), 0),
  3964. SR_CORE ("dbgwcr15_el1", CPENC (2,0,C0,C15,7), 0),
  3965. SR_CORE ("mdrar_el1", CPENC (2,0,C1,C0,0), F_REG_READ),
  3966. SR_CORE ("oslar_el1", CPENC (2,0,C1,C0,4), F_REG_WRITE),
  3967. SR_CORE ("oslsr_el1", CPENC (2,0,C1,C1,4), F_REG_READ),
  3968. SR_CORE ("osdlr_el1", CPENC (2,0,C1,C3,4), 0),
  3969. SR_CORE ("dbgprcr_el1", CPENC (2,0,C1,C4,4), 0),
  3970. SR_CORE ("dbgclaimset_el1", CPENC (2,0,C7,C8,6), 0),
  3971. SR_CORE ("dbgclaimclr_el1", CPENC (2,0,C7,C9,6), 0),
  3972. SR_CORE ("dbgauthstatus_el1", CPENC (2,0,C7,C14,6), F_REG_READ),
  3973. SR_PROFILE ("pmblimitr_el1", CPENC (3,0,C9,C10,0), 0),
  3974. SR_PROFILE ("pmbptr_el1", CPENC (3,0,C9,C10,1), 0),
  3975. SR_PROFILE ("pmbsr_el1", CPENC (3,0,C9,C10,3), 0),
  3976. SR_PROFILE ("pmbidr_el1", CPENC (3,0,C9,C10,7), F_REG_READ),
  3977. SR_PROFILE ("pmscr_el1", CPENC (3,0,C9,C9,0), 0),
  3978. SR_PROFILE ("pmsicr_el1", CPENC (3,0,C9,C9,2), 0),
  3979. SR_PROFILE ("pmsirr_el1", CPENC (3,0,C9,C9,3), 0),
  3980. SR_PROFILE ("pmsfcr_el1", CPENC (3,0,C9,C9,4), 0),
  3981. SR_PROFILE ("pmsevfr_el1", CPENC (3,0,C9,C9,5), 0),
  3982. SR_PROFILE ("pmslatfr_el1", CPENC (3,0,C9,C9,6), 0),
  3983. SR_PROFILE ("pmsidr_el1", CPENC (3,0,C9,C9,7), F_REG_READ),
  3984. SR_PROFILE ("pmscr_el2", CPENC (3,4,C9,C9,0), 0),
  3985. SR_PROFILE ("pmscr_el12", CPENC (3,5,C9,C9,0), 0),
  3986. SR_CORE ("pmcr_el0", CPENC (3,3,C9,C12,0), 0),
  3987. SR_CORE ("pmcntenset_el0", CPENC (3,3,C9,C12,1), 0),
  3988. SR_CORE ("pmcntenclr_el0", CPENC (3,3,C9,C12,2), 0),
  3989. SR_CORE ("pmovsclr_el0", CPENC (3,3,C9,C12,3), 0),
  3990. SR_CORE ("pmswinc_el0", CPENC (3,3,C9,C12,4), F_REG_WRITE),
  3991. SR_CORE ("pmselr_el0", CPENC (3,3,C9,C12,5), 0),
  3992. SR_CORE ("pmceid0_el0", CPENC (3,3,C9,C12,6), F_REG_READ),
  3993. SR_CORE ("pmceid1_el0", CPENC (3,3,C9,C12,7), F_REG_READ),
  3994. SR_CORE ("pmccntr_el0", CPENC (3,3,C9,C13,0), 0),
  3995. SR_CORE ("pmxevtyper_el0", CPENC (3,3,C9,C13,1), 0),
  3996. SR_CORE ("pmxevcntr_el0", CPENC (3,3,C9,C13,2), 0),
  3997. SR_CORE ("pmuserenr_el0", CPENC (3,3,C9,C14,0), 0),
  3998. SR_CORE ("pmintenset_el1", CPENC (3,0,C9,C14,1), 0),
  3999. SR_CORE ("pmintenclr_el1", CPENC (3,0,C9,C14,2), 0),
  4000. SR_CORE ("pmovsset_el0", CPENC (3,3,C9,C14,3), 0),
  4001. SR_CORE ("pmevcntr0_el0", CPENC (3,3,C14,C8,0), 0),
  4002. SR_CORE ("pmevcntr1_el0", CPENC (3,3,C14,C8,1), 0),
  4003. SR_CORE ("pmevcntr2_el0", CPENC (3,3,C14,C8,2), 0),
  4004. SR_CORE ("pmevcntr3_el0", CPENC (3,3,C14,C8,3), 0),
  4005. SR_CORE ("pmevcntr4_el0", CPENC (3,3,C14,C8,4), 0),
  4006. SR_CORE ("pmevcntr5_el0", CPENC (3,3,C14,C8,5), 0),
  4007. SR_CORE ("pmevcntr6_el0", CPENC (3,3,C14,C8,6), 0),
  4008. SR_CORE ("pmevcntr7_el0", CPENC (3,3,C14,C8,7), 0),
  4009. SR_CORE ("pmevcntr8_el0", CPENC (3,3,C14,C9,0), 0),
  4010. SR_CORE ("pmevcntr9_el0", CPENC (3,3,C14,C9,1), 0),
  4011. SR_CORE ("pmevcntr10_el0", CPENC (3,3,C14,C9,2), 0),
  4012. SR_CORE ("pmevcntr11_el0", CPENC (3,3,C14,C9,3), 0),
  4013. SR_CORE ("pmevcntr12_el0", CPENC (3,3,C14,C9,4), 0),
  4014. SR_CORE ("pmevcntr13_el0", CPENC (3,3,C14,C9,5), 0),
  4015. SR_CORE ("pmevcntr14_el0", CPENC (3,3,C14,C9,6), 0),
  4016. SR_CORE ("pmevcntr15_el0", CPENC (3,3,C14,C9,7), 0),
  4017. SR_CORE ("pmevcntr16_el0", CPENC (3,3,C14,C10,0), 0),
  4018. SR_CORE ("pmevcntr17_el0", CPENC (3,3,C14,C10,1), 0),
  4019. SR_CORE ("pmevcntr18_el0", CPENC (3,3,C14,C10,2), 0),
  4020. SR_CORE ("pmevcntr19_el0", CPENC (3,3,C14,C10,3), 0),
  4021. SR_CORE ("pmevcntr20_el0", CPENC (3,3,C14,C10,4), 0),
  4022. SR_CORE ("pmevcntr21_el0", CPENC (3,3,C14,C10,5), 0),
  4023. SR_CORE ("pmevcntr22_el0", CPENC (3,3,C14,C10,6), 0),
  4024. SR_CORE ("pmevcntr23_el0", CPENC (3,3,C14,C10,7), 0),
  4025. SR_CORE ("pmevcntr24_el0", CPENC (3,3,C14,C11,0), 0),
  4026. SR_CORE ("pmevcntr25_el0", CPENC (3,3,C14,C11,1), 0),
  4027. SR_CORE ("pmevcntr26_el0", CPENC (3,3,C14,C11,2), 0),
  4028. SR_CORE ("pmevcntr27_el0", CPENC (3,3,C14,C11,3), 0),
  4029. SR_CORE ("pmevcntr28_el0", CPENC (3,3,C14,C11,4), 0),
  4030. SR_CORE ("pmevcntr29_el0", CPENC (3,3,C14,C11,5), 0),
  4031. SR_CORE ("pmevcntr30_el0", CPENC (3,3,C14,C11,6), 0),
  4032. SR_CORE ("pmevtyper0_el0", CPENC (3,3,C14,C12,0), 0),
  4033. SR_CORE ("pmevtyper1_el0", CPENC (3,3,C14,C12,1), 0),
  4034. SR_CORE ("pmevtyper2_el0", CPENC (3,3,C14,C12,2), 0),
  4035. SR_CORE ("pmevtyper3_el0", CPENC (3,3,C14,C12,3), 0),
  4036. SR_CORE ("pmevtyper4_el0", CPENC (3,3,C14,C12,4), 0),
  4037. SR_CORE ("pmevtyper5_el0", CPENC (3,3,C14,C12,5), 0),
  4038. SR_CORE ("pmevtyper6_el0", CPENC (3,3,C14,C12,6), 0),
  4039. SR_CORE ("pmevtyper7_el0", CPENC (3,3,C14,C12,7), 0),
  4040. SR_CORE ("pmevtyper8_el0", CPENC (3,3,C14,C13,0), 0),
  4041. SR_CORE ("pmevtyper9_el0", CPENC (3,3,C14,C13,1), 0),
  4042. SR_CORE ("pmevtyper10_el0", CPENC (3,3,C14,C13,2), 0),
  4043. SR_CORE ("pmevtyper11_el0", CPENC (3,3,C14,C13,3), 0),
  4044. SR_CORE ("pmevtyper12_el0", CPENC (3,3,C14,C13,4), 0),
  4045. SR_CORE ("pmevtyper13_el0", CPENC (3,3,C14,C13,5), 0),
  4046. SR_CORE ("pmevtyper14_el0", CPENC (3,3,C14,C13,6), 0),
  4047. SR_CORE ("pmevtyper15_el0", CPENC (3,3,C14,C13,7), 0),
  4048. SR_CORE ("pmevtyper16_el0", CPENC (3,3,C14,C14,0), 0),
  4049. SR_CORE ("pmevtyper17_el0", CPENC (3,3,C14,C14,1), 0),
  4050. SR_CORE ("pmevtyper18_el0", CPENC (3,3,C14,C14,2), 0),
  4051. SR_CORE ("pmevtyper19_el0", CPENC (3,3,C14,C14,3), 0),
  4052. SR_CORE ("pmevtyper20_el0", CPENC (3,3,C14,C14,4), 0),
  4053. SR_CORE ("pmevtyper21_el0", CPENC (3,3,C14,C14,5), 0),
  4054. SR_CORE ("pmevtyper22_el0", CPENC (3,3,C14,C14,6), 0),
  4055. SR_CORE ("pmevtyper23_el0", CPENC (3,3,C14,C14,7), 0),
  4056. SR_CORE ("pmevtyper24_el0", CPENC (3,3,C14,C15,0), 0),
  4057. SR_CORE ("pmevtyper25_el0", CPENC (3,3,C14,C15,1), 0),
  4058. SR_CORE ("pmevtyper26_el0", CPENC (3,3,C14,C15,2), 0),
  4059. SR_CORE ("pmevtyper27_el0", CPENC (3,3,C14,C15,3), 0),
  4060. SR_CORE ("pmevtyper28_el0", CPENC (3,3,C14,C15,4), 0),
  4061. SR_CORE ("pmevtyper29_el0", CPENC (3,3,C14,C15,5), 0),
  4062. SR_CORE ("pmevtyper30_el0", CPENC (3,3,C14,C15,6), 0),
  4063. SR_CORE ("pmccfiltr_el0", CPENC (3,3,C14,C15,7), 0),
  4064. SR_V8_4 ("dit", CPEN_ (3,C2,5), 0),
  4065. SR_V8_4 ("trfcr_el1", CPENC (3,0,C1,C2,1), 0),
  4066. SR_V8_4 ("pmmir_el1", CPENC (3,0,C9,C14,6), F_REG_READ),
  4067. SR_V8_4 ("trfcr_el2", CPENC (3,4,C1,C2,1), 0),
  4068. SR_V8_4 ("vstcr_el2", CPENC (3,4,C2,C6,2), 0),
  4069. SR_V8_4_A ("vsttbr_el2", CPENC (3,4,C2,C6,0), 0),
  4070. SR_V8_4 ("cnthvs_tval_el2", CPENC (3,4,C14,C4,0), 0),
  4071. SR_V8_4 ("cnthvs_cval_el2", CPENC (3,4,C14,C4,2), 0),
  4072. SR_V8_4 ("cnthvs_ctl_el2", CPENC (3,4,C14,C4,1), 0),
  4073. SR_V8_4 ("cnthps_tval_el2", CPENC (3,4,C14,C5,0), 0),
  4074. SR_V8_4 ("cnthps_cval_el2", CPENC (3,4,C14,C5,2), 0),
  4075. SR_V8_4 ("cnthps_ctl_el2", CPENC (3,4,C14,C5,1), 0),
  4076. SR_V8_4 ("sder32_el2", CPENC (3,4,C1,C3,1), 0),
  4077. SR_V8_4 ("vncr_el2", CPENC (3,4,C2,C2,0), 0),
  4078. SR_V8_4 ("trfcr_el12", CPENC (3,5,C1,C2,1), 0),
  4079. SR_CORE ("mpam0_el1", CPENC (3,0,C10,C5,1), 0),
  4080. SR_CORE ("mpam1_el1", CPENC (3,0,C10,C5,0), 0),
  4081. SR_CORE ("mpam1_el12", CPENC (3,5,C10,C5,0), 0),
  4082. SR_CORE ("mpam2_el2", CPENC (3,4,C10,C5,0), 0),
  4083. SR_CORE ("mpam3_el3", CPENC (3,6,C10,C5,0), 0),
  4084. SR_CORE ("mpamhcr_el2", CPENC (3,4,C10,C4,0), 0),
  4085. SR_CORE ("mpamidr_el1", CPENC (3,0,C10,C4,4), F_REG_READ),
  4086. SR_CORE ("mpamvpm0_el2", CPENC (3,4,C10,C6,0), 0),
  4087. SR_CORE ("mpamvpm1_el2", CPENC (3,4,C10,C6,1), 0),
  4088. SR_CORE ("mpamvpm2_el2", CPENC (3,4,C10,C6,2), 0),
  4089. SR_CORE ("mpamvpm3_el2", CPENC (3,4,C10,C6,3), 0),
  4090. SR_CORE ("mpamvpm4_el2", CPENC (3,4,C10,C6,4), 0),
  4091. SR_CORE ("mpamvpm5_el2", CPENC (3,4,C10,C6,5), 0),
  4092. SR_CORE ("mpamvpm6_el2", CPENC (3,4,C10,C6,6), 0),
  4093. SR_CORE ("mpamvpm7_el2", CPENC (3,4,C10,C6,7), 0),
  4094. SR_CORE ("mpamvpmv_el2", CPENC (3,4,C10,C4,1), 0),
  4095. SR_V8_R ("mpuir_el1", CPENC (3,0,C0,C0,4), F_REG_READ),
  4096. SR_V8_R ("mpuir_el2", CPENC (3,4,C0,C0,4), F_REG_READ),
  4097. SR_V8_R ("prbar_el1", CPENC (3,0,C6,C8,0), 0),
  4098. SR_V8_R ("prbar_el2", CPENC (3,4,C6,C8,0), 0),
/* Encode the system-register number for the Armv8-R protection-region
   registers PRBAR<n>_EL<x> / PRLAR<n>_EL<x>: op1 is (x-1) << 2 (0 for
   EL1, 4 for EL2), CRm is 8 | (n >> 1), and op2 is ((n & 1) << 2) | lar,
   where LAR selects the limit (PRLAR) rather than base (PRBAR) register.  */
#define ENC_BARLAR(x,n,lar) \
CPENC (3, (x-1) << 2, C6, 8 | (n >> 1), ((n & 1) << 2) | lar)
/* Table entry for "prbar<n>_el<x>" (lar = 0).  */
#define PRBARn_ELx(x,n) SR_V8_R ("prbar" #n "_el" #x, ENC_BARLAR (x,n,0), 0)
/* Table entry for "prlar<n>_el<x>" (lar = 1).  */
#define PRLARn_ELx(x,n) SR_V8_R ("prlar" #n "_el" #x, ENC_BARLAR (x,n,1), 0)
  4103. SR_EXPAND_EL12 (PRBARn_ELx)
  4104. SR_V8_R ("prenr_el1", CPENC (3,0,C6,C1,1), 0),
  4105. SR_V8_R ("prenr_el2", CPENC (3,4,C6,C1,1), 0),
  4106. SR_V8_R ("prlar_el1", CPENC (3,0,C6,C8,1), 0),
  4107. SR_V8_R ("prlar_el2", CPENC (3,4,C6,C8,1), 0),
  4108. SR_EXPAND_EL12 (PRLARn_ELx)
  4109. SR_V8_R ("prselr_el1", CPENC (3,0,C6,C2,1), 0),
  4110. SR_V8_R ("prselr_el2", CPENC (3,4,C6,C2,1), 0),
  4111. SR_V8_R ("vsctlr_el2", CPENC (3,4,C2,C0,0), 0),
  4112. SR_CORE("trbbaser_el1", CPENC (3,0,C9,C11,2), 0),
  4113. SR_CORE("trbidr_el1", CPENC (3,0,C9,C11,7), F_REG_READ),
  4114. SR_CORE("trblimitr_el1", CPENC (3,0,C9,C11,0), 0),
  4115. SR_CORE("trbmar_el1", CPENC (3,0,C9,C11,4), 0),
  4116. SR_CORE("trbptr_el1", CPENC (3,0,C9,C11,1), 0),
  4117. SR_CORE("trbsr_el1", CPENC (3,0,C9,C11,3), 0),
  4118. SR_CORE("trbtrg_el1", CPENC (3,0,C9,C11,6), 0),
  4119. SR_CORE ("trcauthstatus", CPENC (2,1,C7,C14,6), F_REG_READ),
  4120. SR_CORE ("trccidr0", CPENC (2,1,C7,C12,7), F_REG_READ),
  4121. SR_CORE ("trccidr1", CPENC (2,1,C7,C13,7), F_REG_READ),
  4122. SR_CORE ("trccidr2", CPENC (2,1,C7,C14,7), F_REG_READ),
  4123. SR_CORE ("trccidr3", CPENC (2,1,C7,C15,7), F_REG_READ),
  4124. SR_CORE ("trcdevaff0", CPENC (2,1,C7,C10,6), F_REG_READ),
  4125. SR_CORE ("trcdevaff1", CPENC (2,1,C7,C11,6), F_REG_READ),
  4126. SR_CORE ("trcdevarch", CPENC (2,1,C7,C15,6), F_REG_READ),
  4127. SR_CORE ("trcdevid", CPENC (2,1,C7,C2,7), F_REG_READ),
  4128. SR_CORE ("trcdevtype", CPENC (2,1,C7,C3,7), F_REG_READ),
  4129. SR_CORE ("trcidr0", CPENC (2,1,C0,C8,7), F_REG_READ),
  4130. SR_CORE ("trcidr1", CPENC (2,1,C0,C9,7), F_REG_READ),
  4131. SR_CORE ("trcidr2", CPENC (2,1,C0,C10,7), F_REG_READ),
  4132. SR_CORE ("trcidr3", CPENC (2,1,C0,C11,7), F_REG_READ),
  4133. SR_CORE ("trcidr4", CPENC (2,1,C0,C12,7), F_REG_READ),
  4134. SR_CORE ("trcidr5", CPENC (2,1,C0,C13,7), F_REG_READ),
  4135. SR_CORE ("trcidr6", CPENC (2,1,C0,C14,7), F_REG_READ),
  4136. SR_CORE ("trcidr7", CPENC (2,1,C0,C15,7), F_REG_READ),
  4137. SR_CORE ("trcidr8", CPENC (2,1,C0,C0,6), F_REG_READ),
  4138. SR_CORE ("trcidr9", CPENC (2,1,C0,C1,6), F_REG_READ),
  4139. SR_CORE ("trcidr10", CPENC (2,1,C0,C2,6), F_REG_READ),
  4140. SR_CORE ("trcidr11", CPENC (2,1,C0,C3,6), F_REG_READ),
  4141. SR_CORE ("trcidr12", CPENC (2,1,C0,C4,6), F_REG_READ),
  4142. SR_CORE ("trcidr13", CPENC (2,1,C0,C5,6), F_REG_READ),
  4143. SR_CORE ("trclsr", CPENC (2,1,C7,C13,6), F_REG_READ),
  4144. SR_CORE ("trcoslsr", CPENC (2,1,C1,C1,4), F_REG_READ),
  4145. SR_CORE ("trcpdsr", CPENC (2,1,C1,C5,4), F_REG_READ),
  4146. SR_CORE ("trcpidr0", CPENC (2,1,C7,C8,7), F_REG_READ),
  4147. SR_CORE ("trcpidr1", CPENC (2,1,C7,C9,7), F_REG_READ),
  4148. SR_CORE ("trcpidr2", CPENC (2,1,C7,C10,7), F_REG_READ),
  4149. SR_CORE ("trcpidr3", CPENC (2,1,C7,C11,7), F_REG_READ),
  4150. SR_CORE ("trcpidr4", CPENC (2,1,C7,C4,7), F_REG_READ),
  4151. SR_CORE ("trcpidr5", CPENC (2,1,C7,C5,7), F_REG_READ),
  4152. SR_CORE ("trcpidr6", CPENC (2,1,C7,C6,7), F_REG_READ),
  4153. SR_CORE ("trcpidr7", CPENC (2,1,C7,C7,7), F_REG_READ),
  4154. SR_CORE ("trcstatr", CPENC (2,1,C0,C3,0), F_REG_READ),
  4155. SR_CORE ("trcacatr0", CPENC (2,1,C2,C0,2), 0),
  4156. SR_CORE ("trcacatr1", CPENC (2,1,C2,C2,2), 0),
  4157. SR_CORE ("trcacatr2", CPENC (2,1,C2,C4,2), 0),
  4158. SR_CORE ("trcacatr3", CPENC (2,1,C2,C6,2), 0),
  4159. SR_CORE ("trcacatr4", CPENC (2,1,C2,C8,2), 0),
  4160. SR_CORE ("trcacatr5", CPENC (2,1,C2,C10,2), 0),
  4161. SR_CORE ("trcacatr6", CPENC (2,1,C2,C12,2), 0),
  4162. SR_CORE ("trcacatr7", CPENC (2,1,C2,C14,2), 0),
  4163. SR_CORE ("trcacatr8", CPENC (2,1,C2,C0,3), 0),
  4164. SR_CORE ("trcacatr9", CPENC (2,1,C2,C2,3), 0),
  4165. SR_CORE ("trcacatr10", CPENC (2,1,C2,C4,3), 0),
  4166. SR_CORE ("trcacatr11", CPENC (2,1,C2,C6,3), 0),
  4167. SR_CORE ("trcacatr12", CPENC (2,1,C2,C8,3), 0),
  4168. SR_CORE ("trcacatr13", CPENC (2,1,C2,C10,3), 0),
  4169. SR_CORE ("trcacatr14", CPENC (2,1,C2,C12,3), 0),
  4170. SR_CORE ("trcacatr15", CPENC (2,1,C2,C14,3), 0),
  4171. SR_CORE ("trcacvr0", CPENC (2,1,C2,C0,0), 0),
  4172. SR_CORE ("trcacvr1", CPENC (2,1,C2,C2,0), 0),
  4173. SR_CORE ("trcacvr2", CPENC (2,1,C2,C4,0), 0),
  4174. SR_CORE ("trcacvr3", CPENC (2,1,C2,C6,0), 0),
  4175. SR_CORE ("trcacvr4", CPENC (2,1,C2,C8,0), 0),
  4176. SR_CORE ("trcacvr5", CPENC (2,1,C2,C10,0), 0),
  4177. SR_CORE ("trcacvr6", CPENC (2,1,C2,C12,0), 0),
  4178. SR_CORE ("trcacvr7", CPENC (2,1,C2,C14,0), 0),
  4179. SR_CORE ("trcacvr8", CPENC (2,1,C2,C0,1), 0),
  4180. SR_CORE ("trcacvr9", CPENC (2,1,C2,C2,1), 0),
  4181. SR_CORE ("trcacvr10", CPENC (2,1,C2,C4,1), 0),
  4182. SR_CORE ("trcacvr11", CPENC (2,1,C2,C6,1), 0),
  4183. SR_CORE ("trcacvr12", CPENC (2,1,C2,C8,1), 0),
  4184. SR_CORE ("trcacvr13", CPENC (2,1,C2,C10,1), 0),
  4185. SR_CORE ("trcacvr14", CPENC (2,1,C2,C12,1), 0),
  4186. SR_CORE ("trcacvr15", CPENC (2,1,C2,C14,1), 0),
  4187. SR_CORE ("trcauxctlr", CPENC (2,1,C0,C6,0), 0),
  4188. SR_CORE ("trcbbctlr", CPENC (2,1,C0,C15,0), 0),
  4189. SR_CORE ("trcccctlr", CPENC (2,1,C0,C14,0), 0),
  4190. SR_CORE ("trccidcctlr0", CPENC (2,1,C3,C0,2), 0),
  4191. SR_CORE ("trccidcctlr1", CPENC (2,1,C3,C1,2), 0),
  4192. SR_CORE ("trccidcvr0", CPENC (2,1,C3,C0,0), 0),
  4193. SR_CORE ("trccidcvr1", CPENC (2,1,C3,C2,0), 0),
  4194. SR_CORE ("trccidcvr2", CPENC (2,1,C3,C4,0), 0),
  4195. SR_CORE ("trccidcvr3", CPENC (2,1,C3,C6,0), 0),
  4196. SR_CORE ("trccidcvr4", CPENC (2,1,C3,C8,0), 0),
  4197. SR_CORE ("trccidcvr5", CPENC (2,1,C3,C10,0), 0),
  4198. SR_CORE ("trccidcvr6", CPENC (2,1,C3,C12,0), 0),
  4199. SR_CORE ("trccidcvr7", CPENC (2,1,C3,C14,0), 0),
  4200. SR_CORE ("trcclaimclr", CPENC (2,1,C7,C9,6), 0),
  4201. SR_CORE ("trcclaimset", CPENC (2,1,C7,C8,6), 0),
  4202. SR_CORE ("trccntctlr0", CPENC (2,1,C0,C4,5), 0),
  4203. SR_CORE ("trccntctlr1", CPENC (2,1,C0,C5,5), 0),
  4204. SR_CORE ("trccntctlr2", CPENC (2,1,C0,C6,5), 0),
  4205. SR_CORE ("trccntctlr3", CPENC (2,1,C0,C7,5), 0),
  4206. SR_CORE ("trccntrldvr0", CPENC (2,1,C0,C0,5), 0),
  4207. SR_CORE ("trccntrldvr1", CPENC (2,1,C0,C1,5), 0),
  4208. SR_CORE ("trccntrldvr2", CPENC (2,1,C0,C2,5), 0),
  4209. SR_CORE ("trccntrldvr3", CPENC (2,1,C0,C3,5), 0),
  4210. SR_CORE ("trccntvr0", CPENC (2,1,C0,C8,5), 0),
  4211. SR_CORE ("trccntvr1", CPENC (2,1,C0,C9,5), 0),
  4212. SR_CORE ("trccntvr2", CPENC (2,1,C0,C10,5), 0),
  4213. SR_CORE ("trccntvr3", CPENC (2,1,C0,C11,5), 0),
  4214. SR_CORE ("trcconfigr", CPENC (2,1,C0,C4,0), 0),
  4215. SR_CORE ("trcdvcmr0", CPENC (2,1,C2,C0,6), 0),
  4216. SR_CORE ("trcdvcmr1", CPENC (2,1,C2,C4,6), 0),
  4217. SR_CORE ("trcdvcmr2", CPENC (2,1,C2,C8,6), 0),
  4218. SR_CORE ("trcdvcmr3", CPENC (2,1,C2,C12,6), 0),
  4219. SR_CORE ("trcdvcmr4", CPENC (2,1,C2,C0,7), 0),
  4220. SR_CORE ("trcdvcmr5", CPENC (2,1,C2,C4,7), 0),
  4221. SR_CORE ("trcdvcmr6", CPENC (2,1,C2,C8,7), 0),
  4222. SR_CORE ("trcdvcmr7", CPENC (2,1,C2,C12,7), 0),
  4223. SR_CORE ("trcdvcvr0", CPENC (2,1,C2,C0,4), 0),
  4224. SR_CORE ("trcdvcvr1", CPENC (2,1,C2,C4,4), 0),
  4225. SR_CORE ("trcdvcvr2", CPENC (2,1,C2,C8,4), 0),
  4226. SR_CORE ("trcdvcvr3", CPENC (2,1,C2,C12,4), 0),
  4227. SR_CORE ("trcdvcvr4", CPENC (2,1,C2,C0,5), 0),
  4228. SR_CORE ("trcdvcvr5", CPENC (2,1,C2,C4,5), 0),
  4229. SR_CORE ("trcdvcvr6", CPENC (2,1,C2,C8,5), 0),
  4230. SR_CORE ("trcdvcvr7", CPENC (2,1,C2,C12,5), 0),
  4231. SR_CORE ("trceventctl0r", CPENC (2,1,C0,C8,0), 0),
  4232. SR_CORE ("trceventctl1r", CPENC (2,1,C0,C9,0), 0),
  4233. SR_CORE ("trcextinselr0", CPENC (2,1,C0,C8,4), 0),
  4234. SR_CORE ("trcextinselr", CPENC (2,1,C0,C8,4), 0),
  4235. SR_CORE ("trcextinselr1", CPENC (2,1,C0,C9,4), 0),
  4236. SR_CORE ("trcextinselr2", CPENC (2,1,C0,C10,4), 0),
  4237. SR_CORE ("trcextinselr3", CPENC (2,1,C0,C11,4), 0),
  4238. SR_CORE ("trcimspec0", CPENC (2,1,C0,C0,7), 0),
  4239. SR_CORE ("trcimspec1", CPENC (2,1,C0,C1,7), 0),
  4240. SR_CORE ("trcimspec2", CPENC (2,1,C0,C2,7), 0),
  4241. SR_CORE ("trcimspec3", CPENC (2,1,C0,C3,7), 0),
  4242. SR_CORE ("trcimspec4", CPENC (2,1,C0,C4,7), 0),
  4243. SR_CORE ("trcimspec5", CPENC (2,1,C0,C5,7), 0),
  4244. SR_CORE ("trcimspec6", CPENC (2,1,C0,C6,7), 0),
  4245. SR_CORE ("trcimspec7", CPENC (2,1,C0,C7,7), 0),
  4246. SR_CORE ("trcitctrl", CPENC (2,1,C7,C0,4), 0),
  4247. SR_CORE ("trcpdcr", CPENC (2,1,C1,C4,4), 0),
  4248. SR_CORE ("trcprgctlr", CPENC (2,1,C0,C1,0), 0),
  4249. SR_CORE ("trcprocselr", CPENC (2,1,C0,C2,0), 0),
  4250. SR_CORE ("trcqctlr", CPENC (2,1,C0,C1,1), 0),
  4251. SR_CORE ("trcrsr", CPENC (2,1,C0,C10,0), 0),
  4252. SR_CORE ("trcrsctlr2", CPENC (2,1,C1,C2,0), 0),
  4253. SR_CORE ("trcrsctlr3", CPENC (2,1,C1,C3,0), 0),
  4254. SR_CORE ("trcrsctlr4", CPENC (2,1,C1,C4,0), 0),
  4255. SR_CORE ("trcrsctlr5", CPENC (2,1,C1,C5,0), 0),
  4256. SR_CORE ("trcrsctlr6", CPENC (2,1,C1,C6,0), 0),
  4257. SR_CORE ("trcrsctlr7", CPENC (2,1,C1,C7,0), 0),
  4258. SR_CORE ("trcrsctlr8", CPENC (2,1,C1,C8,0), 0),
  4259. SR_CORE ("trcrsctlr9", CPENC (2,1,C1,C9,0), 0),
  4260. SR_CORE ("trcrsctlr10", CPENC (2,1,C1,C10,0), 0),
  4261. SR_CORE ("trcrsctlr11", CPENC (2,1,C1,C11,0), 0),
  4262. SR_CORE ("trcrsctlr12", CPENC (2,1,C1,C12,0), 0),
  4263. SR_CORE ("trcrsctlr13", CPENC (2,1,C1,C13,0), 0),
  4264. SR_CORE ("trcrsctlr14", CPENC (2,1,C1,C14,0), 0),
  4265. SR_CORE ("trcrsctlr15", CPENC (2,1,C1,C15,0), 0),
  4266. SR_CORE ("trcrsctlr16", CPENC (2,1,C1,C0,1), 0),
  4267. SR_CORE ("trcrsctlr17", CPENC (2,1,C1,C1,1), 0),
  4268. SR_CORE ("trcrsctlr18", CPENC (2,1,C1,C2,1), 0),
  4269. SR_CORE ("trcrsctlr19", CPENC (2,1,C1,C3,1), 0),
  4270. SR_CORE ("trcrsctlr20", CPENC (2,1,C1,C4,1), 0),
  4271. SR_CORE ("trcrsctlr21", CPENC (2,1,C1,C5,1), 0),
  4272. SR_CORE ("trcrsctlr22", CPENC (2,1,C1,C6,1), 0),
  4273. SR_CORE ("trcrsctlr23", CPENC (2,1,C1,C7,1), 0),
  4274. SR_CORE ("trcrsctlr24", CPENC (2,1,C1,C8,1), 0),
  4275. SR_CORE ("trcrsctlr25", CPENC (2,1,C1,C9,1), 0),
  4276. SR_CORE ("trcrsctlr26", CPENC (2,1,C1,C10,1), 0),
  4277. SR_CORE ("trcrsctlr27", CPENC (2,1,C1,C11,1), 0),
  4278. SR_CORE ("trcrsctlr28", CPENC (2,1,C1,C12,1), 0),
  4279. SR_CORE ("trcrsctlr29", CPENC (2,1,C1,C13,1), 0),
  4280. SR_CORE ("trcrsctlr30", CPENC (2,1,C1,C14,1), 0),
  4281. SR_CORE ("trcrsctlr31", CPENC (2,1,C1,C15,1), 0),
  4282. SR_CORE ("trcseqevr0", CPENC (2,1,C0,C0,4), 0),
  4283. SR_CORE ("trcseqevr1", CPENC (2,1,C0,C1,4), 0),
  4284. SR_CORE ("trcseqevr2", CPENC (2,1,C0,C2,4), 0),
  4285. SR_CORE ("trcseqrstevr", CPENC (2,1,C0,C6,4), 0),
  4286. SR_CORE ("trcseqstr", CPENC (2,1,C0,C7,4), 0),
  4287. SR_CORE ("trcssccr0", CPENC (2,1,C1,C0,2), 0),
  4288. SR_CORE ("trcssccr1", CPENC (2,1,C1,C1,2), 0),
  4289. SR_CORE ("trcssccr2", CPENC (2,1,C1,C2,2), 0),
  4290. SR_CORE ("trcssccr3", CPENC (2,1,C1,C3,2), 0),
  4291. SR_CORE ("trcssccr4", CPENC (2,1,C1,C4,2), 0),
  4292. SR_CORE ("trcssccr5", CPENC (2,1,C1,C5,2), 0),
  4293. SR_CORE ("trcssccr6", CPENC (2,1,C1,C6,2), 0),
  4294. SR_CORE ("trcssccr7", CPENC (2,1,C1,C7,2), 0),
  4295. SR_CORE ("trcsscsr0", CPENC (2,1,C1,C8,2), 0),
  4296. SR_CORE ("trcsscsr1", CPENC (2,1,C1,C9,2), 0),
  4297. SR_CORE ("trcsscsr2", CPENC (2,1,C1,C10,2), 0),
  4298. SR_CORE ("trcsscsr3", CPENC (2,1,C1,C11,2), 0),
  4299. SR_CORE ("trcsscsr4", CPENC (2,1,C1,C12,2), 0),
  4300. SR_CORE ("trcsscsr5", CPENC (2,1,C1,C13,2), 0),
  4301. SR_CORE ("trcsscsr6", CPENC (2,1,C1,C14,2), 0),
  4302. SR_CORE ("trcsscsr7", CPENC (2,1,C1,C15,2), 0),
  4303. SR_CORE ("trcsspcicr0", CPENC (2,1,C1,C0,3), 0),
  4304. SR_CORE ("trcsspcicr1", CPENC (2,1,C1,C1,3), 0),
  4305. SR_CORE ("trcsspcicr2", CPENC (2,1,C1,C2,3), 0),
  4306. SR_CORE ("trcsspcicr3", CPENC (2,1,C1,C3,3), 0),
  4307. SR_CORE ("trcsspcicr4", CPENC (2,1,C1,C4,3), 0),
  4308. SR_CORE ("trcsspcicr5", CPENC (2,1,C1,C5,3), 0),
  4309. SR_CORE ("trcsspcicr6", CPENC (2,1,C1,C6,3), 0),
  4310. SR_CORE ("trcsspcicr7", CPENC (2,1,C1,C7,3), 0),
  4311. SR_CORE ("trcstallctlr", CPENC (2,1,C0,C11,0), 0),
  4312. SR_CORE ("trcsyncpr", CPENC (2,1,C0,C13,0), 0),
  4313. SR_CORE ("trctraceidr", CPENC (2,1,C0,C0,1), 0),
  4314. SR_CORE ("trctsctlr", CPENC (2,1,C0,C12,0), 0),
  4315. SR_CORE ("trcvdarcctlr", CPENC (2,1,C0,C10,2), 0),
  4316. SR_CORE ("trcvdctlr", CPENC (2,1,C0,C8,2), 0),
  4317. SR_CORE ("trcvdsacctlr", CPENC (2,1,C0,C9,2), 0),
  4318. SR_CORE ("trcvictlr", CPENC (2,1,C0,C0,2), 0),
  4319. SR_CORE ("trcviiectlr", CPENC (2,1,C0,C1,2), 0),
  4320. SR_CORE ("trcvipcssctlr", CPENC (2,1,C0,C3,2), 0),
  4321. SR_CORE ("trcvissctlr", CPENC (2,1,C0,C2,2), 0),
  4322. SR_CORE ("trcvmidcctlr0", CPENC (2,1,C3,C2,2), 0),
  4323. SR_CORE ("trcvmidcctlr1", CPENC (2,1,C3,C3,2), 0),
  4324. SR_CORE ("trcvmidcvr0", CPENC (2,1,C3,C0,1), 0),
  4325. SR_CORE ("trcvmidcvr1", CPENC (2,1,C3,C2,1), 0),
  4326. SR_CORE ("trcvmidcvr2", CPENC (2,1,C3,C4,1), 0),
  4327. SR_CORE ("trcvmidcvr3", CPENC (2,1,C3,C6,1), 0),
  4328. SR_CORE ("trcvmidcvr4", CPENC (2,1,C3,C8,1), 0),
  4329. SR_CORE ("trcvmidcvr5", CPENC (2,1,C3,C10,1), 0),
  4330. SR_CORE ("trcvmidcvr6", CPENC (2,1,C3,C12,1), 0),
  4331. SR_CORE ("trcvmidcvr7", CPENC (2,1,C3,C14,1), 0),
  4332. SR_CORE ("trclar", CPENC (2,1,C7,C12,6), F_REG_WRITE),
  4333. SR_CORE ("trcoslar", CPENC (2,1,C1,C0,4), F_REG_WRITE),
  4334. SR_CORE ("csrcr_el0", CPENC (2,3,C8,C0,0), 0),
  4335. SR_CORE ("csrptr_el0", CPENC (2,3,C8,C0,1), 0),
  4336. SR_CORE ("csridr_el0", CPENC (2,3,C8,C0,2), F_REG_READ),
  4337. SR_CORE ("csrptridx_el0", CPENC (2,3,C8,C0,3), F_REG_READ),
  4338. SR_CORE ("csrcr_el1", CPENC (2,0,C8,C0,0), 0),
  4339. SR_CORE ("csrcr_el12", CPENC (2,5,C8,C0,0), 0),
  4340. SR_CORE ("csrptr_el1", CPENC (2,0,C8,C0,1), 0),
  4341. SR_CORE ("csrptr_el12", CPENC (2,5,C8,C0,1), 0),
  4342. SR_CORE ("csrptridx_el1", CPENC (2,0,C8,C0,3), F_REG_READ),
  4343. SR_CORE ("csrcr_el2", CPENC (2,4,C8,C0,0), 0),
  4344. SR_CORE ("csrptr_el2", CPENC (2,4,C8,C0,1), 0),
  4345. SR_CORE ("csrptridx_el2", CPENC (2,4,C8,C0,3), F_REG_READ),
  4346. SR_LOR ("lorid_el1", CPENC (3,0,C10,C4,7), F_REG_READ),
  4347. SR_LOR ("lorc_el1", CPENC (3,0,C10,C4,3), 0),
  4348. SR_LOR ("lorea_el1", CPENC (3,0,C10,C4,1), 0),
  4349. SR_LOR ("lorn_el1", CPENC (3,0,C10,C4,2), 0),
  4350. SR_LOR ("lorsa_el1", CPENC (3,0,C10,C4,0), 0),
  4351. SR_CORE ("icc_ctlr_el3", CPENC (3,6,C12,C12,4), 0),
  4352. SR_CORE ("icc_sre_el1", CPENC (3,0,C12,C12,5), 0),
  4353. SR_CORE ("icc_sre_el2", CPENC (3,4,C12,C9,5), 0),
  4354. SR_CORE ("icc_sre_el3", CPENC (3,6,C12,C12,5), 0),
  4355. SR_CORE ("ich_vtr_el2", CPENC (3,4,C12,C11,1), F_REG_READ),
  4356. SR_CORE ("brbcr_el1", CPENC (2,1,C9,C0,0), 0),
  4357. SR_CORE ("brbcr_el12", CPENC (2,5,C9,C0,0), 0),
  4358. SR_CORE ("brbfcr_el1", CPENC (2,1,C9,C0,1), 0),
  4359. SR_CORE ("brbts_el1", CPENC (2,1,C9,C0,2), 0),
  4360. SR_CORE ("brbinfinj_el1", CPENC (2,1,C9,C1,0), 0),
  4361. SR_CORE ("brbsrcinj_el1", CPENC (2,1,C9,C1,1), 0),
  4362. SR_CORE ("brbtgtinj_el1", CPENC (2,1,C9,C1,2), 0),
  4363. SR_CORE ("brbidr0_el1", CPENC (2,1,C9,C2,0), F_REG_READ),
  4364. SR_CORE ("brbcr_el2", CPENC (2,4,C9,C0,0), 0),
  4365. SR_CORE ("brbsrc0_el1", CPENC (2,1,C8,C0,1), F_REG_READ),
  4366. SR_CORE ("brbsrc1_el1", CPENC (2,1,C8,C1,1), F_REG_READ),
  4367. SR_CORE ("brbsrc2_el1", CPENC (2,1,C8,C2,1), F_REG_READ),
  4368. SR_CORE ("brbsrc3_el1", CPENC (2,1,C8,C3,1), F_REG_READ),
  4369. SR_CORE ("brbsrc4_el1", CPENC (2,1,C8,C4,1), F_REG_READ),
  4370. SR_CORE ("brbsrc5_el1", CPENC (2,1,C8,C5,1), F_REG_READ),
  4371. SR_CORE ("brbsrc6_el1", CPENC (2,1,C8,C6,1), F_REG_READ),
  4372. SR_CORE ("brbsrc7_el1", CPENC (2,1,C8,C7,1), F_REG_READ),
  4373. SR_CORE ("brbsrc8_el1", CPENC (2,1,C8,C8,1), F_REG_READ),
  4374. SR_CORE ("brbsrc9_el1", CPENC (2,1,C8,C9,1), F_REG_READ),
  4375. SR_CORE ("brbsrc10_el1", CPENC (2,1,C8,C10,1), F_REG_READ),
  4376. SR_CORE ("brbsrc11_el1", CPENC (2,1,C8,C11,1), F_REG_READ),
  4377. SR_CORE ("brbsrc12_el1", CPENC (2,1,C8,C12,1), F_REG_READ),
  4378. SR_CORE ("brbsrc13_el1", CPENC (2,1,C8,C13,1), F_REG_READ),
  4379. SR_CORE ("brbsrc14_el1", CPENC (2,1,C8,C14,1), F_REG_READ),
  4380. SR_CORE ("brbsrc15_el1", CPENC (2,1,C8,C15,1), F_REG_READ),
  4381. SR_CORE ("brbsrc16_el1", CPENC (2,1,C8,C0,5), F_REG_READ),
  4382. SR_CORE ("brbsrc17_el1", CPENC (2,1,C8,C1,5), F_REG_READ),
  4383. SR_CORE ("brbsrc18_el1", CPENC (2,1,C8,C2,5), F_REG_READ),
  4384. SR_CORE ("brbsrc19_el1", CPENC (2,1,C8,C3,5), F_REG_READ),
  4385. SR_CORE ("brbsrc20_el1", CPENC (2,1,C8,C4,5), F_REG_READ),
  4386. SR_CORE ("brbsrc21_el1", CPENC (2,1,C8,C5,5), F_REG_READ),
  4387. SR_CORE ("brbsrc22_el1", CPENC (2,1,C8,C6,5), F_REG_READ),
  4388. SR_CORE ("brbsrc23_el1", CPENC (2,1,C8,C7,5), F_REG_READ),
  4389. SR_CORE ("brbsrc24_el1", CPENC (2,1,C8,C8,5), F_REG_READ),
  4390. SR_CORE ("brbsrc25_el1", CPENC (2,1,C8,C9,5), F_REG_READ),
  4391. SR_CORE ("brbsrc26_el1", CPENC (2,1,C8,C10,5), F_REG_READ),
  4392. SR_CORE ("brbsrc27_el1", CPENC (2,1,C8,C11,5), F_REG_READ),
  4393. SR_CORE ("brbsrc28_el1", CPENC (2,1,C8,C12,5), F_REG_READ),
  4394. SR_CORE ("brbsrc29_el1", CPENC (2,1,C8,C13,5), F_REG_READ),
  4395. SR_CORE ("brbsrc30_el1", CPENC (2,1,C8,C14,5), F_REG_READ),
  4396. SR_CORE ("brbsrc31_el1", CPENC (2,1,C8,C15,5), F_REG_READ),
  4397. SR_CORE ("brbtgt0_el1", CPENC (2,1,C8,C0,2), F_REG_READ),
  4398. SR_CORE ("brbtgt1_el1", CPENC (2,1,C8,C1,2), F_REG_READ),
  4399. SR_CORE ("brbtgt2_el1", CPENC (2,1,C8,C2,2), F_REG_READ),
  4400. SR_CORE ("brbtgt3_el1", CPENC (2,1,C8,C3,2), F_REG_READ),
  4401. SR_CORE ("brbtgt4_el1", CPENC (2,1,C8,C4,2), F_REG_READ),
  4402. SR_CORE ("brbtgt5_el1", CPENC (2,1,C8,C5,2), F_REG_READ),
  4403. SR_CORE ("brbtgt6_el1", CPENC (2,1,C8,C6,2), F_REG_READ),
  4404. SR_CORE ("brbtgt7_el1", CPENC (2,1,C8,C7,2), F_REG_READ),
  4405. SR_CORE ("brbtgt8_el1", CPENC (2,1,C8,C8,2), F_REG_READ),
  4406. SR_CORE ("brbtgt9_el1", CPENC (2,1,C8,C9,2), F_REG_READ),
  4407. SR_CORE ("brbtgt10_el1", CPENC (2,1,C8,C10,2), F_REG_READ),
  4408. SR_CORE ("brbtgt11_el1", CPENC (2,1,C8,C11,2), F_REG_READ),
  4409. SR_CORE ("brbtgt12_el1", CPENC (2,1,C8,C12,2), F_REG_READ),
  4410. SR_CORE ("brbtgt13_el1", CPENC (2,1,C8,C13,2), F_REG_READ),
  4411. SR_CORE ("brbtgt14_el1", CPENC (2,1,C8,C14,2), F_REG_READ),
  4412. SR_CORE ("brbtgt15_el1", CPENC (2,1,C8,C15,2), F_REG_READ),
  4413. SR_CORE ("brbtgt16_el1", CPENC (2,1,C8,C0,6), F_REG_READ),
  4414. SR_CORE ("brbtgt17_el1", CPENC (2,1,C8,C1,6), F_REG_READ),
  4415. SR_CORE ("brbtgt18_el1", CPENC (2,1,C8,C2,6), F_REG_READ),
  4416. SR_CORE ("brbtgt19_el1", CPENC (2,1,C8,C3,6), F_REG_READ),
  4417. SR_CORE ("brbtgt20_el1", CPENC (2,1,C8,C4,6), F_REG_READ),
  4418. SR_CORE ("brbtgt21_el1", CPENC (2,1,C8,C5,6), F_REG_READ),
  4419. SR_CORE ("brbtgt22_el1", CPENC (2,1,C8,C6,6), F_REG_READ),
  4420. SR_CORE ("brbtgt23_el1", CPENC (2,1,C8,C7,6), F_REG_READ),
  4421. SR_CORE ("brbtgt24_el1", CPENC (2,1,C8,C8,6), F_REG_READ),
  4422. SR_CORE ("brbtgt25_el1", CPENC (2,1,C8,C9,6), F_REG_READ),
  4423. SR_CORE ("brbtgt26_el1", CPENC (2,1,C8,C10,6), F_REG_READ),
  4424. SR_CORE ("brbtgt27_el1", CPENC (2,1,C8,C11,6), F_REG_READ),
  4425. SR_CORE ("brbtgt28_el1", CPENC (2,1,C8,C12,6), F_REG_READ),
  4426. SR_CORE ("brbtgt29_el1", CPENC (2,1,C8,C13,6), F_REG_READ),
  4427. SR_CORE ("brbtgt30_el1", CPENC (2,1,C8,C14,6), F_REG_READ),
  4428. SR_CORE ("brbtgt31_el1", CPENC (2,1,C8,C15,6), F_REG_READ),
  4429. SR_CORE ("brbinf0_el1", CPENC (2,1,C8,C0,0), F_REG_READ),
  4430. SR_CORE ("brbinf1_el1", CPENC (2,1,C8,C1,0), F_REG_READ),
  4431. SR_CORE ("brbinf2_el1", CPENC (2,1,C8,C2,0), F_REG_READ),
  4432. SR_CORE ("brbinf3_el1", CPENC (2,1,C8,C3,0), F_REG_READ),
  4433. SR_CORE ("brbinf4_el1", CPENC (2,1,C8,C4,0), F_REG_READ),
  4434. SR_CORE ("brbinf5_el1", CPENC (2,1,C8,C5,0), F_REG_READ),
  4435. SR_CORE ("brbinf6_el1", CPENC (2,1,C8,C6,0), F_REG_READ),
  4436. SR_CORE ("brbinf7_el1", CPENC (2,1,C8,C7,0), F_REG_READ),
  4437. SR_CORE ("brbinf8_el1", CPENC (2,1,C8,C8,0), F_REG_READ),
  4438. SR_CORE ("brbinf9_el1", CPENC (2,1,C8,C9,0), F_REG_READ),
  4439. SR_CORE ("brbinf10_el1", CPENC (2,1,C8,C10,0), F_REG_READ),
  4440. SR_CORE ("brbinf11_el1", CPENC (2,1,C8,C11,0), F_REG_READ),
  4441. SR_CORE ("brbinf12_el1", CPENC (2,1,C8,C12,0), F_REG_READ),
  4442. SR_CORE ("brbinf13_el1", CPENC (2,1,C8,C13,0), F_REG_READ),
  4443. SR_CORE ("brbinf14_el1", CPENC (2,1,C8,C14,0), F_REG_READ),
  4444. SR_CORE ("brbinf15_el1", CPENC (2,1,C8,C15,0), F_REG_READ),
  4445. SR_CORE ("brbinf16_el1", CPENC (2,1,C8,C0,4), F_REG_READ),
  4446. SR_CORE ("brbinf17_el1", CPENC (2,1,C8,C1,4), F_REG_READ),
  4447. SR_CORE ("brbinf18_el1", CPENC (2,1,C8,C2,4), F_REG_READ),
  4448. SR_CORE ("brbinf19_el1", CPENC (2,1,C8,C3,4), F_REG_READ),
  4449. SR_CORE ("brbinf20_el1", CPENC (2,1,C8,C4,4), F_REG_READ),
  4450. SR_CORE ("brbinf21_el1", CPENC (2,1,C8,C5,4), F_REG_READ),
  4451. SR_CORE ("brbinf22_el1", CPENC (2,1,C8,C6,4), F_REG_READ),
  4452. SR_CORE ("brbinf23_el1", CPENC (2,1,C8,C7,4), F_REG_READ),
  4453. SR_CORE ("brbinf24_el1", CPENC (2,1,C8,C8,4), F_REG_READ),
  4454. SR_CORE ("brbinf25_el1", CPENC (2,1,C8,C9,4), F_REG_READ),
  4455. SR_CORE ("brbinf26_el1", CPENC (2,1,C8,C10,4), F_REG_READ),
  4456. SR_CORE ("brbinf27_el1", CPENC (2,1,C8,C11,4), F_REG_READ),
  4457. SR_CORE ("brbinf28_el1", CPENC (2,1,C8,C12,4), F_REG_READ),
  4458. SR_CORE ("brbinf29_el1", CPENC (2,1,C8,C13,4), F_REG_READ),
  4459. SR_CORE ("brbinf30_el1", CPENC (2,1,C8,C14,4), F_REG_READ),
  4460. SR_CORE ("brbinf31_el1", CPENC (2,1,C8,C15,4), F_REG_READ),
  4461. SR_CORE ("accdata_el1", CPENC (3,0,C13,C0,5), 0),
  4462. SR_CORE ("mfar_el3", CPENC (3,6,C6,C0,5), 0),
  4463. SR_CORE ("gpccr_el3", CPENC (3,6,C2,C1,6), 0),
  4464. SR_CORE ("gptbr_el3", CPENC (3,6,C2,C1,4), 0),
  4465. SR_SME ("svcr", CPENC (3,3,C4,C2,2), 0),
  4466. SR_SME ("id_aa64smfr0_el1", CPENC (3,0,C0,C4,5), F_REG_READ),
  4467. SR_SME ("smcr_el1", CPENC (3,0,C1,C2,6), 0),
  4468. SR_SME ("smcr_el12", CPENC (3,5,C1,C2,6), 0),
  4469. SR_SME ("smcr_el2", CPENC (3,4,C1,C2,6), 0),
  4470. SR_SME ("smcr_el3", CPENC (3,6,C1,C2,6), 0),
  4471. SR_SME ("smpri_el1", CPENC (3,0,C1,C2,4), 0),
  4472. SR_SME ("smprimap_el2", CPENC (3,4,C1,C2,5), 0),
  4473. SR_SME ("smidr_el1", CPENC (3,1,C0,C0,6), F_REG_READ),
  4474. SR_SME ("tpidr2_el0", CPENC (3,3,C13,C0,5), 0),
  4475. SR_SME ("mpamsm_el1", CPENC (3,0,C10,C5,3), 0),
  4476. SR_AMU ("amcr_el0", CPENC (3,3,C13,C2,0), 0),
  4477. SR_AMU ("amcfgr_el0", CPENC (3,3,C13,C2,1), F_REG_READ),
  4478. SR_AMU ("amcgcr_el0", CPENC (3,3,C13,C2,2), F_REG_READ),
  4479. SR_AMU ("amuserenr_el0", CPENC (3,3,C13,C2,3), 0),
  4480. SR_AMU ("amcntenclr0_el0", CPENC (3,3,C13,C2,4), 0),
  4481. SR_AMU ("amcntenset0_el0", CPENC (3,3,C13,C2,5), 0),
  4482. SR_AMU ("amcntenclr1_el0", CPENC (3,3,C13,C3,0), 0),
  4483. SR_AMU ("amcntenset1_el0", CPENC (3,3,C13,C3,1), 0),
  4484. SR_AMU ("amevcntr00_el0", CPENC (3,3,C13,C4,0), 0),
  4485. SR_AMU ("amevcntr01_el0", CPENC (3,3,C13,C4,1), 0),
  4486. SR_AMU ("amevcntr02_el0", CPENC (3,3,C13,C4,2), 0),
  4487. SR_AMU ("amevcntr03_el0", CPENC (3,3,C13,C4,3), 0),
  4488. SR_AMU ("amevtyper00_el0", CPENC (3,3,C13,C6,0), F_REG_READ),
  4489. SR_AMU ("amevtyper01_el0", CPENC (3,3,C13,C6,1), F_REG_READ),
  4490. SR_AMU ("amevtyper02_el0", CPENC (3,3,C13,C6,2), F_REG_READ),
  4491. SR_AMU ("amevtyper03_el0", CPENC (3,3,C13,C6,3), F_REG_READ),
  4492. SR_AMU ("amevcntr10_el0", CPENC (3,3,C13,C12,0), 0),
  4493. SR_AMU ("amevcntr11_el0", CPENC (3,3,C13,C12,1), 0),
  4494. SR_AMU ("amevcntr12_el0", CPENC (3,3,C13,C12,2), 0),
  4495. SR_AMU ("amevcntr13_el0", CPENC (3,3,C13,C12,3), 0),
  4496. SR_AMU ("amevcntr14_el0", CPENC (3,3,C13,C12,4), 0),
  4497. SR_AMU ("amevcntr15_el0", CPENC (3,3,C13,C12,5), 0),
  4498. SR_AMU ("amevcntr16_el0", CPENC (3,3,C13,C12,6), 0),
  4499. SR_AMU ("amevcntr17_el0", CPENC (3,3,C13,C12,7), 0),
  4500. SR_AMU ("amevcntr18_el0", CPENC (3,3,C13,C13,0), 0),
  4501. SR_AMU ("amevcntr19_el0", CPENC (3,3,C13,C13,1), 0),
  4502. SR_AMU ("amevcntr110_el0", CPENC (3,3,C13,C13,2), 0),
  4503. SR_AMU ("amevcntr111_el0", CPENC (3,3,C13,C13,3), 0),
  4504. SR_AMU ("amevcntr112_el0", CPENC (3,3,C13,C13,4), 0),
  4505. SR_AMU ("amevcntr113_el0", CPENC (3,3,C13,C13,5), 0),
  4506. SR_AMU ("amevcntr114_el0", CPENC (3,3,C13,C13,6), 0),
  4507. SR_AMU ("amevcntr115_el0", CPENC (3,3,C13,C13,7), 0),
  4508. SR_AMU ("amevtyper10_el0", CPENC (3,3,C13,C14,0), 0),
  4509. SR_AMU ("amevtyper11_el0", CPENC (3,3,C13,C14,1), 0),
  4510. SR_AMU ("amevtyper12_el0", CPENC (3,3,C13,C14,2), 0),
  4511. SR_AMU ("amevtyper13_el0", CPENC (3,3,C13,C14,3), 0),
  4512. SR_AMU ("amevtyper14_el0", CPENC (3,3,C13,C14,4), 0),
  4513. SR_AMU ("amevtyper15_el0", CPENC (3,3,C13,C14,5), 0),
  4514. SR_AMU ("amevtyper16_el0", CPENC (3,3,C13,C14,6), 0),
  4515. SR_AMU ("amevtyper17_el0", CPENC (3,3,C13,C14,7), 0),
  4516. SR_AMU ("amevtyper18_el0", CPENC (3,3,C13,C15,0), 0),
  4517. SR_AMU ("amevtyper19_el0", CPENC (3,3,C13,C15,1), 0),
  4518. SR_AMU ("amevtyper110_el0", CPENC (3,3,C13,C15,2), 0),
  4519. SR_AMU ("amevtyper111_el0", CPENC (3,3,C13,C15,3), 0),
  4520. SR_AMU ("amevtyper112_el0", CPENC (3,3,C13,C15,4), 0),
  4521. SR_AMU ("amevtyper113_el0", CPENC (3,3,C13,C15,5), 0),
  4522. SR_AMU ("amevtyper114_el0", CPENC (3,3,C13,C15,6), 0),
  4523. SR_AMU ("amevtyper115_el0", CPENC (3,3,C13,C15,7), 0),
  4524. SR_GIC ("icc_pmr_el1", CPENC (3,0,C4,C6,0), 0),
  4525. SR_GIC ("icc_iar0_el1", CPENC (3,0,C12,C8,0), F_REG_READ),
  4526. SR_GIC ("icc_eoir0_el1", CPENC (3,0,C12,C8,1), F_REG_WRITE),
  4527. SR_GIC ("icc_hppir0_el1", CPENC (3,0,C12,C8,2), F_REG_READ),
  4528. SR_GIC ("icc_bpr0_el1", CPENC (3,0,C12,C8,3), 0),
  4529. SR_GIC ("icc_ap0r0_el1", CPENC (3,0,C12,C8,4), 0),
  4530. SR_GIC ("icc_ap0r1_el1", CPENC (3,0,C12,C8,5), 0),
  4531. SR_GIC ("icc_ap0r2_el1", CPENC (3,0,C12,C8,6), 0),
  4532. SR_GIC ("icc_ap0r3_el1", CPENC (3,0,C12,C8,7), 0),
  4533. SR_GIC ("icc_ap1r0_el1", CPENC (3,0,C12,C9,0), 0),
  4534. SR_GIC ("icc_ap1r1_el1", CPENC (3,0,C12,C9,1), 0),
  4535. SR_GIC ("icc_ap1r2_el1", CPENC (3,0,C12,C9,2), 0),
  4536. SR_GIC ("icc_ap1r3_el1", CPENC (3,0,C12,C9,3), 0),
  4537. SR_GIC ("icc_dir_el1", CPENC (3,0,C12,C11,1), F_REG_WRITE),
  4538. SR_GIC ("icc_rpr_el1", CPENC (3,0,C12,C11,3), F_REG_READ),
  4539. SR_GIC ("icc_sgi1r_el1", CPENC (3,0,C12,C11,5), F_REG_WRITE),
  4540. SR_GIC ("icc_asgi1r_el1", CPENC (3,0,C12,C11,6), F_REG_WRITE),
  4541. SR_GIC ("icc_sgi0r_el1", CPENC (3,0,C12,C11,7), F_REG_WRITE),
  4542. SR_GIC ("icc_iar1_el1", CPENC (3,0,C12,C12,0), F_REG_READ),
  4543. SR_GIC ("icc_eoir1_el1", CPENC (3,0,C12,C12,1), F_REG_WRITE),
  4544. SR_GIC ("icc_hppir1_el1", CPENC (3,0,C12,C12,2), F_REG_READ),
  4545. SR_GIC ("icc_bpr1_el1", CPENC (3,0,C12,C12,3), 0),
  4546. SR_GIC ("icc_ctlr_el1", CPENC (3,0,C12,C12,4), 0),
  4547. SR_GIC ("icc_igrpen0_el1", CPENC (3,0,C12,C12,6), 0),
  4548. SR_GIC ("icc_igrpen1_el1", CPENC (3,0,C12,C12,7), 0),
  4549. SR_GIC ("ich_ap0r0_el2", CPENC (3,4,C12,C8,0), 0),
  4550. SR_GIC ("ich_ap0r1_el2", CPENC (3,4,C12,C8,1), 0),
  4551. SR_GIC ("ich_ap0r2_el2", CPENC (3,4,C12,C8,2), 0),
  4552. SR_GIC ("ich_ap0r3_el2", CPENC (3,4,C12,C8,3), 0),
  4553. SR_GIC ("ich_ap1r0_el2", CPENC (3,4,C12,C9,0), 0),
  4554. SR_GIC ("ich_ap1r1_el2", CPENC (3,4,C12,C9,1), 0),
  4555. SR_GIC ("ich_ap1r2_el2", CPENC (3,4,C12,C9,2), 0),
  4556. SR_GIC ("ich_ap1r3_el2", CPENC (3,4,C12,C9,3), 0),
  4557. SR_GIC ("ich_hcr_el2", CPENC (3,4,C12,C11,0), 0),
  4558. SR_GIC ("ich_misr_el2", CPENC (3,4,C12,C11,2), F_REG_READ),
  4559. SR_GIC ("ich_eisr_el2", CPENC (3,4,C12,C11,3), F_REG_READ),
  4560. SR_GIC ("ich_elrsr_el2", CPENC (3,4,C12,C11,5), F_REG_READ),
  4561. SR_GIC ("ich_vmcr_el2", CPENC (3,4,C12,C11,7), 0),
  4562. SR_GIC ("ich_lr0_el2", CPENC (3,4,C12,C12,0), 0),
  4563. SR_GIC ("ich_lr1_el2", CPENC (3,4,C12,C12,1), 0),
  4564. SR_GIC ("ich_lr2_el2", CPENC (3,4,C12,C12,2), 0),
  4565. SR_GIC ("ich_lr3_el2", CPENC (3,4,C12,C12,3), 0),
  4566. SR_GIC ("ich_lr4_el2", CPENC (3,4,C12,C12,4), 0),
  4567. SR_GIC ("ich_lr5_el2", CPENC (3,4,C12,C12,5), 0),
  4568. SR_GIC ("ich_lr6_el2", CPENC (3,4,C12,C12,6), 0),
  4569. SR_GIC ("ich_lr7_el2", CPENC (3,4,C12,C12,7), 0),
  4570. SR_GIC ("ich_lr8_el2", CPENC (3,4,C12,C13,0), 0),
  4571. SR_GIC ("ich_lr9_el2", CPENC (3,4,C12,C13,1), 0),
  4572. SR_GIC ("ich_lr10_el2", CPENC (3,4,C12,C13,2), 0),
  4573. SR_GIC ("ich_lr11_el2", CPENC (3,4,C12,C13,3), 0),
  4574. SR_GIC ("ich_lr12_el2", CPENC (3,4,C12,C13,4), 0),
  4575. SR_GIC ("ich_lr13_el2", CPENC (3,4,C12,C13,5), 0),
  4576. SR_GIC ("ich_lr14_el2", CPENC (3,4,C12,C13,6), 0),
  4577. SR_GIC ("ich_lr15_el2", CPENC (3,4,C12,C13,7), 0),
  4578. SR_GIC ("icc_igrpen1_el3", CPENC (3,6,C12,C12,7), 0),
  4579. SR_V8_6 ("amcg1idr_el0", CPENC (3,3,C13,C2,6), F_REG_READ),
  4580. SR_V8_6 ("cntpctss_el0", CPENC (3,3,C14,C0,5), F_REG_READ),
  4581. SR_V8_6 ("cntvctss_el0", CPENC (3,3,C14,C0,6), F_REG_READ),
  4582. SR_V8_6 ("hfgrtr_el2", CPENC (3,4,C1,C1,4), 0),
  4583. SR_V8_6 ("hfgwtr_el2", CPENC (3,4,C1,C1,5), 0),
  4584. SR_V8_6 ("hfgitr_el2", CPENC (3,4,C1,C1,6), 0),
  4585. SR_V8_6 ("hdfgrtr_el2", CPENC (3,4,C3,C1,4), 0),
  4586. SR_V8_6 ("hdfgwtr_el2", CPENC (3,4,C3,C1,5), 0),
  4587. SR_V8_6 ("hafgrtr_el2", CPENC (3,4,C3,C1,6), 0),
  4588. SR_V8_6 ("amevcntvoff00_el2", CPENC (3,4,C13,C8,0), 0),
  4589. SR_V8_6 ("amevcntvoff01_el2", CPENC (3,4,C13,C8,1), 0),
  4590. SR_V8_6 ("amevcntvoff02_el2", CPENC (3,4,C13,C8,2), 0),
  4591. SR_V8_6 ("amevcntvoff03_el2", CPENC (3,4,C13,C8,3), 0),
  4592. SR_V8_6 ("amevcntvoff04_el2", CPENC (3,4,C13,C8,4), 0),
  4593. SR_V8_6 ("amevcntvoff05_el2", CPENC (3,4,C13,C8,5), 0),
  4594. SR_V8_6 ("amevcntvoff06_el2", CPENC (3,4,C13,C8,6), 0),
  4595. SR_V8_6 ("amevcntvoff07_el2", CPENC (3,4,C13,C8,7), 0),
  4596. SR_V8_6 ("amevcntvoff08_el2", CPENC (3,4,C13,C9,0), 0),
  4597. SR_V8_6 ("amevcntvoff09_el2", CPENC (3,4,C13,C9,1), 0),
  4598. SR_V8_6 ("amevcntvoff010_el2", CPENC (3,4,C13,C9,2), 0),
  4599. SR_V8_6 ("amevcntvoff011_el2", CPENC (3,4,C13,C9,3), 0),
  4600. SR_V8_6 ("amevcntvoff012_el2", CPENC (3,4,C13,C9,4), 0),
  4601. SR_V8_6 ("amevcntvoff013_el2", CPENC (3,4,C13,C9,5), 0),
  4602. SR_V8_6 ("amevcntvoff014_el2", CPENC (3,4,C13,C9,6), 0),
  4603. SR_V8_6 ("amevcntvoff015_el2", CPENC (3,4,C13,C9,7), 0),
  4604. SR_V8_6 ("amevcntvoff10_el2", CPENC (3,4,C13,C10,0), 0),
  4605. SR_V8_6 ("amevcntvoff11_el2", CPENC (3,4,C13,C10,1), 0),
  4606. SR_V8_6 ("amevcntvoff12_el2", CPENC (3,4,C13,C10,2), 0),
  4607. SR_V8_6 ("amevcntvoff13_el2", CPENC (3,4,C13,C10,3), 0),
  4608. SR_V8_6 ("amevcntvoff14_el2", CPENC (3,4,C13,C10,4), 0),
  4609. SR_V8_6 ("amevcntvoff15_el2", CPENC (3,4,C13,C10,5), 0),
  4610. SR_V8_6 ("amevcntvoff16_el2", CPENC (3,4,C13,C10,6), 0),
  4611. SR_V8_6 ("amevcntvoff17_el2", CPENC (3,4,C13,C10,7), 0),
  4612. SR_V8_6 ("amevcntvoff18_el2", CPENC (3,4,C13,C11,0), 0),
  4613. SR_V8_6 ("amevcntvoff19_el2", CPENC (3,4,C13,C11,1), 0),
  4614. SR_V8_6 ("amevcntvoff110_el2", CPENC (3,4,C13,C11,2), 0),
  4615. SR_V8_6 ("amevcntvoff111_el2", CPENC (3,4,C13,C11,3), 0),
  4616. SR_V8_6 ("amevcntvoff112_el2", CPENC (3,4,C13,C11,4), 0),
  4617. SR_V8_6 ("amevcntvoff113_el2", CPENC (3,4,C13,C11,5), 0),
  4618. SR_V8_6 ("amevcntvoff114_el2", CPENC (3,4,C13,C11,6), 0),
  4619. SR_V8_6 ("amevcntvoff115_el2", CPENC (3,4,C13,C11,7), 0),
  4620. SR_V8_6 ("cntpoff_el2", CPENC (3,4,C14,C0,6), 0),
  4621. SR_V8_7 ("pmsnevfr_el1", CPENC (3,0,C9,C9,1), 0),
  4622. SR_V8_7 ("hcrx_el2", CPENC (3,4,C1,C2,2), 0),
  4623. SR_V8_8 ("allint", CPENC (3,0,C4,C3,0), 0),
  4624. SR_V8_8 ("icc_nmiar1_el1", CPENC (3,0,C12,C9,5), F_REG_READ),
  4625. { 0, CPENC (0,0,0,0,0), 0, 0 }
  4626. };
  4627. bool
  4628. aarch64_sys_reg_deprecated_p (const uint32_t reg_flags)
  4629. {
  4630. return (reg_flags & F_DEPRECATED) != 0;
  4631. }
/* The CPENC values below are fairly misleading: the fields here are not
   in CPENC form.  They are in op2:op1 form.  The fields are encoded by
   ins_pstatefield, which simply shifts the value by the width of the
   fields in a loop.  So if you CPENC them, only the first value will be
   set and the rest are masked out to 0.  As an example, op2 = 3,
   op1 = 2: CPENC would produce a value of 0b110000000001000000 (0x30040)
   while what you want is 0b011010 (0x1a).  */
/* PSTATE fields usable with the MSR (immediate) instruction.  The second
   column is the op2:op1 encoding of the field -- NOT a CPENC value; see
   the comment preceding this table.  F_REG_MAX_VALUE (N) records the
   largest immediate operand accepted for the field.  */
const aarch64_sys_reg aarch64_pstatefields [] =
{
  SR_CORE ("spsel", 0x05, F_REG_MAX_VALUE (1)),
  SR_CORE ("daifset", 0x1e, F_REG_MAX_VALUE (15)),
  SR_CORE ("daifclr", 0x1f, F_REG_MAX_VALUE (15)),
  SR_PAN ("pan", 0x04, F_REG_MAX_VALUE (1)),
  SR_V8_2 ("uao", 0x03, F_REG_MAX_VALUE (1)),
  SR_SSBS ("ssbs", 0x19, F_REG_MAX_VALUE (1)),
  SR_V8_4 ("dit", 0x1a, F_REG_MAX_VALUE (1)),
  SR_MEMTAG ("tco", 0x1c, F_REG_MAX_VALUE (1)),
  /* The three SME "svcr*" fields share encoding 0x1b and additionally
     carry a CRm/immediate pair via PSTATE_ENCODE_CRM_AND_IMM.  */
  SR_SME ("svcrsm", 0x1b, PSTATE_ENCODE_CRM_AND_IMM(0x2,0x1)
	  | F_REG_MAX_VALUE (1)),
  SR_SME ("svcrza", 0x1b, PSTATE_ENCODE_CRM_AND_IMM(0x4,0x1)
	  | F_REG_MAX_VALUE (1)),
  SR_SME ("svcrsmza", 0x1b, PSTATE_ENCODE_CRM_AND_IMM(0x6,0x1)
	  | F_REG_MAX_VALUE (1)),
  SR_V8_8 ("allint", 0x08, F_REG_MAX_VALUE (1)),
  { 0, CPENC (0,0,0,0,0), 0, 0 },	/* Sentinel.  */
};
  4658. bool
  4659. aarch64_pstatefield_supported_p (const aarch64_feature_set features,
  4660. const aarch64_sys_reg *reg)
  4661. {
  4662. if (!(reg->flags & F_ARCHEXT))
  4663. return true;
  4664. return AARCH64_CPU_HAS_ALL_FEATURES (features, reg->features);
  4665. }
  4666. const aarch64_sys_ins_reg aarch64_sys_regs_ic[] =
  4667. {
  4668. { "ialluis", CPENS(0,C7,C1,0), 0 },
  4669. { "iallu", CPENS(0,C7,C5,0), 0 },
  4670. { "ivau", CPENS (3, C7, C5, 1), F_HASXT },
  4671. { 0, CPENS(0,0,0,0), 0 }
  4672. };
/* Operands usable with the DC (data cache maintenance) instruction.
   F_HASXT: the operation takes a register operand.  F_ARCHEXT: the
   operation is only present with some architecture extension; which one
   is validated elsewhere, not in this table.  */
const aarch64_sys_ins_reg aarch64_sys_regs_dc[] =
{
  { "zva", CPENS (3, C7, C4, 1), F_HASXT },
  { "gva", CPENS (3, C7, C4, 3), F_HASXT | F_ARCHEXT },
  { "gzva", CPENS (3, C7, C4, 4), F_HASXT | F_ARCHEXT },
  { "ivac", CPENS (0, C7, C6, 1), F_HASXT },
  { "igvac", CPENS (0, C7, C6, 3), F_HASXT | F_ARCHEXT },
  { "igsw", CPENS (0, C7, C6, 4), F_HASXT | F_ARCHEXT },
  { "isw", CPENS (0, C7, C6, 2), F_HASXT },
  { "igdvac", CPENS (0, C7, C6, 5), F_HASXT | F_ARCHEXT },
  { "igdsw", CPENS (0, C7, C6, 6), F_HASXT | F_ARCHEXT },
  { "cvac", CPENS (3, C7, C10, 1), F_HASXT },
  { "cgvac", CPENS (3, C7, C10, 3), F_HASXT | F_ARCHEXT },
  { "cgdvac", CPENS (3, C7, C10, 5), F_HASXT | F_ARCHEXT },
  { "csw", CPENS (0, C7, C10, 2), F_HASXT },
  { "cgsw", CPENS (0, C7, C10, 4), F_HASXT | F_ARCHEXT },
  { "cgdsw", CPENS (0, C7, C10, 6), F_HASXT | F_ARCHEXT },
  { "cvau", CPENS (3, C7, C11, 1), F_HASXT },
  { "cvap", CPENS (3, C7, C12, 1), F_HASXT | F_ARCHEXT },
  { "cgvap", CPENS (3, C7, C12, 3), F_HASXT | F_ARCHEXT },
  { "cgdvap", CPENS (3, C7, C12, 5), F_HASXT | F_ARCHEXT },
  { "cvadp", CPENS (3, C7, C13, 1), F_HASXT | F_ARCHEXT },
  { "cgvadp", CPENS (3, C7, C13, 3), F_HASXT | F_ARCHEXT },
  { "cgdvadp", CPENS (3, C7, C13, 5), F_HASXT | F_ARCHEXT },
  { "civac", CPENS (3, C7, C14, 1), F_HASXT },
  { "cigvac", CPENS (3, C7, C14, 3), F_HASXT | F_ARCHEXT },
  { "cigdvac", CPENS (3, C7, C14, 5), F_HASXT | F_ARCHEXT },
  { "cisw", CPENS (0, C7, C14, 2), F_HASXT },
  { "cigsw", CPENS (0, C7, C14, 4), F_HASXT | F_ARCHEXT },
  { "cigdsw", CPENS (0, C7, C14, 6), F_HASXT | F_ARCHEXT },
  { "cipapa", CPENS (6, C7, C14, 1), F_HASXT },
  { "cigdpapa", CPENS (6, C7, C14, 5), F_HASXT },
  { 0, CPENS(0,0,0,0), 0 }	/* Sentinel.  */
};
/* Operands usable with the AT (address translation) instruction.  Every
   entry takes an address in a general-purpose register (F_HASXT);
   F_ARCHEXT entries require an architecture extension, validated
   elsewhere.  */
const aarch64_sys_ins_reg aarch64_sys_regs_at[] =
{
  { "s1e1r", CPENS (0, C7, C8, 0), F_HASXT },
  { "s1e1w", CPENS (0, C7, C8, 1), F_HASXT },
  { "s1e0r", CPENS (0, C7, C8, 2), F_HASXT },
  { "s1e0w", CPENS (0, C7, C8, 3), F_HASXT },
  { "s12e1r", CPENS (4, C7, C8, 4), F_HASXT },
  { "s12e1w", CPENS (4, C7, C8, 5), F_HASXT },
  { "s12e0r", CPENS (4, C7, C8, 6), F_HASXT },
  { "s12e0w", CPENS (4, C7, C8, 7), F_HASXT },
  { "s1e2r", CPENS (4, C7, C8, 0), F_HASXT },
  { "s1e2w", CPENS (4, C7, C8, 1), F_HASXT },
  { "s1e3r", CPENS (6, C7, C8, 0), F_HASXT },
  { "s1e3w", CPENS (6, C7, C8, 1), F_HASXT },
  { "s1e1rp", CPENS (0, C7, C9, 0), F_HASXT | F_ARCHEXT },
  { "s1e1wp", CPENS (0, C7, C9, 1), F_HASXT | F_ARCHEXT },
  { 0, CPENS(0,0,0,0), 0 }	/* Sentinel.  */
};
/* Operands usable with the TLBI (TLB invalidate) instruction.  F_HASXT:
   the operation takes a register operand.  F_ARCHEXT: the operation is
   only present with some architecture extension; which one is validated
   elsewhere, not in this table.  */
const aarch64_sys_ins_reg aarch64_sys_regs_tlbi[] =
{
  { "vmalle1", CPENS(0,C8,C7,0), 0 },
  { "vae1", CPENS (0, C8, C7, 1), F_HASXT },
  { "aside1", CPENS (0, C8, C7, 2), F_HASXT },
  { "vaae1", CPENS (0, C8, C7, 3), F_HASXT },
  { "vmalle1is", CPENS(0,C8,C3,0), 0 },
  { "vae1is", CPENS (0, C8, C3, 1), F_HASXT },
  { "aside1is", CPENS (0, C8, C3, 2), F_HASXT },
  { "vaae1is", CPENS (0, C8, C3, 3), F_HASXT },
  { "ipas2e1is", CPENS (4, C8, C0, 1), F_HASXT },
  { "ipas2le1is",CPENS (4, C8, C0, 5), F_HASXT },
  { "ipas2e1", CPENS (4, C8, C4, 1), F_HASXT },
  { "ipas2le1", CPENS (4, C8, C4, 5), F_HASXT },
  { "vae2", CPENS (4, C8, C7, 1), F_HASXT },
  { "vae2is", CPENS (4, C8, C3, 1), F_HASXT },
  { "vmalls12e1",CPENS(4,C8,C7,6), 0 },
  { "vmalls12e1is",CPENS(4,C8,C3,6), 0 },
  { "vae3", CPENS (6, C8, C7, 1), F_HASXT },
  { "vae3is", CPENS (6, C8, C3, 1), F_HASXT },
  { "alle2", CPENS(4,C8,C7,0), 0 },
  { "alle2is", CPENS(4,C8,C3,0), 0 },
  { "alle1", CPENS(4,C8,C7,4), 0 },
  { "alle1is", CPENS(4,C8,C3,4), 0 },
  { "alle3", CPENS(6,C8,C7,0), 0 },
  { "alle3is", CPENS(6,C8,C3,0), 0 },
  { "vale1is", CPENS (0, C8, C3, 5), F_HASXT },
  { "vale2is", CPENS (4, C8, C3, 5), F_HASXT },
  { "vale3is", CPENS (6, C8, C3, 5), F_HASXT },
  { "vaale1is", CPENS (0, C8, C3, 7), F_HASXT },
  { "vale1", CPENS (0, C8, C7, 5), F_HASXT },
  { "vale2", CPENS (4, C8, C7, 5), F_HASXT },
  { "vale3", CPENS (6, C8, C7, 5), F_HASXT },
  { "vaale1", CPENS (0, C8, C7, 7), F_HASXT },
  /* "*os" forms (F_ARCHEXT gated).  */
  { "vmalle1os", CPENS (0, C8, C1, 0), F_ARCHEXT },
  { "vae1os", CPENS (0, C8, C1, 1), F_HASXT | F_ARCHEXT },
  { "aside1os", CPENS (0, C8, C1, 2), F_HASXT | F_ARCHEXT },
  { "vaae1os", CPENS (0, C8, C1, 3), F_HASXT | F_ARCHEXT },
  { "vale1os", CPENS (0, C8, C1, 5), F_HASXT | F_ARCHEXT },
  { "vaale1os", CPENS (0, C8, C1, 7), F_HASXT | F_ARCHEXT },
  { "ipas2e1os", CPENS (4, C8, C4, 0), F_HASXT | F_ARCHEXT },
  { "ipas2le1os", CPENS (4, C8, C4, 4), F_HASXT | F_ARCHEXT },
  { "vae2os", CPENS (4, C8, C1, 1), F_HASXT | F_ARCHEXT },
  { "vale2os", CPENS (4, C8, C1, 5), F_HASXT | F_ARCHEXT },
  { "vmalls12e1os", CPENS (4, C8, C1, 6), F_ARCHEXT },
  { "vae3os", CPENS (6, C8, C1, 1), F_HASXT | F_ARCHEXT },
  { "vale3os", CPENS (6, C8, C1, 5), F_HASXT | F_ARCHEXT },
  { "alle2os", CPENS (4, C8, C1, 0), F_ARCHEXT },
  { "alle1os", CPENS (4, C8, C1, 4), F_ARCHEXT },
  { "alle3os", CPENS (6, C8, C1, 0), F_ARCHEXT },
  /* "r*" forms (F_ARCHEXT gated).  */
  { "rvae1", CPENS (0, C8, C6, 1), F_HASXT | F_ARCHEXT },
  { "rvaae1", CPENS (0, C8, C6, 3), F_HASXT | F_ARCHEXT },
  { "rvale1", CPENS (0, C8, C6, 5), F_HASXT | F_ARCHEXT },
  { "rvaale1", CPENS (0, C8, C6, 7), F_HASXT | F_ARCHEXT },
  { "rvae1is", CPENS (0, C8, C2, 1), F_HASXT | F_ARCHEXT },
  { "rvaae1is", CPENS (0, C8, C2, 3), F_HASXT | F_ARCHEXT },
  { "rvale1is", CPENS (0, C8, C2, 5), F_HASXT | F_ARCHEXT },
  { "rvaale1is", CPENS (0, C8, C2, 7), F_HASXT | F_ARCHEXT },
  { "rvae1os", CPENS (0, C8, C5, 1), F_HASXT | F_ARCHEXT },
  { "rvaae1os", CPENS (0, C8, C5, 3), F_HASXT | F_ARCHEXT },
  { "rvale1os", CPENS (0, C8, C5, 5), F_HASXT | F_ARCHEXT },
  { "rvaale1os", CPENS (0, C8, C5, 7), F_HASXT | F_ARCHEXT },
  { "ripas2e1is", CPENS (4, C8, C0, 2), F_HASXT | F_ARCHEXT },
  { "ripas2le1is",CPENS (4, C8, C0, 6), F_HASXT | F_ARCHEXT },
  { "ripas2e1", CPENS (4, C8, C4, 2), F_HASXT | F_ARCHEXT },
  { "ripas2le1", CPENS (4, C8, C4, 6), F_HASXT | F_ARCHEXT },
  { "ripas2e1os", CPENS (4, C8, C4, 3), F_HASXT | F_ARCHEXT },
  { "ripas2le1os",CPENS (4, C8, C4, 7), F_HASXT | F_ARCHEXT },
  { "rvae2", CPENS (4, C8, C6, 1), F_HASXT | F_ARCHEXT },
  { "rvale2", CPENS (4, C8, C6, 5), F_HASXT | F_ARCHEXT },
  { "rvae2is", CPENS (4, C8, C2, 1), F_HASXT | F_ARCHEXT },
  { "rvale2is", CPENS (4, C8, C2, 5), F_HASXT | F_ARCHEXT },
  { "rvae2os", CPENS (4, C8, C5, 1), F_HASXT | F_ARCHEXT },
  { "rvale2os", CPENS (4, C8, C5, 5), F_HASXT | F_ARCHEXT },
  { "rvae3", CPENS (6, C8, C6, 1), F_HASXT | F_ARCHEXT },
  { "rvale3", CPENS (6, C8, C6, 5), F_HASXT | F_ARCHEXT },
  { "rvae3is", CPENS (6, C8, C2, 1), F_HASXT | F_ARCHEXT },
  { "rvale3is", CPENS (6, C8, C2, 5), F_HASXT | F_ARCHEXT },
  { "rvae3os", CPENS (6, C8, C5, 1), F_HASXT | F_ARCHEXT },
  { "rvale3os", CPENS (6, C8, C5, 5), F_HASXT | F_ARCHEXT },
  { "rpaos", CPENS (6, C8, C4, 3), F_HASXT },
  { "rpalos", CPENS (6, C8, C4, 7), F_HASXT },
  { "paallos", CPENS (6, C8, C1, 4), 0},
  { "paall", CPENS (6, C8, C7, 4), 0},
  { 0, CPENS(0,0,0,0), 0 }	/* Sentinel.  */
};
/* System-instruction operands accepted by the CFP/DVP/CPP instructions
   (see the PREDRES check in aarch64_sys_ins_reg_supported_p below).
   The list is terminated by a null-name sentinel entry.  */
const aarch64_sys_ins_reg aarch64_sys_regs_sr[] =
{
    /* RCTX is somewhat unique in a way that it has different values
     (op2) based on the instruction in which it is used (cfp/dvp/cpp).
     Thus op2 is masked out and instead encoded directly in the
     aarch64_opcode_table entries for the respective instructions.  */
    { "rctx",   CPENS(3,C7,C3,0), F_HASXT | F_ARCHEXT | F_REG_WRITE}, /* WO */

    { 0,       CPENS(0,0,0,0), 0 }
};
  4820. bool
  4821. aarch64_sys_ins_reg_has_xt (const aarch64_sys_ins_reg *sys_ins_reg)
  4822. {
  4823. return (sys_ins_reg->flags & F_HASXT) != 0;
  4824. }
/* Return true if the system instruction operand described by REG_NAME,
   REG_VALUE, REG_FLAGS and REG_FEATURES is available when assembling for
   the feature set FEATURES.  Operands without F_ARCHEXT are always
   available; otherwise availability is decided either by REG_FEATURES or
   by the explicit encoding checks below.  The CPENS values compared
   against come from the operand tables above (aarch64_sys_regs_tlbi,
   aarch64_sys_regs_dc, aarch64_sys_regs_at, aarch64_sys_regs_sr).  */
extern bool
aarch64_sys_ins_reg_supported_p (const aarch64_feature_set features,
				 const char *reg_name,
				 aarch64_insn reg_value,
				 uint32_t reg_flags,
				 aarch64_feature_set reg_features)
{
  /* Armv8-R has no EL3.  Reject any operand whose name ends in "_el3".  */
  if (AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_R))
    {
      const char *suffix = strrchr (reg_name, '_');
      if (suffix && !strcmp (suffix, "_el3"))
	return false;
    }

  /* Operands not tied to an architecture extension are always valid.  */
  if (!(reg_flags & F_ARCHEXT))
    return true;

  /* If the table entry records its required features, honour them.  */
  if (reg_features
      && AARCH64_CPU_HAS_ALL_FEATURES (features, reg_features))
    return true;

  /* ARMv8.4 TLB instructions.  */
  if ((reg_value == CPENS (0, C8, C1, 0)
       || reg_value == CPENS (0, C8, C1, 1)
       || reg_value == CPENS (0, C8, C1, 2)
       || reg_value == CPENS (0, C8, C1, 3)
       || reg_value == CPENS (0, C8, C1, 5)
       || reg_value == CPENS (0, C8, C1, 7)
       || reg_value == CPENS (4, C8, C4, 0)
       || reg_value == CPENS (4, C8, C4, 4)
       || reg_value == CPENS (4, C8, C1, 1)
       || reg_value == CPENS (4, C8, C1, 5)
       || reg_value == CPENS (4, C8, C1, 6)
       || reg_value == CPENS (6, C8, C1, 1)
       || reg_value == CPENS (6, C8, C1, 5)
       || reg_value == CPENS (4, C8, C1, 0)
       || reg_value == CPENS (4, C8, C1, 4)
       || reg_value == CPENS (6, C8, C1, 0)
       || reg_value == CPENS (0, C8, C6, 1)
       || reg_value == CPENS (0, C8, C6, 3)
       || reg_value == CPENS (0, C8, C6, 5)
       || reg_value == CPENS (0, C8, C6, 7)
       || reg_value == CPENS (0, C8, C2, 1)
       || reg_value == CPENS (0, C8, C2, 3)
       || reg_value == CPENS (0, C8, C2, 5)
       || reg_value == CPENS (0, C8, C2, 7)
       || reg_value == CPENS (0, C8, C5, 1)
       || reg_value == CPENS (0, C8, C5, 3)
       || reg_value == CPENS (0, C8, C5, 5)
       || reg_value == CPENS (0, C8, C5, 7)
       || reg_value == CPENS (4, C8, C0, 2)
       || reg_value == CPENS (4, C8, C0, 6)
       || reg_value == CPENS (4, C8, C4, 2)
       || reg_value == CPENS (4, C8, C4, 6)
       || reg_value == CPENS (4, C8, C4, 3)
       || reg_value == CPENS (4, C8, C4, 7)
       || reg_value == CPENS (4, C8, C6, 1)
       || reg_value == CPENS (4, C8, C6, 5)
       || reg_value == CPENS (4, C8, C2, 1)
       || reg_value == CPENS (4, C8, C2, 5)
       || reg_value == CPENS (4, C8, C5, 1)
       || reg_value == CPENS (4, C8, C5, 5)
       || reg_value == CPENS (6, C8, C6, 1)
       || reg_value == CPENS (6, C8, C6, 5)
       || reg_value == CPENS (6, C8, C2, 1)
       || reg_value == CPENS (6, C8, C2, 5)
       || reg_value == CPENS (6, C8, C5, 1)
       || reg_value == CPENS (6, C8, C5, 5))
      && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_4))
    return true;

  /* DC CVAP.  Values are from aarch64_sys_regs_dc.  */
  if (reg_value == CPENS (3, C7, C12, 1)
      && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_2))
    return true;

  /* DC CVADP.  Values are from aarch64_sys_regs_dc.  */
  if (reg_value == CPENS (3, C7, C13, 1)
      && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_CVADP))
    return true;

  /* DC <dc_op> for ARMv8.5-A Memory Tagging Extension.  */
  if ((reg_value == CPENS (0, C7, C6, 3)
       || reg_value == CPENS (0, C7, C6, 4)
       || reg_value == CPENS (0, C7, C10, 4)
       || reg_value == CPENS (0, C7, C14, 4)
       || reg_value == CPENS (3, C7, C10, 3)
       || reg_value == CPENS (3, C7, C12, 3)
       || reg_value == CPENS (3, C7, C13, 3)
       || reg_value == CPENS (3, C7, C14, 3)
       || reg_value == CPENS (3, C7, C4, 3)
       || reg_value == CPENS (0, C7, C6, 5)
       || reg_value == CPENS (0, C7, C6, 6)
       || reg_value == CPENS (0, C7, C10, 6)
       || reg_value == CPENS (0, C7, C14, 6)
       || reg_value == CPENS (3, C7, C10, 5)
       || reg_value == CPENS (3, C7, C12, 5)
       || reg_value == CPENS (3, C7, C13, 5)
       || reg_value == CPENS (3, C7, C14, 5)
       || reg_value == CPENS (3, C7, C4, 4))
      && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_MEMTAG))
    return true;

  /* AT S1E1RP, AT S1E1WP.  Values are from aarch64_sys_regs_at.  */
  if ((reg_value == CPENS (0, C7, C9, 0)
       || reg_value == CPENS (0, C7, C9, 1))
      && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_V8_2))
    return true;

  /* CFP/DVP/CPP RCTX : Value are from aarch64_sys_regs_sr.  */
  if (reg_value == CPENS (3, C7, C3, 0)
      && AARCH64_CPU_HAS_FEATURE (features, AARCH64_FEATURE_PREDRES))
    return true;

  return false;
}
/* The C0..C15 helper macros were only needed for the system register
   tables above; retire them now.  */
#undef C0
#undef C1
#undef C2
#undef C3
#undef C4
#undef C5
#undef C6
#undef C7
#undef C8
#undef C9
#undef C10
#undef C11
#undef C12
#undef C13
#undef C14
#undef C15

/* Extract bit number BT from the instruction word INSN.  */
#define BIT(INSN,BT)     (((INSN) >> (BT)) & 1)
/* Extract the inclusive bit-field [HI:LO] from INSN.  */
#define BITS(INSN,HI,LO) (((INSN) >> (LO)) & ((1 << (((HI) - (LO)) + 1)) - 1))
  4951. static enum err_type
  4952. verify_ldpsw (const struct aarch64_inst *inst ATTRIBUTE_UNUSED,
  4953. const aarch64_insn insn, bfd_vma pc ATTRIBUTE_UNUSED,
  4954. bool encoding ATTRIBUTE_UNUSED,
  4955. aarch64_operand_error *mismatch_detail ATTRIBUTE_UNUSED,
  4956. aarch64_instr_sequence *insn_sequence ATTRIBUTE_UNUSED)
  4957. {
  4958. int t = BITS (insn, 4, 0);
  4959. int n = BITS (insn, 9, 5);
  4960. int t2 = BITS (insn, 14, 10);
  4961. if (BIT (insn, 23))
  4962. {
  4963. /* Write back enabled. */
  4964. if ((t == n || t2 == n) && n != 31)
  4965. return ERR_UND;
  4966. }
  4967. if (BIT (insn, 22))
  4968. {
  4969. /* Load */
  4970. if (t == t2)
  4971. return ERR_UND;
  4972. }
  4973. return ERR_OK;
  4974. }
  4975. /* Verifier for vector by element 3 operands functions where the
  4976. conditions `if sz:L == 11 then UNDEFINED` holds. */
  4977. static enum err_type
  4978. verify_elem_sd (const struct aarch64_inst *inst, const aarch64_insn insn,
  4979. bfd_vma pc ATTRIBUTE_UNUSED, bool encoding,
  4980. aarch64_operand_error *mismatch_detail ATTRIBUTE_UNUSED,
  4981. aarch64_instr_sequence *insn_sequence ATTRIBUTE_UNUSED)
  4982. {
  4983. const aarch64_insn undef_pattern = 0x3;
  4984. aarch64_insn value;
  4985. assert (inst->opcode);
  4986. assert (inst->opcode->operands[2] == AARCH64_OPND_Em);
  4987. value = encoding ? inst->value : insn;
  4988. assert (value);
  4989. if (undef_pattern == extract_fields (value, 0, 2, FLD_sz, FLD_L))
  4990. return ERR_UND;
  4991. return ERR_OK;
  4992. }
  4993. /* Check an instruction that takes three register operands and that
  4994. requires the register numbers to be distinct from one another. */
  4995. static enum err_type
  4996. verify_three_different_regs (const struct aarch64_inst *inst,
  4997. const aarch64_insn insn ATTRIBUTE_UNUSED,
  4998. bfd_vma pc ATTRIBUTE_UNUSED,
  4999. bool encoding ATTRIBUTE_UNUSED,
  5000. aarch64_operand_error *mismatch_detail
  5001. ATTRIBUTE_UNUSED,
  5002. aarch64_instr_sequence *insn_sequence
  5003. ATTRIBUTE_UNUSED)
  5004. {
  5005. int rd, rs, rn;
  5006. rd = inst->operands[0].reg.regno;
  5007. rs = inst->operands[1].reg.regno;
  5008. rn = inst->operands[2].reg.regno;
  5009. if (rd == rs || rd == rn || rs == rn)
  5010. {
  5011. mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
  5012. mismatch_detail->error
  5013. = _("the three register operands must be distinct from one another");
  5014. mismatch_detail->index = -1;
  5015. return ERR_UND;
  5016. }
  5017. return ERR_OK;
  5018. }
  5019. /* Add INST to the end of INSN_SEQUENCE. */
  5020. static void
  5021. add_insn_to_sequence (const struct aarch64_inst *inst,
  5022. aarch64_instr_sequence *insn_sequence)
  5023. {
  5024. insn_sequence->instr[insn_sequence->num_added_insns++] = *inst;
  5025. }
  5026. /* Initialize an instruction sequence insn_sequence with the instruction INST.
  5027. If INST is NULL the given insn_sequence is cleared and the sequence is left
  5028. uninitialized. */
  5029. void
  5030. init_insn_sequence (const struct aarch64_inst *inst,
  5031. aarch64_instr_sequence *insn_sequence)
  5032. {
  5033. int num_req_entries = 0;
  5034. if (insn_sequence->instr)
  5035. {
  5036. XDELETE (insn_sequence->instr);
  5037. insn_sequence->instr = NULL;
  5038. }
  5039. /* Handle all the cases here. May need to think of something smarter than
  5040. a giant if/else chain if this grows. At that time, a lookup table may be
  5041. best. */
  5042. if (inst && inst->opcode->constraints & C_SCAN_MOVPRFX)
  5043. num_req_entries = 1;
  5044. if (inst && (inst->opcode->constraints & C_SCAN_MOPS_PME) == C_SCAN_MOPS_P)
  5045. num_req_entries = 2;
  5046. insn_sequence->num_added_insns = 0;
  5047. insn_sequence->num_allocated_insns = num_req_entries;
  5048. if (num_req_entries != 0)
  5049. {
  5050. insn_sequence->instr = XCNEWVEC (aarch64_inst, num_req_entries);
  5051. add_insn_to_sequence (inst, insn_sequence);
  5052. }
  5053. }
/* Subroutine of verify_constraints.  Check whether the instruction
   is part of a MOPS P/M/E sequence and, if so, whether sequencing
   expectations are met.  Return true if the check passes, otherwise
   describe the problem in MISMATCH_DETAIL.

   IS_NEW_SECTION is true if INST is assumed to start a new section.
   The other arguments are as for verify_constraints.  */

static bool
verify_mops_pme_sequence (const struct aarch64_inst *inst,
			  bool is_new_section,
			  aarch64_operand_error *mismatch_detail,
			  aarch64_instr_sequence *insn_sequence)
{
  const struct aarch64_opcode *opcode;
  const struct aarch64_inst *prev_insn;
  int i;

  opcode = inst->opcode;
  if (insn_sequence->instr)
    prev_insn = insn_sequence->instr + (insn_sequence->num_added_insns - 1);
  else
    prev_insn = NULL;

  /* The P/M/E variants occupy consecutive entries in the opcode table,
     so "opcode - 1" below is the entry that the current instruction
     would have to be preceded by, and "prev_insn->opcode[1]" is the
     entry expected to follow PREV_INSN.  */
  if (prev_insn
      && (prev_insn->opcode->constraints & C_SCAN_MOPS_PME)
      && prev_insn->opcode != opcode - 1)
    {
      mismatch_detail->kind = AARCH64_OPDE_EXPECTED_A_AFTER_B;
      mismatch_detail->error = NULL;
      mismatch_detail->index = -1;
      mismatch_detail->data[0].s = prev_insn->opcode[1].name;
      mismatch_detail->data[1].s = prev_insn->opcode->name;
      mismatch_detail->non_fatal = true;
      return false;
    }

  if (opcode->constraints & C_SCAN_MOPS_PME)
    {
      /* A constrained M or E instruction must directly follow its
	 predecessor within the same section.  */
      if (is_new_section || !prev_insn || prev_insn->opcode != opcode - 1)
	{
	  mismatch_detail->kind = AARCH64_OPDE_A_SHOULD_FOLLOW_B;
	  mismatch_detail->error = NULL;
	  mismatch_detail->index = -1;
	  mismatch_detail->data[0].s = opcode->name;
	  mismatch_detail->data[1].s = opcode[-1].name;
	  mismatch_detail->non_fatal = true;
	  return false;
	}

      /* The Rd/Rs/Rn operands must match between consecutive
	 instructions of the sequence.  */
      for (i = 0; i < 3; ++i)
	/* There's no specific requirement for the data register to be
	   the same between consecutive SET* instructions.  */
	if ((opcode->operands[i] == AARCH64_OPND_MOPS_ADDR_Rd
	     || opcode->operands[i] == AARCH64_OPND_MOPS_ADDR_Rs
	     || opcode->operands[i] == AARCH64_OPND_MOPS_WB_Rn)
	    && prev_insn->operands[i].reg.regno != inst->operands[i].reg.regno)
	  {
	    mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	    if (opcode->operands[i] == AARCH64_OPND_MOPS_ADDR_Rd)
	      mismatch_detail->error = _("destination register differs from "
					 "preceding instruction");
	    else if (opcode->operands[i] == AARCH64_OPND_MOPS_ADDR_Rs)
	      mismatch_detail->error = _("source register differs from "
					 "preceding instruction");
	    else
	      mismatch_detail->error = _("size register differs from "
					 "preceding instruction");
	    mismatch_detail->index = i;
	    mismatch_detail->non_fatal = true;
	    return false;
	  }
    }

  return true;
}
/* This function verifies that the instruction INST adheres to its specified
   constraints.  If it does then ERR_OK is returned, if not then ERR_VFI is
   returned and MISMATCH_DETAIL contains the reason why verification failed.

   The function is called both during assembly and disassembly.  If assembling
   then ENCODING will be TRUE, else FALSE.  If dissassembling PC will be set
   and will contain the PC of the current instruction w.r.t to the section.

   If ENCODING and PC=0 then you are at a start of a section.  The constraints
   are verified against the given state insn_sequence which is updated as it
   transitions through the verification.  */

enum err_type
verify_constraints (const struct aarch64_inst *inst,
		    const aarch64_insn insn ATTRIBUTE_UNUSED,
		    bfd_vma pc,
		    bool encoding,
		    aarch64_operand_error *mismatch_detail,
		    aarch64_instr_sequence *insn_sequence)
{
  assert (inst);
  assert (inst->opcode);
  const struct aarch64_opcode *opcode = inst->opcode;
  /* Nothing to check when there is neither a constraint on this
     instruction nor an open sequence to validate against.  */
  if (!opcode->constraints && !insn_sequence->instr)
    return ERR_OK;

  assert (insn_sequence);

  enum err_type res = ERR_OK;

  /* This instruction puts a constraint on the insn_sequence.  */
  if (opcode->flags & F_SCAN)
    {
      /* Opening a new sequence while one is still pending is itself a
	 (non-fatal) violation.  */
      if (insn_sequence->instr)
	{
	  mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	  mismatch_detail->error = _("instruction opens new dependency "
				     "sequence without ending previous one");
	  mismatch_detail->index = -1;
	  mismatch_detail->non_fatal = true;
	  res = ERR_VFI;
	}

      init_insn_sequence (inst, insn_sequence);
      return res;
    }

  bool is_new_section = (!encoding && pc == 0);
  if (!verify_mops_pme_sequence (inst, is_new_section, mismatch_detail,
				 insn_sequence))
    {
      res = ERR_VFI;
      /* A failing M instruction keeps the sequence open; P and E
	 failures reset it.  */
      if ((opcode->constraints & C_SCAN_MOPS_PME) != C_SCAN_MOPS_M)
	init_insn_sequence (NULL, insn_sequence);
    }

  /* Verify constraints on an existing sequence.  */
  if (insn_sequence->instr)
    {
      const struct aarch64_opcode* inst_opcode = insn_sequence->instr->opcode;
      /* If we're decoding and we hit PC=0 with an open sequence then we haven't
	 closed a previous one that we should have.  */
      if (is_new_section && res == ERR_OK)
	{
	  mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	  mismatch_detail->error = _("previous `movprfx' sequence not closed");
	  mismatch_detail->index = -1;
	  mismatch_detail->non_fatal = true;
	  res = ERR_VFI;
	  /* Reset the sequence.  */
	  init_insn_sequence (NULL, insn_sequence);
	  return res;
	}

      /* Validate C_SCAN_MOVPRFX constraints.  Move this to a lookup table.  */
      if (inst_opcode->constraints & C_SCAN_MOVPRFX)
	{
	  /* Check to see if the MOVPRFX SVE instruction is followed by an SVE
	     instruction for better error messages.  */
	  if (!opcode->avariant
	      || !(*opcode->avariant &
		   (AARCH64_FEATURE_SVE | AARCH64_FEATURE_SVE2)))
	    {
	      mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	      mismatch_detail->error = _("SVE instruction expected after "
					 "`movprfx'");
	      mismatch_detail->index = -1;
	      mismatch_detail->non_fatal = true;
	      res = ERR_VFI;
	      goto done;
	    }

	  /* Check to see if the MOVPRFX SVE instruction is followed by an SVE
	     instruction that is allowed to be used with a MOVPRFX.  */
	  if (!(opcode->constraints & C_SCAN_MOVPRFX))
	    {
	      mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	      mismatch_detail->error = _("SVE `movprfx' compatible instruction "
					 "expected");
	      mismatch_detail->index = -1;
	      mismatch_detail->non_fatal = true;
	      res = ERR_VFI;
	      goto done;
	    }

	  /* Next check for usage of the predicate register.  */
	  aarch64_opnd_info blk_dest = insn_sequence->instr->operands[0];
	  aarch64_opnd_info blk_pred, inst_pred;
	  memset (&blk_pred, 0, sizeof (aarch64_opnd_info));
	  memset (&inst_pred, 0, sizeof (aarch64_opnd_info));
	  bool predicated = false;
	  assert (blk_dest.type == AARCH64_OPND_SVE_Zd);

	  /* Determine if the movprfx instruction used is predicated or not.  */
	  if (insn_sequence->instr->operands[1].type == AARCH64_OPND_SVE_Pg3)
	    {
	      predicated = true;
	      blk_pred = insn_sequence->instr->operands[1];
	    }

	  unsigned char max_elem_size = 0;
	  unsigned char current_elem_size;
	  int num_op_used = 0, last_op_usage = 0;
	  int i, inst_pred_idx = -1;
	  int num_ops = aarch64_num_of_operands (opcode);
	  /* Scan the operands of INST: count uses of the movprfx
	     destination register, track the widest element size seen,
	     and remember any governing predicate operand.  */
	  for (i = 0; i < num_ops; i++)
	    {
	      aarch64_opnd_info inst_op = inst->operands[i];
	      switch (inst_op.type)
		{
		  case AARCH64_OPND_SVE_Zd:
		  case AARCH64_OPND_SVE_Zm_5:
		  case AARCH64_OPND_SVE_Zm_16:
		  case AARCH64_OPND_SVE_Zn:
		  case AARCH64_OPND_SVE_Zt:
		  case AARCH64_OPND_SVE_Vm:
		  case AARCH64_OPND_SVE_Vn:
		  case AARCH64_OPND_Va:
		  case AARCH64_OPND_Vn:
		  case AARCH64_OPND_Vm:
		  case AARCH64_OPND_Sn:
		  case AARCH64_OPND_Sm:
		    if (inst_op.reg.regno == blk_dest.reg.regno)
		      {
			num_op_used++;
			last_op_usage = i;
		      }
		    current_elem_size
		      = aarch64_get_qualifier_esize (inst_op.qualifier);
		    if (current_elem_size > max_elem_size)
		      max_elem_size = current_elem_size;
		    break;
		  case AARCH64_OPND_SVE_Pd:
		  case AARCH64_OPND_SVE_Pg3:
		  case AARCH64_OPND_SVE_Pg4_5:
		  case AARCH64_OPND_SVE_Pg4_10:
		  case AARCH64_OPND_SVE_Pg4_16:
		  case AARCH64_OPND_SVE_Pm:
		  case AARCH64_OPND_SVE_Pn:
		  case AARCH64_OPND_SVE_Pt:
		  case AARCH64_OPND_SME_Pm:
		    inst_pred = inst_op;
		    inst_pred_idx = i;
		    break;
		  default:
		    break;
		}
	    }

	  assert (max_elem_size != 0);
	  aarch64_opnd_info inst_dest = inst->operands[0];
	  /* Determine the size that should be used to compare against the
	     movprfx size.  */
	  current_elem_size
	    = opcode->constraints & C_MAX_ELEM
	      ? max_elem_size
	      : aarch64_get_qualifier_esize (inst_dest.qualifier);

	  /* If movprfx is predicated do some extra checks.  */
	  if (predicated)
	    {
	      /* The instruction must be predicated.  */
	      if (inst_pred_idx < 0)
		{
		  mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
		  mismatch_detail->error = _("predicated instruction expected "
					     "after `movprfx'");
		  mismatch_detail->index = -1;
		  mismatch_detail->non_fatal = true;
		  res = ERR_VFI;
		  goto done;
		}

	      /* The instruction must have a merging predicate.  */
	      if (inst_pred.qualifier != AARCH64_OPND_QLF_P_M)
		{
		  mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
		  mismatch_detail->error = _("merging predicate expected due "
					     "to preceding `movprfx'");
		  mismatch_detail->index = inst_pred_idx;
		  mismatch_detail->non_fatal = true;
		  res = ERR_VFI;
		  goto done;
		}

	      /* The same register must be used in instruction.  */
	      if (blk_pred.reg.regno != inst_pred.reg.regno)
		{
		  mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
		  mismatch_detail->error = _("predicate register differs "
					     "from that in preceding "
					     "`movprfx'");
		  mismatch_detail->index = inst_pred_idx;
		  mismatch_detail->non_fatal = true;
		  res = ERR_VFI;
		  goto done;
		}
	    }

	  /* Destructive operations by definition must allow one usage of the
	     same register.  */
	  int allowed_usage
	    = aarch64_is_destructive_by_operands (opcode) ? 2 : 1;

	  /* Operand is not used at all.  */
	  if (num_op_used == 0)
	    {
	      mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	      mismatch_detail->error = _("output register of preceding "
					 "`movprfx' not used in current "
					 "instruction");
	      mismatch_detail->index = 0;
	      mismatch_detail->non_fatal = true;
	      res = ERR_VFI;
	      goto done;
	    }

	  /* We now know it's used, now determine exactly where it's used.  */
	  if (blk_dest.reg.regno != inst_dest.reg.regno)
	    {
	      mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	      mismatch_detail->error = _("output register of preceding "
					 "`movprfx' expected as output");
	      mismatch_detail->index = 0;
	      mismatch_detail->non_fatal = true;
	      res = ERR_VFI;
	      goto done;
	    }

	  /* Operand used more than allowed for the specific opcode type.  */
	  if (num_op_used > allowed_usage)
	    {
	      mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	      mismatch_detail->error = _("output register of preceding "
					 "`movprfx' used as input");
	      mismatch_detail->index = last_op_usage;
	      mismatch_detail->non_fatal = true;
	      res = ERR_VFI;
	      goto done;
	    }

	  /* Now the only thing left is the qualifiers checks.  The register
	     must have the same maximum element size.  */
	  if (inst_dest.qualifier
	      && blk_dest.qualifier
	      && current_elem_size
		 != aarch64_get_qualifier_esize (blk_dest.qualifier))
	    {
	      mismatch_detail->kind = AARCH64_OPDE_SYNTAX_ERROR;
	      mismatch_detail->error = _("register size not compatible with "
					 "previous `movprfx'");
	      mismatch_detail->index = 0;
	      mismatch_detail->non_fatal = true;
	      res = ERR_VFI;
	      goto done;
	    }
	}

    done:
      if (insn_sequence->num_added_insns == insn_sequence->num_allocated_insns)
	/* We've checked the last instruction in the sequence and so
	   don't need the sequence any more.  */
	init_insn_sequence (NULL, insn_sequence);
      else
	add_insn_to_sequence (inst, insn_sequence);
    }

  return res;
}
  5387. /* Return true if VALUE cannot be moved into an SVE register using DUP
  5388. (with any element size, not just ESIZE) and if using DUPM would
  5389. therefore be OK. ESIZE is the number of bytes in the immediate. */
  5390. bool
  5391. aarch64_sve_dupm_mov_immediate_p (uint64_t uvalue, int esize)
  5392. {
  5393. int64_t svalue = uvalue;
  5394. uint64_t upper = (uint64_t) -1 << (esize * 4) << (esize * 4);
  5395. if ((uvalue & ~upper) != uvalue && (uvalue | upper) != uvalue)
  5396. return false;
  5397. if (esize <= 4 || (uint32_t) uvalue == (uint32_t) (uvalue >> 32))
  5398. {
  5399. svalue = (int32_t) uvalue;
  5400. if (esize <= 2 || (uint16_t) uvalue == (uint16_t) (uvalue >> 16))
  5401. {
  5402. svalue = (int16_t) uvalue;
  5403. if (esize == 1 || (uint8_t) uvalue == (uint8_t) (uvalue >> 8))
  5404. return false;
  5405. }
  5406. }
  5407. if ((svalue & 0xff) == 0)
  5408. svalue /= 256;
  5409. return svalue < -128 || svalue >= 128;
  5410. }
/* Include the opcode description table as well as the operand description
   table.  VERIFIER maps the verifier names used in aarch64-tbl.h onto the
   verify_* functions defined above.  */
#define VERIFIER(x) verify_##x
#include "aarch64-tbl.h"