/* Perform non-arithmetic operations on values, for GDB.

   Copyright (C) 1986-2022 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "defs.h"
#include "symtab.h"
#include "gdbtypes.h"
#include "value.h"
#include "frame.h"
#include "inferior.h"
#include "gdbcore.h"
#include "target.h"
#include "demangle.h"
#include "language.h"
#include "gdbcmd.h"
#include "regcache.h"
#include "cp-abi.h"
#include "block.h"
#include "infcall.h"
#include "dictionary.h"
#include "cp-support.h"
#include "target-float.h"
#include "tracepoint.h"
#include "observable.h"
#include "objfiles.h"
#include "extension.h"
#include "gdbtypes.h"
#include "gdbsupport/byte-vector.h"

/* Local functions.  */

static int typecmp (bool staticp, bool varargs, int nargs,
                    struct field t1[], const gdb::array_view<value *> t2);

static struct value *search_struct_field (const char *, struct value *,
                                          struct type *, int);

static struct value *search_struct_method (const char *, struct value **,
                                           gdb::optional<gdb::array_view<value *>>,
                                           LONGEST, int *, struct type *);

static int find_oload_champ_namespace (gdb::array_view<value *> args,
                                       const char *, const char *,
                                       std::vector<symbol *> *oload_syms,
                                       badness_vector *,
                                       const int no_adl);

static int find_oload_champ_namespace_loop (gdb::array_view<value *> args,
                                            const char *, const char *,
                                            int, std::vector<symbol *> *oload_syms,
                                            badness_vector *, int *,
                                            const int no_adl);

static int find_oload_champ (gdb::array_view<value *> args,
                             size_t num_fns,
                             fn_field *methods,
                             xmethod_worker_up *xmethods,
                             symbol **functions,
                             badness_vector *oload_champ_bv);

static int oload_method_static_p (struct fn_field *, int);

enum oload_classification { STANDARD, NON_STANDARD, INCOMPATIBLE };

static enum oload_classification classify_oload_match
  (const badness_vector &, int, int);

static struct value *value_struct_elt_for_reference (struct type *,
                                                     int, struct type *,
                                                     const char *,
                                                     struct type *,
                                                     int, enum noside);

static struct value *value_namespace_elt (const struct type *,
                                          const char *, int , enum noside);

static struct value *value_maybe_namespace_elt (const struct type *,
                                                const char *, int,
                                                enum noside);

static CORE_ADDR allocate_space_in_inferior (int);

static struct value *cast_into_complex (struct type *, struct value *);

bool overload_resolution = false;

static void
show_overload_resolution (struct ui_file *file, int from_tty,
                          struct cmd_list_element *c,
                          const char *value)
{
  gdb_printf (file, _("Overload resolution in evaluating "
                      "C++ functions is %s.\n"),
              value);
}

/* Find the address of function name NAME in the inferior.  If OBJF_P
   is non-NULL, *OBJF_P will be set to the OBJFILE where the function
   is defined.  */

struct value *
find_function_in_inferior (const char *name, struct objfile **objf_p)
{
  struct block_symbol sym;

  sym = lookup_symbol (name, 0, VAR_DOMAIN, 0);
  if (sym.symbol != NULL)
    {
      if (sym.symbol->aclass () != LOC_BLOCK)
        {
          error (_("\"%s\" exists in this program but is not a function."),
                 name);
        }

      if (objf_p)
        *objf_p = symbol_objfile (sym.symbol);

      return value_of_variable (sym.symbol, sym.block);
    }
  else
    {
      struct bound_minimal_symbol msymbol =
        lookup_bound_minimal_symbol (name);

      if (msymbol.minsym != NULL)
        {
          struct objfile *objfile = msymbol.objfile;
          struct gdbarch *gdbarch = objfile->arch ();

          struct type *type;
          CORE_ADDR maddr;
          type = lookup_pointer_type (builtin_type (gdbarch)->builtin_char);
          type = lookup_function_type (type);
          type = lookup_pointer_type (type);
          maddr = BMSYMBOL_VALUE_ADDRESS (msymbol);

          if (objf_p)
            *objf_p = objfile;

          return value_from_pointer (type, maddr);
        }
      else
        {
          if (!target_has_execution ())
            error (_("evaluation of this expression "
                     "requires the target program to be active"));
          else
            error (_("evaluation of this expression requires the "
                     "program to have a function \"%s\"."),
                   name);
        }
    }
}
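
/* A minimal usage sketch, mirroring what value_allocate_space_in_inferior
   below actually does: locate a function in the inferior, build an
   argument value, and call it by hand (the byte count 64 is arbitrary):

     struct objfile *objf;
     struct value *fn = find_function_in_inferior ("malloc", &objf);
     struct value *nbytes
       = value_from_longest (builtin_type (objf->arch ())->builtin_int, 64);
     struct value *ptr = call_function_by_hand (fn, NULL, nbytes);  */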

/* Allocate NBYTES of space in the inferior using the inferior's
   malloc and return a value that is a pointer to the allocated
   space.  */

struct value *
value_allocate_space_in_inferior (int len)
{
  struct objfile *objf;
  struct value *val = find_function_in_inferior ("malloc", &objf);
  struct gdbarch *gdbarch = objf->arch ();
  struct value *blocklen;

  blocklen = value_from_longest (builtin_type (gdbarch)->builtin_int, len);
  val = call_function_by_hand (val, NULL, blocklen);
  if (value_logical_not (val))
    {
      if (!target_has_execution ())
        error (_("No memory available to program now: "
                 "you need to start the target first"));
      else
        error (_("No memory available to program: call to malloc failed"));
    }
  return val;
}

static CORE_ADDR
allocate_space_in_inferior (int len)
{
  return value_as_long (value_allocate_space_in_inferior (len));
}

/* Cast struct value VAL to type TYPE and return as a value.
   Both type and val must be of TYPE_CODE_STRUCT or TYPE_CODE_UNION
   for this to work.  Typedef to one of the codes is permitted.

   Returns NULL if the cast is neither an upcast nor a downcast.  */
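
/* For illustration (hypothetical class names): casting a value of a
   derived class D to a base class B searches D for B as a superclass
   (upcast), while casting B to D consults the value's run-time type and
   the layout of D (downcast), adjusting the address as needed.  */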

static struct value *
value_cast_structs (struct type *type, struct value *v2)
{
  struct type *t1;
  struct type *t2;
  struct value *v;

  gdb_assert (type != NULL && v2 != NULL);

  t1 = check_typedef (type);
  t2 = check_typedef (value_type (v2));

  /* Check preconditions.  */
  gdb_assert ((t1->code () == TYPE_CODE_STRUCT
               || t1->code () == TYPE_CODE_UNION)
              && !!"Precondition is that type is of STRUCT or UNION kind.");
  gdb_assert ((t2->code () == TYPE_CODE_STRUCT
               || t2->code () == TYPE_CODE_UNION)
              && !!"Precondition is that value is of STRUCT or UNION kind");

  if (t1->name () != NULL
      && t2->name () != NULL
      && !strcmp (t1->name (), t2->name ()))
    return NULL;

  /* Upcasting: look in the type of the source to see if it contains the
     type of the target as a superclass.  If so, we'll need to
     offset the pointer rather than just change its type.  */
  if (t1->name () != NULL)
    {
      v = search_struct_field (t1->name (),
                               v2, t2, 1);
      if (v)
        return v;
    }

  /* Downcasting: look in the type of the target to see if it contains the
     type of the source as a superclass.  If so, we'll need to
     offset the pointer rather than just change its type.  */
  if (t2->name () != NULL)
    {
      /* Try downcasting using the run-time type of the value.  */
      int full, using_enc;
      LONGEST top;
      struct type *real_type;

      real_type = value_rtti_type (v2, &full, &top, &using_enc);
      if (real_type)
        {
          v = value_full_object (v2, real_type, full, top, using_enc);
          v = value_at_lazy (real_type, value_address (v));
          real_type = value_type (v);

          /* We might be trying to cast to the outermost enclosing
             type, in which case search_struct_field won't work.  */
          if (real_type->name () != NULL
              && !strcmp (real_type->name (), t1->name ()))
            return v;

          v = search_struct_field (t2->name (), v, real_type, 1);
          if (v)
            return v;
        }

      /* Try downcasting using information from the destination type
         T2.  This wouldn't work properly for classes with virtual
         bases, but those were handled above.  */
      v = search_struct_field (t2->name (),
                               value_zero (t1, not_lval), t1, 1);
      if (v)
        {
          /* Downcasting is possible (t1 is superclass of v2).  */
          CORE_ADDR addr2 = value_address (v2);

          addr2 -= value_address (v) + value_embedded_offset (v);
          return value_at (type, addr2);
        }
    }

  return NULL;
}

/* Cast one pointer or reference type to another.  Both TYPE and
   the type of ARG2 should be pointer types, or else both should be
   reference types.  If SUBCLASS_CHECK is non-zero, this will force a
   check to see whether TYPE is a superclass of ARG2's type.  If
   SUBCLASS_CHECK is zero, then the subclass check is done only when
   ARG2 is itself non-zero.  Returns the new pointer or reference.  */

struct value *
value_cast_pointers (struct type *type, struct value *arg2,
                     int subclass_check)
{
  struct type *type1 = check_typedef (type);
  struct type *type2 = check_typedef (value_type (arg2));
  struct type *t1 = check_typedef (TYPE_TARGET_TYPE (type1));
  struct type *t2 = check_typedef (TYPE_TARGET_TYPE (type2));

  if (t1->code () == TYPE_CODE_STRUCT
      && t2->code () == TYPE_CODE_STRUCT
      && (subclass_check || !value_logical_not (arg2)))
    {
      struct value *v2;

      if (TYPE_IS_REFERENCE (type2))
        v2 = coerce_ref (arg2);
      else
        v2 = value_ind (arg2);
      gdb_assert (check_typedef (value_type (v2))->code ()
                  == TYPE_CODE_STRUCT && !!"Why did coercion fail?");
      v2 = value_cast_structs (t1, v2);
      /* At this point we have what we can have, un-dereference if needed.  */
      if (v2)
        {
          struct value *v = value_addr (v2);

          deprecated_set_value_type (v, type);
          return v;
        }
    }

  /* No superclass found, just change the pointer type.  */
  arg2 = value_copy (arg2);
  deprecated_set_value_type (arg2, type);
  set_value_enclosing_type (arg2, type);
  set_value_pointed_to_offset (arg2, 0);  /* pai: chk_val */
  return arg2;
}

/* See value.h.  */

gdb_mpq
value_to_gdb_mpq (struct value *value)
{
  struct type *type = check_typedef (value_type (value));

  gdb_mpq result;
  if (is_floating_type (type))
    {
      double d = target_float_to_host_double (value_contents (value).data (),
                                              type);
      mpq_set_d (result.val, d);
    }
  else
    {
      gdb_assert (is_integral_type (type)
                  || is_fixed_point_type (type));

      gdb_mpz vz;
      vz.read (value_contents (value), type_byte_order (type),
               type->is_unsigned ());
      mpq_set_z (result.val, vz.val);

      if (is_fixed_point_type (type))
        mpq_mul (result.val, result.val,
                 type->fixed_point_scaling_factor ().val);
    }

  return result;
}

/* Assuming that TO_TYPE is a fixed point type, return a value
   corresponding to the cast of FROM_VAL to that type.  */

static struct value *
value_cast_to_fixed_point (struct type *to_type, struct value *from_val)
{
  struct type *from_type = value_type (from_val);

  if (from_type == to_type)
    return from_val;

  if (!is_floating_type (from_type)
      && !is_integral_type (from_type)
      && !is_fixed_point_type (from_type))
    error (_("Invalid conversion from type %s to fixed point type %s"),
           from_type->name (), to_type->name ());

  gdb_mpq vq = value_to_gdb_mpq (from_val);

  /* Divide that value by the scaling factor to obtain the unscaled
     value, first in rational form, and then in integer form.  */

  mpq_div (vq.val, vq.val, to_type->fixed_point_scaling_factor ().val);
  gdb_mpz unscaled = vq.get_rounded ();

  /* Finally, create the result value, and pack the unscaled value
     in it.  */

  struct value *result = allocate_value (to_type);
  unscaled.write (value_contents_raw (result),
                  type_byte_order (to_type),
                  to_type->is_unsigned ());

  return result;
}

/* Cast value ARG2 to type TYPE and return as a value.
   More general than a C cast: accepts any two types of the same length,
   and if ARG2 is an lvalue it can be cast into anything at all.  */
/* In C++, casts may change pointer or object representations.  */

struct value *
value_cast (struct type *type, struct value *arg2)
{
  enum type_code code1;
  enum type_code code2;
  int scalar;
  struct type *type2;

  int convert_to_boolean = 0;

  /* TYPE might be equal in meaning to the existing type of ARG2, but for
     many reasons, might be a different type object (e.g. TYPE might be a
     gdbarch owned type, while VALUE_TYPE (ARG2) could be an objfile owned
     type).

     In this case we want to preserve the LVAL of ARG2 as this allows the
     resulting value to be used in more places.  We do this by calling
     VALUE_COPY if appropriate.  */

  if (types_deeply_equal (value_type (arg2), type))
    {
      /* If the types are exactly equal then we can avoid creating a new
         value completely.  */
      if (value_type (arg2) != type)
        {
          arg2 = value_copy (arg2);
          deprecated_set_value_type (arg2, type);
        }
      return arg2;
    }

  if (is_fixed_point_type (type))
    return value_cast_to_fixed_point (type, arg2);

  /* Check if we are casting struct reference to struct reference.  */
  if (TYPE_IS_REFERENCE (check_typedef (type)))
    {
      /* We dereference type; then we recurse and finally
         we generate value of the given reference.  Nothing wrong with
         that.  */
      struct type *t1 = check_typedef (type);
      struct type *dereftype = check_typedef (TYPE_TARGET_TYPE (t1));
      struct value *val = value_cast (dereftype, arg2);

      return value_ref (val, t1->code ());
    }

  if (TYPE_IS_REFERENCE (check_typedef (value_type (arg2))))
    /* We deref the value and then do the cast.  */
    return value_cast (type, coerce_ref (arg2));

  /* Strip typedefs / resolve stubs in order to get at the type's
     code/length, but remember the original type, to use as the
     resulting type of the cast, in case it was a typedef.  */
  struct type *to_type = type;

  type = check_typedef (type);
  code1 = type->code ();
  arg2 = coerce_ref (arg2);
  type2 = check_typedef (value_type (arg2));

  /* You can't cast to a reference type.  See value_cast_pointers
     instead.  */
  gdb_assert (!TYPE_IS_REFERENCE (type));

  /* A cast to an undetermined-length array_type, such as
     (TYPE [])OBJECT, is treated like a cast to (TYPE [N])OBJECT,
     where N is sizeof(OBJECT)/sizeof(TYPE).  */
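  /* For example, assuming 4-byte ints, casting a 12-byte object with
     "(int [])obj" behaves like "(int [3])obj".  */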
  if (code1 == TYPE_CODE_ARRAY)
    {
      struct type *element_type = TYPE_TARGET_TYPE (type);
      unsigned element_length = TYPE_LENGTH (check_typedef (element_type));

      if (element_length > 0 && type->bounds ()->high.kind () == PROP_UNDEFINED)
        {
          struct type *range_type = type->index_type ();
          int val_length = TYPE_LENGTH (type2);
          LONGEST low_bound, high_bound, new_length;

          if (!get_discrete_bounds (range_type, &low_bound, &high_bound))
            low_bound = 0, high_bound = 0;
          new_length = val_length / element_length;
          if (val_length % element_length != 0)
            warning (_("array element type size does not "
                       "divide object size in cast"));
          /* FIXME-type-allocation: need a way to free this type when
             we are done with it.  */
          range_type = create_static_range_type (NULL,
                                                 TYPE_TARGET_TYPE (range_type),
                                                 low_bound,
                                                 new_length + low_bound - 1);
          deprecated_set_value_type (arg2,
                                     create_array_type (NULL,
                                                        element_type,
                                                        range_type));
          return arg2;
        }
    }

  if (current_language->c_style_arrays_p ()
      && type2->code () == TYPE_CODE_ARRAY
      && !type2->is_vector ())
    arg2 = value_coerce_array (arg2);

  if (type2->code () == TYPE_CODE_FUNC)
    arg2 = value_coerce_function (arg2);

  type2 = check_typedef (value_type (arg2));
  code2 = type2->code ();

  if (code1 == TYPE_CODE_COMPLEX)
    return cast_into_complex (to_type, arg2);
  if (code1 == TYPE_CODE_BOOL)
    {
      code1 = TYPE_CODE_INT;
      convert_to_boolean = 1;
    }
  if (code1 == TYPE_CODE_CHAR)
    code1 = TYPE_CODE_INT;
  if (code2 == TYPE_CODE_BOOL || code2 == TYPE_CODE_CHAR)
    code2 = TYPE_CODE_INT;

  scalar = (code2 == TYPE_CODE_INT || code2 == TYPE_CODE_FLT
            || code2 == TYPE_CODE_DECFLOAT || code2 == TYPE_CODE_ENUM
            || code2 == TYPE_CODE_RANGE
            || is_fixed_point_type (type2));

  if ((code1 == TYPE_CODE_STRUCT || code1 == TYPE_CODE_UNION)
      && (code2 == TYPE_CODE_STRUCT || code2 == TYPE_CODE_UNION)
      && type->name () != 0)
    {
      struct value *v = value_cast_structs (to_type, arg2);

      if (v)
        return v;
    }

  if (is_floating_type (type) && scalar)
    {
      if (is_floating_value (arg2))
        {
          struct value *v = allocate_value (to_type);

          target_float_convert (value_contents (arg2).data (), type2,
                                value_contents_raw (v).data (), type);
          return v;
        }
      else if (is_fixed_point_type (type2))
        {
          gdb_mpq fp_val;

          fp_val.read_fixed_point (value_contents (arg2),
                                   type_byte_order (type2),
                                   type2->is_unsigned (),
                                   type2->fixed_point_scaling_factor ());

          struct value *v = allocate_value (to_type);
          target_float_from_host_double (value_contents_raw (v).data (),
                                         to_type, mpq_get_d (fp_val.val));
          return v;
        }

      /* The only option left is an integral type.  */
      if (type2->is_unsigned ())
        return value_from_ulongest (to_type, value_as_long (arg2));
      else
        return value_from_longest (to_type, value_as_long (arg2));
    }
  else if ((code1 == TYPE_CODE_INT || code1 == TYPE_CODE_ENUM
            || code1 == TYPE_CODE_RANGE)
           && (scalar || code2 == TYPE_CODE_PTR
               || code2 == TYPE_CODE_MEMBERPTR))
    {
      LONGEST longest;

      /* When we cast pointers to integers, we mustn't use
         gdbarch_pointer_to_address to find the address the pointer
         represents, as value_as_long would.  GDB should evaluate
         expressions just as the compiler would --- and the compiler
         sees a cast as a simple reinterpretation of the pointer's
         bits.  */
      if (code2 == TYPE_CODE_PTR)
        longest = extract_unsigned_integer
          (value_contents (arg2), type_byte_order (type2));
      else
        longest = value_as_long (arg2);
      return value_from_longest (to_type, convert_to_boolean ?
                                 (LONGEST) (longest ? 1 : 0) : longest);
    }
  else if (code1 == TYPE_CODE_PTR && (code2 == TYPE_CODE_INT
                                      || code2 == TYPE_CODE_ENUM
                                      || code2 == TYPE_CODE_RANGE))
    {
      /* TYPE_LENGTH (type) is the length of a pointer, but we really
         want the length of an address! -- we are really dealing with
         addresses (i.e., gdb representations) not pointers (i.e.,
         target representations) here.

         This allows things like "print *(int *)0x01000234" to work
         without printing a misleading message -- which would
         otherwise occur when dealing with a target having two byte
         pointers and four byte addresses.  */
      int addr_bit = gdbarch_addr_bit (type2->arch ());
      LONGEST longest = value_as_long (arg2);

      if (addr_bit < sizeof (LONGEST) * HOST_CHAR_BIT)
        {
          if (longest >= ((LONGEST) 1 << addr_bit)
              || longest <= -((LONGEST) 1 << addr_bit))
            warning (_("value truncated"));
        }
      return value_from_longest (to_type, longest);
    }
  else if (code1 == TYPE_CODE_METHODPTR && code2 == TYPE_CODE_INT
           && value_as_long (arg2) == 0)
    {
      struct value *result = allocate_value (to_type);

      cplus_make_method_ptr (to_type,
                             value_contents_writeable (result).data (), 0, 0);
      return result;
    }
  else if (code1 == TYPE_CODE_MEMBERPTR && code2 == TYPE_CODE_INT
           && value_as_long (arg2) == 0)
    {
      /* The Itanium C++ ABI represents NULL pointers to members as
         minus one, instead of biasing the normal case.  */
      return value_from_longest (to_type, -1);
    }
  else if (code1 == TYPE_CODE_ARRAY && type->is_vector ()
           && code2 == TYPE_CODE_ARRAY && type2->is_vector ()
           && TYPE_LENGTH (type) != TYPE_LENGTH (type2))
    error (_("Cannot convert between vector values of different sizes"));
  else if (code1 == TYPE_CODE_ARRAY && type->is_vector () && scalar
           && TYPE_LENGTH (type) != TYPE_LENGTH (type2))
    error (_("can only cast scalar to vector of same size"));
  else if (code1 == TYPE_CODE_VOID)
    {
      return value_zero (to_type, not_lval);
    }
  else if (TYPE_LENGTH (type) == TYPE_LENGTH (type2))
    {
      if (code1 == TYPE_CODE_PTR && code2 == TYPE_CODE_PTR)
        return value_cast_pointers (to_type, arg2, 0);

      arg2 = value_copy (arg2);
      deprecated_set_value_type (arg2, to_type);
      set_value_enclosing_type (arg2, to_type);
      set_value_pointed_to_offset (arg2, 0);  /* pai: chk_val */
      return arg2;
    }
  else if (VALUE_LVAL (arg2) == lval_memory)
    return value_at_lazy (to_type, value_address (arg2));
  else
    {
      if (current_language->la_language == language_ada)
        error (_("Invalid type conversion."));
      error (_("Invalid cast."));
    }
}

/* The C++ reinterpret_cast operator.  */

struct value *
value_reinterpret_cast (struct type *type, struct value *arg)
{
  struct value *result;
  struct type *real_type = check_typedef (type);
  struct type *arg_type, *dest_type;
  int is_ref = 0;
  enum type_code dest_code, arg_code;

  /* Do reference, function, and array conversion.  */
  arg = coerce_array (arg);

  /* Attempt to preserve the type the user asked for.  */
  dest_type = type;

  /* If we are casting to a reference type, transform
     reinterpret_cast<T&[&]>(V) to *reinterpret_cast<T*>(&V).  */
  if (TYPE_IS_REFERENCE (real_type))
    {
      is_ref = 1;
      arg = value_addr (arg);
      dest_type = lookup_pointer_type (TYPE_TARGET_TYPE (dest_type));
      real_type = lookup_pointer_type (real_type);
    }

  arg_type = value_type (arg);

  dest_code = real_type->code ();
  arg_code = arg_type->code ();

  /* We can convert pointer types, or any pointer type to int, or int
     type to pointer.  */
  if ((dest_code == TYPE_CODE_PTR && arg_code == TYPE_CODE_INT)
      || (dest_code == TYPE_CODE_INT && arg_code == TYPE_CODE_PTR)
      || (dest_code == TYPE_CODE_METHODPTR && arg_code == TYPE_CODE_INT)
      || (dest_code == TYPE_CODE_INT && arg_code == TYPE_CODE_METHODPTR)
      || (dest_code == TYPE_CODE_MEMBERPTR && arg_code == TYPE_CODE_INT)
      || (dest_code == TYPE_CODE_INT && arg_code == TYPE_CODE_MEMBERPTR)
      || (dest_code == arg_code
          && (dest_code == TYPE_CODE_PTR
              || dest_code == TYPE_CODE_METHODPTR
              || dest_code == TYPE_CODE_MEMBERPTR)))
    result = value_cast (dest_type, arg);
  else
    error (_("Invalid reinterpret_cast"));

  if (is_ref)
    result = value_cast (type, value_ref (value_ind (result),
                                          type->code ()));

  return result;
}

/* A helper for value_dynamic_cast.  This implements the first of two
   runtime checks: we iterate over all the base classes of the value's
   class which are equal to the desired class; if only one of these
   holds the value, then it is the answer.  */

static int
dynamic_cast_check_1 (struct type *desired_type,
                      const gdb_byte *valaddr,
                      LONGEST embedded_offset,
                      CORE_ADDR address,
                      struct value *val,
                      struct type *search_type,
                      CORE_ADDR arg_addr,
                      struct type *arg_type,
                      struct value **result)
{
  int i, result_count = 0;

  for (i = 0; i < TYPE_N_BASECLASSES (search_type) && result_count < 2; ++i)
    {
      LONGEST offset = baseclass_offset (search_type, i, valaddr,
                                         embedded_offset,
                                         address, val);

      if (class_types_same_p (desired_type, TYPE_BASECLASS (search_type, i)))
        {
          if (address + embedded_offset + offset >= arg_addr
              && address + embedded_offset + offset < arg_addr + TYPE_LENGTH (arg_type))
            {
              ++result_count;
              if (!*result)
                *result = value_at_lazy (TYPE_BASECLASS (search_type, i),
                                         address + embedded_offset + offset);
            }
        }
      else
        result_count += dynamic_cast_check_1 (desired_type,
                                              valaddr,
                                              embedded_offset + offset,
                                              address, val,
                                              TYPE_BASECLASS (search_type, i),
                                              arg_addr,
                                              arg_type,
                                              result);
    }

  return result_count;
}

/* A helper for value_dynamic_cast.  This implements the second of two
   runtime checks: we look for a unique public sibling class of the
   argument's declared class.  */

static int
dynamic_cast_check_2 (struct type *desired_type,
                      const gdb_byte *valaddr,
                      LONGEST embedded_offset,
                      CORE_ADDR address,
                      struct value *val,
                      struct type *search_type,
                      struct value **result)
{
  int i, result_count = 0;

  for (i = 0; i < TYPE_N_BASECLASSES (search_type) && result_count < 2; ++i)
    {
      LONGEST offset;

      if (! BASETYPE_VIA_PUBLIC (search_type, i))
        continue;

      offset = baseclass_offset (search_type, i, valaddr, embedded_offset,
                                 address, val);
      if (class_types_same_p (desired_type, TYPE_BASECLASS (search_type, i)))
        {
          ++result_count;
          if (*result == NULL)
            *result = value_at_lazy (TYPE_BASECLASS (search_type, i),
                                     address + embedded_offset + offset);
        }
      else
        result_count += dynamic_cast_check_2 (desired_type,
                                              valaddr,
                                              embedded_offset + offset,
                                              address, val,
                                              TYPE_BASECLASS (search_type, i),
                                              result);
    }

  return result_count;
}

/* The C++ dynamic_cast operator.  */

struct value *
value_dynamic_cast (struct type *type, struct value *arg)
{
  int full, using_enc;
  LONGEST top;
  struct type *resolved_type = check_typedef (type);
  struct type *arg_type = check_typedef (value_type (arg));
  struct type *class_type, *rtti_type;
  struct value *result, *tem, *original_arg = arg;
  CORE_ADDR addr;
  int is_ref = TYPE_IS_REFERENCE (resolved_type);

  if (resolved_type->code () != TYPE_CODE_PTR
      && !TYPE_IS_REFERENCE (resolved_type))
    error (_("Argument to dynamic_cast must be a pointer or reference type"));
  if (TYPE_TARGET_TYPE (resolved_type)->code () != TYPE_CODE_VOID
      && TYPE_TARGET_TYPE (resolved_type)->code () != TYPE_CODE_STRUCT)
    error (_("Argument to dynamic_cast must be pointer to class or `void *'"));

  class_type = check_typedef (TYPE_TARGET_TYPE (resolved_type));
  if (resolved_type->code () == TYPE_CODE_PTR)
    {
      if (arg_type->code () != TYPE_CODE_PTR
          && ! (arg_type->code () == TYPE_CODE_INT
                && value_as_long (arg) == 0))
        error (_("Argument to dynamic_cast does not have pointer type"));
      if (arg_type->code () == TYPE_CODE_PTR)
        {
          arg_type = check_typedef (TYPE_TARGET_TYPE (arg_type));
          if (arg_type->code () != TYPE_CODE_STRUCT)
            error (_("Argument to dynamic_cast does "
                     "not have pointer to class type"));
        }

      /* Handle NULL pointers.  */
      if (value_as_long (arg) == 0)
        return value_zero (type, not_lval);

      arg = value_ind (arg);
    }
  else
    {
      if (arg_type->code () != TYPE_CODE_STRUCT)
        error (_("Argument to dynamic_cast does not have class type"));
    }

  /* If the classes are the same, just return the argument.  */
  if (class_types_same_p (class_type, arg_type))
    return value_cast (type, arg);

  /* If the target type is a unique base class of the argument's
     declared type, just cast it.  */
  if (is_ancestor (class_type, arg_type))
    {
      if (is_unique_ancestor (class_type, arg))
        return value_cast (type, original_arg);
      error (_("Ambiguous dynamic_cast"));
    }

  rtti_type = value_rtti_type (arg, &full, &top, &using_enc);
  if (! rtti_type)
    error (_("Couldn't determine value's most derived type for dynamic_cast"));

  /* Compute the most derived object's address.  */
  addr = value_address (arg);
  if (full)
    {
      /* Done.  */
    }
  else if (using_enc)
    addr += top;
  else
    addr += top + value_embedded_offset (arg);

  /* dynamic_cast<void *> means to return a pointer to the
     most-derived object.  */
  if (resolved_type->code () == TYPE_CODE_PTR
      && TYPE_TARGET_TYPE (resolved_type)->code () == TYPE_CODE_VOID)
    return value_at_lazy (type, addr);

  tem = value_at (type, addr);
  type = value_type (tem);

  /* The first dynamic check specified in 5.2.7.  */
  if (is_public_ancestor (arg_type, TYPE_TARGET_TYPE (resolved_type)))
    {
      if (class_types_same_p (rtti_type, TYPE_TARGET_TYPE (resolved_type)))
        return tem;
      result = NULL;
      if (dynamic_cast_check_1 (TYPE_TARGET_TYPE (resolved_type),
                                value_contents_for_printing (tem).data (),
                                value_embedded_offset (tem),
                                value_address (tem), tem,
                                rtti_type, addr,
                                arg_type,
                                &result) == 1)
        return value_cast (type,
                           is_ref
                           ? value_ref (result, resolved_type->code ())
                           : value_addr (result));
    }

  /* The second dynamic check specified in 5.2.7.  */
  result = NULL;
  if (is_public_ancestor (arg_type, rtti_type)
      && dynamic_cast_check_2 (TYPE_TARGET_TYPE (resolved_type),
                               value_contents_for_printing (tem).data (),
                               value_embedded_offset (tem),
                               value_address (tem), tem,
                               rtti_type, &result) == 1)
    return value_cast (type,
                       is_ref
                       ? value_ref (result, resolved_type->code ())
                       : value_addr (result));

  if (resolved_type->code () == TYPE_CODE_PTR)
    return value_zero (type, not_lval);

  error (_("dynamic_cast failed"));
}
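
/* For example (hypothetical classes), a failing dynamic_cast<Derived *>
   on a pointer yields a null pointer value, while the reference form
   dynamic_cast<Derived &> reports an error instead, mirroring the C++
   rule of throwing std::bad_cast.  */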

/* Create a not_lval value of numeric type TYPE that is one, and return it.  */

struct value *
value_one (struct type *type)
{
  struct type *type1 = check_typedef (type);
  struct value *val;

  if (is_integral_type (type1) || is_floating_type (type1))
    {
      val = value_from_longest (type, (LONGEST) 1);
    }
  else if (type1->code () == TYPE_CODE_ARRAY && type1->is_vector ())
    {
      struct type *eltype = check_typedef (TYPE_TARGET_TYPE (type1));
      int i;
      LONGEST low_bound, high_bound;

      if (!get_array_bounds (type1, &low_bound, &high_bound))
        error (_("Could not determine the vector bounds"));

      val = allocate_value (type);
      gdb::array_view<gdb_byte> val_contents = value_contents_writeable (val);
      int elt_len = TYPE_LENGTH (eltype);

      for (i = 0; i < high_bound - low_bound + 1; i++)
        {
          value *tmp = value_one (eltype);
          copy (value_contents_all (tmp),
                val_contents.slice (i * elt_len, elt_len));
        }
    }
  else
    {
      error (_("Not a numeric type."));
    }

  /* value_one result is never used for assignments to.  */
  gdb_assert (VALUE_LVAL (val) == not_lval);

  return val;
}

/* Helper function for value_at, value_at_lazy, and value_at_lazy_stack.
   The type of the created value may differ from the passed type TYPE.
   Make sure to retrieve the returned value's new type after this call
   e.g. in case the type is a variable length array.  */

static struct value *
get_value_at (struct type *type, CORE_ADDR addr, int lazy)
{
  struct value *val;

  if (check_typedef (type)->code () == TYPE_CODE_VOID)
    error (_("Attempt to dereference a generic pointer."));

  val = value_from_contents_and_address (type, NULL, addr);

  if (!lazy)
    value_fetch_lazy (val);

  return val;
}

/* Return a value with type TYPE located at ADDR.

   Call value_at only if the data needs to be fetched immediately;
   if we can be 'lazy' and defer the fetch, perhaps indefinitely, call
   value_at_lazy instead.  value_at_lazy simply records the address of
   the data and sets the lazy-evaluation-required flag.  The lazy flag
   is tested in the value_contents macro, which is used if and when
   the contents are actually required.  The type of the created value
   may differ from the passed type TYPE.  Make sure to retrieve the
   returned value's new type after this call e.g. in case the type
   is a variable length array.

   Note: value_at does *NOT* handle embedded offsets; perform such
   adjustments before or after calling it.  */
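
/* Usage sketch: when the contents may never be inspected, prefer the
   lazy variant, e.g.

     struct value *v = value_at_lazy (type, addr);

   the memory read is then deferred until value_contents is actually
   used.  */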
  858. struct value *
  859. value_at (struct type *type, CORE_ADDR addr)
  860. {
  861. return get_value_at (type, addr, 0);
  862. }
  863. /* Return a lazy value with type TYPE located at ADDR (cf. value_at).
  864. The type of the created value may differ from the passed type TYPE.
  865. Make sure to retrieve the returned values's new type after this call
  866. e.g. in case the type is a variable length array. */
  867. struct value *
  868. value_at_lazy (struct type *type, CORE_ADDR addr)
  869. {
  870. return get_value_at (type, addr, 1);
  871. }
  872. void
  873. read_value_memory (struct value *val, LONGEST bit_offset,
  874. int stack, CORE_ADDR memaddr,
  875. gdb_byte *buffer, size_t length)
  876. {
  877. ULONGEST xfered_total = 0;
  878. struct gdbarch *arch = get_value_arch (val);
  879. int unit_size = gdbarch_addressable_memory_unit_size (arch);
  880. enum target_object object;
  881. object = stack ? TARGET_OBJECT_STACK_MEMORY : TARGET_OBJECT_MEMORY;
  882. while (xfered_total < length)
  883. {
  884. enum target_xfer_status status;
  885. ULONGEST xfered_partial;
  886. status = target_xfer_partial (current_inferior ()->top_target (),
  887. object, NULL,
  888. buffer + xfered_total * unit_size, NULL,
  889. memaddr + xfered_total,
  890. length - xfered_total,
  891. &xfered_partial);
  892. if (status == TARGET_XFER_OK)
  893. /* nothing */;
  894. else if (status == TARGET_XFER_UNAVAILABLE)
  895. mark_value_bits_unavailable (val, (xfered_total * HOST_CHAR_BIT
  896. + bit_offset),
  897. xfered_partial * HOST_CHAR_BIT);
  898. else if (status == TARGET_XFER_EOF)
  899. memory_error (TARGET_XFER_E_IO, memaddr + xfered_total);
  900. else
  901. memory_error (status, memaddr + xfered_total);
  902. xfered_total += xfered_partial;
  903. QUIT;
  904. }
  905. }
  906. /* Store the contents of FROMVAL into the location of TOVAL.
  907. Return a new value with the location of TOVAL and contents of FROMVAL. */
  908. struct value *
  909. value_assign (struct value *toval, struct value *fromval)
  910. {
  911. struct type *type;
  912. struct value *val;
  913. struct frame_id old_frame;
  914. if (!deprecated_value_modifiable (toval))
  915. error (_("Left operand of assignment is not a modifiable lvalue."));
  916. toval = coerce_ref (toval);
  917. type = value_type (toval);
  918. if (VALUE_LVAL (toval) != lval_internalvar)
  919. fromval = value_cast (type, fromval);
  920. else
  921. {
  922. /* Coerce arrays and functions to pointers, except for arrays
  923. which only live in GDB's storage. */
  924. if (!value_must_coerce_to_target (fromval))
  925. fromval = coerce_array (fromval);
  926. }
  927. type = check_typedef (type);
  928. /* Since modifying a register can trash the frame chain, and
  929. modifying memory can trash the frame cache, we save the old frame
  930. and then restore the new frame afterwards. */
  931. old_frame = get_frame_id (deprecated_safe_get_selected_frame ());
  932. switch (VALUE_LVAL (toval))
  933. {
  934. case lval_internalvar:
  935. set_internalvar (VALUE_INTERNALVAR (toval), fromval);
  936. return value_of_internalvar (type->arch (),
  937. VALUE_INTERNALVAR (toval));
  938. case lval_internalvar_component:
  939. {
  940. LONGEST offset = value_offset (toval);
  941. /* Are we dealing with a bitfield?
  942. It is important to mention that `value_parent (toval)' is
  943. non-NULL iff `value_bitsize (toval)' is non-zero. */
  944. if (value_bitsize (toval))
  945. {
  946. /* VALUE_INTERNALVAR below refers to the parent value, while
  947. the offset is relative to this parent value. */
  948. gdb_assert (value_parent (value_parent (toval)) == NULL);
  949. offset += value_offset (value_parent (toval));
  950. }
  951. set_internalvar_component (VALUE_INTERNALVAR (toval),
  952. offset,
  953. value_bitpos (toval),
  954. value_bitsize (toval),
  955. fromval);
  956. }
  957. break;
  958. case lval_memory:
  959. {
  960. const gdb_byte *dest_buffer;
  961. CORE_ADDR changed_addr;
  962. int changed_len;
  963. gdb_byte buffer[sizeof (LONGEST)];
  964. if (value_bitsize (toval))
  965. {
  966. struct value *parent = value_parent (toval);
  967. changed_addr = value_address (parent) + value_offset (toval);
  968. changed_len = (value_bitpos (toval)
  969. + value_bitsize (toval)
  970. + HOST_CHAR_BIT - 1)
  971. / HOST_CHAR_BIT;
  972. /* If we can read-modify-write exactly the size of the
  973. containing type (e.g. short or int) then do so. This
  974. is safer for volatile bitfields mapped to hardware
  975. registers. */
  976. if (changed_len < TYPE_LENGTH (type)
  977. && TYPE_LENGTH (type) <= (int) sizeof (LONGEST)
  978. && ((LONGEST) changed_addr % TYPE_LENGTH (type)) == 0)
  979. changed_len = TYPE_LENGTH (type);
  980. if (changed_len > (int) sizeof (LONGEST))
  981. error (_("Can't handle bitfields which "
  982. "don't fit in a %d bit word."),
  983. (int) sizeof (LONGEST) * HOST_CHAR_BIT);
  984. read_memory (changed_addr, buffer, changed_len);
  985. modify_field (type, buffer, value_as_long (fromval),
  986. value_bitpos (toval), value_bitsize (toval));
  987. dest_buffer = buffer;
  988. }
  989. else
  990. {
  991. changed_addr = value_address (toval);
  992. changed_len = type_length_units (type);
  993. dest_buffer = value_contents (fromval).data ();
  994. }
  995. write_memory_with_notification (changed_addr, dest_buffer, changed_len);
  996. }
  997. break;
  998. case lval_register:
  999. {
  1000. struct frame_info *frame;
  1001. struct gdbarch *gdbarch;
  1002. int value_reg;
  1003. /* Figure out which frame this register value is in. The value
  1004. holds the frame_id for the next frame, that is the frame this
  1005. register value was unwound from.
  1006. Below we will call put_frame_register_bytes which requires that
  1007. we pass it the actual frame in which the register value is
  1008. valid, i.e. not the next frame. */
  1009. frame = frame_find_by_id (VALUE_NEXT_FRAME_ID (toval));
  1010. frame = get_prev_frame_always (frame);
  1011. value_reg = VALUE_REGNUM (toval);
  1012. if (!frame)
  1013. error (_("Value being assigned to is no longer active."));
  1014. gdbarch = get_frame_arch (frame);
  1015. if (value_bitsize (toval))
  1016. {
  1017. struct value *parent = value_parent (toval);
  1018. LONGEST offset = value_offset (parent) + value_offset (toval);
  1019. size_t changed_len;
  1020. gdb_byte buffer[sizeof (LONGEST)];
  1021. int optim, unavail;
  1022. changed_len = (value_bitpos (toval)
  1023. + value_bitsize (toval)
  1024. + HOST_CHAR_BIT - 1)
  1025. / HOST_CHAR_BIT;
  1026. if (changed_len > sizeof (LONGEST))
  1027. error (_("Can't handle bitfields which "
  1028. "don't fit in a %d bit word."),
  1029. (int) sizeof (LONGEST) * HOST_CHAR_BIT);
  1030. if (!get_frame_register_bytes (frame, value_reg, offset,
  1031. {buffer, changed_len},
  1032. &optim, &unavail))
  1033. {
  1034. if (optim)
  1035. throw_error (OPTIMIZED_OUT_ERROR,
  1036. _("value has been optimized out"));
  1037. if (unavail)
  1038. throw_error (NOT_AVAILABLE_ERROR,
  1039. _("value is not available"));
  1040. }
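/* Update the field inside the local copy of the register bytes,
   then write the modified bytes back to the register.  */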
  1041. modify_field (type, buffer, value_as_long (fromval),
  1042. value_bitpos (toval), value_bitsize (toval));
  1043. put_frame_register_bytes (frame, value_reg, offset,
  1044. {buffer, changed_len});
  1045. }
  1046. else
  1047. {
  1048. if (gdbarch_convert_register_p (gdbarch, VALUE_REGNUM (toval),
  1049. type))
  1050. {
  1051. /* If TOVAL is a special machine register requiring
  1052. conversion of program values to a special raw
  1053. format. */
  1054. gdbarch_value_to_register (gdbarch, frame,
  1055. VALUE_REGNUM (toval), type,
  1056. value_contents (fromval).data ());
  1057. }
  1058. else
  1059. put_frame_register_bytes (frame, value_reg,
  1060. value_offset (toval),
  1061. value_contents (fromval));
  1062. }
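/* Tell any observers that this register has been written.  */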
  1063. gdb::observers::register_changed.notify (frame, value_reg);
  1064. break;
  1065. }
  1066. case lval_computed:
  1067. {
  1068. const struct lval_funcs *funcs = value_computed_funcs (toval);
  1069. if (funcs->write != NULL)
  1070. {
  1071. funcs->write (toval, fromval);
  1072. break;
  1073. }
  1074. }
  1075. /* Fall through. */
  1076. default:
  1077. error (_("Left operand of assignment is not an lvalue."));
  1078. }
  1079. /* Assigning to the stack pointer, frame pointer, and other
  1080. (architecture and calling convention specific) registers may
  1081. cause the frame cache and regcache to be out of date. Assigning to memory
  1082. also can. We just do this on all assignments to registers or
  1083. memory, for simplicity's sake; I doubt the slowdown matters. */
  1084. switch (VALUE_LVAL (toval))
  1085. {
  1086. case lval_memory:
  1087. case lval_register:
  1088. case lval_computed:
  1089. gdb::observers::target_changed.notify
  1090. (current_inferior ()->top_target ());
  1091. /* Having destroyed the frame cache, restore the selected
  1092. frame. */
1093. /* FIXME: cagney/2002-11-02: There has to be a better way of
1094. doing this than constantly saving/restoring the frame.
1095. Why not create a get_selected_frame() function that,
1096. having saved the selected frame's ID, can automatically
1097. re-find the previously selected frame? */
  1098. {
  1099. struct frame_info *fi = frame_find_by_id (old_frame);
  1100. if (fi != NULL)
  1101. select_frame (fi);
  1102. }
  1103. break;
  1104. default:
  1105. break;
  1106. }
  1107. /* If the field does not entirely fill a LONGEST, then zero the sign
  1108. bits. If the field is signed, and is negative, then sign
  1109. extend. */
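/* For example, a 3-bit field has VALMASK 0b111; VALMASK ^ (VALMASK >> 1)
   is 0b100, the field's sign bit.  */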
  1110. if ((value_bitsize (toval) > 0)
  1111. && (value_bitsize (toval) < 8 * (int) sizeof (LONGEST)))
  1112. {
  1113. LONGEST fieldval = value_as_long (fromval);
  1114. LONGEST valmask = (((ULONGEST) 1) << value_bitsize (toval)) - 1;
  1115. fieldval &= valmask;
  1116. if (!type->is_unsigned ()
  1117. && (fieldval & (valmask ^ (valmask >> 1))))
  1118. fieldval |= ~valmask;
  1119. fromval = value_from_longest (type, fieldval);
  1120. }
  1121. /* The return value is a copy of TOVAL so it shares its location
  1122. information, but its contents are updated from FROMVAL. This
  1123. implies the returned value is not lazy, even if TOVAL was. */
  1124. val = value_copy (toval);
  1125. set_value_lazy (val, 0);
  1126. copy (value_contents (fromval), value_contents_raw (val));
  1127. /* We copy over the enclosing type and pointed-to offset from FROMVAL
  1128. in the case of pointer types. For object types, the enclosing type
1129. and embedded offset must *not* be copied: the target object referred
  1130. to by TOVAL retains its original dynamic type after assignment. */
  1131. if (type->code () == TYPE_CODE_PTR)
  1132. {
  1133. set_value_enclosing_type (val, value_enclosing_type (fromval));
  1134. set_value_pointed_to_offset (val, value_pointed_to_offset (fromval));
  1135. }
  1136. return val;
  1137. }
  1138. /* Extend a value ARG1 to COUNT repetitions of its type. */
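/* This is used to implement GDB's `@' operator, e.g. `print *array@10'.  */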
  1139. struct value *
  1140. value_repeat (struct value *arg1, int count)
  1141. {
  1142. struct value *val;
  1143. if (VALUE_LVAL (arg1) != lval_memory)
  1144. error (_("Only values in memory can be extended with '@'."));
  1145. if (count < 1)
  1146. error (_("Invalid number %d of repetitions."), count);
  1147. val = allocate_repeat_value (value_enclosing_type (arg1), count);
  1148. VALUE_LVAL (val) = lval_memory;
  1149. set_value_address (val, value_address (arg1));
  1150. read_value_memory (val, 0, value_stack (val), value_address (val),
  1151. value_contents_all_raw (val).data (),
  1152. type_length_units (value_enclosing_type (val)));
  1153. return val;
  1154. }
  1155. struct value *
  1156. value_of_variable (struct symbol *var, const struct block *b)
  1157. {
  1158. struct frame_info *frame = NULL;
  1159. if (symbol_read_needs_frame (var))
  1160. frame = get_selected_frame (_("No frame selected."));
  1161. return read_var_value (var, b, frame);
  1162. }
  1163. struct value *
  1164. address_of_variable (struct symbol *var, const struct block *b)
  1165. {
  1166. struct type *type = var->type ();
  1167. struct value *val;
  1168. /* Evaluate it first; if the result is a memory address, we're fine.
  1169. Lazy evaluation pays off here. */
  1170. val = value_of_variable (var, b);
  1171. type = value_type (val);
  1172. if ((VALUE_LVAL (val) == lval_memory && value_lazy (val))
  1173. || type->code () == TYPE_CODE_FUNC)
  1174. {
  1175. CORE_ADDR addr = value_address (val);
  1176. return value_from_pointer (lookup_pointer_type (type), addr);
  1177. }
  1178. /* Not a memory address; check what the problem was. */
  1179. switch (VALUE_LVAL (val))
  1180. {
  1181. case lval_register:
  1182. {
  1183. struct frame_info *frame;
  1184. const char *regname;
  1185. frame = frame_find_by_id (VALUE_NEXT_FRAME_ID (val));
  1186. gdb_assert (frame);
  1187. regname = gdbarch_register_name (get_frame_arch (frame),
  1188. VALUE_REGNUM (val));
  1189. gdb_assert (regname && *regname);
  1190. error (_("Address requested for identifier "
  1191. "\"%s\" which is in register $%s"),
  1192. var->print_name (), regname);
  1193. break;
  1194. }
  1195. default:
  1196. error (_("Can't take address of \"%s\" which isn't an lvalue."),
  1197. var->print_name ());
  1198. break;
  1199. }
  1200. return val;
  1201. }
  1202. /* See value.h. */
  1203. bool
  1204. value_must_coerce_to_target (struct value *val)
  1205. {
  1206. struct type *valtype;
  1207. /* The only lval kinds which do not live in target memory. */
  1208. if (VALUE_LVAL (val) != not_lval
  1209. && VALUE_LVAL (val) != lval_internalvar
  1210. && VALUE_LVAL (val) != lval_xcallable)
  1211. return false;
  1212. valtype = check_typedef (value_type (val));
  1213. switch (valtype->code ())
  1214. {
  1215. case TYPE_CODE_ARRAY:
1216. return !valtype->is_vector ();
  1217. case TYPE_CODE_STRING:
  1218. return true;
  1219. default:
  1220. return false;
  1221. }
  1222. }
  1223. /* Make sure that VAL lives in target memory if it's supposed to. For
  1224. instance, strings are constructed as character arrays in GDB's
  1225. storage, and this function copies them to the target. */
  1226. struct value *
  1227. value_coerce_to_target (struct value *val)
  1228. {
  1229. LONGEST length;
  1230. CORE_ADDR addr;
  1231. if (!value_must_coerce_to_target (val))
  1232. return val;
  1233. length = TYPE_LENGTH (check_typedef (value_type (val)));
  1234. addr = allocate_space_in_inferior (length);
  1235. write_memory (addr, value_contents (val).data (), length);
  1236. return value_at_lazy (value_type (val), addr);
  1237. }
  1238. /* Given a value which is an array, return a value which is a pointer
  1239. to its first element, regardless of whether or not the array has a
  1240. nonzero lower bound.
  1241. FIXME: A previous comment here indicated that this routine should
1242. be subtracting the array's lower bound. It's not clear to me that
  1243. this is correct. Given an array subscripting operation, it would
  1244. certainly work to do the adjustment here, essentially computing:
  1245. (&array[0] - (lowerbound * sizeof array[0])) + (index * sizeof array[0])
  1246. However I believe a more appropriate and logical place to account
  1247. for the lower bound is to do so in value_subscript, essentially
  1248. computing:
  1249. (&array[0] + ((index - lowerbound) * sizeof array[0]))
  1250. As further evidence consider what would happen with operations
  1251. other than array subscripting, where the caller would get back a
  1252. value that had an address somewhere before the actual first element
  1253. of the array, and the information about the lower bound would be
  1254. lost because of the coercion to pointer type. */
  1255. struct value *
  1256. value_coerce_array (struct value *arg1)
  1257. {
  1258. struct type *type = check_typedef (value_type (arg1));
  1259. /* If the user tries to do something requiring a pointer with an
  1260. array that has not yet been pushed to the target, then this would
  1261. be a good time to do so. */
  1262. arg1 = value_coerce_to_target (arg1);
  1263. if (VALUE_LVAL (arg1) != lval_memory)
  1264. error (_("Attempt to take address of value not located in memory."));
  1265. return value_from_pointer (lookup_pointer_type (TYPE_TARGET_TYPE (type)),
  1266. value_address (arg1));
  1267. }
  1268. /* Given a value which is a function, return a value which is a pointer
  1269. to it. */
  1270. struct value *
  1271. value_coerce_function (struct value *arg1)
  1272. {
  1273. struct value *retval;
  1274. if (VALUE_LVAL (arg1) != lval_memory)
  1275. error (_("Attempt to take address of value not located in memory."));
  1276. retval = value_from_pointer (lookup_pointer_type (value_type (arg1)),
  1277. value_address (arg1));
  1278. return retval;
  1279. }
  1280. /* Return a pointer value for the object for which ARG1 is the
  1281. contents. */
  1282. struct value *
  1283. value_addr (struct value *arg1)
  1284. {
  1285. struct value *arg2;
  1286. struct type *type = check_typedef (value_type (arg1));
  1287. if (TYPE_IS_REFERENCE (type))
  1288. {
  1289. if (value_bits_synthetic_pointer (arg1, value_embedded_offset (arg1),
  1290. TARGET_CHAR_BIT * TYPE_LENGTH (type)))
  1291. arg1 = coerce_ref (arg1);
  1292. else
  1293. {
  1294. /* Copy the value, but change the type from (T&) to (T*). We
  1295. keep the same location information, which is efficient, and
  1296. allows &(&X) to get the location containing the reference.
  1297. Do the same to its enclosing type for consistency. */
  1298. struct type *type_ptr
  1299. = lookup_pointer_type (TYPE_TARGET_TYPE (type));
  1300. struct type *enclosing_type
  1301. = check_typedef (value_enclosing_type (arg1));
  1302. struct type *enclosing_type_ptr
  1303. = lookup_pointer_type (TYPE_TARGET_TYPE (enclosing_type));
  1304. arg2 = value_copy (arg1);
  1305. deprecated_set_value_type (arg2, type_ptr);
  1306. set_value_enclosing_type (arg2, enclosing_type_ptr);
  1307. return arg2;
  1308. }
  1309. }
  1310. if (type->code () == TYPE_CODE_FUNC)
  1311. return value_coerce_function (arg1);
  1312. /* If this is an array that has not yet been pushed to the target,
  1313. then this would be a good time to force it to memory. */
  1314. arg1 = value_coerce_to_target (arg1);
  1315. if (VALUE_LVAL (arg1) != lval_memory)
  1316. error (_("Attempt to take address of value not located in memory."));
  1317. /* Get target memory address. */
  1318. arg2 = value_from_pointer (lookup_pointer_type (value_type (arg1)),
  1319. (value_address (arg1)
  1320. + value_embedded_offset (arg1)));
  1321. /* This may be a pointer to a base subobject; so remember the
  1322. full derived object's type ... */
  1323. set_value_enclosing_type (arg2,
  1324. lookup_pointer_type (value_enclosing_type (arg1)));
  1325. /* ... and also the relative position of the subobject in the full
  1326. object. */
  1327. set_value_pointed_to_offset (arg2, value_embedded_offset (arg1));
  1328. return arg2;
  1329. }
  1330. /* Return a reference value for the object for which ARG1 is the
  1331. contents. */
  1332. struct value *
  1333. value_ref (struct value *arg1, enum type_code refcode)
  1334. {
  1335. struct value *arg2;
  1336. struct type *type = check_typedef (value_type (arg1));
  1337. gdb_assert (refcode == TYPE_CODE_REF || refcode == TYPE_CODE_RVALUE_REF);
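/* If ARG1 already has the requested kind of reference type, return
   it unchanged.  */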
  1338. if ((type->code () == TYPE_CODE_REF
  1339. || type->code () == TYPE_CODE_RVALUE_REF)
  1340. && type->code () == refcode)
  1341. return arg1;
  1342. arg2 = value_addr (arg1);
  1343. deprecated_set_value_type (arg2, lookup_reference_type (type, refcode));
  1344. return arg2;
  1345. }
  1346. /* Given a value of a pointer type, apply the C unary * operator to
  1347. it. */
  1348. struct value *
  1349. value_ind (struct value *arg1)
  1350. {
  1351. struct type *base_type;
  1352. struct value *arg2;
  1353. arg1 = coerce_array (arg1);
  1354. base_type = check_typedef (value_type (arg1));
  1355. if (VALUE_LVAL (arg1) == lval_computed)
  1356. {
  1357. const struct lval_funcs *funcs = value_computed_funcs (arg1);
  1358. if (funcs->indirect)
  1359. {
  1360. struct value *result = funcs->indirect (arg1);
  1361. if (result)
  1362. return result;
  1363. }
  1364. }
  1365. if (base_type->code () == TYPE_CODE_PTR)
  1366. {
  1367. struct type *enc_type;
  1368. /* We may be pointing to something embedded in a larger object.
  1369. Get the real type of the enclosing object. */
  1370. enc_type = check_typedef (value_enclosing_type (arg1));
  1371. enc_type = TYPE_TARGET_TYPE (enc_type);
  1372. CORE_ADDR base_addr;
  1373. if (check_typedef (enc_type)->code () == TYPE_CODE_FUNC
  1374. || check_typedef (enc_type)->code () == TYPE_CODE_METHOD)
  1375. {
  1376. /* For functions, go through find_function_addr, which knows
  1377. how to handle function descriptors. */
  1378. base_addr = find_function_addr (arg1, NULL);
  1379. }
  1380. else
  1381. {
  1382. /* Retrieve the enclosing object pointed to. */
  1383. base_addr = (value_as_address (arg1)
  1384. - value_pointed_to_offset (arg1));
  1385. }
  1386. arg2 = value_at_lazy (enc_type, base_addr);
  1387. enc_type = value_type (arg2);
  1388. return readjust_indirect_value_type (arg2, enc_type, base_type,
  1389. arg1, base_addr);
  1390. }
  1391. error (_("Attempt to take contents of a non-pointer value."));
  1392. }
  1393. /* Create a value for an array by allocating space in GDB, copying the
  1394. data into that space, and then setting up an array value.
  1395. The array bounds are set from LOWBOUND and HIGHBOUND, and the array
  1396. is populated from the values passed in ELEMVEC.
  1397. The element type of the array is inherited from the type of the
  1398. first element, and all elements must have the same size (though we
  1399. don't currently enforce any restriction on their types). */
  1400. struct value *
  1401. value_array (int lowbound, int highbound, struct value **elemvec)
  1402. {
  1403. int nelem;
  1404. int idx;
  1405. ULONGEST typelength;
  1406. struct value *val;
  1407. struct type *arraytype;
  1408. /* Validate that the bounds are reasonable and that each of the
1409. elements has the same size. */
  1410. nelem = highbound - lowbound + 1;
  1411. if (nelem <= 0)
  1412. {
  1413. error (_("bad array bounds (%d, %d)"), lowbound, highbound);
  1414. }
  1415. typelength = type_length_units (value_enclosing_type (elemvec[0]));
  1416. for (idx = 1; idx < nelem; idx++)
  1417. {
  1418. if (type_length_units (value_enclosing_type (elemvec[idx]))
  1419. != typelength)
  1420. {
  1421. error (_("array elements must all be the same size"));
  1422. }
  1423. }
  1424. arraytype = lookup_array_range_type (value_enclosing_type (elemvec[0]),
  1425. lowbound, highbound);
  1426. if (!current_language->c_style_arrays_p ())
  1427. {
  1428. val = allocate_value (arraytype);
  1429. for (idx = 0; idx < nelem; idx++)
  1430. value_contents_copy (val, idx * typelength, elemvec[idx], 0,
  1431. typelength);
  1432. return val;
  1433. }
  1434. /* Allocate space to store the array, and then initialize it by
  1435. copying in each element. */
  1436. val = allocate_value (arraytype);
  1437. for (idx = 0; idx < nelem; idx++)
  1438. value_contents_copy (val, idx * typelength, elemvec[idx], 0, typelength);
  1439. return val;
  1440. }
  1441. struct value *
  1442. value_cstring (const char *ptr, ssize_t len, struct type *char_type)
  1443. {
  1444. struct value *val;
  1445. int lowbound = current_language->string_lower_bound ();
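/* LEN is a byte count; dividing by the character size yields the
   number of array elements.  */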
  1446. ssize_t highbound = len / TYPE_LENGTH (char_type);
  1447. struct type *stringtype
  1448. = lookup_array_range_type (char_type, lowbound, highbound + lowbound - 1);
  1449. val = allocate_value (stringtype);
  1450. memcpy (value_contents_raw (val).data (), ptr, len);
  1451. return val;
  1452. }
  1453. /* Create a value for a string constant by allocating space in the
  1454. inferior, copying the data into that space, and returning the
  1455. address with type TYPE_CODE_STRING. PTR points to the string
  1456. constant data; LEN is number of characters.
  1457. Note that string types are like array of char types with a lower
  1458. bound of zero and an upper bound of LEN - 1. Also note that the
  1459. string may contain embedded null bytes. */
  1460. struct value *
  1461. value_string (const char *ptr, ssize_t len, struct type *char_type)
  1462. {
  1463. struct value *val;
  1464. int lowbound = current_language->string_lower_bound ();
  1465. ssize_t highbound = len / TYPE_LENGTH (char_type);
  1466. struct type *stringtype
  1467. = lookup_string_range_type (char_type, lowbound, highbound + lowbound - 1);
  1468. val = allocate_value (stringtype);
  1469. memcpy (value_contents_raw (val).data (), ptr, len);
  1470. return val;
  1471. }
  1472. /* See if we can pass arguments in T2 to a function which takes arguments
  1473. of types T1. T1 is a list of NARGS arguments, and T2 is an array_view
  1474. of the values we're trying to pass. If some arguments need coercion of
  1475. some sort, then the coerced values are written into T2. Return value is
  1476. 0 if the arguments could be matched, or the position at which they
  1477. differ if not.
  1478. STATICP is nonzero if the T1 argument list came from a static
  1479. member function. T2 must still include the ``this'' pointer, but
  1480. it will be skipped.
  1481. For non-static member functions, we ignore the first argument,
  1482. which is the type of the instance variable. This is because we
  1483. want to handle calls with objects from derived classes. This is
  1484. not entirely correct: we should actually check to make sure that a
  1485. requested operation is type secure, shouldn't we? FIXME. */
  1486. static int
  1487. typecmp (bool staticp, bool varargs, int nargs,
  1488. struct field t1[], gdb::array_view<value *> t2)
  1489. {
  1490. int i;
  1491. /* Skip ``this'' argument if applicable. T2 will always include
  1492. THIS. */
  1493. if (staticp)
  1494. t2 = t2.slice (1);
  1495. for (i = 0;
  1496. (i < nargs) && t1[i].type ()->code () != TYPE_CODE_VOID;
  1497. i++)
  1498. {
  1499. struct type *tt1, *tt2;
  1500. if (i == t2.size ())
  1501. return i + 1;
  1502. tt1 = check_typedef (t1[i].type ());
  1503. tt2 = check_typedef (value_type (t2[i]));
  1504. if (TYPE_IS_REFERENCE (tt1)
  1505. /* We should be doing hairy argument matching, as below. */
  1506. && (check_typedef (TYPE_TARGET_TYPE (tt1))->code ()
  1507. == tt2->code ()))
  1508. {
  1509. if (tt2->code () == TYPE_CODE_ARRAY)
  1510. t2[i] = value_coerce_array (t2[i]);
  1511. else
  1512. t2[i] = value_ref (t2[i], tt1->code ());
  1513. continue;
  1514. }
1515. /* djb - 20000715 - Until the new type structure is in
1516. place, and we can attempt things like implicit conversions,
  1517. we need to do this so you can take something like a map<const
  1518. char *>, and properly access map["hello"], because the
  1519. argument to [] will be a reference to a pointer to a char,
  1520. and the argument will be a pointer to a char. */
  1521. while (TYPE_IS_REFERENCE (tt1) || tt1->code () == TYPE_CODE_PTR)
  1522. {
1523. tt1 = check_typedef (TYPE_TARGET_TYPE (tt1));
  1524. }
  1525. while (tt2->code () == TYPE_CODE_ARRAY
  1526. || tt2->code () == TYPE_CODE_PTR
  1527. || TYPE_IS_REFERENCE (tt2))
  1528. {
  1529. tt2 = check_typedef (TYPE_TARGET_TYPE (tt2));
  1530. }
  1531. if (tt1->code () == tt2->code ())
  1532. continue;
  1533. /* Array to pointer is a `trivial conversion' according to the
  1534. ARM. */
  1535. /* We should be doing much hairier argument matching (see
  1536. section 13.2 of the ARM), but as a quick kludge, just check
  1537. for the same type code. */
  1538. if (t1[i].type ()->code () != value_type (t2[i])->code ())
  1539. return i + 1;
  1540. }
  1541. if (varargs || i == t2.size ())
  1542. return 0;
  1543. return i + 1;
  1544. }
  1545. /* Helper class for search_struct_field that keeps track of found
  1546. results and possibly throws an exception if the search yields
  1547. ambiguous results. See search_struct_field for description of
  1548. LOOKING_FOR_BASECLASS. */
  1549. struct struct_field_searcher
  1550. {
  1551. /* A found field. */
  1552. struct found_field
  1553. {
  1554. /* Path to the structure where the field was found. */
  1555. std::vector<struct type *> path;
  1556. /* The field found. */
  1557. struct value *field_value;
  1558. };
  1559. /* See corresponding fields for description of parameters. */
  1560. struct_field_searcher (const char *name,
  1561. struct type *outermost_type,
  1562. bool looking_for_baseclass)
  1563. : m_name (name),
  1564. m_looking_for_baseclass (looking_for_baseclass),
  1565. m_outermost_type (outermost_type)
  1566. {
  1567. }
  1568. /* The search entry point. If LOOKING_FOR_BASECLASS is true and the
  1569. base class search yields ambiguous results, this throws an
  1570. exception. If LOOKING_FOR_BASECLASS is false, the found fields
  1571. are accumulated and the caller (search_struct_field) takes care
  1572. of throwing an error if the field search yields ambiguous
  1573. results. The latter is done that way so that the error message
  1574. can include a list of all the found candidates. */
  1575. void search (struct value *arg, LONGEST offset, struct type *type);
  1576. const std::vector<found_field> &fields ()
  1577. {
  1578. return m_fields;
  1579. }
  1580. struct value *baseclass ()
  1581. {
  1582. return m_baseclass;
  1583. }
  1584. private:
  1585. /* Update results to include V, a found field/baseclass. */
  1586. void update_result (struct value *v, LONGEST boffset);
  1587. /* The name of the field/baseclass we're searching for. */
  1588. const char *m_name;
  1589. /* Whether we're looking for a baseclass, or a field. */
  1590. const bool m_looking_for_baseclass;
  1591. /* The offset of the baseclass containing the field/baseclass we
  1592. last recorded. */
  1593. LONGEST m_last_boffset = 0;
  1594. /* If looking for a baseclass, then the result is stored here. */
  1595. struct value *m_baseclass = nullptr;
  1596. /* When looking for fields, the found candidates are stored
  1597. here. */
  1598. std::vector<found_field> m_fields;
  1599. /* The type of the initial type passed to search_struct_field; this
  1600. is used for error reporting when the lookup is ambiguous. */
  1601. struct type *m_outermost_type;
  1602. /* The full path to the struct being inspected. E.g. for field 'x'
  1603. defined in class B inherited by class A, we have A and B pushed
  1604. on the path. */
  1605. std::vector <struct type *> m_struct_path;
  1606. };
  1607. void
  1608. struct_field_searcher::update_result (struct value *v, LONGEST boffset)
  1609. {
  1610. if (v != NULL)
  1611. {
  1612. if (m_looking_for_baseclass)
  1613. {
  1614. if (m_baseclass != nullptr
  1615. /* The result is not ambiguous if all the classes that are
  1616. found occupy the same space. */
  1617. && m_last_boffset != boffset)
  1618. error (_("base class '%s' is ambiguous in type '%s'"),
  1619. m_name, TYPE_SAFE_NAME (m_outermost_type));
  1620. m_baseclass = v;
  1621. m_last_boffset = boffset;
  1622. }
  1623. else
  1624. {
  1625. /* The field is not ambiguous if it occupies the same
  1626. space. */
  1627. if (m_fields.empty () || m_last_boffset != boffset)
  1628. m_fields.push_back ({m_struct_path, v});
  1629. else
  1630. {
1631. /* Fields can occupy the same space and have the same name (be
  1632. ambiguous). This can happen when fields in two different base
  1633. classes are marked [[no_unique_address]] and have the same name.
  1634. The C++ standard says that such fields can only occupy the same
  1635. space if they are of different type, but we don't rely on that in
  1636. the following code. */
  1637. bool ambiguous = false, insert = true;
  1638. for (const found_field &field: m_fields)
  1639. {
1640. if (field.path.back () != m_struct_path.back ())
  1641. {
  1642. /* Same boffset points to members of different classes.
  1643. We have found an ambiguity and should record it. */
  1644. ambiguous = true;
  1645. }
  1646. else
  1647. {
  1648. /* We don't need to insert this value again, because a
  1649. non-ambiguous path already leads to it. */
  1650. insert = false;
  1651. break;
  1652. }
  1653. }
  1654. if (ambiguous && insert)
  1655. m_fields.push_back ({m_struct_path, v});
  1656. }
  1657. }
  1658. }
  1659. }
  1660. /* A helper for search_struct_field. This does all the work; most
  1661. arguments are as passed to search_struct_field. */
  1662. void
  1663. struct_field_searcher::search (struct value *arg1, LONGEST offset,
  1664. struct type *type)
  1665. {
  1666. int i;
  1667. int nbases;
  1668. m_struct_path.push_back (type);
  1669. SCOPE_EXIT { m_struct_path.pop_back (); };
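/* The SCOPE_EXIT above pops TYPE from the path again when this call
   returns, keeping the path in sync across recursive searches.  */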
  1670. type = check_typedef (type);
  1671. nbases = TYPE_N_BASECLASSES (type);
  1672. if (!m_looking_for_baseclass)
  1673. for (i = type->num_fields () - 1; i >= nbases; i--)
  1674. {
  1675. const char *t_field_name = type->field (i).name ();
  1676. if (t_field_name && (strcmp_iw (t_field_name, m_name) == 0))
  1677. {
  1678. struct value *v;
  1679. if (field_is_static (&type->field (i)))
  1680. v = value_static_field (type, i);
  1681. else
  1682. v = value_primitive_field (arg1, offset, i, type);
  1683. update_result (v, offset);
  1684. return;
  1685. }
  1686. if (t_field_name
  1687. && t_field_name[0] == '\0')
  1688. {
  1689. struct type *field_type = type->field (i).type ();
  1690. if (field_type->code () == TYPE_CODE_UNION
  1691. || field_type->code () == TYPE_CODE_STRUCT)
  1692. {
  1693. /* Look for a match through the fields of an anonymous
  1694. union, or anonymous struct. C++ provides anonymous
  1695. unions.
  1696. In the GNU Chill (now deleted from GDB)
  1697. implementation of variant record types, each
  1698. <alternative field> has an (anonymous) union type,
  1699. each member of the union represents a <variant
  1700. alternative>. Each <variant alternative> is
  1701. represented as a struct, with a member for each
  1702. <variant field>. */
  1703. LONGEST new_offset = offset;
  1704. /* This is pretty gross. In G++, the offset in an
  1705. anonymous union is relative to the beginning of the
  1706. enclosing struct. In the GNU Chill (now deleted
  1707. from GDB) implementation of variant records, the
  1708. bitpos is zero in an anonymous union field, so we
  1709. have to add the offset of the union here. */
  1710. if (field_type->code () == TYPE_CODE_STRUCT
  1711. || (field_type->num_fields () > 0
  1712. && field_type->field (0).loc_bitpos () == 0))
  1713. new_offset += type->field (i).loc_bitpos () / 8;
  1714. search (arg1, new_offset, field_type);
  1715. }
  1716. }
  1717. }
  1718. for (i = 0; i < nbases; i++)
  1719. {
  1720. struct value *v = NULL;
  1721. struct type *basetype = check_typedef (TYPE_BASECLASS (type, i));
  1722. /* If we are looking for baseclasses, this is what we get when
  1723. we hit them. But it could happen that the base part's member
  1724. name is not yet filled in. */
  1725. int found_baseclass = (m_looking_for_baseclass
  1726. && TYPE_BASECLASS_NAME (type, i) != NULL
  1727. && (strcmp_iw (m_name,
  1728. TYPE_BASECLASS_NAME (type,
  1729. i)) == 0));
  1730. LONGEST boffset = value_embedded_offset (arg1) + offset;
  1731. if (BASETYPE_VIA_VIRTUAL (type, i))
  1732. {
  1733. struct value *v2;
  1734. boffset = baseclass_offset (type, i,
  1735. value_contents_for_printing (arg1).data (),
  1736. value_embedded_offset (arg1) + offset,
  1737. value_address (arg1),
  1738. arg1);
  1739. /* The virtual base class pointer might have been clobbered
  1740. by the user program. Make sure that it still points to a
  1741. valid memory location. */
  1742. boffset += value_embedded_offset (arg1) + offset;
  1743. if (boffset < 0
  1744. || boffset >= TYPE_LENGTH (value_enclosing_type (arg1)))
  1745. {
  1746. CORE_ADDR base_addr;
  1747. base_addr = value_address (arg1) + boffset;
  1748. v2 = value_at_lazy (basetype, base_addr);
  1749. if (target_read_memory (base_addr,
  1750. value_contents_raw (v2).data (),
  1751. TYPE_LENGTH (value_type (v2))) != 0)
  1752. error (_("virtual baseclass botch"));
  1753. }
  1754. else
  1755. {
  1756. v2 = value_copy (arg1);
  1757. deprecated_set_value_type (v2, basetype);
  1758. set_value_embedded_offset (v2, boffset);
  1759. }
  1760. if (found_baseclass)
  1761. v = v2;
  1762. else
  1763. search (v2, 0, TYPE_BASECLASS (type, i));
  1764. }
  1765. else if (found_baseclass)
  1766. v = value_primitive_field (arg1, offset, i, type);
  1767. else
  1768. {
  1769. search (arg1, offset + TYPE_BASECLASS_BITPOS (type, i) / 8,
  1770. basetype);
  1771. }
  1772. update_result (v, boffset);
  1773. }
  1774. }
  1775. /* Helper function used by value_struct_elt to recurse through
  1776. baseclasses. Look for a field NAME in ARG1. Search in it assuming
  1777. it has (class) type TYPE. If found, return value, else return NULL.
  1778. If LOOKING_FOR_BASECLASS, then instead of looking for struct
  1779. fields, look for a baseclass named NAME. */
  1780. static struct value *
  1781. search_struct_field (const char *name, struct value *arg1,
  1782. struct type *type, int looking_for_baseclass)
  1783. {
  1784. struct_field_searcher searcher (name, type, looking_for_baseclass);
  1785. searcher.search (arg1, 0, type);
  1786. if (!looking_for_baseclass)
  1787. {
  1788. const auto &fields = searcher.fields ();
  1789. if (fields.empty ())
  1790. return nullptr;
  1791. else if (fields.size () == 1)
  1792. return fields[0].field_value;
  1793. else
  1794. {
  1795. std::string candidates;
  1796. for (auto &&candidate : fields)
  1797. {
  1798. gdb_assert (!candidate.path.empty ());
  1799. struct type *field_type = value_type (candidate.field_value);
  1800. struct type *struct_type = candidate.path.back ();
  1801. std::string path;
  1802. bool first = true;
  1803. for (struct type *t : candidate.path)
  1804. {
  1805. if (first)
  1806. first = false;
  1807. else
  1808. path += " -> ";
  1809. path += t->name ();
  1810. }
  1811. candidates += string_printf ("\n '%s %s::%s' (%s)",
  1812. TYPE_SAFE_NAME (field_type),
  1813. TYPE_SAFE_NAME (struct_type),
  1814. name,
  1815. path.c_str ());
  1816. }
  1817. error (_("Request for member '%s' is ambiguous in type '%s'."
  1818. " Candidates are:%s"),
  1819. name, TYPE_SAFE_NAME (type),
  1820. candidates.c_str ());
  1821. }
  1822. }
  1823. else
  1824. return searcher.baseclass ();
  1825. }
  1826. /* Helper function used by value_struct_elt to recurse through
  1827. baseclasses. Look for a field NAME in ARG1. Adjust the address of
  1828. ARG1 by OFFSET bytes, and search in it assuming it has (class) type
  1829. TYPE.
  1830. ARGS is an optional array of argument values used to help finding NAME.
  1831. The contents of ARGS can be adjusted if type coercion is required in
  1832. order to find a matching NAME.
1833. If found, return the value; else if the name matched but the args
1834. did not, return (value) -1; else return NULL. */
  1835. static struct value *
  1836. search_struct_method (const char *name, struct value **arg1p,
  1837. gdb::optional<gdb::array_view<value *>> args,
  1838. LONGEST offset, int *static_memfuncp,
  1839. struct type *type)
  1840. {
  1841. int i;
  1842. struct value *v;
  1843. int name_matched = 0;
  1844. type = check_typedef (type);
  1845. for (i = TYPE_NFN_FIELDS (type) - 1; i >= 0; i--)
  1846. {
  1847. const char *t_field_name = TYPE_FN_FIELDLIST_NAME (type, i);
  1848. if (t_field_name && (strcmp_iw (t_field_name, name) == 0))
  1849. {
  1850. int j = TYPE_FN_FIELDLIST_LENGTH (type, i) - 1;
  1851. struct fn_field *f = TYPE_FN_FIELDLIST1 (type, i);
  1852. name_matched = 1;
  1853. check_stub_method_group (type, i);
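/* Without argument values we can only resolve the method if it has a
   single instance (J == 0); an overloaded name is an error here.  */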
  1854. if (j > 0 && !args.has_value ())
  1855. error (_("cannot resolve overloaded method "
  1856. "`%s': no arguments supplied"), name);
  1857. else if (j == 0 && !args.has_value ())
  1858. {
  1859. v = value_fn_field (arg1p, f, j, type, offset);
  1860. if (v != NULL)
  1861. return v;
  1862. }
  1863. else
  1864. while (j >= 0)
  1865. {
  1866. gdb_assert (args.has_value ());
  1867. if (!typecmp (TYPE_FN_FIELD_STATIC_P (f, j),
  1868. TYPE_FN_FIELD_TYPE (f, j)->has_varargs (),
  1869. TYPE_FN_FIELD_TYPE (f, j)->num_fields (),
  1870. TYPE_FN_FIELD_ARGS (f, j), *args))
  1871. {
  1872. if (TYPE_FN_FIELD_VIRTUAL_P (f, j))
  1873. return value_virtual_fn_field (arg1p, f, j,
  1874. type, offset);
  1875. if (TYPE_FN_FIELD_STATIC_P (f, j)
  1876. && static_memfuncp)
  1877. *static_memfuncp = 1;
  1878. v = value_fn_field (arg1p, f, j, type, offset);
  1879. if (v != NULL)
  1880. return v;
  1881. }
  1882. j--;
  1883. }
  1884. }
  1885. }
  1886. for (i = TYPE_N_BASECLASSES (type) - 1; i >= 0; i--)
  1887. {
  1888. LONGEST base_offset;
  1889. LONGEST this_offset;
  1890. if (BASETYPE_VIA_VIRTUAL (type, i))
  1891. {
  1892. struct type *baseclass = check_typedef (TYPE_BASECLASS (type, i));
  1893. struct value *base_val;
  1894. const gdb_byte *base_valaddr;
  1895. /* The virtual base class pointer might have been
  1896. clobbered by the user program. Make sure that it
  1897. still points to a valid memory location. */
  1898. if (offset < 0 || offset >= TYPE_LENGTH (type))
  1899. {
  1900. CORE_ADDR address;
  1901. gdb::byte_vector tmp (TYPE_LENGTH (baseclass));
  1902. address = value_address (*arg1p);
  1903. if (target_read_memory (address + offset,
  1904. tmp.data (), TYPE_LENGTH (baseclass)) != 0)
  1905. error (_("virtual baseclass botch"));
  1906. base_val = value_from_contents_and_address (baseclass,
  1907. tmp.data (),
  1908. address + offset);
  1909. base_valaddr = value_contents_for_printing (base_val).data ();
  1910. this_offset = 0;
  1911. }
  1912. else
  1913. {
  1914. base_val = *arg1p;
  1915. base_valaddr = value_contents_for_printing (*arg1p).data ();
  1916. this_offset = offset;
  1917. }
  1918. base_offset = baseclass_offset (type, i, base_valaddr,
  1919. this_offset, value_address (base_val),
  1920. base_val);
  1921. }
  1922. else
  1923. {
  1924. base_offset = TYPE_BASECLASS_BITPOS (type, i) / 8;
  1925. }
  1926. v = search_struct_method (name, arg1p, args, base_offset + offset,
  1927. static_memfuncp, TYPE_BASECLASS (type, i));
  1928. if (v == (struct value *) - 1)
  1929. {
  1930. name_matched = 1;
  1931. }
  1932. else if (v)
  1933. {
  1934. /* FIXME-bothner: Why is this commented out? Why is it here? */
  1935. /* *arg1p = arg1_tmp; */
  1936. return v;
  1937. }
  1938. }
  1939. if (name_matched)
  1940. return (struct value *) - 1;
  1941. else
  1942. return NULL;
  1943. }
  1944. /* Given *ARGP, a value of type (pointer to a)* structure/union,
  1945. extract the component named NAME from the ultimate target
  1946. structure/union and return it as a value with its appropriate type.
  1947. ERR is used in the error message if *ARGP's type is wrong.
  1948. C++: ARGS is a list of argument types to aid in the selection of
  1949. an appropriate method. Also, handle derived types.
  1950. STATIC_MEMFUNCP, if non-NULL, points to a caller-supplied location
  1951. where the truthvalue of whether the function that was resolved was
  1952. a static member function or not is stored.
  1953. ERR is an error message to be printed in case the field is not
  1954. found. */
  1955. struct value *
  1956. value_struct_elt (struct value **argp,
  1957. gdb::optional<gdb::array_view<value *>> args,
  1958. const char *name, int *static_memfuncp, const char *err)
  1959. {
  1960. struct type *t;
  1961. struct value *v;
  1962. *argp = coerce_array (*argp);
  1963. t = check_typedef (value_type (*argp));
  1964. /* Follow pointers until we get to a non-pointer. */
  1965. while (t->is_pointer_or_reference ())
  1966. {
  1967. *argp = value_ind (*argp);
  1968. /* Don't coerce fn pointer to fn and then back again! */
  1969. if (check_typedef (value_type (*argp))->code () != TYPE_CODE_FUNC)
  1970. *argp = coerce_array (*argp);
  1971. t = check_typedef (value_type (*argp));
  1972. }
  1973. if (t->code () != TYPE_CODE_STRUCT
  1974. && t->code () != TYPE_CODE_UNION)
  1975. error (_("Attempt to extract a component of a value that is not a %s."),
  1976. err);
  1977. /* Assume it's not, unless we see that it is. */
  1978. if (static_memfuncp)
  1979. *static_memfuncp = 0;
  1980. if (!args.has_value ())
  1981. {
1982. /* There are no argument values to match against. */
  1983. /* Try as a field first, because if we succeed, there is less
  1984. work to be done. */
  1985. v = search_struct_field (name, *argp, t, 0);
  1986. if (v)
  1987. return v;
  1988. if (current_language->la_language == language_fortran)
  1989. {
1990. /* If it is not a field, it is the type name of an inherited
  1991. structure. */
  1992. v = search_struct_field (name, *argp, t, 1);
  1993. if (v)
  1994. return v;
  1995. }
  1996. /* C++: If it was not found as a data field, then try to
  1997. return it as a pointer to a method. */
  1998. v = search_struct_method (name, argp, args, 0,
  1999. static_memfuncp, t);
  2000. if (v == (struct value *) - 1)
  2001. error (_("Cannot take address of method %s."), name);
  2002. else if (v == 0)
  2003. {
  2004. if (TYPE_NFN_FIELDS (t))
  2005. error (_("There is no member or method named %s."), name);
  2006. else
  2007. error (_("There is no member named %s."), name);
  2008. }
  2009. return v;
  2010. }
  2011. v = search_struct_method (name, argp, args, 0,
  2012. static_memfuncp, t);
  2013. if (v == (struct value *) - 1)
  2014. {
  2015. error (_("One of the arguments you tried to pass to %s could not "
  2016. "be converted to what the function wants."), name);
  2017. }
  2018. else if (v == 0)
  2019. {
  2020. /* See if user tried to invoke data as function. If so, hand it
  2021. back. If it's not callable (i.e., a pointer to function),
  2022. gdb should give an error. */
  2023. v = search_struct_field (name, *argp, t, 0);
  2024. /* If we found an ordinary field, then it is not a method call.
  2025. So, treat it as if it were a static member function. */
  2026. if (v && static_memfuncp)
  2027. *static_memfuncp = 1;
  2028. }
  2029. if (!v)
  2030. throw_error (NOT_FOUND_ERROR,
  2031. _("Structure has no component named %s."), name);
  2032. return v;
  2033. }
  2034. /* Given *ARGP, a value of type structure or union, or a pointer/reference
  2035. to a structure or union, extract and return its component (field) of
  2036. type FTYPE at the specified BITPOS.
  2037. Throw an exception on error. */
  2038. struct value *
  2039. value_struct_elt_bitpos (struct value **argp, int bitpos, struct type *ftype,
  2040. const char *err)
  2041. {
  2042. struct type *t;
  2043. int i;
  2044. *argp = coerce_array (*argp);
  2045. t = check_typedef (value_type (*argp));
  2046. while (t->is_pointer_or_reference ())
  2047. {
  2048. *argp = value_ind (*argp);
  2049. if (check_typedef (value_type (*argp))->code () != TYPE_CODE_FUNC)
  2050. *argp = coerce_array (*argp);
  2051. t = check_typedef (value_type (*argp));
  2052. }
  2053. if (t->code () != TYPE_CODE_STRUCT
  2054. && t->code () != TYPE_CODE_UNION)
  2055. error (_("Attempt to extract a component of a value that is not a %s."),
  2056. err);
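/* Only the type's own non-static data members are considered; the
   loop starts past the base class fields.  */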
  2057. for (i = TYPE_N_BASECLASSES (t); i < t->num_fields (); i++)
  2058. {
  2059. if (!field_is_static (&t->field (i))
  2060. && bitpos == t->field (i).loc_bitpos ()
  2061. && types_equal (ftype, t->field (i).type ()))
  2062. return value_primitive_field (*argp, 0, i, t);
  2063. }
  2064. error (_("No field with matching bitpos and type."));
  2065. /* Never hit. */
  2066. return NULL;
  2067. }
  2068. /* Search through the methods of an object (and its bases) to find a
  2069. specified method. Return a reference to the fn_field list METHODS of
  2070. overloaded instances defined in the source language. If available
  2071. and matching, a vector of matching xmethods defined in extension
  2072. languages are also returned in XMETHODS.
  2073. Helper function for value_find_oload_list.
  2074. ARGP is a pointer to a pointer to a value (the object).
  2075. METHOD is a string containing the method name.
  2076. OFFSET is the offset within the value.
  2077. TYPE is the assumed type of the object.
  2078. METHODS is a pointer to the matching overloaded instances defined
  2079. in the source language. Since this is a recursive function,
  2080. *METHODS should be set to NULL when calling this function.
  2081. NUM_FNS is the number of overloaded instances. *NUM_FNS should be set to
  2082. 0 when calling this function.
  2083. XMETHODS is the vector of matching xmethod workers. *XMETHODS
  2084. should also be set to NULL when calling this function.
  2085. BASETYPE is set to the actual type of the subobject where the
  2086. method is found.
  2087. BOFFSET is the offset of the base subobject where the method is found. */
  2088. static void
  2089. find_method_list (struct value **argp, const char *method,
  2090. LONGEST offset, struct type *type,
  2091. gdb::array_view<fn_field> *methods,
  2092. std::vector<xmethod_worker_up> *xmethods,
  2093. struct type **basetype, LONGEST *boffset)
  2094. {
  2095. int i;
  2096. struct fn_field *f = NULL;
  2097. gdb_assert (methods != NULL && xmethods != NULL);
  2098. type = check_typedef (type);
  2099. /* First check in object itself.
  2100. This function is called recursively to search through base classes.
  2101. If there is a source method match found at some stage, then we need not
2102. look for source methods in subsequent recursive calls. */
  2103. if (methods->empty ())
  2104. {
  2105. for (i = TYPE_NFN_FIELDS (type) - 1; i >= 0; i--)
  2106. {
  2107. /* pai: FIXME What about operators and type conversions? */
  2108. const char *fn_field_name = TYPE_FN_FIELDLIST_NAME (type, i);
  2109. if (fn_field_name && (strcmp_iw (fn_field_name, method) == 0))
  2110. {
  2111. int len = TYPE_FN_FIELDLIST_LENGTH (type, i);
  2112. f = TYPE_FN_FIELDLIST1 (type, i);
  2113. *methods = gdb::make_array_view (f, len);
  2114. *basetype = type;
  2115. *boffset = offset;
  2116. /* Resolve any stub methods. */
  2117. check_stub_method_group (type, i);
  2118. break;
  2119. }
  2120. }
  2121. }
  2122. /* Unlike source methods, xmethods can be accumulated over successive
  2123. recursive calls. In other words, an xmethod named 'm' in a class
  2124. will not hide an xmethod named 'm' in its base class(es). We want
  2125. it to be this way because xmethods are after all convenience functions
  2126. and hence there is no point restricting them with something like method
  2127. hiding. Moreover, if hiding is done for xmethods as well, then we will
  2128. have to provide a mechanism to un-hide (like the 'using' construct). */
  2129. get_matching_xmethod_workers (type, method, xmethods);
  2130. /* If source methods are not found in current class, look for them in the
  2131. base classes. We also have to go through the base classes to gather
  2132. extension methods. */
  2133. for (i = TYPE_N_BASECLASSES (type) - 1; i >= 0; i--)
  2134. {
  2135. LONGEST base_offset;
  2136. if (BASETYPE_VIA_VIRTUAL (type, i))
  2137. {
  2138. base_offset = baseclass_offset (type, i,
  2139. value_contents_for_printing (*argp).data (),
  2140. value_offset (*argp) + offset,
  2141. value_address (*argp), *argp);
  2142. }
  2143. else /* Non-virtual base, simply use bit position from debug
  2144. info. */
  2145. {
  2146. base_offset = TYPE_BASECLASS_BITPOS (type, i) / 8;
  2147. }
  2148. find_method_list (argp, method, base_offset + offset,
  2149. TYPE_BASECLASS (type, i), methods,
  2150. xmethods, basetype, boffset);
  2151. }
  2152. }
  2153. /* Return the list of overloaded methods of a specified name. The methods
  2154. could be those GDB finds in the binary, or xmethod. Methods found in
  2155. the binary are returned in METHODS, and xmethods are returned in
  2156. XMETHODS.
  2157. ARGP is a pointer to a pointer to a value (the object).
  2158. METHOD is the method name.
  2159. OFFSET is the offset within the value contents.
  2160. METHODS is the list of matching overloaded instances defined in
  2161. the source language.
  2162. XMETHODS is the vector of matching xmethod workers defined in
  2163. extension languages.
  2164. BASETYPE is set to the type of the base subobject that defines the
  2165. method.
  2166. BOFFSET is the offset of the base subobject which defines the method. */
  2167. static void
  2168. value_find_oload_method_list (struct value **argp, const char *method,
  2169. LONGEST offset,
  2170. gdb::array_view<fn_field> *methods,
  2171. std::vector<xmethod_worker_up> *xmethods,
  2172. struct type **basetype, LONGEST *boffset)
  2173. {
  2174. struct type *t;
  2175. t = check_typedef (value_type (*argp));
  2176. /* Code snarfed from value_struct_elt. */
  2177. while (t->is_pointer_or_reference ())
  2178. {
  2179. *argp = value_ind (*argp);
  2180. /* Don't coerce fn pointer to fn and then back again! */
  2181. if (check_typedef (value_type (*argp))->code () != TYPE_CODE_FUNC)
  2182. *argp = coerce_array (*argp);
  2183. t = check_typedef (value_type (*argp));
  2184. }
  2185. if (t->code () != TYPE_CODE_STRUCT
  2186. && t->code () != TYPE_CODE_UNION)
  2187. error (_("Attempt to extract a component of a "
  2188. "value that is not a struct or union"));
  2189. gdb_assert (methods != NULL && xmethods != NULL);
  2190. /* Clear the lists. */
  2191. *methods = {};
  2192. xmethods->clear ();
  2193. find_method_list (argp, method, 0, t, methods, xmethods,
  2194. basetype, boffset);
  2195. }
  2196. /* Given an array of arguments (ARGS) (which includes an entry for
  2197. "this" in the case of C++ methods), the NAME of a function, and
  2198. whether it's a method or not (METHOD), find the best function that
  2199. matches on the argument types according to the overload resolution
  2200. rules.
  2201. METHOD can be one of three values:
2202. NON_METHOD: for non-member functions.
  2203. METHOD: for member functions.
  2204. BOTH: used for overload resolution of operators where the
  2205. candidates are expected to be either member or non member
  2206. functions. In this case the first argument ARGTYPES
  2207. (representing 'this') is expected to be a reference to the
  2208. target object, and will be dereferenced when attempting the
  2209. non-member search.
  2210. In the case of class methods, the parameter OBJ is an object value
  2211. in which to search for overloaded methods.
  2212. In the case of non-method functions, the parameter FSYM is a symbol
  2213. corresponding to one of the overloaded functions.
  2214. Return value is an integer: 0 -> good match, 10 -> debugger applied
  2215. non-standard coercions, 100 -> incompatible.
  2216. If a method is being searched for, VALP will hold the value.
  2217. If a non-method is being searched for, SYMP will hold the symbol
  2218. for it.
  2219. If a method is being searched for, and it is a static method,
  2220. then STATICP will point to a non-zero value.
2221. If NO_ADL is set, argument-dependent lookup is disabled. This is used to prevent
  2222. ADL overload candidates when performing overload resolution for a fully
  2223. qualified name.
  2224. If NOSIDE is EVAL_AVOID_SIDE_EFFECTS, then OBJP's memory cannot be
  2225. read while picking the best overload match (it may be all zeroes and thus
  2226. not have a vtable pointer), in which case skip virtual function lookup.
  2227. This is ok as typically EVAL_AVOID_SIDE_EFFECTS is only used to determine
  2228. the result type.
  2229. Note: This function does *not* check the value of
  2230. overload_resolution. Caller must check it to see whether overload
  2231. resolution is permitted. */
  2232. int
  2233. find_overload_match (gdb::array_view<value *> args,
  2234. const char *name, enum oload_search_type method,
  2235. struct value **objp, struct symbol *fsym,
  2236. struct value **valp, struct symbol **symp,
  2237. int *staticp, const int no_adl,
  2238. const enum noside noside)
  2239. {
  2240. struct value *obj = (objp ? *objp : NULL);
  2241. struct type *obj_type = obj ? value_type (obj) : NULL;
  2242. /* Index of best overloaded function. */
  2243. int func_oload_champ = -1;
  2244. int method_oload_champ = -1;
  2245. int src_method_oload_champ = -1;
  2246. int ext_method_oload_champ = -1;
  2247. /* The measure for the current best match. */
  2248. badness_vector method_badness;
  2249. badness_vector func_badness;
  2250. badness_vector ext_method_badness;
  2251. badness_vector src_method_badness;
  2252. struct value *temp = obj;
  2253. /* For methods, the list of overloaded methods. */
  2254. gdb::array_view<fn_field> methods;
  2255. /* For non-methods, the list of overloaded function symbols. */
  2256. std::vector<symbol *> functions;
  2257. /* For xmethods, the vector of xmethod workers. */
  2258. std::vector<xmethod_worker_up> xmethods;
  2259. struct type *basetype = NULL;
  2260. LONGEST boffset;
  2261. const char *obj_type_name = NULL;
  2262. const char *func_name = NULL;
  2263. gdb::unique_xmalloc_ptr<char> temp_func;
  2264. enum oload_classification match_quality;
  2265. enum oload_classification method_match_quality = INCOMPATIBLE;
  2266. enum oload_classification src_method_match_quality = INCOMPATIBLE;
  2267. enum oload_classification ext_method_match_quality = INCOMPATIBLE;
  2268. enum oload_classification func_match_quality = INCOMPATIBLE;
  2269. /* Get the list of overloaded methods or functions. */
  2270. if (method == METHOD || method == BOTH)
  2271. {
  2272. gdb_assert (obj);
  2273. /* OBJ may be a pointer value rather than the object itself. */
  2274. obj = coerce_ref (obj);
  2275. while (check_typedef (value_type (obj))->code () == TYPE_CODE_PTR)
  2276. obj = coerce_ref (value_ind (obj));
  2277. obj_type_name = value_type (obj)->name ();
  2278. /* First check whether this is a data member, e.g. a pointer to
  2279. a function. */
  2280. if (check_typedef (value_type (obj))->code () == TYPE_CODE_STRUCT)
  2281. {
  2282. *valp = search_struct_field (name, obj,
  2283. check_typedef (value_type (obj)), 0);
  2284. if (*valp)
  2285. {
  2286. *staticp = 1;
  2287. return 0;
  2288. }
  2289. }
  2290. /* Retrieve the list of methods with the name NAME. */
  2291. value_find_oload_method_list (&temp, name, 0, &methods,
  2292. &xmethods, &basetype, &boffset);
  2293. /* If this is a method only search, and no methods were found
  2294. the search has failed. */
  2295. if (method == METHOD && methods.empty () && xmethods.empty ())
  2296. error (_("Couldn't find method %s%s%s"),
  2297. obj_type_name,
  2298. (obj_type_name && *obj_type_name) ? "::" : "",
  2299. name);
  2300. /* If we are dealing with stub method types, they should have
  2301. been resolved by find_method_list via
  2302. value_find_oload_method_list above. */
  2303. if (!methods.empty ())
  2304. {
  2305. gdb_assert (TYPE_SELF_TYPE (methods[0].type) != NULL);
  2306. src_method_oload_champ
  2307. = find_oload_champ (args,
  2308. methods.size (),
  2309. methods.data (), NULL, NULL,
  2310. &src_method_badness);
  2311. src_method_match_quality = classify_oload_match
  2312. (src_method_badness, args.size (),
  2313. oload_method_static_p (methods.data (), src_method_oload_champ));
  2314. }
  2315. if (!xmethods.empty ())
  2316. {
  2317. ext_method_oload_champ
  2318. = find_oload_champ (args,
  2319. xmethods.size (),
  2320. NULL, xmethods.data (), NULL,
  2321. &ext_method_badness);
  2322. ext_method_match_quality = classify_oload_match (ext_method_badness,
  2323. args.size (), 0);
  2324. }
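/* Both a source method and an xmethod matched; use compare_badness
   to decide which one wins.  */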
  2325. if (src_method_oload_champ >= 0 && ext_method_oload_champ >= 0)
  2326. {
  2327. switch (compare_badness (ext_method_badness, src_method_badness))
  2328. {
  2329. case 0: /* Src method and xmethod are equally good. */
  2330. /* If src method and xmethod are equally good, then
  2331. xmethod should be the winner. Hence, fall through to the
2332. case where an xmethod is better than the source
  2333. method, except when the xmethod match quality is
  2334. non-standard. */
  2335. /* FALLTHROUGH */
  2336. case 1: /* Src method and ext method are incompatible. */
  2337. /* If ext method match is not standard, then let source method
  2338. win. Otherwise, fallthrough to let xmethod win. */
  2339. if (ext_method_match_quality != STANDARD)
  2340. {
  2341. method_oload_champ = src_method_oload_champ;
  2342. method_badness = src_method_badness;
  2343. ext_method_oload_champ = -1;
  2344. method_match_quality = src_method_match_quality;
  2345. break;
  2346. }
  2347. /* FALLTHROUGH */
  2348. case 2: /* Ext method is champion. */
  2349. method_oload_champ = ext_method_oload_champ;
  2350. method_badness = ext_method_badness;
  2351. src_method_oload_champ = -1;
  2352. method_match_quality = ext_method_match_quality;
  2353. break;
  2354. case 3: /* Src method is champion. */
  2355. method_oload_champ = src_method_oload_champ;
  2356. method_badness = src_method_badness;
  2357. ext_method_oload_champ = -1;
  2358. method_match_quality = src_method_match_quality;
  2359. break;
  2360. default:
  2361. gdb_assert_not_reached ("Unexpected overload comparison "
  2362. "result");
  2363. break;
  2364. }
  2365. }
  2366. else if (src_method_oload_champ >= 0)
  2367. {
  2368. method_oload_champ = src_method_oload_champ;
  2369. method_badness = src_method_badness;
  2370. method_match_quality = src_method_match_quality;
  2371. }
  2372. else if (ext_method_oload_champ >= 0)
  2373. {
  2374. method_oload_champ = ext_method_oload_champ;
  2375. method_badness = ext_method_badness;
  2376. method_match_quality = ext_method_match_quality;
  2377. }
  2378. }
  2379. if (method == NON_METHOD || method == BOTH)
  2380. {
  2381. const char *qualified_name = NULL;
2382. /* If the overload match is being searched for both as a method
2383. and a non-member function, the first argument must now be
  2384. dereferenced. */
  2385. if (method == BOTH)
  2386. args[0] = value_ind (args[0]);
  2387. if (fsym)
  2388. {
  2389. qualified_name = fsym->natural_name ();
  2390. /* If we have a function with a C++ name, try to extract just
  2391. the function part. Do not try this for non-functions (e.g.
  2392. function pointers). */
  2393. if (qualified_name
  2394. && (check_typedef (fsym->type ())->code ()
  2395. == TYPE_CODE_FUNC))
  2396. {
  2397. temp_func = cp_func_name (qualified_name);
  2398. /* If cp_func_name did not remove anything, the name of the
  2399. symbol did not include scope or argument types - it was
  2400. probably a C-style function. */
  2401. if (temp_func != nullptr)
  2402. {
  2403. if (strcmp (temp_func.get (), qualified_name) == 0)
  2404. func_name = NULL;
  2405. else
  2406. func_name = temp_func.get ();
  2407. }
  2408. }
  2409. }
  2410. else
  2411. {
  2412. func_name = name;
  2413. qualified_name = name;
  2414. }
  2415. /* If there was no C++ name, this must be a C-style function or
  2416. not a function at all. Just return the same symbol. Do the
  2417. same if cp_func_name fails for some reason. */
  2418. if (func_name == NULL)
  2419. {
  2420. *symp = fsym;
  2421. return 0;
  2422. }
  2423. func_oload_champ = find_oload_champ_namespace (args,
  2424. func_name,
  2425. qualified_name,
  2426. &functions,
  2427. &func_badness,
  2428. no_adl);
  2429. if (func_oload_champ >= 0)
  2430. func_match_quality = classify_oload_match (func_badness,
  2431. args.size (), 0);
  2432. }

  /* Did we find a match?  */
  if (method_oload_champ == -1 && func_oload_champ == -1)
    throw_error (NOT_FOUND_ERROR,
                 _("No symbol \"%s\" in current context."),
                 name);

  /* If we have found both a method match and a function match, find
     out which one is better, and calculate the match quality.  */
  if (method_oload_champ >= 0 && func_oload_champ >= 0)
    {
      switch (compare_badness (func_badness, method_badness))
        {
        case 0: /* Top two contenders are equally good.  */
          /* FIXME: GDB does not support the general ambiguous case.
             All candidates should be collected and presented to the
             user.  */
          error (_("Ambiguous overload resolution"));
          break;
        case 1: /* Incomparable top contenders.  */
          /* This is an error: incompatible candidates
             should not have been proposed.  */
          error (_("Internal error: incompatible "
                   "overload candidates proposed"));
          break;
        case 2: /* Function champion.  */
          method_oload_champ = -1;
          match_quality = func_match_quality;
          break;
        case 3: /* Method champion.  */
          func_oload_champ = -1;
          match_quality = method_match_quality;
          break;
        default:
          error (_("Internal error: unexpected overload comparison result"));
          break;
        }
    }
  else
    {
      /* We have either a method match or a function match.  */
      if (method_oload_champ >= 0)
        match_quality = method_match_quality;
      else
        match_quality = func_match_quality;
    }

  if (match_quality == INCOMPATIBLE)
    {
      if (method == METHOD)
        error (_("Cannot resolve method %s%s%s to any overloaded instance"),
               obj_type_name,
               (obj_type_name && *obj_type_name) ? "::" : "",
               name);
      else
        error (_("Cannot resolve function %s to any overloaded instance"),
               func_name);
    }
  else if (match_quality == NON_STANDARD)
    {
      if (method == METHOD)
        warning (_("Using non-standard conversion to match "
                   "method %s%s%s to supplied arguments"),
                 obj_type_name,
                 (obj_type_name && *obj_type_name) ? "::" : "",
                 name);
      else
        warning (_("Using non-standard conversion to match "
                   "function %s to supplied arguments"),
                 func_name);
    }

  if (staticp != NULL)
    *staticp = oload_method_static_p (methods.data (), method_oload_champ);

  if (method_oload_champ >= 0)
    {
      if (src_method_oload_champ >= 0)
        {
          if (TYPE_FN_FIELD_VIRTUAL_P (methods, method_oload_champ)
              && noside != EVAL_AVOID_SIDE_EFFECTS)
            {
              *valp = value_virtual_fn_field (&temp, methods.data (),
                                              method_oload_champ, basetype,
                                              boffset);
            }
          else
            *valp = value_fn_field (&temp, methods.data (),
                                    method_oload_champ, basetype, boffset);
        }
      else
        *valp = value_from_xmethod
          (std::move (xmethods[ext_method_oload_champ]));
    }
  else
    *symp = functions[func_oload_champ];

  if (objp)
    {
      struct type *temp_type = check_typedef (value_type (temp));
      struct type *objtype = check_typedef (obj_type);

      if (temp_type->code () != TYPE_CODE_PTR
          && objtype->is_pointer_or_reference ())
        {
          temp = value_addr (temp);
        }
      *objp = temp;
    }

  switch (match_quality)
    {
    case INCOMPATIBLE:
      return 100;
    case NON_STANDARD:
      return 10;
    default:			/* STANDARD */
      return 0;
    }
}

/* Find the best overload match, searching for FUNC_NAME in namespaces
   contained in QUALIFIED_NAME until it either finds a good match or
   runs out of namespaces.  It stores the overloaded functions in
   *OLOAD_SYMS, and the badness vector in *OLOAD_CHAMP_BV.  If NO_ADL,
   argument dependent lookup is not performed.  */
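
/* Illustrative sketch (editorial, not part of the original source):
   for a user expression such as

     (gdb) print A::B::overloaded (1.5)

   QUALIFIED_NAME would be "A::B::overloaded" and FUNC_NAME "overloaded".
   The loop below first tries the deepest enclosing namespace "A::B",
   then falls back to "A" and finally to the global namespace, keeping
   the first standard-quality champion it finds.  */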
static int
find_oload_champ_namespace (gdb::array_view<value *> args,
                            const char *func_name,
                            const char *qualified_name,
                            std::vector<symbol *> *oload_syms,
                            badness_vector *oload_champ_bv,
                            const int no_adl)
{
  int oload_champ;

  find_oload_champ_namespace_loop (args,
                                   func_name,
                                   qualified_name, 0,
                                   oload_syms, oload_champ_bv,
                                   &oload_champ,
                                   no_adl);

  return oload_champ;
}

/* Helper function for find_oload_champ_namespace; NAMESPACE_LEN is
   how deep we've looked for namespaces, and the champ is stored in
   OLOAD_CHAMP.  The return value is 1 if the champ is a good one, 0
   if it isn't.  Other arguments are the same as in
   find_oload_champ_namespace.  */
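
/* Illustrative note (editorial, not part of the original source):
   with QUALIFIED_NAME == "A::B::func", the successive recursive calls
   see NAMESPACE_LEN == 0, 1 and 4, corresponding to the candidate
   namespaces "", "A" and "A::B".  The deepest call searches "A::B"
   (with ADL) first; shallower namespaces are only tried if no
   standard-quality match was found there.  */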
static int
find_oload_champ_namespace_loop (gdb::array_view<value *> args,
                                 const char *func_name,
                                 const char *qualified_name,
                                 int namespace_len,
                                 std::vector<symbol *> *oload_syms,
                                 badness_vector *oload_champ_bv,
                                 int *oload_champ,
                                 const int no_adl)
{
  int next_namespace_len = namespace_len;
  int searched_deeper = 0;
  int new_oload_champ;
  char *new_namespace;

  if (next_namespace_len != 0)
    {
      gdb_assert (qualified_name[next_namespace_len] == ':');
      next_namespace_len += 2;
    }
  next_namespace_len +=
    cp_find_first_component (qualified_name + next_namespace_len);

  /* First, see if we have a deeper namespace we can search in.
     If we get a good match there, use it.  */
  if (qualified_name[next_namespace_len] == ':')
    {
      searched_deeper = 1;

      if (find_oload_champ_namespace_loop (args,
                                           func_name, qualified_name,
                                           next_namespace_len,
                                           oload_syms, oload_champ_bv,
                                           oload_champ, no_adl))
        {
          return 1;
        }
    }

  /* If we reach here, either we're in the deepest namespace or we
     didn't find a good match in a deeper namespace.  But, in the
     latter case, we still have a bad match in a deeper namespace;
     note that we might not find any match at all in the current
     namespace.  (There's always a match in the deepest namespace,
     because this overload mechanism only gets called if there's a
     function symbol to start off with.)  */

  new_namespace = (char *) alloca (namespace_len + 1);
  strncpy (new_namespace, qualified_name, namespace_len);
  new_namespace[namespace_len] = '\0';

  std::vector<symbol *> new_oload_syms
    = make_symbol_overload_list (func_name, new_namespace);

  /* If we have reached the deepest level, perform argument-dependent
     lookup (ADL).  */
  if (!searched_deeper && !no_adl)
    {
      int ix;
      struct type **arg_types;

      /* Prepare list of argument types for overload resolution.  */
      arg_types = (struct type **)
        alloca (args.size () * (sizeof (struct type *)));
      for (ix = 0; ix < args.size (); ix++)
        arg_types[ix] = value_type (args[ix]);
      add_symbol_overload_list_adl ({arg_types, args.size ()}, func_name,
                                    &new_oload_syms);
    }

  badness_vector new_oload_champ_bv;
  new_oload_champ = find_oload_champ (args,
                                      new_oload_syms.size (),
                                      NULL, NULL, new_oload_syms.data (),
                                      &new_oload_champ_bv);

  /* Case 1: We found a good match.  Free earlier matches (if any),
     and return it.  Case 2: We didn't find a good match, but we're
     not the deepest function.  Then go with the bad match that the
     deeper function found.  Case 3: We found a bad match, and we're
     the deepest function.  Then return what we found, even though
     it's a bad match.  */

  if (new_oload_champ != -1
      && classify_oload_match (new_oload_champ_bv, args.size (), 0) == STANDARD)
    {
      *oload_syms = std::move (new_oload_syms);
      *oload_champ = new_oload_champ;
      *oload_champ_bv = std::move (new_oload_champ_bv);
      return 1;
    }
  else if (searched_deeper)
    {
      return 0;
    }
  else
    {
      *oload_syms = std::move (new_oload_syms);
      *oload_champ = new_oload_champ;
      *oload_champ_bv = std::move (new_oload_champ_bv);
      return 0;
    }
}

/* Look for a function to take ARGS.  Find the best match from among
   the overloaded methods or functions given by METHODS or FUNCTIONS
   or XMETHODS, respectively.  One, and only one of METHODS, FUNCTIONS
   and XMETHODS can be non-NULL.

   NUM_FNS is the length of the array pointed at by METHODS, FUNCTIONS
   or XMETHODS, whichever is non-NULL.

   Return the index of the best match; store an indication of the
   quality of the match in OLOAD_CHAMP_BV.  */
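
/* Illustrative sketch (editorial, not part of the original source):
   given candidates

     void f (int);      // candidate 0
     void f (double);   // candidate 1

   and a call "f (3)", rank_function gives candidate 0 an exact-match
   badness and candidate 1 a conversion badness, so candidate 0 becomes
   and remains the champion whose index is returned below.  */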
static int
find_oload_champ (gdb::array_view<value *> args,
                  size_t num_fns,
                  fn_field *methods,
                  xmethod_worker_up *xmethods,
                  symbol **functions,
                  badness_vector *oload_champ_bv)
{
  /* A measure of how good an overloaded instance is.  */
  badness_vector bv;
  /* Index of best overloaded function.  */
  int oload_champ = -1;
  /* Current ambiguity state for overload resolution.  */
  int oload_ambiguous = 0;
  /* 0 => no ambiguity, 1 => two good funcs, 2 => incomparable funcs.  */

  /* A champion can be found among methods alone, or among functions
     alone, or in xmethods alone, but not in more than one of these
     groups.  */
  gdb_assert ((methods != NULL) + (functions != NULL) + (xmethods != NULL)
              == 1);

  /* Consider each candidate in turn.  */
  for (size_t ix = 0; ix < num_fns; ix++)
    {
      int jj;
      int static_offset = 0;
      std::vector<type *> parm_types;

      if (xmethods != NULL)
        parm_types = xmethods[ix]->get_arg_types ();
      else
        {
          size_t nparms;

          if (methods != NULL)
            {
              nparms = TYPE_FN_FIELD_TYPE (methods, ix)->num_fields ();
              static_offset = oload_method_static_p (methods, ix);
            }
          else
            nparms = functions[ix]->type ()->num_fields ();

          parm_types.reserve (nparms);
          for (jj = 0; jj < nparms; jj++)
            {
              type *t = (methods != NULL
                         ? (TYPE_FN_FIELD_ARGS (methods, ix)[jj].type ())
                         : functions[ix]->type ()->field (jj).type ());
              parm_types.push_back (t);
            }
        }

      /* Compare parameter types to supplied argument types.  Skip
         THIS for static methods.  */
      bv = rank_function (parm_types,
                          args.slice (static_offset));

      if (overload_debug)
        {
          if (methods != NULL)
            gdb_printf (gdb_stderr,
                        "Overloaded method instance %s, # of parms %d\n",
                        methods[ix].physname, (int) parm_types.size ());
          else if (xmethods != NULL)
            gdb_printf (gdb_stderr,
                        "Xmethod worker, # of parms %d\n",
                        (int) parm_types.size ());
          else
            gdb_printf (gdb_stderr,
                        "Overloaded function instance "
                        "%s # of parms %d\n",
                        functions[ix]->demangled_name (),
                        (int) parm_types.size ());

          gdb_printf (gdb_stderr,
                      "...Badness of length : {%d, %d}\n",
                      bv[0].rank, bv[0].subrank);

          for (jj = 1; jj < bv.size (); jj++)
            gdb_printf (gdb_stderr,
                        "...Badness of arg %d : {%d, %d}\n",
                        jj, bv[jj].rank, bv[jj].subrank);
        }

      if (oload_champ_bv->empty ())
        {
          *oload_champ_bv = std::move (bv);
          oload_champ = 0;
        }
      else /* See whether current candidate is better or worse than
              previous best.  */
        switch (compare_badness (bv, *oload_champ_bv))
          {
          case 0:		/* Top two contenders are equally good.  */
            oload_ambiguous = 1;
            break;
          case 1:		/* Incomparable top contenders.  */
            oload_ambiguous = 2;
            break;
          case 2:		/* New champion, record details.  */
            *oload_champ_bv = std::move (bv);
            oload_ambiguous = 0;
            oload_champ = ix;
            break;
          case 3:
          default:
            break;
          }

      if (overload_debug)
        gdb_printf (gdb_stderr, "Overload resolution "
                    "champion is %d, ambiguous? %d\n",
                    oload_champ, oload_ambiguous);
    }

  return oload_champ;
}

/* Return 1 if we're looking at a static method, 0 if we're looking at
   a non-static method or a function that isn't a method.  */

static int
oload_method_static_p (struct fn_field *fns_ptr, int index)
{
  if (fns_ptr && index >= 0 && TYPE_FN_FIELD_STATIC_P (fns_ptr, index))
    return 1;
  else
    return 0;
}

/* Check how good an overload match OLOAD_CHAMP_BV represents.  */
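
/* Illustrative note (editorial, not part of the original source):
   an argument whose best conversion is ranked at
   INCOMPATIBLE_TYPE_BADNESS or worse makes the whole match
   INCOMPATIBLE; one ranked at NS_POINTER_CONVERSION_BADNESS or worse
   downgrades it to NON_STANDARD (the caller then warns about a
   non-standard conversion); otherwise the match stays STANDARD.  */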
static enum oload_classification
classify_oload_match (const badness_vector &oload_champ_bv,
                      int nargs,
                      int static_offset)
{
  int ix;
  enum oload_classification worst = STANDARD;

  for (ix = 1; ix <= nargs - static_offset; ix++)
    {
      /* If this conversion is as bad as INCOMPATIBLE_TYPE_BADNESS
         or worse, return INCOMPATIBLE.  */
      if (compare_ranks (oload_champ_bv[ix],
                         INCOMPATIBLE_TYPE_BADNESS) <= 0)
        return INCOMPATIBLE;	/* Truly mismatched types.  */
      /* Otherwise, if this conversion is as bad as
         NS_POINTER_CONVERSION_BADNESS or worse, record NON_STANDARD.  */
      else if (compare_ranks (oload_champ_bv[ix],
                              NS_POINTER_CONVERSION_BADNESS) <= 0)
        worst = NON_STANDARD;	/* Non-standard type conversions
                                   needed.  */
    }

  /* If no INCOMPATIBLE classification was found, return the worst one
     that was found (if any).  */
  return worst;
}

/* C++: Return 1 if NAME is a legitimate name for the destructor of
   type TYPE.  If TYPE does not have a destructor, or if NAME is
   inappropriate for TYPE, an error is signaled.  Parameter TYPE should
   not yet have CHECK_TYPEDEF applied; this function will apply it
   itself.  */
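
/* Illustrative example (editorial, not part of the original source):
   for "struct Foo { ~Foo (); };" the name "~Foo" is accepted, and for
   a template class such as "Box<int>" the template arguments are
   ignored, so "~Box" matches as well; "~Bar" against type Foo raises
   the error below.  */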
int
destructor_name_p (const char *name, struct type *type)
{
  if (name[0] == '~')
    {
      const char *dname = type_name_or_error (type);
      const char *cp = strchr (dname, '<');
      unsigned int len;

      /* Do not compare the template part for template classes.  */
      if (cp == NULL)
        len = strlen (dname);
      else
        len = cp - dname;
      if (strlen (name + 1) != len || strncmp (dname, name + 1, len) != 0)
        error (_("name of destructor must equal name of class"));
      else
        return 1;
    }
  return 0;
}

/* Find an enum constant named NAME in TYPE.  TYPE must be an "enum
   class".  If the name is found, return a value representing it;
   otherwise throw an exception.  */
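
/* Illustrative example (editorial, not part of the original source):
   for "enum class Color { red, green };" the field names recorded in
   the type are qualified, e.g. "Color::red", so looking up NAME "red"
   succeeds by matching that trailing "::red" suffix and returns a
   value of type Color holding the enumerator's value.  */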
static struct value *
enum_constant_from_type (struct type *type, const char *name)
{
  int i;
  int name_len = strlen (name);

  gdb_assert (type->code () == TYPE_CODE_ENUM
              && type->is_declared_class ());

  for (i = TYPE_N_BASECLASSES (type); i < type->num_fields (); ++i)
    {
      const char *fname = type->field (i).name ();
      int len;

      if (type->field (i).loc_kind () != FIELD_LOC_KIND_ENUMVAL
          || fname == NULL)
        continue;

      /* Look for the trailing "::NAME", since enum class constant
         names are qualified here.  The field name must be long enough
         to hold the "::" separator plus NAME.  */
      len = strlen (fname);
      if (len >= name_len + 2
          && fname[len - name_len - 2] == ':'
          && fname[len - name_len - 1] == ':'
          && strcmp (&fname[len - name_len], name) == 0)
        return value_from_longest (type, type->field (i).loc_enumval ());
    }

  error (_("no constant named \"%s\" in enum \"%s\""),
         name, type->name ());
}

/* C++: Given an aggregate type CURTYPE, and a member name NAME,
   return the appropriate member (or the address of the member, if
   WANT_ADDRESS).  This function is used to resolve user expressions
   of the form "DOMAIN::NAME".  For more details on what happens, see
   the comment before value_struct_elt_for_reference.  */
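
/* Illustrative examples (editorial, not part of the original source):
   the dispatch below handles expressions such as

     (gdb) print MyClass::static_member     // struct/union case
     (gdb) print my_namespace::a_variable   // namespace case
     (gdb) print Color::red                 // enum class case

   Anything else is an internal error, since the parser should only
   build this lookup for aggregate scopes.  */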
struct value *
value_aggregate_elt (struct type *curtype, const char *name,
                     struct type *expect_type, int want_address,
                     enum noside noside)
{
  switch (curtype->code ())
    {
    case TYPE_CODE_STRUCT:
    case TYPE_CODE_UNION:
      return value_struct_elt_for_reference (curtype, 0, curtype,
                                             name, expect_type,
                                             want_address, noside);
    case TYPE_CODE_NAMESPACE:
      return value_namespace_elt (curtype, name,
                                  want_address, noside);

    case TYPE_CODE_ENUM:
      return enum_constant_from_type (curtype, name);

    default:
      internal_error (__FILE__, __LINE__,
                      _("non-aggregate type in value_aggregate_elt"));
    }
}

/* Compares the two method/function types T1 and T2 for "equality"
   with respect to the methods' parameters.  If the types of the
   two parameter lists are the same, returns 1; 0 otherwise.  This
   comparison may ignore any artificial parameters in T1 if
   SKIP_ARTIFICIAL is non-zero.  This function will ALWAYS skip
   the first artificial parameter in T1, assumed to be a 'this' pointer.

   The type T2 is expected to have come from make_params (in eval.c).  */
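
/* Illustrative example (editorial, not part of the original source):
   when the user writes "print Klass::method(int)", T2 describes the
   single parameter "(int)".  A member function "void Klass::method
   (int)" carries an artificial 'this' as its first field, which is
   skipped here, so the remaining parameter lists compare equal and the
   instantiation matches.  */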
static int
compare_parameters (struct type *t1, struct type *t2, int skip_artificial)
{
  int start = 0;

  if (t1->num_fields () > 0 && TYPE_FIELD_ARTIFICIAL (t1, 0))
    ++start;

  /* If skipping artificial fields, find the first real field
     in T1.  */
  if (skip_artificial)
    {
      while (start < t1->num_fields ()
             && TYPE_FIELD_ARTIFICIAL (t1, start))
        ++start;
    }

  /* Now compare parameters.  */

  /* Special case: a method taking void.  T1 will contain no
     non-artificial fields, and T2 will contain TYPE_CODE_VOID.  */
  if ((t1->num_fields () - start) == 0 && t2->num_fields () == 1
      && t2->field (0).type ()->code () == TYPE_CODE_VOID)
    return 1;

  if ((t1->num_fields () - start) == t2->num_fields ())
    {
      int i;

      for (i = 0; i < t2->num_fields (); ++i)
        {
          if (compare_ranks (rank_one_type (t1->field (start + i).type (),
                                            t2->field (i).type (), NULL),
                             EXACT_MATCH_BADNESS) != 0)
            return 0;
        }

      return 1;
    }

  return 0;
}

/* C++: Given an aggregate type VT, and a class type CLS, search
   recursively for CLS using value V.  If found, store in *BOFFS the
   offset of CLS: fetched from the virtual base pointer if CLS is a
   virtual base, or the accumulated offset of its parent classes if it
   is non-virtual.  Set *ISVIRT to indicate whether CLS is virtual, and
   return true.  If not found, return false.  */
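
/* Illustrative example (editorial, not part of the original source):
   given

     struct A { int a; };
     struct B : A { int b; };
     struct C : B { int c; };

   a search for CLS == A within VT == C recurses through B and returns
   the accumulated non-virtual offset of A inside C; had A been a
   virtual base, the offset would instead be read through the virtual
   base pointer of the object V.  */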
static bool
get_baseclass_offset (struct type *vt, struct type *cls,
                      struct value *v, int *boffs, bool *isvirt)
{
  for (int i = 0; i < TYPE_N_BASECLASSES (vt); i++)
    {
      struct type *t = vt->field (i).type ();

      if (types_equal (t, cls))
        {
          if (BASETYPE_VIA_VIRTUAL (vt, i))
            {
              const gdb_byte *adr = value_contents_for_printing (v).data ();
              *boffs = baseclass_offset (vt, i, adr, value_offset (v),
                                         value_as_long (v), v);
              *isvirt = true;
            }
          else
            *isvirt = false;
          return true;
        }

      if (get_baseclass_offset (check_typedef (t), cls, v, boffs, isvirt))
        {
          if (*isvirt == false)	/* Add non-virtual base offset.  */
            {
              const gdb_byte *adr = value_contents_for_printing (v).data ();
              *boffs += baseclass_offset (vt, i, adr, value_offset (v),
                                          value_as_long (v), v);
            }
          return true;
        }
    }

  return false;
}

/* C++: Given an aggregate type CURTYPE, and a member name NAME,
   return the address of this member as a "pointer to member" type.
   If INTYPE is non-null, then it will be the type of the member we
   are looking for.  This will help us resolve "pointers to member
   functions".  This function is used to resolve user expressions of
   the form "DOMAIN::NAME".  */
static struct value *
value_struct_elt_for_reference (struct type *domain, int offset,
                                struct type *curtype, const char *name,
                                struct type *intype,
                                int want_address,
                                enum noside noside)
{
  struct type *t = check_typedef (curtype);
  int i;
  struct value *result;

  if (t->code () != TYPE_CODE_STRUCT
      && t->code () != TYPE_CODE_UNION)
    error (_("Internal error: non-aggregate type "
             "to value_struct_elt_for_reference"));

  for (i = t->num_fields () - 1; i >= TYPE_N_BASECLASSES (t); i--)
    {
      const char *t_field_name = t->field (i).name ();

      if (t_field_name && strcmp (t_field_name, name) == 0)
        {
          if (field_is_static (&t->field (i)))
            {
              struct value *v = value_static_field (t, i);
              if (want_address)
                v = value_addr (v);
              return v;
            }
          if (TYPE_FIELD_PACKED (t, i))
            error (_("pointers to bitfield members not allowed"));

          if (want_address)
            return value_from_longest
              (lookup_memberptr_type (t->field (i).type (), domain),
               offset + (LONGEST) (t->field (i).loc_bitpos () >> 3));
          else if (noside != EVAL_NORMAL)
            return allocate_value (t->field (i).type ());
          else
            {
              /* Try to evaluate NAME as a qualified name with implicit
                 this pointer.  In this case, attempt to return the
                 equivalent to `this->*(&TYPE::NAME)'.  */
              struct value *v = value_of_this_silent (current_language);
              if (v != NULL)
                {
                  struct value *ptr, *this_v = v;
                  long mem_offset;
                  struct type *type, *tmp;

                  ptr = value_aggregate_elt (domain, name, NULL, 1, noside);
                  type = check_typedef (value_type (ptr));
                  gdb_assert (type != NULL
                              && type->code () == TYPE_CODE_MEMBERPTR);
                  tmp = lookup_pointer_type (TYPE_SELF_TYPE (type));
                  v = value_cast_pointers (tmp, v, 1);
                  mem_offset = value_as_long (ptr);
                  if (domain != curtype)
                    {
                      /* Find class offset of type CURTYPE from either its
                         parent type DOMAIN or the type of implied this.  */
                      int boff = 0;
                      bool isvirt = false;

                      if (get_baseclass_offset (domain, curtype, v, &boff,
                                                &isvirt))
                        mem_offset += boff;
                      else
                        {
                          struct type *p = check_typedef (value_type (this_v));
                          p = check_typedef (TYPE_TARGET_TYPE (p));
                          if (get_baseclass_offset (p, curtype, this_v,
                                                    &boff, &isvirt))
                            mem_offset += boff;
                        }
                    }
                  tmp = lookup_pointer_type (TYPE_TARGET_TYPE (type));
                  result = value_from_pointer (tmp,
                                               value_as_long (v) + mem_offset);
                  return value_ind (result);
                }

              error (_("Cannot reference non-static field \"%s\""), name);
            }
        }
    }

  /* C++: If it was not found as a data field, then try to return it
     as a pointer to a method.  */

  /* Perform all necessary dereferencing.  */
  while (intype && intype->code () == TYPE_CODE_PTR)
    intype = TYPE_TARGET_TYPE (intype);

  for (i = TYPE_NFN_FIELDS (t) - 1; i >= 0; --i)
    {
      const char *t_field_name = TYPE_FN_FIELDLIST_NAME (t, i);

      if (t_field_name && strcmp (t_field_name, name) == 0)
        {
          int j;
          int len = TYPE_FN_FIELDLIST_LENGTH (t, i);
          struct fn_field *f = TYPE_FN_FIELDLIST1 (t, i);

          check_stub_method_group (t, i);

          if (intype)
            {
              for (j = 0; j < len; ++j)
                {
                  if (TYPE_CONST (intype) != TYPE_FN_FIELD_CONST (f, j))
                    continue;
                  if (TYPE_VOLATILE (intype) != TYPE_FN_FIELD_VOLATILE (f, j))
                    continue;

                  if (compare_parameters (TYPE_FN_FIELD_TYPE (f, j), intype, 0)
                      || compare_parameters (TYPE_FN_FIELD_TYPE (f, j),
                                             intype, 1))
                    break;
                }

              if (j == len)
                error (_("no member function matches "
                         "that type instantiation"));
            }
          else
            {
              int ii;

              j = -1;
              for (ii = 0; ii < len; ++ii)
                {
                  /* Skip artificial methods.  This is necessary if,
                     for example, the user wants to "print
                     subclass::subclass" with only one user-defined
                     constructor.  There is no ambiguity in this case.
                     We are careful here to allow artificial methods
                     if they are the unique result.  */
                  if (TYPE_FN_FIELD_ARTIFICIAL (f, ii))
                    {
                      if (j == -1)
                        j = ii;
                      continue;
                    }

                  /* Desired method is ambiguous if more than one
                     method is defined.  */
                  if (j != -1 && !TYPE_FN_FIELD_ARTIFICIAL (f, j))
                    error (_("non-unique member `%s' requires "
                             "type instantiation"), name);

                  j = ii;
                }

              if (j == -1)
                error (_("no matching member function"));
            }

          if (TYPE_FN_FIELD_STATIC_P (f, j))
            {
              struct symbol *s =
                lookup_symbol (TYPE_FN_FIELD_PHYSNAME (f, j),
                               0, VAR_DOMAIN, 0).symbol;

              if (s == NULL)
                return NULL;

              if (want_address)
                return value_addr (read_var_value (s, 0, 0));
              else
                return read_var_value (s, 0, 0);
            }

          if (TYPE_FN_FIELD_VIRTUAL_P (f, j))
            {
              if (want_address)
                {
                  result = allocate_value
                    (lookup_methodptr_type (TYPE_FN_FIELD_TYPE (f, j)));
                  cplus_make_method_ptr (value_type (result),
                                         value_contents_writeable (result).data (),
                                         TYPE_FN_FIELD_VOFFSET (f, j), 1);
                }
              else if (noside == EVAL_AVOID_SIDE_EFFECTS)
                return allocate_value (TYPE_FN_FIELD_TYPE (f, j));
              else
                error (_("Cannot reference virtual member function \"%s\""),
                       name);
            }
          else
            {
              struct symbol *s =
                lookup_symbol (TYPE_FN_FIELD_PHYSNAME (f, j),
                               0, VAR_DOMAIN, 0).symbol;

              if (s == NULL)
                return NULL;

              struct value *v = read_var_value (s, 0, 0);
              if (!want_address)
                result = v;
              else
                {
                  result = allocate_value
                    (lookup_methodptr_type (TYPE_FN_FIELD_TYPE (f, j)));
                  cplus_make_method_ptr (value_type (result),
                                         value_contents_writeable (result).data (),
                                         value_address (v), 0);
                }
            }

          return result;
        }
    }

  for (i = TYPE_N_BASECLASSES (t) - 1; i >= 0; i--)
    {
      struct value *v;
      int base_offset;

      if (BASETYPE_VIA_VIRTUAL (t, i))
        base_offset = 0;
      else
        base_offset = TYPE_BASECLASS_BITPOS (t, i) / 8;

      v = value_struct_elt_for_reference (domain,
                                          offset + base_offset,
                                          TYPE_BASECLASS (t, i),
                                          name, intype,
                                          want_address, noside);
      if (v)
        return v;
    }

  /* As a last chance, pretend that CURTYPE is a namespace, and look
     it up that way; this (frequently) works for types nested inside
     classes.  */

  return value_maybe_namespace_elt (curtype, name,
                                    want_address, noside);
}

/* C++: Return the member NAME of the namespace given by the type
   CURTYPE.  */

static struct value *
value_namespace_elt (const struct type *curtype,
                     const char *name, int want_address,
                     enum noside noside)
{
  struct value *retval = value_maybe_namespace_elt (curtype, name,
                                                    want_address,
                                                    noside);

  if (retval == NULL)
    error (_("No symbol \"%s\" in namespace \"%s\"."),
           name, curtype->name ());

  return retval;
}

/* A helper function used by value_namespace_elt and
   value_struct_elt_for_reference.  It looks up NAME inside the
   context CURTYPE; this works if CURTYPE is a namespace or if CURTYPE
   is a class and NAME refers to a type in CURTYPE itself (as opposed
   to, say, some base class of CURTYPE).  */

static struct value *
value_maybe_namespace_elt (const struct type *curtype,
                           const char *name, int want_address,
                           enum noside noside)
{
  const char *namespace_name = curtype->name ();
  struct block_symbol sym;
  struct value *result;

  sym = cp_lookup_symbol_namespace (namespace_name, name,
                                    get_selected_block (0), VAR_DOMAIN);

  if (sym.symbol == NULL)
    return NULL;
  else if ((noside == EVAL_AVOID_SIDE_EFFECTS)
           && (sym.symbol->aclass () == LOC_TYPEDEF))
    result = allocate_value (sym.symbol->type ());
  else
    result = value_of_variable (sym.symbol, sym.block);

  if (want_address)
    result = value_addr (result);

  return result;
}

/* Given a pointer or a reference value V, find its real (RTTI) type.

   Other parameters FULL, TOP, USING_ENC as with value_rtti_type()
   and refer to the values computed for the object pointed to.  */
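
/* Illustrative example (editorial, not part of the original source):
   if V is a "Base *" that actually points at a Derived object, the
   result is the type "Derived *" (with V's const/volatile qualifiers
   copied over); for a "Base &" it is "Derived &".  A NULL result means
   no RTTI information was found or the pointer could not be read.  */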
struct type *
value_rtti_indirect_type (struct value *v, int *full,
                          LONGEST *top, int *using_enc)
{
  struct value *target = NULL;
  struct type *type, *real_type, *target_type;

  type = value_type (v);
  type = check_typedef (type);
  if (TYPE_IS_REFERENCE (type))
    target = coerce_ref (v);
  else if (type->code () == TYPE_CODE_PTR)
    {
      try
        {
          target = value_ind (v);
        }
      catch (const gdb_exception_error &except)
        {
          if (except.error == MEMORY_ERROR)
            {
              /* value_ind threw a memory error.  The pointer is NULL or
                 contains an uninitialized value: we can't determine any
                 type.  */
              return NULL;
            }
          throw;
        }
    }
  else
    return NULL;

  real_type = value_rtti_type (target, full, top, using_enc);

  if (real_type)
    {
      /* Copy qualifiers to the referenced object.  */
      target_type = value_type (target);
      real_type = make_cv_type (TYPE_CONST (target_type),
                                TYPE_VOLATILE (target_type), real_type, NULL);
      if (TYPE_IS_REFERENCE (type))
        real_type = lookup_reference_type (real_type, type->code ());
      else if (type->code () == TYPE_CODE_PTR)
        real_type = lookup_pointer_type (real_type);
      else
        internal_error (__FILE__, __LINE__, _("Unexpected value type."));

      /* Copy qualifiers to the pointer/reference.  */
      real_type = make_cv_type (TYPE_CONST (type), TYPE_VOLATILE (type),
                                real_type, NULL);
    }

  return real_type;
}

/* Given a value pointed to by ARGP, check its real run-time type, and
   if that is different from the enclosing type, create a new value
   using the real run-time type as the enclosing type (and of the same
   type as ARGP) and return it, with the embedded offset adjusted to
   be the correct offset to the enclosed object.  RTYPE is the type,
   and XFULL, XTOP, and XUSING_ENC are the other parameters, computed
   by value_rtti_type().  If these are available, they can be supplied
   and a second call to value_rtti_type() is avoided.  (Pass RTYPE ==
   NULL if they're not available.)  */
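
/* Illustrative example (editorial, not part of the original source):
   with "set print object on", printing a "Base *" that points into the
   middle of a Derived object goes through this function: the value is
   re-fetched at the start of the full Derived object and its embedded
   offset is set so that the Base subobject is still what the original
   value referred to.  */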
struct value *
value_full_object (struct value *argp,
                   struct type *rtype,
                   int xfull, int xtop,
                   int xusing_enc)
{
  struct type *real_type;
  int full = 0;
  LONGEST top = -1;
  int using_enc = 0;
  struct value *new_val;

  if (rtype)
    {
      real_type = rtype;
      full = xfull;
      top = xtop;
      using_enc = xusing_enc;
    }
  else
    real_type = value_rtti_type (argp, &full, &top, &using_enc);

  /* If no RTTI data, or if object is already complete, do nothing.  */
  if (!real_type || real_type == value_enclosing_type (argp))
    return argp;

  /* In a destructor we might see a real type that is a superclass of
     the object's type.  In this case it is better to leave the object
     as-is.  */
  if (full
      && TYPE_LENGTH (real_type) < TYPE_LENGTH (value_enclosing_type (argp)))
    return argp;

  /* If we have the full object, but for some reason the enclosing
     type is wrong, set it.  */
  /* pai: FIXME -- sounds iffy */
  if (full)
    {
      argp = value_copy (argp);
      set_value_enclosing_type (argp, real_type);
      return argp;
    }

  /* Check if object is in memory.  */
  if (VALUE_LVAL (argp) != lval_memory)
    {
      warning (_("Couldn't retrieve complete object of RTTI "
                 "type %s; object may be in register(s)."),
               real_type->name ());

      return argp;
    }

  /* All other cases -- retrieve the complete object.  */

  /* Go back by the computed top_offset from the beginning of the
     object, adjusting for the embedded offset of argp if that's what
     value_rtti_type used for its computation.  */
  new_val = value_at_lazy (real_type, value_address (argp) - top +
                           (using_enc ? 0 : value_embedded_offset (argp)));
  deprecated_set_value_type (new_val, value_type (argp));
  set_value_embedded_offset (new_val, (using_enc
                                       ? top + value_embedded_offset (argp)
                                       : top));
  return new_val;
}

/* Return the value of the local variable, if one exists.  Throw error
   otherwise, such as if the request is made in an inappropriate context.  */

struct value *
value_of_this (const struct language_defn *lang)
{
  struct block_symbol sym;
  const struct block *b;
  struct frame_info *frame;

  if (lang->name_of_this () == NULL)
    error (_("no `this' in current language"));

  frame = get_selected_frame (_("no frame selected"));

  b = get_frame_block (frame, NULL);

  sym = lookup_language_this (lang, b);
  if (sym.symbol == NULL)
    error (_("current stack frame does not contain a variable named `%s'"),
           lang->name_of_this ());

  return read_var_value (sym.symbol, sym.block, frame);
}

/* Return the value of the local variable, if one exists.  Return NULL
   otherwise.  Never throw error.  */

struct value *
value_of_this_silent (const struct language_defn *lang)
{
  struct value *ret = NULL;

  try
    {
      ret = value_of_this (lang);
    }
  catch (const gdb_exception_error &except)
    {
    }

  return ret;
}

/* Create a slice (sub-string, sub-array) of ARRAY, that is LENGTH
   elements long, starting at LOWBOUND.  The result has the same lower
   bound as the original ARRAY.  */
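
/* Illustrative example (editorial, not part of the original source):
   for a Fortran-style array "integer a(1:10)", slicing with
   LOWBOUND == 3 and LENGTH == 4 yields a 4-element array covering
   elements 3 through 6 that refers to the same storage as the
   original; element data is only copied if the source value had
   already been fetched from memory.  */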
struct value *
value_slice (struct value *array, int lowbound, int length)
{
  struct type *slice_range_type, *slice_type, *range_type;
  LONGEST lowerbound, upperbound;
  struct value *slice;
  struct type *array_type;

  array_type = check_typedef (value_type (array));
  if (array_type->code () != TYPE_CODE_ARRAY
      && array_type->code () != TYPE_CODE_STRING)
    error (_("cannot take slice of non-array"));

  if (type_not_allocated (array_type))
    error (_("array not allocated"));
  if (type_not_associated (array_type))
    error (_("array not associated"));

  range_type = array_type->index_type ();
  if (!get_discrete_bounds (range_type, &lowerbound, &upperbound))
    error (_("slice from bad array or bitstring"));

  if (lowbound < lowerbound || length < 0
      || lowbound + length - 1 > upperbound)
    error (_("slice out of range"));

  /* FIXME-type-allocation: need a way to free this type when we are
     done with it.  */
  slice_range_type = create_static_range_type (NULL,
                                               TYPE_TARGET_TYPE (range_type),
                                               lowbound,
                                               lowbound + length - 1);

  {
    struct type *element_type = TYPE_TARGET_TYPE (array_type);
    LONGEST offset
      = (lowbound - lowerbound) * TYPE_LENGTH (check_typedef (element_type));

    slice_type = create_array_type (NULL,
                                    element_type,
                                    slice_range_type);
    slice_type->set_code (array_type->code ());

    if (VALUE_LVAL (array) == lval_memory && value_lazy (array))
      slice = allocate_value_lazy (slice_type);
    else
      {
        slice = allocate_value (slice_type);
        value_contents_copy (slice, 0, array, offset,
                             type_length_units (slice_type));
      }

    set_value_component_location (slice, array);
    set_value_offset (slice, value_offset (array) + offset);
  }

  return slice;
}

/* See value.h.  */

struct value *
value_literal_complex (struct value *arg1,
                       struct value *arg2,
                       struct type *type)
{
  struct value *val;
  struct type *real_type = TYPE_TARGET_TYPE (type);

  val = allocate_value (type);
  arg1 = value_cast (real_type, arg1);
  arg2 = value_cast (real_type, arg2);

  int len = TYPE_LENGTH (real_type);
  copy (value_contents (arg1),
        value_contents_raw (val).slice (0, len));
  copy (value_contents (arg2),
        value_contents_raw (val).slice (len, len));

  return val;
}

/* See value.h.  */

struct value *
value_real_part (struct value *value)
{
  struct type *type = check_typedef (value_type (value));
  struct type *ttype = TYPE_TARGET_TYPE (type);

  gdb_assert (type->code () == TYPE_CODE_COMPLEX);
  return value_from_component (value, ttype, 0);
}

/* See value.h.  */

struct value *
value_imaginary_part (struct value *value)
{
  struct type *type = check_typedef (value_type (value));
  struct type *ttype = TYPE_TARGET_TYPE (type);

  gdb_assert (type->code () == TYPE_CODE_COMPLEX);
  return value_from_component (value, ttype,
                               TYPE_LENGTH (check_typedef (ttype)));
}

/* Cast a value into the appropriate complex data type.  */

static struct value *
cast_into_complex (struct type *type, struct value *val)
{
  struct type *real_type = TYPE_TARGET_TYPE (type);

  if (value_type (val)->code () == TYPE_CODE_COMPLEX)
    {
      struct type *val_real_type = TYPE_TARGET_TYPE (value_type (val));
      struct value *re_val = allocate_value (val_real_type);
      struct value *im_val = allocate_value (val_real_type);
      int len = TYPE_LENGTH (val_real_type);

      copy (value_contents (val).slice (0, len),
            value_contents_raw (re_val));
      copy (value_contents (val).slice (len, len),
            value_contents_raw (im_val));

      return value_literal_complex (re_val, im_val, type);
    }
  else if (value_type (val)->code () == TYPE_CODE_FLT
           || value_type (val)->code () == TYPE_CODE_INT)
    return value_literal_complex (val,
                                  value_zero (real_type, not_lval),
                                  type);
  else
    error (_("cannot cast non-number to complex"));
}

void _initialize_valops ();
void
_initialize_valops ()
{
  add_setshow_boolean_cmd ("overload-resolution", class_support,
                           &overload_resolution, _("\
Set overload resolution in evaluating C++ functions."), _("\
Show overload resolution in evaluating C++ functions."),
                           NULL, NULL,
                           show_overload_resolution,
                           &setlist, &showlist);

  overload_resolution = 1;
}