expr.h
/* Definitions for code generation pass of GNU compiler.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_EXPR_H
#define GCC_EXPR_H

/* This is the 4th arg to `expand_expr'.
   EXPAND_STACK_PARM means we are possibly expanding a call param onto
   the stack.
   EXPAND_SUM means it is ok to return a PLUS rtx or MULT rtx.
   EXPAND_INITIALIZER is similar but also record any labels on forced_labels.
   EXPAND_CONST_ADDRESS means it is ok to return a MEM whose address
   is a constant that is not a legitimate address.
   EXPAND_WRITE means we are only going to write to the resulting rtx.
   EXPAND_MEMORY means we are interested in a memory result, even if
   the memory is constant and we could have propagated a constant value,
   or the memory is unaligned on a STRICT_ALIGNMENT target.  */
enum expand_modifier {EXPAND_NORMAL = 0, EXPAND_STACK_PARM, EXPAND_SUM,
                      EXPAND_CONST_ADDRESS, EXPAND_INITIALIZER, EXPAND_WRITE,
                      EXPAND_MEMORY};

/* Prevent the compiler from deferring stack pops.  See
   inhibit_defer_pop for more information.  */
#define NO_DEFER_POP (inhibit_defer_pop += 1)

/* Allow the compiler to defer stack pops.  See inhibit_defer_pop for
   more information.  */
#define OK_DEFER_POP (inhibit_defer_pop -= 1)
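
/* Illustrative sketch, not part of the upstream header: callers bracket a
   region that must not have deferred stack pops pending, and the two
   macros must stay balanced since they merely adjust inhibit_defer_pop:

     NO_DEFER_POP;
     ... emit insns that require a settled stack pointer ...
     OK_DEFER_POP;  */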

/* This structure is used to pass around information about exploded
   unary, binary and trinary expressions between expand_expr_real_1 and
   friends.  */
typedef struct separate_ops
{
  enum tree_code code;
  location_t location;
  tree type;
  tree op0, op1, op2;
} *sepops;
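
/* Illustrative sketch, not part of the upstream header: a caller that
   already has the pieces of a binary expression in hand can expand it
   through expand_expr_real_2 (declared below) without rebuilding a tree,
   assuming ARG0, ARG1, TYPE and LOC are in scope:

     separate_ops ops;
     ops.code = PLUS_EXPR;
     ops.location = loc;
     ops.type = type;
     ops.op0 = arg0;
     ops.op1 = arg1;
     ops.op2 = NULL_TREE;
     rtx res = expand_expr_real_2 (&ops, NULL_RTX, TYPE_MODE (type),
                                   EXPAND_NORMAL);  */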

/* This is run during target initialization to set up which modes can be
   used directly in memory and to initialize the block move optab.  */
extern void init_expr_target (void);

/* This is run at the start of compiling a function.  */
extern void init_expr (void);

/* Emit some rtl insns to move data between rtx's, converting machine modes.
   Both modes must be floating or both fixed.  */
extern void convert_move (rtx, rtx, int);

/* Convert an rtx to specified machine mode and return the result.  */
extern rtx convert_to_mode (machine_mode, rtx, int);

/* Convert an rtx to MODE from OLDMODE and return the result.  */
extern rtx convert_modes (machine_mode, machine_mode, rtx, int);
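
/* Illustrative sketch, not part of the upstream header: widening a QImode
   value NARROW to SImode as unsigned:

     rtx wide = convert_to_mode (SImode, narrow, 1);

   convert_modes does the same job when the source mode cannot be read off
   the rtx itself, e.g. for a VOIDmode CONST_INT:

     rtx wide2 = convert_modes (SImode, QImode, GEN_INT (0x80), 1);  */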

/* Expand a call to memcpy or memmove or memcmp, and return the result.  */
extern rtx emit_block_op_via_libcall (enum built_in_function, rtx, rtx, rtx,
                                      bool);

static inline rtx
emit_block_copy_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCPY, dst, src, size, tailcall);
}

static inline rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMMOVE, dst, src, size, tailcall);
}

static inline rtx
emit_block_comp_via_libcall (rtx dst, rtx src, rtx size, bool tailcall = false)
{
  return emit_block_op_via_libcall (BUILT_IN_MEMCMP, dst, src, size, tailcall);
}
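
/* Illustrative sketch, not part of the upstream header: given BLKmode MEMs
   DST and SRC and an rtx SIZE in bytes, emit a memcpy call that may be
   turned into a tail call:

     rtx ret = emit_block_copy_via_libcall (dst, src, size, true);  */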

/* Emit code to move a block Y to a block X.  */
enum block_op_methods
{
  BLOCK_OP_NORMAL,
  BLOCK_OP_NO_LIBCALL,
  BLOCK_OP_CALL_PARM,
  /* Like BLOCK_OP_NORMAL, but the libcall can be tail call optimized.  */
  BLOCK_OP_TAILCALL,
  /* Like BLOCK_OP_NO_LIBCALL, but instead of emitting a libcall return
     pc_rtx to indicate nothing has been emitted and let the caller handle
     it.  */
  BLOCK_OP_NO_LIBCALL_RET
};

typedef rtx (*by_pieces_constfn) (void *, HOST_WIDE_INT, scalar_int_mode);

extern rtx emit_block_move (rtx, rtx, rtx, enum block_op_methods);
extern rtx emit_block_move_hints (rtx, rtx, rtx, enum block_op_methods,
                                  unsigned int, HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT,
                                  unsigned HOST_WIDE_INT);
extern rtx emit_block_cmp_hints (rtx, rtx, rtx, tree, rtx, bool,
                                 by_pieces_constfn, void *);
extern bool emit_storent_insn (rtx to, rtx from);
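
/* Illustrative sketch, not part of the upstream header: copy SIZE bytes
   from BLKmode MEM Y to BLKmode MEM X, letting the middle end choose
   between move-by-pieces, a block-move insn pattern and a memcpy libcall:

     rtx retval = emit_block_move (x, y, size, BLOCK_OP_NORMAL);  */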

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_to_reg (int, rtx, int, machine_mode);

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
extern void move_block_from_reg (int, rtx, int);

/* Generate a non-consecutive group of registers represented by a PARALLEL.  */
extern rtx gen_group_rtx (rtx);

/* Load a BLKmode value into non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_load (rtx, rtx, tree, poly_int64);

/* Similarly, but load into new temporaries.  */
extern rtx emit_group_load_into_temps (rtx, rtx, tree, poly_int64);

/* Move a non-consecutive group of registers represented by a PARALLEL into
   a non-consecutive group of registers represented by a PARALLEL.  */
extern void emit_group_move (rtx, rtx);

/* Move a group of registers represented by a PARALLEL into pseudos.  */
extern rtx emit_group_move_into_temps (rtx);

/* Store a BLKmode value from non-consecutive registers represented by a
   PARALLEL.  */
extern void emit_group_store (rtx, rtx, tree, poly_int64);

extern rtx maybe_emit_group_store (rtx, tree);
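
/* Illustrative sketch, not part of the upstream header: when a call
   returns an aggregate in a PARALLEL of registers, the caller typically
   spills it into a stack temporary MEM with

     emit_group_store (mem, parallel, type, int_size_in_bytes (type));

   and emit_group_load performs the inverse when such a value is passed.  */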

/* Mark REG as holding a parameter for the next CALL_INSN.
   Mode is TYPE_MODE of the non-promoted parameter, or VOIDmode.  */
extern void use_reg_mode (rtx *, rtx, machine_mode);
extern void clobber_reg_mode (rtx *, rtx, machine_mode);

extern rtx copy_blkmode_to_reg (machine_mode, tree);

/* Mark REG as holding a parameter for the next CALL_INSN.  */
static inline void
use_reg (rtx *fusage, rtx reg)
{
  use_reg_mode (fusage, reg, VOIDmode);
}

/* Mark REG as clobbered by the call with FUSAGE as CALL_INSN_FUNCTION_USAGE.  */
static inline void
clobber_reg (rtx *fusage, rtx reg)
{
  clobber_reg_mode (fusage, reg, VOIDmode);
}
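
/* Illustrative sketch, not part of the upstream header: expand_call builds
   up CALL_INSN_FUNCTION_USAGE by recording each argument register:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, argreg);
     ... attach call_fusage to the CALL_INSN once it is emitted ...  */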

/* Mark NREGS consecutive regs, starting at REGNO, as holding parameters
   for the next CALL_INSN.  */
extern void use_regs (rtx *, int, int);

/* Mark a PARALLEL as holding a parameter for the next CALL_INSN.  */
extern void use_group_regs (rtx *, rtx);

#ifdef GCC_INSN_CODES_H
extern rtx expand_cmpstrn_or_cmpmem (insn_code, rtx, rtx, rtx, tree, rtx,
                                     HOST_WIDE_INT);
#endif

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
extern rtx clear_storage (rtx, rtx, enum block_op_methods);
extern rtx clear_storage_hints (rtx, rtx, enum block_op_methods,
                                unsigned int, HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT,
                                unsigned HOST_WIDE_INT);

/* The same, but always output a library call.  */
extern rtx set_storage_via_libcall (rtx, rtx, rtx, bool = false);

/* Expand a setmem pattern; return true if successful.  */
extern bool set_storage_via_setmem (rtx, rtx, rtx, unsigned int,
                                    unsigned int, HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT,
                                    unsigned HOST_WIDE_INT);
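
/* Illustrative sketch, not part of the upstream header: zero a BLKmode
   object OBJ whose length in bytes is the rtx SIZE:

     clear_storage (obj, size, BLOCK_OP_NORMAL);  */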

/* Return nonzero if it is desirable to store LEN bytes generated by
   CONSTFUN with several move instructions by the store_by_pieces
   function.  CONSTFUNDATA is a pointer which will be passed as argument
   in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy
   of a const string.  */
extern int can_store_by_pieces (unsigned HOST_WIDE_INT,
                                by_pieces_constfn,
                                void *, unsigned int, bool);

/* Generate several move instructions to store LEN bytes generated by
   CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
   pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.
   MEMSETP is true if this is a real memset/bzero, not a copy.
   Returns TO + LEN.  */
extern rtx store_by_pieces (rtx, unsigned HOST_WIDE_INT, by_pieces_constfn,
                            void *, unsigned int, bool, memop_ret);
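
/* Illustrative sketch, not part of the upstream header (READ_ZERO is a
   made-up callback name): the by-pieces machinery calls CONSTFUN for the
   constant to store at each offset, in the integer mode chosen for that
   piece:

     static rtx
     read_zero (void *, HOST_WIDE_INT, scalar_int_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, read_zero, NULL, align, true))
       store_by_pieces (to, len, read_zero, NULL, align, true, RETURN_BEGIN);

   RETURN_BEGIN is one of the memop_ret enumerators used above.  */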

/* Emit insns to set X from Y.  */
extern rtx_insn *emit_move_insn (rtx, rtx);
extern rtx_insn *gen_move_insn (rtx, rtx);
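
/* Illustrative sketch, not part of the upstream header: load a constant
   into a fresh pseudo-register:

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));  */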

/* Emit insns to set X from Y, with no frills.  */
extern rtx_insn *emit_move_insn_1 (rtx, rtx);

extern rtx_insn *emit_move_complex_push (machine_mode, rtx, rtx);
extern rtx_insn *emit_move_complex_parts (rtx, rtx);
extern rtx read_complex_part (rtx, bool);
extern void write_complex_part (rtx, rtx, bool);
extern rtx emit_move_resolve_push (machine_mode, rtx);

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.  */
extern rtx push_block (rtx, poly_int64, int);

/* Generate code to push something onto the stack, given its mode and type.  */
extern bool emit_push_insn (rtx, machine_mode, tree, rtx, unsigned int,
                            int, rtx, poly_int64, rtx, rtx, int, rtx, bool);

/* Extract the accessible bit-range from a COMPONENT_REF.  */
extern void get_bit_range (poly_uint64_pod *, poly_uint64_pod *, tree,
                           poly_int64_pod *, tree *);

/* Expand an assignment that stores the value of FROM into TO.  */
extern void expand_assignment (tree, tree, bool);
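
/* Illustrative sketch, not part of the upstream header: cfgexpand expands
   a simple GIMPLE assignment LHS = RHS roughly as

     expand_assignment (lhs, rhs, false);

   where the final argument says the store is not nontemporal.  */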

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.  */
extern rtx store_expr (tree, rtx, int, bool, bool);

/* Given an rtx that may include add and multiply operations,
   generate them as insns and return a pseudo-reg containing the value.
   Useful after calling expand_expr with 1 as sum_ok.  */
extern rtx force_operand (rtx, rtx);
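
/* Illustrative sketch, not part of the upstream header: after expanding an
   address with EXPAND_SUM the result may still be a bare PLUS or MULT;
   force_operand turns it into insns and a pseudo:

     rtx addr = expand_expr (exp, NULL_RTX, Pmode, EXPAND_SUM);
     addr = force_operand (addr, NULL_RTX);  */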

/* Work horses for expand_expr.  */
extern rtx expand_expr_real (tree, rtx, machine_mode,
                             enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_1 (tree, rtx, machine_mode,
                               enum expand_modifier, rtx *, bool);
extern rtx expand_expr_real_2 (sepops, rtx, machine_mode,
                               enum expand_modifier);

/* Generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.  */
static inline rtx
expand_expr (tree exp, rtx target, machine_mode mode,
             enum expand_modifier modifier)
{
  return expand_expr_real (exp, target, mode, modifier, NULL, false);
}

static inline rtx
expand_normal (tree exp)
{
  return expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL, NULL, false);
}
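
/* Illustrative sketch, not part of the upstream header: the common pattern
   in the expanders is

     rtx op0 = expand_normal (treeop0);

   which is simply expand_expr with no target, no mode hint and
   EXPAND_NORMAL.  */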

/* Return the tree node and offset if a given argument corresponds to
   a string constant.  */
extern tree string_constant (tree, tree *, tree *, tree *);

extern enum tree_code maybe_optimize_mod_cmp (enum tree_code, tree *, tree *);

/* Two different ways of generating switch statements.  */
extern int try_casesi (tree, tree, tree, tree, rtx, rtx, rtx, profile_probability);
extern int try_tablejump (tree, tree, tree, tree, rtx, rtx, profile_probability);

extern int safe_from_p (const_rtx, tree, int);

/* Get the personality libfunc for a function decl.  */
rtx get_personality_function (tree);

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return true if a call to move_by_pieces should
   succeed.  */
extern bool can_move_by_pieces (unsigned HOST_WIDE_INT, unsigned int);

extern unsigned HOST_WIDE_INT highest_pow2_factor (const_tree);

extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
                                      HOST_WIDE_INT *, HOST_WIDE_INT *,
                                      bool *);

extern void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);

/* rtl.h and tree.h were included.  */
/* Return an rtx for the size in bytes of the value of an expr.  */
extern rtx expr_size (tree);

#endif /* GCC_EXPR_H */