| /* Expand builtin functions. |
| Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, |
| 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc. |
| |
| This file is part of GCC. |
| |
| GCC is free software; you can redistribute it and/or modify it under |
| the terms of the GNU General Public License as published by the Free |
| Software Foundation; either version 2, or (at your option) any later |
| version. |
| |
| GCC is distributed in the hope that it will be useful, but WITHOUT ANY |
| WARRANTY; without even the implied warranty of MERCHANTABILITY or |
| FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License |
| for more details. |
| |
| You should have received a copy of the GNU General Public License |
| along with GCC; see the file COPYING. If not, write to the Free |
| Software Foundation, 59 Temple Place - Suite 330, Boston, MA |
| 02111-1307, USA. */ |
| |
| #include "config.h" |
| #include "system.h" |
| #include "coretypes.h" |
| #include "tm.h" |
| #include "machmode.h" |
| #include "real.h" |
| #include "rtl.h" |
| #include "tree.h" |
| #include "tree-gimple.h" |
| #include "flags.h" |
| #include "regs.h" |
| #include "hard-reg-set.h" |
| #include "except.h" |
| #include "function.h" |
| #include "insn-config.h" |
| #include "expr.h" |
| #include "optabs.h" |
| #include "libfuncs.h" |
| #include "recog.h" |
| #include "output.h" |
| #include "typeclass.h" |
| #include "toplev.h" |
| #include "predict.h" |
| #include "tm_p.h" |
| #include "target.h" |
| #include "langhooks.h" |
| #include "basic-block.h" |
| #include "tree-mudflap.h" |
| |
| #define CALLED_AS_BUILT_IN(NODE) \ |
| (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10)) |
| |
| #ifndef PAD_VARARGS_DOWN |
| #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN |
| #endif |
| |
| /* Define the names of the builtin function types and codes. */ |
| const char *const built_in_class_names[4] |
| = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"}; |
| |
| #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X, |
| const char * built_in_names[(int) END_BUILTINS] = |
| { |
| #include "builtins.def" |
| }; |
| #undef DEF_BUILTIN |
| |
/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
| tree built_in_decls[(int) END_BUILTINS]; |
/* Declarations used when constructing the builtin implicitly in the compiler.
   It may be NULL_TREE when this is invalid (for instance the runtime is not
   required to implement the function call in all cases).  */
| tree implicit_built_in_decls[(int) END_BUILTINS]; |
| |
| /* APPLE LOCAL LLVM */ |
| int get_pointer_alignment (tree, unsigned int); |
| static const char *c_getstr (tree); |
| static rtx c_readstr (const char *, enum machine_mode); |
| static int target_char_cast (tree, char *); |
| static rtx get_memory_rtx (tree); |
| static tree build_string_literal (int, const char *); |
| static int apply_args_size (void); |
| static int apply_result_size (void); |
| #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) |
| static rtx result_vector (int, rtx); |
| #endif |
| static rtx expand_builtin_setjmp (tree, rtx); |
| static void expand_builtin_update_setjmp_buf (rtx); |
| static void expand_builtin_prefetch (tree); |
| static rtx expand_builtin_apply_args (void); |
| static rtx expand_builtin_apply_args_1 (void); |
| static rtx expand_builtin_apply (rtx, rtx, rtx); |
| static void expand_builtin_return (rtx); |
| static enum type_class type_to_class (tree); |
| static rtx expand_builtin_classify_type (tree); |
| static void expand_errno_check (tree, rtx); |
| static rtx expand_builtin_mathfn (tree, rtx, rtx); |
| static rtx expand_builtin_mathfn_2 (tree, rtx, rtx); |
| static rtx expand_builtin_mathfn_3 (tree, rtx, rtx); |
| static rtx expand_builtin_args_info (tree); |
| static rtx expand_builtin_next_arg (void); |
| static rtx expand_builtin_va_start (tree); |
| static rtx expand_builtin_va_end (tree); |
| static rtx expand_builtin_va_copy (tree); |
| static rtx expand_builtin_memcmp (tree, tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strcmp (tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode); |
| static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode); |
| static rtx expand_builtin_strcat (tree, tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strncat (tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strspn (tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strcspn (tree, rtx, enum machine_mode); |
| static rtx expand_builtin_memcpy (tree, rtx, enum machine_mode); |
| static rtx expand_builtin_mempcpy (tree, tree, rtx, enum machine_mode, int); |
| static rtx expand_builtin_memmove (tree, tree, rtx, enum machine_mode, tree); |
| static rtx expand_builtin_bcopy (tree); |
| static rtx expand_builtin_strcpy (tree, rtx, enum machine_mode); |
| static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode); |
| static rtx builtin_strncpy_read_str (void *, HOST_WIDE_INT, enum machine_mode); |
| static rtx expand_builtin_strncpy (tree, rtx, enum machine_mode); |
| static rtx builtin_memset_read_str (void *, HOST_WIDE_INT, enum machine_mode); |
| static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode); |
| static rtx expand_builtin_memset (tree, rtx, enum machine_mode, tree); |
| static rtx expand_builtin_bzero (tree); |
| static rtx expand_builtin_strlen (tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strstr (tree, tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strpbrk (tree, tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strchr (tree, tree, rtx, enum machine_mode); |
| static rtx expand_builtin_strrchr (tree, tree, rtx, enum machine_mode); |
| static rtx expand_builtin_alloca (tree, rtx); |
| static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab); |
| static rtx expand_builtin_frame_address (tree, tree); |
| static rtx expand_builtin_fputs (tree, rtx, bool); |
| static rtx expand_builtin_printf (tree, rtx, enum machine_mode, bool); |
| static rtx expand_builtin_fprintf (tree, rtx, enum machine_mode, bool); |
| static rtx expand_builtin_sprintf (tree, rtx, enum machine_mode); |
| static tree stabilize_va_list (tree, int); |
| static rtx expand_builtin_expect (tree, rtx); |
| static tree fold_builtin_constant_p (tree); |
| static tree fold_builtin_classify_type (tree); |
| static tree fold_builtin_strlen (tree); |
| static tree fold_builtin_inf (tree, int); |
| static tree fold_builtin_nan (tree, tree, int); |
| /* APPLE LOCAL begin LLVM */ |
| int validate_arglist (tree, ...); |
| /* APPLE LOCAL end LLVM */ |
| static bool integer_valued_real_p (tree); |
| static tree fold_trunc_transparent_mathfn (tree); |
| static bool readonly_data_expr (tree); |
| static rtx expand_builtin_fabs (tree, rtx, rtx); |
| static rtx expand_builtin_signbit (tree, rtx); |
| static tree fold_builtin_cabs (tree, tree); |
| static tree fold_builtin_sqrt (tree, tree); |
| static tree fold_builtin_cbrt (tree, tree); |
| static tree fold_builtin_pow (tree, tree, tree); |
| static tree fold_builtin_powi (tree, tree, tree); |
| static tree fold_builtin_sin (tree); |
| static tree fold_builtin_cos (tree, tree, tree); |
| static tree fold_builtin_tan (tree); |
| static tree fold_builtin_atan (tree, tree); |
| static tree fold_builtin_trunc (tree); |
| static tree fold_builtin_floor (tree); |
| static tree fold_builtin_ceil (tree); |
| static tree fold_builtin_round (tree); |
| static tree fold_builtin_bitop (tree); |
| static tree fold_builtin_memcpy (tree); |
| static tree fold_builtin_mempcpy (tree, tree, int); |
| static tree fold_builtin_memmove (tree, tree); |
| static tree fold_builtin_strchr (tree, tree); |
| static tree fold_builtin_memcmp (tree); |
| static tree fold_builtin_strcmp (tree); |
| static tree fold_builtin_strncmp (tree); |
| static tree fold_builtin_signbit (tree); |
| static tree fold_builtin_copysign (tree, tree, tree); |
| static tree fold_builtin_isascii (tree); |
| static tree fold_builtin_toascii (tree); |
| static tree fold_builtin_isdigit (tree); |
| static tree fold_builtin_fabs (tree, tree); |
| static tree fold_builtin_abs (tree, tree); |
| static tree fold_builtin_unordered_cmp (tree, enum tree_code, enum tree_code); |
| static tree fold_builtin_1 (tree, bool); |
| |
| static tree fold_builtin_strpbrk (tree, tree); |
| static tree fold_builtin_strstr (tree, tree); |
| static tree fold_builtin_strrchr (tree, tree); |
| static tree fold_builtin_strcat (tree); |
| static tree fold_builtin_strncat (tree); |
| static tree fold_builtin_strspn (tree); |
| static tree fold_builtin_strcspn (tree); |
| static tree fold_builtin_sprintf (tree, int); |
| |
| /* APPLE LOCAL begin mainline */ |
| static rtx expand_builtin_object_size (tree); |
| static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode, |
| enum built_in_function); |
| static void maybe_emit_chk_warning (tree, enum built_in_function); |
| static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function); |
| static tree fold_builtin_object_size (tree); |
| static tree fold_builtin_strcat_chk (tree, tree); |
| static tree fold_builtin_strncat_chk (tree, tree); |
| static tree fold_builtin_sprintf_chk (tree, enum built_in_function); |
| static tree fold_builtin_printf (tree, tree, bool, enum built_in_function); |
| static tree fold_builtin_fprintf (tree, tree, bool, enum built_in_function); |
| static bool init_target_chars (void); |
| |
| static unsigned HOST_WIDE_INT target_newline; |
| static unsigned HOST_WIDE_INT target_percent; |
| static unsigned HOST_WIDE_INT target_c; |
| static unsigned HOST_WIDE_INT target_s; |
| static char target_percent_c[3]; |
| static char target_percent_s[3]; |
| static char target_percent_s_newline[4]; |
| /* APPLE LOCAL end mainline */ |
| |
| /* Return the alignment in bits of EXP, a pointer valued expression. |
| But don't return more than MAX_ALIGN no matter what. |
| The alignment returned is, by default, the alignment of the thing that |
| EXP points to. If it is not a POINTER_TYPE, 0 is returned. |
| |
| Otherwise, look at the expression to see if we can do better, i.e., if the |
| expression is actually pointing at an object whose alignment is tighter. */ |
| |
| /* APPLE LOCAL LLVM */ |
| int |
| get_pointer_alignment (tree exp, unsigned int max_align) |
| { |
| unsigned int align, inner; |
| |
| if (! POINTER_TYPE_P (TREE_TYPE (exp))) |
| return 0; |
| |
| align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); |
| align = MIN (align, max_align); |
| |
| while (1) |
| { |
| switch (TREE_CODE (exp)) |
| { |
| case NOP_EXPR: |
| case CONVERT_EXPR: |
| case NON_LVALUE_EXPR: |
| exp = TREE_OPERAND (exp, 0); |
| if (! POINTER_TYPE_P (TREE_TYPE (exp))) |
| return align; |
| |
| inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))); |
| align = MIN (inner, max_align); |
| break; |
| |
| case PLUS_EXPR: |
| /* If sum of pointer + int, restrict our maximum alignment to that |
| imposed by the integer. If not, we can't do any better than |
| ALIGN. */ |
| if (! host_integerp (TREE_OPERAND (exp, 1), 1)) |
| return align; |
| |
| while (((tree_low_cst (TREE_OPERAND (exp, 1), 1)) |
| & (max_align / BITS_PER_UNIT - 1)) |
| != 0) |
| max_align >>= 1; |
| |
| exp = TREE_OPERAND (exp, 0); |
| break; |
| |
| case ADDR_EXPR: |
| /* See what we are pointing at and look at its alignment. */ |
| exp = TREE_OPERAND (exp, 0); |
| if (TREE_CODE (exp) == FUNCTION_DECL) |
| align = FUNCTION_BOUNDARY; |
| else if (DECL_P (exp)) |
| align = DECL_ALIGN (exp); |
| #ifdef CONSTANT_ALIGNMENT |
| else if (CONSTANT_CLASS_P (exp)) |
| align = CONSTANT_ALIGNMENT (exp, align); |
| #endif |
| return MIN (align, max_align); |
| |
| default: |
| return align; |
| } |
| } |
| } |
| |
| /* Compute the length of a C string. TREE_STRING_LENGTH is not the right |
| way, because it could contain a zero byte in the middle. |
| TREE_STRING_LENGTH is the size of the character array, not the string. |
| |
| ONLY_VALUE should be nonzero if the result is not going to be emitted |
| into the instruction stream and zero if it is going to be expanded. |
| E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3 |
| is returned, otherwise NULL, since |
| len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not |
| evaluate the side-effects. |
| |
| The value returned is of type `ssizetype'. |
| |
| Unfortunately, string_constant can't access the values of const char |
| arrays with initializers, so neither can we do so here. */ |
| |
| tree |
| c_strlen (tree src, int only_value) |
| { |
| tree offset_node; |
| HOST_WIDE_INT offset; |
| int max; |
| const char *ptr; |
| |
| STRIP_NOPS (src); |
| if (TREE_CODE (src) == COND_EXPR |
| && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0)))) |
| { |
| tree len1, len2; |
| |
| len1 = c_strlen (TREE_OPERAND (src, 1), only_value); |
| len2 = c_strlen (TREE_OPERAND (src, 2), only_value); |
| if (tree_int_cst_equal (len1, len2)) |
| return len1; |
| } |
| |
| if (TREE_CODE (src) == COMPOUND_EXPR |
| && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0)))) |
| return c_strlen (TREE_OPERAND (src, 1), only_value); |
| |
| src = string_constant (src, &offset_node); |
| if (src == 0) |
| return 0; |
| |
| max = TREE_STRING_LENGTH (src) - 1; |
| ptr = TREE_STRING_POINTER (src); |
| |
| if (offset_node && TREE_CODE (offset_node) != INTEGER_CST) |
| { |
| /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't |
| compute the offset to the following null if we don't know where to |
| start searching for it. */ |
| int i; |
| |
| for (i = 0; i < max; i++) |
| if (ptr[i] == 0) |
| return 0; |
| |
| /* We don't know the starting offset, but we do know that the string |
| has no internal zero bytes. We can assume that the offset falls |
| within the bounds of the string; otherwise, the programmer deserves |
| what he gets. Subtract the offset from the length of the string, |
| and return that. This would perhaps not be valid if we were dealing |
| with named arrays in addition to literal string constants. */ |
| |
| return size_diffop (size_int (max), offset_node); |
| } |
| |
| /* We have a known offset into the string. Start searching there for |
| a null character if we can represent it as a single HOST_WIDE_INT. */ |
| if (offset_node == 0) |
| offset = 0; |
| else if (! host_integerp (offset_node, 0)) |
| offset = -1; |
| else |
| offset = tree_low_cst (offset_node, 0); |
| |
| /* If the offset is known to be out of bounds, warn, and call strlen at |
| runtime. */ |
| if (offset < 0 || offset > max) |
| { |
| warning ("offset outside bounds of constant string"); |
| return 0; |
| } |
| |
| /* Use strlen to search for the first zero byte. Since any strings |
| constructed with build_string will have nulls appended, we win even |
| if we get handed something like (char[4])"abcd". |
| |
| Since OFFSET is our starting index into the string, no further |
| calculation is needed. */ |
| return ssize_int (strlen (ptr + offset)); |
| } |
| |
| /* Return a char pointer for a C string if it is a string constant |
| or sum of string constant and integer constant. */ |
| |
| static const char * |
| c_getstr (tree src) |
| { |
| tree offset_node; |
| |
| src = string_constant (src, &offset_node); |
| if (src == 0) |
| return 0; |
| |
| if (offset_node == 0) |
| return TREE_STRING_POINTER (src); |
| else if (!host_integerp (offset_node, 1) |
| || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0) |
| return 0; |
| |
| return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1); |
| } |
| |
| /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading |
| GET_MODE_BITSIZE (MODE) bits from string constant STR. */ |
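/* For example, on an ASCII host with STR = "ab" and a four-byte
   little-endian SImode, the result is a CONST_INT with value 0x6261:
   'a' lands in the low byte, 'b' in the next one, and the bytes past
   the terminating NUL are zero-filled.  */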
| |
| static rtx |
| c_readstr (const char *str, enum machine_mode mode) |
| { |
| HOST_WIDE_INT c[2]; |
| HOST_WIDE_INT ch; |
| unsigned int i, j; |
| |
| gcc_assert (GET_MODE_CLASS (mode) == MODE_INT); |
| |
| c[0] = 0; |
| c[1] = 0; |
| ch = 1; |
| for (i = 0; i < GET_MODE_SIZE (mode); i++) |
| { |
| j = i; |
| if (WORDS_BIG_ENDIAN) |
| j = GET_MODE_SIZE (mode) - i - 1; |
| if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN |
| && GET_MODE_SIZE (mode) > UNITS_PER_WORD) |
| j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1; |
| j *= BITS_PER_UNIT; |
| gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT); |
| |
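      /* CH becomes zero at the terminating NUL and stays zero, so any
         remaining target bytes are left zero-filled.  */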
| if (ch) |
| ch = (unsigned char) str[i]; |
| c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT); |
| } |
| return immed_double_const (c[0], c[1], mode); |
| } |
| |
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */
| |
| static int |
| target_char_cast (tree cst, char *p) |
| { |
| unsigned HOST_WIDE_INT val, hostval; |
| |
| if (!host_integerp (cst, 1) |
| || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT) |
| return 1; |
| |
| val = tree_low_cst (cst, 1); |
| if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT) |
| val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1; |
| |
| hostval = val; |
| if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT) |
| hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1; |
| |
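  /* If masking to the host character width changed the value, the
     target character does not fit in a host char.  */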
| if (val != hostval) |
| return 1; |
| |
| *p = hostval; |
| return 0; |
| } |
| |
| /* Similar to save_expr, but assumes that arbitrary code is not executed |
| in between the multiple evaluations. In particular, we assume that a |
| non-addressable local variable will not be modified. */ |
| |
| static tree |
| builtin_save_expr (tree exp) |
| { |
| if (TREE_ADDRESSABLE (exp) == 0 |
| && (TREE_CODE (exp) == PARM_DECL |
| || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))) |
| return exp; |
| |
| return save_expr (exp); |
| } |
| |
| /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT |
| times to get the address of either a higher stack frame, or a return |
| address located within it (depending on FNDECL_CODE). */ |
| |
| static rtx |
| expand_builtin_return_addr (enum built_in_function fndecl_code, int count) |
| { |
| int i; |
| |
| #ifdef INITIAL_FRAME_ADDRESS_RTX |
| rtx tem = INITIAL_FRAME_ADDRESS_RTX; |
| #else |
| rtx tem = hard_frame_pointer_rtx; |
| #endif |
| |
| /* Some machines need special handling before we can access |
| arbitrary frames. For example, on the sparc, we must first flush |
| all register windows to the stack. */ |
| #ifdef SETUP_FRAME_ADDRESSES |
| if (count > 0) |
| SETUP_FRAME_ADDRESSES (); |
| #endif |
| |
| /* On the sparc, the return address is not in the frame, it is in a |
| register. There is no way to access it off of the current frame |
| pointer, but it can be accessed off the previous frame pointer by |
| reading the value from the register window save area. */ |
| #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME |
| if (fndecl_code == BUILT_IN_RETURN_ADDRESS) |
| count--; |
| #endif |
| |
| /* Scan back COUNT frames to the specified frame. */ |
| for (i = 0; i < count; i++) |
| { |
| /* Assume the dynamic chain pointer is in the word that the |
| frame address points to, unless otherwise specified. */ |
| #ifdef DYNAMIC_CHAIN_ADDRESS |
| tem = DYNAMIC_CHAIN_ADDRESS (tem); |
| #endif |
| tem = memory_address (Pmode, tem); |
| tem = gen_rtx_MEM (Pmode, tem); |
| set_mem_alias_set (tem, get_frame_alias_set ()); |
| tem = copy_to_reg (tem); |
| } |
| |
| /* For __builtin_frame_address, return what we've got. */ |
| if (fndecl_code == BUILT_IN_FRAME_ADDRESS) |
| return tem; |
| |
  /* For __builtin_return_address, get the return address from that
     frame.  */
| #ifdef RETURN_ADDR_RTX |
| tem = RETURN_ADDR_RTX (count, tem); |
| #else |
| tem = memory_address (Pmode, |
| plus_constant (tem, GET_MODE_SIZE (Pmode))); |
| tem = gen_rtx_MEM (Pmode, tem); |
| set_mem_alias_set (tem, get_frame_alias_set ()); |
| #endif |
| return tem; |
| } |
| |
| /* Alias set used for setjmp buffer. */ |
| static HOST_WIDE_INT setjmp_alias_set = -1; |
| |
| /* Construct the leading half of a __builtin_setjmp call. Control will |
| return to RECEIVER_LABEL. This is used directly by sjlj exception |
| handling code. */ |
| |
| void |
| expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label) |
| { |
| enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); |
| rtx stack_save; |
| rtx mem; |
| |
| if (setjmp_alias_set == -1) |
| setjmp_alias_set = new_alias_set (); |
| |
| buf_addr = convert_memory_address (Pmode, buf_addr); |
| |
| buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX)); |
| |
| /* We store the frame pointer and the address of receiver_label in |
| the buffer and use the rest of it for the stack save area, which |
| is machine-dependent. */ |
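  /* The resulting layout, in Pmode-sized words, is:
       word 0:  the frame pointer value,
       word 1:  the address of RECEIVER_LABEL,
       word 2+: the machine-dependent stack save area.  */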
| |
| mem = gen_rtx_MEM (Pmode, buf_addr); |
| set_mem_alias_set (mem, setjmp_alias_set); |
| emit_move_insn (mem, targetm.builtin_setjmp_frame_value ()); |
| |
  mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode)));
| set_mem_alias_set (mem, setjmp_alias_set); |
| |
| emit_move_insn (validize_mem (mem), |
| force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label))); |
| |
| stack_save = gen_rtx_MEM (sa_mode, |
| plus_constant (buf_addr, |
| 2 * GET_MODE_SIZE (Pmode))); |
| set_mem_alias_set (stack_save, setjmp_alias_set); |
| emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX); |
| |
| /* If there is further processing to do, do it. */ |
| #ifdef HAVE_builtin_setjmp_setup |
| if (HAVE_builtin_setjmp_setup) |
| emit_insn (gen_builtin_setjmp_setup (buf_addr)); |
| #endif |
| |
  /* Tell optimize_save_area_alloca that extra work will need to
     be done during alloca.  */
| current_function_calls_setjmp = 1; |
| |
| /* Set this so all the registers get saved in our frame; we need to be |
| able to copy the saved values for any registers from frames we unwind. */ |
| current_function_has_nonlocal_label = 1; |
| } |
| |
| /* Construct the trailing part of a __builtin_setjmp call. |
| This is used directly by sjlj exception handling code. */ |
| |
| void |
| expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED) |
| { |
| /* Clobber the FP when we get here, so we have to make sure it's |
| marked as used by this function. */ |
| emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); |
| |
| /* Mark the static chain as clobbered here so life information |
| doesn't get messed up for it. */ |
| emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx)); |
| |
  /* Now put in the code to restore the frame pointer, and the
     argument pointer if needed.  */
| #ifdef HAVE_nonlocal_goto |
| if (! HAVE_nonlocal_goto) |
| #endif |
| /* APPLE LOCAL begin mainline */ |
| { |
| emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx); |
| /* This might change the hard frame pointer in ways that aren't |
| apparent to early optimization passes, so force a clobber. */ |
| emit_insn (gen_rtx_CLOBBER (VOIDmode, hard_frame_pointer_rtx)); |
| } |
| /* APPLE LOCAL end mainline */ |
| |
| #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM |
| if (fixed_regs[ARG_POINTER_REGNUM]) |
| { |
| #ifdef ELIMINABLE_REGS |
| size_t i; |
| static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS; |
| |
| for (i = 0; i < ARRAY_SIZE (elim_regs); i++) |
| if (elim_regs[i].from == ARG_POINTER_REGNUM |
| && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM) |
| break; |
| |
| if (i == ARRAY_SIZE (elim_regs)) |
| #endif |
| { |
| /* Now restore our arg pointer from the address at which it |
| was saved in our stack frame. */ |
| emit_move_insn (virtual_incoming_args_rtx, |
| copy_to_reg (get_arg_pointer_save_area (cfun))); |
| } |
| } |
| #endif |
| |
| #ifdef HAVE_builtin_setjmp_receiver |
| if (HAVE_builtin_setjmp_receiver) |
| emit_insn (gen_builtin_setjmp_receiver (receiver_label)); |
| else |
| #endif |
| #ifdef HAVE_nonlocal_goto_receiver |
| if (HAVE_nonlocal_goto_receiver) |
| emit_insn (gen_nonlocal_goto_receiver ()); |
| else |
| #endif |
| { /* Nothing */ } |
| |
| /* @@@ This is a kludge. Not all machine descriptions define a blockage |
| insn, but we must not allow the code we just generated to be reordered |
| by scheduling. Specifically, the update of the frame pointer must |
     happen immediately, not later.  So emit an ASM_INPUT to act as a
     blockage insn.  */
| emit_insn (gen_rtx_ASM_INPUT (VOIDmode, "")); |
| } |
| |
| /* __builtin_setjmp is passed a pointer to an array of five words (not |
| all will be used on all machines). It operates similarly to the C |
| library function of the same name, but is more efficient. Much of |
| the code below (and for longjmp) is copied from the handling of |
| non-local gotos. |
| |
| NOTE: This is intended for use by GNAT and the exception handling |
| scheme in the compiler and will only work in the method used by |
| them. */ |
| |
| static rtx |
| expand_builtin_setjmp (tree arglist, rtx target) |
| { |
| rtx buf_addr, next_lab, cont_lab; |
| |
| if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| if (target == 0 || !REG_P (target) |
| || REGNO (target) < FIRST_PSEUDO_REGISTER) |
| target = gen_reg_rtx (TYPE_MODE (integer_type_node)); |
| |
| buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0); |
| |
| next_lab = gen_label_rtx (); |
| cont_lab = gen_label_rtx (); |
| |
| expand_builtin_setjmp_setup (buf_addr, next_lab); |
| |
| /* Set TARGET to zero and branch to the continue label. Use emit_jump to |
| ensure that pending stack adjustments are flushed. */ |
| emit_move_insn (target, const0_rtx); |
| emit_jump (cont_lab); |
| |
| emit_label (next_lab); |
| |
| /* APPLE LOCAL begin mainline */ |
| /* Because setjmp and longjmp are not represented in the CFG, a cfgcleanup |
| may find that the basic block starting with NEXT_LAB is unreachable. |
| The whole block, along with NEXT_LAB, would be removed (see PR26983). |
| Make sure that never happens. */ |
| LABEL_PRESERVE_P (next_lab) = 1; |
| /* APPLE LOCAL end mainline */ |
| expand_builtin_setjmp_receiver (next_lab); |
| |
| /* Set TARGET to one. */ |
| emit_move_insn (target, const1_rtx); |
| emit_label (cont_lab); |
| |
  /* Tell flow about the strange goings on.  Putting `next_lab' on
     `nonlocal_goto_handler_labels' indicates that function
     calls may traverse the arc back to this label.  */
| |
| current_function_has_nonlocal_label = 1; |
| nonlocal_goto_handler_labels |
| = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels); |
| |
| return target; |
| } |
| |
| /* __builtin_longjmp is passed a pointer to an array of five words (not |
| all will be used on all machines). It operates similarly to the C |
| library function of the same name, but is more efficient. Much of |
| the code below is copied from the handling of non-local gotos. |
| |
| NOTE: This is intended for use by GNAT and the exception handling |
| scheme in the compiler and will only work in the method used by |
| them. */ |
| |
| static void |
| expand_builtin_longjmp (rtx buf_addr, rtx value) |
| { |
| rtx fp, lab, stack, insn, last; |
| enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); |
| |
| if (setjmp_alias_set == -1) |
| setjmp_alias_set = new_alias_set (); |
| |
| buf_addr = convert_memory_address (Pmode, buf_addr); |
| |
| buf_addr = force_reg (Pmode, buf_addr); |
| |
| /* We used to store value in static_chain_rtx, but that fails if pointers |
| are smaller than integers. We instead require that the user must pass |
| a second argument of 1, because that is what builtin_setjmp will |
| return. This also makes EH slightly more efficient, since we are no |
| longer copying around a value that we don't care about. */ |
| gcc_assert (value == const1_rtx); |
| |
| last = get_last_insn (); |
| #ifdef HAVE_builtin_longjmp |
| if (HAVE_builtin_longjmp) |
| emit_insn (gen_builtin_longjmp (buf_addr)); |
| else |
| #endif |
| { |
| fp = gen_rtx_MEM (Pmode, buf_addr); |
| lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr, |
| GET_MODE_SIZE (Pmode))); |
| |
| stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr, |
| 2 * GET_MODE_SIZE (Pmode))); |
| set_mem_alias_set (fp, setjmp_alias_set); |
| set_mem_alias_set (lab, setjmp_alias_set); |
| set_mem_alias_set (stack, setjmp_alias_set); |
| |
| /* Pick up FP, label, and SP from the block and jump. This code is |
| from expand_goto in stmt.c; see there for detailed comments. */ |
| #if HAVE_nonlocal_goto |
| if (HAVE_nonlocal_goto) |
| /* We have to pass a value to the nonlocal_goto pattern that will |
| get copied into the static_chain pointer, but it does not matter |
| what that value is, because builtin_setjmp does not use it. */ |
| emit_insn (gen_nonlocal_goto (value, lab, stack, fp)); |
| else |
| #endif |
| { |
| lab = copy_to_reg (lab); |
| |
| emit_insn (gen_rtx_CLOBBER (VOIDmode, |
| gen_rtx_MEM (BLKmode, |
| gen_rtx_SCRATCH (VOIDmode)))); |
| emit_insn (gen_rtx_CLOBBER (VOIDmode, |
| gen_rtx_MEM (BLKmode, |
| hard_frame_pointer_rtx))); |
| |
| emit_move_insn (hard_frame_pointer_rtx, fp); |
| emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX); |
| |
| emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); |
| emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx)); |
| emit_indirect_jump (lab); |
| } |
| } |
| |
| /* Search backwards and mark the jump insn as a non-local goto. |
| Note that this precludes the use of __builtin_longjmp to a |
| __builtin_setjmp target in the same function. However, we've |
| already cautioned the user that these functions are for |
| internal exception handling use only. */ |
| for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) |
| { |
| gcc_assert (insn != last); |
| |
| if (JUMP_P (insn)) |
| { |
| REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx, |
| REG_NOTES (insn)); |
| break; |
| } |
| else if (CALL_P (insn)) |
| break; |
| } |
| } |
| |
| /* Expand a call to __builtin_nonlocal_goto. We're passed the target label |
| and the address of the save area. */ |
| |
| static rtx |
| expand_builtin_nonlocal_goto (tree arglist) |
| { |
| tree t_label, t_save_area; |
| rtx r_label, r_save_area, r_fp, r_sp, insn; |
| |
| if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| t_label = TREE_VALUE (arglist); |
| arglist = TREE_CHAIN (arglist); |
| t_save_area = TREE_VALUE (arglist); |
| |
| r_label = expand_expr (t_label, NULL_RTX, VOIDmode, 0); |
| r_label = convert_memory_address (Pmode, r_label); |
| r_save_area = expand_expr (t_save_area, NULL_RTX, VOIDmode, 0); |
| r_save_area = convert_memory_address (Pmode, r_save_area); |
| r_fp = gen_rtx_MEM (Pmode, r_save_area); |
| r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL), |
| plus_constant (r_save_area, GET_MODE_SIZE (Pmode))); |
| |
| current_function_has_nonlocal_goto = 1; |
| |
| #if HAVE_nonlocal_goto |
| /* ??? We no longer need to pass the static chain value, afaik. */ |
| if (HAVE_nonlocal_goto) |
| emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp)); |
| else |
| #endif |
| { |
| r_label = copy_to_reg (r_label); |
| |
| emit_insn (gen_rtx_CLOBBER (VOIDmode, |
| gen_rtx_MEM (BLKmode, |
| gen_rtx_SCRATCH (VOIDmode)))); |
| |
| emit_insn (gen_rtx_CLOBBER (VOIDmode, |
| gen_rtx_MEM (BLKmode, |
| hard_frame_pointer_rtx))); |
| |
| /* Restore frame pointer for containing function. |
| This sets the actual hard register used for the frame pointer |
| to the location of the function's incoming static chain info. |
| The non-local goto handler will then adjust it to contain the |
| proper value and reload the argument pointer, if needed. */ |
| emit_move_insn (hard_frame_pointer_rtx, r_fp); |
| emit_stack_restore (SAVE_NONLOCAL, r_sp, NULL_RTX); |
| |
| /* USE of hard_frame_pointer_rtx added for consistency; |
| not clear if really needed. */ |
| emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx)); |
| emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx)); |
| emit_indirect_jump (r_label); |
| } |
| |
| /* Search backwards to the jump insn and mark it as a |
| non-local goto. */ |
| for (insn = get_last_insn (); insn; insn = PREV_INSN (insn)) |
| { |
| if (JUMP_P (insn)) |
| { |
| REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, |
| const0_rtx, REG_NOTES (insn)); |
| break; |
| } |
| else if (CALL_P (insn)) |
| break; |
| } |
| |
| return const0_rtx; |
| } |
| |
| /* __builtin_update_setjmp_buf is passed a pointer to an array of five words |
| (not all will be used on all machines) that was passed to __builtin_setjmp. |
| It updates the stack pointer in that block to correspond to the current |
| stack pointer. */ |
| |
| static void |
| expand_builtin_update_setjmp_buf (rtx buf_addr) |
| { |
| enum machine_mode sa_mode = Pmode; |
  rtx stack_save;

| #ifdef HAVE_save_stack_nonlocal |
| if (HAVE_save_stack_nonlocal) |
| sa_mode = insn_data[(int) CODE_FOR_save_stack_nonlocal].operand[0].mode; |
| #endif |
| #ifdef STACK_SAVEAREA_MODE |
| sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL); |
| #endif |
| |
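  /* The stack save area lives two pointer-sized words into the buffer,
     after the frame pointer and receiver label words laid down by
     expand_builtin_setjmp_setup.  */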
| stack_save |
| = gen_rtx_MEM (sa_mode, |
| memory_address |
| (sa_mode, |
| plus_constant (buf_addr, 2 * GET_MODE_SIZE (Pmode)))); |
| |
| #ifdef HAVE_setjmp |
| if (HAVE_setjmp) |
| emit_insn (gen_setjmp ()); |
| #endif |
| |
| emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX); |
| } |
| |
| /* Expand a call to __builtin_prefetch. For a target that does not support |
| data prefetch, evaluate the memory address argument in case it has side |
| effects. */ |
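/* For illustration, a source-level use looks like

     __builtin_prefetch (&a[i], 0, 3);

   where the optional second argument selects a read (0) or write (1)
   prefetch and the optional third argument gives the degree of
   temporal locality, from 0 (none) to 3 (high).  */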
| |
| static void |
| expand_builtin_prefetch (tree arglist) |
| { |
| tree arg0, arg1, arg2; |
| rtx op0, op1, op2; |
| |
| if (!validate_arglist (arglist, POINTER_TYPE, 0)) |
| return; |
| |
| arg0 = TREE_VALUE (arglist); |
| /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to |
| zero (read) and argument 2 (locality) defaults to 3 (high degree of |
| locality). */ |
| if (TREE_CHAIN (arglist)) |
| { |
| arg1 = TREE_VALUE (TREE_CHAIN (arglist)); |
| if (TREE_CHAIN (TREE_CHAIN (arglist))) |
| arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); |
| else |
| arg2 = build_int_cst (NULL_TREE, 3); |
| } |
| else |
| { |
| arg1 = integer_zero_node; |
| arg2 = build_int_cst (NULL_TREE, 3); |
| } |
| |
| /* Argument 0 is an address. */ |
| op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL); |
| |
| /* Argument 1 (read/write flag) must be a compile-time constant int. */ |
| if (TREE_CODE (arg1) != INTEGER_CST) |
| { |
| error ("second argument to %<__builtin_prefetch%> must be a constant"); |
| arg1 = integer_zero_node; |
| } |
| op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0); |
| /* Argument 1 must be either zero or one. */ |
| if (INTVAL (op1) != 0 && INTVAL (op1) != 1) |
| { |
| warning ("invalid second argument to %<__builtin_prefetch%>;" |
| " using zero"); |
| op1 = const0_rtx; |
| } |
| |
| /* Argument 2 (locality) must be a compile-time constant int. */ |
| if (TREE_CODE (arg2) != INTEGER_CST) |
| { |
| error ("third argument to %<__builtin_prefetch%> must be a constant"); |
| arg2 = integer_zero_node; |
| } |
| op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0); |
| /* Argument 2 must be 0, 1, 2, or 3. */ |
| if (INTVAL (op2) < 0 || INTVAL (op2) > 3) |
| { |
| warning ("invalid third argument to %<__builtin_prefetch%>; using zero"); |
| op2 = const0_rtx; |
| } |
| |
| #ifdef HAVE_prefetch |
| if (HAVE_prefetch) |
| { |
| if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate) |
| (op0, |
| insn_data[(int) CODE_FOR_prefetch].operand[0].mode)) |
| || (GET_MODE (op0) != Pmode)) |
| { |
| op0 = convert_memory_address (Pmode, op0); |
| op0 = force_reg (Pmode, op0); |
| } |
| emit_insn (gen_prefetch (op0, op1, op2)); |
| } |
| #endif |
| |
| /* Don't do anything with direct references to volatile memory, but |
| generate code to handle other side effects. */ |
| if (!MEM_P (op0) && side_effects_p (op0)) |
| emit_insn (op0); |
| } |
| |
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ...).  */
| |
| static rtx |
| get_memory_rtx (tree exp) |
| { |
| rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL); |
| rtx mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr)); |
| |
| /* Get an expression we can use to find the attributes to assign to MEM. |
| If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if |
| we can. First remove any nops. */ |
| while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR |
| || TREE_CODE (exp) == NON_LVALUE_EXPR) |
| && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
| exp = TREE_OPERAND (exp, 0); |
| |
| if (TREE_CODE (exp) == ADDR_EXPR) |
| exp = TREE_OPERAND (exp, 0); |
| else if (POINTER_TYPE_P (TREE_TYPE (exp))) |
| exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp); |
| else |
| exp = NULL; |
| |
| /* Honor attributes derived from exp, except for the alias set |
| (as builtin stringops may alias with anything) and the size |
| (as stringops may access multiple array elements). */ |
| if (exp) |
| { |
| set_mem_attributes (mem, exp, 0); |
| set_mem_alias_set (mem, 0); |
| set_mem_size (mem, NULL_RTX); |
| } |
| |
| return mem; |
| } |
| |
| /* Built-in functions to perform an untyped call and return. */ |
| |
| /* For each register that may be used for calling a function, this |
| gives a mode used to copy the register's value. VOIDmode indicates |
| the register is not used for calling a function. If the machine |
| has register windows, this gives only the outbound registers. |
| INCOMING_REGNO gives the corresponding inbound register. */ |
| static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER]; |
| |
| /* For each register that may be used for returning values, this gives |
| a mode used to copy the register's value. VOIDmode indicates the |
| register is not used for returning values. If the machine has |
| register windows, this gives only the outbound registers. |
| INCOMING_REGNO gives the corresponding inbound register. */ |
| static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER]; |
| |
| /* For each register that may be used for calling a function, this |
| gives the offset of that register into the block returned by |
| __builtin_apply_args. 0 indicates that the register is not |
| used for calling a function. */ |
| static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER]; |
| |
| /* Return the size required for the block returned by __builtin_apply_args, |
| and initialize apply_args_mode. */ |
| |
| static int |
| apply_args_size (void) |
| { |
| static int size = -1; |
| int align; |
| unsigned int regno; |
| enum machine_mode mode; |
| |
| /* The values computed by this function never change. */ |
| if (size < 0) |
| { |
| /* The first value is the incoming arg-pointer. */ |
| size = GET_MODE_SIZE (Pmode); |
| |
| /* The second value is the structure value address unless this is |
| passed as an "invisible" first argument. */ |
| if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0)) |
| size += GET_MODE_SIZE (Pmode); |
| |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if (FUNCTION_ARG_REGNO_P (regno)) |
| { |
| mode = reg_raw_mode[regno]; |
| |
| gcc_assert (mode != VOIDmode); |
| |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
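	  /* Round SIZE up so this register's save slot is aligned
	     for MODE.  */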
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| apply_args_reg_offset[regno] = size; |
| size += GET_MODE_SIZE (mode); |
| apply_args_mode[regno] = mode; |
| } |
| else |
| { |
| apply_args_mode[regno] = VOIDmode; |
| apply_args_reg_offset[regno] = 0; |
| } |
| } |
| return size; |
| } |
| |
| /* Return the size required for the block returned by __builtin_apply, |
| and initialize apply_result_mode. */ |
| |
| static int |
| apply_result_size (void) |
| { |
| static int size = -1; |
| int align, regno; |
| enum machine_mode mode; |
| |
| /* The values computed by this function never change. */ |
| if (size < 0) |
| { |
| size = 0; |
| |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if (FUNCTION_VALUE_REGNO_P (regno)) |
| { |
| mode = reg_raw_mode[regno]; |
| |
| gcc_assert (mode != VOIDmode); |
| |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| size += GET_MODE_SIZE (mode); |
| apply_result_mode[regno] = mode; |
| } |
| else |
| apply_result_mode[regno] = VOIDmode; |
| |
| /* Allow targets that use untyped_call and untyped_return to override |
| the size so that machine-specific information can be stored here. */ |
| #ifdef APPLY_RESULT_SIZE |
| size = APPLY_RESULT_SIZE; |
| #endif |
| } |
| return size; |
| } |
| |
| #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return) |
| /* Create a vector describing the result block RESULT. If SAVEP is true, |
| the result block is used to save the values; otherwise it is used to |
| restore the values. */ |
| |
| static rtx |
| result_vector (int savep, rtx result) |
| { |
| int regno, size, align, nelts; |
| enum machine_mode mode; |
| rtx reg, mem; |
| rtx *savevec = alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx)); |
| |
| size = nelts = 0; |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_result_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno)); |
| mem = adjust_address (result, mode, size); |
| savevec[nelts++] = (savep |
| ? gen_rtx_SET (VOIDmode, mem, reg) |
| : gen_rtx_SET (VOIDmode, reg, mem)); |
| size += GET_MODE_SIZE (mode); |
| } |
| return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec)); |
| } |
| #endif /* HAVE_untyped_call or HAVE_untyped_return */ |
| |
| /* Save the state required to perform an untyped call with the same |
| arguments as were passed to the current function. */ |
| |
| static rtx |
| expand_builtin_apply_args_1 (void) |
| { |
| rtx registers, tem; |
| int size, align, regno; |
| enum machine_mode mode; |
| rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1); |
| |
| /* Create a block where the arg-pointer, structure value address, |
| and argument registers can be saved. */ |
| registers = assign_stack_local (BLKmode, apply_args_size (), -1); |
| |
| /* Walk past the arg-pointer and structure value address. */ |
| size = GET_MODE_SIZE (Pmode); |
| if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0)) |
| size += GET_MODE_SIZE (Pmode); |
| |
| /* Save each register used in calling a function to the block. */ |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_args_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| |
| tem = gen_rtx_REG (mode, INCOMING_REGNO (regno)); |
| |
| emit_move_insn (adjust_address (registers, mode, size), tem); |
| size += GET_MODE_SIZE (mode); |
| } |
| |
| /* Save the arg pointer to the block. */ |
| tem = copy_to_reg (virtual_incoming_args_rtx); |
| #ifdef STACK_GROWS_DOWNWARD |
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
| tem |
| = force_operand (plus_constant (tem, current_function_pretend_args_size), |
| NULL_RTX); |
| #endif |
| emit_move_insn (adjust_address (registers, Pmode, 0), tem); |
| |
| size = GET_MODE_SIZE (Pmode); |
| |
| /* Save the structure value address unless this is passed as an |
| "invisible" first argument. */ |
| if (struct_incoming_value) |
| { |
| emit_move_insn (adjust_address (registers, Pmode, size), |
| copy_to_reg (struct_incoming_value)); |
| size += GET_MODE_SIZE (Pmode); |
| } |
| |
| /* Return the address of the block. */ |
| return copy_addr_to_reg (XEXP (registers, 0)); |
| } |
| |
/* __builtin_apply_args returns a block of memory allocated on
   the stack into which are stored the arg pointer, structure
| value address, static chain, and all the registers that might |
| possibly be used in performing a function call. The code is |
| moved to the start of the function so the incoming values are |
| saved. */ |
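/* For illustration only, a hand-written forwarding wrapper built on
   these builtins might look like

     void *wrapper (void)
     {
       void *args = __builtin_apply_args ();
       void *result
	 = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);
     }

   where target_fn is a hypothetical callee and 64 is a caller-chosen
   upper bound on the size of the argument block.  */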
| |
| static rtx |
| expand_builtin_apply_args (void) |
| { |
| /* Don't do __builtin_apply_args more than once in a function. |
| Save the result of the first call and reuse it. */ |
| if (apply_args_value != 0) |
| return apply_args_value; |
| { |
| /* When this function is called, it means that registers must be |
| saved on entry to this function. So we migrate the |
| call to the first insn of this function. */ |
| rtx temp; |
| rtx seq; |
| |
| start_sequence (); |
| temp = expand_builtin_apply_args_1 (); |
| seq = get_insns (); |
| end_sequence (); |
| |
| apply_args_value = temp; |
| |
| /* Put the insns after the NOTE that starts the function. |
| If this is inside a start_sequence, make the outer-level insn |
| chain current, so the code is placed at the start of the |
| function. */ |
| push_topmost_sequence (); |
| emit_insn_before (seq, NEXT_INSN (entry_of_function ())); |
| pop_topmost_sequence (); |
| return temp; |
| } |
| } |
| |
| /* Perform an untyped call and save the state required to perform an |
| untyped return of whatever value was returned by the given function. */ |
| |
| static rtx |
| expand_builtin_apply (rtx function, rtx arguments, rtx argsize) |
| { |
| int size, align, regno; |
| enum machine_mode mode; |
| rtx incoming_args, result, reg, dest, src, call_insn; |
| rtx old_stack_level = 0; |
| rtx call_fusage = 0; |
| rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0); |
| |
| arguments = convert_memory_address (Pmode, arguments); |
| |
| /* Create a block where the return registers can be saved. */ |
| result = assign_stack_local (BLKmode, apply_result_size (), -1); |
| |
| /* Fetch the arg pointer from the ARGUMENTS block. */ |
| incoming_args = gen_reg_rtx (Pmode); |
| emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments)); |
| #ifndef STACK_GROWS_DOWNWARD |
| incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize, |
| incoming_args, 0, OPTAB_LIB_WIDEN); |
| #endif |
| |
| /* Push a new argument block and copy the arguments. Do not allow |
| the (potential) memcpy call below to interfere with our stack |
| manipulations. */ |
| do_pending_stack_adjust (); |
| NO_DEFER_POP; |
| |
| /* Save the stack with nonlocal if available. */ |
| #ifdef HAVE_save_stack_nonlocal |
| if (HAVE_save_stack_nonlocal) |
| emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX); |
| else |
| #endif |
| emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX); |
| |
| /* Allocate a block of memory onto the stack and copy the memory |
| arguments to the outgoing arguments address. */ |
| allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT); |
| dest = virtual_outgoing_args_rtx; |
| #ifndef STACK_GROWS_DOWNWARD |
| if (GET_CODE (argsize) == CONST_INT) |
| dest = plus_constant (dest, -INTVAL (argsize)); |
| else |
| dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize)); |
| #endif |
| dest = gen_rtx_MEM (BLKmode, dest); |
| set_mem_align (dest, PARM_BOUNDARY); |
| src = gen_rtx_MEM (BLKmode, incoming_args); |
| set_mem_align (src, PARM_BOUNDARY); |
| emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL); |
| |
| /* Refer to the argument block. */ |
| apply_args_size (); |
| arguments = gen_rtx_MEM (BLKmode, arguments); |
| set_mem_align (arguments, PARM_BOUNDARY); |
| |
| /* Walk past the arg-pointer and structure value address. */ |
| size = GET_MODE_SIZE (Pmode); |
| if (struct_value) |
| size += GET_MODE_SIZE (Pmode); |
| |
| /* Restore each of the registers previously saved. Make USE insns |
| for each of these registers for use in making the call. */ |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_args_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| reg = gen_rtx_REG (mode, regno); |
| emit_move_insn (reg, adjust_address (arguments, mode, size)); |
| use_reg (&call_fusage, reg); |
| size += GET_MODE_SIZE (mode); |
| } |
| |
| /* Restore the structure value address unless this is passed as an |
| "invisible" first argument. */ |
| size = GET_MODE_SIZE (Pmode); |
| if (struct_value) |
| { |
| rtx value = gen_reg_rtx (Pmode); |
| emit_move_insn (value, adjust_address (arguments, Pmode, size)); |
| emit_move_insn (struct_value, value); |
| if (REG_P (struct_value)) |
| use_reg (&call_fusage, struct_value); |
| size += GET_MODE_SIZE (Pmode); |
| } |
| |
| /* All arguments and registers used for the call are set up by now! */ |
| function = prepare_call_address (function, NULL, &call_fusage, 0, 0); |
| |
  /* Ensure the address is valid.  A SYMBOL_REF is already valid, so
     there is no need, and we don't want to load it into a register as
     an optimization, because prepare_call_address already did that if
     it should be done.  */
| if (GET_CODE (function) != SYMBOL_REF) |
| function = memory_address (FUNCTION_MODE, function); |
| |
| /* Generate the actual call instruction and save the return value. */ |
| #ifdef HAVE_untyped_call |
| if (HAVE_untyped_call) |
| emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function), |
| result, result_vector (1, result))); |
| else |
| #endif |
| #ifdef HAVE_call_value |
| if (HAVE_call_value) |
| { |
| rtx valreg = 0; |
| |
| /* Locate the unique return register. It is not possible to |
| express a call that sets more than one return register using |
| call_value; use untyped_call for that. In fact, untyped_call |
| only needs to save the return registers in the given block. */ |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_result_mode[regno]) != VOIDmode) |
| { |
| gcc_assert (!valreg); /* HAVE_untyped_call required. */ |
| |
| valreg = gen_rtx_REG (mode, regno); |
| } |
| |
| emit_call_insn (GEN_CALL_VALUE (valreg, |
| gen_rtx_MEM (FUNCTION_MODE, function), |
| const0_rtx, NULL_RTX, const0_rtx)); |
| |
| emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg); |
| } |
| else |
| #endif |
| gcc_unreachable (); |
| |
| /* Find the CALL insn we just emitted, and attach the register usage |
| information. */ |
| call_insn = last_call_insn (); |
| add_function_usage_to (call_insn, call_fusage); |
| |
| /* Restore the stack. */ |
| #ifdef HAVE_save_stack_nonlocal |
| if (HAVE_save_stack_nonlocal) |
| emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX); |
| else |
| #endif |
| emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX); |
| |
| OK_DEFER_POP; |
| |
| /* Return the address of the result block. */ |
| result = copy_addr_to_reg (XEXP (result, 0)); |
| return convert_memory_address (ptr_mode, result); |
| } |
| |
| /* Perform an untyped return. */ |
| |
| static void |
| expand_builtin_return (rtx result) |
| { |
| int size, align, regno; |
| enum machine_mode mode; |
| rtx reg; |
| rtx call_fusage = 0; |
| |
| result = convert_memory_address (Pmode, result); |
| |
| apply_result_size (); |
| result = gen_rtx_MEM (BLKmode, result); |
| |
| #ifdef HAVE_untyped_return |
| if (HAVE_untyped_return) |
| { |
| emit_jump_insn (gen_untyped_return (result, result_vector (0, result))); |
| emit_barrier (); |
| return; |
| } |
| #endif |
| |
| /* Restore the return value and note that each value is used. */ |
| size = 0; |
| for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) |
| if ((mode = apply_result_mode[regno]) != VOIDmode) |
| { |
| align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; |
| if (size % align != 0) |
| size = CEIL (size, align) * align; |
| reg = gen_rtx_REG (mode, INCOMING_REGNO (regno)); |
| emit_move_insn (reg, adjust_address (result, mode, size)); |
| |
| push_to_sequence (call_fusage); |
| emit_insn (gen_rtx_USE (VOIDmode, reg)); |
| call_fusage = get_insns (); |
| end_sequence (); |
| size += GET_MODE_SIZE (mode); |
| } |
| |
| /* Put the USE insns before the return. */ |
| emit_insn (call_fusage); |
| |
  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
| expand_naked_return (); |
| } |
| |
| /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */ |
| |
| static enum type_class |
| type_to_class (tree type) |
| { |
| switch (TREE_CODE (type)) |
| { |
| case VOID_TYPE: return void_type_class; |
| case INTEGER_TYPE: return integer_type_class; |
| case CHAR_TYPE: return char_type_class; |
| case ENUMERAL_TYPE: return enumeral_type_class; |
| case BOOLEAN_TYPE: return boolean_type_class; |
| case POINTER_TYPE: return pointer_type_class; |
| case REFERENCE_TYPE: return reference_type_class; |
| case OFFSET_TYPE: return offset_type_class; |
| case REAL_TYPE: return real_type_class; |
| case COMPLEX_TYPE: return complex_type_class; |
| case FUNCTION_TYPE: return function_type_class; |
| case METHOD_TYPE: return method_type_class; |
| case RECORD_TYPE: return record_type_class; |
| case UNION_TYPE: |
| case QUAL_UNION_TYPE: return union_type_class; |
| case ARRAY_TYPE: return (TYPE_STRING_FLAG (type) |
| ? string_type_class : array_type_class); |
| case FILE_TYPE: return file_type_class; |
| case LANG_TYPE: return lang_type_class; |
| default: return no_type_class; |
| } |
| } |
| |
| /* Expand a call to __builtin_classify_type with arguments found in |
| ARGLIST. */ |
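/* For example, __builtin_classify_type (0) yields integer_type_class,
   and __builtin_classify_type ("") yields pointer_type_class, since an
   array argument decays to a pointer.  */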
| |
| static rtx |
| expand_builtin_classify_type (tree arglist) |
| { |
| if (arglist != 0) |
| return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist)))); |
| return GEN_INT (no_type_class); |
| } |
| |
| /* This helper macro, meant to be used in mathfn_built_in below, |
| determines which among a set of three builtin math functions is |
| appropriate for a given type mode. The `F' and `L' cases are |
| automatically generated from the `double' case. */ |
| #define CASE_MATHFN(BUILT_IN_MATHFN) \ |
| case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \ |
| fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \ |
| fcodel = BUILT_IN_MATHFN##L ; break; |
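/* For example, CASE_MATHFN (BUILT_IN_SQRT) expands to case labels for
   BUILT_IN_SQRT, BUILT_IN_SQRTF and BUILT_IN_SQRTL, and records those
   three codes in FCODE, FCODEF and FCODEL respectively.  */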
| |
/* Return the mathematical function equivalent to FN but operating
   directly on TYPE, if available.  If we can't do the conversion,
   return zero.  */
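/* For example, mathfn_built_in (float_type_node, BUILT_IN_SIN) returns
   the implicit declaration of sinf when one is available.  */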
| tree |
| mathfn_built_in (tree type, enum built_in_function fn) |
| { |
| enum built_in_function fcode, fcodef, fcodel; |
| |
| switch (fn) |
| { |
| CASE_MATHFN (BUILT_IN_ACOS) |
| CASE_MATHFN (BUILT_IN_ACOSH) |
| CASE_MATHFN (BUILT_IN_ASIN) |
| CASE_MATHFN (BUILT_IN_ASINH) |
| CASE_MATHFN (BUILT_IN_ATAN) |
| CASE_MATHFN (BUILT_IN_ATAN2) |
| CASE_MATHFN (BUILT_IN_ATANH) |
| CASE_MATHFN (BUILT_IN_CBRT) |
| CASE_MATHFN (BUILT_IN_CEIL) |
| CASE_MATHFN (BUILT_IN_COPYSIGN) |
| CASE_MATHFN (BUILT_IN_COS) |
| CASE_MATHFN (BUILT_IN_COSH) |
| CASE_MATHFN (BUILT_IN_DREM) |
| CASE_MATHFN (BUILT_IN_ERF) |
| CASE_MATHFN (BUILT_IN_ERFC) |
| CASE_MATHFN (BUILT_IN_EXP) |
| CASE_MATHFN (BUILT_IN_EXP10) |
| CASE_MATHFN (BUILT_IN_EXP2) |
| CASE_MATHFN (BUILT_IN_EXPM1) |
| CASE_MATHFN (BUILT_IN_FABS) |
| CASE_MATHFN (BUILT_IN_FDIM) |
| CASE_MATHFN (BUILT_IN_FLOOR) |
| CASE_MATHFN (BUILT_IN_FMA) |
| CASE_MATHFN (BUILT_IN_FMAX) |
| CASE_MATHFN (BUILT_IN_FMIN) |
| CASE_MATHFN (BUILT_IN_FMOD) |
| CASE_MATHFN (BUILT_IN_FREXP) |
| CASE_MATHFN (BUILT_IN_GAMMA) |
| CASE_MATHFN (BUILT_IN_HUGE_VAL) |
| CASE_MATHFN (BUILT_IN_HYPOT) |
| CASE_MATHFN (BUILT_IN_ILOGB) |
| CASE_MATHFN (BUILT_IN_INF) |
| CASE_MATHFN (BUILT_IN_J0) |
| CASE_MATHFN (BUILT_IN_J1) |
| CASE_MATHFN (BUILT_IN_JN) |
| CASE_MATHFN (BUILT_IN_LDEXP) |
| CASE_MATHFN (BUILT_IN_LGAMMA) |
| CASE_MATHFN (BUILT_IN_LLRINT) |
| CASE_MATHFN (BUILT_IN_LLROUND) |
| CASE_MATHFN (BUILT_IN_LOG) |
| CASE_MATHFN (BUILT_IN_LOG10) |
| CASE_MATHFN (BUILT_IN_LOG1P) |
| CASE_MATHFN (BUILT_IN_LOG2) |
| CASE_MATHFN (BUILT_IN_LOGB) |
| CASE_MATHFN (BUILT_IN_LRINT) |
| CASE_MATHFN (BUILT_IN_LROUND) |
| CASE_MATHFN (BUILT_IN_MODF) |
| CASE_MATHFN (BUILT_IN_NAN) |
| CASE_MATHFN (BUILT_IN_NANS) |
| CASE_MATHFN (BUILT_IN_NEARBYINT) |
| CASE_MATHFN (BUILT_IN_NEXTAFTER) |
| CASE_MATHFN (BUILT_IN_NEXTTOWARD) |
| CASE_MATHFN (BUILT_IN_POW) |
| CASE_MATHFN (BUILT_IN_POWI) |
| CASE_MATHFN (BUILT_IN_POW10) |
| CASE_MATHFN (BUILT_IN_REMAINDER) |
| CASE_MATHFN (BUILT_IN_REMQUO) |
| CASE_MATHFN (BUILT_IN_RINT) |
| CASE_MATHFN (BUILT_IN_ROUND) |
| CASE_MATHFN (BUILT_IN_SCALB) |
| CASE_MATHFN (BUILT_IN_SCALBLN) |
| CASE_MATHFN (BUILT_IN_SCALBN) |
| CASE_MATHFN (BUILT_IN_SIGNIFICAND) |
| CASE_MATHFN (BUILT_IN_SIN) |
| CASE_MATHFN (BUILT_IN_SINCOS) |
| CASE_MATHFN (BUILT_IN_SINH) |
| CASE_MATHFN (BUILT_IN_SQRT) |
| CASE_MATHFN (BUILT_IN_TAN) |
| CASE_MATHFN (BUILT_IN_TANH) |
| CASE_MATHFN (BUILT_IN_TGAMMA) |
| CASE_MATHFN (BUILT_IN_TRUNC) |
| CASE_MATHFN (BUILT_IN_Y0) |
| CASE_MATHFN (BUILT_IN_Y1) |
| CASE_MATHFN (BUILT_IN_YN) |
| |
| default: |
| return 0; |
| } |
| |
| if (TYPE_MAIN_VARIANT (type) == double_type_node) |
| return implicit_built_in_decls[fcode]; |
| else if (TYPE_MAIN_VARIANT (type) == float_type_node) |
| return implicit_built_in_decls[fcodef]; |
| else if (TYPE_MAIN_VARIANT (type) == long_double_type_node) |
| return implicit_built_in_decls[fcodel]; |
| else |
| return 0; |
| } |
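| |
| /* E.g. mathfn_built_in (float_type_node, BUILT_IN_SIN) returns the |
| implicit declaration of sinf, and a TYPE whose main variant is none |
| of float, double or long double yields zero. */ |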
| |
| /* If errno must be maintained, expand the RTL to check if the result, |
| TARGET, of a built-in function call, EXP, is NaN, and if so set |
| errno to EDOM. */ |
| |
| static void |
| expand_errno_check (tree exp, rtx target) |
| { |
| rtx lab = gen_label_rtx (); |
| |
| /* Test the result; if it is NaN, set errno=EDOM because |
| the argument was not in the domain. */ |
| emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target), |
| 0, lab); |
| |
| #ifdef TARGET_EDOM |
| /* If this built-in doesn't throw an exception, set errno directly. */ |
| if (TREE_NOTHROW (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))) |
| { |
| #ifdef GEN_ERRNO_RTX |
| rtx errno_rtx = GEN_ERRNO_RTX; |
| #else |
| rtx errno_rtx |
| = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno")); |
| #endif |
| emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM)); |
| emit_label (lab); |
| return; |
| } |
| #endif |
| |
| /* We can't set errno=EDOM directly; let the library call do it. |
| Pop the arguments right away in case the call gets deleted. */ |
| NO_DEFER_POP; |
| expand_call (exp, target, 0); |
| OK_DEFER_POP; |
| emit_label (lab); |
| } |
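| |
| /* In outline, the sequence emitted above behaves like this pseudo-C |
| (a sketch only; the real output is RTL): |
| if (result == result) goto lab; (taken for every non-NaN value) |
| errno = EDOM; (otherwise, or re-issue the library call) |
| lab:; |
| exploiting the fact that NaN is the only value that compares unequal |
| to itself. */ |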
| |
| |
| /* Expand a call to one of the builtin unary math functions (sqrt, |
| exp, log, floor, and the like). |
| Return 0 if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. |
| SUBTARGET may be used as the target for computing one of EXP's operands. */ |
| |
| static rtx |
| expand_builtin_mathfn (tree exp, rtx target, rtx subtarget) |
| { |
| optab builtin_optab; |
| rtx op0, insns, before_call; |
| tree fndecl = get_callee_fndecl (exp); |
| tree arglist = TREE_OPERAND (exp, 1); |
| enum machine_mode mode; |
| bool errno_set = false; |
| tree arg, narg; |
| |
| if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE)) |
| return 0; |
| |
| arg = TREE_VALUE (arglist); |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| case BUILT_IN_SQRT: |
| case BUILT_IN_SQRTF: |
| case BUILT_IN_SQRTL: |
| errno_set = ! tree_expr_nonnegative_p (arg); |
| builtin_optab = sqrt_optab; |
| break; |
| case BUILT_IN_EXP: |
| case BUILT_IN_EXPF: |
| case BUILT_IN_EXPL: |
| errno_set = true; builtin_optab = exp_optab; break; |
| case BUILT_IN_EXP10: |
| case BUILT_IN_EXP10F: |
| case BUILT_IN_EXP10L: |
| case BUILT_IN_POW10: |
| case BUILT_IN_POW10F: |
| case BUILT_IN_POW10L: |
| errno_set = true; builtin_optab = exp10_optab; break; |
| case BUILT_IN_EXP2: |
| case BUILT_IN_EXP2F: |
| case BUILT_IN_EXP2L: |
| errno_set = true; builtin_optab = exp2_optab; break; |
| case BUILT_IN_EXPM1: |
| case BUILT_IN_EXPM1F: |
| case BUILT_IN_EXPM1L: |
| errno_set = true; builtin_optab = expm1_optab; break; |
| case BUILT_IN_LOGB: |
| case BUILT_IN_LOGBF: |
| case BUILT_IN_LOGBL: |
| errno_set = true; builtin_optab = logb_optab; break; |
| case BUILT_IN_ILOGB: |
| case BUILT_IN_ILOGBF: |
| case BUILT_IN_ILOGBL: |
| errno_set = true; builtin_optab = ilogb_optab; break; |
| case BUILT_IN_LOG: |
| case BUILT_IN_LOGF: |
| case BUILT_IN_LOGL: |
| errno_set = true; builtin_optab = log_optab; break; |
| case BUILT_IN_LOG10: |
| case BUILT_IN_LOG10F: |
| case BUILT_IN_LOG10L: |
| errno_set = true; builtin_optab = log10_optab; break; |
| case BUILT_IN_LOG2: |
| case BUILT_IN_LOG2F: |
| case BUILT_IN_LOG2L: |
| errno_set = true; builtin_optab = log2_optab; break; |
| case BUILT_IN_LOG1P: |
| case BUILT_IN_LOG1PF: |
| case BUILT_IN_LOG1PL: |
| errno_set = true; builtin_optab = log1p_optab; break; |
| case BUILT_IN_ASIN: |
| case BUILT_IN_ASINF: |
| case BUILT_IN_ASINL: |
| builtin_optab = asin_optab; break; |
| case BUILT_IN_ACOS: |
| case BUILT_IN_ACOSF: |
| case BUILT_IN_ACOSL: |
| builtin_optab = acos_optab; break; |
| case BUILT_IN_TAN: |
| case BUILT_IN_TANF: |
| case BUILT_IN_TANL: |
| builtin_optab = tan_optab; break; |
| case BUILT_IN_ATAN: |
| case BUILT_IN_ATANF: |
| case BUILT_IN_ATANL: |
| builtin_optab = atan_optab; break; |
| case BUILT_IN_FLOOR: |
| case BUILT_IN_FLOORF: |
| case BUILT_IN_FLOORL: |
| builtin_optab = floor_optab; break; |
| case BUILT_IN_CEIL: |
| case BUILT_IN_CEILF: |
| case BUILT_IN_CEILL: |
| builtin_optab = ceil_optab; break; |
| case BUILT_IN_TRUNC: |
| case BUILT_IN_TRUNCF: |
| case BUILT_IN_TRUNCL: |
| builtin_optab = btrunc_optab; break; |
| case BUILT_IN_ROUND: |
| case BUILT_IN_ROUNDF: |
| case BUILT_IN_ROUNDL: |
| builtin_optab = round_optab; break; |
| case BUILT_IN_NEARBYINT: |
| case BUILT_IN_NEARBYINTF: |
| case BUILT_IN_NEARBYINTL: |
| builtin_optab = nearbyint_optab; break; |
| case BUILT_IN_RINT: |
| case BUILT_IN_RINTF: |
| case BUILT_IN_RINTL: |
| builtin_optab = rint_optab; break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| if (! flag_errno_math || ! HONOR_NANS (mode)) |
| errno_set = false; |
| |
| /* Before working hard, check whether the instruction is available. */ |
| if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
| { |
| target = gen_reg_rtx (mode); |
| |
| /* Wrap the computation of the argument in a SAVE_EXPR, as we may |
| need to expand the argument again. This way, we will not perform |
| side-effects more than once. */ |
| narg = builtin_save_expr (arg); |
| if (narg != arg) |
| { |
| arg = narg; |
| arglist = build_tree_list (NULL_TREE, arg); |
| exp = build_function_call_expr (fndecl, arglist); |
| } |
| |
| op0 = expand_expr (arg, subtarget, VOIDmode, 0); |
| |
| start_sequence (); |
| |
| /* Compute into TARGET. |
| Set TARGET to wherever the result comes back. */ |
| target = expand_unop (mode, builtin_optab, op0, target, 0); |
| |
| if (target != 0) |
| { |
| if (errno_set) |
| expand_errno_check (exp, target); |
| |
| /* Output the entire sequence. */ |
| insns = get_insns (); |
| end_sequence (); |
| emit_insn (insns); |
| return target; |
| } |
| |
| /* If we were unable to expand via the builtin, stop the sequence |
| (without outputting the insns) and call the library function |
| with the stabilized argument list. */ |
| end_sequence (); |
| } |
| |
| before_call = get_last_insn (); |
| |
| target = expand_call (exp, target, target == const0_rtx); |
| |
| /* If this is a sqrt operation and we don't care about errno, try to |
| attach a REG_EQUAL note with a SQRT rtx to the emitted libcall. |
| This allows the semantics of the libcall to be visible to the RTL |
| optimizers. */ |
| if (builtin_optab == sqrt_optab && !errno_set) |
| { |
| /* Search backwards through the insns emitted by expand_call looking |
| for the instruction with the REG_RETVAL note. */ |
| rtx last = get_last_insn (); |
| while (last != before_call) |
| { |
| if (find_reg_note (last, REG_RETVAL, NULL)) |
| { |
| rtx note = find_reg_note (last, REG_EQUAL, NULL); |
| /* Check that the REG_EQUAL note is an EXPR_LIST with |
| two elements, i.e. symbol_ref(sqrt) and the operand. */ |
| if (note |
| && GET_CODE (note) == EXPR_LIST |
| && GET_CODE (XEXP (note, 0)) == EXPR_LIST |
| && XEXP (XEXP (note, 0), 1) != NULL_RTX |
| && XEXP (XEXP (XEXP (note, 0), 1), 1) == NULL_RTX) |
| { |
| rtx operand = XEXP (XEXP (XEXP (note, 0), 1), 0); |
| /* Check operand is a register with expected mode. */ |
| if (operand |
| && REG_P (operand) |
| && GET_MODE (operand) == mode) |
| { |
| /* Replace the REG_EQUAL note with a SQRT rtx. */ |
| rtx equiv = gen_rtx_SQRT (mode, operand); |
| set_unique_reg_note (last, REG_EQUAL, equiv); |
| } |
| } |
| break; |
| } |
| last = PREV_INSN (last); |
| } |
| } |
| |
| return target; |
| } |
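| |
| /* The REG_EQUAL note with a SQRT rtx attached above lets later RTL |
| passes such as CSE treat the libcall's result as the square root of |
| its operand, so repeated identical sqrt libcalls can be commoned. */ |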
| |
| /* Expand a call to the builtin binary math functions (pow and atan2). |
| Return 0 if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. |
| SUBTARGET may be used as the target for computing one of EXP's |
| operands. */ |
| |
| static rtx |
| expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget) |
| { |
| optab builtin_optab; |
| rtx op0, op1, insns; |
| int op1_type = REAL_TYPE; |
| tree fndecl = get_callee_fndecl (exp); |
| tree arglist = TREE_OPERAND (exp, 1); |
| tree arg0, arg1, temp, narg; |
| enum machine_mode mode; |
| bool errno_set = true; |
| bool stable = true; |
| |
| if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXP) |
| || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPF) |
| || (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LDEXPL)) |
| op1_type = INTEGER_TYPE; |
| |
| if (!validate_arglist (arglist, REAL_TYPE, op1_type, VOID_TYPE)) |
| return 0; |
| |
| arg0 = TREE_VALUE (arglist); |
| arg1 = TREE_VALUE (TREE_CHAIN (arglist)); |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| case BUILT_IN_POW: |
| case BUILT_IN_POWF: |
| case BUILT_IN_POWL: |
| builtin_optab = pow_optab; break; |
| case BUILT_IN_ATAN2: |
| case BUILT_IN_ATAN2F: |
| case BUILT_IN_ATAN2L: |
| builtin_optab = atan2_optab; break; |
| case BUILT_IN_LDEXP: |
| case BUILT_IN_LDEXPF: |
| case BUILT_IN_LDEXPL: |
| builtin_optab = ldexp_optab; break; |
| case BUILT_IN_FMOD: |
| case BUILT_IN_FMODF: |
| case BUILT_IN_FMODL: |
| builtin_optab = fmod_optab; break; |
| case BUILT_IN_DREM: |
| case BUILT_IN_DREMF: |
| case BUILT_IN_DREML: |
| builtin_optab = drem_optab; break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| /* Before working hard, check whether the instruction is available. */ |
| if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) |
| return 0; |
| |
| target = gen_reg_rtx (mode); |
| |
| if (! flag_errno_math || ! HONOR_NANS (mode)) |
| errno_set = false; |
| |
| /* Always stabilize the argument list. */ |
| narg = builtin_save_expr (arg1); |
| if (narg != arg1) |
| { |
| arg1 = narg; |
| temp = build_tree_list (NULL_TREE, narg); |
| stable = false; |
| } |
| else |
| temp = TREE_CHAIN (arglist); |
| |
| narg = builtin_save_expr (arg0); |
| if (narg != arg0) |
| { |
| arg0 = narg; |
| arglist = tree_cons (NULL_TREE, narg, temp); |
| stable = false; |
| } |
| else if (! stable) |
| arglist = tree_cons (NULL_TREE, arg0, temp); |
| |
| if (! stable) |
| exp = build_function_call_expr (fndecl, arglist); |
| |
| op0 = expand_expr (arg0, subtarget, VOIDmode, 0); |
| op1 = expand_expr (arg1, 0, VOIDmode, 0); |
| |
| start_sequence (); |
| |
| /* Compute into TARGET. |
| Set TARGET to wherever the result comes back. */ |
| target = expand_binop (mode, builtin_optab, op0, op1, |
| target, 0, OPTAB_DIRECT); |
| |
| /* If we were unable to expand via the builtin, stop the sequence |
| (without outputting the insns) and call the library function |
| with the stabilized argument list. */ |
| if (target == 0) |
| { |
| end_sequence (); |
| return expand_call (exp, target, target == const0_rtx); |
| } |
| |
| if (errno_set) |
| expand_errno_check (exp, target); |
| |
| /* Output the entire sequence. */ |
| insns = get_insns (); |
| end_sequence (); |
| emit_insn (insns); |
| |
| return target; |
| } |
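| |
| /* E.g. ldexp (x, n) validates as (REAL_TYPE, INTEGER_TYPE) via the |
| op1_type override above, whereas pow, atan2, fmod and drem validate |
| both arguments as REAL_TYPE. */ |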
| |
| /* Expand a call to the builtin sin and cos math functions. |
| Return 0 if a normal call should be emitted rather than expanding the |
| function in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. |
| SUBTARGET may be used as the target for computing one of EXP's |
| operands. */ |
| |
| static rtx |
| expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget) |
| { |
| optab builtin_optab; |
| rtx op0, insns, before_call; |
| tree fndecl = get_callee_fndecl (exp); |
| tree arglist = TREE_OPERAND (exp, 1); |
| enum machine_mode mode; |
| bool errno_set = false; |
| tree arg, narg; |
| |
| if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE)) |
| return 0; |
| |
| arg = TREE_VALUE (arglist); |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| case BUILT_IN_SIN: |
| case BUILT_IN_SINF: |
| case BUILT_IN_SINL: |
| case BUILT_IN_COS: |
| case BUILT_IN_COSF: |
| case BUILT_IN_COSL: |
| builtin_optab = sincos_optab; break; |
| default: |
| gcc_unreachable (); |
| } |
| |
| /* Make a suitable register to place result in. */ |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| if (! flag_errno_math || ! HONOR_NANS (mode)) |
| errno_set = false; |
| |
| /* Check whether the sincos insn is available; if not, fall back |
| to the sin or cos insn. */ |
| if (builtin_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing) |
| { |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| case BUILT_IN_SIN: |
| case BUILT_IN_SINF: |
| case BUILT_IN_SINL: |
| builtin_optab = sin_optab; break; |
| case BUILT_IN_COS: |
| case BUILT_IN_COSF: |
| case BUILT_IN_COSL: |
| builtin_optab = cos_optab; break; |
| default: |
| gcc_unreachable (); |
| } |
| } |
| |
| /* Before working hard, check whether the instruction is available. */ |
| if (builtin_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing) |
| { |
| target = gen_reg_rtx (mode); |
| |
| /* Wrap the computation of the argument in a SAVE_EXPR, as we may |
| need to expand the argument again. This way, we will not perform |
| side-effects more than once. */ |
| narg = save_expr (arg); |
| if (narg != arg) |
| { |
| arg = narg; |
| arglist = build_tree_list (NULL_TREE, arg); |
| exp = build_function_call_expr (fndecl, arglist); |
| } |
| |
| op0 = expand_expr (arg, subtarget, VOIDmode, 0); |
| |
| start_sequence (); |
| |
| /* Compute into TARGET. |
| Set TARGET to wherever the result comes back. */ |
| if (builtin_optab == sincos_optab) |
| { |
| int result; |
| |
| switch (DECL_FUNCTION_CODE (fndecl)) |
| { |
| case BUILT_IN_SIN: |
| case BUILT_IN_SINF: |
| case BUILT_IN_SINL: |
| result = expand_twoval_unop (builtin_optab, op0, 0, target, 0); |
| break; |
| case BUILT_IN_COS: |
| case BUILT_IN_COSF: |
| case BUILT_IN_COSL: |
| result = expand_twoval_unop (builtin_optab, op0, target, 0, 0); |
| break; |
| default: |
| gcc_unreachable (); |
| } |
| gcc_assert (result); |
| } |
| else |
| { |
| target = expand_unop (mode, builtin_optab, op0, target, 0); |
| } |
| |
| if (target != 0) |
| { |
| if (errno_set) |
| expand_errno_check (exp, target); |
| |
| /* Output the entire sequence. */ |
| insns = get_insns (); |
| end_sequence (); |
| emit_insn (insns); |
| return target; |
| } |
| |
| /* If we were unable to expand via the builtin, stop the sequence |
| (without outputting the insns) and call the library function |
| with the stabilized argument list. */ |
| end_sequence (); |
| } |
| |
| before_call = get_last_insn (); |
| |
| target = expand_call (exp, target, target == const0_rtx); |
| |
| return target; |
| } |
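| |
| /* A note on the sincos path above: expand_twoval_unop computes both |
| outputs of the sincos pattern, so sin passes TARGET in one output |
| slot and 0 in the other (letting a scratch register absorb the |
| unwanted cosine), and cos does the converse. */ |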
| |
| /* To evaluate powi(x,n), the floating point value x raised to the |
| constant integer exponent n, we use a hybrid algorithm that |
| combines the "window method" with look-up tables. For an |
| introduction to exponentiation algorithms and "addition chains", |
| see section 4.6.3, "Evaluation of Powers" of Donald E. Knuth, |
| "Seminumerical Algorithms", Vol. 2, "The Art of Computer Programming", |
| 3rd Edition, 1998, and Daniel M. Gordon, "A Survey of Fast Exponentiation |
| Methods", Journal of Algorithms, Vol. 27, pp. 129-146, 1998. */ |
| |
| /* Provide a default value for POWI_MAX_MULTS, the maximum number of |
| multiplications to inline before calling the system library's pow |
| function. powi(x,n) requires at worst 2*bits(n)-2 multiplications, |
| so this default never requires calling pow, powf or powl. */ |
| |
| #ifndef POWI_MAX_MULTS |
| #define POWI_MAX_MULTS (2*HOST_BITS_PER_WIDE_INT-2) |
| #endif |
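| |
| /* E.g. on a host with a 64-bit HOST_WIDE_INT the default above is |
| 2*64-2 == 126 multiplications, which by the worst-case bound quoted |
| above suffices for every representable exponent. */ |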
| |
| /* The size of the "optimal power tree" lookup table. All |
| exponents less than this value are simply looked up in the |
| powi_table below. This threshold is also used to size the |
| cache of pseudo registers that hold intermediate results. */ |
| #define POWI_TABLE_SIZE 256 |
| |
| /* The size, in bits, of the window used in the "window method" |
| exponentiation algorithm. This is equivalent to a radix of |
| (1<<POWI_WINDOW_SIZE) in the corresponding "m-ary method". */ |
| #define POWI_WINDOW_SIZE 3 |
| |
| /* The following table is an efficient representation of an |
| "optimal power tree". For each value, i, the corresponding |
| value, j, in the table states that an optimal evaluation |
| sequence for calculating pow(x,i) can be found by evaluating |
| pow(x,j)*pow(x,i-j). An optimal power tree for the first |
| 100 integers is given in Knuth's "Seminumerical Algorithms". */ |
| |
| static const unsigned char powi_table[POWI_TABLE_SIZE] = |
| { |
| 0, 1, 1, 2, 2, 3, 3, 4, /* 0 - 7 */ |
| 4, 6, 5, 6, 6, 10, 7, 9, /* 8 - 15 */ |
| 8, 16, 9, 16, 10, 12, 11, 13, /* 16 - 23 */ |
| 12, 17, 13, 18, 14, 24, 15, 26, /* 24 - 31 */ |
| 16, 17, 17, 19, 18, 33, 19, 26, /* 32 - 39 */ |
| 20, 25, 21, 40, 22, 27, 23, 44, /* 40 - 47 */ |
| 24, 32, 25, 34, 26, 29, 27, 44, /* 48 - 55 */ |
| 28, 31, 29, 34, 30, 60, 31, 36, /* 56 - 63 */ |
| 32, 64, 33, 34, 34, 46, 35, 37, /* 64 - 71 */ |
| 36, 65, 37, 50, 38, 48, 39, 69, /* 72 - 79 */ |
| 40, 49, 41, 43, 42, 51, 43, 58, /* 80 - 87 */ |
| 44, 64, 45, 47, 46, 59, 47, 76, /* 88 - 95 */ |
| 48, 65, 49, 66, 50, 67, 51, 66, /* 96 - 103 */ |
| 52, 70, 53, 74, 54, 104, 55, 74, /* 104 - 111 */ |
| 56, 64, 57, 69, 58, 78, 59, 68, /* 112 - 119 */ |
| 60, 61, 61, 80, 62, 75, 63, 68, /* 120 - 127 */ |
| 64, 65, 65, 128, 66, 129, 67, 90, /* 128 - 135 */ |
| 68, 73, 69, 131, 70, 94, 71, 88, /* 136 - 143 */ |
| 72, 128, 73, 98, 74, 132, 75, 121, /* 144 - 151 */ |
| 76, 102, 77, 124, 78, 132, 79, 106, /* 152 - 159 */ |
| 80, 97, 81, 160, 82, 99, 83, 134, /* 160 - 167 */ |
| 84, 86, 85, 95, 86, 160, 87, 100, /* 168 - 175 */ |
| 88, 113, 89, 98, 90, 107, 91, 122, /* 176 - 183 */ |
| 92, 111, 93, 102, 94, 126, 95, 150, /* 184 - 191 */ |
| 96, 128, 97, 130, 98, 133, 99, 195, /* 192 - 199 */ |
| 100, 128, 101, 123, 102, 164, 103, 138, /* 200 - 207 */ |
| 104, 145, 105, 146, 106, 109, 107, 149, /* 208 - 215 */ |
| 108, 200, 109, 146, 110, 170, 111, 157, /* 216 - 223 */ |
| 112, 128, 113, 130, 114, 182, 115, 132, /* 224 - 231 */ |
| 116, 200, 117, 132, 118, 158, 119, 206, /* 232 - 239 */ |
| 120, 240, 121, 162, 122, 147, 123, 152, /* 240 - 247 */ |
| 124, 166, 125, 214, 126, 138, 127, 153, /* 248 - 255 */ |
| }; |
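| |
| /* Worked example: powi_table[15] == 9, so pow(x,15) is evaluated as |
| pow(x,9)*pow(x,6). Recursing, powi_table[9] == 6, powi_table[6] == 3, |
| powi_table[3] == 2 and powi_table[2] == 1, giving the chain |
| x^2 = x*x, x^3 = x^2*x, x^6 = x^3*x^3, x^9 = x^6*x^3, x^15 = x^9*x^6, |
| i.e. five multiplications in total. */ |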
| |
| |
| /* Return the number of multiplications required to calculate |
| powi(x,n) where n is less than POWI_TABLE_SIZE. This is a |
| subroutine of powi_cost. CACHE is an array indicating |
| which exponents have already been calculated. */ |
| |
| static int |
| powi_lookup_cost (unsigned HOST_WIDE_INT n, bool *cache) |
| { |
| /* If we've already calculated this exponent, then this evaluation |
| doesn't require any additional multiplications. */ |
| if (cache[n]) |
| return 0; |
| |
| cache[n] = true; |
| return powi_lookup_cost (n - powi_table[n], cache) |
| + powi_lookup_cost (powi_table[n], cache) + 1; |
| } |
| |
| /* Return the number of multiplications required to calculate |
| powi(x,n) for an arbitrary x, given the exponent N. This |
| function needs to be kept in sync with expand_powi below. */ |
| |
| static int |
| powi_cost (HOST_WIDE_INT n) |
| { |
| bool cache[POWI_TABLE_SIZE]; |
| unsigned HOST_WIDE_INT digit; |
| unsigned HOST_WIDE_INT val; |
| int result; |
| |
| if (n == 0) |
| return 0; |
| |
| /* Ignore the reciprocal when calculating the cost. */ |
| val = (n < 0) ? -n : n; |
| |
| /* Initialize the exponent cache. */ |
| memset (cache, 0, POWI_TABLE_SIZE * sizeof (bool)); |
| cache[1] = true; |
| |
| result = 0; |
| |
| while (val >= POWI_TABLE_SIZE) |
| { |
| if (val & 1) |
| { |
| digit = val & ((1 << POWI_WINDOW_SIZE) - 1); |
| result += powi_lookup_cost (digit, cache) |
| + POWI_WINDOW_SIZE + 1; |
| val >>= POWI_WINDOW_SIZE; |
| } |
| else |
| { |
| val >>= 1; |
| result++; |
| } |
| } |
| |
| return result + powi_lookup_cost (val, cache); |
| } |
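| |
| /* E.g. powi_cost (15) == 5, matching the five multiplications traced |
| in the powi_table example above; powi_cost (-15) is also 5, since the |
| final reciprocal is deliberately not counted. */ |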
| |
| /* Recursive subroutine of expand_powi. This function takes the array, |
| CACHE, of already calculated exponents and an exponent N and returns |
| an RTX that corresponds to CACHE[1]**N, as calculated in mode MODE. */ |
| |
| static rtx |
| expand_powi_1 (enum machine_mode mode, unsigned HOST_WIDE_INT n, rtx *cache) |
| { |
| unsigned HOST_WIDE_INT digit; |
| rtx target, result; |
| rtx op0, op1; |
| |
| if (n < POWI_TABLE_SIZE) |
| { |
| if (cache[n]) |
| return cache[n]; |
| |
| target = gen_reg_rtx (mode); |
| cache[n] = target; |
| |
| op0 = expand_powi_1 (mode, n - powi_table[n], cache); |
| op1 = expand_powi_1 (mode, powi_table[n], cache); |
| } |
| else if (n & 1) |
| { |
| target = gen_reg_rtx (mode); |
| digit = n & ((1 << POWI_WINDOW_SIZE) - 1); |
| op0 = expand_powi_1 (mode, n - digit, cache); |
| op1 = expand_powi_1 (mode, digit, cache); |
| } |
| else |
| { |
| target = gen_reg_rtx (mode); |
| op0 = expand_powi_1 (mode, n >> 1, cache); |
| op1 = op0; |
| } |
| |
| result = expand_mult (mode, op0, op1, target, 0); |
| if (result != target) |
| emit_move_insn (target, result); |
| return target; |
| } |
| |
| /* Expand the RTL to evaluate powi(x,n) in mode MODE. X is the |
| floating point operand in mode MODE, and N is the exponent. This |
| function needs to be kept in sync with powi_cost above. */ |
| |
| static rtx |
| expand_powi (rtx x, enum machine_mode mode, HOST_WIDE_INT n) |
| { |
| unsigned HOST_WIDE_INT val; |
| rtx cache[POWI_TABLE_SIZE]; |
| rtx result; |
| |
| if (n == 0) |
| return CONST1_RTX (mode); |
| |
| val = (n < 0) ? -n : n; |
| |
| memset (cache, 0, sizeof (cache)); |
| cache[1] = x; |
| |
| result = expand_powi_1 (mode, val, cache); |
| |
| /* If the original exponent was negative, reciprocate the result. */ |
| if (n < 0) |
| result = expand_binop (mode, sdiv_optab, CONST1_RTX (mode), |
| result, NULL_RTX, 0, OPTAB_LIB_WIDEN); |
| |
| return result; |
| } |
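| |
| /* E.g. expand_powi (x, mode, -2) emits t = x*x and then 1/t via |
| sdiv_optab, per the reciprocal step above. */ |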
| |
| /* Expand a call to the pow built-in mathematical function. Return 0 if |
| a normal call should be emitted rather than expanding the function |
| in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. */ |
| |
| static rtx |
| expand_builtin_pow (tree exp, rtx target, rtx subtarget) |
| { |
| tree arglist = TREE_OPERAND (exp, 1); |
| tree arg0, arg1; |
| |
| if (! validate_arglist (arglist, REAL_TYPE, REAL_TYPE, VOID_TYPE)) |
| return 0; |
| |
| arg0 = TREE_VALUE (arglist); |
| arg1 = TREE_VALUE (TREE_CHAIN (arglist)); |
| |
| if (TREE_CODE (arg1) == REAL_CST |
| && ! TREE_CONSTANT_OVERFLOW (arg1)) |
| { |
| REAL_VALUE_TYPE cint; |
| REAL_VALUE_TYPE c; |
| HOST_WIDE_INT n; |
| |
| c = TREE_REAL_CST (arg1); |
| n = real_to_integer (&c); |
| real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0); |
| if (real_identical (&c, &cint)) |
| { |
| /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact. |
| Otherwise, check the number of multiplications required. |
| Note that pow never sets errno for an integer exponent. */ |
| if ((n >= -1 && n <= 2) |
| || (flag_unsafe_math_optimizations |
| && ! optimize_size |
| && powi_cost (n) <= POWI_MAX_MULTS)) |
| { |
| enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); |
| rtx op = expand_expr (arg0, subtarget, VOIDmode, 0); |
| op = force_reg (mode, op); |
| return expand_powi (op, mode, n); |
| } |
| } |
| } |
| |
| if (! flag_unsafe_math_optimizations) |
| return NULL_RTX; |
| return expand_builtin_mathfn_2 (exp, target, subtarget); |
| } |
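| |
| /* Examples: pow (x, 2.0) is always expanded inline as x*x, since an |
| exponent of -1, 0, 1 or 2 is exact and pow never sets errno for an |
| integer exponent; pow (x, 8.0), which powi_cost rates at three |
| multiplications, is expanded only when flag_unsafe_math_optimizations |
| is set and we are not optimizing for size. */ |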
| |
| /* Expand a call to the powi built-in mathematical function. Return 0 if |
| a normal call should be emitted rather than expanding the function |
| in-line. EXP is the expression that is a call to the builtin |
| function; if convenient, the result should be placed in TARGET. */ |
| |
| static rtx |
| expand_builtin_powi (tree exp, rtx target, rtx subtarget) |
| { |
| tree arglist = TREE_OPERAND (exp, 1); |
| tree arg0, arg1; |
| rtx op0, op1; |
| enum machine_mode mode; |
| /* APPLE LOCAL mainline 2005-03-30 */ |
| enum machine_mode mode2; |
| |
| if (! validate_arglist (arglist, REAL_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| return 0; |
| |
| arg0 = TREE_VALUE (arglist); |
| arg1 = TREE_VALUE (TREE_CHAIN (arglist)); |
| mode = TYPE_MODE (TREE_TYPE (exp)); |
| |
| /* Handle constant power. */ |
| |
| if (TREE_CODE (arg1) == INTEGER_CST |
| && ! TREE_CONSTANT_OVERFLOW (arg1)) |
| { |
| HOST_WIDE_INT n = TREE_INT_CST_LOW (arg1); |
| |
| /* If the exponent is -1, 0, 1 or 2, then expand_powi is exact. |
| Otherwise, check the number of multiplications required. */ |
| if ((TREE_INT_CST_HIGH (arg1) == 0 |
| || TREE_INT_CST_HIGH (arg1) == -1) |
| && ((n >= -1 && n <= 2) |
| || (! optimize_size |
| && powi_cost (n) <= POWI_MAX_MULTS))) |
| { |
| op0 = expand_expr (arg0, subtarget, VOIDmode, 0); |
| op0 = force_reg (mode, op0); |
| return expand_powi (op0, mode, n); |
| } |
| } |
| |
| /* Emit a libcall to libgcc. */ |
| |
| /* APPLE LOCAL begin mainline 2005-03-30 */ |
| /* Mode of the 2nd argument must match that of an int. */ |
| mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0); |
| |
| if (target == NULL_RTX) |
| target = gen_reg_rtx (mode); |
| |
| op0 = expand_expr (arg0, subtarget, mode, 0); |
| if (GET_MODE (op0) != mode) |
| op0 = convert_to_mode (mode, op0, 0); |
| op1 = expand_expr (arg1, 0, mode2, 0); |
| if (GET_MODE (op1) != mode2) |
| op1 = convert_to_mode (mode2, op1, 0); |
| |
| target = emit_library_call_value (powi_optab->handlers[(int) mode].libfunc, |
| target, LCT_CONST_MAKE_BLOCK, mode, 2, |
| op0, mode, op1, mode2); |
| /* APPLE LOCAL end mainline 2005-03-30 */ |
| |
| return target; |
| } |
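| |
| /* E.g. __builtin_powi (x, 3) becomes two inline multiplications, |
| while a non-constant exponent reaches the libcall above (typically |
| __powidf2 in libgcc for DFmode) with the exponent converted to the |
| mode of an int. */ |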
| |
| /* Expand a call to the strlen builtin, with arguments in ARGLIST. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call; otherwise try to get the result in TARGET, if convenient. */ |
| |
| static rtx |
| expand_builtin_strlen (tree arglist, rtx target, |
| enum machine_mode target_mode) |
| { |
| if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE)) |
| return 0; |
| else |
| { |
| rtx pat; |
| tree len, src = TREE_VALUE (arglist); |
| rtx result, src_reg, char_rtx, before_strlen; |
| enum machine_mode insn_mode = target_mode, char_mode; |
| enum insn_code icode = CODE_FOR_nothing; |
| int align; |
| |
| /* If the length can be computed at compile-time, return it. */ |
| len = c_strlen (src, 0); |
| if (len) |
| return expand_expr (len, target, target_mode, EXPAND_NORMAL); |
| |
| /* If the length can be computed at compile-time and is a constant |
| integer, but there are side-effects in src, evaluate |
| src for side-effects, then return len. |
| E.g. x = strlen (i++ ? "xfoo" + 1 : "bar"); |
| can be optimized into: i++; x = 3; */ |
| len = c_strlen (src, 1); |
| if (len && TREE_CODE (len) == INTEGER_CST) |
| { |
| expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL); |
| return expand_expr (len, target, target_mode, EXPAND_NORMAL); |
| } |
| |
| align = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT; |
| |
| /* If SRC is not a pointer type, don't do this operation inline. */ |
| if (align == 0) |
| return 0; |
| |
| /* Bail out if we can't compute strlen in the right mode. */ |
| while (insn_mode != VOIDmode) |
| { |
| icode = strlen_optab->handlers[(int) insn_mode].insn_code; |
| if (icode != CODE_FOR_nothing) |
| break; |
| |
| insn_mode = GET_MODE_WIDER_MODE (insn_mode); |
| } |
| if (insn_mode == VOIDmode) |
| return 0; |
| |
| /* Make a place to write the result of the instruction. */ |
| result = target; |
| if (! (result != 0 |
| && REG_P (result) |
| && GET_MODE (result) == insn_mode |
| && REGNO (result) >= FIRST_PSEUDO_REGISTER)) |
| result = gen_reg_rtx (insn_mode); |
| |
| /* Make a place to hold the source address. We will not expand |
| the actual source until we are sure that the expansion will |
| not fail -- there are trees that cannot be expanded twice. */ |
| src_reg = gen_reg_rtx (Pmode); |
| |
| /* Mark the beginning of the strlen sequence so we can emit the |
| source operand later. */ |
| before_strlen = get_last_insn (); |
| |
| char_rtx = const0_rtx; |
| char_mode = insn_data[(int) icode].operand[2].mode; |
| if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx, |
| char_mode)) |
| char_rtx = copy_to_mode_reg (char_mode, char_rtx); |
| |
| pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg), |
| char_rtx, GEN_INT (align)); |
| if (! pat) |
| return 0; |
| emit_insn (pat); |
| |
| /* Now that we are assured of success, expand the source. */ |
| start_sequence (); |
| pat = expand_expr (src, src_reg, ptr_mode, EXPAND_NORMAL); |
| if (pat != src_reg) |
| emit_move_insn (src_reg, pat); |
| pat = get_insns (); |
| end_sequence (); |
| |
| if (before_strlen) |
| emit_insn_after (pat, before_strlen); |
| else |
| emit_insn_before (pat, get_insns ()); |
| |
| /* Return the value in the proper mode for this function. */ |
| if (GET_MODE (result) == target_mode) |
| target = result; |
| else if (target != 0) |
| convert_move (target, result, 0); |
| else |
| target = convert_to_mode (target_mode, result, 0); |
| |
| return target; |
| } |
| } |
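| |
| /* E.g. strlen ("hello") never reaches the strlen insn probing above; |
| the first c_strlen call folds it straight to the constant 5. */ |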
| |
| /* Expand a call to the strstr builtin. Return 0 if we failed, in |
| which case the caller should emit a normal call; otherwise try to get |
| the result in TARGET, if convenient (and in mode MODE if that's |
| convenient). */ |
| |
| static rtx |
| expand_builtin_strstr (tree arglist, tree type, rtx target, enum machine_mode mode) |
| { |
| if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
| { |
| tree result = fold_builtin_strstr (arglist, type); |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| } |
| return 0; |
| } |
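| |
| /* E.g. fold_builtin_strstr can evaluate strstr on two constant |
| strings at compile time, and an empty constant needle presumably |
| folds to the haystack pointer itself. */ |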
| |
| /* Expand a call to the strchr builtin. Return 0 if we failed, in |
| which case the caller should emit a normal call; otherwise try to get |
| the result in TARGET, if convenient (and in mode MODE if that's |
| convenient). */ |
| |
| static rtx |
| expand_builtin_strchr (tree arglist, tree type, rtx target, enum machine_mode mode) |
| { |
| if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| { |
| tree result = fold_builtin_strchr (arglist, type); |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| |
| /* FIXME: Should use strchrM optab so that ports can optimize this. */ |
| } |
| return 0; |
| } |
| |
| /* Expand a call to the strrchr builtin. Return 0 if we failed, in |
| which case the caller should emit a normal call; otherwise try to get |
| the result in TARGET, if convenient (and in mode MODE if that's |
| convenient). */ |
| |
| static rtx |
| expand_builtin_strrchr (tree arglist, tree type, rtx target, enum machine_mode mode) |
| { |
| if (validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| { |
| tree result = fold_builtin_strrchr (arglist, type); |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| } |
| return 0; |
| } |
| |
| /* Expand a call to the strpbrk builtin. Return 0 if we failed, in |
| which case the caller should emit a normal call; otherwise try to get |
| the result in TARGET, if convenient (and in mode MODE if that's |
| convenient). */ |
| |
| static rtx |
| expand_builtin_strpbrk (tree arglist, tree type, rtx target, enum machine_mode mode) |
| { |
| if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
| { |
| tree result = fold_builtin_strpbrk (arglist, type); |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| } |
| return 0; |
| } |
| |
| /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE) |
| bytes from constant string DATA + OFFSET and return it as target |
| constant. */ |
| |
| static rtx |
| builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset, |
| enum machine_mode mode) |
| { |
| const char *str = (const char *) data; |
| |
| gcc_assert (offset >= 0 |
| && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode) |
| <= strlen (str) + 1)); |
| |
| return c_readstr (str + offset, mode); |
| } |
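| |
| /* E.g. with DATA == "abcdef", OFFSET == 2 and a 4-byte MODE, this |
| returns the target constant holding the bytes "cdef"; byte order is |
| handled by c_readstr. */ |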
| |
| /* Expand a call to the memcpy builtin, with arguments in ARGLIST. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call; otherwise try to get the result in TARGET, if convenient (and |
| in mode MODE if that's convenient). */ |
| static rtx |
| expand_builtin_memcpy (tree exp, rtx target, enum machine_mode mode) |
| { |
| tree arglist = TREE_OPERAND (exp, 1); |
| if (!validate_arglist (arglist, |
| POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| return 0; |
| else |
| { |
| tree dest = TREE_VALUE (arglist); |
| tree src = TREE_VALUE (TREE_CHAIN (arglist)); |
| tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); |
| const char *src_str; |
| unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT); |
| unsigned int dest_align |
| = get_pointer_alignment (dest, BIGGEST_ALIGNMENT); |
| rtx dest_mem, src_mem, dest_addr, len_rtx; |
| tree result = fold_builtin_memcpy (exp); |
| |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| |
| /* If DEST is not a pointer type, call the normal function. */ |
| if (dest_align == 0) |
| return 0; |
| |
| /* If SRC is not a pointer type, don't do this |
| operation in-line. */ |
| if (src_align == 0) |
| return 0; |
| |
| dest_mem = get_memory_rtx (dest); |
| set_mem_align (dest_mem, dest_align); |
| len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); |
| src_str = c_getstr (src); |
| |
| /* If SRC is a string constant and block move would be done |
| by pieces, we can avoid loading the string from memory |
| and need only store the computed constants. */ |
| if (src_str |
| && GET_CODE (len_rtx) == CONST_INT |
| && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1 |
| && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str, |
| (void *) src_str, dest_align)) |
| { |
| dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx), |
| builtin_memcpy_read_str, |
| (void *) src_str, dest_align, 0); |
| dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX); |
| dest_mem = convert_memory_address (ptr_mode, dest_mem); |
| return dest_mem; |
| } |
| |
| src_mem = get_memory_rtx (src); |
| set_mem_align (src_mem, src_align); |
| |
| /* Copy word part most expediently. */ |
| dest_addr = emit_block_move (dest_mem, src_mem, len_rtx, |
| CALL_EXPR_TAILCALL (exp) |
| ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL); |
| |
| if (dest_addr == 0) |
| { |
| dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX); |
| dest_addr = convert_memory_address (ptr_mode, dest_addr); |
| } |
| return dest_addr; |
| } |
| } |
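| |
| /* E.g. memcpy (buf, "abc", 4), when the target can store by pieces |
| at BUF's alignment, is emitted as immediate stores of the four |
| constant bytes instead of a library call. */ |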
| |
| /* Expand a call to the mempcpy builtin, with arguments in ARGLIST. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call; otherwise try to get the result in TARGET, if convenient (and |
| in mode MODE if that's convenient). If ENDP is 0 return the |
| destination pointer, if ENDP is 1 return the end pointer a la |
| mempcpy, and if ENDP is 2 return the end pointer minus one a la |
| stpcpy. */ |
| |
| static rtx |
| expand_builtin_mempcpy (tree arglist, tree type, rtx target, enum machine_mode mode, |
| int endp) |
| { |
| if (!validate_arglist (arglist, |
| POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| return 0; |
| /* If the return value is ignored, transform mempcpy into memcpy. */ |
| else if (target == const0_rtx) |
| { |
| tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY]; |
| |
| if (!fn) |
| return 0; |
| |
| return expand_expr (build_function_call_expr (fn, arglist), |
| target, mode, EXPAND_NORMAL); |
| } |
| else |
| { |
| tree dest = TREE_VALUE (arglist); |
| tree src = TREE_VALUE (TREE_CHAIN (arglist)); |
| tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); |
| const char *src_str; |
| unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT); |
| unsigned int dest_align |
| = get_pointer_alignment (dest, BIGGEST_ALIGNMENT); |
| rtx dest_mem, src_mem, len_rtx; |
| tree result = fold_builtin_mempcpy (arglist, type, endp); |
| |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| |
| /* If either SRC or DEST is not a pointer type, don't do this |
| operation in-line. */ |
| if (dest_align == 0 || src_align == 0) |
| return 0; |
| |
| /* If LEN is not constant, call the normal function. */ |
| if (! host_integerp (len, 1)) |
| return 0; |
| |
| len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); |
| src_str = c_getstr (src); |
| |
| /* If SRC is a string constant and block move would be done |
| by pieces, we can avoid loading the string from memory |
| and need only store the computed constants. */ |
| if (src_str |
| && GET_CODE (len_rtx) == CONST_INT |
| && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1 |
| && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str, |
| (void *) src_str, dest_align)) |
| { |
| dest_mem = get_memory_rtx (dest); |
| set_mem_align (dest_mem, dest_align); |
| dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx), |
| builtin_memcpy_read_str, |
| (void *) src_str, dest_align, endp); |
| dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX); |
| dest_mem = convert_memory_address (ptr_mode, dest_mem); |
| return dest_mem; |
| } |
| |
| if (GET_CODE (len_rtx) == CONST_INT |
| && can_move_by_pieces (INTVAL (len_rtx), |
| MIN (dest_align, src_align))) |
| { |
| dest_mem = get_memory_rtx (dest); |
| set_mem_align (dest_mem, dest_align); |
| src_mem = get_memory_rtx (src); |
| set_mem_align (src_mem, src_align); |
| dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx), |
| MIN (dest_align, src_align), endp); |
| dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX); |
| dest_mem = convert_memory_address (ptr_mode, dest_mem); |
| return dest_mem; |
| } |
| |
| return 0; |
| } |
| } |
| |
| /* Expand a call to the memmove builtin, with arguments in ARGLIST. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call. */ |
| |
| static rtx |
| expand_builtin_memmove (tree arglist, tree type, rtx target, |
| enum machine_mode mode, tree orig_exp) |
| { |
| if (!validate_arglist (arglist, |
| POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| return 0; |
| else |
| { |
| tree dest = TREE_VALUE (arglist); |
| tree src = TREE_VALUE (TREE_CHAIN (arglist)); |
| tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); |
| |
| unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT); |
| unsigned int dest_align |
| = get_pointer_alignment (dest, BIGGEST_ALIGNMENT); |
| tree result = fold_builtin_memmove (arglist, type); |
| |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| |
| /* If DEST is not a pointer type, call the normal function. */ |
| if (dest_align == 0) |
| return 0; |
| |
| /* If SRC is not a pointer type, don't do this |
| operation in-line. */ |
| if (src_align == 0) |
| return 0; |
| |
| /* If SRC is categorized for a read-only section, we can use |
| normal memcpy. */ |
| if (readonly_data_expr (src)) |
| { |
| tree fn = implicit_built_in_decls[BUILT_IN_MEMCPY]; |
| if (!fn) |
| return 0; |
| fn = build_function_call_expr (fn, arglist); |
| if (TREE_CODE (fn) == CALL_EXPR) |
| CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp); |
| return expand_expr (fn, target, mode, EXPAND_NORMAL); |
| } |
| |
| /* If the length is 1 and we can expand the memcpy call inline, |
| it is OK to use memcpy as well. */ |
| if (integer_onep (len)) |
| { |
| rtx ret = expand_builtin_mempcpy (arglist, type, target, mode, |
| /*endp=*/0); |
| if (ret) |
| return ret; |
| } |
| |
| /* Otherwise, call the normal function. */ |
| return 0; |
| } |
| } |
| |
| /* Expand expression EXP, which is a call to the bcopy builtin. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call. */ |
| |
| static rtx |
| expand_builtin_bcopy (tree exp) |
| { |
| tree arglist = TREE_OPERAND (exp, 1); |
| tree type = TREE_TYPE (exp); |
| tree src, dest, size, newarglist; |
| |
| if (!validate_arglist (arglist, |
| POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| return NULL_RTX; |
| |
| src = TREE_VALUE (arglist); |
| dest = TREE_VALUE (TREE_CHAIN (arglist)); |
| size = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))); |
| |
| /* New argument list transforming bcopy(ptr x, ptr y, int z) to |
| memmove(ptr y, ptr x, size_t z). This is done this way |
| so that if it isn't expanded inline, we fall back to |
| calling bcopy instead of memmove. */ |
| |
| newarglist = build_tree_list (NULL_TREE, fold_convert (sizetype, size)); |
| newarglist = tree_cons (NULL_TREE, src, newarglist); |
| newarglist = tree_cons (NULL_TREE, dest, newarglist); |
| |
| return expand_builtin_memmove (newarglist, type, const0_rtx, VOIDmode, exp); |
| } |
| |
| #ifndef HAVE_movstr |
| # define HAVE_movstr 0 |
| # define CODE_FOR_movstr CODE_FOR_nothing |
| #endif |
| |
| /* Expand into a movstr instruction, if one is available. Return 0 if |
| we failed, in which case the caller should emit a normal call; |
| otherwise try to get the result in TARGET, if convenient. If ENDP is |
| 0 return the destination pointer, if ENDP is 1 return the end pointer |
| a la mempcpy, and if ENDP is 2 return the end pointer minus one a la |
| stpcpy. */ |
| |
| static rtx |
| expand_movstr (tree dest, tree src, rtx target, int endp) |
| { |
| rtx end; |
| rtx dest_mem; |
| rtx src_mem; |
| rtx insn; |
| const struct insn_data * data; |
| |
| if (!HAVE_movstr) |
| return 0; |
| |
| dest_mem = get_memory_rtx (dest); |
| src_mem = get_memory_rtx (src); |
| if (!endp) |
| { |
| target = force_reg (Pmode, XEXP (dest_mem, 0)); |
| dest_mem = replace_equiv_address (dest_mem, target); |
| end = gen_reg_rtx (Pmode); |
| } |
| else |
| { |
| if (target == 0 || target == const0_rtx) |
| { |
| end = gen_reg_rtx (Pmode); |
| if (target == 0) |
| target = end; |
| } |
| else |
| end = target; |
| } |
| |
| data = insn_data + CODE_FOR_movstr; |
| |
| if (data->operand[0].mode != VOIDmode) |
| end = gen_lowpart (data->operand[0].mode, end); |
| |
| insn = data->genfun (end, dest_mem, src_mem); |
| |
| gcc_assert (insn); |
| |
| emit_insn (insn); |
| |
| /* movstr is supposed to set end to the address of the NUL |
| terminator. If the caller requested a mempcpy-like return value, |
| adjust it. */ |
| if (endp == 1 && target != const0_rtx) |
| { |
| rtx tem = plus_constant (gen_lowpart (GET_MODE (target), end), 1); |
| emit_move_insn (target, force_operand (tem, NULL_RTX)); |
| } |
| |
| return target; |
| } |
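| |
| /* Note the ENDP adjustment above: movstr leaves END pointing at the |
| NUL terminator, which is already the stpcpy-style (ENDP == 2) result, |
| so only the mempcpy-style ENDP == 1 result needs the extra +1. */ |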
| |
| /* Expand expression EXP, which is a call to the strcpy builtin. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call; otherwise try to get the result in TARGET, if convenient (and |
| in mode MODE if that's convenient). */ |
| |
| static rtx |
| expand_builtin_strcpy (tree exp, rtx target, enum machine_mode mode) |
| { |
| tree arglist = TREE_OPERAND (exp, 1); |
| if (validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
| { |
| tree result = fold_builtin_strcpy (exp, 0); |
| if (result) |
| return expand_expr (result, target, mode, EXPAND_NORMAL); |
| |
| return expand_movstr (TREE_VALUE (arglist), |
| TREE_VALUE (TREE_CHAIN (arglist)), |
| target, /*endp=*/0); |
| } |
| return 0; |
| } |
| |
| /* Expand a call to the stpcpy builtin, with arguments in ARGLIST. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call; otherwise try to get the result in TARGET, if convenient (and |
| in mode MODE if that's convenient). */ |
| |
| static rtx |
| expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode) |
| { |
| tree arglist = TREE_OPERAND (exp, 1); |
| /* If the return value is ignored, transform stpcpy into strcpy. */ |
| if (target == const0_rtx) |
| { |
| tree fn = implicit_built_in_decls[BUILT_IN_STRCPY]; |
| if (!fn) |
| return 0; |
| |
| return expand_expr (build_function_call_expr (fn, arglist), |
| target, mode, EXPAND_NORMAL); |
| } |
| |
| if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)) |
| return 0; |
| else |
| { |
| tree dst, src, len, lenp1; |
| tree narglist; |
| rtx ret; |
| |
| /* Ensure we get an actual string whose length can be evaluated at |
| compile-time, not an expression containing a string. This is |
| because the latter will potentially produce pessimized code |
| when used to produce the return value. */ |
| src = TREE_VALUE (TREE_CHAIN (arglist)); |
| if (! c_getstr (src) || ! (len = c_strlen (src, 0))) |
| return expand_movstr (TREE_VALUE (arglist), |
| TREE_VALUE (TREE_CHAIN (arglist)), |
| target, /*endp=*/2); |
| |
| dst = TREE_VALUE (arglist); |
| lenp1 = size_binop (PLUS_EXPR, len, ssize_int (1)); |
| narglist = build_tree_list (NULL_TREE, lenp1); |
| narglist = tree_cons (NULL_TREE, src, narglist); |
| narglist = tree_cons (NULL_TREE, dst, narglist); |
| ret = expand_builtin_mempcpy (narglist, TREE_TYPE (exp), |
| target, mode, /*endp=*/2); |
| |
| if (ret) |
| return ret; |
| |
| if (TREE_CODE (len) == INTEGER_CST) |
| { |
| rtx len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0); |
| |
| if (GET_CODE (len_rtx) == CONST_INT) |
| { |
| ret = expand_builtin_strcpy (exp, target, mode); |
| |
| if (ret) |
| { |
| if (! target) |
| { |
| if (mode != VOIDmode) |
| target = gen_reg_rtx (mode); |
| else |
| target = gen_reg_rtx (GET_MODE (ret)); |
| } |
| if (GET_MODE (target) != GET_MODE (ret)) |
| ret = gen_lowpart (GET_MODE (target), ret); |
| |
| ret = plus_constant (ret, INTVAL (len_rtx)); |
| ret = emit_move_insn (target, force_operand (ret, NULL_RTX)); |
| gcc_assert (ret); |
| |
| return target; |
| } |
| } |
| } |
| |
| return expand_movstr (TREE_VALUE (arglist), |
| TREE_VALUE (TREE_CHAIN (arglist)), |
| target, /*endp=*/2); |
| } |
| } |
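| |
| /* E.g. stpcpy (d, "abc") has LEN == 3 and is rewritten as |
| mempcpy (d, "abc", 4) with ENDP == 2, whose value d + 3 points at the |
| copied NUL, exactly stpcpy's return value. */ |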
| |
| /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE) |
| bytes from constant string DATA + OFFSET and return it as target |
| constant. */ |
| |
| static rtx |
| builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset, |
| enum machine_mode mode) |
| { |
| const char *str = (const char *) data; |
| |
| if ((unsigned HOST_WIDE_INT) offset > strlen (str)) |
| return const0_rtx; |
| |
| return c_readstr (str + offset, mode); |
| } |
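| |
| /* Unlike builtin_memcpy_read_str, an OFFSET beyond the end of STR is |
| legitimate here and yields zero bytes, matching strncpy's NUL padding |
| of the destination. */ |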
| |
| /* Expand expression EXP, which is a call to the strncpy builtin. |
| Return 0 if we failed, in which case the caller should emit a normal |
| call. */ |
| |
| static rtx |
| expand_builtin_strncpy (tree exp, rtx target, enum machine_mode mode) |
| { |
| tree arglist = TREE_OPERAND (exp, 1); |
| if (validate_arglist (arglist, |
| POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)) |
| { |
|