author | Joshua Brindle <method@gentoo.org> | 2003-03-24 03:00:37 +0000
---|---|---
committer | Joshua Brindle <method@gentoo.org> | 2003-03-24 03:00:37 +0000
commit | db4a165ad6f5f804efc246c76fc88fa68491614e (patch) |
tree | 0593dae7308ab9acb1fa8ea8e98085ac589489dd /sys-devel/gcc/files |
parent | added propolice support (diff) |
propolice stuff
Diffstat (limited to 'sys-devel/gcc/files')
-rw-r--r-- | sys-devel/gcc/files/3.2/protector.c | 2448
-rw-r--r-- | sys-devel/gcc/files/3.2/protector.h | 48
-rw-r--r-- | sys-devel/gcc/files/3.2/protector.patch | 1046
3 files changed, 3542 insertions, 0 deletions
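The commit below adds the ProPolice stack-smashing protector sources for GCC 3.2 (protector.c, protector.h, and the accompanying protector.patch). The RTL pass in protector.c reserves a guard slot in each function that defines a suspicious character buffer, fills it from the global `__guard` in the prologue (`rtl_prologue`), reorders locals and copies arguments so the buffers sit next to the guard (`arrange_var_order`, `copy_args_for_protection`), and in the epilogue (`rtl_epilogue`) compares the slot against `__guard`, calling `__stack_smash_handler` with the function name if it has been overwritten. Roughly, the emitted logic corresponds to the C sketch below; this is illustrative only and not part of the patch, and it assumes `__guard` and `__stack_smash_handler` are supplied by the ProPolice runtime support.

```c
/* Illustrative sketch of the checks protector.c emits at the RTL level.
   Not part of the patch; __guard and __stack_smash_handler are assumed to
   come from the ProPolice runtime support, as referenced in the patch. */
#include <string.h>

extern unsigned long __guard;                       /* random canary value */
extern void __stack_smash_handler (const char *func, unsigned long damaged);

int copy_input (const char *src)
{
  volatile unsigned long guard_area = __guard;      /* prologue: copy the canary into the frame's guard slot */
  char buf[64];                                     /* locals reordered so buffers sit next to the guard */

  strcpy (buf, src);                                /* overflowing buf hits the guard before the saved registers */

  if (guard_area != __guard)                        /* epilogue: detect a smashed frame before returning */
    __stack_smash_handler ("copy_input", guard_area);

  return buf[0];
}
```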
diff --git a/sys-devel/gcc/files/3.2/protector.c b/sys-devel/gcc/files/3.2/protector.c new file mode 100644 index 000000000000..854de98dbef1 --- /dev/null +++ b/sys-devel/gcc/files/3.2/protector.c @@ -0,0 +1,2448 @@ +/* RTL buffer overflow protection function for GNU C compiler + Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc. + +This file is part of GCC. + +GCC is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free +Software Foundation; either version 2, or (at your option) any later +version. + +GCC is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +for more details. + +You should have received a copy of the GNU General Public License +along with GCC; see the file COPYING. If not, write to the Free +Software Foundation, 59 Temple Place - Suite 330, Boston, MA +02111-1307, USA. */ + +#include "config.h" +#include "system.h" +#include "machmode.h" + +#include "rtl.h" +#include "tree.h" +#include "regs.h" +#include "flags.h" +#include "insn-config.h" +#include "insn-flags.h" +#include "expr.h" +#include "output.h" +#include "recog.h" +#include "hard-reg-set.h" +#include "real.h" +#include "except.h" +#include "function.h" +#include "toplev.h" +#include "conditions.h" +#include "insn-attr.h" +#include "c-tree.h" +#include "optabs.h" +#include "protector.h" + + +void prepare_stack_protection PARAMS ((int inlinable)); +int search_string_def PARAMS ((tree names)); +rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int)); + + +/* Warn when not issuing stack smashing protection for some reason */ +int warn_stack_protector; + +/* Round a value to the lowest integer less than it that is a multiple of + the required alignment. Avoid using division in case the value is + negative. Assume the alignment is a power of two. */ +#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1)) + +/* Similar, but round to the next highest integer that meets the + alignment. */ +#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1)) + + +/* Nonzero means use propolice as a stack protection method */ +extern int flag_propolice_protection; + +/* This file contains several memory arrangement functions to protect + the return address and the frame pointer of the stack + from a stack-smashing attack. It also + provides the function that protects pointer variables. 
*/ + +/* Nonzero if function being compiled can define string buffers that may be + damaged by the stack-smash attack */ +static int current_function_defines_vulnerable_string; +static int current_function_defines_short_string; +static int current_function_has_variable_string; +static int current_function_defines_vsized_array; +static int current_function_is_inlinable; + +static rtx guard_area, _guard; +static rtx function_first_insn, prologue_insert_point; +static rtx debuginsn; + +/* */ +static HOST_WIDE_INT sweep_frame_offset; +static HOST_WIDE_INT push_allocated_offset = 0; +static HOST_WIDE_INT push_frame_offset = 0; +static int saved_cse_not_expected = 0; + +static int search_string_from_argsandvars PARAMS ((int caller)); +static int search_string_from_local_vars PARAMS ((tree block)); +static int search_pointer_def PARAMS ((tree names)); +static int search_func_pointer PARAMS ((tree type, int mark)); +static void reset_used_flags_for_insns PARAMS ((rtx insn)); +static void reset_used_flags_for_decls PARAMS ((tree block)); +static void reset_used_flags_of_plus PARAMS ((rtx x)); +static void rtl_prologue PARAMS ((rtx insn)); +static void rtl_epilogue PARAMS ((rtx fnlastinsn)); +static void arrange_var_order PARAMS ((tree blocks)); +static void copy_args_for_protection PARAMS ((void)); +static void sweep_string_variable PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size)); +static void sweep_string_in_decls PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size)); +static void sweep_string_in_args PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size)); +static void sweep_string_use_of_insns PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size)); +static void sweep_string_in_operand PARAMS ((rtx insn, rtx *loc, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size)); +static void move_arg_location PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)); +static void change_arg_use_of_insns PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)); +static void change_arg_use_of_insns_2 PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)); +static void change_arg_use_in_operand PARAMS ((rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)); +static void validate_insns_of_varrefs PARAMS ((rtx insn)); +static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc)); + +#define SUSPICIOUS_BUF_SIZE 8 + +#define AUTO_BASEPTR(X) \ + (GET_CODE (X) == PLUS ? XEXP (X, 0) : X) +#define AUTO_OFFSET(X) \ + (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0) +#undef PARM_PASSED_IN_MEMORY +#define PARM_PASSED_IN_MEMORY(PARM) \ + (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM) +#define VIRTUAL_STACK_VARS_P(X) \ + ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used)) + + + +void +prepare_stack_protection (inlinable) + int inlinable; +{ + tree blocks = DECL_INITIAL (current_function_decl); + current_function_is_inlinable = inlinable && !flag_no_inline; + push_frame_offset = push_allocated_offset = 0; + saved_cse_not_expected = 0; + + /* + skip the protection if the function has no block or it is an inline function + */ + if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ()); + if (! 
blocks || current_function_is_inlinable) return; + + current_function_defines_vulnerable_string = search_string_from_argsandvars (0); + + if (current_function_defines_vulnerable_string) + { + HOST_WIDE_INT offset; + function_first_insn = get_insns (); + + if (current_function_contains_functions) { + if (warn_stack_protector) + warning ("not protecting function: it contains functions"); + return; + } + + /* Initialize recognition, indicating that volatile is OK. */ + init_recog (); + + sweep_frame_offset = 0; + +#ifdef STACK_GROWS_DOWNWARD + /* + frame_offset: offset to end of allocated area of stack frame. + It is defined in the function.c + */ + + /* the location must be before buffers */ + guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1); + PUT_MODE (guard_area, GUARD_m); + MEM_VOLATILE_P (guard_area) = 1; + +#ifndef FRAME_GROWS_DOWNWARD + sweep_frame_offset = frame_offset; +#endif + + /* For making room for guard value, scan all insns and fix the offset address + of the variable that is based on frame pointer. + Scan all declarations of variables and fix the offset address of the variable that + is based on the frame pointer */ + sweep_string_variable (guard_area, UNITS_PER_GUARD); + + + /* the location of guard area moves to the beginning of stack frame */ + if ((offset = AUTO_OFFSET(XEXP (guard_area, 0)))) + XEXP (XEXP (guard_area, 0), 1) = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset); + + + /* Insert prologue rtl instructions */ + rtl_prologue (function_first_insn); + + if (! current_function_has_variable_string) + { + /* Generate argument saving instruction */ + copy_args_for_protection (); + +#ifndef FRAME_GROWS_DOWNWARD + /* If frame grows upward, character string copied from an arg stays top of + the guard variable. So sweep the guard variable again */ + sweep_frame_offset = CEIL_ROUND (frame_offset, BIGGEST_ALIGNMENT / BITS_PER_UNIT); + sweep_string_variable (guard_area, UNITS_PER_GUARD); +#endif + } + else if (warn_stack_protector) + warning ("not protecting variables: it has a variable length buffer"); +#endif +#ifndef FRAME_GROWS_DOWNWARD + if (STARTING_FRAME_OFFSET == 0) + { + /* this may be only for alpha */ + push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT; + assign_stack_local (BLKmode, push_allocated_offset, -1); + sweep_frame_offset = frame_offset; + sweep_string_variable (const0_rtx, -push_allocated_offset); + sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0)); + } +#endif + + /* Arrange the order of local variables */ + arrange_var_order (blocks); + +#ifdef STACK_GROWS_DOWNWARD + /* Insert epilogue rtl instructions */ + rtl_epilogue (get_last_insn ()); +#endif + init_recog_no_volatile (); + } + else if (current_function_defines_short_string + && warn_stack_protector) + warning ("not protecting function: buffer is less than %d bytes long", + SUSPICIOUS_BUF_SIZE); +} + +/* + search string from arguments and local variables + caller: 0 means call from protector_stack_protection + 1 means call from push_frame +*/ +static int +search_string_from_argsandvars (caller) + int caller; +{ + tree blocks, parms; + int string_p; + + /* saves a latest search result as a cached infomation */ + static tree __latest_search_decl = 0; + static int __latest_search_result = FALSE; + + if (__latest_search_decl == current_function_decl) + return __latest_search_result; + else if (caller) return FALSE; + __latest_search_decl = current_function_decl; + __latest_search_result = TRUE; + + current_function_defines_short_string = FALSE; + 
current_function_has_variable_string = FALSE; + current_function_defines_vsized_array = FALSE; + + /* + search a string variable from local variables + */ + blocks = DECL_INITIAL (current_function_decl); + string_p = search_string_from_local_vars (blocks); + + if (!current_function_defines_vsized_array && current_function_calls_alloca) + { + current_function_has_variable_string = TRUE; + return TRUE; + } + + if (string_p) return TRUE; + +#ifdef STACK_GROWS_DOWNWARD + /* + search a string variable from arguments + */ + parms = DECL_ARGUMENTS (current_function_decl); + + for (; parms; parms = TREE_CHAIN (parms)) + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node) + { + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms)) + { + string_p = search_string_def (TREE_TYPE(parms)); + if (string_p) return TRUE; + } + } +#endif + + __latest_search_result = FALSE; + return FALSE; +} + + +static int +search_string_from_local_vars (block) + tree block; +{ + tree types; + int found = FALSE; + + while (block && TREE_CODE(block)==BLOCK) + { + types = BLOCK_VARS(block); + + while (types) + { + /* skip the declaration that refers an external variable */ + /* name: types.decl.name.identifier.id */ + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types) + && TREE_CODE (types) == VAR_DECL + && ! DECL_ARTIFICIAL (types) + && DECL_RTL_SET_P (types) + && GET_CODE (DECL_RTL (types)) == MEM) + { + if (search_string_def (TREE_TYPE (types))) + { + rtx home = DECL_RTL (types); + + if (GET_CODE (home) == MEM + && (GET_CODE (XEXP (home, 0)) == MEM + || (GET_CODE (XEXP (home, 0)) == REG + && XEXP (home, 0) != virtual_stack_vars_rtx + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM +#endif + ))) + /* If the value is indirect by memory or by a register + that isn't the frame pointer + then it means the object is variable-sized and address through + that register or stack slot. The protection has no way to hide pointer variables + behind the array, so all we can do is staying the order of variables and arguments. */ + { + current_function_has_variable_string = TRUE; + } + + /* found character array */ + found = TRUE; + } + } + + types = TREE_CHAIN(types); + } + + if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block))) + { + found = TRUE; + } + + block = BLOCK_CHAIN (block); + } + + return found; +} + + +/* + * search a character array from the specified type tree + */ +int +search_string_def (type) + tree type; +{ + tree tem; + + if (! 
type) + return FALSE; + + switch (TREE_CODE (type)) + { + case ARRAY_TYPE: + /* Check if the array is a variable-sized array */ + if (TYPE_DOMAIN (type) == 0 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR)) + current_function_defines_vsized_array = TRUE; + + /* TREE_CODE( TREE_TYPE(type) ) == INTEGER_TYPE */ + if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node) + { + /* Check if the string is a variable string */ + if (TYPE_DOMAIN (type) == 0 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR)) + return TRUE; + + /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE */ + if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0 + && TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1 >= SUSPICIOUS_BUF_SIZE) + return TRUE; + + current_function_defines_short_string = TRUE; + } + return search_string_def(TREE_TYPE(type)); + + case UNION_TYPE: + case QUAL_UNION_TYPE: + case RECORD_TYPE: + /* Output the name, type, position (in bits), size (in bits) of each + field. */ + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem)) + { + /* Omit here local type decls until we know how to support them. */ + if ((TREE_CODE (tem) == TYPE_DECL) + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem))) + continue; + + if (search_string_def(TREE_TYPE(tem))) return TRUE; + } + break; + + case POINTER_TYPE: + case REFERENCE_TYPE: + /* I'm not sure whether OFFSET_TYPE needs this treatment, + so I'll play safe and return 1. */ + case OFFSET_TYPE: + default: + break; + } + + return FALSE; +} + +/* + * examine whether the input contains frame pointer addressing + */ +int +contains_fp (op) + rtx op; +{ + register enum rtx_code code; + rtx x; + int i, j; + const char *fmt; + + x = op; + if (x == 0) + return FALSE; + + code = GET_CODE (x); + + switch (code) + { + case CONST_INT: + case CONST_DOUBLE: + case CONST: + case SYMBOL_REF: + case CODE_LABEL: + case REG: + case ADDRESSOF: + return FALSE; + + case PLUS: + if (XEXP (x, 0) == virtual_stack_vars_rtx + && CONSTANT_P (XEXP (x, 1))) + return TRUE; + + default: + break; + } + + /* Scan all subexpressions. */ + fmt = GET_RTX_FORMAT (code); + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) + if (*fmt == 'e') + { + if (contains_fp (XEXP (x, i))) return TRUE; + } + else if (*fmt == 'E') + for (j = 0; j < XVECLEN (x, i); j++) + if (contains_fp (XVECEXP (x, i, j))) return TRUE; + + return FALSE; +} + + +static int +search_pointer_def (type) + tree type; +{ + tree tem; + + if (! type) + return FALSE; + + switch (TREE_CODE (type)) + { + case UNION_TYPE: + case QUAL_UNION_TYPE: + case RECORD_TYPE: + /* Output the name, type, position (in bits), size (in bits) of each + field. */ + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem)) + { + /* Omit here local type decls until we know how to support them. */ + if ((TREE_CODE (tem) == TYPE_DECL) + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem))) + continue; + + if (search_pointer_def (TREE_TYPE(tem))) return TRUE; + } + break; + + case ARRAY_TYPE: + return search_pointer_def (TREE_TYPE(type)); + + case POINTER_TYPE: + case REFERENCE_TYPE: + /* I'm not sure whether OFFSET_TYPE needs this treatment, + so I'll play safe and return 1. 
*/ + case OFFSET_TYPE: + if (TYPE_READONLY (TREE_TYPE (type))) + { + int funcp = search_func_pointer (TREE_TYPE (type), 1); + /* Un-mark the type as having been visited already */ + search_func_pointer (TREE_TYPE (type), 0); + return funcp; + } + return TRUE; + + default: + break; + } + + return FALSE; +} + + +static int +search_func_pointer (type, mark) + tree type; + int mark; +{ + tree tem; + + if (! type) + return FALSE; + + switch (TREE_CODE (type)) + { + case UNION_TYPE: + case QUAL_UNION_TYPE: + case RECORD_TYPE: + if (TREE_ASM_WRITTEN (type) != mark) + { + /* mark the type as having been visited already */ + TREE_ASM_WRITTEN (type) = mark; + + /* Output the name, type, position (in bits), size (in bits) of + each field. */ + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem)) + { + /* Omit here local type decls until we know how to support them. */ + if (TREE_CODE (tem) == FIELD_DECL + && search_func_pointer (TREE_TYPE(tem), mark)) return TRUE; + } + } + break; + + case ARRAY_TYPE: + return search_func_pointer (TREE_TYPE(type), mark); + + case POINTER_TYPE: + case REFERENCE_TYPE: + /* I'm not sure whether OFFSET_TYPE needs this treatment, + so I'll play safe and return 1. */ + case OFFSET_TYPE: + return TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE; + + default: + break; + } + + return FALSE; +} + + +static void +reset_used_flags_for_insns (insn) + rtx insn; +{ + register int i, j; + register enum rtx_code code; + register const char *format_ptr; + + for (; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN + || GET_CODE (insn) == CALL_INSN) + { + code = GET_CODE (insn); + insn->used = 0; + format_ptr = GET_RTX_FORMAT (code); + + for (i = 0; i < GET_RTX_LENGTH (code); i++) + { + switch (*format_ptr++) { + case 'e': + reset_used_flags_of_plus (XEXP (insn, i)); + break; + + case 'E': + for (j = 0; j < XVECLEN (insn, i); j++) + reset_used_flags_of_plus (XVECEXP (insn, i, j)); + break; + } + } + } +} + +static void +reset_used_flags_for_decls (block) + tree block; +{ + tree types; + rtx home; + + while (block && TREE_CODE(block)==BLOCK) + { + types = BLOCK_VARS(block); + + while (types) + { + /* skip the declaration that refers an external variable and + also skip an global variable */ + if (! DECL_EXTERNAL (types)) + { + if (!DECL_RTL_SET_P (types)) goto next; + home = DECL_RTL (types); + + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == PLUS + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT) + { + XEXP (home, 0)->used = 0; + } + } + next: + types = TREE_CHAIN(types); + } + + reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block)); + + block = BLOCK_CHAIN (block); + } +} + +/* Clear the USED bits only of type PLUS in X */ + +static void +reset_used_flags_of_plus (x) + rtx x; +{ + register int i, j; + register enum rtx_code code; + register const char *format_ptr; + + if (x == 0) + return; + + code = GET_CODE (x); + + /* These types may be freely shared so we needn't do any resetting + for them. */ + + switch (code) + { + case REG: + case QUEUED: + case CONST_INT: + case CONST_DOUBLE: + case SYMBOL_REF: + case CODE_LABEL: + case PC: + case CC0: + return; + + case INSN: + case JUMP_INSN: + case CALL_INSN: + case NOTE: + case LABEL_REF: + case BARRIER: + /* The chain of insns is not being copied. 
*/ + return; + + case PLUS: + x->used = 0; + break; + + case CALL_PLACEHOLDER: + reset_used_flags_for_insns (XEXP (x, 0)); + reset_used_flags_for_insns (XEXP (x, 1)); + reset_used_flags_for_insns (XEXP (x, 2)); + break; + + default: + break; + } + + format_ptr = GET_RTX_FORMAT (code); + for (i = 0; i < GET_RTX_LENGTH (code); i++) + { + switch (*format_ptr++) + { + case 'e': + reset_used_flags_of_plus (XEXP (x, i)); + break; + + case 'E': + for (j = 0; j < XVECLEN (x, i); j++) + reset_used_flags_of_plus (XVECEXP (x, i, j)); + break; + } + } +} + + +static void +rtl_prologue (insn) + rtx insn; +{ +#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main) +#undef HAS_INIT_SECTION +#define HAS_INIT_SECTION +#endif + + rtx _val; + + for (; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG) + break; + +#if !defined (HAS_INIT_SECTION) + /* If this function is `main', skip a call to `__main' + to run guard instruments after global initializers, etc. */ + if (DECL_NAME (current_function_decl) + && MAIN_NAME_P (DECL_NAME (current_function_decl)) + && DECL_CONTEXT (current_function_decl) == NULL_TREE) + { + rtx fbinsn = insn; + for (; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG) + break; + if (insn == 0) insn = fbinsn; + } +#endif + + prologue_insert_point = NEXT_INSN (insn); /* mark the next insn of FUNCTION_BEG insn */ + + start_sequence (); + + _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard")); + emit_move_insn ( guard_area, _guard); + + _val = gen_sequence (); + end_sequence (); + + emit_insn_before (_val, prologue_insert_point); +} + +static void +rtl_epilogue (insn) + rtx insn; +{ + rtx if_false_label; + rtx _val; + rtx funcname; + tree funcstr; + int flag_have_return = FALSE; + + start_sequence (); + +#ifdef HAVE_return + if (HAVE_return) + { + rtx insn; + return_label = gen_label_rtx (); + + for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == JUMP_INSN + && GET_CODE (PATTERN (insn)) == RETURN + && GET_MODE (PATTERN (insn)) == VOIDmode) + { + rtx pat = gen_rtx_SET (VOIDmode, + pc_rtx, + gen_rtx_LABEL_REF (VOIDmode, + return_label)); + PATTERN (insn) = pat; + flag_have_return = TRUE; + } + + + emit_label (return_label); + } +#endif + + compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX); /* if (guard_area != _guard) */ + + if_false_label = gen_label_rtx (); /* { */ + emit_jump_insn ( gen_beq(if_false_label)); + + /* generate string for the current function name */ + funcstr = build_string (strlen(current_function_name)+1, current_function_name); + TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);/* = char_array_type_node;*/ + funcname = output_constant_def (funcstr, 1); + + emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"), + 0, VOIDmode, 2, + XEXP (funcname, 0), Pmode, guard_area, GUARD_m); + + /* generate RTL to return from the current function */ + + emit_barrier (); /* } */ + emit_label (if_false_label); + + /* generate RTL to return from the current function */ + if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl))) + use_return_register (); + +#ifdef HAVE_return + if (HAVE_return && flag_have_return) + { + emit_jump_insn (gen_return ()); + emit_barrier (); + } +#endif + + _val = gen_sequence (); + end_sequence (); + + emit_insn_after (_val, insn); +} + + +static void +arrange_var_order (block) + tree block; +{ + tree types; + HOST_WIDE_INT offset; + + 
while (block && TREE_CODE(block)==BLOCK) + { + types = BLOCK_VARS (block); + + while (types) + { + /* skip the declaration that refers an external variable */ + /* name: types.decl.assembler_name.id */ + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types) + && TREE_CODE (types) == VAR_DECL + && ! DECL_ARTIFICIAL (types) + && ! DECL_INLINE (types) /* don't sweep inlined string */ + && DECL_RTL_SET_P (types) + && GET_CODE (DECL_RTL (types)) == MEM) + { + if (search_string_def (TREE_TYPE (types))) + { + rtx home = DECL_RTL (types); + + if (! (GET_CODE (home) == MEM + && (GET_CODE (XEXP (home, 0)) == MEM + || (GET_CODE (XEXP (home, 0)) == REG + && XEXP (home, 0) != virtual_stack_vars_rtx + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM +#endif + )))) + { + /* found a string variable */ + HOST_WIDE_INT var_size = + ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1) + / BITS_PER_UNIT); + + if (GET_MODE (DECL_RTL (types)) == BLKmode) + { + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; + var_size = CEIL_ROUND (var_size, alignment); + } + + /* skip the variable if it is top of the region + specified by sweep_frame_offset */ + offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0)); + if (offset == sweep_frame_offset - var_size) + sweep_frame_offset -= var_size; + + else if (offset < sweep_frame_offset - var_size) + sweep_string_variable (DECL_RTL (types), var_size); + } + } + } + + types = TREE_CHAIN(types); + } + + arrange_var_order (BLOCK_SUBBLOCKS (block)); + + block = BLOCK_CHAIN (block); + } +} + + +static void +copy_args_for_protection (void) +{ + tree parms = DECL_ARGUMENTS (current_function_decl); + rtx temp_rtx; + + parms = DECL_ARGUMENTS (current_function_decl); + for (; parms; parms = TREE_CHAIN (parms)) + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node) + { + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms)) + { + int string_p; + + /* + skip arguemnt protection if the last argument is used + for the variable argument + */ + /* + tree fntype; + if (TREE_CHAIN (parms) == 0) + { + fntype = TREE_TYPE (current_function_decl); + + if ((TYPE_ARG_TYPES (fntype) != 0 && + TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) != void_type_node) + || current_function_varargs) + continue; + } + */ + + string_p = search_string_def (TREE_TYPE(parms)); + + /* check if it is a candidate to move */ + if (string_p || search_pointer_def (TREE_TYPE (parms))) + { + int arg_size + = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1) + / BITS_PER_UNIT); + + start_sequence (); + + if (GET_CODE (DECL_RTL (parms)) == REG) + { + rtx safe = 0; + + change_arg_use_of_insns (prologue_insert_point, DECL_RTL (parms), &safe, 0); + if (safe) + { + /* generate codes for copying the content */ + rtx movinsn = emit_move_insn (safe, DECL_RTL (parms)); + + /* avoid register elimination in gcse.c (COPY-PROP)*/ + PATTERN (movinsn)->volatil = 1; + + /* save debugger info */ + DECL_INCOMING_RTL (parms) = safe; + } + } + + else if (GET_CODE (DECL_RTL (parms)) == MEM + && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF) + { + rtx movinsn; + rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms))); + + /* generate codes for copying the content */ + movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms)); + PATTERN (movinsn)->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/ + + /* change the addressof information to the 
newly allocated pseudo register */ + emit_move_insn (DECL_RTL (parms), safe); + + /* save debugger info */ + DECL_INCOMING_RTL (parms) = safe; + } + + else + { + /* declare temporary local variable DECL_NAME (parms) for it */ + temp_rtx + = assign_stack_local (DECL_MODE (parms), arg_size, + DECL_MODE (parms) == BLKmode ? -1 : 0); + + MEM_IN_STRUCT_P (temp_rtx) = AGGREGATE_TYPE_P (TREE_TYPE (parms)); + set_mem_alias_set (temp_rtx, get_alias_set (parms)); + + /* generate codes for copying the content */ + store_expr (parms, temp_rtx, 0); + + /* change the reference for each instructions */ + move_arg_location (prologue_insert_point, DECL_RTL (parms), + temp_rtx, arg_size); + + /* change the location of parms variable */ + SET_DECL_RTL (parms, temp_rtx); + + /* change debugger info */ + DECL_INCOMING_RTL (parms) = temp_rtx; + } + + emit_insn_before (gen_sequence (), prologue_insert_point); + end_sequence (); + +#ifdef FRAME_GROWS_DOWNWARD + /* process the string argument */ + if (string_p && DECL_MODE (parms) == BLKmode) + { + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT; + arg_size = CEIL_ROUND (arg_size, alignment); + + /* change the reference for each instructions */ + sweep_string_variable (DECL_RTL (parms), arg_size); + } +#endif + } + } + } +} + + +/* + sweep a string variable to the local variable addressed by sweep_frame_offset, that is + a last position of string variables. +*/ +static void +sweep_string_variable (sweep_var, var_size) + rtx sweep_var; + HOST_WIDE_INT var_size; +{ + HOST_WIDE_INT sweep_offset; + + switch (GET_CODE (sweep_var)) + { + case MEM: + if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF + && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG) + return; + sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0)); + break; + case CONST_INT: + sweep_offset = INTVAL (sweep_var); + break; + default: + abort (); + } + + /* scan all declarations of variables and fix the offset address of + the variable based on the frame pointer */ + sweep_string_in_decls (DECL_INITIAL (current_function_decl), sweep_offset, var_size); + + /* scan all argument variable and fix the offset address based on the frame pointer */ + sweep_string_in_args (DECL_ARGUMENTS (current_function_decl), sweep_offset, var_size); + + /* For making room for sweep variable, scan all insns and fix the offset address + of the variable that is based on frame pointer*/ + sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size); + + + /* Clear all the USED bits in operands of all insns and declarations of local vars */ + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl)); + reset_used_flags_for_insns (function_first_insn); + + sweep_frame_offset -= var_size; +} + + + +/* + move an argument to the local variable addressed by frame_offset +*/ +static void +move_arg_location (insn, orig, new, var_size) + rtx insn, orig, new; + HOST_WIDE_INT var_size; +{ + /* For making room for sweep variable, scan all insns and fix the offset address + of the variable that is based on frame pointer*/ + change_arg_use_of_insns (insn, orig, &new, var_size); + + + /* Clear all the USED bits in operands of all insns and declarations of local vars */ + reset_used_flags_for_insns (insn); +} + + +static void +sweep_string_in_decls (block, sweep_offset, sweep_size) + tree block; + HOST_WIDE_INT sweep_offset, sweep_size; +{ + tree types; + HOST_WIDE_INT offset; + rtx home; + + while (block && TREE_CODE(block)==BLOCK) + { + types = BLOCK_VARS(block); + + while (types) + { + /* skip the declaration that refers an 
external variable and + also skip an global variable */ + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) { + + if (!DECL_RTL_SET_P (types)) goto next; + home = DECL_RTL (types); + + /* process for static local variable */ + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF) + goto next; + + if (GET_CODE (home) == MEM + && XEXP (home, 0) == virtual_stack_vars_rtx) + { + offset = 0; + + /* the operand related to the sweep variable */ + if (sweep_offset <= offset + && offset < sweep_offset + sweep_size) + { + offset = sweep_frame_offset - sweep_size - sweep_offset; + + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, offset); + XEXP (home, 0)->used = 1; + } + else if (sweep_offset <= offset + && offset < sweep_frame_offset) + { /* the rest of variables under sweep_frame_offset, so shift the location */ + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, -sweep_size); + XEXP (home, 0)->used = 1; + } + } + + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == MEM) + { + /* process for dynamically allocated aray */ + home = XEXP (home, 0); + } + + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == PLUS + && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT) + { + if (! XEXP (home, 0)->used) + { + offset = AUTO_OFFSET(XEXP (home, 0)); + + /* the operand related to the sweep variable */ + if (sweep_offset <= offset + && offset < sweep_offset + sweep_size) + { + + offset += sweep_frame_offset - sweep_size - sweep_offset; + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset); + + /* mark */ + XEXP (home, 0)->used = 1; + } + else if (sweep_offset <= offset + && offset < sweep_frame_offset) + { /* the rest of variables under sweep_frame_offset, + so shift the location */ + + XEXP (XEXP (home, 0), 1) + = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size); + + /* mark */ + XEXP (home, 0)->used = 1; + } + } + } + + } + next: + types = TREE_CHAIN(types); + } + + sweep_string_in_decls (BLOCK_SUBBLOCKS (block), sweep_offset, sweep_size); + block = BLOCK_CHAIN (block); + } +} + + +static void +sweep_string_in_args (parms, sweep_offset, sweep_size) + tree parms; + HOST_WIDE_INT sweep_offset, sweep_size; +{ + rtx home; + HOST_WIDE_INT offset; + + for (; parms; parms = TREE_CHAIN (parms)) + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node) + { + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms)) + { + home = DECL_INCOMING_RTL (parms); + + if (XEXP (home, 0)->used) continue; + + offset = AUTO_OFFSET(XEXP (home, 0)); + + /* the operand related to the sweep variable */ + if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx) + { + if (sweep_offset <= offset + && offset < sweep_offset + sweep_size) + { + offset += sweep_frame_offset - sweep_size - sweep_offset; + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset); + + /* mark */ + XEXP (home, 0)->used = 1; + } + else if (sweep_offset <= offset + && offset < sweep_frame_offset) + { /* the rest of variables under sweep_frame_offset, so shift the location */ + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size); + + /* mark */ + XEXP (home, 0)->used = 1; + } + } + } + } +} + + +static int has_virtual_reg; + +static void +sweep_string_use_of_insns (insn, sweep_offset, sweep_size) + rtx insn; + HOST_WIDE_INT sweep_offset, sweep_size; +{ + for (; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN + || GET_CODE (insn) == CALL_INSN) + { + 
has_virtual_reg = FALSE; + sweep_string_in_operand (insn, &PATTERN (insn), sweep_offset, sweep_size); + } +} + + +static void +sweep_string_in_operand (insn, loc, sweep_offset, sweep_size) + rtx insn, *loc; + HOST_WIDE_INT sweep_offset, sweep_size; +{ + register rtx x = *loc; + register enum rtx_code code; + int i, j, k = 0; + HOST_WIDE_INT offset; + const char *fmt; + + if (x == 0) + return; + + code = GET_CODE (x); + + switch (code) + { + case CONST_INT: + case CONST_DOUBLE: + case CONST: + case SYMBOL_REF: + case CODE_LABEL: + case PC: + case CC0: + case ASM_INPUT: + case ADDR_VEC: + case ADDR_DIFF_VEC: + case RETURN: + case ADDRESSOF: + return; + + case REG: + if (x == virtual_incoming_args_rtx + || x == virtual_stack_vars_rtx + || x == virtual_stack_dynamic_rtx + || x == virtual_outgoing_args_rtx + || x == virtual_cfa_rtx) + has_virtual_reg = TRUE; + return; + + case SET: + /* + skip setjmp setup insn and setjmp restore insn + Example: + (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx))) + (set (virtual_stack_vars_rtx) (REG)) + */ + if (GET_CODE (XEXP (x, 0)) == MEM + && XEXP (x, 1) == virtual_stack_vars_rtx) + return; + if (XEXP (x, 0) == virtual_stack_vars_rtx + && GET_CODE (XEXP (x, 1)) == REG) + return; + break; + + case PLUS: + /* Handle typical case of frame register plus constant. */ + if (XEXP (x, 0) == virtual_stack_vars_rtx + && CONSTANT_P (XEXP (x, 1))) + { + if (x->used) goto single_use_of_virtual_reg; + + offset = AUTO_OFFSET(x); + if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */ + + /* the operand related to the sweep variable */ + if (sweep_offset <= offset + k + && offset + k < sweep_offset + sweep_size) + { + offset += sweep_frame_offset - sweep_size - sweep_offset; + + XEXP (x, 0) = virtual_stack_vars_rtx; + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset); + x->used = 1; + } + else if (sweep_offset <= offset + k + && offset + k < sweep_frame_offset) + { /* the rest of variables under sweep_frame_offset, so shift the location */ + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size); + x->used = 1; + } + + single_use_of_virtual_reg: + if (has_virtual_reg) { + /* excerpt from insn_invalid_p in recog.c */ + int icode = recog_memoized (insn); + + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0) + { + rtx temp, seq; + + start_sequence (); + temp = force_operand (x, NULL_RTX); + seq = get_insns (); + end_sequence (); + + emit_insns_before (seq, insn); + if (! validate_change (insn, loc, temp, 0) + && ! validate_replace_rtx (x, temp, insn)) + fatal_insn ("sweep_string_in_operand", insn); + } + } + + has_virtual_reg = TRUE; + return; + } + +#ifdef FRAME_GROWS_DOWNWARD + /* + alert the case of frame register plus constant given by reg. + */ + else if (XEXP (x, 0) == virtual_stack_vars_rtx + && GET_CODE (XEXP (x, 1)) == REG) + fatal_insn ("sweep_string_in_operand: unknown addressing", insn); +#endif + + /* + process further subtree: + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8))) + (const_int 5)) + */ + break; + + case CALL_PLACEHOLDER: + sweep_string_use_of_insns (XEXP (x, 0), sweep_offset, sweep_size); + sweep_string_use_of_insns (XEXP (x, 1), sweep_offset, sweep_size); + sweep_string_use_of_insns (XEXP (x, 2), sweep_offset, sweep_size); + break; + + default: + break; + } + + /* Scan all subexpressions. 
*/ + fmt = GET_RTX_FORMAT (code); + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) + if (*fmt == 'e') + { + /* + virtual_stack_vars_rtx without offset + Example: + (set (reg:SI xx) (reg:SI 78)) + (set (reg:SI xx) (MEM (reg:SI 78))) + */ + if (XEXP (x, i) == virtual_stack_vars_rtx) + fatal_insn ("sweep_string_in_operand: unknown fp usage", insn); + sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size); + } + else if (*fmt == 'E') + for (j = 0; j < XVECLEN (x, i); j++) + sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size); +} + + +/* + change a argument variable to the local variable addressed by the "new" variable. +*/ +static int flag_caui_exit; + +static void +change_arg_use_of_insns (insn, orig, new, size) + rtx insn, orig, *new; + HOST_WIDE_INT size; +{ + flag_caui_exit = FALSE; + change_arg_use_of_insns_2 (insn, orig, new, size); +} + +static void +change_arg_use_of_insns_2 (insn, orig, new, size) + rtx insn, orig, *new; + HOST_WIDE_INT size; +{ + for (; insn && !flag_caui_exit; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN + || GET_CODE (insn) == CALL_INSN) + { + change_arg_use_in_operand (PATTERN (insn), orig, new, size); + } +} + + + +static void +change_arg_use_in_operand (x, orig, new, size) + rtx x, orig, *new; + HOST_WIDE_INT size; +{ + register enum rtx_code code; + int i, j; + HOST_WIDE_INT offset; + const char *fmt; + + if (x == 0) + return; + + code = GET_CODE (x); + + switch (code) + { + case CONST_INT: + case CONST_DOUBLE: + case CONST: + case SYMBOL_REF: + case CODE_LABEL: + case PC: + case CC0: + case ASM_INPUT: + case ADDR_VEC: + case ADDR_DIFF_VEC: + case RETURN: + case REG: + case ADDRESSOF: + return; + + case MEM: + /* Handle special case of MEM (incoming_args) */ + if (GET_CODE (orig) == MEM + && XEXP (x, 0) == virtual_incoming_args_rtx) + { + offset = 0; + + /* the operand related to the sweep variable */ + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset && + offset < AUTO_OFFSET(XEXP (orig, 0)) + size) { + + offset = AUTO_OFFSET(XEXP (*new, 0)) + + (offset - AUTO_OFFSET(XEXP (orig, 0))); + + XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset); + XEXP (x, 0)->used = 1; + + return; + } + } + break; + + case PLUS: + /* Handle special case of frame register plus constant. */ + if (GET_CODE (orig) == MEM /* skip if orig is register variable in the optimization */ + && XEXP (x, 0) == virtual_incoming_args_rtx && CONSTANT_P (XEXP (x, 1)) + && ! 
x->used) + { + offset = AUTO_OFFSET(x); + + /* the operand related to the sweep variable */ + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset && + offset < AUTO_OFFSET(XEXP (orig, 0)) + size) { + + offset = AUTO_OFFSET(XEXP (*new, 0)) + + (offset - AUTO_OFFSET(XEXP (orig, 0))); + + XEXP (x, 0) = virtual_stack_vars_rtx; + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset); + x->used = 1; + + return; + } + + /* + process further subtree: + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8))) + (const_int 5)) + */ + } + break; + + case CALL_PLACEHOLDER: + change_arg_use_of_insns_2 (XEXP (x, 0), orig, new, size); if (flag_caui_exit) return; + change_arg_use_of_insns_2 (XEXP (x, 1), orig, new, size); if (flag_caui_exit) return; + change_arg_use_of_insns_2 (XEXP (x, 2), orig, new, size); if (flag_caui_exit) return; + break; + + default: + break; + } + + if (*new == 0 + && code == SET + && SET_SRC (x) == orig + && GET_CODE (SET_DEST (x)) == REG) + { + /* exit to the change_arg_use_of_insns */ + flag_caui_exit = TRUE; + x->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/ + return; + } + + /* Scan all subexpressions. */ + fmt = GET_RTX_FORMAT (code); + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) + if (*fmt == 'e') + { + if (XEXP (x, i) == orig) + { + if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig)); + XEXP (x, i) = *new; + continue; + } + change_arg_use_in_operand (XEXP (x, i), orig, new, size); + } + else if (*fmt == 'E') + for (j = 0; j < XVECLEN (x, i); j++) + { + + if (XVECEXP (x, i, j) == orig) + { + if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig)); + XVECEXP (x, i, j) = *new; + continue; + } + change_arg_use_in_operand (XVECEXP (x, i, j), orig, new, size); + } +} + + +static void +validate_insns_of_varrefs (insn) + rtx insn; +{ + rtx next; + + /* Initialize recognition, indicating that volatile is OK. */ + init_recog (); + + for (; insn; insn = next) + { + next = NEXT_INSN (insn); + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN + || GET_CODE (insn) == CALL_INSN) + { + /* excerpt from insn_invalid_p in recog.c */ + int icode = recog_memoized (insn); + + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0) + validate_operand_of_varrefs (insn, &PATTERN (insn)); + } + } + + init_recog_no_volatile (); +} + + +static void +validate_operand_of_varrefs (insn, loc) + rtx insn, *loc; +{ + register enum rtx_code code; + rtx x, temp, seq; + int i, j; + const char *fmt; + + x = *loc; + if (x == 0) + return; + + code = GET_CODE (x); + + switch (code) + { + case USE: + case CONST_INT: + case CONST_DOUBLE: + case CONST: + case SYMBOL_REF: + case CODE_LABEL: + case PC: + case CC0: + case ASM_INPUT: + case ADDR_VEC: + case ADDR_DIFF_VEC: + case RETURN: + case REG: + case ADDRESSOF: + return; + + case PLUS: + /* validate insn of frame register plus constant. */ + if (GET_CODE (x) == PLUS + && XEXP (x, 0) == virtual_stack_vars_rtx + && CONSTANT_P (XEXP (x, 1))) + { + start_sequence (); + /* temp = force_operand (x, NULL_RTX); */ + { /* excerpt from expand_binop in optabs.c */ + optab binoptab = add_optab; + enum machine_mode mode = GET_MODE (x); + int icode = (int) binoptab->handlers[(int) mode].insn_code; + enum machine_mode mode1 = insn_data[icode].operand[2].mode; + rtx pat; + rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1); + temp = gen_reg_rtx (mode); + + /* Now, if insn's predicates don't allow offset operands, put them into + pseudo regs. */ + + if (! 
(*insn_data[icode].operand[2].predicate) (xop1, mode1) + && mode1 != VOIDmode) + xop1 = copy_to_mode_reg (mode1, xop1); + + pat = GEN_FCN (icode) (temp, xop0, xop1); + if (pat) + emit_insn (pat); + } + seq = get_insns (); + end_sequence (); + + emit_insns_before (seq, insn); + if (! validate_change (insn, loc, temp, 0)) + abort (); + return; + } + break; + + + case CALL_PLACEHOLDER: + validate_insns_of_varrefs (XEXP (x, 0)); + validate_insns_of_varrefs (XEXP (x, 1)); + validate_insns_of_varrefs (XEXP (x, 2)); + break; + + default: + break; + } + + /* Scan all subexpressions. */ + fmt = GET_RTX_FORMAT (code); + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) + if (*fmt == 'e') + validate_operand_of_varrefs (insn, &XEXP (x, i)); + else if (*fmt == 'E') + for (j = 0; j < XVECLEN (x, i); j++) + validate_operand_of_varrefs (insn, &XVECEXP (x, i, j)); +} + + + + +/* + The following codes are invoked after the instantiation of pseuso registers. + + Reorder local variables to place a peudo register after buffers to avoid + the corruption of local variables that could be used to further corrupt + arbitrary memory locations. +*/ +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD) +static void push_frame PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)); +static void push_frame_in_decls PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)); +static void push_frame_in_args PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)); +static void push_frame_of_insns PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)); +static void push_frame_in_operand PARAMS ((rtx insn, rtx orig, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)); +static void push_frame_of_reg_equiv_memory_loc PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)); +static void push_frame_of_reg_equiv_constant PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)); +static void reset_used_flags_for_push_frame PARAMS ((void)); +static int check_out_of_frame_access PARAMS ((rtx insn, HOST_WIDE_INT boundary)); +static int check_out_of_frame_access_in_operand PARAMS ((rtx, HOST_WIDE_INT boundary)); +#endif + +rtx +assign_stack_local_for_pseudo_reg (mode, size, align) + enum machine_mode mode; + HOST_WIDE_INT size; + int align; +{ +#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD) + return assign_stack_local (mode, size, align); +#else + tree blocks = DECL_INITIAL (current_function_decl); + rtx new; + HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame; + int first_call_from_purge_addressof, first_call_from_global_alloc; + + if (! flag_propolice_protection + || size == 0 + || ! blocks + || current_function_is_inlinable + || ! 
search_string_from_argsandvars (1) + || current_function_contains_functions) + return assign_stack_local (mode, size, align); + + first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected; + first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected; + saved_cse_not_expected = cse_not_expected; + + starting_frame = (STARTING_FRAME_OFFSET)?STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT; + units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, + GET_MODE_SIZE (mode)); + + if (first_call_from_purge_addressof) + { + push_frame_offset = push_allocated_offset; + if (check_out_of_frame_access (get_insns (), starting_frame)) + { + /* if there is an access beyond frame, push dummy region to seperate + the address of instantiated variables */ + push_frame (GET_MODE_SIZE (DImode), 0); + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1); + } + } + + if (first_call_from_global_alloc) + { + push_frame_offset = push_allocated_offset = 0; + if (check_out_of_frame_access (get_insns (), starting_frame)) + { + if (STARTING_FRAME_OFFSET) + { + /* if there is an access beyond frame, push dummy region + to seperate the address of instantiated variables */ + push_frame (GET_MODE_SIZE (DImode), 0); + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1); + } + else + push_allocated_offset = starting_frame; + } + } + + saved_frame_offset = frame_offset; + frame_offset = push_frame_offset; + + new = assign_stack_local (mode, size, align); + + push_frame_offset = frame_offset; + frame_offset = saved_frame_offset; + + if (push_frame_offset > push_allocated_offset) + { + push_frame (units_per_push, push_allocated_offset + STARTING_FRAME_OFFSET); + + assign_stack_local (BLKmode, units_per_push, -1); + push_allocated_offset += units_per_push; + } + + /* At the second call from global alloc, alpha push frame and assign + a local variable to the top of the stack */ + if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0) + push_frame_offset = push_allocated_offset = 0; + + return new; +#endif +} + + +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD) +/* + push frame infomation for instantiating pseudo register at the top of stack. + This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is + not defined. + + It is called by purge_addressof function and global_alloc (or reload) + function. 
+*/ +static void +push_frame (var_size, boundary) + HOST_WIDE_INT var_size, boundary; +{ + reset_used_flags_for_push_frame(); + + /* scan all declarations of variables and fix the offset address of the variable based on the frame pointer */ + push_frame_in_decls (DECL_INITIAL (current_function_decl), var_size, boundary); + + /* scan all argument variable and fix the offset address based on the frame pointer */ + push_frame_in_args (DECL_ARGUMENTS (current_function_decl), var_size, boundary); + + /* scan all operands of all insns and fix the offset address based on the frame pointer */ + push_frame_of_insns (get_insns (), var_size, boundary); + + /* scan all reg_equiv_memory_loc and reg_equiv_constant*/ + push_frame_of_reg_equiv_memory_loc (var_size, boundary); + push_frame_of_reg_equiv_constant (var_size, boundary); + + reset_used_flags_for_push_frame(); +} + +static void +reset_used_flags_for_push_frame() +{ + int i; + extern rtx *reg_equiv_memory_loc; + extern rtx *reg_equiv_constant; + + /* Clear all the USED bits in operands of all insns and declarations of local vars */ + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl)); + reset_used_flags_for_insns (get_insns ()); + + + /* The following codes are processed if the push_frame is called from + global_alloc (or reload) function */ + if (reg_equiv_memory_loc == 0) return; + + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++) + if (reg_equiv_memory_loc[i]) + { + rtx x = reg_equiv_memory_loc[i]; + + if (GET_CODE (x) == MEM + && GET_CODE (XEXP (x, 0)) == PLUS + && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx) + { + /* reset */ + XEXP (x, 0)->used = 0; + } + } + + + if (reg_equiv_constant == 0) return; + + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++) + if (reg_equiv_constant[i]) + { + rtx x = reg_equiv_constant[i]; + + if (GET_CODE (x) == PLUS + && AUTO_BASEPTR (x) == frame_pointer_rtx) + { + /* reset */ + x->used = 0; + } + } +} + +static void +push_frame_in_decls (block, push_size, boundary) + tree block; + HOST_WIDE_INT push_size, boundary; +{ + tree types; + HOST_WIDE_INT offset; + rtx home; + + while (block && TREE_CODE(block)==BLOCK) + { + types = BLOCK_VARS(block); + + while (types) + { + /* skip the declaration that refers an external variable and + also skip an global variable */ + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) + { + + if (!DECL_RTL_SET_P (types)) goto next; + home = DECL_RTL (types); + + /* process for static local variable */ + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF) + goto next; + + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == REG) + { + if (XEXP (home, 0) != frame_pointer_rtx + || boundary != 0) + goto next; + + XEXP (home, 0) = plus_constant (frame_pointer_rtx, + push_size); + + /* mark */ + XEXP (home, 0)->used = 1; + } + + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == MEM) + { + + /* process for dynamically allocated aray */ + home = XEXP (home, 0); + } + + if (GET_CODE (home) == MEM + && GET_CODE (XEXP (home, 0)) == PLUS + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT) + { + offset = AUTO_OFFSET(XEXP (home, 0)); + + if (! 
XEXP (home, 0)->used + && offset >= boundary) + { + offset += push_size; + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset); + + /* mark */ + XEXP (home, 0)->used = 1; + } + } + + } + next: + types = TREE_CHAIN(types); + } + + push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary); + block = BLOCK_CHAIN (block); + } +} + + +static void +push_frame_in_args (parms, push_size, boundary) + tree parms; + HOST_WIDE_INT push_size, boundary; +{ + rtx home; + HOST_WIDE_INT offset; + + for (; parms; parms = TREE_CHAIN (parms)) + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node) + { + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms)) + { + home = DECL_INCOMING_RTL (parms); + offset = AUTO_OFFSET(XEXP (home, 0)); + + if (XEXP (home, 0)->used || offset < boundary) continue; + + /* the operand related to the sweep variable */ + if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx) + { + offset += push_size; + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset); + + /* mark */ + XEXP (home, 0)->used = 1; + } + } + } +} + + +static int insn_pushed; +static int *fp_equiv = 0; + +static void +push_frame_of_insns (insn, push_size, boundary) + rtx insn; + HOST_WIDE_INT push_size, boundary; +{ + /* init fp_equiv */ + fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int)); + + for (; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN + || GET_CODE (insn) == CALL_INSN) + { + insn_pushed = FALSE; debuginsn = insn; + push_frame_in_operand (insn, PATTERN (insn), push_size, boundary); + + if (insn_pushed) + { + rtx after = insn; + rtx seq = split_insns (PATTERN (insn), insn); + + if (seq && GET_CODE (seq) == SEQUENCE) + { + register int i; + + /* replace the pattern of the insn */ + PATTERN (insn) = PATTERN (XVECEXP (seq, 0, 0)); + + if (XVECLEN (seq, 0) == 2) + { + rtx pattern = PATTERN (XVECEXP (seq, 0, 1)); + + if (GET_CODE (pattern) == SET + && GET_CODE (XEXP (pattern, 0)) == REG + && GET_CODE (XEXP (pattern, 1)) == PLUS + && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0) + && CONSTANT_P (XEXP (XEXP (pattern, 1), 1))) + { + rtx offset = XEXP (XEXP (pattern, 1), 1); + fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset); + goto next; + } + } + + for (i = 1; i < XVECLEN (seq, 0); i++) + { + rtx insn = XVECEXP (seq, 0, i); + add_insn_after (insn, after); + after = insn; + } + + /* Recursively call try_split for each new insn created */ + insn = NEXT_INSN (insn); + for (i = 1; i < XVECLEN (seq, 0); i++, insn = NEXT_INSN (insn)) + insn = try_split (PATTERN (insn), insn, 1); + } + } + + next: + /* push frame in NOTE */ + push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary); + + /* push frame in CALL EXPR_LIST */ + if (GET_CODE (insn) == CALL_INSN) + push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn), push_size, boundary); + } + + /* Clean up. 
*/ + free (fp_equiv); +} + + +static void +push_frame_in_operand (insn, orig, push_size, boundary) + rtx insn, orig; + HOST_WIDE_INT push_size, boundary; +{ + register rtx x = orig; + register enum rtx_code code; + int i, j; + HOST_WIDE_INT offset; + const char *fmt; + + if (x == 0) + return; + + code = GET_CODE (x); + + switch (code) + { + case CONST_INT: + case CONST_DOUBLE: + case CONST: + case SYMBOL_REF: + case CODE_LABEL: + case PC: + case CC0: + case ASM_INPUT: + case ADDR_VEC: + case ADDR_DIFF_VEC: + case RETURN: + case REG: + case ADDRESSOF: + case USE: + return; + + case SET: + /* + skip setjmp setup insn and setjmp restore insn + alpha case: + (set (MEM (reg:SI xx)) (frame_pointer_rtx))) + (set (frame_pointer_rtx) (REG)) + */ + if (GET_CODE (XEXP (x, 0)) == MEM + && XEXP (x, 1) == frame_pointer_rtx) + return; + if (XEXP (x, 0) == frame_pointer_rtx + && GET_CODE (XEXP (x, 1)) == REG) + return; + + /* + powerpc case: restores setjmp address + (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n)) + or + (set (reg) (plus frame_pointer_rtx const_int -n)) + (set (frame_pointer_rtx) (reg)) + */ + if (GET_CODE (XEXP (x, 0)) == REG + && GET_CODE (XEXP (x, 1)) == PLUS + && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx + && CONSTANT_P (XEXP (XEXP (x, 1), 1)) + && INTVAL (XEXP (XEXP (x, 1), 1)) < 0) + { + x = XEXP (x, 1); + offset = AUTO_OFFSET(x); + if (x->used || abs (offset) < boundary) + return; + + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size); + x->used = 1; insn_pushed = TRUE; + return; + } + + /* reset fp_equiv register */ + else if (GET_CODE (XEXP (x, 0)) == REG + && fp_equiv[REGNO (XEXP (x, 0))]) + fp_equiv[REGNO (XEXP (x, 0))] = 0; + + /* propagete fp_equiv register */ + else if (GET_CODE (XEXP (x, 0)) == REG + && GET_CODE (XEXP (x, 1)) == REG + && fp_equiv[REGNO (XEXP (x, 1))]) + if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER + || reg_renumber[REGNO (XEXP (x, 0))] > 0) + fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))]; + break; + + case MEM: + if (XEXP (x, 0) == frame_pointer_rtx + && boundary == 0) + { + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size); + XEXP (x, 0)->used = 1; insn_pushed = TRUE; + return; + } + break; + + case PLUS: + offset = AUTO_OFFSET(x); + + /* Handle special case of frame register plus constant. */ + if (CONSTANT_P (XEXP (x, 1)) + && XEXP (x, 0) == frame_pointer_rtx) + { + if (x->used || offset < boundary) + return; + + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size); + x->used = 1; insn_pushed = TRUE; + + return; + } + /* + Handle alpha case: + (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40])) + */ + if (CONSTANT_P (XEXP (x, 1)) + && GET_CODE (XEXP (x, 0)) == SUBREG + && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx) + { + if (x->used || offset < boundary) + return; + + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size); + x->used = 1; insn_pushed = TRUE; + + return; + } + /* + Handle powerpc case: + (set (reg x) (plus fp const)) + (set (.....) (... (plus (reg x) (const B)))) + */ + else if (CONSTANT_P (XEXP (x, 1)) + && GET_CODE (XEXP (x, 0)) == REG + && fp_equiv[REGNO (XEXP (x, 0))]) + { + if (x->used) return; + + offset += fp_equiv[REGNO (XEXP (x, 0))]; + + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset); + x->used = 1; insn_pushed = TRUE; + + return; + } + /* + Handle special case of frame register plus reg (constant). + (set (reg x) (const B)) + (set (....) 
(...(plus fp (reg x)))) + */ + else if (XEXP (x, 0) == frame_pointer_rtx + && GET_CODE (XEXP (x, 1)) == REG + && PREV_INSN (insn) + && PATTERN (PREV_INSN (insn)) + && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1) + && CONSTANT_P (SET_SRC (PATTERN (PREV_INSN (insn))))) + { + HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn)))); + + if (x->used || offset < boundary) + return; + + SET_SRC (PATTERN (PREV_INSN (insn))) + = gen_rtx_CONST_INT (VOIDmode, offset + push_size); + x->used = 1; + XEXP (x, 1)->used = 1; + + return; + } + /* Handle special case of frame register plus reg (used). */ + else if (XEXP (x, 0) == frame_pointer_rtx + && XEXP (x, 1)->used) + { + x->used = 1; + return; + } + /* + process further subtree: + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8))) + (const_int 5)) + */ + break; + + case CALL_PLACEHOLDER: + push_frame_of_insns (XEXP (x, 0), push_size, boundary); + push_frame_of_insns (XEXP (x, 1), push_size, boundary); + push_frame_of_insns (XEXP (x, 2), push_size, boundary); + break; + + default: + break; + } + + /* Scan all subexpressions. */ + fmt = GET_RTX_FORMAT (code); + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) + if (*fmt == 'e') + { + if (XEXP (x, i) == frame_pointer_rtx && boundary == 0) + fatal_insn ("push_frame_in_operand", insn); + push_frame_in_operand (insn, XEXP (x, i), push_size, boundary); + } + else if (*fmt == 'E') + for (j = 0; j < XVECLEN (x, i); j++) + push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary); +} + +static void +push_frame_of_reg_equiv_memory_loc (push_size, boundary) + HOST_WIDE_INT push_size, boundary; +{ + int i; + extern rtx *reg_equiv_memory_loc; + + /* This function is processed if the push_frame is called from + global_alloc (or reload) function */ + if (reg_equiv_memory_loc == 0) return; + + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++) + if (reg_equiv_memory_loc[i]) + { + rtx x = reg_equiv_memory_loc[i]; + int offset; + + if (GET_CODE (x) == MEM + && GET_CODE (XEXP (x, 0)) == PLUS + && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx) + { + offset = AUTO_OFFSET(XEXP (x, 0)); + + if (! XEXP (x, 0)->used + && offset >= boundary) + { + offset += push_size; + XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset); + + /* mark */ + XEXP (x, 0)->used = 1; + } + } + else if (GET_CODE (x) == MEM + && XEXP (x, 0) == frame_pointer_rtx + && boundary == 0) + { + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size); + XEXP (x, 0)->used = 1; insn_pushed = TRUE; + } + } +} + +static void +push_frame_of_reg_equiv_constant (push_size, boundary) + HOST_WIDE_INT push_size, boundary; +{ + int i; + extern rtx *reg_equiv_constant; + + /* This function is processed if the push_frame is called from + global_alloc (or reload) function */ + if (reg_equiv_constant == 0) return; + + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++) + if (reg_equiv_constant[i]) + { + rtx x = reg_equiv_constant[i]; + int offset; + + if (GET_CODE (x) == PLUS + && XEXP (x, 0) == frame_pointer_rtx) + { + offset = AUTO_OFFSET(x); + + if (! 
x->used + && offset >= boundary) + { + offset += push_size; + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset); + + /* mark */ + x->used = 1; + } + } + else if (x == frame_pointer_rtx + && boundary == 0) + { + reg_equiv_constant[i] + = plus_constant (frame_pointer_rtx, push_size); + reg_equiv_constant[i]->used = 1; insn_pushed = TRUE; + } + } +} + +static int +check_out_of_frame_access (insn, boundary) + rtx insn; + HOST_WIDE_INT boundary; +{ + for (; insn; insn = NEXT_INSN (insn)) + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN + || GET_CODE (insn) == CALL_INSN) + { + if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary)) + return TRUE; + } + return FALSE; +} + + +static int +check_out_of_frame_access_in_operand (orig, boundary) + rtx orig; + HOST_WIDE_INT boundary; +{ + register rtx x = orig; + register enum rtx_code code; + int i, j; + const char *fmt; + + if (x == 0) + return FALSE; + + code = GET_CODE (x); + + switch (code) + { + case CONST_INT: + case CONST_DOUBLE: + case CONST: + case SYMBOL_REF: + case CODE_LABEL: + case PC: + case CC0: + case ASM_INPUT: + case ADDR_VEC: + case ADDR_DIFF_VEC: + case RETURN: + case REG: + case ADDRESSOF: + return FALSE; + + case MEM: + if (XEXP (x, 0) == frame_pointer_rtx) + if (0 < boundary) return TRUE; + break; + + case PLUS: + /* Handle special case of frame register plus constant. */ + if (CONSTANT_P (XEXP (x, 1)) + && XEXP (x, 0) == frame_pointer_rtx) + { + if (0 <= AUTO_OFFSET(x) + && AUTO_OFFSET(x) < boundary) return TRUE; + return FALSE; + } + /* + process further subtree: + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8))) + (const_int 5)) + */ + break; + + case CALL_PLACEHOLDER: + if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE; + if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE; + if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE; + break; + + default: + break; + } + + /* Scan all subexpressions. */ + fmt = GET_RTX_FORMAT (code); + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++) + if (*fmt == 'e') + { + if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary)) + return TRUE; + } + else if (*fmt == 'E') + for (j = 0; j < XVECLEN (x, i); j++) + if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary)) + return TRUE; + + return FALSE; +} +#endif diff --git a/sys-devel/gcc/files/3.2/protector.h b/sys-devel/gcc/files/3.2/protector.h new file mode 100644 index 000000000000..341b50907a2e --- /dev/null +++ b/sys-devel/gcc/files/3.2/protector.h @@ -0,0 +1,48 @@ +/* RTL buffer overflow protection function for GNU C compiler + Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc. + +This file is part of GCC. + +GCC is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free +Software Foundation; either version 2, or (at your option) any later +version. + +GCC is distributed in the hope that it will be useful, but WITHOUT ANY +WARRANTY; without even the implied warranty of MERCHANTABILITY or +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License +for more details. + +You should have received a copy of the GNU General Public License +along with GCC; see the file COPYING. If not, write to the Free +Software Foundation, 59 Temple Place - Suite 330, Boston, MA +02111-1307, USA. 
*/ + + +/* declaration of GUARD variable */ +#define GUARD_m Pmode +#define UNITS_PER_GUARD MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m)) + +#ifndef L_stack_smash_handler + +/* insert a guard variable before a character buffer and change the order + of pointer variables, character buffers and pointer arguments */ + +extern void prepare_stack_protection PARAMS ((int inlinable)); + +#ifdef TREE_CODE +/* search a character array from the specified type tree */ + +extern int search_string_def PARAMS ((tree names)); +#endif + +/* examine whether the input contains frame pointer addressing */ + +extern int contains_fp PARAMS ((rtx op)); + +/* allocate a local variable in the stack area before character buffers + to avoid the corruption of it */ + +extern rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int)); + +#endif diff --git a/sys-devel/gcc/files/3.2/protector.patch b/sys-devel/gcc/files/3.2/protector.patch new file mode 100644 index 000000000000..3f02e80cf4d0 --- /dev/null +++ b/sys-devel/gcc/files/3.2/protector.patch @@ -0,0 +1,1046 @@ +Index: gcc/Makefile.in +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/Makefile.in,v +retrieving revision 1.1.1.12 +retrieving revision 1.1.1.12.2.1 +diff -c -3 -p -r1.1.1.12 -r1.1.1.12.2.1 +*** gcc/Makefile.in 2003/02/19 07:11:06 1.1.1.12 +--- gcc/Makefile.in 2003/02/26 08:01:00 1.1.1.12.2.1 +*************** OBJS = alias.o bb-reorder.o bitmap.o bui +*** 728,734 **** + sibcall.o simplify-rtx.o ssa.o ssa-ccp.o ssa-dce.o stmt.o \ + stor-layout.o stringpool.o timevar.o toplev.o tree.o tree-dump.o \ + tree-inline.o unroll.o varasm.o varray.o version.o vmsdbgout.o xcoffout.o \ +! $(GGC) $(out_object_file) $(EXTRA_OBJS) + + BACKEND = main.o libbackend.a + +--- 728,734 ---- + sibcall.o simplify-rtx.o ssa.o ssa-ccp.o ssa-dce.o stmt.o \ + stor-layout.o stringpool.o timevar.o toplev.o tree.o tree-dump.o \ + tree-inline.o unroll.o varasm.o varray.o version.o vmsdbgout.o xcoffout.o \ +! protector.o $(GGC) $(out_object_file) $(EXTRA_OBJS) + + BACKEND = main.o libbackend.a + +*************** LIB2FUNCS_1 = _muldi3 _negdi2 _lshrdi3 _ +*** 769,775 **** + + LIB2FUNCS_2 = _floatdixf _fixunsxfsi _fixtfdi _fixunstfdi _floatditf \ + _clear_cache _trampoline __main _exit _absvsi2 _absvdi2 _addvsi3 \ +! _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors + + # Defined in libgcc2.c, included only in the static library. + LIB2FUNCS_ST = _eprintf _bb __gcc_bcmp +--- 769,775 ---- + + LIB2FUNCS_2 = _floatdixf _fixunsxfsi _fixtfdi _fixunstfdi _floatditf \ + _clear_cache _trampoline __main _exit _absvsi2 _absvdi2 _addvsi3 \ +! _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors _stack_smash_handler + + # Defined in libgcc2.c, included only in the static library. + LIB2FUNCS_ST = _eprintf _bb __gcc_bcmp +Index: gcc/calls.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/calls.c,v +retrieving revision 1.1.1.9 +retrieving revision 1.1.1.9.2.1 +diff -c -3 -p -r1.1.1.9 -r1.1.1.9.2.1 +*** gcc/calls.c 2003/02/19 07:11:08 1.1.1.9 +--- gcc/calls.c 2003/02/26 08:01:00 1.1.1.9.2.1 +*************** expand_call (exp, target, ignore) +*** 2300,2306 **** + /* For variable-sized objects, we must be called with a target + specified. If we were to allocate space on the stack here, + we would have no way of knowing when to free it. */ +! 
rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1); + + mark_temp_addr_taken (d); + structure_value_addr = XEXP (d, 0); +--- 2300,2306 ---- + /* For variable-sized objects, we must be called with a target + specified. If we were to allocate space on the stack here, + we would have no way of knowing when to free it. */ +! rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1); + + mark_temp_addr_taken (d); + structure_value_addr = XEXP (d, 0); +Index: gcc/combine.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/combine.c,v +retrieving revision 1.1.1.9 +retrieving revision 1.1.1.9.2.2 +diff -c -3 -p -r1.1.1.9 -r1.1.1.9.2.2 +*** gcc/combine.c 2003/02/19 07:11:08 1.1.1.9 +--- gcc/combine.c 2003/03/11 05:51:10 1.1.1.9.2.2 +*************** combine_simplify_rtx (x, op0_mode, last, +*** 3819,3825 **** + rtx inner_op0 = XEXP (XEXP (x, 0), 1); + rtx inner_op1 = XEXP (x, 1); + rtx inner; +! + /* Make sure we pass the constant operand if any as the second + one if this is a commutative operation. */ + if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c') +--- 3819,3835 ---- + rtx inner_op0 = XEXP (XEXP (x, 0), 1); + rtx inner_op1 = XEXP (x, 1); + rtx inner; +! +! #ifndef FRAME_GROWS_DOWNWARD +! if (flag_propolice_protection +! && code == PLUS +! && other == frame_pointer_rtx +! && GET_CODE (inner_op0) == CONST_INT +! && GET_CODE (inner_op1) == CONST_INT +! && INTVAL (inner_op0) > 0 +! && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0) +! return x; +! #endif + /* Make sure we pass the constant operand if any as the second + one if this is a commutative operation. */ + if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c') +*************** combine_simplify_rtx (x, op0_mode, last, +*** 4193,4198 **** +--- 4203,4213 ---- + they are now checked elsewhere. */ + if (GET_CODE (XEXP (x, 0)) == PLUS + && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1))) ++ #ifndef FRAME_GROWS_DOWNWARD ++ if (! (flag_propolice_protection ++ && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx ++ && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)) ++ #endif + return gen_binary (PLUS, mode, + gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), + XEXP (x, 1)), +Index: gcc/cse.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/cse.c,v +retrieving revision 1.1.1.6 +retrieving revision 1.1.1.6.8.2 +diff -c -3 -p -r1.1.1.6 -r1.1.1.6.8.2 +*** gcc/cse.c 2002/07/29 05:12:14 1.1.1.6 +--- gcc/cse.c 2003/02/28 06:27:19 1.1.1.6.8.2 +*************** fold_rtx (x, insn) +*** 4274,4280 **** + + if (new_const == 0) + break; +! + /* If we are associating shift operations, don't let this + produce a shift of the size of the object or larger. + This could occur when we follow a sign-extend by a right +--- 4274,4287 ---- + + if (new_const == 0) + break; +! #ifndef FRAME_GROWS_DOWNWARD +! if (flag_propolice_protection +! && GET_CODE (y) == PLUS +! && XEXP (y, 0) == frame_pointer_rtx +! && INTVAL (inner_const) > 0 +! && INTVAL (new_const) <= 0) +! break; +! #endif + /* If we are associating shift operations, don't let this + produce a shift of the size of the object or larger. 
+ This could occur when we follow a sign-extend by a right +*************** cse_insn (insn, libcall_insn) +*** 4801,4806 **** +--- 4808,4820 ---- + if (SET_DEST (x) == pc_rtx + && GET_CODE (SET_SRC (x)) == LABEL_REF) + ; ++ /* cut the reg propagation of stack-protected argument */ ++ else if (x->volatil) { ++ rtx x1 = SET_DEST (x); ++ if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG) ++ x1 = SUBREG_REG (x1); ++ make_new_qty (REGNO (x1), GET_MODE (x1)); ++ } + + /* Don't count call-insns, (set (reg 0) (call ...)), as a set. + The hard function value register is used only once, to copy to +Index: gcc/explow.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/explow.c,v +retrieving revision 1.1.1.5 +retrieving revision 1.1.1.5.4.1 +diff -c -3 -p -r1.1.1.5 -r1.1.1.5.4.1 +*** gcc/explow.c 2002/12/18 00:55:39 1.1.1.5 +--- gcc/explow.c 2003/02/26 08:01:01 1.1.1.5.4.1 +*************** plus_constant_wide (x, c) +*** 82,88 **** + rtx tem; + int all_constant = 0; + +! if (c == 0) + return x; + + restart: +--- 82,89 ---- + rtx tem; + int all_constant = 0; + +! if (c == 0 +! && !(flag_propolice_protection && x == virtual_stack_vars_rtx)) + return x; + + restart: +*************** plus_constant_wide (x, c) +*** 183,189 **** + break; + } + +! if (c != 0) + x = gen_rtx_PLUS (mode, x, GEN_INT (c)); + + if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) +--- 184,191 ---- + break; + } + +! if (c != 0 +! || (flag_propolice_protection && x == virtual_stack_vars_rtx)) + x = gen_rtx_PLUS (mode, x, GEN_INT (c)); + + if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF) +*************** memory_address (mode, x) +*** 526,531 **** +--- 528,548 ---- + in certain cases. This is not necessary since the code + below can handle all possible cases, but machine-dependent + transformations can make better code. */ ++ if (flag_propolice_protection) ++ { ++ #define FRAMEADDR_P(X) (GET_CODE (X) == PLUS \ ++ && XEXP (X, 0) == virtual_stack_vars_rtx \ ++ && GET_CODE (XEXP (X, 1)) == CONST_INT) ++ rtx y; ++ if (FRAMEADDR_P (x)) goto win; ++ for (y=x; y!=0 && GET_CODE (y)==PLUS; y = XEXP (y, 0)) ++ { ++ if (FRAMEADDR_P (XEXP (y, 0))) ++ XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0)); ++ if (FRAMEADDR_P (XEXP (y, 1))) ++ XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1)); ++ } ++ } + LEGITIMIZE_ADDRESS (x, oldx, mode, win); + + /* PLUS and MULT can appear in special ways +Index: gcc/expr.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/expr.c,v +retrieving revision 1.1.1.11 +retrieving revision 1.1.1.11.2.1 +diff -c -3 -p -r1.1.1.11 -r1.1.1.11.2.1 +*** gcc/expr.c 2003/02/19 07:11:13 1.1.1.11 +--- gcc/expr.c 2003/02/26 08:01:01 1.1.1.11.2.1 +*************** Software Foundation, 59 Temple Place - S +*** 45,50 **** +--- 45,51 ---- + #include "langhooks.h" + #include "intl.h" + #include "tm_p.h" ++ #include "protector.h" + + /* Decide whether a function's arguments should be processed + from first to last or from last to first. +*************** move_by_pieces (to, from, len, align) +*** 1448,1454 **** + + if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from) + { +! data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len)); + data.autinc_from = 1; + data.explicit_inc_from = -1; + } +--- 1449,1455 ---- + + if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from) + { +! 
data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len-GET_MODE_SIZE (mode))); + data.autinc_from = 1; + data.explicit_inc_from = -1; + } +*************** move_by_pieces (to, from, len, align) +*** 1462,1468 **** + data.from_addr = copy_addr_to_reg (from_addr); + if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to) + { +! data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len)); + data.autinc_to = 1; + data.explicit_inc_to = -1; + } +--- 1463,1469 ---- + data.from_addr = copy_addr_to_reg (from_addr); + if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to) + { +! data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode))); + data.autinc_to = 1; + data.explicit_inc_to = -1; + } +*************** move_by_pieces_1 (genfun, mode, data) +*** 1579,1589 **** + from1 = adjust_address (data->from, mode, data->offset); + + if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) +! emit_insn (gen_add2_insn (data->to_addr, +! GEN_INT (-(HOST_WIDE_INT)size))); + if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0) +! emit_insn (gen_add2_insn (data->from_addr, +! GEN_INT (-(HOST_WIDE_INT)size))); + + if (data->to) + emit_insn ((*genfun) (to1, from1)); +--- 1580,1592 ---- + from1 = adjust_address (data->from, mode, data->offset); + + if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) +! if (data->explicit_inc_to < -1) +! emit_insn (gen_add2_insn (data->to_addr, +! GEN_INT (-(HOST_WIDE_INT)size))); + if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0) +! if (data->explicit_inc_from < -1) +! emit_insn (gen_add2_insn (data->from_addr, +! GEN_INT (-(HOST_WIDE_INT)size))); + + if (data->to) + emit_insn ((*genfun) (to1, from1)); +*************** store_by_pieces_1 (data, align) +*** 2480,2486 **** + + if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) + { +! data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len)); + data->autinc_to = 1; + data->explicit_inc_to = -1; + } +--- 2483,2489 ---- + + if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to) + { +! data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len-GET_MODE_SIZE (mode))); + data->autinc_to = 1; + data->explicit_inc_to = -1; + } +*************** store_by_pieces_2 (genfun, mode, data) +*** 2551,2558 **** + to1 = adjust_address (data->to, mode, data->offset); + + if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) +! emit_insn (gen_add2_insn (data->to_addr, +! GEN_INT (-(HOST_WIDE_INT) size))); + + cst = (*data->constfun) (data->constfundata, data->offset, mode); + emit_insn ((*genfun) (to1, cst)); +--- 2554,2562 ---- + to1 = adjust_address (data->to, mode, data->offset); + + if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0) +! if (data->explicit_inc_to < -1) +! emit_insn (gen_add2_insn (data->to_addr, +! GEN_INT (-(HOST_WIDE_INT) size))); + + cst = (*data->constfun) (data->constfundata, data->offset, mode); + emit_insn ((*genfun) (to1, cst)); +*************** expand_expr (exp, target, tmode, modifie +*** 7614,7620 **** + /* If adding to a sum including a constant, + associate it to put the constant outside. */ + if (GET_CODE (op1) == PLUS +! && CONSTANT_P (XEXP (op1, 1))) + { + rtx constant_term = const0_rtx; + +--- 7618,7625 ---- + /* If adding to a sum including a constant, + associate it to put the constant outside. */ + if (GET_CODE (op1) == PLUS +! && CONSTANT_P (XEXP (op1, 1)) +! 
&& !(flag_propolice_protection && (contains_fp (op0) || contains_fp (op1)))) + { + rtx constant_term = const0_rtx; + +Index: gcc/flags.h +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/flags.h,v +retrieving revision 1.1.1.5 +retrieving revision 1.1.1.5.4.1 +diff -c -3 -p -r1.1.1.5 -r1.1.1.5.4.1 +*** gcc/flags.h 2002/12/18 00:55:41 1.1.1.5 +--- gcc/flags.h 2003/02/26 08:01:02 1.1.1.5.4.1 +*************** extern int flag_detailed_statistics; +*** 645,648 **** +--- 645,656 ---- + /* Nonzero means enable synchronous exceptions for non-call instructions. */ + extern int flag_non_call_exceptions; + ++ /* Nonzero means use propolice as a stack protection method */ ++ ++ extern int flag_propolice_protection; ++ ++ /* Warn when not issuing stack smashing protection for some reason */ ++ ++ extern int warn_stack_protector; ++ + #endif /* ! GCC_FLAGS_H */ +Index: gcc/function.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/function.c,v +retrieving revision 1.1.1.9 +retrieving revision 1.1.1.9.2.1 +diff -c -3 -p -r1.1.1.9 -r1.1.1.9.2.1 +*** gcc/function.c 2003/02/19 07:11:14 1.1.1.9 +--- gcc/function.c 2003/02/26 08:01:02 1.1.1.9.2.1 +*************** Software Foundation, 59 Temple Place - S +*** 60,65 **** +--- 60,66 ---- + #include "tm_p.h" + #include "integrate.h" + #include "langhooks.h" ++ #include "protector.h" + + #ifndef TRAMPOLINE_ALIGNMENT + #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY +*************** static varray_type epilogue; +*** 156,161 **** +--- 157,166 ---- + /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue + in this function. */ + static varray_type sibcall_epilogue; ++ ++ /* Current boundary mark for character arrays. */ ++ int temp_boundary_mark = 0; ++ + + /* In order to evaluate some expressions, such as function calls returning + structures in memory, we need to temporarily allocate stack locations. +*************** struct temp_slot +*** 209,214 **** +--- 214,221 ---- + /* The size of the slot, including extra space for alignment. This + info is for combine_temp_slots. */ + HOST_WIDE_INT full_size; ++ /* Boundary mark of a character array and the others. This info is for propolice */ ++ int boundary_mark; + }; + + /* This structure is used to record MEMs or pseudos used to replace VAR, any +*************** assign_stack_local (mode, size, align) +*** 655,660 **** +--- 662,668 ---- + whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3 + if we are to allocate something at an inner level to be treated as + a variable in the block (e.g., a SAVE_EXPR). ++ KEEP is 5 if we allocate a place to return structure. + + TYPE is the type that will be used for the stack slot. */ + +*************** assign_stack_temp_for_type (mode, size, +*** 668,673 **** +--- 676,683 ---- + unsigned int align; + struct temp_slot *p, *best_p = 0; + rtx slot; ++ int char_array = (flag_propolice_protection ++ && keep == 1 && search_string_def (type)); + + /* If SIZE is -1 it means that somebody tried to allocate a temporary + of a variable size. */ +*************** assign_stack_temp_for_type (mode, size, +*** 693,699 **** + && ! p->in_use + && objects_must_conflict_p (p->type, type) + && (best_p == 0 || best_p->size > p->size +! || (best_p->size == p->size && best_p->align > p->align))) + { + if (p->align == align && p->size == size) + { +--- 703,710 ---- + && ! p->in_use + && objects_must_conflict_p (p->type, type) + && (best_p == 0 || best_p->size > p->size +! 
|| (best_p->size == p->size && best_p->align > p->align)) +! && (! char_array || p->boundary_mark != 0)) + { + if (p->align == align && p->size == size) + { +*************** assign_stack_temp_for_type (mode, size, +*** 728,733 **** +--- 739,745 ---- + p->address = 0; + p->rtl_expr = 0; + p->type = best_p->type; ++ p->boundary_mark = best_p->boundary_mark; + p->next = temp_slots; + temp_slots = p; + +*************** assign_stack_temp_for_type (mode, size, +*** 788,793 **** +--- 800,806 ---- + p->full_size = frame_offset - frame_offset_old; + #endif + p->address = 0; ++ p->boundary_mark = char_array?++temp_boundary_mark:0; + p->next = temp_slots; + temp_slots = p; + } +*************** combine_temp_slots () +*** 958,971 **** + int delete_q = 0; + if (! q->in_use && GET_MODE (q->slot) == BLKmode) + { +! if (p->base_offset + p->full_size == q->base_offset) + { + /* Q comes after P; combine Q into P. */ + p->size += q->size; + p->full_size += q->full_size; + delete_q = 1; + } +! else if (q->base_offset + q->full_size == p->base_offset) + { + /* P comes after Q; combine P into Q. */ + q->size += p->size; +--- 971,986 ---- + int delete_q = 0; + if (! q->in_use && GET_MODE (q->slot) == BLKmode) + { +! if (p->base_offset + p->full_size == q->base_offset && +! p->boundary_mark == q->boundary_mark) + { + /* Q comes after P; combine Q into P. */ + p->size += q->size; + p->full_size += q->full_size; + delete_q = 1; + } +! else if (q->base_offset + q->full_size == p->base_offset && +! p->boundary_mark == q->boundary_mark) + { + /* P comes after Q; combine P into Q. */ + q->size += p->size; +*************** put_reg_into_stack (function, reg, type, +*** 1519,1525 **** + new = func->x_parm_reg_stack_loc[regno]; + + if (new == 0) +! new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func); + + PUT_CODE (reg, MEM); + PUT_MODE (reg, decl_mode); +--- 1534,1542 ---- + new = func->x_parm_reg_stack_loc[regno]; + + if (new == 0) +! new = function ? +! assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func): +! assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0); + + PUT_CODE (reg, MEM); + PUT_MODE (reg, decl_mode); +*************** instantiate_virtual_regs_1 (loc, object, +*** 3950,3956 **** + constant with that register. */ + temp = gen_reg_rtx (Pmode); + XEXP (x, 0) = new; +! if (validate_change (object, &XEXP (x, 1), temp, 0)) + emit_insn_before (gen_move_insn (temp, new_offset), object); + else + { +--- 3967,3974 ---- + constant with that register. */ + temp = gen_reg_rtx (Pmode); + XEXP (x, 0) = new; +! if (validate_change (object, &XEXP (x, 1), temp, 0) +! && ! flag_propolice_protection) + emit_insn_before (gen_move_insn (temp, new_offset), object); + else + { +Index: gcc/gcse.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/gcse.c,v +retrieving revision 1.1.1.4 +retrieving revision 1.1.1.4.8.1 +diff -c -3 -p -r1.1.1.4 -r1.1.1.4.8.1 +*** gcc/gcse.c 2002/07/29 05:12:18 1.1.1.4 +--- gcc/gcse.c 2003/02/26 08:01:02 1.1.1.4.8.1 +*************** cprop_insn (bb, insn, alter_jumps) +*** 4193,4199 **** + /* Find an assignment that sets reg_used and is available + at the start of the block. */ + set = find_avail_set (regno, insn); +! if (! set) + continue; + + pat = set->expr; +--- 4193,4199 ---- + /* Find an assignment that sets reg_used and is available + at the start of the block. */ + set = find_avail_set (regno, insn); +! if (! 
set || set->expr->volatil) + continue; + + pat = set->expr; +Index: gcc/integrate.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/integrate.c,v +retrieving revision 1.1.1.7 +retrieving revision 1.1.1.7.2.1 +diff -c -3 -p -r1.1.1.7 -r1.1.1.7.2.1 +*** gcc/integrate.c 2003/02/19 07:11:16 1.1.1.7 +--- gcc/integrate.c 2003/02/26 08:01:02 1.1.1.7.2.1 +*************** copy_decl_for_inlining (decl, from_fn, t +*** 388,393 **** +--- 388,397 ---- + /* These args would always appear unused, if not for this. */ + TREE_USED (copy) = 1; + ++ /* The inlined variable is marked as INLINE not to sweep by propolice */ ++ if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL) ++ DECL_INLINE (copy) = 1; ++ + /* Set the context for the new declaration. */ + if (!DECL_CONTEXT (decl)) + /* Globals stay global. */ +*************** copy_rtx_and_substitute (orig, map, for_ +*** 1950,1955 **** +--- 1954,1963 ---- + + seq = gen_sequence (); + end_sequence (); ++ #ifdef FRAME_GROWS_DOWNWARD ++ if (flag_propolice_protection && GET_CODE (seq) == SET) ++ RTX_INTEGRATED_P (SET_SRC (seq)) = 1; ++ #endif + emit_insn_after (seq, map->insns_at_start); + return temp; + } +Index: gcc/libgcc-std.ver +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/libgcc-std.ver,v +retrieving revision 1.1.1.4 +retrieving revision 1.1.1.4.12.1 +diff -c -3 -p -r1.1.1.4 -r1.1.1.4.12.1 +*** gcc/libgcc-std.ver 2001/06/19 04:54:26 1.1.1.4 +--- gcc/libgcc-std.ver 2003/02/26 08:01:02 1.1.1.4.12.1 +*************** GCC_3.0 { +*** 174,177 **** +--- 174,181 ---- + _Unwind_SjLj_RaiseException + _Unwind_SjLj_ForcedUnwind + _Unwind_SjLj_Resume ++ ++ # stack smash handler symbols ++ __guard ++ __stack_smash_handler + } +Index: gcc/libgcc2.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/libgcc2.c,v +retrieving revision 1.1.1.7 +retrieving revision 1.1.1.7.4.1 +diff -c -3 -p -r1.1.1.7 -r1.1.1.7.4.1 +*** gcc/libgcc2.c 2002/12/18 00:55:44 1.1.1.7 +--- gcc/libgcc2.c 2003/02/26 08:01:02 1.1.1.7.4.1 +*************** atexit (func_ptr func) +*** 2050,2052 **** +--- 2050,2138 ---- + #endif /* NEED_ATEXIT */ + + #endif /* L_exit */ ++ ++ #ifdef L_stack_smash_handler ++ #include <stdio.h> ++ #include <string.h> ++ #include <fcntl.h> ++ #include <unistd.h> ++ ++ #ifdef _POSIX_SOURCE ++ #include <signal.h> ++ #endif ++ ++ #if defined(HAVE_SYSLOG) ++ #include <sys/types.h> ++ #include <sys/socket.h> ++ #include <sys/un.h> ++ ++ #include <sys/syslog.h> ++ #ifndef _PATH_LOG ++ #define _PATH_LOG "/dev/log" ++ #endif ++ #endif ++ ++ long __guard[8] = {0,0,0,0,0,0,0,0}; ++ static void __guard_setup (void) __attribute__ ((constructor)) ; ++ static void __guard_setup (void) ++ { ++ int fd; ++ if (__guard[0]!=0) return; ++ fd = open ("/dev/urandom", 0); ++ if (fd != -1) { ++ ssize_t size = read (fd, (char*)&__guard, sizeof(__guard)); ++ close (fd) ; ++ if (size == sizeof(__guard)) return; ++ } ++ /* If a random generator can't be used, the protector switches the guard ++ to the "terminator canary" */ ++ ((char*)__guard)[0] = 0; ((char*)__guard)[1] = 0; ++ ((char*)__guard)[2] = '\n'; ((char*)__guard)[3] = 255; ++ } ++ void __stack_smash_handler (char func[], int damaged ATTRIBUTE_UNUSED) ++ { ++ #if defined (__GNU_LIBRARY__) ++ extern char * __progname; ++ #endif ++ char message[] = ": stack smashing attack in function "; ++ int bufsz = 256, len; ++ char buf[bufsz]; ++ #if defined(HAVE_SYSLOG) ++ int LogFile; ++ 
struct sockaddr_un SyslogAddr; /* AF_UNIX address of local logger */ ++ #endif ++ ++ strcpy(buf, "<2>"); len=3; /* send LOG_CRIT */ ++ #if defined (__GNU_LIBRARY__) ++ strncat(buf, __progname, bufsz-len-1); len = strlen(buf); ++ #endif ++ if (bufsz>len) strncat(buf, message, bufsz-len-1); len = strlen(buf); ++ if (bufsz>len) strncat(buf, func, bufsz-len-1); len = strlen(buf); ++ ++ /* print error message */ ++ write (STDERR_FILENO, buf+3, len-3); ++ #if defined(HAVE_SYSLOG) ++ if ((LogFile = socket(AF_UNIX, SOCK_DGRAM, 0)) != -1) { ++ ++ /* ++ * Send "found" message to the "/dev/log" path ++ */ ++ SyslogAddr.sun_family = AF_UNIX; ++ (void)strncpy(SyslogAddr.sun_path, _PATH_LOG, ++ sizeof(SyslogAddr.sun_path) - 1); ++ SyslogAddr.sun_path[sizeof(SyslogAddr.sun_path) - 1] = '\0'; ++ sendto(LogFile, buf, strlen(buf), 0, (struct sockaddr *)&SyslogAddr, sizeof(SyslogAddr)); ++ } ++ #endif ++ ++ #ifdef _POSIX_SOURCE ++ { ++ sigset_t mask; ++ sigfillset(&mask); ++ sigdelset(&mask, SIGABRT); ++ (void)kill(getpid(), SIGABRT); ++ } ++ #endif ++ _exit(127); ++ } ++ #endif +Index: gcc/loop.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/loop.c,v +retrieving revision 1.1.1.9 +retrieving revision 1.1.1.9.2.2 +diff -c -3 -p -r1.1.1.9 -r1.1.1.9.2.2 +*** gcc/loop.c 2003/02/19 07:11:17 1.1.1.9 +--- gcc/loop.c 2003/03/11 05:50:50 1.1.1.9.2.2 +*************** general_induction_var (loop, x, src_reg, +*** 6385,6390 **** +--- 6385,6398 ---- + if (GET_CODE (*mult_val) == USE) + *mult_val = XEXP (*mult_val, 0); + ++ #ifndef FRAME_GROWS_DOWNWARD ++ if (flag_propolice_protection ++ && GET_CODE (*add_val) == PLUS ++ && (XEXP (*add_val, 0) == frame_pointer_rtx ++ || XEXP (*add_val, 1) == frame_pointer_rtx)) ++ return 0; ++ #endif ++ + if (is_addr) + *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost; + else +Index: gcc/optabs.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/optabs.c,v +retrieving revision 1.1.1.8 +retrieving revision 1.1.1.8.2.1 +diff -c -3 -p -r1.1.1.8 -r1.1.1.8.2.1 +*** gcc/optabs.c 2003/02/19 07:11:17 1.1.1.8 +--- gcc/optabs.c 2003/02/26 08:01:03 1.1.1.8.2.1 +*************** expand_binop (mode, binoptab, op0, op1, +*** 670,675 **** +--- 670,695 ---- + if (target) + target = protect_from_queue (target, 1); + ++ if (flag_propolice_protection ++ && binoptab->code == PLUS ++ && op0 == virtual_stack_vars_rtx ++ && GET_CODE(op1) == CONST_INT) ++ { ++ int icode = (int) binoptab->handlers[(int) mode].insn_code; ++ if (target) ++ temp = target; ++ else ++ temp = gen_reg_rtx (mode); ++ ++ if (! 
(*insn_data[icode].operand[0].predicate) (temp, mode) ++ || GET_CODE (temp) != REG) ++ temp = gen_reg_rtx (mode); ++ ++ emit_insn (gen_rtx_SET (VOIDmode, temp, ++ gen_rtx_PLUS (GET_MODE (op0), op0, op1))); ++ return temp; ++ } ++ + if (flag_force_mem) + { + op0 = force_not_mem (op0); +Index: gcc/reload1.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/reload1.c,v +retrieving revision 1.1.1.7 +retrieving revision 1.1.1.7.4.1 +diff -c -3 -p -r1.1.1.7 -r1.1.1.7.4.1 +*** gcc/reload1.c 2002/12/18 00:55:47 1.1.1.7 +--- gcc/reload1.c 2003/02/26 08:01:03 1.1.1.7.4.1 +*************** Software Foundation, 59 Temple Place - S +*** 42,47 **** +--- 42,48 ---- + #include "toplev.h" + #include "except.h" + #include "tree.h" ++ #include "protector.h" + + /* This file contains the reload pass of the compiler, which is + run after register allocation has been done. It checks that +*************** alter_reg (i, from_reg) +*** 1985,1991 **** + if (from_reg == -1) + { + /* No known place to spill from => no slot to reuse. */ +! x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, + inherent_size == total_size ? 0 : -1); + if (BYTES_BIG_ENDIAN) + /* Cancel the big-endian correction done in assign_stack_local. +--- 1986,1992 ---- + if (from_reg == -1) + { + /* No known place to spill from => no slot to reuse. */ +! x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size, + inherent_size == total_size ? 0 : -1); + if (BYTES_BIG_ENDIAN) + /* Cancel the big-endian correction done in assign_stack_local. +Index: gcc/simplify-rtx.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/simplify-rtx.c,v +retrieving revision 1.1.1.2 +retrieving revision 1.1.1.2.8.1 +diff -c -3 -p -r1.1.1.2 -r1.1.1.2.8.1 +*** gcc/simplify-rtx.c 2002/07/29 05:12:24 1.1.1.2 +--- gcc/simplify-rtx.c 2003/02/26 08:01:04 1.1.1.2.8.1 +*************** simplify_plus_minus (code, mode, op0, op +*** 1763,1769 **** + int n_ops = 2, input_ops = 2, input_consts = 0, n_consts; + int first, negate, changed; + int i, j; +! + memset ((char *) ops, 0, sizeof ops); + + /* Set up the two operands and then expand them until nothing has been +--- 1763,1770 ---- + int n_ops = 2, input_ops = 2, input_consts = 0, n_consts; + int first, negate, changed; + int i, j; +! HOST_WIDE_INT fp_offset = 0; +! + memset ((char *) ops, 0, sizeof ops); + + /* Set up the two operands and then expand them until nothing has been +*************** simplify_plus_minus (code, mode, op0, op +*** 1788,1793 **** +--- 1789,1798 ---- + switch (this_code) + { + case PLUS: ++ if (flag_propolice_protection ++ && XEXP (this_op, 0) == virtual_stack_vars_rtx ++ && GET_CODE (XEXP (this_op, 1)) == CONST_INT) ++ fp_offset = INTVAL (XEXP (this_op, 1)); + case MINUS: + if (n_ops == 7) + return NULL_RTX; +*************** simplify_plus_minus (code, mode, op0, op +*** 1942,1951 **** + && GET_CODE (ops[n_ops - 1].op) == CONST_INT + && CONSTANT_P (ops[n_ops - 2].op)) + { +! rtx value = ops[n_ops - 1].op; + if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg) +! value = neg_const_int (mode, value); +! ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value)); + n_ops--; + } + +--- 1947,1956 ---- + && GET_CODE (ops[n_ops - 1].op) == CONST_INT + && CONSTANT_P (ops[n_ops - 2].op)) + { +! int value = INTVAL (ops[n_ops - 1].op); + if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg) +! value = -value; +! 
ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value); + n_ops--; + } + +*************** simplify_plus_minus (code, mode, op0, op +*** 1963,1968 **** +--- 1968,2021 ---- + && (n_ops + n_consts > input_ops + || (n_ops + n_consts == input_ops && n_consts <= input_consts))) + return NULL_RTX; ++ ++ if (flag_propolice_protection) ++ { ++ /* keep the addressing style of local variables ++ as (plus (virtual_stack_vars_rtx) (CONST_int x)) ++ (1) inline function is expanded, (+ (+VFP c1) -c2)=>(+ VFP c1-c2) ++ (2) the case ary[r-1], (+ (+VFP c1) (+r -1))=>(+ R (+r -1)) ++ */ ++ for (i = 0; i < n_ops; i++) ++ #ifdef FRAME_GROWS_DOWNWARD ++ if (ops[i].op == virtual_stack_vars_rtx) ++ #else ++ if (ops[i].op == virtual_stack_vars_rtx ++ || ops[i].op == frame_pointer_rtx) ++ #endif ++ { ++ if (GET_CODE (ops[n_ops - 1].op) == CONST_INT) ++ { ++ HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op); ++ if (n_ops < 3 || value >= fp_offset) ++ { ++ ops[i].op = plus_constant (ops[i].op, value); ++ n_ops--; ++ } ++ else ++ { ++ if (!force ++ && (n_ops+1 + n_consts > input_ops ++ || (n_ops+1 + n_consts == input_ops && n_consts <= input_consts))) ++ return NULL_RTX; ++ ops[n_ops - 1].op = GEN_INT (value-fp_offset); ++ ops[i].op = plus_constant (ops[i].op, fp_offset); ++ } ++ } ++ /* buf[BUFSIZE]: buf is the first local variable (+ (+ fp -S) S) ++ or (+ (fp 0) r) ==> ((+ (+fp 1) r) -1) */ ++ else if (fp_offset != 0) ++ return NULL_RTX; ++ #ifndef FRAME_GROWS_DOWNWARD ++ /* ++ * For the case of buf[i], i: REG, buf: (plus fp 0), ++ */ ++ else if (fp_offset == 0) ++ return NULL_RTX; ++ #endif ++ break; ++ } ++ } + + /* Put a non-negated operand first. If there aren't any, make all + operands positive and negate the whole thing later. */ +Index: gcc/toplev.c +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/toplev.c,v +retrieving revision 1.1.1.10 +retrieving revision 1.1.1.10.4.1 +diff -c -3 -p -r1.1.1.10 -r1.1.1.10.4.1 +*** gcc/toplev.c 2002/12/18 00:55:48 1.1.1.10 +--- gcc/toplev.c 2003/02/26 08:01:04 1.1.1.10.4.1 +*************** int align_functions_log; +*** 883,888 **** +--- 883,895 ---- + minimum function alignment. Zero means no alignment is forced. */ + int force_align_functions_log; + ++ #if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD) ++ /* Nonzero means use propolice as a stack protection method */ ++ int flag_propolice_protection = 1; ++ #else ++ int flag_propolice_protection = 0; ++ #endif ++ + /* Table of supported debugging formats. */ + static const struct + { +*************** static const lang_independent_options f_ +*** 1154,1159 **** +--- 1161,1170 ---- + N_("Report on permanent memory allocation at end of run") }, + { "trapv", &flag_trapv, 1, + N_("Trap for signed overflow in addition / subtraction / multiplication") }, ++ {"stack-protector", &flag_propolice_protection, 1, ++ N_("Enables stack protection") }, ++ {"no-stack-protector", &flag_propolice_protection, 0, ++ N_("Disables stack protection") }, + }; + + /* Table of language-specific options. */ +*************** static const lang_independent_options W_ +*** 1495,1501 **** + {"deprecated-declarations", &warn_deprecated_decl, 1, + N_("Warn about uses of __attribute__((deprecated)) declarations") }, + {"missing-noreturn", &warn_missing_noreturn, 1, +! 
N_("Warn about functions which might be candidates for attribute noreturn") } + }; + + void +--- 1506,1514 ---- + {"deprecated-declarations", &warn_deprecated_decl, 1, + N_("Warn about uses of __attribute__((deprecated)) declarations") }, + {"missing-noreturn", &warn_missing_noreturn, 1, +! N_("Warn about functions which might be candidates for attribute noreturn") }, +! {"stack-protector", &warn_stack_protector, 1, +! N_("Warn when disabling stack protector for some reason")} + }; + + void +*************** rest_of_compilation (decl) +*** 2446,2451 **** +--- 2459,2466 ---- + + insns = get_insns (); + ++ if (flag_propolice_protection) prepare_stack_protection (inlinable); ++ + /* Dump the rtl code if we are dumping rtl. */ + + if (open_dump_file (DFI_rtl, decl)) +Index: gcc/config/t-linux +=================================================================== +RCS file: /home/cvsroot/gcc/gcc/config/t-linux,v +retrieving revision 1.1.1.6 +retrieving revision 1.1.1.6.8.1 +diff -c -3 -p -r1.1.1.6 -r1.1.1.6.8.1 +*** gcc/config/t-linux 2002/07/29 05:12:32 1.1.1.6 +--- gcc/config/t-linux 2003/02/26 08:01:04 1.1.1.6.8.1 +*************** STMP_FIXPROTO = +*** 4,10 **** + # Compile crtbeginS.o and crtendS.o with pic. + CRTSTUFF_T_CFLAGS_S = -fPIC + # Compile libgcc2.a with pic. +! TARGET_LIBGCC2_CFLAGS = -fPIC + + # Override t-slibgcc-elf-ver to export some libgcc symbols with + # the symbol versions that glibc used. +--- 4,10 ---- + # Compile crtbeginS.o and crtendS.o with pic. + CRTSTUFF_T_CFLAGS_S = -fPIC + # Compile libgcc2.a with pic. +! TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG + + # Override t-slibgcc-elf-ver to export some libgcc symbols with + # the symbol versions that glibc used. |