e53b6e564a
ChangeLog: * MAINTAINERS: Rename .c names to .cc. contrib/ChangeLog: * filter-clang-warnings.py: Rename .c names to .cc. * gcc_update: Likewise. * paranoia.cc: Likewise. contrib/header-tools/ChangeLog: * README: Rename .c names to .cc. gcc/ChangeLog: * Makefile.in: Rename .c names to .cc. * alias.h: Likewise. * asan.cc: Likewise. * auto-profile.h: Likewise. * basic-block.h (struct basic_block_d): Likewise. * btfout.cc: Likewise. * builtins.cc (expand_builtin_longjmp): Likewise. (validate_arg): Likewise. (access_ref::offset_bounded): Likewise. * caller-save.cc (reg_restore_code): Likewise. (setup_save_areas): Likewise. * calls.cc (initialize_argument_information): Likewise. (expand_call): Likewise. (emit_library_call_value_1): Likewise. * cfg-flags.def (RTL): Likewise. (SIBCALL): Likewise. (CAN_FALLTHRU): Likewise. * cfganal.cc (post_order_compute): Likewise. * cfgcleanup.cc (try_simplify_condjump): Likewise. (merge_blocks_move_predecessor_nojumps): Likewise. (merge_blocks_move_successor_nojumps): Likewise. (merge_blocks_move): Likewise. (old_insns_match_p): Likewise. (try_crossjump_bb): Likewise. * cfgexpand.cc (expand_gimple_stmt): Likewise. * cfghooks.cc (split_block_before_cond_jump): Likewise. (profile_record_check_consistency): Likewise. * cfghooks.h: Likewise. * cfgrtl.cc (pass_free_cfg::execute): Likewise. (rtl_can_merge_blocks): Likewise. (try_redirect_by_replacing_jump): Likewise. (make_pass_outof_cfg_layout_mode): Likewise. (cfg_layout_can_merge_blocks_p): Likewise. * cgraph.cc (release_function_body): Likewise. (cgraph_node::get_fun): Likewise. * cgraph.h (struct cgraph_node): Likewise. (asmname_hasher::equal): Likewise. (cgraph_inline_failed_type): Likewise. (thunk_adjust): Likewise. (dump_callgraph_transformation): Likewise. (record_references_in_initializer): Likewise. (ipa_discover_variable_flags): Likewise. * cgraphclones.cc (GTY): Likewise. * cgraphunit.cc (symbol_table::finalize_compilation_unit): Likewise. 
* collect-utils.h (GCC_COLLECT_UTILS_H): Likewise. * collect2-aix.h (GCC_COLLECT2_AIX_H): Likewise. * collect2.cc (maybe_run_lto_and_relink): Likewise. * combine-stack-adj.cc: Likewise. * combine.cc (setup_incoming_promotions): Likewise. (combine_simplify_rtx): Likewise. (count_rtxs): Likewise. * common.opt: Likewise. * common/config/aarch64/aarch64-common.cc: Likewise. * common/config/arm/arm-common.cc (arm_asm_auto_mfpu): Likewise. * common/config/avr/avr-common.cc: Likewise. * common/config/i386/i386-isas.h (struct _isa_names_table): Likewise. * conditions.h: Likewise. * config.gcc: Likewise. * config/aarch64/aarch64-builtins.cc (aarch64_resolve_overloaded_memtag): Likewise. * config/aarch64/aarch64-protos.h (aarch64_classify_address): Likewise. (aarch64_get_extension_string_for_isa_flags): Likewise. * config/aarch64/aarch64-sve-builtins.cc (function_builder::add_function): Likewise. * config/aarch64/aarch64.cc (aarch64_regmode_natural_size): Likewise. (aarch64_sched_first_cycle_multipass_dfa_lookahead): Likewise. (aarch64_option_valid_attribute_p): Likewise. (aarch64_short_vector_p): Likewise. (aarch64_float_const_representable_p): Likewise. * config/aarch64/aarch64.h (DBX_REGISTER_NUMBER): Likewise. (ASM_OUTPUT_POOL_EPILOGUE): Likewise. (GTY): Likewise. * config/aarch64/cortex-a57-fma-steering.cc: Likewise. * config/aarch64/driver-aarch64.cc (contains_core_p): Likewise. * config/aarch64/t-aarch64: Likewise. * config/aarch64/x-aarch64: Likewise. * config/aarch64/x-darwin: Likewise. * config/alpha/alpha-protos.h: Likewise. * config/alpha/alpha.cc (alpha_scalar_mode_supported_p): Likewise. * config/alpha/alpha.h (LONG_DOUBLE_TYPE_SIZE): Likewise. (enum reg_class): Likewise. * config/alpha/alpha.md: Likewise. * config/alpha/driver-alpha.cc (AMASK_LOCKPFTCHOK): Likewise. * config/alpha/x-alpha: Likewise. * config/arc/arc-protos.h (arc_eh_uses): Likewise. * config/arc/arc.cc (ARC_OPT): Likewise. (arc_ccfsm_advance): Likewise. (arc_arg_partial_bytes): Likewise. 
(conditionalize_nonjump): Likewise. * config/arc/arc.md: Likewise. * config/arc/builtins.def: Likewise. * config/arc/t-arc: Likewise. * config/arm/arm-c.cc (arm_resolve_overloaded_builtin): Likewise. (arm_pragma_target_parse): Likewise. * config/arm/arm-protos.h (save_restore_target_globals): Likewise. (arm_cpu_cpp_builtins): Likewise. * config/arm/arm.cc (vfp3_const_double_index): Likewise. (shift_op): Likewise. (thumb2_final_prescan_insn): Likewise. (arm_final_prescan_insn): Likewise. (arm_asm_output_labelref): Likewise. (arm_small_register_classes_for_mode_p): Likewise. * config/arm/arm.h: Likewise. * config/arm/arm.md: Likewise. * config/arm/driver-arm.cc: Likewise. * config/arm/symbian.h: Likewise. * config/arm/t-arm: Likewise. * config/arm/thumb1.md: Likewise. * config/arm/x-arm: Likewise. * config/avr/avr-c.cc (avr_register_target_pragmas): Likewise. * config/avr/avr-fixed.md: Likewise. * config/avr/avr-log.cc (avr_log_vadump): Likewise. * config/avr/avr-mcus.def: Likewise. * config/avr/avr-modes.def (FRACTIONAL_INT_MODE): Likewise. * config/avr/avr-passes.def (INSERT_PASS_BEFORE): Likewise. * config/avr/avr-protos.h (make_avr_pass_casesi): Likewise. * config/avr/avr.cc (avr_option_override): Likewise. (avr_build_builtin_va_list): Likewise. (avr_mode_dependent_address_p): Likewise. (avr_function_arg_advance): Likewise. (avr_asm_output_aligned_decl_common): Likewise. * config/avr/avr.h (RETURN_ADDR_RTX): Likewise. (SUPPORTS_INIT_PRIORITY): Likewise. * config/avr/avr.md: Likewise. * config/avr/builtins.def: Likewise. * config/avr/gen-avr-mmcu-specs.cc (IN_GEN_AVR_MMCU_TEXI): Likewise. * config/avr/gen-avr-mmcu-texi.cc (IN_GEN_AVR_MMCU_TEXI): Likewise. (main): Likewise. * config/avr/t-avr: Likewise. * config/bfin/bfin.cc (frame_related_constant_load): Likewise. * config/bpf/bpf-protos.h (GCC_BPF_PROTOS_H): Likewise. * config/bpf/bpf.h (enum reg_class): Likewise. * config/bpf/t-bpf: Likewise. * config/c6x/c6x-protos.h (GCC_C6X_PROTOS_H): Likewise. 
* config/cr16/cr16-protos.h: Likewise. * config/cris/cris.cc (cris_address_cost): Likewise. (cris_side_effect_mode_ok): Likewise. (cris_init_machine_status): Likewise. (cris_emit_movem_store): Likewise. * config/cris/cris.h (INDEX_REG_CLASS): Likewise. (enum reg_class): Likewise. (struct cum_args): Likewise. * config/cris/cris.opt: Likewise. * config/cris/sync.md: Likewise. * config/csky/csky.cc (csky_expand_prologue): Likewise. * config/darwin-c.cc: Likewise. * config/darwin-f.cc: Likewise. * config/darwin-sections.def (zobj_const_section): Likewise. * config/darwin.cc (output_objc_section_asm_op): Likewise. (fprintf): Likewise. * config/darwin.h (GTY): Likewise. * config/elfos.h: Likewise. * config/epiphany/epiphany-sched.md: Likewise. * config/epiphany/epiphany.cc (epiphany_function_value): Likewise. * config/epiphany/epiphany.h (GTY): Likewise. (NO_FUNCTION_CSE): Likewise. * config/epiphany/mode-switch-use.cc: Likewise. * config/epiphany/predicates.md: Likewise. * config/epiphany/t-epiphany: Likewise. * config/fr30/fr30-protos.h: Likewise. * config/frv/frv-protos.h: Likewise. * config/frv/frv.cc (TLS_BIAS): Likewise. * config/frv/frv.h (ASM_OUTPUT_ALIGNED_LOCAL): Likewise. * config/ft32/ft32-protos.h: Likewise. * config/gcn/gcn-hsa.h (ASM_APP_OFF): Likewise. * config/gcn/gcn.cc (gcn_init_libfuncs): Likewise. * config/gcn/mkoffload.cc (copy_early_debug_info): Likewise. * config/gcn/t-gcn-hsa: Likewise. * config/gcn/t-omp-device: Likewise. * config/h8300/h8300-protos.h (GCC_H8300_PROTOS_H): Likewise. (same_cmp_following_p): Likewise. * config/h8300/h8300.cc (F): Likewise. * config/h8300/h8300.h (struct cum_arg): Likewise. (BRANCH_COST): Likewise. * config/i386/cygming.h (DEFAULT_PCC_STRUCT_RETURN): Likewise. * config/i386/djgpp.h (TARGET_ASM_LTO_END): Likewise. * config/i386/dragonfly.h (NO_PROFILE_COUNTERS): Likewise. * config/i386/driver-i386.cc (detect_caches_intel): Likewise. * config/i386/freebsd.h (NO_PROFILE_COUNTERS): Likewise. 
* config/i386/i386-c.cc (ix86_target_macros): Likewise. * config/i386/i386-expand.cc (get_mode_wider_vector): Likewise. * config/i386/i386-options.cc (ix86_set_func_type): Likewise. * config/i386/i386-protos.h (ix86_extract_perm_from_pool_constant): Likewise. (ix86_register_pragmas): Likewise. (ix86_d_has_stdcall_convention): Likewise. (i386_pe_seh_init_sections): Likewise. * config/i386/i386.cc (ix86_function_arg_regno_p): Likewise. (ix86_function_value_regno_p): Likewise. (ix86_compute_frame_layout): Likewise. (legitimize_pe_coff_symbol): Likewise. (output_pic_addr_const): Likewise. * config/i386/i386.h (defined): Likewise. (host_detect_local_cpu): Likewise. (CONSTANT_ADDRESS_P): Likewise. (DEFAULT_LARGE_SECTION_THRESHOLD): Likewise. (struct machine_frame_state): Likewise. * config/i386/i386.md: Likewise. * config/i386/lynx.h (ASM_OUTPUT_ALIGN): Likewise. * config/i386/mmx.md: Likewise. * config/i386/sse.md: Likewise. * config/i386/t-cygming: Likewise. * config/i386/t-djgpp: Likewise. * config/i386/t-gnu-property: Likewise. * config/i386/t-i386: Likewise. * config/i386/t-intelmic: Likewise. * config/i386/t-omp-device: Likewise. * config/i386/winnt-cxx.cc (i386_pe_type_dllimport_p): Likewise. (i386_pe_adjust_class_at_definition): Likewise. * config/i386/winnt.cc (gen_stdcall_or_fastcall_suffix): Likewise. (i386_pe_mangle_decl_assembler_name): Likewise. (i386_pe_encode_section_info): Likewise. * config/i386/x-cygwin: Likewise. * config/i386/x-darwin: Likewise. * config/i386/x-i386: Likewise. * config/i386/x-mingw32: Likewise. * config/i386/x86-tune-sched-core.cc: Likewise. * config/i386/x86-tune.def: Likewise. * config/i386/xm-djgpp.h (STANDARD_STARTFILE_PREFIX_1): Likewise. * config/ia64/freebsd.h: Likewise. * config/ia64/hpux.h (REGISTER_TARGET_PRAGMAS): Likewise. * config/ia64/ia64-protos.h (ia64_except_unwind_info): Likewise. * config/ia64/ia64.cc (ia64_function_value_regno_p): Likewise. (ia64_secondary_reload_class): Likewise. (bundling): Likewise. 
* config/ia64/ia64.h: Likewise. * config/ia64/ia64.md: Likewise. * config/ia64/predicates.md: Likewise. * config/ia64/sysv4.h: Likewise. * config/ia64/t-ia64: Likewise. * config/iq2000/iq2000.h (FUNCTION_MODE): Likewise. * config/iq2000/iq2000.md: Likewise. * config/linux.h (TARGET_HAS_BIONIC): Likewise. (if): Likewise. * config/m32c/m32c.cc (m32c_function_needs_enter): Likewise. * config/m32c/m32c.h (MAX_REGS_PER_ADDRESS): Likewise. * config/m32c/t-m32c: Likewise. * config/m32r/m32r-protos.h: Likewise. * config/m32r/m32r.cc (m32r_print_operand): Likewise. * config/m32r/m32r.h: Likewise. * config/m32r/m32r.md: Likewise. * config/m68k/m68k-isas.def: Likewise. * config/m68k/m68k-microarchs.def: Likewise. * config/m68k/m68k-protos.h (strict_low_part_peephole_ok): Likewise. (m68k_epilogue_uses): Likewise. * config/m68k/m68k.cc (m68k_call_tls_get_addr): Likewise. (m68k_sched_adjust_cost): Likewise. (m68k_sched_md_init): Likewise. * config/m68k/m68k.h (__transfer_from_trampoline): Likewise. (enum m68k_function_kind): Likewise. * config/m68k/m68k.md: Likewise. * config/m68k/m68kemb.h: Likewise. * config/m68k/uclinux.h (ENDFILE_SPEC): Likewise. * config/mcore/mcore-protos.h: Likewise. * config/mcore/mcore.cc (mcore_expand_insv): Likewise. (mcore_expand_prolog): Likewise. * config/mcore/mcore.h (TARGET_MCORE): Likewise. * config/mcore/mcore.md: Likewise. * config/microblaze/microblaze-protos.h: Likewise. * config/microblaze/microblaze.cc (microblaze_legitimate_pic_operand): Likewise. (microblaze_function_prologue): Likewise. (microblaze_function_epilogue): Likewise. (microblaze_select_section): Likewise. (microblaze_asm_output_mi_thunk): Likewise. (microblaze_eh_return): Likewise. * config/microblaze/microblaze.h: Likewise. * config/microblaze/microblaze.md: Likewise. * config/microblaze/t-microblaze: Likewise. * config/mips/driver-native.cc: Likewise. * config/mips/loongson2ef.md: Likewise. * config/mips/mips-protos.h (mips_expand_vec_cmp_expr): Likewise. 
* config/mips/mips.cc (mips_rtx_costs): Likewise. (mips_output_filename): Likewise. (mips_output_function_prologue): Likewise. (mips_output_function_epilogue): Likewise. (mips_output_mi_thunk): Likewise. * config/mips/mips.h: Likewise. * config/mips/mips.md: Likewise. * config/mips/t-mips: Likewise. * config/mips/x-native: Likewise. * config/mmix/mmix-protos.h: Likewise. * config/mmix/mmix.cc (mmix_option_override): Likewise. (mmix_dbx_register_number): Likewise. (mmix_expand_prologue): Likewise. * config/mmix/mmix.h: Likewise. * config/mmix/mmix.md: Likewise. * config/mmix/predicates.md: Likewise. * config/mn10300/mn10300.cc (mn10300_symbolic_operand): Likewise. (mn10300_legitimate_pic_operand_p): Likewise. * config/mn10300/mn10300.h (enum reg_class): Likewise. (NO_FUNCTION_CSE): Likewise. * config/moxie/moxie-protos.h: Likewise. * config/moxie/uclinux.h (TARGET_LIBC_HAS_FUNCTION): Likewise. * config/msp430/msp430-devices.cc (extract_devices_dir_from_exec_prefix): Likewise. * config/msp430/msp430.cc (msp430_gimplify_va_arg_expr): Likewise. (msp430_incoming_return_addr_rtx): Likewise. * config/msp430/msp430.h (msp430_get_linker_devices_include_path): Likewise. * config/msp430/t-msp430: Likewise. * config/nds32/nds32-cost.cc (nds32_rtx_costs_speed_prefer): Likewise. (nds32_rtx_costs_size_prefer): Likewise. (nds32_init_rtx_costs): Likewise. * config/nds32/nds32-doubleword.md: Likewise. * config/nds32/nds32.cc (nds32_memory_move_cost): Likewise. (nds32_builtin_decl): Likewise. * config/nds32/nds32.h (enum nds32_16bit_address_type): Likewise. (enum nds32_isr_nested_type): Likewise. (enum reg_class): Likewise. * config/nds32/predicates.md: Likewise. * config/nds32/t-nds32: Likewise. * config/nios2/nios2.cc (nios2_pragma_target_parse): Likewise. * config/nvptx/nvptx-protos.h: Likewise. * config/nvptx/nvptx.cc (nvptx_goacc_expand_var_decl): Likewise. * config/nvptx/nvptx.h (TARGET_CPU_CPP_BUILTINS): Likewise. * config/nvptx/t-nvptx: Likewise. 
* config/nvptx/t-omp-device: Likewise. * config/pa/elf.h: Likewise. * config/pa/pa-linux.h (GLOBAL_ASM_OP): Likewise. * config/pa/pa-netbsd.h (GLOBAL_ASM_OP): Likewise. * config/pa/pa-openbsd.h (TARGET_ASM_GLOBALIZE_LABEL): Likewise. * config/pa/pa-protos.h (pa_eh_return_handler_rtx): Likewise. (pa_legitimize_reload_address): Likewise. (pa_can_use_return_insn): Likewise. * config/pa/pa.cc (mem_shadd_or_shadd_rtx_p): Likewise. (som_output_text_section_asm_op): Likewise. * config/pa/pa.h (PROFILE_BEFORE_PROLOGUE): Likewise. * config/pa/pa.md: Likewise. * config/pa/som.h: Likewise. * config/pa/t-pa: Likewise. * config/pdp11/pdp11.cc (decode_pdp11_d): Likewise. * config/pdp11/pdp11.h: Likewise. * config/pdp11/pdp11.md: Likewise. * config/pdp11/t-pdp11: Likewise. * config/pru/pru.md: Likewise. * config/pru/t-pru: Likewise. * config/riscv/riscv-protos.h (NUM_SYMBOL_TYPES): Likewise. (riscv_gpr_save_operation_p): Likewise. (riscv_d_register_target_info): Likewise. (riscv_init_builtins): Likewise. * config/riscv/riscv.cc (riscv_output_mi_thunk): Likewise. * config/riscv/riscv.h (CSW_MAX_OFFSET): Likewise. * config/riscv/t-riscv: Likewise. * config/rl78/rl78.cc (rl78_asm_ctor_dtor): Likewise. * config/rl78/t-rl78: Likewise. * config/rs6000/aix.h: Likewise. * config/rs6000/aix71.h (ASM_SPEC_COMMON): Likewise. * config/rs6000/aix72.h (ASM_SPEC_COMMON): Likewise. * config/rs6000/aix73.h (ASM_SPEC_COMMON): Likewise. * config/rs6000/darwin.h (TARGET_ASM_GLOBALIZE_LABEL): Likewise. * config/rs6000/driver-rs6000.cc: Likewise. * config/rs6000/freebsd.h: Likewise. * config/rs6000/freebsd64.h: Likewise. * config/rs6000/lynx.h (ASM_OUTPUT_ALIGN): Likewise. * config/rs6000/rbtree.cc: Likewise. * config/rs6000/rbtree.h: Likewise. * config/rs6000/rs6000-c.cc (rs6000_target_modify_macros): Likewise. * config/rs6000/rs6000-call.cc (rs6000_invalid_builtin): Likewise. (rs6000_expand_builtin): Likewise. (rs6000_init_builtins): Likewise. * config/rs6000/rs6000-cpus.def: Likewise. 
* config/rs6000/rs6000-gen-builtins.cc (write_init_ovld_table): Likewise. * config/rs6000/rs6000-internal.h (ALTIVEC_REG_BIT): Likewise. (quad_address_offset_p): Likewise. * config/rs6000/rs6000-logue.cc (interesting_frame_related_regno): Likewise. (rs6000_emit_epilogue): Likewise. * config/rs6000/rs6000-overload.def: Likewise. * config/rs6000/rs6000-p8swap.cc: Likewise. * config/rs6000/rs6000-protos.h (GCC_RS6000_PROTOS_H): Likewise. (rs6000_const_f32_to_i32): Likewise. * config/rs6000/rs6000.cc (legitimate_lo_sum_address_p): Likewise. (rs6000_debug_legitimize_address): Likewise. (rs6000_mode_dependent_address): Likewise. (rs6000_adjust_priority): Likewise. (rs6000_c_mode_for_suffix): Likewise. * config/rs6000/rs6000.h (defined): Likewise. (LONG_DOUBLE_TYPE_SIZE): Likewise. * config/rs6000/rs6000.md: Likewise. * config/rs6000/sysv4.h: Likewise. * config/rs6000/t-linux: Likewise. * config/rs6000/t-linux64: Likewise. * config/rs6000/t-rs6000: Likewise. * config/rs6000/x-darwin: Likewise. * config/rs6000/x-darwin64: Likewise. * config/rs6000/x-rs6000: Likewise. * config/rs6000/xcoff.h (ASM_OUTPUT_LABELREF): Likewise. * config/rx/rx.cc (rx_expand_builtin): Likewise. * config/s390/constraints.md: Likewise. * config/s390/driver-native.cc: Likewise. * config/s390/htmxlintrin.h: Likewise. * config/s390/s390-builtins.def (B_DEF): Likewise. (OB_DEF_VAR): Likewise. * config/s390/s390-builtins.h: Likewise. * config/s390/s390-c.cc: Likewise. * config/s390/s390-opts.h: Likewise. * config/s390/s390-protos.h (s390_check_symref_alignment): Likewise. (s390_register_target_pragmas): Likewise. * config/s390/s390.cc (s390_init_builtins): Likewise. (s390_expand_plus_operand): Likewise. (s390_expand_atomic): Likewise. (s390_valid_target_attribute_inner_p): Likewise. * config/s390/s390.h (LONG_DOUBLE_TYPE_SIZE): Likewise. * config/s390/s390.md: Likewise. * config/s390/t-s390: Likewise. * config/s390/vx-builtins.md: Likewise. * config/s390/x-native: Likewise. 
* config/sh/divtab-sh4-300.cc (main): Likewise. * config/sh/divtab-sh4.cc (main): Likewise. * config/sh/divtab.cc (main): Likewise. * config/sh/elf.h: Likewise. * config/sh/sh-protos.h (sh_fsca_int2sf): Likewise. * config/sh/sh.cc (SYMBOL_FLAG_FUNCVEC_FUNCTION): Likewise. (sh_struct_value_rtx): Likewise. (sh_remove_reg_dead_or_unused_notes): Likewise. * config/sh/sh.h (MIN_UNITS_PER_WORD): Likewise. * config/sh/t-sh: Likewise. * config/sol2-protos.h (solaris_override_options): Likewise. * config/sol2.h: Likewise. * config/sparc/driver-sparc.cc: Likewise. * config/sparc/freebsd.h: Likewise. * config/sparc/sparc-protos.h (make_pass_work_around_errata): Likewise. * config/sparc/sparc.cc (sparc_output_mi_thunk): Likewise. (sparc_asan_shadow_offset): Likewise. * config/sparc/sparc.h: Likewise. * config/sparc/sparc.md: Likewise. * config/sparc/t-sparc: Likewise. * config/sparc/x-sparc: Likewise. * config/stormy16/stormy16.cc (xstormy16_mode_dependent_address_p): Likewise. * config/t-darwin: Likewise. * config/t-dragonfly: Likewise. * config/t-freebsd: Likewise. * config/t-glibc: Likewise. * config/t-linux: Likewise. * config/t-netbsd: Likewise. * config/t-openbsd: Likewise. * config/t-pnt16-warn: Likewise. * config/t-sol2: Likewise. * config/t-vxworks: Likewise. * config/t-winnt: Likewise. * config/tilegx/t-tilegx: Likewise. * config/tilegx/tilegx-c.cc: Likewise. * config/tilegx/tilegx-protos.h (tilegx_function_profiler): Likewise. * config/tilegx/tilegx.md: Likewise. * config/tilepro/t-tilepro: Likewise. * config/tilepro/tilepro-c.cc: Likewise. * config/v850/t-v850: Likewise. * config/v850/v850-protos.h: Likewise. * config/v850/v850.cc (F): Likewise. * config/v850/v850.h (enum reg_class): Likewise. (SLOW_BYTE_ACCESS): Likewise. * config/vax/vax.cc (vax_mode_dependent_address_p): Likewise. * config/vax/vax.h (enum reg_class): Likewise. * config/vax/vax.md: Likewise. * config/visium/visium.cc (visium_legitimate_address_p): Likewise. * config/visium/visium.h: Likewise. 
* config/vms/t-vms: Likewise. * config/vms/vms-crtlmap.map: Likewise. * config/vms/vms-protos.h (vms_c_get_vms_ver): Likewise. * config/vx-common.h: Likewise. * config/x-darwin: Likewise. * config/x-hpux: Likewise. * config/x-linux: Likewise. * config/x-netbsd: Likewise. * config/x-openbsd: Likewise. * config/x-solaris: Likewise. * config/xtensa/xtensa-protos.h (xtensa_mem_offset): Likewise. * config/xtensa/xtensa.cc (xtensa_option_override): Likewise. * config/xtensa/xtensa.h: Likewise. * configure.ac: Likewise. * context.cc: Likewise. * convert.h: Likewise. * coretypes.h: Likewise. * coverage.cc: Likewise. * coverage.h: Likewise. * cppdefault.h (struct default_include): Likewise. * cprop.cc (local_cprop_pass): Likewise. (one_cprop_pass): Likewise. * cse.cc (hash_rtx_cb): Likewise. (fold_rtx): Likewise. * ctfc.h (ctfc_get_num_vlen_bytes): Likewise. * data-streamer.h (bp_unpack_var_len_int): Likewise. (streamer_write_widest_int): Likewise. * dbgcnt.def: Likewise. * dbxout.cc (dbxout_early_global_decl): Likewise. (dbxout_common_check): Likewise. * dbxout.h: Likewise. * debug.h (struct gcc_debug_hooks): Likewise. (dump_go_spec_init): Likewise. * df-core.cc: Likewise. * df-scan.cc (df_insn_info_delete): Likewise. (df_insn_delete): Likewise. * df.h (debug_df_chain): Likewise. (can_move_insns_across): Likewise. * dfp.cc (decimal_from_binary): Likewise. * diagnostic-color.cc: Likewise. * diagnostic-event-id.h: Likewise. * diagnostic-show-locus.cc (test_one_liner_labels): Likewise. * diagnostic.cc (bt_callback): Likewise. (num_digits): Likewise. * doc/avr-mmcu.texi: Likewise. * doc/cfg.texi: Likewise. * doc/contrib.texi: Likewise. * doc/cppinternals.texi: Likewise. * doc/extend.texi: Likewise. * doc/generic.texi: Likewise. * doc/gimple.texi: Likewise. * doc/gty.texi: Likewise. * doc/invoke.texi: Likewise. * doc/loop.texi: Likewise. * doc/lto.texi: Likewise. * doc/match-and-simplify.texi: Likewise. * doc/md.texi: Likewise. * doc/optinfo.texi: Likewise. 
* doc/options.texi: Likewise. * doc/passes.texi: Likewise. * doc/plugins.texi: Likewise. * doc/rtl.texi: Likewise. * doc/sourcebuild.texi: Likewise. * doc/tm.texi: Likewise. * doc/tm.texi.in: Likewise. * doc/tree-ssa.texi: Likewise. * dojump.cc (do_jump): Likewise. * dojump.h: Likewise. * dumpfile.cc (test_impl_location): Likewise. (test_capture_of_dump_calls): Likewise. * dumpfile.h (enum dump_kind): Likewise. (class dump_location_t): Likewise. (dump_enabled_p): Likewise. (enable_rtl_dump_file): Likewise. (dump_combine_total_stats): Likewise. * dwarf2asm.cc (dw2_asm_output_delta_uleb128): Likewise. * dwarf2ctf.h (ctf_debug_finish): Likewise. * dwarf2out.cc (dwarf2out_begin_prologue): Likewise. (struct loc_descr_context): Likewise. (rtl_for_decl_location): Likewise. (gen_subprogram_die): Likewise. (gen_label_die): Likewise. (is_trivial_indirect_ref): Likewise. (dwarf2out_late_global_decl): Likewise. (dwarf_file_hasher::hash): Likewise. (dwarf2out_end_source_file): Likewise. (dwarf2out_define): Likewise. (dwarf2out_early_finish): Likewise. * dwarf2out.h (struct dw_fde_node): Likewise. (struct dw_discr_list_node): Likewise. (output_loc_sequence_raw): Likewise. * emit-rtl.cc (gen_raw_REG): Likewise. (maybe_set_max_label_num): Likewise. * emit-rtl.h (struct rtl_data): Likewise. * errors.cc (internal_error): Likewise. (trim_filename): Likewise. * et-forest.cc: Likewise. * except.cc (init_eh_for_function): Likewise. * explow.cc (promote_ssa_mode): Likewise. (get_dynamic_stack_size): Likewise. * explow.h: Likewise. * expmed.h: Likewise. * expr.cc (safe_from_p): Likewise. (expand_expr_real_2): Likewise. (expand_expr_real_1): Likewise. * file-prefix-map.cc (remap_filename): Likewise. * final.cc (app_enable): Likewise. (make_pass_compute_alignments): Likewise. (final_scan_insn_1): Likewise. (final_scan_insn): Likewise. * fixed-value.h (fixed_from_string): Likewise. * flag-types.h (NO_DEBUG): Likewise. (DWARF2_DEBUG): Likewise. (VMS_DEBUG): Likewise. (BTF_DEBUG): Likewise. 
(enum ctf_debug_info_levels): Likewise. * fold-const.cc (const_binop): Likewise. (fold_binary_loc): Likewise. (fold_checksum_tree): Likewise. * fp-test.cc: Likewise. * function.cc (expand_function_end): Likewise. * function.h (struct function): Likewise. * fwprop.cc (should_replace_address): Likewise. * gcc-main.cc: Likewise. * gcc-rich-location.h (class gcc_rich_location): Likewise. * gcc-symtab.h: Likewise. * gcc.cc (MIN_FATAL_STATUS): Likewise. (driver_handle_option): Likewise. (quote_spec_arg): Likewise. (driver::finalize): Likewise. * gcc.h (set_input): Likewise. * gcov-dump.cc: Likewise. * gcov.cc (solve_flow_graph): Likewise. * gcse-common.cc: Likewise. * gcse.cc (make_pass_rtl_hoist): Likewise. * genattr-common.cc: Likewise. * genattrtab.cc (min_fn): Likewise. (write_const_num_delay_slots): Likewise. * genautomata.cc: Likewise. * genconditions.cc (write_one_condition): Likewise. * genconstants.cc: Likewise. * genemit.cc (gen_exp): Likewise. * generic-match-head.cc: Likewise. * genextract.cc: Likewise. * gengenrtl.cc (always_void_p): Likewise. * gengtype-parse.cc (gtymarker_opt): Likewise. * gengtype-state.cc (state_writer::state_writer): Likewise. (write_state_trailer): Likewise. (equals_type_number): Likewise. (read_state): Likewise. * gengtype.cc (open_base_files): Likewise. (struct file_rule_st): Likewise. (header_dot_h_frul): Likewise. * gengtype.h: Likewise. * genmatch.cc (main): Likewise. * genmddeps.cc: Likewise. * genmodes.cc (emit_mode_inner): Likewise. (emit_mode_unit_size): Likewise. * genpeep.cc (gen_peephole): Likewise. * genpreds.cc (write_tm_preds_h): Likewise. * genrecog.cc (validate_pattern): Likewise. (write_header): Likewise. (main): Likewise. * gensupport.cc (change_subst_attribute): Likewise. (traverse_c_tests): Likewise. (add_predicate): Likewise. (init_predicate_table): Likewise. * gensupport.h (struct optab_pattern): Likewise. (get_num_insn_codes): Likewise. (maybe_eval_c_test): Likewise. (struct pred_data): Likewise. 
* ggc-internal.h: Likewise. * gimple-fold.cc (maybe_fold_reference): Likewise. (get_range_strlen_tree): Likewise. * gimple-fold.h (gimple_stmt_integer_valued_real_p): Likewise. * gimple-low.cc: Likewise. * gimple-match-head.cc (directly_supported_p): Likewise. * gimple-pretty-print.h: Likewise. * gimple-ssa-sprintf.cc (format_percent): Likewise. (adjust_range_for_overflow): Likewise. * gimple-streamer.h: Likewise. * gimple.h (struct GTY): Likewise. (is_gimple_resx): Likewise. * gimplify.cc (gimplify_expr): Likewise. (gimplify_init_constructor): Likewise. (omp_construct_selector_matches): Likewise. (gimplify_omp_target_update): Likewise. (gimplify_omp_ordered): Likewise. (gimplify_va_arg_expr): Likewise. * graphite-isl-ast-to-gimple.cc (should_copy_to_new_region): Likewise. * haifa-sched.cc (increase_insn_priority): Likewise. (try_ready): Likewise. (sched_create_recovery_edges): Likewise. * ifcvt.cc (find_if_case_1): Likewise. (find_if_case_2): Likewise. * inchash.h: Likewise. * incpath.cc (add_env_var_paths): Likewise. * input.cc (dump_location_info): Likewise. (assert_loceq): Likewise. (test_lexer_string_locations_concatenation_1): Likewise. (test_lexer_string_locations_concatenation_2): Likewise. (test_lexer_string_locations_concatenation_3): Likewise. * input.h (BUILTINS_LOCATION): Likewise. (class string_concat_db): Likewise. * internal-fn.cc (expand_MUL_OVERFLOW): Likewise. (expand_LOOP_VECTORIZED): Likewise. * ipa-cp.cc (make_pass_ipa_cp): Likewise. * ipa-fnsummary.cc (remap_freqcounting_preds_after_dup): Likewise. (ipa_fn_summary_t::duplicate): Likewise. (make_pass_ipa_fn_summary): Likewise. * ipa-fnsummary.h (enum ipa_hints_vals): Likewise. * ipa-free-lang-data.cc (fld_simplified_type): Likewise. (free_lang_data_in_decl): Likewise. * ipa-inline.cc (compute_inlined_call_time): Likewise. (inline_always_inline_functions): Likewise. * ipa-inline.h (free_growth_caches): Likewise. (inline_account_function_p): Likewise. 
* ipa-modref.cc (modref_access_analysis::analyze_stmt): Likewise. (modref_eaf_analysis::analyze_ssa_name): Likewise. * ipa-param-manipulation.cc (ipa_param_body_adjustments::mark_dead_statements): Likewise. (ipa_param_body_adjustments::remap_with_debug_expressions): Likewise. * ipa-prop.cc (ipa_set_node_agg_value_chain): Likewise. * ipa-prop.h (IPA_UNDESCRIBED_USE): Likewise. (unadjusted_ptr_and_unit_offset): Likewise. * ipa-reference.cc (make_pass_ipa_reference): Likewise. * ipa-reference.h (GCC_IPA_REFERENCE_H): Likewise. * ipa-split.cc (consider_split): Likewise. * ipa-sra.cc (isra_read_node_info): Likewise. * ipa-utils.h (struct ipa_dfs_info): Likewise. (recursive_call_p): Likewise. (ipa_make_function_pure): Likewise. * ira-build.cc (ira_create_allocno): Likewise. (ira_flattening): Likewise. * ira-color.cc (do_coloring): Likewise. (update_curr_costs): Likewise. * ira-conflicts.cc (process_regs_for_copy): Likewise. * ira-int.h (struct ira_emit_data): Likewise. (ira_prohibited_mode_move_regs): Likewise. (ira_get_dup_out_num): Likewise. (ira_destroy): Likewise. (ira_tune_allocno_costs): Likewise. (ira_implicitly_set_insn_hard_regs): Likewise. (ira_build_conflicts): Likewise. (ira_color): Likewise. * ira-lives.cc (process_bb_node_lives): Likewise. * ira.cc (class ira_spilled_reg_stack_slot): Likewise. (setup_uniform_class_p): Likewise. (def_dominates_uses): Likewise. * ira.h (ira_nullify_asm_goto): Likewise. * langhooks.cc (lhd_post_options): Likewise. * langhooks.h (class substring_loc): Likewise. (struct lang_hooks_for_tree_inlining): Likewise. (struct lang_hooks_for_types): Likewise. (struct lang_hooks): Likewise. * libfuncs.h (synchronize_libfunc): Likewise. * loop-doloop.cc (doloop_condition_get): Likewise. * loop-init.cc (fix_loop_structure): Likewise. * loop-invariant.cc: Likewise. * lower-subreg.h: Likewise. * lra-constraints.cc (curr_insn_transform): Likewise. * lra-int.h (struct lra_insn_reg): Likewise. (lra_undo_inheritance): Likewise. 
(lra_setup_reload_pseudo_preferenced_hard_reg): Likewise. (lra_split_hard_reg_for): Likewise. (lra_coalesce): Likewise. (lra_final_code_change): Likewise. * lra-spills.cc (lra_final_code_change): Likewise. * lra.cc (lra_process_new_insns): Likewise. * lto-compress.h (struct lto_compression_stream): Likewise. * lto-streamer-out.cc (DFS::DFS_write_tree_body): Likewise. (write_symbol): Likewise. * lto-streamer.h (enum LTO_tags): Likewise. (lto_value_range_error): Likewise. (lto_append_block): Likewise. (lto_streamer_hooks_init): Likewise. (stream_read_tree_ref): Likewise. (lto_prepare_function_for_streaming): Likewise. (select_what_to_stream): Likewise. (omp_lto_input_declare_variant_alt): Likewise. (cl_optimization_stream_in): Likewise. * lto-wrapper.cc (append_compiler_options): Likewise. * machmode.def: Likewise. * machmode.h (struct int_n_data_t): Likewise. * main.cc (main): Likewise. * match.pd: Likewise. * omp-builtins.def (BUILT_IN_GOMP_CRITICAL_NAME_END): Likewise. (BUILT_IN_GOMP_LOOP_ULL_ORDERED_RUNTIME_NEXT): Likewise. * omp-expand.cc (expand_omp_atomic_fetch_op): Likewise. (make_pass_expand_omp_ssa): Likewise. * omp-low.cc (struct omp_context): Likewise. (struct omp_taskcopy_context): Likewise. (lower_omp): Likewise. * omp-oacc-neuter-broadcast.cc (omp_sese_active_worker_call): Likewise. (mask_name): Likewise. (omp_sese_dump_pars): Likewise. (worker_single_simple): Likewise. * omp-offload.cc (omp_finish_file): Likewise. (execute_oacc_loop_designation): Likewise. * optabs-query.cc (lshift_cheap_p): Likewise. * optc-gen.awk: Likewise. * optc-save-gen.awk: Likewise. * optinfo-emit-json.cc (optrecord_json_writer::optrecord_json_writer): Likewise. * opts-common.cc: Likewise. * output.h (app_enable): Likewise. (output_operand_lossage): Likewise. (insn_current_reference_address): Likewise. (get_insn_template): Likewise. (output_quoted_string): Likewise. * pass_manager.h (struct register_pass_info): Likewise. * plugin.cc: Likewise. 
* plugin.def (PLUGIN_ANALYZER_INIT): Likewise. * plugin.h (invoke_plugin_callbacks): Likewise. * pointer-query.cc (handle_mem_ref): Likewise. * postreload-gcse.cc (alloc_mem): Likewise. * predict.h (enum prediction): Likewise. (add_reg_br_prob_note): Likewise. * prefix.h: Likewise. * profile.h (get_working_sets): Likewise. * read-md.cc: Likewise. * read-md.h (struct mapping): Likewise. (class md_reader): Likewise. (class noop_reader): Likewise. * read-rtl-function.cc (function_reader::create_function): Likewise. (function_reader::extra_parsing_for_operand_code_0): Likewise. * read-rtl.cc (initialize_iterators): Likewise. * real.cc: Likewise. * real.h (struct real_value): Likewise. (format_helper::format_helper): Likewise. (real_hash): Likewise. (real_can_shorten_arithmetic): Likewise. * recog.cc (struct target_recog): Likewise. (offsettable_nonstrict_memref_p): Likewise. (constrain_operands): Likewise. * recog.h (MAX_RECOG_ALTERNATIVES): Likewise. (which_op_alt): Likewise. (struct insn_gen_fn): Likewise. * reg-notes.def (REG_NOTE): Likewise. * reg-stack.cc: Likewise. * regs.h (reg_is_parm_p): Likewise. * regset.h: Likewise. * reload.cc (push_reload): Likewise. (find_reloads): Likewise. (find_reloads_address_1): Likewise. (find_replacement): Likewise. (refers_to_regno_for_reload_p): Likewise. (refers_to_mem_for_reload_p): Likewise. * reload.h (push_reload): Likewise. (deallocate_reload_reg): Likewise. * reload1.cc (emit_input_reload_insns): Likewise. * reorg.cc (relax_delay_slots): Likewise. * rtl.def (UNKNOWN): Likewise. (SEQUENCE): Likewise. (BARRIER): Likewise. (ASM_OPERANDS): Likewise. (EQ_ATTR_ALT): Likewise. * rtl.h (struct GTY): Likewise. (LABEL_NAME): Likewise. (LABEL_ALT_ENTRY_P): Likewise. (SUBREG_BYTE): Likewise. (get_stack_check_protect): Likewise. (dump_rtx_statistics): Likewise. (unwrap_const_vec_duplicate): Likewise. (subreg_promoted_mode): Likewise. (gen_lowpart_common): Likewise. (operand_subword): Likewise. (immed_wide_int_const): Likewise. 
(decide_function_section): Likewise. (active_insn_p): Likewise. (delete_related_insns): Likewise. (try_split): Likewise. (val_signbit_known_clear_p): Likewise. (simplifiable_subregs): Likewise. (set_insn_deleted): Likewise. (subreg_get_info): Likewise. (remove_free_EXPR_LIST_node): Likewise. (finish_subregs_of_mode): Likewise. (get_mem_attrs): Likewise. (lookup_constant_def): Likewise. (rtx_to_tree_code): Likewise. (hash_rtx): Likewise. (condjump_in_parallel_p): Likewise. (validate_subreg): Likewise. (make_compound_operation): Likewise. (schedule_ebbs): Likewise. (print_inline_rtx): Likewise. (fixup_args_size_notes): Likewise. (expand_dec): Likewise. (prepare_copy_insn): Likewise. (mark_elimination): Likewise. (valid_mode_changes_for_regno): Likewise. (make_debug_expr_from_rtl): Likewise. (delete_vta_debug_insns): Likewise. (simplify_using_condition): Likewise. (set_insn_locations): Likewise. (fatal_insn_not_found): Likewise. (word_register_operation_p): Likewise. * rtlanal.cc (get_call_fndecl): Likewise. (side_effects_p): Likewise. (subreg_nregs): Likewise. (rtx_cost): Likewise. (canonicalize_condition): Likewise. * rtlanal.h (rtx_properties::try_to_add_note): Likewise. * run-rtl-passes.cc (run_rtl_passes): Likewise. * sanitizer.def (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK): Likewise. * sched-deps.cc (add_dependence_1): Likewise. * sched-ebb.cc (begin_move_insn): Likewise. (add_deps_for_risky_insns): Likewise. (advance_target_bb): Likewise. * sched-int.h (reemit_notes): Likewise. (struct _haifa_insn_data): Likewise. (HID): Likewise. (DEP_CANCELLED): Likewise. (debug_ds): Likewise. (number_in_ready): Likewise. (schedule_ebbs_finish): Likewise. (find_modifiable_mems): Likewise. * sched-rgn.cc (debug_rgn_dependencies): Likewise. * sel-sched-dump.cc (dump_lv_set): Likewise. * sel-sched-dump.h: Likewise. * sel-sched-ir.cc (sel_insn_rtx_cost): Likewise. (setup_id_reg_sets): Likewise. (has_dependence_p): Likewise. (sel_num_cfg_preds_gt_1): Likewise. 
(bb_ends_ebb_p): Likewise. * sel-sched-ir.h (struct _list_node): Likewise. (struct idata_def): Likewise. (bb_next_bb): Likewise. * sel-sched.cc (vinsn_writes_one_of_regs_p): Likewise. (choose_best_pseudo_reg): Likewise. (verify_target_availability): Likewise. (can_speculate_dep_p): Likewise. (sel_rank_for_schedule): Likewise. * selftest-run-tests.cc (selftest::run_tests): Likewise. * selftest.h (class auto_fix_quotes): Likewise. * shrink-wrap.cc (handle_simple_exit): Likewise. * shrink-wrap.h: Likewise. * simplify-rtx.cc (simplify_context::simplify_associative_operation): Likewise. (simplify_context::simplify_gen_vec_select): Likewise. * spellcheck-tree.h: Likewise. * spellcheck.h: Likewise. * statistics.h (struct function): Likewise. * stmt.cc (conditional_probability): Likewise. * stmt.h: Likewise. * stor-layout.h: Likewise. * streamer-hooks.h: Likewise. * stringpool.h: Likewise. * symtab.cc (symbol_table::change_decl_assembler_name): Likewise. * target.def (HOOK_VECTOR_END): Likewise. (type.): Likewise. * target.h (union cumulative_args_t): Likewise. (by_pieces_ninsns): Likewise. (class predefined_function_abi): Likewise. * targhooks.cc (default_translate_mode_attribute): Likewise. * timevar.def: Likewise. * timevar.h (class timer): Likewise. * toplev.h (enable_rtl_dump_file): Likewise. * trans-mem.cc (collect_bb2reg): Likewise. * tree-call-cdce.cc (gen_conditions_for_pow): Likewise. * tree-cfg.cc (remove_bb): Likewise. (verify_gimple_debug): Likewise. (remove_edge_and_dominated_blocks): Likewise. (push_fndecl): Likewise. * tree-cfgcleanup.h (GCC_TREE_CFGCLEANUP_H): Likewise. * tree-complex.cc (expand_complex_multiplication): Likewise. (expand_complex_div_straight): Likewise. * tree-core.h (enum tree_index): Likewise. (enum operand_equal_flag): Likewise. * tree-eh.cc (honor_protect_cleanup_actions): Likewise. * tree-if-conv.cc (if_convertible_gimple_assign_stmt_p): Likewise. * tree-inline.cc (initialize_inlined_parameters): Likewise. 
* tree-inline.h (force_value_to_type): Likewise. * tree-nested.cc (get_chain_decl): Likewise. (walk_all_functions): Likewise. * tree-object-size.h: Likewise. * tree-outof-ssa.cc: Likewise. * tree-parloops.cc (create_parallel_loop): Likewise. * tree-pretty-print.cc (print_generic_expr_to_str): Likewise. (dump_generic_node): Likewise. * tree-profile.cc (tree_profiling): Likewise. * tree-sra.cc (maybe_add_sra_candidate): Likewise. * tree-ssa-address.cc: Likewise. * tree-ssa-alias.cc: Likewise. * tree-ssa-alias.h (ao_ref::max_size_known_p): Likewise. (dump_alias_stats): Likewise. * tree-ssa-ccp.cc: Likewise. * tree-ssa-coalesce.h: Likewise. * tree-ssa-live.cc (remove_unused_scope_block_p): Likewise. * tree-ssa-loop-manip.cc (copy_phi_node_args): Likewise. * tree-ssa-loop-unswitch.cc: Likewise. * tree-ssa-math-opts.cc: Likewise. * tree-ssa-operands.cc (class operands_scanner): Likewise. * tree-ssa-pre.cc: Likewise. * tree-ssa-reassoc.cc (optimize_ops_list): Likewise. (debug_range_entry): Likewise. * tree-ssa-sccvn.cc (eliminate_dom_walker::eliminate_stmt): Likewise. * tree-ssa-sccvn.h (TREE_SSA_SCCVN_H): Likewise. * tree-ssa-scopedtables.cc (add_expr_commutative): Likewise. (equal_mem_array_ref_p): Likewise. * tree-ssa-strlen.cc (is_strlen_related_p): Likewise. * tree-ssa-strlen.h (get_range_strlen_dynamic): Likewise. * tree-ssa-tail-merge.cc (stmt_local_def): Likewise. * tree-ssa-ter.h: Likewise. * tree-ssa-threadupdate.h (enum bb_dom_status): Likewise. * tree-streamer-in.cc (lto_input_ts_block_tree_pointers): Likewise. * tree-streamer-out.cc (pack_ts_block_value_fields): Likewise. (write_ts_block_tree_pointers): Likewise. * tree-streamer.h (struct streamer_tree_cache_d): Likewise. (streamer_read_tree_bitfields): Likewise. (streamer_write_integer_cst): Likewise. * tree-vect-patterns.cc (apply_binop_and_append_stmt): Likewise. (vect_synth_mult_by_constant): Likewise. * tree-vect-stmts.cc (vectorizable_operation): Likewise. * tree-vectorizer.cc: Likewise. 
* tree-vectorizer.h (class auto_purge_vect_location): Likewise. (vect_update_inits_of_drs): Likewise. (vect_get_mask_type_for_stmt): Likewise. (vect_rgroup_iv_might_wrap_p): Likewise. (cse_and_gimplify_to_preheader): Likewise. (vect_free_slp_tree): Likewise. (vect_pattern_recog): Likewise. (vect_stmt_dominates_stmt_p): Likewise. * tree.cc (initialize_tree_contains_struct): Likewise. (need_assembler_name_p): Likewise. (type_with_interoperable_signedness): Likewise. * tree.def (SWITCH_EXPR): Likewise. * tree.h (TYPE_SYMTAB_ADDRESS): Likewise. (poly_int_tree_p): Likewise. (inlined_function_outer_scope_p): Likewise. (tree_code_for_canonical_type_merging): Likewise. * value-prof.cc: Likewise. * value-prof.h (get_nth_most_common_value): Likewise. (find_func_by_profile_id): Likewise. * value-range.cc (vrp_operand_equal_p): Likewise. * value-range.h: Likewise. * var-tracking.cc: Likewise. * varasm.cc (default_function_section): Likewise. (function_section_1): Likewise. (assemble_variable): Likewise. (handle_vtv_comdat_section): Likewise. * vec.h (struct vec_prefix): Likewise. * vmsdbgout.cc (full_name): Likewise. * vtable-verify.cc: Likewise. * vtable-verify.h (struct vtv_graph_node): Likewise. * xcoffout.cc: Likewise. * xcoffout.h (DEBUG_SYMS_TEXT): Likewise. gcc/ada/ChangeLog: * Make-generated.in: Rename .c names to .cc. * adaint.c: Likewise. * ctrl_c.c (dummy_handler): Likewise. * gcc-interface/Makefile.in: Likewise. * gcc-interface/config-lang.in: Likewise. * gcc-interface/decl.cc (concat_name): Likewise. (init_gnat_decl): Likewise. * gcc-interface/gigi.h (concat_name): Likewise. (init_gnat_utils): Likewise. (build_call_raise_range): Likewise. (gnat_mark_addressable): Likewise. (gnat_protect_expr): Likewise. (gnat_rewrite_reference): Likewise. * gcc-interface/lang-specs.h (ADA_DUMPS_OPTIONS): Likewise. * gcc-interface/utils.cc (GTY): Likewise. (add_deferred_type_context): Likewise. (init_gnat_utils): Likewise. * gcc-interface/utils2.cc (gnat_stable_expr_p): Likewise. 
(gnat_protect_expr): Likewise. (gnat_stabilize_reference_1): Likewise. (gnat_rewrite_reference): Likewise. * gsocket.h: Likewise. * init.cc (__gnat_error_handler): Likewise. * libgnarl/s-intman.ads: Likewise. * libgnarl/s-osinte__android.ads: Likewise. * libgnarl/s-osinte__darwin.ads: Likewise. * libgnarl/s-osinte__hpux.ads: Likewise. * libgnarl/s-osinte__linux.ads: Likewise. * libgnarl/s-osinte__qnx.ads: Likewise. * libgnarl/s-taskin.ads: Likewise. * rtfinal.cc: Likewise. * s-oscons-tmplt.c (CND): Likewise. * set_targ.ads: Likewise. gcc/analyzer/ChangeLog: * analyzer.cc (is_special_named_call_p): Rename .c names to .cc. (is_named_call_p): Likewise. * region-model-asm.cc (deterministic_p): Likewise. * region.cc (field_region::get_relative_concrete_offset): Likewise. * sm-malloc.cc (method_p): Likewise. * supergraph.cc (superedge::dump_dot): Likewise. gcc/c-family/ChangeLog: * c-ada-spec.cc: Rename .c names to .cc. * c-ada-spec.h: Likewise. * c-common.cc (c_build_vec_convert): Likewise. (warning_candidate_p): Likewise. * c-common.h (enum rid): Likewise. (build_real_imag_expr): Likewise. (finish_label_address_expr): Likewise. (c_get_substring_location): Likewise. (c_build_bind_expr): Likewise. (conflict_marker_get_final_tok_kind): Likewise. (c_parse_error): Likewise. (check_missing_format_attribute): Likewise. (invalid_array_size_error): Likewise. (warn_for_multistatement_macros): Likewise. (build_attr_access_from_parms): Likewise. * c-cppbuiltin.cc (c_cpp_builtins): Likewise. * c-format.cc: Likewise. * c-gimplify.cc (c_gimplify_expr): Likewise. * c-indentation.h: Likewise. * c-objc.h (objc_prop_attr_kind_for_rid): Likewise. * c-omp.cc (c_omp_predetermined_mapping): Likewise. * c-opts.cc (c_common_post_options): Likewise. (set_std_cxx23): Likewise. * c-pragma.cc (handle_pragma_redefine_extname): Likewise. * c-pretty-print.h: Likewise. gcc/c/ChangeLog: * Make-lang.in: Rename .c names to .cc. * c-convert.cc: Likewise. * c-decl.cc (struct lang_identifier): Likewise. 
(pop_scope): Likewise. (finish_decl): Likewise. * c-objc-common.h (GCC_C_OBJC_COMMON): Likewise. * c-parser.cc (c_parser_skip_to_end_of_block_or_statement): Likewise. * c-parser.h (GCC_C_PARSER_H): Likewise. * c-tree.h (c_keyword_starts_typename): Likewise. (finish_declspecs): Likewise. (c_get_alias_set): Likewise. (enum c_oracle_request): Likewise. (tag_exists_p): Likewise. (set_c_expr_source_range): Likewise. * c-typeck.cc (c_common_type): Likewise. (c_finish_omp_clauses): Likewise. * config-lang.in: Likewise. gcc/cp/ChangeLog: * Make-lang.in: Rename .c names to .cc. * config-lang.in: Likewise. * constexpr.cc (cxx_eval_constant_expression): Likewise. * coroutines.cc (morph_fn_to_coro): Likewise. * cp-gimplify.cc (cp_gimplify_expr): Likewise. * cp-lang.cc (struct lang_hooks): Likewise. (get_template_argument_pack_elems_folded): Likewise. * cp-objcp-common.cc (cp_tree_size): Likewise. (cp_unit_size_without_reusable_padding): Likewise. (pop_file_scope): Likewise. (cp_pushdecl): Likewise. * cp-objcp-common.h (GCC_CP_OBJCP_COMMON): Likewise. (cxx_simulate_record_decl): Likewise. * cp-tree.h (struct named_label_entry): Likewise. (current_function_return_value): Likewise. (more_aggr_init_expr_args_p): Likewise. (get_function_version_dispatcher): Likewise. (common_enclosing_class): Likewise. (strip_fnptr_conv): Likewise. (current_decl_namespace): Likewise. (do_aggregate_paren_init): Likewise. (cp_check_const_attributes): Likewise. (qualified_name_lookup_error): Likewise. (generic_targs_for): Likewise. (mark_exp_read): Likewise. (is_global_friend): Likewise. (maybe_reject_flexarray_init): Likewise. (module_token_lang): Likewise. (handle_module_option): Likewise. (literal_integer_zerop): Likewise. (build_extra_args): Likewise. (build_if_nonnull): Likewise. (maybe_check_overriding_exception_spec): Likewise. (finish_omp_target_clauses): Likewise. (maybe_warn_zero_as_null_pointer_constant): Likewise. (cxx_print_error_function): Likewise. (decl_in_std_namespace_p): Likewise. 
(merge_exception_specifiers): Likewise. (mangle_module_global_init): Likewise. (cxx_block_may_fallthru): Likewise. (fold_builtin_source_location): Likewise. (enum cp_oracle_request): Likewise. (subsumes): Likewise. (cp_finish_injected_record_type): Likewise. (vtv_build_vtable_verify_fndecl): Likewise. (cp_tree_c_finish_parsing): Likewise. * cvt.cc (diagnose_ref_binding): Likewise. (convert_to_void): Likewise. (convert_force): Likewise. (type_promotes_to): Likewise. * decl.cc (make_unbound_class_template_raw): Likewise. (cxx_init_decl_processing): Likewise. (check_class_member_definition_namespace): Likewise. (cxx_maybe_build_cleanup): Likewise. * decl2.cc (maybe_emit_vtables): Likewise. * error.cc (dump_function_name): Likewise. * init.cc (is_class_type): Likewise. (build_new_1): Likewise. * lang-specs.h: Likewise. * method.cc (make_alias_for_thunk): Likewise. * module.cc (specialization_add): Likewise. (module_state::read_cluster): Likewise. * name-lookup.cc (check_extern_c_conflict): Likewise. * name-lookup.h (struct cxx_binding): Likewise. * parser.cc (cp_parser_identifier): Likewise. * parser.h (struct cp_parser): Likewise. * pt.cc (has_value_dependent_address): Likewise. (push_tinst_level_loc): Likewise. * semantics.cc (finish_omp_clauses): Likewise. (finish_omp_atomic): Likewise. * tree.cc (cp_save_expr): Likewise. (cp_free_lang_data): Likewise. * typeck.cc (cp_common_type): Likewise. (strip_array_domain): Likewise. (rationalize_conditional_expr): Likewise. (check_return_expr): Likewise. * vtable-class-hierarchy.cc: Likewise. gcc/d/ChangeLog: * d-gimplify.cc: Rename .c names to .cc. * d-incpath.cc: Likewise. * lang-specs.h: Likewise. gcc/fortran/ChangeLog: * check.cc (gfc_check_all_any): Rename .c names to .cc. * class.cc (find_intrinsic_vtab): Likewise. * config-lang.in: Likewise. * cpp.cc (cpp_define_builtins): Likewise. * data.cc (get_array_index): Likewise. * decl.cc (match_clist_expr): Likewise. (get_proc_name): Likewise. 
(gfc_verify_c_interop_param): Likewise. (gfc_get_pdt_instance): Likewise. (gfc_match_formal_arglist): Likewise. (gfc_get_type_attr_spec): Likewise. * dependency.cc: Likewise. * error.cc (gfc_format_decoder): Likewise. * expr.cc (check_restricted): Likewise. (gfc_build_default_init_expr): Likewise. * f95-lang.cc: Likewise. * gfc-internals.texi: Likewise. * gfortran.h (enum match): Likewise. (enum procedure_type): Likewise. (enum oacc_routine_lop): Likewise. (gfc_get_pdt_instance): Likewise. (gfc_end_source_files): Likewise. (gfc_mpz_set_hwi): Likewise. (gfc_get_option_string): Likewise. (gfc_find_sym_in_expr): Likewise. (gfc_errors_to_warnings): Likewise. (gfc_real_4_kind): Likewise. (gfc_free_finalizer): Likewise. (gfc_sym_get_dummy_args): Likewise. (gfc_check_intrinsic_standard): Likewise. (gfc_free_case_list): Likewise. (gfc_resolve_oacc_routines): Likewise. (gfc_check_vardef_context): Likewise. (gfc_free_association_list): Likewise. (gfc_implicit_pure_function): Likewise. (gfc_ref_dimen_size): Likewise. (gfc_compare_actual_formal): Likewise. (gfc_resolve_wait): Likewise. (gfc_dt_upper_string): Likewise. (gfc_generate_module_code): Likewise. (gfc_delete_bbt): Likewise. (debug): Likewise. (gfc_build_block_ns): Likewise. (gfc_dep_difference): Likewise. (gfc_invalid_null_arg): Likewise. (gfc_is_finalizable): Likewise. (gfc_fix_implicit_pure): Likewise. (gfc_is_size_zero_array): Likewise. (gfc_is_reallocatable_lhs): Likewise. * gfortranspec.cc: Likewise. * interface.cc (compare_actual_expr): Likewise. * intrinsic.cc (add_functions): Likewise. * iresolve.cc (gfc_resolve_matmul): Likewise. (gfc_resolve_alarm_sub): Likewise. * iso-c-binding.def: Likewise. * lang-specs.h: Likewise. * libgfortran.h (GFC_STDERR_UNIT_NUMBER): Likewise. * match.cc (gfc_match_label): Likewise. (gfc_match_symbol): Likewise. (match_derived_type_spec): Likewise. (copy_ts_from_selector_to_associate): Likewise. * match.h (gfc_match_call): Likewise. (gfc_get_common): Likewise. 
(gfc_match_omp_end_single): Likewise. (gfc_match_volatile): Likewise. (gfc_match_bind_c): Likewise. (gfc_match_literal_constant): Likewise. (gfc_match_init_expr): Likewise. (gfc_match_array_constructor): Likewise. (gfc_match_end_interface): Likewise. (gfc_match_print): Likewise. (gfc_match_expr): Likewise. * matchexp.cc (next_operator): Likewise. * mathbuiltins.def: Likewise. * module.cc (free_true_name): Likewise. * openmp.cc (gfc_resolve_omp_parallel_blocks): Likewise. (gfc_omp_save_and_clear_state): Likewise. * parse.cc (parse_union): Likewise. (set_syms_host_assoc): Likewise. * resolve.cc (resolve_actual_arglist): Likewise. (resolve_elemental_actual): Likewise. (check_host_association): Likewise. (resolve_typebound_function): Likewise. (resolve_typebound_subroutine): Likewise. (gfc_resolve_expr): Likewise. (resolve_assoc_var): Likewise. (resolve_typebound_procedures): Likewise. (resolve_equivalence_derived): Likewise. * simplify.cc (simplify_bound): Likewise. * symbol.cc (gfc_set_default_type): Likewise. (gfc_add_ext_attribute): Likewise. * target-memory.cc (gfc_target_interpret_expr): Likewise. * target-memory.h (gfc_target_interpret_expr): Likewise. * trans-array.cc (gfc_get_cfi_dim_sm): Likewise. (gfc_conv_shift_descriptor_lbound): Likewise. (gfc_could_be_alias): Likewise. (gfc_get_dataptr_offset): Likewise. * trans-const.cc: Likewise. * trans-decl.cc (trans_function_start): Likewise. (gfc_trans_deferred_vars): Likewise. (generate_local_decl): Likewise. (gfc_generate_function_code): Likewise. * trans-expr.cc (gfc_vptr_size_get): Likewise. (gfc_trans_class_array_init_assign): Likewise. (POWI_TABLE_SIZE): Likewise. (gfc_conv_procedure_call): Likewise. (gfc_trans_arrayfunc_assign): Likewise. * trans-intrinsic.cc (gfc_conv_intrinsic_len): Likewise. (gfc_conv_intrinsic_loc): Likewise. (conv_intrinsic_event_query): Likewise. * trans-io.cc (gfc_build_st_parameter): Likewise. * trans-openmp.cc (gfc_omp_check_optional_argument): Likewise. 
(gfc_omp_unshare_expr_r): Likewise. (gfc_trans_omp_array_section): Likewise. (gfc_trans_omp_clauses): Likewise. * trans-stmt.cc (trans_associate_var): Likewise. (gfc_trans_deallocate): Likewise. * trans-stmt.h (gfc_trans_class_init_assign): Likewise. (gfc_trans_deallocate): Likewise. (gfc_trans_oacc_declare): Likewise. * trans-types.cc: Likewise. * trans-types.h (enum gfc_packed): Likewise. * trans.cc (N_): Likewise. (trans_code): Likewise. * trans.h (gfc_build_compare_string): Likewise. (gfc_conv_expr_type): Likewise. (gfc_trans_deferred_vars): Likewise. (getdecls): Likewise. (gfc_get_array_descr_info): Likewise. (gfc_omp_firstprivatize_type_sizes): Likewise. (GTY): Likewise. gcc/go/ChangeLog: * config-lang.in: Rename .c names to .cc. * go-backend.cc: Likewise. * go-lang.cc: Likewise. * gospec.cc: Likewise. * lang-specs.h: Likewise. gcc/jit/ChangeLog: * config-lang.in: Rename .c names to .cc. * docs/_build/texinfo/libgccjit.texi: Likewise. * docs/internals/index.rst: Likewise. * jit-builtins.cc (builtins_manager::make_builtin_function): Likewise. * jit-playback.cc (fold_const_var): Likewise. (playback::context::~context): Likewise. (new_field): Likewise. (new_bitfield): Likewise. (new_compound_type): Likewise. (playback::compound_type::set_fields): Likewise. (global_set_init_rvalue): Likewise. (load_blob_in_ctor): Likewise. (new_global_initialized): Likewise. (double>): Likewise. (new_string_literal): Likewise. (as_truth_value): Likewise. (build_call): Likewise. (playback::context::build_cast): Likewise. (new_array_access): Likewise. (new_field_access): Likewise. (dereference): Likewise. (postprocess): Likewise. (add_jump): Likewise. (add_switch): Likewise. (build_goto_operands): Likewise. (playback::context::read_dump_file): Likewise. (init_types): Likewise. * jit-recording.cc (recording::context::get_int_type): Likewise. * jit-recording.h: Likewise. * libgccjit.cc (compatible_types): Likewise. (gcc_jit_context_acquire): Likewise. 
(gcc_jit_context_release): Likewise. (gcc_jit_context_new_child_context): Likewise. (gcc_jit_type_as_object): Likewise. (gcc_jit_context_get_type): Likewise. (gcc_jit_context_get_int_type): Likewise. (gcc_jit_type_get_pointer): Likewise. (gcc_jit_type_get_const): Likewise. (gcc_jit_type_get_volatile): Likewise. (gcc_jit_type_dyncast_array): Likewise. (gcc_jit_type_is_bool): Likewise. (gcc_jit_type_is_pointer): Likewise. (gcc_jit_type_is_integral): Likewise. (gcc_jit_type_dyncast_vector): Likewise. (gcc_jit_type_is_struct): Likewise. (gcc_jit_vector_type_get_num_units): Likewise. (gcc_jit_vector_type_get_element_type): Likewise. (gcc_jit_type_unqualified): Likewise. (gcc_jit_type_dyncast_function_ptr_type): Likewise. (gcc_jit_function_type_get_return_type): Likewise. (gcc_jit_function_type_get_param_count): Likewise. (gcc_jit_function_type_get_param_type): Likewise. (gcc_jit_context_new_array_type): Likewise. (gcc_jit_context_new_field): Likewise. (gcc_jit_field_as_object): Likewise. (gcc_jit_context_new_struct_type): Likewise. (gcc_jit_struct_as_type): Likewise. (gcc_jit_struct_set_fields): Likewise. (gcc_jit_struct_get_field_count): Likewise. (gcc_jit_context_new_union_type): Likewise. (gcc_jit_context_new_function_ptr_type): Likewise. (gcc_jit_param_as_rvalue): Likewise. (gcc_jit_context_new_function): Likewise. (gcc_jit_function_get_return_type): Likewise. (gcc_jit_function_dump_to_dot): Likewise. (gcc_jit_block_get_function): Likewise. (gcc_jit_global_set_initializer_rvalue): Likewise. (gcc_jit_rvalue_get_type): Likewise. (gcc_jit_context_new_rvalue_from_int): Likewise. (gcc_jit_context_one): Likewise. (gcc_jit_context_new_rvalue_from_double): Likewise. (gcc_jit_context_null): Likewise. (gcc_jit_context_new_string_literal): Likewise. (valid_binary_op_p): Likewise. (gcc_jit_context_new_binary_op): Likewise. (gcc_jit_context_new_comparison): Likewise. (gcc_jit_context_new_call): Likewise. (is_valid_cast): Likewise. (gcc_jit_context_new_cast): Likewise. 
(gcc_jit_object_get_context): Likewise. (gcc_jit_object_get_debug_string): Likewise. (gcc_jit_lvalue_access_field): Likewise. (gcc_jit_rvalue_access_field): Likewise. (gcc_jit_rvalue_dereference_field): Likewise. (gcc_jit_rvalue_dereference): Likewise. (gcc_jit_lvalue_get_address): Likewise. (gcc_jit_lvalue_set_tls_model): Likewise. (gcc_jit_lvalue_set_link_section): Likewise. (gcc_jit_function_new_local): Likewise. (gcc_jit_block_add_eval): Likewise. (gcc_jit_block_add_assignment): Likewise. (is_bool): Likewise. (gcc_jit_block_end_with_conditional): Likewise. (gcc_jit_block_add_comment): Likewise. (gcc_jit_block_end_with_jump): Likewise. (gcc_jit_block_end_with_return): Likewise. (gcc_jit_block_end_with_void_return): Likewise. (case_range_validator::case_range_validator): Likewise. (case_range_validator::validate): Likewise. (case_range_validator::get_wide_int): Likewise. (gcc_jit_block_end_with_switch): Likewise. (gcc_jit_context_set_str_option): Likewise. (gcc_jit_context_set_int_option): Likewise. (gcc_jit_context_set_bool_option): Likewise. (gcc_jit_context_set_bool_allow_unreachable_blocks): Likewise. (gcc_jit_context_set_bool_use_external_driver): Likewise. (gcc_jit_context_add_command_line_option): Likewise. (gcc_jit_context_add_driver_option): Likewise. (gcc_jit_context_enable_dump): Likewise. (gcc_jit_context_compile): Likewise. (gcc_jit_context_compile_to_file): Likewise. (gcc_jit_context_set_logfile): Likewise. (gcc_jit_context_dump_reproducer_to_file): Likewise. (gcc_jit_context_get_first_error): Likewise. (gcc_jit_context_get_last_error): Likewise. (gcc_jit_result_get_code): Likewise. (gcc_jit_result_get_global): Likewise. (gcc_jit_rvalue_set_bool_require_tail_call): Likewise. (gcc_jit_type_get_aligned): Likewise. (gcc_jit_type_get_vector): Likewise. (gcc_jit_function_get_address): Likewise. (gcc_jit_version_patchlevel): Likewise. (gcc_jit_block_add_extended_asm): Likewise. (gcc_jit_extended_asm_as_object): Likewise. 
(gcc_jit_extended_asm_set_volatile_flag): Likewise. (gcc_jit_extended_asm_set_inline_flag): Likewise. (gcc_jit_extended_asm_add_output_operand): Likewise. (gcc_jit_extended_asm_add_input_operand): Likewise. (gcc_jit_extended_asm_add_clobber): Likewise. * notes.txt: Likewise. gcc/lto/ChangeLog: * config-lang.in: Rename .c names to .cc. * lang-specs.h: Likewise. * lto-common.cc (gimple_register_canonical_type_1): Likewise. * lto-common.h: Likewise. * lto-dump.cc (lto_main): Likewise. * lto-lang.cc (handle_fnspec_attribute): Likewise. (lto_getdecls): Likewise. (lto_init): Likewise. * lto.cc (lto_main): Likewise. * lto.h: Likewise. gcc/objc/ChangeLog: * Make-lang.in: Rename .c names to .cc. * config-lang.in: Likewise. * lang-specs.h: Likewise. * objc-act.cc (objc_build_component_ref): Likewise. (objc_copy_binfo): Likewise. (lookup_method_in_hash_lists): Likewise. (objc_finish_foreach_loop): Likewise. * objc-act.h (objc_common_init_ts): Likewise. * objc-gnu-runtime-abi-01.cc: Likewise. * objc-lang.cc (struct lang_hooks): Likewise. * objc-map.cc: Likewise. * objc-next-runtime-abi-01.cc (generate_objc_symtab_decl): Likewise. * objc-runtime-shared-support.cc: Likewise. * objc-runtime-shared-support.h (build_protocol_initializer): Likewise. gcc/objcp/ChangeLog: * Make-lang.in: Rename .c names to .cc. * config-lang.in: Likewise. * lang-specs.h: Likewise. * objcp-decl.cc (objcp_end_compound_stmt): Likewise. * objcp-lang.cc (struct lang_hooks): Likewise. gcc/po/ChangeLog: * EXCLUDES: Rename .c names to .cc. libcpp/ChangeLog: * Makefile.in: Rename .c names to .cc. * charset.cc (convert_escape): Likewise. * directives.cc (directive_diagnostics): Likewise. (_cpp_handle_directive): Likewise. (lex_macro_node): Likewise. * include/cpplib.h (struct _cpp_file): Likewise. (PURE_ZERO): Likewise. (cpp_defined): Likewise. (cpp_error_at): Likewise. (cpp_forall_identifiers): Likewise. (cpp_compare_macros): Likewise. (cpp_get_converted_source): Likewise. (cpp_read_state): Likewise. 
(cpp_directive_only_process): Likewise. (struct cpp_decoded_char): Likewise. * include/line-map.h (enum lc_reason): Likewise. (enum location_aspect): Likewise. * include/mkdeps.h: Likewise. * init.cc (cpp_destroy): Likewise. (cpp_finish): Likewise. * internal.h (struct cpp_reader): Likewise. (_cpp_defined_macro_p): Likewise. (_cpp_backup_tokens_direct): Likewise. (_cpp_destroy_hashtable): Likewise. (_cpp_has_header): Likewise. (_cpp_expand_op_stack): Likewise. (_cpp_commit_buff): Likewise. (_cpp_restore_special_builtin): Likewise. (_cpp_bracket_include): Likewise. (_cpp_replacement_text_len): Likewise. (ufputs): Likewise. * line-map.cc (linemap_macro_loc_to_exp_point): Likewise. (linemap_check_files_exited): Likewise. (line_map_new_raw): Likewise. * traditional.cc (enum ls): Likewise.
2322 lines · 70 KiB · C++
/* Subroutines for manipulating rtx's in semantically interesting ways.
|
||
Copyright (C) 1987-2022 Free Software Foundation, Inc.
|
||
|
||
This file is part of GCC.
|
||
|
||
GCC is free software; you can redistribute it and/or modify it under
|
||
the terms of the GNU General Public License as published by the Free
|
||
Software Foundation; either version 3, or (at your option) any later
|
||
version.
|
||
|
||
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
|
||
WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||
for more details.
|
||
|
||
You should have received a copy of the GNU General Public License
|
||
along with GCC; see the file COPYING3. If not see
|
||
<http://www.gnu.org/licenses/>. */
|
||
|
||
|
||
#include "config.h"
|
||
#include "system.h"
|
||
#include "coretypes.h"
|
||
#include "target.h"
|
||
#include "function.h"
|
||
#include "rtl.h"
|
||
#include "tree.h"
|
||
#include "memmodel.h"
|
||
#include "tm_p.h"
|
||
#include "optabs.h"
|
||
#include "expmed.h"
|
||
#include "profile-count.h"
|
||
#include "emit-rtl.h"
|
||
#include "recog.h"
|
||
#include "diagnostic-core.h"
|
||
#include "stor-layout.h"
|
||
#include "langhooks.h"
|
||
#include "except.h"
|
||
#include "dojump.h"
|
||
#include "explow.h"
|
||
#include "expr.h"
|
||
#include "stringpool.h"
|
||
#include "common/common-target.h"
|
||
#include "output.h"
|
||
|
||
static rtx break_out_memory_refs (rtx);
|
||
|
||
|
||
/* Truncate and perhaps sign-extend C as appropriate for MODE. */
|
||
|
||
HOST_WIDE_INT
|
||
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
|
||
{
|
||
/* Not scalar_int_mode because we also allow pointer bound modes. */
|
||
scalar_mode smode = as_a <scalar_mode> (mode);
|
||
int width = GET_MODE_PRECISION (smode);
|
||
|
||
/* You want to truncate to a _what_? */
|
||
gcc_assert (SCALAR_INT_MODE_P (mode));
|
||
|
||
/* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
|
||
if (smode == BImode)
|
||
return c & 1 ? STORE_FLAG_VALUE : 0;
|
||
|
||
/* Sign-extend for the requested mode. */
|
||
|
||
if (width < HOST_BITS_PER_WIDE_INT)
|
||
{
|
||
HOST_WIDE_INT sign = 1;
|
||
sign <<= width - 1;
|
||
c &= (sign << 1) - 1;
|
||
c ^= sign;
|
||
c -= sign;
|
||
}
|
||
|
||
return c;
|
||
}
|
||
|
||
/* Likewise for polynomial values, using the sign-extended representation
|
||
for each individual coefficient. */
|
||
|
||
poly_int64
|
||
trunc_int_for_mode (poly_int64 x, machine_mode mode)
|
||
{
|
||
for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
|
||
x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
|
||
return x;
|
||
}
|
||
|
||
/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified inplace or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  /* Set to 1 once we strip a CONST wrapper or see a SYMBOL_REF/LABEL_REF,
     so the final result can be re-wrapped in a CONST.  */
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  /* Adding zero changes nothing.  */
  if (known_eq (c, 0))
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      /* Fold constant + constant into a single integer constant.  */
      return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  rtx cst = get_pool_constant (XEXP (x, 0));

	  if (GET_CODE (cst) == CONST_VECTOR
	      && GET_MODE_INNER (GET_MODE (cst)) == mode)
	    {
	      /* A scalar element of a constant vector: reduce to that
		 element before adding.  */
	      cst = gen_lowpart (mode, cst);
	      gcc_assert (cst);
	    }
	  else if (GET_MODE (cst) == VOIDmode
		   && get_pool_mode (XEXP (x, 0)) != mode)
	    break;
	  if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
	    {
	      tem = plus_constant (mode, cst, c);
	      tem = force_const_mem (GET_MODE (x), tem);
	      /* Targets may disallow some constants in the constant pool, thus
		 force_const_mem may return NULL_RTX.  */
	      if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
		return tem;
	    }
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
	inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
	 for constant term in the sum and combine with C.  For an
	 integer constant term or a constant term that is not an
	 explicit integer, we combine or group them together anyway.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
	  if (term == const0_rtx)
	    x = XEXP (x, 0);
	  else if (inplace)
	    XEXP (x, 1) = term;
	  else
	    x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
	  /* C has been folded into the sum; don't add it again below.  */
	  c = 0;
	}
      else if (rtx *const_loc = find_constant_term_loc (&y))
	{
	  if (!inplace)
	    {
	      /* We need to be careful since X may be shared and we can't
		 modify it in place.  */
	      x = copy_rtx (x);
	      const_loc = find_constant_term_loc (&x);
	    }
	  *const_loc = plus_constant (mode, *const_loc, c, true);
	  c = 0;
	}
      break;

    default:
      if (CONST_POLY_INT_P (x))
	return immed_wide_int_const (const_poly_int_value (x) + c, mode);
      break;
    }

  /* Whatever part of C was not folded above becomes an explicit PLUS.  */
  if (maybe_ne (c, 0))
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    /* Re-wrap a fully constant expression in CONST.  */
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
|
||
|
||
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  /* Only sums can contain removable constant terms.  */
  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
					   XEXP (x, 1))) != 0
      && CONST_INT_P (tem))
    {
      /* Fold this level's constant into *CONSTPTR and recurse into the
	 remaining operand.  */
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  /* Otherwise gather constants from both operands into TEM, and only
     commit to *CONSTPTR if something actually changed and the combined
     value simplifies to a CONST_INT.  */
  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
					   *constptr, tem)) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
|
||
|
||
|
||
/* Return a copy of X in which all memory references
|
||
and all constants that involve symbol refs
|
||
have been replaced with new temporary registers.
|
||
Also emit code to load the memory locations and constants
|
||
into those registers.
|
||
|
||
If X contains no such constants or memory references,
|
||
X itself (not a copy) is returned.
|
||
|
||
If a constant is found in the address that is not a legitimate constant
|
||
in an insn, it is left alone in the hope that it might be valid in the
|
||
address.
|
||
|
||
X may contain no arithmetic except addition, subtraction and multiplication.
|
||
Values returned by expand_expr with 1 for sum_ok fit this constraint. */
|
||
|
||
static rtx
|
||
break_out_memory_refs (rtx x)
|
||
{
|
||
if (MEM_P (x)
|
||
|| (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
|
||
&& GET_MODE (x) != VOIDmode))
|
||
x = force_reg (GET_MODE (x), x);
|
||
else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
|
||
|| GET_CODE (x) == MULT)
|
||
{
|
||
rtx op0 = break_out_memory_refs (XEXP (x, 0));
|
||
rtx op1 = break_out_memory_refs (XEXP (x, 1));
|
||
|
||
if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
|
||
x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
|
||
}
|
||
|
||
return x;
|
||
}
|
||
|
||
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  NO_EMIT is true if no insns should be emitted, and instead
   it should return NULL if it can't be simplified without emitting insns.  */

rtx
convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
				     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
				     bool in_const ATTRIBUTE_UNUSED,
				     bool no_emit ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  /* Pointer mode and address mode are identical on this target; the
     only legal "conversion" is the identity.  */
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  scalar_int_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      /* Pick the conversion implied by the relative widths and by
	 POINTERS_EXTEND_UNSIGNED (<0 means ptr_extend; 0 sign; >0 zero).  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      /* A promoted or pointer-carrying subreg of a reg already in TO_MODE
	 can simply be peeled.  */
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      /* Rebuild the label reference in the requested mode, preserving
	 the nonlocal flag.  */
      temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      /* Copy the symbol ref and just change its mode.  */
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      /* Recurse into the wrapped expression, noting that we are now
	 inside a CONST.  */
      temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
						  true, no_emit);
      return temp ? gen_rtx_CONST (to_mode, temp) : temp;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it or if one operand is a constant and we are
	 using a ptr_extend instruction  (POINTERS_EXTEND_UNSIGNED < 0).
	 We can always safely permute them if we are making the address
	 narrower.  Inside a CONST RTL, this is safe for both pointers
	 zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && CONST_INT_P (XEXP (x, 1))
	      && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
		  || XEXP (x, 1) == convert_memory_address_addr_space_1
				     (to_mode, XEXP (x, 1), as, in_const,
				      no_emit)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
	{
	  temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
						      as, in_const, no_emit);
	  return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
					 temp, XEXP (x, 1))
		       : temp);
	}
      break;

    case UNSPEC:
      /* Assume that all UNSPECs in a constant address can be converted
	 operand-by-operand.  We could add a target hook if some targets
	 require different behavior.  */
      if (in_const && GET_MODE (x) == from_mode)
	{
	  unsigned int n = XVECLEN (x, 0);
	  rtvec v = gen_rtvec (n);
	  for (unsigned int i = 0; i < n; ++i)
	    {
	      rtx op = XVECEXP (x, 0, i);
	      if (GET_MODE (op) == from_mode)
		op = convert_memory_address_addr_space_1 (to_mode, op, as,
							  in_const, no_emit);
	      RTVEC_ELT (v, i) = op;
	    }
	  return gen_rtx_UNSPEC (to_mode, v, XINT (x, 1));
	}
      break;

    default:
      break;
    }

  /* No simplification applies; honor the NO_EMIT contract.  */
  if (no_emit)
    return NULL_RTX;

  /* Fall back to an explicit conversion, which may emit insns.  */
  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
|
||
|
||
/* Given X, a memory address in address space AS' pointer mode, convert it to
|
||
an address in the address space's address mode, or vice versa (TO_MODE says
|
||
which way). We take advantage of the fact that pointers are not allowed to
|
||
overflow by commuting arithmetic operations over conversions so that address
|
||
arithmetic insns can be used. */
|
||
|
||
rtx
|
||
convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
|
||
addr_space_t as)
|
||
{
|
||
return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
|
||
}
|
||
|
||
|
||
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);

  /* First get X into the address space's address mode.  */
  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      {
	rtx orig_x = x;
	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
	  goto done;
      }

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_addr_space_p (mode, y, as))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      /* Keep the constant term as a displacement off a register
		 holding the rest of the sum.  */
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_addr_space_p (mode, y, as))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
|
||
|
||
/* Convert a mem ref into one with a valid memory address.
|
||
Pass through anything else unchanged. */
|
||
|
||
rtx
|
||
validize_mem (rtx ref)
|
||
{
|
||
if (!MEM_P (ref))
|
||
return ref;
|
||
ref = use_anchored_address (ref);
|
||
if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
|
||
MEM_ADDR_SPACE (ref)))
|
||
return ref;
|
||
|
||
/* Don't alter REF itself, since that is probably a stack slot. */
|
||
return replace_equiv_address (ref, XEXP (ref, 0));
|
||
}
|
||
|
||
/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      /* Peel (const (plus base N)) into BASE and OFFSET.  */
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  The offset is rebased to be
     block-relative before looking up the anchor.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
|
||
|
||
/* Copy the value or contents of X to a new temp reg and return that reg. */
|
||
|
||
rtx
|
||
copy_to_reg (rtx x)
|
||
{
|
||
rtx temp = gen_reg_rtx (GET_MODE (x));
|
||
|
||
/* If not an operand, must be an address with PLUS and MULT so
|
||
do the computation. */
|
||
if (! general_operand (x, VOIDmode))
|
||
x = force_operand (x, temp);
|
||
|
||
if (x != temp)
|
||
emit_move_insn (temp, x);
|
||
|
||
return temp;
|
||
}
|
||
|
||
/* Like copy_to_reg but always give the new register mode Pmode
|
||
in case X is a constant. */
|
||
|
||
rtx
|
||
copy_addr_to_reg (rtx x)
|
||
{
|
||
return copy_to_mode_reg (Pmode, x);
|
||
}
|
||
|
||
/* Like copy_to_reg but always give the new register mode MODE
|
||
in case X is a constant. */
|
||
|
||
rtx
|
||
copy_to_mode_reg (machine_mode mode, rtx x)
|
||
{
|
||
rtx temp = gen_reg_rtx (mode);
|
||
|
||
/* If not an operand, must be an address with PLUS and MULT so
|
||
do the computation. */
|
||
if (! general_operand (x, VOIDmode))
|
||
x = force_operand (x, temp);
|
||
|
||
gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
|
||
if (x != temp)
|
||
emit_move_insn (temp, x);
|
||
return temp;
|
||
}
|
||
|
||
/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      /* Simple case: a direct move suffices.  */
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      /* Expand X first; force_operand may or may not leave the result
	 in a register, so move it into one if needed.  */
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
	align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
	/* (const (plus symbol offset)): the alignment is limited by
	   both the symbol's alignment and the low bits of the offset.  */
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	if (INTVAL (c) == 0)
	  align = sa;
	else
	  {
	    /* Alignment contributed by the offset is given by its
	       trailing zero bits.  */
	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
	    align = MIN (sa, ca);
	  }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
|
||
|
||
/* If X is a memory ref, copy its contents to a new temp reg and return
|
||
that reg. Otherwise, return X. */
|
||
|
||
rtx
|
||
force_not_mem (rtx x)
|
||
{
|
||
rtx temp;
|
||
|
||
if (!MEM_P (x) || GET_MODE (x) == BLKmode)
|
||
return x;
|
||
|
||
temp = gen_reg_rtx (GET_MODE (x));
|
||
|
||
if (MEM_POINTER (x))
|
||
REG_POINTER (temp) = 1;
|
||
|
||
emit_move_insn (temp, x);
|
||
return temp;
|
||
}
|
||
|
||
/* Copy X to TARGET (if it's nonzero and a reg)
|
||
or to a new temp reg and return that reg.
|
||
MODE is the mode to use for X in case it is a constant. */
|
||
|
||
rtx
|
||
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
|
||
{
|
||
rtx temp;
|
||
|
||
if (target && REG_P (target))
|
||
temp = target;
|
||
else
|
||
temp = gen_reg_rtx (mode);
|
||
|
||
emit_move_insn (temp, x);
|
||
return temp;
|
||
}
|
||
|
||
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
		       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      /* Only integral modes get promoted in the libcall case; others
	 are passed through unchanged.  */
      if (INTEGRAL_MODE_P (mode))
	return targetm.calls.promote_function_mode (NULL_TREE, mode,
						    punsignedp, funtype,
						    for_return);
      else
	return mode;
    }

  switch (TREE_CODE (type))
    {
    /* Scalar types: defer to the target's promotion hook.  */
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
						  for_return);

    default:
      /* Aggregates and other types are never promoted.  */
      return mode;
    }
}
|
||
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
	      int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
  scalar_mode smode;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      /* Values of these types always have scalar mode.  */
      smode = as_a <scalar_mode> (mode);
      /* PROMOTE_MODE is a target macro that may modify both SMODE and
	 UNSIGNEDP in place.  */
      PROMOTE_MODE (smode, unsignedp, type);
      *punsignedp = unsignedp;
      return smode;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      /* Pointers promote to the address mode of the pointed-to address
	 space, extended per POINTERS_EXTEND_UNSIGNED.  */
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}
|
||
|
||
|
||
/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  /* A by-value RESULT_DECL is promoted as a return value
     (for_return == 1).  */
  if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 1);
  /* Other RESULT_DECLs and PARM_DECLs use for_return == 2; NOTE(review):
     presumably a distinct "in-register result/argument" variant of the
     hook contract — confirm against promote_function_mode's callers.  */
  else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 2);
  else
    /* Ordinary variables use the plain storage promotion rule.  */
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}
|
||
|
||
/* Return the promoted mode for name.  If it is a named SSA_NAME, it
   is the same as promote_decl_mode.  Otherwise, it is the promoted
   mode of a temp decl of same type as the SSA_NAME, if we had created
   one.  */

machine_mode
promote_ssa_mode (const_tree name, int *punsignedp)
{
  gcc_assert (TREE_CODE (name) == SSA_NAME);

  /* Partitions holding parms and results must be promoted as expected
     by function.cc.  */
  if (SSA_NAME_VAR (name)
      && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
    {
      machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
      /* BLKmode falls through to the type-based promotion below.  */
      if (mode != BLKmode)
	return mode;
    }

  /* Anonymous SSA names: promote based on the type alone.  */
  tree type = TREE_TYPE (name);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
  if (punsignedp)
    *punsignedp = unsignedp;

  return pmode;
}
|
||
|
||
|
||
|
||
/* Controls the behavior of {anti_,}adjust_stack.  When set, no
   REG_ARGS_SIZE note is attached to the emitted adjustment.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  Emits insns that
   add (or, if ANTI_P, subtract) ADJUST to the stack pointer, and
   records the args-size note unless suppressed.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
		       anti_p ? sub_optab : add_optab,
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      /* expand_binop wrote the stack pointer directly; the adjustment
	 is the last emitted insn.  */
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_args_size_note (insn, stack_pointer_delta);
}
|
||
|
||
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
|
||
This pops when ADJUST is positive. ADJUST need not be constant. */
|
||
|
||
void
|
||
adjust_stack (rtx adjust)
|
||
{
|
||
if (adjust == const0_rtx)
|
||
return;
|
||
|
||
/* We expect all variable sized adjustments to be multiple of
|
||
PREFERRED_STACK_BOUNDARY. */
|
||
poly_int64 const_adjust;
|
||
if (poly_int_rtx_p (adjust, &const_adjust))
|
||
stack_pointer_delta -= const_adjust;
|
||
|
||
adjust_stack_1 (adjust, false);
|
||
}
|
||
|
||
/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
|
||
This pushes when ADJUST is positive. ADJUST need not be constant. */
|
||
|
||
void
|
||
anti_adjust_stack (rtx adjust)
|
||
{
|
||
if (adjust == const0_rtx)
|
||
return;
|
||
|
||
/* We expect all variable sized adjustments to be multiple of
|
||
PREFERRED_STACK_BOUNDARY. */
|
||
poly_int64 const_adjust;
|
||
if (poly_int_rtx_p (adjust, &const_adjust))
|
||
stack_pointer_delta += const_adjust;
|
||
|
||
adjust_stack_1 (adjust, true);
|
||
}
|
||
|
||
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* The preferred boundary is final; round using its byte value.  */
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
	return size;

      if (CONST_INT_P (size))
	{
	  /* Constant size: round up at compile time.  */
	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

	  if (INTVAL (size) != new_size)
	    size = GEN_INT (new_size);
	  return size;
	}

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
	 virtual_preferred_stack_boundary_rtx instead.  This will be
	 substituted by the right value in vregs pass and optimized
	 during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
				   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
			NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
|
||
|
||
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_save_stack_block ())
	fcn = targetm.gen_save_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_save_stack_function ())
	fcn = targetm.gen_save_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_save_stack_nonlocal ())
	fcn = targetm.gen_save_stack_nonlocal;
      break;
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  /* Nonlocal saves must live in memory (a stack slot) so that a
	     nonlocal goto can find them; others can use a pseudo.  */
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}
|
||
|
||
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_restore_stack_block ())
	fcn = targetm.gen_restore_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_restore_stack_function ())
	fcn = targetm.gen_restore_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_restore_stack_nonlocal ())
	fcn = targetm.gen_restore_stack_nonlocal;
      break;
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  /* Any pending adjustment is meaningless once the pointer is
     restored wholesale.  */
  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}
|
||
|
||
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		   cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}
|
||
|
||
/* Record a new stack level for the current function. This should be called
|
||
whenever we allocate or deallocate dynamic stack space. */
|
||
|
||
void
|
||
record_new_stack_level (void)
|
||
{
|
||
/* Record the new stack level for nonlocal gotos. */
|
||
if (cfun->nonlocal_goto_save_area)
|
||
update_nonlocal_goto_save_area ();
|
||
|
||
/* Record the new stack level for SJLJ exceptions. */
|
||
if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
|
||
update_sjlj_context ();
|
||
}
|
||
|
||
/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET. */
|
||
|
||
rtx
|
||
align_dynamic_address (rtx target, unsigned required_align)
|
||
{
|
||
/* CEIL_DIV_EXPR needs to worry about the addition overflowing,
|
||
but we know it can't. So add ourselves and then do
|
||
TRUNC_DIV_EXPR. */
|
||
target = expand_binop (Pmode, add_optab, target,
|
||
gen_int_mode (required_align / BITS_PER_UNIT - 1,
|
||
Pmode),
|
||
NULL_RTX, 1, OPTAB_LIB_WIDEN);
|
||
target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
|
||
gen_int_mode (required_align / BITS_PER_UNIT,
|
||
Pmode),
|
||
NULL_RTX, 1);
|
||
target = expand_mult (Pmode, target,
|
||
gen_int_mode (required_align / BITS_PER_UNIT,
|
||
Pmode),
|
||
NULL_RTX, 1);
|
||
|
||
return target;
|
||
}
|
||
|
||
/* Return an rtx through *PSIZE, representing the size of an area of memory to
|
||
be dynamically pushed on the stack.
|
||
|
||
*PSIZE is an rtx representing the size of the area.
|
||
|
||
SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
|
||
parameter may be zero. If so, a proper value will be extracted
|
||
from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
|
||
|
||
REQUIRED_ALIGN is the alignment (in bits) required for the region
|
||
of memory.
|
||
|
||
If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
|
||
the additional size returned. */
|
||
void
|
||
get_dynamic_stack_size (rtx *psize, unsigned size_align,
|
||
unsigned required_align,
|
||
HOST_WIDE_INT *pstack_usage_size)
|
||
{
|
||
rtx size = *psize;
|
||
|
||
/* Ensure the size is in the proper mode. */
|
||
if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
|
||
size = convert_to_mode (Pmode, size, 1);
|
||
|
||
if (CONST_INT_P (size))
|
||
{
|
||
unsigned HOST_WIDE_INT lsb;
|
||
|
||
lsb = INTVAL (size);
|
||
lsb &= -lsb;
|
||
|
||
/* Watch out for overflow truncating to "unsigned". */
|
||
if (lsb > UINT_MAX / BITS_PER_UNIT)
|
||
size_align = 1u << (HOST_BITS_PER_INT - 1);
|
||
else
|
||
size_align = (unsigned)lsb * BITS_PER_UNIT;
|
||
}
|
||
else if (size_align < BITS_PER_UNIT)
|
||
size_align = BITS_PER_UNIT;
|
||
|
||
/* We can't attempt to minimize alignment necessary, because we don't
|
||
know the final value of preferred_stack_boundary yet while executing
|
||
this code. */
|
||
if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
|
||
crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
|
||
|
||
/* We will need to ensure that the address we return is aligned to
|
||
REQUIRED_ALIGN. At this point in the compilation, we don't always
|
||
know the final value of the STACK_DYNAMIC_OFFSET used in function.cc
|
||
(it might depend on the size of the outgoing parameter lists, for
|
||
example), so we must preventively align the value. We leave space
|
||
in SIZE for the hole that might result from the alignment operation. */
|
||
|
||
unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
|
||
if (known_align == 0)
|
||
known_align = BITS_PER_UNIT;
|
||
if (required_align > known_align)
|
||
{
|
||
unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
|
||
size = plus_constant (Pmode, size, extra);
|
||
size = force_operand (size, NULL_RTX);
|
||
if (size_align > known_align)
|
||
size_align = known_align;
|
||
|
||
if (flag_stack_usage_info && pstack_usage_size)
|
||
*pstack_usage_size += extra;
|
||
}
|
||
|
||
/* Round the size to a multiple of the required stack alignment.
|
||
Since the stack is presumed to be rounded before this allocation,
|
||
this will maintain the required alignment.
|
||
|
||
If the stack grows downward, we could save an insn by subtracting
|
||
SIZE from the stack pointer and then aligning the stack pointer.
|
||
The problem with this is that the stack pointer may be unaligned
|
||
between the execution of the subtraction and alignment insns and
|
||
some machines do not allow this. Even on those that do, some
|
||
signal handlers malfunction if a signal should occur between those
|
||
insns. Since this is an extremely rare event, we have no reliable
|
||
way of knowing which systems have this problem. So we avoid even
|
||
momentarily mis-aligning the stack. */
|
||
if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
|
||
{
|
||
size = round_push (size);
|
||
|
||
if (flag_stack_usage_info && pstack_usage_size)
|
||
{
|
||
int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
|
||
*pstack_usage_size =
|
||
(*pstack_usage_size + align - 1) / align * align;
|
||
}
|
||
}
|
||
|
||
*psize = size;
|
||
}
|
||
|
||
/* Return the number of bytes to "protect" on the stack for -fstack-check.
|
||
|
||
"protect" in the context of -fstack-check means how many bytes we need
|
||
to always ensure are available on the stack; as a consequence, this is
|
||
also how many bytes are first skipped when probing the stack.
|
||
|
||
On some targets we want to reuse the -fstack-check prologue support
|
||
to give a degree of protection against stack clashing style attacks.
|
||
|
||
In that scenario we do not want to skip bytes before probing as that
|
||
would render the stack clash protections useless.
|
||
|
||
So we never use STACK_CHECK_PROTECT directly. Instead we indirectly
|
||
use it through this helper, which allows to provide different values
|
||
for -fstack-check and -fstack-clash-protection. */
|
||
|
||
HOST_WIDE_INT
|
||
get_stack_check_protect (void)
|
||
{
|
||
if (flag_stack_clash_protection)
|
||
return 0;
|
||
|
||
return STACK_CHECK_PROTECT;
|
||
}
|
||
|
||
/* Return an rtx representing the address of an area of memory dynamically
|
||
pushed on the stack.
|
||
|
||
Any required stack pointer alignment is preserved.
|
||
|
||
SIZE is an rtx representing the size of the area.
|
||
|
||
SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
|
||
parameter may be zero. If so, a proper value will be extracted
|
||
from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
|
||
|
||
REQUIRED_ALIGN is the alignment (in bits) required for the region
|
||
of memory.
|
||
|
||
MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
|
||
no such upper bound is known.
|
||
|
||
If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
|
||
stack space allocated by the generated code cannot be added with itself
|
||
in the course of the execution of the function. It is always safe to
|
||
pass FALSE here and the following criterion is sufficient in order to
|
||
pass TRUE: every path in the CFG that starts at the allocation point and
|
||
loops to it executes the associated deallocation code. */
|
||
|
||
rtx
|
||
allocate_dynamic_stack_space (rtx size, unsigned size_align,
|
||
unsigned required_align,
|
||
HOST_WIDE_INT max_size,
|
||
bool cannot_accumulate)
|
||
{
|
||
HOST_WIDE_INT stack_usage_size = -1;
|
||
rtx_code_label *final_label;
|
||
rtx final_target, target;
|
||
|
||
/* If we're asking for zero bytes, it doesn't matter what we point
|
||
to since we can't dereference it. But return a reasonable
|
||
address anyway. */
|
||
if (size == const0_rtx)
|
||
return virtual_stack_dynamic_rtx;
|
||
|
||
/* Otherwise, show we're calling alloca or equivalent. */
|
||
cfun->calls_alloca = 1;
|
||
|
||
/* If stack usage info is requested, look into the size we are passed.
|
||
We need to do so this early to avoid the obfuscation that may be
|
||
introduced later by the various alignment operations. */
|
||
if (flag_stack_usage_info)
|
||
{
|
||
if (CONST_INT_P (size))
|
||
stack_usage_size = INTVAL (size);
|
||
else if (REG_P (size))
|
||
{
|
||
/* Look into the last emitted insn and see if we can deduce
|
||
something for the register. */
|
||
rtx_insn *insn;
|
||
rtx set, note;
|
||
insn = get_last_insn ();
|
||
if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
|
||
{
|
||
if (CONST_INT_P (SET_SRC (set)))
|
||
stack_usage_size = INTVAL (SET_SRC (set));
|
||
else if ((note = find_reg_equal_equiv_note (insn))
|
||
&& CONST_INT_P (XEXP (note, 0)))
|
||
stack_usage_size = INTVAL (XEXP (note, 0));
|
||
}
|
||
}
|
||
|
||
/* If the size is not constant, try the maximum size. */
|
||
if (stack_usage_size < 0)
|
||
stack_usage_size = max_size;
|
||
|
||
/* If the size is still not constant, we can't say anything. */
|
||
if (stack_usage_size < 0)
|
||
{
|
||
current_function_has_unbounded_dynamic_stack_size = 1;
|
||
stack_usage_size = 0;
|
||
}
|
||
}
|
||
|
||
get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);
|
||
|
||
target = gen_reg_rtx (Pmode);
|
||
|
||
/* The size is supposed to be fully adjusted at this point so record it
|
||
if stack usage info is requested. */
|
||
if (flag_stack_usage_info)
|
||
{
|
||
current_function_dynamic_stack_size += stack_usage_size;
|
||
|
||
/* ??? This is gross but the only safe stance in the absence
|
||
of stack usage oriented flow analysis. */
|
||
if (!cannot_accumulate)
|
||
current_function_has_unbounded_dynamic_stack_size = 1;
|
||
}
|
||
|
||
do_pending_stack_adjust ();
|
||
|
||
final_label = NULL;
|
||
final_target = NULL_RTX;
|
||
|
||
/* If we are splitting the stack, we need to ask the backend whether
|
||
there is enough room on the current stack. If there isn't, or if
|
||
the backend doesn't know how to tell is, then we need to call a
|
||
function to allocate memory in some other way. This memory will
|
||
be released when we release the current stack segment. The
|
||
effect is that stack allocation becomes less efficient, but at
|
||
least it doesn't cause a stack overflow. */
|
||
if (flag_split_stack)
|
||
{
|
||
rtx_code_label *available_label;
|
||
rtx ask, space, func;
|
||
|
||
available_label = NULL;
|
||
|
||
if (targetm.have_split_stack_space_check ())
|
||
{
|
||
available_label = gen_label_rtx ();
|
||
|
||
/* This instruction will branch to AVAILABLE_LABEL if there
|
||
are SIZE bytes available on the stack. */
|
||
emit_insn (targetm.gen_split_stack_space_check
|
||
(size, available_label));
|
||
}
|
||
|
||
/* The __morestack_allocate_stack_space function will allocate
|
||
memory using malloc. If the alignment of the memory returned
|
||
by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
|
||
make sure we allocate enough space. */
|
||
if (MALLOC_ABI_ALIGNMENT >= required_align)
|
||
ask = size;
|
||
else
|
||
ask = expand_binop (Pmode, add_optab, size,
|
||
gen_int_mode (required_align / BITS_PER_UNIT - 1,
|
||
Pmode),
|
||
NULL_RTX, 1, OPTAB_LIB_WIDEN);
|
||
|
||
func = init_one_libfunc ("__morestack_allocate_stack_space");
|
||
|
||
space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
|
||
ask, Pmode);
|
||
|
||
if (available_label == NULL_RTX)
|
||
return space;
|
||
|
||
final_target = gen_reg_rtx (Pmode);
|
||
|
||
emit_move_insn (final_target, space);
|
||
|
||
final_label = gen_label_rtx ();
|
||
emit_jump (final_label);
|
||
|
||
emit_label (available_label);
|
||
}
|
||
|
||
/* We ought to be called always on the toplevel and stack ought to be aligned
|
||
properly. */
|
||
gcc_assert (multiple_p (stack_pointer_delta,
|
||
PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
|
||
|
||
/* If needed, check that we have the required amount of stack. Take into
|
||
account what has already been checked. */
|
||
if (STACK_CHECK_MOVING_SP)
|
||
;
|
||
else if (flag_stack_check == GENERIC_STACK_CHECK)
|
||
probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
|
||
size);
|
||
else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
|
||
probe_stack_range (get_stack_check_protect (), size);
|
||
|
||
/* Don't let anti_adjust_stack emit notes. */
|
||
suppress_reg_args_size = true;
|
||
|
||
/* Perform the required allocation from the stack. Some systems do
|
||
this differently than simply incrementing/decrementing from the
|
||
stack pointer, such as acquiring the space by calling malloc(). */
|
||
if (targetm.have_allocate_stack ())
|
||
{
|
||
class expand_operand ops[2];
|
||
/* We don't have to check against the predicate for operand 0 since
|
||
TARGET is known to be a pseudo of the proper mode, which must
|
||
be valid for the operand. */
|
||
create_fixed_operand (&ops[0], target);
|
||
create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
|
||
expand_insn (targetm.code_for_allocate_stack, 2, ops);
|
||
}
|
||
else
|
||
{
|
||
poly_int64 saved_stack_pointer_delta;
|
||
|
||
if (!STACK_GROWS_DOWNWARD)
|
||
emit_move_insn (target, virtual_stack_dynamic_rtx);
|
||
|
||
/* Check stack bounds if necessary. */
|
||
if (crtl->limit_stack)
|
||
{
|
||
rtx available;
|
||
rtx_code_label *space_available = gen_label_rtx ();
|
||
if (STACK_GROWS_DOWNWARD)
|
||
available = expand_binop (Pmode, sub_optab,
|
||
stack_pointer_rtx, stack_limit_rtx,
|
||
NULL_RTX, 1, OPTAB_WIDEN);
|
||
else
|
||
available = expand_binop (Pmode, sub_optab,
|
||
stack_limit_rtx, stack_pointer_rtx,
|
||
NULL_RTX, 1, OPTAB_WIDEN);
|
||
|
||
emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
|
||
space_available);
|
||
if (targetm.have_trap ())
|
||
emit_insn (targetm.gen_trap ());
|
||
else
|
||
error ("stack limits not supported on this target");
|
||
emit_barrier ();
|
||
emit_label (space_available);
|
||
}
|
||
|
||
saved_stack_pointer_delta = stack_pointer_delta;
|
||
|
||
/* If stack checking or stack clash protection is requested,
|
||
then probe the stack while allocating space from it. */
|
||
if (flag_stack_check && STACK_CHECK_MOVING_SP)
|
||
anti_adjust_stack_and_probe (size, false);
|
||
else if (flag_stack_clash_protection)
|
||
anti_adjust_stack_and_probe_stack_clash (size);
|
||
else
|
||
anti_adjust_stack (size);
|
||
|
||
/* Even if size is constant, don't modify stack_pointer_delta.
|
||
The constant size alloca should preserve
|
||
crtl->preferred_stack_boundary alignment. */
|
||
stack_pointer_delta = saved_stack_pointer_delta;
|
||
|
||
if (STACK_GROWS_DOWNWARD)
|
||
emit_move_insn (target, virtual_stack_dynamic_rtx);
|
||
}
|
||
|
||
suppress_reg_args_size = false;
|
||
|
||
/* Finish up the split stack handling. */
|
||
if (final_label != NULL_RTX)
|
||
{
|
||
gcc_assert (flag_split_stack);
|
||
emit_move_insn (final_target, target);
|
||
emit_label (final_label);
|
||
target = final_target;
|
||
}
|
||
|
||
target = align_dynamic_address (target, required_align);
|
||
|
||
/* Now that we've committed to a return value, mark its alignment. */
|
||
mark_reg_pointer (target, required_align);
|
||
|
||
/* Record the new stack level. */
|
||
record_new_stack_level ();
|
||
|
||
return target;
|
||
}
|
||
|
||
/* Return an rtx representing the address of an area of memory already
|
||
statically pushed onto the stack in the virtual stack vars area. (It is
|
||
assumed that the area is allocated in the function prologue.)
|
||
|
||
Any required stack pointer alignment is preserved.
|
||
|
||
OFFSET is the offset of the area into the virtual stack vars area.
|
||
|
||
REQUIRED_ALIGN is the alignment (in bits) required for the region
|
||
of memory.
|
||
|
||
BASE is the rtx of the base of this virtual stack vars area.
|
||
The only time this is not `virtual_stack_vars_rtx` is when tagging pointers
|
||
on the stack. */
|
||
|
||
rtx
|
||
get_dynamic_stack_base (poly_int64 offset, unsigned required_align, rtx base)
|
||
{
|
||
rtx target;
|
||
|
||
if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
|
||
crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
|
||
|
||
target = gen_reg_rtx (Pmode);
|
||
emit_move_insn (target, base);
|
||
target = expand_binop (Pmode, add_optab, target,
|
||
gen_int_mode (offset, Pmode),
|
||
NULL_RTX, 1, OPTAB_LIB_WIDEN);
|
||
target = align_dynamic_address (target, required_align);
|
||
|
||
/* Now that we've committed to a return value, mark its alignment. */
|
||
mark_reg_pointer (target, required_align);
|
||
|
||
return target;
|
||
}
|
||
|
||
/* A front end may want to override GCC's stack checking by providing a
|
||
run-time routine to call to check the stack, so provide a mechanism for
|
||
calling that routine. */
|
||
|
||
static GTY(()) rtx stack_check_libfunc;
|
||
|
||
void
|
||
set_stack_check_libfunc (const char *libfunc_name)
|
||
{
|
||
gcc_assert (stack_check_libfunc == NULL_RTX);
|
||
stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
|
||
tree ptype
|
||
= Pmode == ptr_mode
|
||
? ptr_type_node
|
||
: lang_hooks.types.type_for_mode (Pmode, 1);
|
||
tree ftype
|
||
= build_function_type_list (void_type_node, ptype, NULL_TREE);
|
||
tree decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
|
||
get_identifier (libfunc_name), ftype);
|
||
DECL_EXTERNAL (decl) = 1;
|
||
SET_SYMBOL_REF_DECL (stack_check_libfunc, decl);
|
||
}
|
||
|
||
/* Emit one stack probe at ADDRESS, an address within the stack. */
|
||
|
||
void
|
||
emit_stack_probe (rtx address)
|
||
{
|
||
if (targetm.have_probe_stack_address ())
|
||
{
|
||
class expand_operand ops[1];
|
||
insn_code icode = targetm.code_for_probe_stack_address;
|
||
create_address_operand (ops, address);
|
||
maybe_legitimize_operands (icode, 0, 1, ops);
|
||
expand_insn (icode, 1, ops);
|
||
}
|
||
else
|
||
{
|
||
rtx memref = gen_rtx_MEM (word_mode, address);
|
||
|
||
MEM_VOLATILE_P (memref) = 1;
|
||
memref = validize_mem (memref);
|
||
|
||
/* See if we have an insn to probe the stack. */
|
||
if (targetm.have_probe_stack ())
|
||
emit_insn (targetm.gen_probe_stack (memref));
|
||
else
|
||
emit_move_insn (memref, const0_rtx);
|
||
}
|
||
}
|
||
|
||
/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
|
||
FIRST is a constant and size is a Pmode RTX. These are offsets from
|
||
the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
|
||
or subtract them from the stack pointer. */
|
||
|
||
#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
|
||
|
||
#if STACK_GROWS_DOWNWARD
|
||
#define STACK_GROW_OP MINUS
|
||
#define STACK_GROW_OPTAB sub_optab
|
||
#define STACK_GROW_OFF(off) -(off)
|
||
#else
|
||
#define STACK_GROW_OP PLUS
|
||
#define STACK_GROW_OPTAB add_optab
|
||
#define STACK_GROW_OFF(off) (off)
|
||
#endif
|
||
|
||
void
|
||
probe_stack_range (HOST_WIDE_INT first, rtx size)
|
||
{
|
||
/* First ensure SIZE is Pmode. */
|
||
if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
|
||
size = convert_to_mode (Pmode, size, 1);
|
||
|
||
/* Next see if we have a function to check the stack. */
|
||
if (stack_check_libfunc)
|
||
{
|
||
rtx addr = memory_address (Pmode,
|
||
gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
|
||
stack_pointer_rtx,
|
||
plus_constant (Pmode,
|
||
size, first)));
|
||
emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
|
||
addr, Pmode);
|
||
}
|
||
|
||
/* Next see if we have an insn to check the stack. */
|
||
else if (targetm.have_check_stack ())
|
||
{
|
||
class expand_operand ops[1];
|
||
rtx addr = memory_address (Pmode,
|
||
gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
|
||
stack_pointer_rtx,
|
||
plus_constant (Pmode,
|
||
size, first)));
|
||
bool success;
|
||
create_input_operand (&ops[0], addr, Pmode);
|
||
success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
|
||
gcc_assert (success);
|
||
}
|
||
|
||
/* Otherwise we have to generate explicit probes. If we have a constant
|
||
small number of them to generate, that's the easy case. */
|
||
else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
|
||
{
|
||
HOST_WIDE_INT isize = INTVAL (size), i;
|
||
rtx addr;
|
||
|
||
/* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
|
||
it exceeds SIZE. If only one probe is needed, this will not
|
||
generate any code. Then probe at FIRST + SIZE. */
|
||
for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
|
||
{
|
||
addr = memory_address (Pmode,
|
||
plus_constant (Pmode, stack_pointer_rtx,
|
||
STACK_GROW_OFF (first + i)));
|
||
emit_stack_probe (addr);
|
||
}
|
||
|
||
addr = memory_address (Pmode,
|
||
plus_constant (Pmode, stack_pointer_rtx,
|
||
STACK_GROW_OFF (first + isize)));
|
||
emit_stack_probe (addr);
|
||
}
|
||
|
||
/* In the variable case, do the same as above, but in a loop. Note that we
|
||
must be extra careful with variables wrapping around because we might be
|
||
at the very top (or the very bottom) of the address space and we have to
|
||
be able to handle this case properly; in particular, we use an equality
|
||
test for the loop condition. */
|
||
else
|
||
{
|
||
rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
|
||
rtx_code_label *loop_lab = gen_label_rtx ();
|
||
rtx_code_label *end_lab = gen_label_rtx ();
|
||
|
||
/* Step 1: round SIZE to the previous multiple of the interval. */
|
||
|
||
/* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
|
||
rounded_size
|
||
= simplify_gen_binary (AND, Pmode, size,
|
||
gen_int_mode (-PROBE_INTERVAL, Pmode));
|
||
rounded_size_op = force_operand (rounded_size, NULL_RTX);
|
||
|
||
|
||
/* Step 2: compute initial and final value of the loop counter. */
|
||
|
||
/* TEST_ADDR = SP + FIRST. */
|
||
test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
|
||
stack_pointer_rtx,
|
||
gen_int_mode (first, Pmode)),
|
||
NULL_RTX);
|
||
|
||
/* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
|
||
last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
|
||
test_addr,
|
||
rounded_size_op), NULL_RTX);
|
||
|
||
|
||
/* Step 3: the loop
|
||
|
||
while (TEST_ADDR != LAST_ADDR)
|
||
{
|
||
TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
|
||
probe at TEST_ADDR
|
||
}
|
||
|
||
probes at FIRST + N * PROBE_INTERVAL for values of N from 1
|
||
until it is equal to ROUNDED_SIZE. */
|
||
|
||
emit_label (loop_lab);
|
||
|
||
/* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
|
||
emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
|
||
end_lab);
|
||
|
||
/* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
|
||
temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
|
||
gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
|
||
1, OPTAB_WIDEN);
|
||
|
||
gcc_assert (temp == test_addr);
|
||
|
||
/* Probe at TEST_ADDR. */
|
||
emit_stack_probe (test_addr);
|
||
|
||
emit_jump (loop_lab);
|
||
|
||
emit_label (end_lab);
|
||
|
||
|
||
/* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
|
||
that SIZE is equal to ROUNDED_SIZE. */
|
||
|
||
/* TEMP = SIZE - ROUNDED_SIZE. */
|
||
temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
|
||
if (temp != const0_rtx)
|
||
{
|
||
rtx addr;
|
||
|
||
if (CONST_INT_P (temp))
|
||
{
|
||
/* Use [base + disp} addressing mode if supported. */
|
||
HOST_WIDE_INT offset = INTVAL (temp);
|
||
addr = memory_address (Pmode,
|
||
plus_constant (Pmode, last_addr,
|
||
STACK_GROW_OFF (offset)));
|
||
}
|
||
else
|
||
{
|
||
/* Manual CSE if the difference is not known at compile-time. */
|
||
temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
|
||
addr = memory_address (Pmode,
|
||
gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
|
||
last_addr, temp));
|
||
}
|
||
|
||
emit_stack_probe (addr);
|
||
}
|
||
}
|
||
|
||
/* Make sure nothing is scheduled before we are done. */
|
||
emit_insn (gen_blockage ());
|
||
}
|
||
|
||
/* Compute parameters for stack clash probing a dynamic stack
|
||
allocation of SIZE bytes.
|
||
|
||
We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.
|
||
|
||
Additionally we conditionally dump the type of probing that will
|
||
be needed given the values computed. */
|
||
|
||
void
|
||
compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
|
||
rtx *residual,
|
||
HOST_WIDE_INT *probe_interval,
|
||
rtx size)
|
||
{
|
||
/* Round SIZE down to STACK_CLASH_PROTECTION_PROBE_INTERVAL */
|
||
*probe_interval
|
||
= 1 << param_stack_clash_protection_probe_interval;
|
||
*rounded_size = simplify_gen_binary (AND, Pmode, size,
|
||
GEN_INT (-*probe_interval));
|
||
|
||
/* Compute the value of the stack pointer for the last iteration.
|
||
It's just SP + ROUNDED_SIZE. */
|
||
rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
|
||
*last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
|
||
stack_pointer_rtx,
|
||
rounded_size_op),
|
||
NULL_RTX);
|
||
|
||
/* Compute any residuals not allocated by the loop above. Residuals
|
||
are just the ROUNDED_SIZE - SIZE. */
|
||
*residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);
|
||
|
||
/* Dump key information to make writing tests easy. */
|
||
if (dump_file)
|
||
{
|
||
if (*rounded_size == CONST0_RTX (Pmode))
|
||
fprintf (dump_file,
|
||
"Stack clash skipped dynamic allocation and probing loop.\n");
|
||
else if (CONST_INT_P (*rounded_size)
|
||
&& INTVAL (*rounded_size) <= 4 * *probe_interval)
|
||
fprintf (dump_file,
|
||
"Stack clash dynamic allocation and probing inline.\n");
|
||
else if (CONST_INT_P (*rounded_size))
|
||
fprintf (dump_file,
|
||
"Stack clash dynamic allocation and probing in "
|
||
"rotated loop.\n");
|
||
else
|
||
fprintf (dump_file,
|
||
"Stack clash dynamic allocation and probing in loop.\n");
|
||
|
||
if (*residual != CONST0_RTX (Pmode))
|
||
fprintf (dump_file,
|
||
"Stack clash dynamic allocation and probing residuals.\n");
|
||
else
|
||
fprintf (dump_file,
|
||
"Stack clash skipped dynamic allocation and "
|
||
"probing residuals.\n");
|
||
}
|
||
}
|
||
|
||
/* Emit the start of an allocate/probe loop for stack
|
||
clash protection.
|
||
|
||
LOOP_LAB and END_LAB are returned for use when we emit the
|
||
end of the loop.
|
||
|
||
LAST addr is the value for SP which stops the loop. */
|
||
void
|
||
emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
|
||
rtx *end_lab,
|
||
rtx last_addr,
|
||
bool rotated)
|
||
{
|
||
/* Essentially we want to emit any setup code, the top of loop
|
||
label and the comparison at the top of the loop. */
|
||
*loop_lab = gen_label_rtx ();
|
||
*end_lab = gen_label_rtx ();
|
||
|
||
emit_label (*loop_lab);
|
||
if (!rotated)
|
||
emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
|
||
Pmode, 1, *end_lab);
|
||
}
|
||
|
||
/* Emit the end of a stack clash probing loop.
|
||
|
||
This consists of just the jump back to LOOP_LAB and
|
||
emitting END_LOOP after the loop. */
|
||
|
||
void
|
||
emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
|
||
rtx last_addr, bool rotated)
|
||
{
|
||
if (rotated)
|
||
emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
|
||
Pmode, 1, loop_lab);
|
||
else
|
||
emit_jump (loop_lab);
|
||
|
||
emit_label (end_loop);
|
||
|
||
}
|
||
|
||
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
|
||
while probing it. This pushes when SIZE is positive. SIZE need not
|
||
be constant.
|
||
|
||
This is subtly different than anti_adjust_stack_and_probe to try and
|
||
prevent stack-clash attacks
|
||
|
||
1. It must assume no knowledge of the probing state, any allocation
|
||
must probe.
|
||
|
||
Consider the case of a 1 byte alloca in a loop. If the sum of the
|
||
allocations is large, then this could be used to jump the guard if
|
||
probes were not emitted.
|
||
|
||
2. It never skips probes, whereas anti_adjust_stack_and_probe will
|
||
skip the probe on the first PROBE_INTERVAL on the assumption it
|
||
was already done in the prologue and in previous allocations.
|
||
|
||
3. It only allocates and probes SIZE bytes, it does not need to
|
||
allocate/probe beyond that because this probing style does not
|
||
guarantee signal handling capability if the guard is hit. */
|
||
|
||
void
|
||
anti_adjust_stack_and_probe_stack_clash (rtx size)
|
||
{
|
||
/* First ensure SIZE is Pmode. */
|
||
if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
|
||
size = convert_to_mode (Pmode, size, 1);
|
||
|
||
/* We can get here with a constant size on some targets. */
|
||
rtx rounded_size, last_addr, residual;
|
||
HOST_WIDE_INT probe_interval, probe_range;
|
||
bool target_probe_range_p = false;
|
||
compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
|
||
&residual, &probe_interval, size);
|
||
|
||
/* Get the back-end specific probe ranges. */
|
||
probe_range = targetm.stack_clash_protection_alloca_probe_range ();
|
||
target_probe_range_p = probe_range != 0;
|
||
gcc_assert (probe_range >= 0);
|
||
|
||
/* If no back-end specific range defined, default to the top of the newly
|
||
allocated range. */
|
||
if (probe_range == 0)
|
||
probe_range = probe_interval - GET_MODE_SIZE (word_mode);
|
||
|
||
if (rounded_size != CONST0_RTX (Pmode))
|
||
{
|
||
if (CONST_INT_P (rounded_size)
|
||
&& INTVAL (rounded_size) <= 4 * probe_interval)
|
||
{
|
||
for (HOST_WIDE_INT i = 0;
|
||
i < INTVAL (rounded_size);
|
||
i += probe_interval)
|
||
{
|
||
anti_adjust_stack (GEN_INT (probe_interval));
|
||
/* The prologue does not probe residuals. Thus the offset
|
||
here to probe just beyond what the prologue had already
|
||
allocated. */
|
||
emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
|
||
probe_range));
|
||
|
||
emit_insn (gen_blockage ());
|
||
}
|
||
}
|
||
else
|
||
{
|
||
rtx loop_lab, end_loop;
|
||
bool rotate_loop = CONST_INT_P (rounded_size);
|
||
emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
|
||
last_addr, rotate_loop);
|
||
|
||
anti_adjust_stack (GEN_INT (probe_interval));
|
||
|
||
/* The prologue does not probe residuals. Thus the offset here
|
||
to probe just beyond what the prologue had already
|
||
allocated. */
|
||
emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
|
||
probe_range));
|
||
|
||
emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
|
||
last_addr, rotate_loop);
|
||
emit_insn (gen_blockage ());
|
||
}
|
||
}
|
||
|
||
if (residual != CONST0_RTX (Pmode))
|
||
{
|
||
rtx label = NULL_RTX;
|
||
/* RESIDUAL could be zero at runtime and in that case *sp could
|
||
hold live data. Furthermore, we do not want to probe into the
|
||
red zone.
|
||
|
||
If TARGET_PROBE_RANGE_P then the target has promised it's safe to
|
||
probe at offset 0. In which case we no longer have to check for
|
||
RESIDUAL == 0. However we still need to probe at the right offset
|
||
when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
|
||
|
||
If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
|
||
on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time constant.
|
||
*/
|
||
anti_adjust_stack (residual);
|
||
|
||
if (!CONST_INT_P (residual))
|
||
{
|
||
label = gen_label_rtx ();
|
||
rtx_code op = target_probe_range_p ? LT : EQ;
|
||
rtx probe_cmp_value = target_probe_range_p
|
||
? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
|
||
: CONST0_RTX (GET_MODE (residual));
|
||
|
||
if (target_probe_range_p)
|
||
emit_stack_probe (stack_pointer_rtx);
|
||
|
||
emit_cmp_and_jump_insns (residual, probe_cmp_value,
|
||
op, NULL_RTX, Pmode, 1, label);
|
||
}
|
||
|
||
rtx x = NULL_RTX;
|
||
|
||
/* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
|
||
by the ABI defined safe value. */
|
||
if (!CONST_INT_P (residual) && target_probe_range_p)
|
||
x = GEN_INT (probe_range);
|
||
/* If RESIDUAL is a constant but smaller than the ABI defined safe value,
|
||
we still want to probe up, but the safest amount if a word. */
|
||
else if (target_probe_range_p)
|
||
{
|
||
if (INTVAL (residual) <= probe_range)
|
||
x = GEN_INT (GET_MODE_SIZE (word_mode));
|
||
else
|
||
x = GEN_INT (probe_range);
|
||
}
|
||
else
|
||
/* If nothing else, probe at the top of the new allocation. */
|
||
x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
|
||
|
||
emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
|
||
|
||
emit_insn (gen_blockage ());
|
||
if (!CONST_INT_P (residual))
|
||
emit_label (label);
|
||
}
|
||
}
|
||
|
||
|
||
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
|
||
while probing it. This pushes when SIZE is positive. SIZE need not
|
||
be constant. If ADJUST_BACK is true, adjust back the stack pointer
|
||
by plus SIZE at the end. */
|
||
|
||
void
|
||
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
|
||
{
|
||
/* We skip the probe for the first interval + a small dope of 4 words and
|
||
probe that many bytes past the specified size to maintain a protection
|
||
area at the botton of the stack. */
|
||
const int dope = 4 * UNITS_PER_WORD;
|
||
|
||
/* First ensure SIZE is Pmode. */
|
||
if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
|
||
size = convert_to_mode (Pmode, size, 1);
|
||
|
||
/* If we have a constant small number of probes to generate, that's the
|
||
easy case. */
|
||
if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
|
||
{
|
||
HOST_WIDE_INT isize = INTVAL (size), i;
|
||
bool first_probe = true;
|
||
|
||
/* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
|
||
values of N from 1 until it exceeds SIZE. If only one probe is
|
||
needed, this will not generate any code. Then adjust and probe
|
||
to PROBE_INTERVAL + SIZE. */
|
||
for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
|
||
{
|
||
if (first_probe)
|
||
{
|
||
anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
|
||
first_probe = false;
|
||
}
|
||
else
|
||
anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
|
||
emit_stack_probe (stack_pointer_rtx);
|
||
}
|
||
|
||
if (first_probe)
|
||
anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
|
||
else
|
||
anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
|
||
emit_stack_probe (stack_pointer_rtx);
|
||
}
|
||
|
||
/* In the variable case, do the same as above, but in a loop. Note that we
|
||
must be extra careful with variables wrapping around because we might be
|
||
at the very top (or the very bottom) of the address space and we have to
|
||
be able to handle this case properly; in particular, we use an equality
|
||
test for the loop condition. */
|
||
else
|
||
{
|
||
rtx rounded_size, rounded_size_op, last_addr, temp;
|
||
rtx_code_label *loop_lab = gen_label_rtx ();
|
||
rtx_code_label *end_lab = gen_label_rtx ();
|
||
|
||
|
||
/* Step 1: round SIZE to the previous multiple of the interval. */
|
||
|
||
/* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
|
||
rounded_size
|
||
= simplify_gen_binary (AND, Pmode, size,
|
||
gen_int_mode (-PROBE_INTERVAL, Pmode));
|
||
rounded_size_op = force_operand (rounded_size, NULL_RTX);
|
||
|
||
|
||
/* Step 2: compute initial and final value of the loop counter. */
|
||
|
||
/* SP = SP_0 + PROBE_INTERVAL. */
|
||
anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
|
||
|
||
/* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
|
||
last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
|
||
stack_pointer_rtx,
|
||
rounded_size_op), NULL_RTX);
|
||
|
||
|
||
/* Step 3: the loop
|
||
|
||
while (SP != LAST_ADDR)
|
||
{
|
||
SP = SP + PROBE_INTERVAL
|
||
probe at SP
|
||
}
|
||
|
||
adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
|
||
values of N from 1 until it is equal to ROUNDED_SIZE. */
|
||
|
||
emit_label (loop_lab);
|
||
|
||
/* Jump to END_LAB if SP == LAST_ADDR. */
|
||
emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
|
||
Pmode, 1, end_lab);
|
||
|
||
/* SP = SP + PROBE_INTERVAL and probe at SP. */
|
||
anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
|
||
emit_stack_probe (stack_pointer_rtx);
|
||
|
||
emit_jump (loop_lab);
|
||
|
||
emit_label (end_lab);
|
||
|
||
|
||
/* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
|
||
assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
|
||
|
||
/* TEMP = SIZE - ROUNDED_SIZE. */
|
||
temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
|
||
if (temp != const0_rtx)
|
||
{
|
||
/* Manual CSE if the difference is not known at compile-time. */
|
||
if (GET_CODE (temp) != CONST_INT)
|
||
temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
|
||
anti_adjust_stack (temp);
|
||
emit_stack_probe (stack_pointer_rtx);
|
||
}
|
||
}
|
||
|
||
/* Adjust back and account for the additional first interval. */
|
||
if (adjust_back)
|
||
adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
|
||
else
|
||
adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
|
||
}
|
||
|
||
/* Return an rtx representing the register or memory location
|
||
in which a scalar value of data type VALTYPE
|
||
was returned by a function call to function FUNC.
|
||
FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
|
||
function is known, otherwise 0.
|
||
OUTGOING is 1 if on a machine with register windows this function
|
||
should return the register in which the function will put its result
|
||
and 0 otherwise. */
|
||
|
||
rtx
|
||
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
|
||
int outgoing ATTRIBUTE_UNUSED)
|
||
{
|
||
rtx val;
|
||
|
||
val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
|
||
|
||
if (REG_P (val)
|
||
&& GET_MODE (val) == BLKmode)
|
||
{
|
||
unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
|
||
opt_scalar_int_mode tmpmode;
|
||
|
||
/* int_size_in_bytes can return -1. We don't need a check here
|
||
since the value of bytes will then be large enough that no
|
||
mode will match anyway. */
|
||
|
||
FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
|
||
{
|
||
/* Have we found a large enough mode? */
|
||
if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
|
||
break;
|
||
}
|
||
|
||
PUT_MODE (val, tmpmode.require ());
|
||
}
|
||
return val;
|
||
}
|
||
|
||
/* Return an rtx representing the register or memory location
|
||
in which a scalar value of mode MODE was returned by a library call. */
|
||
|
||
rtx
|
||
hard_libcall_value (machine_mode mode, rtx fun)
|
||
{
|
||
return targetm.calls.libcall_value (mode, fun);
|
||
}
|
||
|
||
/* Look up the tree code for a given rtx code
|
||
to provide the arithmetic operation for real_arithmetic.
|
||
The function returns an int because the caller may not know
|
||
what `enum tree_code' means. */
|
||
|
||
int
|
||
rtx_to_tree_code (enum rtx_code code)
|
||
{
|
||
enum tree_code tcode;
|
||
|
||
switch (code)
|
||
{
|
||
case PLUS:
|
||
tcode = PLUS_EXPR;
|
||
break;
|
||
case MINUS:
|
||
tcode = MINUS_EXPR;
|
||
break;
|
||
case MULT:
|
||
tcode = MULT_EXPR;
|
||
break;
|
||
case DIV:
|
||
tcode = RDIV_EXPR;
|
||
break;
|
||
case SMIN:
|
||
tcode = MIN_EXPR;
|
||
break;
|
||
case SMAX:
|
||
tcode = MAX_EXPR;
|
||
break;
|
||
default:
|
||
tcode = LAST_AND_UNUSED_TREE_CODE;
|
||
break;
|
||
}
|
||
return ((int) tcode);
|
||
}
|
||
|
||
#include "gt-explow.h"
|