  1. /* tc-i386.c -- Assemble code for the Intel 80386
  2.    Copyright (C) 1989-2015 Free Software Foundation, Inc.
  3.  
  4.    This file is part of GAS, the GNU Assembler.
  5.  
  6.    GAS is free software; you can redistribute it and/or modify
  7.    it under the terms of the GNU General Public License as published by
  8.    the Free Software Foundation; either version 3, or (at your option)
  9.    any later version.
  10.  
  11.    GAS is distributed in the hope that it will be useful,
  12.    but WITHOUT ANY WARRANTY; without even the implied warranty of
  13.    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  14.    GNU General Public License for more details.
  15.  
  16.    You should have received a copy of the GNU General Public License
  17.    along with GAS; see the file COPYING.  If not, write to the Free
  18.    Software Foundation, 51 Franklin Street - Fifth Floor, Boston, MA
  19.    02110-1301, USA.  */
  20.  
  21. /* Intel 80386 machine specific gas.
  22.    Written by Eliot Dresselhaus (eliot@mgm.mit.edu).
  23.    x86_64 support by Jan Hubicka (jh@suse.cz)
  24.    VIA PadLock support by Michal Ludvig (mludvig@suse.cz)
  25.    Bugs & suggestions are completely welcome.  This is free software.
  26.    Please help us make it better.  */
  27.  
  28. #include "as.h"
  29. #include "safe-ctype.h"
  30. #include "subsegs.h"
  31. #include "dwarf2dbg.h"
  32. #include "dw2gencfi.h"
  33. #include "elf/x86-64.h"
  34. #include "opcodes/i386-init.h"
  35.  
  36. #ifndef REGISTER_WARNINGS
  37. #define REGISTER_WARNINGS 1
  38. #endif
  39.  
  40. #ifndef INFER_ADDR_PREFIX
  41. #define INFER_ADDR_PREFIX 1
  42. #endif
  43.  
  44. #ifndef DEFAULT_ARCH
  45. #define DEFAULT_ARCH "i386"
  46. #endif
  47.  
  48. #ifndef INLINE
  49. #if __GNUC__ >= 2
  50. #define INLINE __inline__
  51. #else
  52. #define INLINE
  53. #endif
  54. #endif
  55.  
  56. /* Prefixes will be emitted in the order defined below.
  57.    WAIT_PREFIX must be the first prefix since FWAIT really is an
  58.    instruction, and so must come before any prefixes.
  59.    The preferred prefix order is SEG_PREFIX, ADDR_PREFIX, DATA_PREFIX,
  60.    REP_PREFIX/HLE_PREFIX, LOCK_PREFIX.  */
  61. #define WAIT_PREFIX     0
  62. #define SEG_PREFIX      1
  63. #define ADDR_PREFIX     2
  64. #define DATA_PREFIX     3
  65. #define REP_PREFIX      4
  66. #define HLE_PREFIX      REP_PREFIX
  67. #define BND_PREFIX      REP_PREFIX
  68. #define LOCK_PREFIX     5
  69. #define REX_PREFIX      6       /* must come last.  */
  70. #define MAX_PREFIXES    7       /* max prefixes per opcode */
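/* For illustration, a hypothetical "lock addl %eax, %fs:(%edi)" in
   32-bit mode fills SEG_PREFIX with 0x64 and LOCK_PREFIX with 0xf0;
   because the slots are emitted in ascending order, the segment
   override comes out before the lock byte, ahead of the opcode.  */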
  71.  
  72. /* we define the syntax here (modulo base,index,scale syntax) */
  73. #define REGISTER_PREFIX '%'
  74. #define IMMEDIATE_PREFIX '$'
  75. #define ABSOLUTE_PREFIX '*'
  76.  
  77. /* these are the instruction mnemonic suffixes in AT&T syntax or
  78.    memory operand size in Intel syntax.  */
  79. #define WORD_MNEM_SUFFIX  'w'
  80. #define BYTE_MNEM_SUFFIX  'b'
  81. #define SHORT_MNEM_SUFFIX 's'
  82. #define LONG_MNEM_SUFFIX  'l'
  83. #define QWORD_MNEM_SUFFIX  'q'
  84. #define XMMWORD_MNEM_SUFFIX  'x'
  85. #define YMMWORD_MNEM_SUFFIX 'y'
  86. #define ZMMWORD_MNEM_SUFFIX 'z'
  87. /* Intel Syntax.  Use a non-ascii letter since it never appears
  88.    in instructions.  */
  89. #define LONG_DOUBLE_MNEM_SUFFIX '\1'
  90.  
  91. #define END_OF_INSN '\0'
  92.  
  93. /*
  94.   'templates' is for grouping together 'template' structures for opcodes
  95.   of the same name.  This is only used for storing the insns in the grand
  96.   ole hash table of insns.
  97.   The templates themselves start at START and range up to (but not including)
  98.   END.
  99.   */
  100. typedef struct
  101. {
  102.   const insn_template *start;
  103.   const insn_template *end;
  104. }
  105. templates;
  106.  
  107. /* 386 operand encoding bytes:  see 386 book for details of this.  */
  108. typedef struct
  109. {
  110.   unsigned int regmem;  /* codes register or memory operand */
  111.   unsigned int reg;     /* codes register operand (or extended opcode) */
  112.   unsigned int mode;    /* how to interpret regmem & reg */
  113. }
  114. modrm_byte;
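/* When the instruction is emitted these fields are packed into a
   single ModRM byte: mode in bits 7-6, reg in bits 5-3 and regmem in
   bits 2-0.  E.g. register-direct "addl %eax, %ebx" uses mode 3,
   reg 0 (%eax) and regmem 3 (%ebx), i.e. the byte 0xc3.  */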
  115.  
  116. /* x86-64 extension prefix.  */
  117. typedef int rex_byte;
  118.  
  119. /* 386 opcode byte to code indirect addressing.  */
  120. typedef struct
  121. {
  122.   unsigned base;
  123.   unsigned index;
  124.   unsigned scale;
  125. }
  126. sib_byte;
  127.  
  128. /* x86 arch names, types and features */
  129. typedef struct
  130. {
  131.   const char *name;             /* arch name */
  132.   unsigned int len;             /* arch string length */
  133.   enum processor_type type;     /* arch type */
  134.   i386_cpu_flags flags;         /* cpu feature flags */
  135.   unsigned int skip;            /* show_arch should skip this. */
  136.   unsigned int negated;         /* turn off indicated flags.  */
  137. }
  138. arch_entry;
  139.  
  140. static void update_code_flag (int, int);
  141. static void set_code_flag (int);
  142. static void set_16bit_gcc_code_flag (int);
  143. static void set_intel_syntax (int);
  144. static void set_intel_mnemonic (int);
  145. static void set_allow_index_reg (int);
  146. static void set_check (int);
  147. static void set_cpu_arch (int);
  148. #ifdef TE_PE
  149. static void pe_directive_secrel (int);
  150. #endif
  151. static void signed_cons (int);
  152. static char *output_invalid (int c);
  153. static int i386_finalize_immediate (segT, expressionS *, i386_operand_type,
  154.                                     const char *);
  155. static int i386_finalize_displacement (segT, expressionS *, i386_operand_type,
  156.                                        const char *);
  157. static int i386_att_operand (char *);
  158. static int i386_intel_operand (char *, int);
  159. static int i386_intel_simplify (expressionS *);
  160. static int i386_intel_parse_name (const char *, expressionS *);
  161. static const reg_entry *parse_register (char *, char **);
  162. static char *parse_insn (char *, char *);
  163. static char *parse_operands (char *, const char *);
  164. static void swap_operands (void);
  165. static void swap_2_operands (int, int);
  166. static void optimize_imm (void);
  167. static void optimize_disp (void);
  168. static const insn_template *match_template (void);
  169. static int check_string (void);
  170. static int process_suffix (void);
  171. static int check_byte_reg (void);
  172. static int check_long_reg (void);
  173. static int check_qword_reg (void);
  174. static int check_word_reg (void);
  175. static int finalize_imm (void);
  176. static int process_operands (void);
  177. static const seg_entry *build_modrm_byte (void);
  178. static void output_insn (void);
  179. static void output_imm (fragS *, offsetT);
  180. static void output_disp (fragS *, offsetT);
  181. #ifndef I386COFF
  182. static void s_bss (int);
  183. #endif
  184. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  185. static void handle_large_common (int small ATTRIBUTE_UNUSED);
  186. #endif
  187.  
  188. static const char *default_arch = DEFAULT_ARCH;
  189.  
  190. /* This struct describes rounding control and SAE in the instruction.  */
  191. struct RC_Operation
  192. {
  193.   enum rc_type
  194.     {
  195.       rne = 0,
  196.       rd,
  197.       ru,
  198.       rz,
  199.       saeonly
  200.     } type;
  201.   int operand;
  202. };
  203.  
  204. static struct RC_Operation rc_op;
  205.  
  206. /* The struct describes masking, applied to OPERAND in the instruction.
  207.    MASK is a pointer to the corresponding mask register.  ZEROING tells
  208.    whether merging or zeroing mask is used.  */
  209. struct Mask_Operation
  210. {
  211.   const reg_entry *mask;
  212.   unsigned int zeroing;
  213.   /* The operand where this operation is associated.  */
  214.   int operand;
  215. };
  216.  
  217. static struct Mask_Operation mask_op;
  218.  
  219. /* The struct describes broadcasting, applied to OPERAND.  TYPE is
  220.    the broadcast factor ({1to8}, {1to16}, or none).  */
  221. struct Broadcast_Operation
  222. {
  223.   /* Type of broadcast: no broadcast, {1to8}, or {1to16}.  */
  224.   int type;
  225.  
  226.   /* Index of broadcasted operand.  */
  227.   int operand;
  228. };
  229.  
  230. static struct Broadcast_Operation broadcast_op;
  231.  
  232. /* VEX prefix.  */
  233. typedef struct
  234. {
  235.   /* VEX prefix is either 2 byte or 3 byte.  EVEX is 4 byte.  */
  236.   unsigned char bytes[4];
  237.   unsigned int length;
  238.   /* Destination or source register specifier.  */
  239.   const reg_entry *register_specifier;
  240. } vex_prefix;
  241.  
  242. /* 'md_assemble ()' gathers together information and puts it into an
  243.    i386_insn.  */
  244.  
  245. union i386_op
  246.   {
  247.     expressionS *disps;
  248.     expressionS *imms;
  249.     const reg_entry *regs;
  250.   };
  251.  
  252. enum i386_error
  253.   {
  254.     operand_size_mismatch,
  255.     operand_type_mismatch,
  256.     register_type_mismatch,
  257.     number_of_operands_mismatch,
  258.     invalid_instruction_suffix,
  259.     bad_imm4,
  260.     old_gcc_only,
  261.     unsupported_with_intel_mnemonic,
  262.     unsupported_syntax,
  263.     unsupported,
  264.     invalid_vsib_address,
  265.     invalid_vector_register_set,
  266.     unsupported_vector_index_register,
  267.     unsupported_broadcast,
  268.     broadcast_not_on_src_operand,
  269.     broadcast_needed,
  270.     unsupported_masking,
  271.     mask_not_on_destination,
  272.     no_default_mask,
  273.     unsupported_rc_sae,
  274.     rc_sae_operand_not_last_imm,
  275.     invalid_register_operand,
  276.     try_vector_disp8
  277.   };
  278.  
  279. struct _i386_insn
  280.   {
  281.     /* TM holds the template for the insn we're currently assembling.  */
  282.     insn_template tm;
  283.  
  284.     /* SUFFIX holds the instruction size suffix for byte, word, dword
  285.        or qword, if given.  */
  286.     char suffix;
  287.  
  288.     /* OPERANDS gives the number of given operands.  */
  289.     unsigned int operands;
  290.  
  291.     /* REG_OPERANDS, DISP_OPERANDS, MEM_OPERANDS, IMM_OPERANDS give the number
  292.        of given register, displacement, memory operands and immediate
  293.        operands.  */
  294.     unsigned int reg_operands, disp_operands, mem_operands, imm_operands;
  295.  
  296.     /* TYPES [i] is the type (see above #defines) which tells us how to
  297.        use OP[i] for the corresponding operand.  */
  298.     i386_operand_type types[MAX_OPERANDS];
  299.  
  300.     /* Displacement expression, immediate expression, or register for each
  301.        operand.  */
  302.     union i386_op op[MAX_OPERANDS];
  303.  
  304.     /* Flags for operands.  */
  305.     unsigned int flags[MAX_OPERANDS];
  306. #define Operand_PCrel 1
  307.  
  308.     /* Relocation type for operand */
  309.     enum bfd_reloc_code_real reloc[MAX_OPERANDS];
  310.  
  311.     /* BASE_REG, INDEX_REG, and LOG2_SCALE_FACTOR are used to encode
  312.        the base index byte below.  */
  313.     const reg_entry *base_reg;
  314.     const reg_entry *index_reg;
  315.     unsigned int log2_scale_factor;
  316.  
  317.     /* SEG gives the seg_entries of this insn.  They are zero unless
  318.        explicit segment overrides are given.  */
  319.     const seg_entry *seg[2];
  320.  
  321.     /* PREFIX holds all the given prefix opcodes (usually null).
  322.        PREFIXES is the number of prefix opcodes.  */
  323.     unsigned int prefixes;
  324.     unsigned char prefix[MAX_PREFIXES];
  325.  
  326.     /* RM and SIB are the modrm byte and the sib byte where the
  327.        addressing modes of this insn are encoded.  */
  328.     modrm_byte rm;
  329.     rex_byte rex;
  330.     rex_byte vrex;
  331.     sib_byte sib;
  332.     vex_prefix vex;
  333.  
  334.     /* Masking attributes.  */
  335.     struct Mask_Operation *mask;
  336.  
  337.     /* Rounding control and SAE attributes.  */
  338.     struct RC_Operation *rounding;
  339.  
  340.     /* Broadcasting attributes.  */
  341.     struct Broadcast_Operation *broadcast;
  342.  
  343.     /* Compressed disp8*N attribute.  */
  344.     unsigned int memshift;
  345.  
  346.     /* Swap operand in encoding.  */
  347.     unsigned int swap_operand;
  348.  
  349.     /* Prefer 8bit or 32bit displacement in encoding.  */
  350.     enum
  351.       {
  352.         disp_encoding_default = 0,
  353.         disp_encoding_8bit,
  354.         disp_encoding_32bit
  355.       } disp_encoding;
  356.  
  357.     /* REP prefix.  */
  358.     const char *rep_prefix;
  359.  
  360.     /* HLE prefix.  */
  361.     const char *hle_prefix;
  362.  
  363.     /* Have BND prefix.  */
  364.     const char *bnd_prefix;
  365.  
  366.     /* Need VREX to support upper 16 registers.  */
  367.     int need_vrex;
  368.  
  369.     /* Error message.  */
  370.     enum i386_error error;
  371.   };
  372.  
  373. typedef struct _i386_insn i386_insn;
  374.  
  375. /* Link RC type with corresponding string, that'll be looked for in
  376.    asm.  */
  377. struct RC_name
  378. {
  379.   enum rc_type type;
  380.   const char *name;
  381.   unsigned int len;
  382. };
  383.  
  384. static const struct RC_name RC_NamesTable[] =
  385. {
  386.   {  rne, STRING_COMMA_LEN ("rn-sae") },
  387.   {  rd,  STRING_COMMA_LEN ("rd-sae") },
  388.   {  ru,  STRING_COMMA_LEN ("ru-sae") },
  389.   {  rz,  STRING_COMMA_LEN ("rz-sae") },
  390.   {  saeonly,  STRING_COMMA_LEN ("sae") },
  391. };
  392.  
  393. /* List of chars besides those in app.c:symbol_chars that can start an
  394.    operand.  Used to prevent the scrubber eating vital white-space.  */
  395. const char extra_symbol_chars[] = "*%-([{"
  396. #ifdef LEX_AT
  397.         "@"
  398. #endif
  399. #ifdef LEX_QM
  400.         "?"
  401. #endif
  402.         ;
  403.  
  404. #if (defined (TE_I386AIX)                               \
  405.      || ((defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)) \
  406.          && !defined (TE_GNU)                           \
  407.          && !defined (TE_LINUX)                         \
  408.          && !defined (TE_NACL)                          \
  409.          && !defined (TE_NETWARE)                       \
  410.          && !defined (TE_FreeBSD)                       \
  411.          && !defined (TE_DragonFly)                     \
  412.          && !defined (TE_NetBSD)))
  413. /* This array holds the chars that always start a comment.  If the
  414.    pre-processor is disabled, these aren't very useful.  The option
  415.    --divide will remove '/' from this list.  */
  416. const char *i386_comment_chars = "#/";
  417. #define SVR4_COMMENT_CHARS 1
  418. #define PREFIX_SEPARATOR '\\'
  419.  
  420. #else
  421. const char *i386_comment_chars = "#";
  422. #define PREFIX_SEPARATOR '/'
  423. #endif
  424.  
  425. /* This array holds the chars that only start a comment at the beginning of
  426.    a line.  If the line seems to have the form '# 123 filename'
  427.    .line and .file directives will appear in the pre-processed output.
  428.    Note that input_file.c hand checks for '#' at the beginning of the
  429.    first line of the input file.  This is because the compiler outputs
  430.    #NO_APP at the beginning of its output.
  431.    Also note that comments started like this one will always work if
  432.    '/' isn't otherwise defined.  */
  433. const char line_comment_chars[] = "#/";
  434.  
  435. const char line_separator_chars[] = ";";
  436.  
  437. /* Chars that can be used to separate mant from exp in floating point
  438.    nums.  */
  439. const char EXP_CHARS[] = "eE";
  440.  
  441. /* Chars that mean this number is a floating point constant
  442.    As in 0f12.456
  443.    or    0d1.2345e12.  */
  444. const char FLT_CHARS[] = "fFdDxX";
  445.  
  446. /* Tables for lexical analysis.  */
  447. static char mnemonic_chars[256];
  448. static char register_chars[256];
  449. static char operand_chars[256];
  450. static char identifier_chars[256];
  451. static char digit_chars[256];
  452.  
  453. /* Lexical macros.  */
  454. #define is_mnemonic_char(x) (mnemonic_chars[(unsigned char) x])
  455. #define is_operand_char(x) (operand_chars[(unsigned char) x])
  456. #define is_register_char(x) (register_chars[(unsigned char) x])
  457. #define is_space_char(x) ((x) == ' ')
  458. #define is_identifier_char(x) (identifier_chars[(unsigned char) x])
  459. #define is_digit_char(x) (digit_chars[(unsigned char) x])
  460.  
  461. /* All non-digit non-letter characters that may occur in an operand.  */
  462. static char operand_special_chars[] = "%$-+(,)*._~/<>|&^!:[@]";
  463.  
  464. /* md_assemble() always leaves the strings it's passed unaltered.  To
  465.    effect this we maintain a stack of saved characters that we've smashed
  466.    with '\0's (indicating end of strings for various sub-fields of the
  467.    assembler instruction).  */
  468. static char save_stack[32];
  469. static char *save_stack_p;
  470. #define END_STRING_AND_SAVE(s) \
  471.         do { *save_stack_p++ = *(s); *(s) = '\0'; } while (0)
  472. #define RESTORE_END_STRING(s) \
  473.         do { *(s) = *--save_stack_p; } while (0)
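/* Typical use (a sketch): a sub-field of the caller's string is
   NUL-terminated in place with END_STRING_AND_SAVE (l), looked up
   (e.g. via hash_find ()), and the smashed character is put back with
   RESTORE_END_STRING (l) so the caller's buffer is left unchanged.  */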
  474.  
  475. /* The instruction we're assembling.  */
  476. static i386_insn i;
  477.  
  478. /* Possible templates for current insn.  */
  479. static const templates *current_templates;
  480.  
  481. /* Per instruction expressionS buffers: max displacements & immediates.  */
  482. static expressionS disp_expressions[MAX_MEMORY_OPERANDS];
  483. static expressionS im_expressions[MAX_IMMEDIATE_OPERANDS];
  484.  
  485. /* Current operand we are working on.  */
  486. static int this_operand = -1;
  487.  
  488. /* We support three different modes.  FLAG_CODE variable is used to distinguish
  489.    these.  */
  490.  
  491. enum flag_code {
  492.         CODE_32BIT,
  493.         CODE_16BIT,
  494.         CODE_64BIT };
  495.  
  496. static enum flag_code flag_code;
  497. static unsigned int object_64bit;
  498. static unsigned int disallow_64bit_reloc;
  499. static int use_rela_relocations = 0;
  500.  
  501. #if ((defined (OBJ_MAYBE_COFF) && defined (OBJ_MAYBE_AOUT)) \
  502.      || defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF) \
  503.      || defined (TE_PE) || defined (TE_PEP) || defined (OBJ_MACH_O))
  504.  
  505. /* The ELF ABI to use.  */
  506. enum x86_elf_abi
  507. {
  508.   I386_ABI,
  509.   X86_64_ABI,
  510.   X86_64_X32_ABI
  511. };
  512.  
  513. static enum x86_elf_abi x86_elf_abi = I386_ABI;
  514. #endif
  515.  
  516. #if defined (TE_PE) || defined (TE_PEP)
  517. /* Use big object file format.  */
  518. static int use_big_obj = 0;
  519. #endif
  520.  
  521. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  522. /* 1 if generating code for a shared library.  */
  523. static int shared = 0;
  524. #endif
  525.  
  526. /* 1 for intel syntax,
  527.    0 if att syntax.  */
  528. static int intel_syntax = 0;
  529.  
  530. /* 1 for intel mnemonic,
  531.    0 if att mnemonic.  */
  532. static int intel_mnemonic = !SYSV386_COMPAT;
  533.  
  534. /* 1 if support old (<= 2.8.1) versions of gcc.  */
  535. static int old_gcc = OLDGCC_COMPAT;
  536.  
  537. /* 1 if pseudo registers are permitted.  */
  538. static int allow_pseudo_reg = 0;
  539.  
  540. /* 1 if register prefix % not required.  */
  541. static int allow_naked_reg = 0;
  542.  
  543. /* 1 if the assembler should add BND prefix for all control-transferring
  544.    instructions supporting it, even if this prefix wasn't specified
  545.    explicitly.  */
  546. static int add_bnd_prefix = 0;
  547.  
  548. /* 1 if pseudo index register, eiz/riz, is allowed.  */
  549. static int allow_index_reg = 0;
  550.  
  551. /* 1 if the assembler should ignore LOCK prefix, even if it was
  552.    specified explicitly.  */
  553. static int omit_lock_prefix = 0;
  554.  
  555. static enum check_kind
  556.   {
  557.     check_none = 0,
  558.     check_warning,
  559.     check_error
  560.   }
  561. sse_check, operand_check = check_warning;
  562.  
  563. /* Register prefix used for error message.  */
  564. static const char *register_prefix = "%";
  565.  
  566. /* Used in 16 bit gcc mode to add an l suffix to call, ret, enter,
  567.    leave, push, and pop instructions so that gcc has the same stack
  568.    frame as in 32 bit mode.  */
  569. static char stackop_size = '\0';
  570.  
  571. /* Non-zero to optimize code alignment.  */
  572. int optimize_align_code = 1;
  573.  
  574. /* Non-zero to quieten some warnings.  */
  575. static int quiet_warnings = 0;
  576.  
  577. /* CPU name.  */
  578. static const char *cpu_arch_name = NULL;
  579. static char *cpu_sub_arch_name = NULL;
  580.  
  581. /* CPU feature flags.  */
  582. static i386_cpu_flags cpu_arch_flags = CPU_UNKNOWN_FLAGS;
  583.  
  584. /* If we have selected a cpu we are generating instructions for.  */
  585. static int cpu_arch_tune_set = 0;
  586.  
  587. /* Cpu we are generating instructions for.  */
  588. enum processor_type cpu_arch_tune = PROCESSOR_UNKNOWN;
  589.  
  590. /* CPU feature flags of cpu we are generating instructions for.  */
  591. static i386_cpu_flags cpu_arch_tune_flags;
  592.  
  593. /* CPU instruction set architecture used.  */
  594. enum processor_type cpu_arch_isa = PROCESSOR_UNKNOWN;
  595.  
  596. /* CPU feature flags of instruction set architecture used.  */
  597. i386_cpu_flags cpu_arch_isa_flags;
  598.  
  599. /* If set, conditional jumps are not automatically promoted to handle
  600.    offsets larger than a byte.  */
  601. static unsigned int no_cond_jump_promotion = 0;
  602.  
  603. /* Encode SSE instructions with VEX prefix.  */
  604. static unsigned int sse2avx;
  605.  
  606. /* Encode scalar AVX instructions with specific vector length.  */
  607. static enum
  608.   {
  609.     vex128 = 0,
  610.     vex256
  611.   } avxscalar;
  612.  
  613. /* Encode scalar EVEX LIG instructions with specific vector length.  */
  614. static enum
  615.   {
  616.     evexl128 = 0,
  617.     evexl256,
  618.     evexl512
  619.   } evexlig;
  620.  
  621. /* Encode EVEX WIG instructions with specific evex.w.  */
  622. static enum
  623.   {
  624.     evexw0 = 0,
  625.     evexw1
  626.   } evexwig;
  627.  
  628. /* Value to encode in EVEX RC bits, for SAE-only instructions.  */
  629. static enum rc_type evexrcig = rne;
  630.  
  631. /* Pre-defined "_GLOBAL_OFFSET_TABLE_".  */
  632. static symbolS *GOT_symbol;
  633.  
  634. /* The dwarf2 return column, adjusted for 32 or 64 bit.  */
  635. unsigned int x86_dwarf2_return_column;
  636.  
  637. /* The dwarf2 data alignment, adjusted for 32 or 64 bit.  */
  638. int x86_cie_data_alignment;
  639.  
  640. /* Interface to relax_segment.
  641.    There are 3 major relax states for 386 jump insns because the
  642.    different types of jumps add different sizes to frags when we're
  643.    figuring out what sort of jump to choose to reach a given label.  */
  644.  
  645. /* Types.  */
  646. #define UNCOND_JUMP 0
  647. #define COND_JUMP 1
  648. #define COND_JUMP86 2
  649.  
  650. /* Sizes.  */
  651. #define CODE16  1
  652. #define SMALL   0
  653. #define SMALL16 (SMALL | CODE16)
  654. #define BIG     2
  655. #define BIG16   (BIG | CODE16)
  656.  
  657. #ifndef INLINE
  658. #ifdef __GNUC__
  659. #define INLINE __inline__
  660. #else
  661. #define INLINE
  662. #endif
  663. #endif
  664.  
  665. #define ENCODE_RELAX_STATE(type, size) \
  666.   ((relax_substateT) (((type) << 2) | (size)))
  667. #define TYPE_FROM_RELAX_STATE(s) \
  668.   ((s) >> 2)
  669. #define DISP_SIZE_FROM_RELAX_STATE(s) \
  670.     ((((s) & 3) == BIG ? 4 : (((s) & 3) == BIG16 ? 2 : 1)))
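/* For example, ENCODE_RELAX_STATE (COND_JUMP, BIG) is (1 << 2) | 2,
   i.e. 6; TYPE_FROM_RELAX_STATE (6) gives back COND_JUMP, and
   DISP_SIZE_FROM_RELAX_STATE (6) is 4, the size of the 32 bit
   displacement used by the "big" form of the jump.  */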
  671.  
  672. /* This table is used by relax_frag to promote short jumps to long
  673.    ones where necessary.  SMALL (short) jumps may be promoted to BIG
  674.    (32 bit long) ones, and SMALL16 jumps to BIG16 (16 bit long).  We
  675.    don't allow a short jump in a 32 bit code segment to be promoted to
  676.    a 16 bit offset jump because it's slower (requires data size
  677.    prefix), and doesn't work, unless the destination is in the bottom
  678.    64k of the code segment (The top 16 bits of eip are zeroed).  */
  679.  
  680. const relax_typeS md_relax_table[] =
  681. {
  682.   /* The fields are:
  683.      1) most positive reach of this state,
  684.      2) most negative reach of this state,
  685.      3) how many bytes this mode will have in the variable part of the frag
  686.      4) which index into the table to try if we can't fit into this one.  */
  687.  
  688.   /* UNCOND_JUMP states.  */
  689.   {127 + 1, -128 + 1, 1, ENCODE_RELAX_STATE (UNCOND_JUMP, BIG)},
  690.   {127 + 1, -128 + 1, 1, ENCODE_RELAX_STATE (UNCOND_JUMP, BIG16)},
  691.   /* dword jmp adds 4 bytes to frag:
  692.      0 extra opcode bytes, 4 displacement bytes.  */
  693.   {0, 0, 4, 0},
  694.   /* word jmp adds 2 bytes to frag:
  695.      0 extra opcode bytes, 2 displacement bytes.  */
  696.   {0, 0, 2, 0},
  697.  
  698.   /* COND_JUMP states.  */
  699.   {127 + 1, -128 + 1, 1, ENCODE_RELAX_STATE (COND_JUMP, BIG)},
  700.   {127 + 1, -128 + 1, 1, ENCODE_RELAX_STATE (COND_JUMP, BIG16)},
  701.   /* dword conditionals add 5 bytes to frag:
  702.      1 extra opcode byte, 4 displacement bytes.  */
  703.   {0, 0, 5, 0},
  704.   /* word conditionals add 3 bytes to frag:
  705.      1 extra opcode byte, 2 displacement bytes.  */
  706.   {0, 0, 3, 0},
  707.  
  708.   /* COND_JUMP86 states.  */
  709.   {127 + 1, -128 + 1, 1, ENCODE_RELAX_STATE (COND_JUMP86, BIG)},
  710.   {127 + 1, -128 + 1, 1, ENCODE_RELAX_STATE (COND_JUMP86, BIG16)},
  711.   /* dword conditionals add 5 bytes to frag:
  712.      1 extra opcode byte, 4 displacement bytes.  */
  713.   {0, 0, 5, 0},
  714.   /* word conditionals add 4 bytes to frag:
  715.      1 displacement byte and a 3 byte long branch insn.  */
  716.   {0, 0, 4, 0}
  717. };
  718.  
  719. static const arch_entry cpu_arch[] =
  720. {
  721.   /* Do not replace the first two entries - i386_target_format()
  722.      relies on them being there in this order.  */
  723.   { STRING_COMMA_LEN ("generic32"), PROCESSOR_GENERIC32,
  724.     CPU_GENERIC32_FLAGS, 0, 0 },
  725.   { STRING_COMMA_LEN ("generic64"), PROCESSOR_GENERIC64,
  726.     CPU_GENERIC64_FLAGS, 0, 0 },
  727.   { STRING_COMMA_LEN ("i8086"), PROCESSOR_UNKNOWN,
  728.     CPU_NONE_FLAGS, 0, 0 },
  729.   { STRING_COMMA_LEN ("i186"), PROCESSOR_UNKNOWN,
  730.     CPU_I186_FLAGS, 0, 0 },
  731.   { STRING_COMMA_LEN ("i286"), PROCESSOR_UNKNOWN,
  732.     CPU_I286_FLAGS, 0, 0 },
  733.   { STRING_COMMA_LEN ("i386"), PROCESSOR_I386,
  734.     CPU_I386_FLAGS, 0, 0 },
  735.   { STRING_COMMA_LEN ("i486"), PROCESSOR_I486,
  736.     CPU_I486_FLAGS, 0, 0 },
  737.   { STRING_COMMA_LEN ("i586"), PROCESSOR_PENTIUM,
  738.     CPU_I586_FLAGS, 0, 0 },
  739.   { STRING_COMMA_LEN ("i686"), PROCESSOR_PENTIUMPRO,
  740.     CPU_I686_FLAGS, 0, 0 },
  741.   { STRING_COMMA_LEN ("pentium"), PROCESSOR_PENTIUM,
  742.     CPU_I586_FLAGS, 0, 0 },
  743.   { STRING_COMMA_LEN ("pentiumpro"), PROCESSOR_PENTIUMPRO,
  744.     CPU_PENTIUMPRO_FLAGS, 0, 0 },
  745.   { STRING_COMMA_LEN ("pentiumii"), PROCESSOR_PENTIUMPRO,
  746.     CPU_P2_FLAGS, 0, 0 },
  747.   { STRING_COMMA_LEN ("pentiumiii"),PROCESSOR_PENTIUMPRO,
  748.     CPU_P3_FLAGS, 0, 0 },
  749.   { STRING_COMMA_LEN ("pentium4"), PROCESSOR_PENTIUM4,
  750.     CPU_P4_FLAGS, 0, 0 },
  751.   { STRING_COMMA_LEN ("prescott"), PROCESSOR_NOCONA,
  752.     CPU_CORE_FLAGS, 0, 0 },
  753.   { STRING_COMMA_LEN ("nocona"), PROCESSOR_NOCONA,
  754.     CPU_NOCONA_FLAGS, 0, 0 },
  755.   { STRING_COMMA_LEN ("yonah"), PROCESSOR_CORE,
  756.     CPU_CORE_FLAGS, 1, 0 },
  757.   { STRING_COMMA_LEN ("core"), PROCESSOR_CORE,
  758.     CPU_CORE_FLAGS, 0, 0 },
  759.   { STRING_COMMA_LEN ("merom"), PROCESSOR_CORE2,
  760.     CPU_CORE2_FLAGS, 1, 0 },
  761.   { STRING_COMMA_LEN ("core2"), PROCESSOR_CORE2,
  762.     CPU_CORE2_FLAGS, 0, 0 },
  763.   { STRING_COMMA_LEN ("corei7"), PROCESSOR_COREI7,
  764.     CPU_COREI7_FLAGS, 0, 0 },
  765.   { STRING_COMMA_LEN ("l1om"), PROCESSOR_L1OM,
  766.     CPU_L1OM_FLAGS, 0, 0 },
  767.   { STRING_COMMA_LEN ("k1om"), PROCESSOR_K1OM,
  768.     CPU_K1OM_FLAGS, 0, 0 },
  769.   { STRING_COMMA_LEN ("iamcu"), PROCESSOR_IAMCU,
  770.     CPU_IAMCU_FLAGS, 0, 0 },
  771.   { STRING_COMMA_LEN ("k6"), PROCESSOR_K6,
  772.     CPU_K6_FLAGS, 0, 0 },
  773.   { STRING_COMMA_LEN ("k6_2"), PROCESSOR_K6,
  774.     CPU_K6_2_FLAGS, 0, 0 },
  775.   { STRING_COMMA_LEN ("athlon"), PROCESSOR_ATHLON,
  776.     CPU_ATHLON_FLAGS, 0, 0 },
  777.   { STRING_COMMA_LEN ("sledgehammer"), PROCESSOR_K8,
  778.     CPU_K8_FLAGS, 1, 0 },
  779.   { STRING_COMMA_LEN ("opteron"), PROCESSOR_K8,
  780.     CPU_K8_FLAGS, 0, 0 },
  781.   { STRING_COMMA_LEN ("k8"), PROCESSOR_K8,
  782.     CPU_K8_FLAGS, 0, 0 },
  783.   { STRING_COMMA_LEN ("amdfam10"), PROCESSOR_AMDFAM10,
  784.     CPU_AMDFAM10_FLAGS, 0, 0 },
  785.   { STRING_COMMA_LEN ("bdver1"), PROCESSOR_BD,
  786.     CPU_BDVER1_FLAGS, 0, 0 },
  787.   { STRING_COMMA_LEN ("bdver2"), PROCESSOR_BD,
  788.     CPU_BDVER2_FLAGS, 0, 0 },
  789.   { STRING_COMMA_LEN ("bdver3"), PROCESSOR_BD,
  790.     CPU_BDVER3_FLAGS, 0, 0 },
  791.   { STRING_COMMA_LEN ("bdver4"), PROCESSOR_BD,
  792.     CPU_BDVER4_FLAGS, 0, 0 },
  793.   { STRING_COMMA_LEN ("znver1"), PROCESSOR_ZNVER,
  794.     CPU_ZNVER1_FLAGS, 0, 0 },
  795.   { STRING_COMMA_LEN ("btver1"), PROCESSOR_BT,
  796.     CPU_BTVER1_FLAGS, 0, 0 },
  797.   { STRING_COMMA_LEN ("btver2"), PROCESSOR_BT,
  798.     CPU_BTVER2_FLAGS, 0, 0 },
  799.   { STRING_COMMA_LEN (".8087"), PROCESSOR_UNKNOWN,
  800.     CPU_8087_FLAGS, 0, 0 },
  801.   { STRING_COMMA_LEN (".287"), PROCESSOR_UNKNOWN,
  802.     CPU_287_FLAGS, 0, 0 },
  803.   { STRING_COMMA_LEN (".387"), PROCESSOR_UNKNOWN,
  804.     CPU_387_FLAGS, 0, 0 },
  805.   { STRING_COMMA_LEN (".no87"), PROCESSOR_UNKNOWN,
  806.     CPU_ANY87_FLAGS, 0, 1 },
  807.   { STRING_COMMA_LEN (".mmx"), PROCESSOR_UNKNOWN,
  808.     CPU_MMX_FLAGS, 0, 0 },
  809.   { STRING_COMMA_LEN (".nommx"), PROCESSOR_UNKNOWN,
  810.     CPU_3DNOWA_FLAGS, 0, 1 },
  811.   { STRING_COMMA_LEN (".sse"), PROCESSOR_UNKNOWN,
  812.     CPU_SSE_FLAGS, 0, 0 },
  813.   { STRING_COMMA_LEN (".sse2"), PROCESSOR_UNKNOWN,
  814.     CPU_SSE2_FLAGS, 0, 0 },
  815.   { STRING_COMMA_LEN (".sse3"), PROCESSOR_UNKNOWN,
  816.     CPU_SSE3_FLAGS, 0, 0 },
  817.   { STRING_COMMA_LEN (".ssse3"), PROCESSOR_UNKNOWN,
  818.     CPU_SSSE3_FLAGS, 0, 0 },
  819.   { STRING_COMMA_LEN (".sse4.1"), PROCESSOR_UNKNOWN,
  820.     CPU_SSE4_1_FLAGS, 0, 0 },
  821.   { STRING_COMMA_LEN (".sse4.2"), PROCESSOR_UNKNOWN,
  822.     CPU_SSE4_2_FLAGS, 0, 0 },
  823.   { STRING_COMMA_LEN (".sse4"), PROCESSOR_UNKNOWN,
  824.     CPU_SSE4_2_FLAGS, 0, 0 },
  825.   { STRING_COMMA_LEN (".nosse"), PROCESSOR_UNKNOWN,
  826.     CPU_ANY_SSE_FLAGS, 0, 1 },
  827.   { STRING_COMMA_LEN (".avx"), PROCESSOR_UNKNOWN,
  828.     CPU_AVX_FLAGS, 0, 0 },
  829.   { STRING_COMMA_LEN (".avx2"), PROCESSOR_UNKNOWN,
  830.     CPU_AVX2_FLAGS, 0, 0 },
  831.   { STRING_COMMA_LEN (".avx512f"), PROCESSOR_UNKNOWN,
  832.     CPU_AVX512F_FLAGS, 0, 0 },
  833.   { STRING_COMMA_LEN (".avx512cd"), PROCESSOR_UNKNOWN,
  834.     CPU_AVX512CD_FLAGS, 0, 0 },
  835.   { STRING_COMMA_LEN (".avx512er"), PROCESSOR_UNKNOWN,
  836.     CPU_AVX512ER_FLAGS, 0, 0 },
  837.   { STRING_COMMA_LEN (".avx512pf"), PROCESSOR_UNKNOWN,
  838.     CPU_AVX512PF_FLAGS, 0, 0 },
  839.   { STRING_COMMA_LEN (".avx512dq"), PROCESSOR_UNKNOWN,
  840.     CPU_AVX512DQ_FLAGS, 0, 0 },
  841.   { STRING_COMMA_LEN (".avx512bw"), PROCESSOR_UNKNOWN,
  842.     CPU_AVX512BW_FLAGS, 0, 0 },
  843.   { STRING_COMMA_LEN (".avx512vl"), PROCESSOR_UNKNOWN,
  844.     CPU_AVX512VL_FLAGS, 0, 0 },
  845.   { STRING_COMMA_LEN (".noavx"), PROCESSOR_UNKNOWN,
  846.     CPU_ANY_AVX_FLAGS, 0, 1 },
  847.   { STRING_COMMA_LEN (".vmx"), PROCESSOR_UNKNOWN,
  848.     CPU_VMX_FLAGS, 0, 0 },
  849.   { STRING_COMMA_LEN (".vmfunc"), PROCESSOR_UNKNOWN,
  850.     CPU_VMFUNC_FLAGS, 0, 0 },
  851.   { STRING_COMMA_LEN (".smx"), PROCESSOR_UNKNOWN,
  852.     CPU_SMX_FLAGS, 0, 0 },
  853.   { STRING_COMMA_LEN (".xsave"), PROCESSOR_UNKNOWN,
  854.     CPU_XSAVE_FLAGS, 0, 0 },
  855.   { STRING_COMMA_LEN (".xsaveopt"), PROCESSOR_UNKNOWN,
  856.     CPU_XSAVEOPT_FLAGS, 0, 0 },
  857.   { STRING_COMMA_LEN (".xsavec"), PROCESSOR_UNKNOWN,
  858.     CPU_XSAVEC_FLAGS, 0, 0 },
  859.   { STRING_COMMA_LEN (".xsaves"), PROCESSOR_UNKNOWN,
  860.     CPU_XSAVES_FLAGS, 0, 0 },
  861.   { STRING_COMMA_LEN (".aes"), PROCESSOR_UNKNOWN,
  862.     CPU_AES_FLAGS, 0, 0 },
  863.   { STRING_COMMA_LEN (".pclmul"), PROCESSOR_UNKNOWN,
  864.     CPU_PCLMUL_FLAGS, 0, 0 },
  865.   { STRING_COMMA_LEN (".clmul"), PROCESSOR_UNKNOWN,
  866.     CPU_PCLMUL_FLAGS, 1, 0 },
  867.   { STRING_COMMA_LEN (".fsgsbase"), PROCESSOR_UNKNOWN,
  868.     CPU_FSGSBASE_FLAGS, 0, 0 },
  869.   { STRING_COMMA_LEN (".rdrnd"), PROCESSOR_UNKNOWN,
  870.     CPU_RDRND_FLAGS, 0, 0 },
  871.   { STRING_COMMA_LEN (".f16c"), PROCESSOR_UNKNOWN,
  872.     CPU_F16C_FLAGS, 0, 0 },
  873.   { STRING_COMMA_LEN (".bmi2"), PROCESSOR_UNKNOWN,
  874.     CPU_BMI2_FLAGS, 0, 0 },
  875.   { STRING_COMMA_LEN (".fma"), PROCESSOR_UNKNOWN,
  876.     CPU_FMA_FLAGS, 0, 0 },
  877.   { STRING_COMMA_LEN (".fma4"), PROCESSOR_UNKNOWN,
  878.     CPU_FMA4_FLAGS, 0, 0 },
  879.   { STRING_COMMA_LEN (".xop"), PROCESSOR_UNKNOWN,
  880.     CPU_XOP_FLAGS, 0, 0 },
  881.   { STRING_COMMA_LEN (".lwp"), PROCESSOR_UNKNOWN,
  882.     CPU_LWP_FLAGS, 0, 0 },
  883.   { STRING_COMMA_LEN (".movbe"), PROCESSOR_UNKNOWN,
  884.     CPU_MOVBE_FLAGS, 0, 0 },
  885.   { STRING_COMMA_LEN (".cx16"), PROCESSOR_UNKNOWN,
  886.     CPU_CX16_FLAGS, 0, 0 },
  887.   { STRING_COMMA_LEN (".ept"), PROCESSOR_UNKNOWN,
  888.     CPU_EPT_FLAGS, 0, 0 },
  889.   { STRING_COMMA_LEN (".lzcnt"), PROCESSOR_UNKNOWN,
  890.     CPU_LZCNT_FLAGS, 0, 0 },
  891.   { STRING_COMMA_LEN (".hle"), PROCESSOR_UNKNOWN,
  892.     CPU_HLE_FLAGS, 0, 0 },
  893.   { STRING_COMMA_LEN (".rtm"), PROCESSOR_UNKNOWN,
  894.     CPU_RTM_FLAGS, 0, 0 },
  895.   { STRING_COMMA_LEN (".invpcid"), PROCESSOR_UNKNOWN,
  896.     CPU_INVPCID_FLAGS, 0, 0 },
  897.   { STRING_COMMA_LEN (".clflush"), PROCESSOR_UNKNOWN,
  898.     CPU_CLFLUSH_FLAGS, 0, 0 },
  899.   { STRING_COMMA_LEN (".nop"), PROCESSOR_UNKNOWN,
  900.     CPU_NOP_FLAGS, 0, 0 },
  901.   { STRING_COMMA_LEN (".syscall"), PROCESSOR_UNKNOWN,
  902.     CPU_SYSCALL_FLAGS, 0, 0 },
  903.   { STRING_COMMA_LEN (".rdtscp"), PROCESSOR_UNKNOWN,
  904.     CPU_RDTSCP_FLAGS, 0, 0 },
  905.   { STRING_COMMA_LEN (".3dnow"), PROCESSOR_UNKNOWN,
  906.     CPU_3DNOW_FLAGS, 0, 0 },
  907.   { STRING_COMMA_LEN (".3dnowa"), PROCESSOR_UNKNOWN,
  908.     CPU_3DNOWA_FLAGS, 0, 0 },
  909.   { STRING_COMMA_LEN (".padlock"), PROCESSOR_UNKNOWN,
  910.     CPU_PADLOCK_FLAGS, 0, 0 },
  911.   { STRING_COMMA_LEN (".pacifica"), PROCESSOR_UNKNOWN,
  912.     CPU_SVME_FLAGS, 1, 0 },
  913.   { STRING_COMMA_LEN (".svme"), PROCESSOR_UNKNOWN,
  914.     CPU_SVME_FLAGS, 0, 0 },
  915.   { STRING_COMMA_LEN (".sse4a"), PROCESSOR_UNKNOWN,
  916.     CPU_SSE4A_FLAGS, 0, 0 },
  917.   { STRING_COMMA_LEN (".abm"), PROCESSOR_UNKNOWN,
  918.     CPU_ABM_FLAGS, 0, 0 },
  919.   { STRING_COMMA_LEN (".bmi"), PROCESSOR_UNKNOWN,
  920.     CPU_BMI_FLAGS, 0, 0 },
  921.   { STRING_COMMA_LEN (".tbm"), PROCESSOR_UNKNOWN,
  922.     CPU_TBM_FLAGS, 0, 0 },
  923.   { STRING_COMMA_LEN (".adx"), PROCESSOR_UNKNOWN,
  924.     CPU_ADX_FLAGS, 0, 0 },
  925.   { STRING_COMMA_LEN (".rdseed"), PROCESSOR_UNKNOWN,
  926.     CPU_RDSEED_FLAGS, 0, 0 },
  927.   { STRING_COMMA_LEN (".prfchw"), PROCESSOR_UNKNOWN,
  928.     CPU_PRFCHW_FLAGS, 0, 0 },
  929.   { STRING_COMMA_LEN (".smap"), PROCESSOR_UNKNOWN,
  930.     CPU_SMAP_FLAGS, 0, 0 },
  931.   { STRING_COMMA_LEN (".mpx"), PROCESSOR_UNKNOWN,
  932.     CPU_MPX_FLAGS, 0, 0 },
  933.   { STRING_COMMA_LEN (".sha"), PROCESSOR_UNKNOWN,
  934.     CPU_SHA_FLAGS, 0, 0 },
  935.   { STRING_COMMA_LEN (".clflushopt"), PROCESSOR_UNKNOWN,
  936.     CPU_CLFLUSHOPT_FLAGS, 0, 0 },
  937.   { STRING_COMMA_LEN (".prefetchwt1"), PROCESSOR_UNKNOWN,
  938.     CPU_PREFETCHWT1_FLAGS, 0, 0 },
  939.   { STRING_COMMA_LEN (".se1"), PROCESSOR_UNKNOWN,
  940.     CPU_SE1_FLAGS, 0, 0 },
  941.   { STRING_COMMA_LEN (".clwb"), PROCESSOR_UNKNOWN,
  942.     CPU_CLWB_FLAGS, 0, 0 },
  943.   { STRING_COMMA_LEN (".pcommit"), PROCESSOR_UNKNOWN,
  944.     CPU_PCOMMIT_FLAGS, 0, 0 },
  945.   { STRING_COMMA_LEN (".avx512ifma"), PROCESSOR_UNKNOWN,
  946.     CPU_AVX512IFMA_FLAGS, 0, 0 },
  947.   { STRING_COMMA_LEN (".avx512vbmi"), PROCESSOR_UNKNOWN,
  948.     CPU_AVX512VBMI_FLAGS, 0, 0 },
  949.   { STRING_COMMA_LEN (".clzero"), PROCESSOR_UNKNOWN,
  950.     CPU_CLZERO_FLAGS, 0, 0 },
  951.   { STRING_COMMA_LEN (".mwaitx"), PROCESSOR_UNKNOWN,
  952.     CPU_MWAITX_FLAGS, 0, 0 },
  953.   { STRING_COMMA_LEN (".ospke"), PROCESSOR_UNKNOWN,
  954.     CPU_OSPKE_FLAGS, 0, 0 },
  955. };
  956.  
  957. #ifdef I386COFF
  958. /* Like s_lcomm_internal in gas/read.c but the alignment string
  959.    is allowed to be optional.  */
  960.  
  961. static symbolS *
  962. pe_lcomm_internal (int needs_align, symbolS *symbolP, addressT size)
  963. {
  964.   addressT align = 0;
  965.  
  966.   SKIP_WHITESPACE ();
  967.  
  968.   if (needs_align
  969.       && *input_line_pointer == ',')
  970.     {
  971.       align = parse_align (needs_align - 1);
  972.  
  973.       if (align == (addressT) -1)
  974.         return NULL;
  975.     }
  976.   else
  977.     {
  978.       if (size >= 8)
  979.         align = 3;
  980.       else if (size >= 4)
  981.         align = 2;
  982.       else if (size >= 2)
  983.         align = 1;
  984.       else
  985.         align = 0;
  986.     }
  987.  
  988.   bss_alloc (symbolP, size, align);
  989.   return symbolP;
  990. }
  991.  
  992. static void
  993. pe_lcomm (int needs_align)
  994. {
  995.   s_comm_internal (needs_align * 2, pe_lcomm_internal);
  996. }
  997. #endif
  998.  
  999. const pseudo_typeS md_pseudo_table[] =
  1000. {
  1001. #if !defined(OBJ_AOUT) && !defined(USE_ALIGN_PTWO)
  1002.   {"align", s_align_bytes, 0},
  1003. #else
  1004.   {"align", s_align_ptwo, 0},
  1005. #endif
  1006.   {"arch", set_cpu_arch, 0},
  1007. #ifndef I386COFF
  1008.   {"bss", s_bss, 0},
  1009. #else
  1010.   {"lcomm", pe_lcomm, 1},
  1011. #endif
  1012.   {"ffloat", float_cons, 'f'},
  1013.   {"dfloat", float_cons, 'd'},
  1014.   {"tfloat", float_cons, 'x'},
  1015.   {"value", cons, 2},
  1016.   {"slong", signed_cons, 4},
  1017.   {"noopt", s_ignore, 0},
  1018.   {"optim", s_ignore, 0},
  1019.   {"code16gcc", set_16bit_gcc_code_flag, CODE_16BIT},
  1020.   {"code16", set_code_flag, CODE_16BIT},
  1021.   {"code32", set_code_flag, CODE_32BIT},
  1022.   {"code64", set_code_flag, CODE_64BIT},
  1023.   {"intel_syntax", set_intel_syntax, 1},
  1024.   {"att_syntax", set_intel_syntax, 0},
  1025.   {"intel_mnemonic", set_intel_mnemonic, 1},
  1026.   {"att_mnemonic", set_intel_mnemonic, 0},
  1027.   {"allow_index_reg", set_allow_index_reg, 1},
  1028.   {"disallow_index_reg", set_allow_index_reg, 0},
  1029.   {"sse_check", set_check, 0},
  1030.   {"operand_check", set_check, 1},
  1031. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  1032.   {"largecomm", handle_large_common, 0},
  1033. #else
  1034.   {"file", (void (*) (int)) dwarf2_directive_file, 0},
  1035.   {"loc", dwarf2_directive_loc, 0},
  1036.   {"loc_mark_labels", dwarf2_directive_loc_mark_labels, 0},
  1037. #endif
  1038. #ifdef TE_PE
  1039.   {"secrel32", pe_directive_secrel, 0},
  1040. #endif
  1041.   {0, 0, 0}
  1042. };
  1043.  
  1044. /* For interface with expression ().  */
  1045. extern char *input_line_pointer;
  1046.  
  1047. /* Hash table for instruction mnemonic lookup.  */
  1048. static struct hash_control *op_hash;
  1049.  
  1050. /* Hash table for register lookup.  */
  1051. static struct hash_control *reg_hash;
  1052. void
  1053. i386_align_code (fragS *fragP, int count)
  1054. {
  1055.   /* Various efficient no-op patterns for aligning code labels.
  1056.      Note: Don't try to assemble the instructions in the comments.
  1057.      0L and 0w are not legal.  */
  1058.   static const char f32_1[] =
  1059.     {0x90};                                     /* nop                  */
  1060.   static const char f32_2[] =
  1061.     {0x66,0x90};                                /* xchg %ax,%ax */
  1062.   static const char f32_3[] =
  1063.     {0x8d,0x76,0x00};                           /* leal 0(%esi),%esi    */
  1064.   static const char f32_4[] =
  1065.     {0x8d,0x74,0x26,0x00};                      /* leal 0(%esi,1),%esi  */
  1066.   static const char f32_5[] =
  1067.     {0x90,                                      /* nop                  */
  1068.      0x8d,0x74,0x26,0x00};                      /* leal 0(%esi,1),%esi  */
  1069.   static const char f32_6[] =
  1070.     {0x8d,0xb6,0x00,0x00,0x00,0x00};            /* leal 0L(%esi),%esi   */
  1071.   static const char f32_7[] =
  1072.     {0x8d,0xb4,0x26,0x00,0x00,0x00,0x00};       /* leal 0L(%esi,1),%esi */
  1073.   static const char f32_8[] =
  1074.     {0x90,                                      /* nop                  */
  1075.      0x8d,0xb4,0x26,0x00,0x00,0x00,0x00};       /* leal 0L(%esi,1),%esi */
  1076.   static const char f32_9[] =
  1077.     {0x89,0xf6,                                 /* movl %esi,%esi       */
  1078.      0x8d,0xbc,0x27,0x00,0x00,0x00,0x00};       /* leal 0L(%edi,1),%edi */
  1079.   static const char f32_10[] =
  1080.     {0x8d,0x76,0x00,                            /* leal 0(%esi),%esi    */
  1081.      0x8d,0xbc,0x27,0x00,0x00,0x00,0x00};       /* leal 0L(%edi,1),%edi */
  1082.   static const char f32_11[] =
  1083.     {0x8d,0x74,0x26,0x00,                       /* leal 0(%esi,1),%esi  */
  1084.      0x8d,0xbc,0x27,0x00,0x00,0x00,0x00};       /* leal 0L(%edi,1),%edi */
  1085.   static const char f32_12[] =
  1086.     {0x8d,0xb6,0x00,0x00,0x00,0x00,             /* leal 0L(%esi),%esi   */
  1087.      0x8d,0xbf,0x00,0x00,0x00,0x00};            /* leal 0L(%edi),%edi   */
  1088.   static const char f32_13[] =
  1089.     {0x8d,0xb6,0x00,0x00,0x00,0x00,             /* leal 0L(%esi),%esi   */
  1090.      0x8d,0xbc,0x27,0x00,0x00,0x00,0x00};       /* leal 0L(%edi,1),%edi */
  1091.   static const char f32_14[] =
  1092.     {0x8d,0xb4,0x26,0x00,0x00,0x00,0x00,        /* leal 0L(%esi,1),%esi */
  1093.      0x8d,0xbc,0x27,0x00,0x00,0x00,0x00};       /* leal 0L(%edi,1),%edi */
  1094.   static const char f16_3[] =
  1095.     {0x8d,0x74,0x00};                           /* lea 0(%esi),%esi     */
  1096.   static const char f16_4[] =
  1097.     {0x8d,0xb4,0x00,0x00};                      /* lea 0w(%si),%si      */
  1098.   static const char f16_5[] =
  1099.     {0x90,                                      /* nop                  */
  1100.      0x8d,0xb4,0x00,0x00};                      /* lea 0w(%si),%si      */
  1101.   static const char f16_6[] =
  1102.     {0x89,0xf6,                                 /* mov %si,%si          */
  1103.      0x8d,0xbd,0x00,0x00};                      /* lea 0w(%di),%di      */
  1104.   static const char f16_7[] =
  1105.     {0x8d,0x74,0x00,                            /* lea 0(%si),%si       */
  1106.      0x8d,0xbd,0x00,0x00};                      /* lea 0w(%di),%di      */
  1107.   static const char f16_8[] =
  1108.     {0x8d,0xb4,0x00,0x00,                       /* lea 0w(%si),%si      */
  1109.      0x8d,0xbd,0x00,0x00};                      /* lea 0w(%di),%di      */
  1110.   static const char jump_31[] =
  1111.     {0xeb,0x1d,0x90,0x90,0x90,0x90,0x90,        /* jmp .+31; lotsa nops */
  1112.      0x90,0x90,0x90,0x90,0x90,0x90,0x90,0x90,
  1113.      0x90,0x90,0x90,0x90,0x90,0x90,0x90,0x90,
  1114.      0x90,0x90,0x90,0x90,0x90,0x90,0x90,0x90};
  1115.   static const char *const f32_patt[] = {
  1116.     f32_1, f32_2, f32_3, f32_4, f32_5, f32_6, f32_7, f32_8,
  1117.     f32_9, f32_10, f32_11, f32_12, f32_13, f32_14
  1118.   };
  1119.   static const char *const f16_patt[] = {
  1120.     f32_1, f32_2, f16_3, f16_4, f16_5, f16_6, f16_7, f16_8
  1121.   };
  1122.   /* nopl (%[re]ax) */
  1123.   static const char alt_3[] =
  1124.     {0x0f,0x1f,0x00};
  1125.   /* nopl 0(%[re]ax) */
  1126.   static const char alt_4[] =
  1127.     {0x0f,0x1f,0x40,0x00};
  1128.   /* nopl 0(%[re]ax,%[re]ax,1) */
  1129.   static const char alt_5[] =
  1130.     {0x0f,0x1f,0x44,0x00,0x00};
  1131.   /* nopw 0(%[re]ax,%[re]ax,1) */
  1132.   static const char alt_6[] =
  1133.     {0x66,0x0f,0x1f,0x44,0x00,0x00};
  1134.   /* nopl 0L(%[re]ax) */
  1135.   static const char alt_7[] =
  1136.     {0x0f,0x1f,0x80,0x00,0x00,0x00,0x00};
  1137.   /* nopl 0L(%[re]ax,%[re]ax,1) */
  1138.   static const char alt_8[] =
  1139.     {0x0f,0x1f,0x84,0x00,0x00,0x00,0x00,0x00};
  1140.   /* nopw 0L(%[re]ax,%[re]ax,1) */
  1141.   static const char alt_9[] =
  1142.     {0x66,0x0f,0x1f,0x84,0x00,0x00,0x00,0x00,0x00};
  1143.   /* nopw %cs:0L(%[re]ax,%[re]ax,1) */
  1144.   static const char alt_10[] =
  1145.     {0x66,0x2e,0x0f,0x1f,0x84,0x00,0x00,0x00,0x00,0x00};
  1146.   static const char *const alt_patt[] = {
  1147.     f32_1, f32_2, alt_3, alt_4, alt_5, alt_6, alt_7, alt_8,
  1148.     alt_9, alt_10
  1149.   };
  1150.  
  1151.   /* Only align for a positive boundary no larger than MAX_MEM_FOR_RS_ALIGN_CODE. */
  1152.   if (count <= 0 || count > MAX_MEM_FOR_RS_ALIGN_CODE)
  1153.     return;
  1154.  
  1155.   /* We need to decide which NOP sequence to use for 32bit and
  1156.      64bit. When -mtune= is used:
  1157.  
  1158.      1. For PROCESSOR_I386, PROCESSOR_I486, PROCESSOR_PENTIUM and
  1159.      PROCESSOR_GENERIC32, f32_patt will be used.
  1160.      2. For the rest, alt_patt will be used.
  1161.  
  1162.      When -mtune= isn't used, alt_patt will be used if
  1163.      cpu_arch_isa_flags has CpuNop.  Otherwise, f32_patt will
  1164.      be used.
  1165.  
  1166.      When -march= or .arch is used, we can't use anything beyond
  1167.      cpu_arch_isa_flags.   */
  1168.  
  1169.   if (flag_code == CODE_16BIT)
  1170.     {
  1171.       if (count > 8)
  1172.         {
  1173.           memcpy (fragP->fr_literal + fragP->fr_fix,
  1174.                   jump_31, count);
  1175.           /* Adjust jump offset.  */
  1176.           fragP->fr_literal[fragP->fr_fix + 1] = count - 2;
  1177.         }
  1178.       else
  1179.         memcpy (fragP->fr_literal + fragP->fr_fix,
  1180.                 f16_patt[count - 1], count);
  1181.     }
  1182.   else
  1183.     {
  1184.       const char *const *patt = NULL;
  1185.  
  1186.       if (fragP->tc_frag_data.isa == PROCESSOR_UNKNOWN)
  1187.         {
  1188.           /* PROCESSOR_UNKNOWN means that all ISAs may be used.  */
  1189.           switch (cpu_arch_tune)
  1190.             {
  1191.             case PROCESSOR_UNKNOWN:
  1192.               /* We use cpu_arch_isa_flags to check if we SHOULD
  1193.                  optimize with nops.  */
  1194.               if (fragP->tc_frag_data.isa_flags.bitfield.cpunop)
  1195.                 patt = alt_patt;
  1196.               else
  1197.                 patt = f32_patt;
  1198.               break;
  1199.             case PROCESSOR_PENTIUM4:
  1200.             case PROCESSOR_NOCONA:
  1201.             case PROCESSOR_CORE:
  1202.             case PROCESSOR_CORE2:
  1203.             case PROCESSOR_COREI7:
  1204.             case PROCESSOR_L1OM:
  1205.             case PROCESSOR_K1OM:
  1206.             case PROCESSOR_GENERIC64:
  1207.             case PROCESSOR_K6:
  1208.             case PROCESSOR_ATHLON:
  1209.             case PROCESSOR_K8:
  1210.             case PROCESSOR_AMDFAM10:
  1211.             case PROCESSOR_BD:
  1212.             case PROCESSOR_ZNVER:
  1213.             case PROCESSOR_BT:
  1214.               patt = alt_patt;
  1215.               break;
  1216.             case PROCESSOR_I386:
  1217.             case PROCESSOR_I486:
  1218.             case PROCESSOR_PENTIUM:
  1219.             case PROCESSOR_PENTIUMPRO:
  1220.             case PROCESSOR_IAMCU:
  1221.             case PROCESSOR_GENERIC32:
  1222.               patt = f32_patt;
  1223.               break;
  1224.             }
  1225.         }
  1226.       else
  1227.         {
  1228.           switch (fragP->tc_frag_data.tune)
  1229.             {
  1230.             case PROCESSOR_UNKNOWN:
  1231.               /* When cpu_arch_isa is set, cpu_arch_tune shouldn't be
  1232.                  PROCESSOR_UNKNOWN.  */
  1233.               abort ();
  1234.               break;
  1235.  
  1236.             case PROCESSOR_I386:
  1237.             case PROCESSOR_I486:
  1238.             case PROCESSOR_PENTIUM:
  1239.             case PROCESSOR_IAMCU:
  1240.             case PROCESSOR_K6:
  1241.             case PROCESSOR_ATHLON:
  1242.             case PROCESSOR_K8:
  1243.             case PROCESSOR_AMDFAM10:
  1244.             case PROCESSOR_BD:
  1245.             case PROCESSOR_ZNVER:
  1246.             case PROCESSOR_BT:
  1247.             case PROCESSOR_GENERIC32:
  1248.               /* We use cpu_arch_isa_flags to check if we CAN optimize
  1249.                  with nops.  */
  1250.               if (fragP->tc_frag_data.isa_flags.bitfield.cpunop)
  1251.                 patt = alt_patt;
  1252.               else
  1253.                 patt = f32_patt;
  1254.               break;
  1255.             case PROCESSOR_PENTIUMPRO:
  1256.             case PROCESSOR_PENTIUM4:
  1257.             case PROCESSOR_NOCONA:
  1258.             case PROCESSOR_CORE:
  1259.             case PROCESSOR_CORE2:
  1260.             case PROCESSOR_COREI7:
  1261.             case PROCESSOR_L1OM:
  1262.             case PROCESSOR_K1OM:
  1263.               if (fragP->tc_frag_data.isa_flags.bitfield.cpunop)
  1264.                 patt = alt_patt;
  1265.               else
  1266.                 patt = f32_patt;
  1267.               break;
  1268.             case PROCESSOR_GENERIC64:
  1269.               patt = alt_patt;
  1270.               break;
  1271.             }
  1272.         }
  1273.  
  1274.       if (patt == f32_patt)
  1275.         {
  1276.           /* If the padding is less than 15 bytes, we use the normal
  1277.              ones.  Otherwise, we use a jump instruction and adjust
  1278.              its offset.   */
  1279.           int limit;
  1280.  
  1281.           /* For 64bit, the limit is 3 bytes.  */
  1282.           if (flag_code == CODE_64BIT
  1283.               && fragP->tc_frag_data.isa_flags.bitfield.cpulm)
  1284.             limit = 3;
  1285.           else
  1286.             limit = 15;
  1287.           if (count < limit)
  1288.             memcpy (fragP->fr_literal + fragP->fr_fix,
  1289.                     patt[count - 1], count);
  1290.           else
  1291.             {
  1292.               memcpy (fragP->fr_literal + fragP->fr_fix,
  1293.                       jump_31, count);
  1294.               /* Adjust jump offset.  */
  1295.               fragP->fr_literal[fragP->fr_fix + 1] = count - 2;
  1296.             }
  1297.         }
  1298.       else
  1299.         {
  1300.           /* The longest nop pattern here is 10 bytes.  If the
  1301.              padding is greater than 10 bytes and we don't use a jump,
  1302.              we have to break it into smaller pieces.  */
  1303.           int padding = count;
  1304.           while (padding > 10)
  1305.             {
  1306.               padding -= 10;
  1307.               memcpy (fragP->fr_literal + fragP->fr_fix + padding,
  1308.                       patt [9], 10);
  1309.             }
  1310.  
  1311.           if (padding)
  1312.             memcpy (fragP->fr_literal + fragP->fr_fix,
  1313.                     patt [padding - 1], padding);
  1314.         }
  1315.     }
  1316.   fragP->fr_var = count;
  1317. }
  1318.  
  1319. static INLINE int
  1320. operand_type_all_zero (const union i386_operand_type *x)
  1321. {
  1322.   switch (ARRAY_SIZE(x->array))
  1323.     {
  1324.     case 3:
  1325.       if (x->array[2])
  1326.         return 0;
  1327.     case 2:
  1328.       if (x->array[1])
  1329.         return 0;
  1330.     case 1:
  1331.       return !x->array[0];
  1332.     default:
  1333.       abort ();
  1334.     }
  1335. }
  1336.  
  1337. static INLINE void
  1338. operand_type_set (union i386_operand_type *x, unsigned int v)
  1339. {
  1340.   switch (ARRAY_SIZE(x->array))
  1341.     {
  1342.     case 3:
  1343.       x->array[2] = v;
  1344.     case 2:
  1345.       x->array[1] = v;
  1346.     case 1:
  1347.       x->array[0] = v;
  1348.       break;
  1349.     default:
  1350.       abort ();
  1351.     }
  1352. }
  1353.  
  1354. static INLINE int
  1355. operand_type_equal (const union i386_operand_type *x,
  1356.                     const union i386_operand_type *y)
  1357. {
  1358.   switch (ARRAY_SIZE(x->array))
  1359.     {
  1360.     case 3:
  1361.       if (x->array[2] != y->array[2])
  1362.         return 0;
  1363.     case 2:
  1364.       if (x->array[1] != y->array[1])
  1365.         return 0;
  1366.     case 1:
  1367.       return x->array[0] == y->array[0];
  1368.       break;
  1369.     default:
  1370.       abort ();
  1371.     }
  1372. }
  1373.  
  1374. static INLINE int
  1375. cpu_flags_all_zero (const union i386_cpu_flags *x)
  1376. {
  1377.   switch (ARRAY_SIZE(x->array))
  1378.     {
  1379.     case 3:
  1380.       if (x->array[2])
  1381.         return 0;
  1382.     case 2:
  1383.       if (x->array[1])
  1384.         return 0;
  1385.     case 1:
  1386.       return !x->array[0];
  1387.     default:
  1388.       abort ();
  1389.     }
  1390. }
  1391.  
  1392. static INLINE int
  1393. cpu_flags_equal (const union i386_cpu_flags *x,
  1394.                  const union i386_cpu_flags *y)
  1395. {
  1396.   switch (ARRAY_SIZE(x->array))
  1397.     {
  1398.     case 3:
  1399.       if (x->array[2] != y->array[2])
  1400.         return 0;
  1401.     case 2:
  1402.       if (x->array[1] != y->array[1])
  1403.         return 0;
  1404.     case 1:
  1405.       return x->array[0] == y->array[0];
  1406.       break;
  1407.     default:
  1408.       abort ();
  1409.     }
  1410. }
  1411.  
  1412. static INLINE int
  1413. cpu_flags_check_cpu64 (i386_cpu_flags f)
  1414. {
  1415.   return !((flag_code == CODE_64BIT && f.bitfield.cpuno64)
  1416.            || (flag_code != CODE_64BIT && f.bitfield.cpu64));
  1417. }
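/* In other words: a template flagged Cpu64 only passes when assembling
   64-bit code, a template flagged CpuNo64 only passes outside 64-bit
   code, and templates with neither flag pass in every mode.  */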
  1418.  
  1419. static INLINE i386_cpu_flags
  1420. cpu_flags_and (i386_cpu_flags x, i386_cpu_flags y)
  1421. {
  1422.   switch (ARRAY_SIZE (x.array))
  1423.     {
  1424.     case 3:
  1425.       x.array [2] &= y.array [2];
  1426.     case 2:
  1427.       x.array [1] &= y.array [1];
  1428.     case 1:
  1429.       x.array [0] &= y.array [0];
  1430.       break;
  1431.     default:
  1432.       abort ();
  1433.     }
  1434.   return x;
  1435. }
  1436.  
  1437. static INLINE i386_cpu_flags
  1438. cpu_flags_or (i386_cpu_flags x, i386_cpu_flags y)
  1439. {
  1440.   switch (ARRAY_SIZE (x.array))
  1441.     {
  1442.     case 3:
  1443.       x.array [2] |= y.array [2];
  1444.     case 2:
  1445.       x.array [1] |= y.array [1];
  1446.     case 1:
  1447.       x.array [0] |= y.array [0];
  1448.       break;
  1449.     default:
  1450.       abort ();
  1451.     }
  1452.   return x;
  1453. }
  1454.  
  1455. static INLINE i386_cpu_flags
  1456. cpu_flags_and_not (i386_cpu_flags x, i386_cpu_flags y)
  1457. {
  1458.   switch (ARRAY_SIZE (x.array))
  1459.     {
  1460.     case 3:
  1461.       x.array [2] &= ~y.array [2];
  1462.     case 2:
  1463.       x.array [1] &= ~y.array [1];
  1464.     case 1:
  1465.       x.array [0] &= ~y.array [0];
  1466.       break;
  1467.     default:
  1468.       abort ();
  1469.     }
  1470.   return x;
  1471. }
  1472.  
  1473. static int
  1474. valid_iamcu_cpu_flags (const i386_cpu_flags *flags)
  1475. {
  1476.   if (cpu_arch_isa == PROCESSOR_IAMCU)
  1477.     {
  1478.       static const i386_cpu_flags iamcu_flags = CPU_IAMCU_COMPAT_FLAGS;
  1479.       i386_cpu_flags compat_flags;
  1480.       compat_flags = cpu_flags_and_not (*flags, iamcu_flags);
  1481.       return cpu_flags_all_zero (&compat_flags);
  1482.     }
  1483.   else
  1484.     return 1;
  1485. }
  1486.  
  1487. #define CPU_FLAGS_ARCH_MATCH            0x1
  1488. #define CPU_FLAGS_64BIT_MATCH           0x2
  1489. #define CPU_FLAGS_AES_MATCH             0x4
  1490. #define CPU_FLAGS_PCLMUL_MATCH          0x8
  1491. #define CPU_FLAGS_AVX_MATCH            0x10
  1492.  
  1493. #define CPU_FLAGS_32BIT_MATCH \
  1494.   (CPU_FLAGS_ARCH_MATCH | CPU_FLAGS_AES_MATCH \
  1495.    | CPU_FLAGS_PCLMUL_MATCH | CPU_FLAGS_AVX_MATCH)
  1496. #define CPU_FLAGS_PERFECT_MATCH \
  1497.   (CPU_FLAGS_32BIT_MATCH | CPU_FLAGS_64BIT_MATCH)
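
        /* Worked example of the composition above: CPU_FLAGS_32BIT_MATCH is
           0x1 | 0x4 | 0x8 | 0x10 == 0x1d, and CPU_FLAGS_PERFECT_MATCH adds the
           64-bit bit for 0x1f.  An instruction with no special CPU requirements,
           assembled in a mode it supports, therefore yields 0x1f from
           cpu_flags_match below.  */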
  1498.  
  1499. /* Return CPU flags match bits. */
  1500.  
  1501. static int
  1502. cpu_flags_match (const insn_template *t)
  1503. {
  1504.   i386_cpu_flags x = t->cpu_flags;
  1505.   int match = cpu_flags_check_cpu64 (x) ? CPU_FLAGS_64BIT_MATCH : 0;
  1506.  
  1507.   x.bitfield.cpu64 = 0;
  1508.   x.bitfield.cpuno64 = 0;
  1509.  
  1510.   if (cpu_flags_all_zero (&x))
  1511.     {
  1512.       /* This instruction is available on all archs.  */
  1513.       match |= CPU_FLAGS_32BIT_MATCH;
  1514.     }
  1515.   else
  1516.     {
  1517.       /* This instruction is available only on some archs.  */
  1518.       i386_cpu_flags cpu = cpu_arch_flags;
  1519.  
  1520.       cpu.bitfield.cpu64 = 0;
  1521.       cpu.bitfield.cpuno64 = 0;
  1522.       cpu = cpu_flags_and (x, cpu);
  1523.       if (!cpu_flags_all_zero (&cpu))
  1524.         {
  1525.           if (x.bitfield.cpuavx)
  1526.             {
  1527.               /* We only need to check AES/PCLMUL/SSE2AVX with AVX.  */
  1528.               if (cpu.bitfield.cpuavx)
  1529.                 {
  1530.                   /* Check SSE2AVX.  */
  1531.                       if (!t->opcode_modifier.sse2avx || sse2avx)
  1532.                     {
  1533.                       match |= (CPU_FLAGS_ARCH_MATCH
  1534.                                 | CPU_FLAGS_AVX_MATCH);
  1535.                       /* Check AES.  */
  1536.                       if (!x.bitfield.cpuaes || cpu.bitfield.cpuaes)
  1537.                         match |= CPU_FLAGS_AES_MATCH;
  1538.                       /* Check PCLMUL.  */
  1539.                       if (!x.bitfield.cpupclmul
  1540.                           || cpu.bitfield.cpupclmul)
  1541.                         match |= CPU_FLAGS_PCLMUL_MATCH;
  1542.                     }
  1543.                 }
  1544.               else
  1545.                 match |= CPU_FLAGS_ARCH_MATCH;
  1546.             }
  1547.           else
  1548.             match |= CPU_FLAGS_32BIT_MATCH;
  1549.         }
  1550.     }
  1551.   return match;
  1552. }
  1553.  
  1554. static INLINE i386_operand_type
  1555. operand_type_and (i386_operand_type x, i386_operand_type y)
  1556. {
  1557.   switch (ARRAY_SIZE (x.array))
  1558.     {
  1559.     case 3:
  1560.       x.array [2] &= y.array [2];
  1561.     case 2:
  1562.       x.array [1] &= y.array [1];
  1563.     case 1:
  1564.       x.array [0] &= y.array [0];
  1565.       break;
  1566.     default:
  1567.       abort ();
  1568.     }
  1569.   return x;
  1570. }
  1571.  
  1572. static INLINE i386_operand_type
  1573. operand_type_or (i386_operand_type x, i386_operand_type y)
  1574. {
  1575.   switch (ARRAY_SIZE (x.array))
  1576.     {
  1577.     case 3:
  1578.       x.array [2] |= y.array [2];
  1579.     case 2:
  1580.       x.array [1] |= y.array [1];
  1581.     case 1:
  1582.       x.array [0] |= y.array [0];
  1583.       break;
  1584.     default:
  1585.       abort ();
  1586.     }
  1587.   return x;
  1588. }
  1589.  
  1590. static INLINE i386_operand_type
  1591. operand_type_xor (i386_operand_type x, i386_operand_type y)
  1592. {
  1593.   switch (ARRAY_SIZE (x.array))
  1594.     {
  1595.     case 3:
  1596.       x.array [2] ^= y.array [2];
  1597.     case 2:
  1598.       x.array [1] ^= y.array [1];
  1599.     case 1:
  1600.       x.array [0] ^= y.array [0];
  1601.       break;
  1602.     default:
  1603.       abort ();
  1604.     }
  1605.   return x;
  1606. }
  1607.  
  1608. static const i386_operand_type acc32 = OPERAND_TYPE_ACC32;
  1609. static const i386_operand_type acc64 = OPERAND_TYPE_ACC64;
  1610. static const i386_operand_type control = OPERAND_TYPE_CONTROL;
  1611. static const i386_operand_type inoutportreg
  1612.   = OPERAND_TYPE_INOUTPORTREG;
  1613. static const i386_operand_type reg16_inoutportreg
  1614.   = OPERAND_TYPE_REG16_INOUTPORTREG;
  1615. static const i386_operand_type disp16 = OPERAND_TYPE_DISP16;
  1616. static const i386_operand_type disp32 = OPERAND_TYPE_DISP32;
  1617. static const i386_operand_type disp32s = OPERAND_TYPE_DISP32S;
  1618. static const i386_operand_type disp16_32 = OPERAND_TYPE_DISP16_32;
  1619. static const i386_operand_type anydisp
  1620.   = OPERAND_TYPE_ANYDISP;
  1621. static const i386_operand_type regxmm = OPERAND_TYPE_REGXMM;
  1622. static const i386_operand_type regymm = OPERAND_TYPE_REGYMM;
  1623. static const i386_operand_type regzmm = OPERAND_TYPE_REGZMM;
  1624. static const i386_operand_type regmask = OPERAND_TYPE_REGMASK;
  1625. static const i386_operand_type imm8 = OPERAND_TYPE_IMM8;
  1626. static const i386_operand_type imm8s = OPERAND_TYPE_IMM8S;
  1627. static const i386_operand_type imm16 = OPERAND_TYPE_IMM16;
  1628. static const i386_operand_type imm32 = OPERAND_TYPE_IMM32;
  1629. static const i386_operand_type imm32s = OPERAND_TYPE_IMM32S;
  1630. static const i386_operand_type imm64 = OPERAND_TYPE_IMM64;
  1631. static const i386_operand_type imm16_32 = OPERAND_TYPE_IMM16_32;
  1632. static const i386_operand_type imm16_32s = OPERAND_TYPE_IMM16_32S;
  1633. static const i386_operand_type imm16_32_32s = OPERAND_TYPE_IMM16_32_32S;
  1634. static const i386_operand_type vec_imm4 = OPERAND_TYPE_VEC_IMM4;
  1635.  
  1636. enum operand_type
  1637. {
  1638.   reg,
  1639.   imm,
  1640.   disp,
  1641.   anymem
  1642. };
  1643.  
  1644. static INLINE int
  1645. operand_type_check (i386_operand_type t, enum operand_type c)
  1646. {
  1647.   switch (c)
  1648.     {
  1649.     case reg:
  1650.       return (t.bitfield.reg8
  1651.               || t.bitfield.reg16
  1652.               || t.bitfield.reg32
  1653.               || t.bitfield.reg64);
  1654.  
  1655.     case imm:
  1656.       return (t.bitfield.imm8
  1657.               || t.bitfield.imm8s
  1658.               || t.bitfield.imm16
  1659.               || t.bitfield.imm32
  1660.               || t.bitfield.imm32s
  1661.               || t.bitfield.imm64);
  1662.  
  1663.     case disp:
  1664.       return (t.bitfield.disp8
  1665.               || t.bitfield.disp16
  1666.               || t.bitfield.disp32
  1667.               || t.bitfield.disp32s
  1668.               || t.bitfield.disp64);
  1669.  
  1670.     case anymem:
  1671.       return (t.bitfield.disp8
  1672.               || t.bitfield.disp16
  1673.               || t.bitfield.disp32
  1674.               || t.bitfield.disp32s
  1675.               || t.bitfield.disp64
  1676.               || t.bitfield.baseindex);
  1677.  
  1678.     default:
  1679.       abort ();
  1680.     }
  1681.  
  1682.   return 0;
  1683. }
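
        /* For example, a base/index memory operand such as (%eax,%ebx,2) with
           no displacement still satisfies operand_type_check (t, anymem),
           because anymem includes BaseIndex, while operand_type_check (t, disp)
           is 0 for the same operand.  */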
  1684.  
  1685. /* Return 1 if there is no conflict in 8bit/16bit/32bit/64bit on
  1686.    operand J for instruction template T.  */
  1687.  
  1688. static INLINE int
  1689. match_reg_size (const insn_template *t, unsigned int j)
  1690. {
  1691.   return !((i.types[j].bitfield.byte
  1692.             && !t->operand_types[j].bitfield.byte)
  1693.            || (i.types[j].bitfield.word
  1694.                && !t->operand_types[j].bitfield.word)
  1695.            || (i.types[j].bitfield.dword
  1696.                && !t->operand_types[j].bitfield.dword)
  1697.            || (i.types[j].bitfield.qword
  1698.                && !t->operand_types[j].bitfield.qword));
  1699. }
  1700.  
  1701. /* Return 1 if there is no conflict in any size on operand J for
  1702.    instruction template T.  */
  1703.  
  1704. static INLINE int
  1705. match_mem_size (const insn_template *t, unsigned int j)
  1706. {
  1707.   return (match_reg_size (t, j)
  1708.           && !((i.types[j].bitfield.unspecified
  1709.                 && !i.broadcast
  1710.                 && !t->operand_types[j].bitfield.unspecified)
  1711.                || (i.types[j].bitfield.fword
  1712.                    && !t->operand_types[j].bitfield.fword)
  1713.                || (i.types[j].bitfield.tbyte
  1714.                    && !t->operand_types[j].bitfield.tbyte)
  1715.                || (i.types[j].bitfield.xmmword
  1716.                    && !t->operand_types[j].bitfield.xmmword)
  1717.                || (i.types[j].bitfield.ymmword
  1718.                    && !t->operand_types[j].bitfield.ymmword)
  1719.                || (i.types[j].bitfield.zmmword
  1720.                    && !t->operand_types[j].bitfield.zmmword)));
  1721. }
  1722.  
  1723. /* Return 1 if there is no size conflict on any operands for
  1724.    instruction template T.  */
  1725.  
  1726. static INLINE int
  1727. operand_size_match (const insn_template *t)
  1728. {
  1729.   unsigned int j;
  1730.   int match = 1;
  1731.  
  1732.   /* Don't check jump instructions.  */
  1733.   if (t->opcode_modifier.jump
  1734.       || t->opcode_modifier.jumpbyte
  1735.       || t->opcode_modifier.jumpdword
  1736.       || t->opcode_modifier.jumpintersegment)
  1737.     return match;
  1738.  
  1739.   /* Check memory and accumulator operand size.  */
  1740.   for (j = 0; j < i.operands; j++)
  1741.     {
  1742.       if (t->operand_types[j].bitfield.anysize)
  1743.         continue;
  1744.  
  1745.       if (t->operand_types[j].bitfield.acc && !match_reg_size (t, j))
  1746.         {
  1747.           match = 0;
  1748.           break;
  1749.         }
  1750.  
  1751.       if (i.types[j].bitfield.mem && !match_mem_size (t, j))
  1752.         {
  1753.           match = 0;
  1754.           break;
  1755.         }
  1756.     }
  1757.  
  1758.   if (match)
  1759.     return match;
  1760.   else if (!t->opcode_modifier.d && !t->opcode_modifier.floatd)
  1761.     {
  1762. mismatch:
  1763.       i.error = operand_size_mismatch;
  1764.       return 0;
  1765.     }
  1766.  
  1767.   /* Check reverse.  */
  1768.   gas_assert (i.operands == 2);
  1769.  
  1770.   match = 1;
  1771.   for (j = 0; j < 2; j++)
  1772.     {
  1773.       if (t->operand_types[j].bitfield.acc
  1774.           && !match_reg_size (t, j ? 0 : 1))
  1775.         goto mismatch;
  1776.  
  1777.       if (i.types[j].bitfield.mem
  1778.           && !match_mem_size (t, j ? 0 : 1))
  1779.         goto mismatch;
  1780.     }
  1781.  
  1782.   return match;
  1783. }
  1784.  
  1785. static INLINE int
  1786. operand_type_match (i386_operand_type overlap,
  1787.                     i386_operand_type given)
  1788. {
  1789.   i386_operand_type temp = overlap;
  1790.  
  1791.   temp.bitfield.jumpabsolute = 0;
  1792.   temp.bitfield.unspecified = 0;
  1793.   temp.bitfield.byte = 0;
  1794.   temp.bitfield.word = 0;
  1795.   temp.bitfield.dword = 0;
  1796.   temp.bitfield.fword = 0;
  1797.   temp.bitfield.qword = 0;
  1798.   temp.bitfield.tbyte = 0;
  1799.   temp.bitfield.xmmword = 0;
  1800.   temp.bitfield.ymmword = 0;
  1801.   temp.bitfield.zmmword = 0;
  1802.   if (operand_type_all_zero (&temp))
  1803.     goto mismatch;
  1804.  
  1805.   if (given.bitfield.baseindex == overlap.bitfield.baseindex
  1806.       && given.bitfield.jumpabsolute == overlap.bitfield.jumpabsolute)
  1807.     return 1;
  1808.  
  1809. mismatch:
  1810.   i.error = operand_type_mismatch;
  1811.   return 0;
  1812. }
  1813.  
  1814. /* If given types g0 and g1 are registers they must be of the same type
  1815.    unless the expected operand type register overlap is null.
  1816.    Note that Acc in a template matches every size of reg.  */
  1817.  
  1818. static INLINE int
  1819. operand_type_register_match (i386_operand_type m0,
  1820.                              i386_operand_type g0,
  1821.                              i386_operand_type t0,
  1822.                              i386_operand_type m1,
  1823.                              i386_operand_type g1,
  1824.                              i386_operand_type t1)
  1825. {
  1826.   if (!operand_type_check (g0, reg))
  1827.     return 1;
  1828.  
  1829.   if (!operand_type_check (g1, reg))
  1830.     return 1;
  1831.  
  1832.   if (g0.bitfield.reg8 == g1.bitfield.reg8
  1833.       && g0.bitfield.reg16 == g1.bitfield.reg16
  1834.       && g0.bitfield.reg32 == g1.bitfield.reg32
  1835.       && g0.bitfield.reg64 == g1.bitfield.reg64)
  1836.     return 1;
  1837.  
  1838.   if (m0.bitfield.acc)
  1839.     {
  1840.       t0.bitfield.reg8 = 1;
  1841.       t0.bitfield.reg16 = 1;
  1842.       t0.bitfield.reg32 = 1;
  1843.       t0.bitfield.reg64 = 1;
  1844.     }
  1845.  
  1846.   if (m1.bitfield.acc)
  1847.     {
  1848.       t1.bitfield.reg8 = 1;
  1849.       t1.bitfield.reg16 = 1;
  1850.       t1.bitfield.reg32 = 1;
  1851.       t1.bitfield.reg64 = 1;
  1852.     }
  1853.  
  1854.   if (!(t0.bitfield.reg8 & t1.bitfield.reg8)
  1855.       && !(t0.bitfield.reg16 & t1.bitfield.reg16)
  1856.       && !(t0.bitfield.reg32 & t1.bitfield.reg32)
  1857.       && !(t0.bitfield.reg64 & t1.bitfield.reg64))
  1858.     return 1;
  1859.  
  1860.   i.error = register_type_mismatch;
  1861.  
  1862.   return 0;
  1863. }
  1864.  
  1865. static INLINE unsigned int
  1866. register_number (const reg_entry *r)
  1867. {
  1868.   unsigned int nr = r->reg_num;
  1869.  
  1870.   if (r->reg_flags & RegRex)
  1871.     nr += 8;
  1872.  
  1873.   return nr;
  1874. }
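
        /* Example: %ecx is stored with reg_num 1 and no RegRex flag, so this
           returns 1; %r9 is stored with reg_num 1 plus RegRex, so this returns
           9 (bit 3 being the REX extension).  */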
  1875.  
  1876. static INLINE unsigned int
  1877. mode_from_disp_size (i386_operand_type t)
  1878. {
  1879.   if (t.bitfield.disp8 || t.bitfield.vec_disp8)
  1880.     return 1;
  1881.   else if (t.bitfield.disp16
  1882.            || t.bitfield.disp32
  1883.            || t.bitfield.disp32s)
  1884.     return 2;
  1885.   else
  1886.     return 0;
  1887. }
  1888.  
  1889. static INLINE int
  1890. fits_in_signed_byte (addressT num)
  1891. {
  1892.   return num + 0x80 <= 0xff;
  1893. }
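
        /* The additions in the signed checks rely on addressT being unsigned:
           e.g. num == -1 becomes (addressT) -1, and adding 0x80 wraps to 0x7f,
           which passes, while num == 0x80 gives 0x100 and fails, matching the
           signed byte range [-0x80, 0x7f].  fits_in_signed_word uses the same
           trick with 0x8000.  */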
  1894.  
  1895. static INLINE int
  1896. fits_in_unsigned_byte (addressT num)
  1897. {
  1898.   return num <= 0xff;
  1899. }
  1900.  
  1901. static INLINE int
  1902. fits_in_unsigned_word (addressT num)
  1903. {
  1904.   return num <= 0xffff;
  1905. }
  1906.  
  1907. static INLINE int
  1908. fits_in_signed_word (addressT num)
  1909. {
  1910.   return num + 0x8000 <= 0xffff;
  1911. }
  1912.  
  1913. static INLINE int
  1914. fits_in_signed_long (addressT num ATTRIBUTE_UNUSED)
  1915. {
  1916. #ifndef BFD64
  1917.   return 1;
  1918. #else
  1919.   return num + 0x80000000 <= 0xffffffff;
  1920. #endif
  1921. }                               /* fits_in_signed_long() */
  1922.  
  1923. static INLINE int
  1924. fits_in_unsigned_long (addressT num ATTRIBUTE_UNUSED)
  1925. {
  1926. #ifndef BFD64
  1927.   return 1;
  1928. #else
  1929.   return num <= 0xffffffff;
  1930. #endif
  1931. }                               /* fits_in_unsigned_long() */
  1932.  
  1933. static INLINE int
  1934. fits_in_vec_disp8 (offsetT num)
  1935. {
  1936.   int shift = i.memshift;
  1937.   unsigned int mask;
  1938.  
  1939.   if (shift == -1)
  1940.     abort ();
  1941.  
  1942.   mask = (1 << shift) - 1;
  1943.  
  1944.   /* Return 0 if NUM isn't properly aligned.  */
  1945.   if ((num & mask))
  1946.     return 0;
  1947.  
  1948.   /* Check if NUM will fit in 8bit after shift.  */
  1949.   return fits_in_signed_byte (num >> shift);
  1950. }
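
        /* Worked example of the EVEX disp8*N compression this implements: with
           i.memshift == 4 (e.g. a full 16-byte memory operand), a displacement
           of 0x80 is a multiple of 16 and 0x80 >> 4 == 8 fits in a signed byte,
           so it can be emitted as the compressed 8-bit displacement 8; 0x88 is
           rejected because it is not 16-byte aligned.  */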
  1951.  
  1952. static INLINE int
  1953. fits_in_imm4 (offsetT num)
  1954. {
  1955.   return (num & 0xf) == num;
  1956. }
  1957.  
  1958. static i386_operand_type
  1959. smallest_imm_type (offsetT num)
  1960. {
  1961.   i386_operand_type t;
  1962.  
  1963.   operand_type_set (&t, 0);
  1964.   t.bitfield.imm64 = 1;
  1965.  
  1966.   if (cpu_arch_tune != PROCESSOR_I486 && num == 1)
  1967.     {
  1968.       /* This code is disabled on the 486 because all the Imm1 forms
  1969.          in the opcode table are slower on the i486.  They're the
  1970.          versions with the implicitly specified single-position
  1971.          displacement, which has another syntax if you really want to
  1972.          use that form.  */
  1973.       t.bitfield.imm1 = 1;
  1974.       t.bitfield.imm8 = 1;
  1975.       t.bitfield.imm8s = 1;
  1976.       t.bitfield.imm16 = 1;
  1977.       t.bitfield.imm32 = 1;
  1978.       t.bitfield.imm32s = 1;
  1979.     }
  1980.   else if (fits_in_signed_byte (num))
  1981.     {
  1982.       t.bitfield.imm8 = 1;
  1983.       t.bitfield.imm8s = 1;
  1984.       t.bitfield.imm16 = 1;
  1985.       t.bitfield.imm32 = 1;
  1986.       t.bitfield.imm32s = 1;
  1987.     }
  1988.   else if (fits_in_unsigned_byte (num))
  1989.     {
  1990.       t.bitfield.imm8 = 1;
  1991.       t.bitfield.imm16 = 1;
  1992.       t.bitfield.imm32 = 1;
  1993.       t.bitfield.imm32s = 1;
  1994.     }
  1995.   else if (fits_in_signed_word (num) || fits_in_unsigned_word (num))
  1996.     {
  1997.       t.bitfield.imm16 = 1;
  1998.       t.bitfield.imm32 = 1;
  1999.       t.bitfield.imm32s = 1;
  2000.     }
  2001.   else if (fits_in_signed_long (num))
  2002.     {
  2003.       t.bitfield.imm32 = 1;
  2004.       t.bitfield.imm32s = 1;
  2005.     }
  2006.   else if (fits_in_unsigned_long (num))
  2007.     t.bitfield.imm32 = 1;
  2008.  
  2009.   return t;
  2010. }
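
        /* Examples (Imm64 is always set first): when not tuning for the i486,
           num == 1 additionally gets Imm1 and every smaller immediate type;
           num == 0x90 misses the signed-byte range, so it gets Imm8, Imm16,
           Imm32 and Imm32S but not Imm8S; num == 0x89abcdef only fits the
           unsigned 32-bit range, so it gets just Imm32.  */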
  2011.  
  2012. static offsetT
  2013. offset_in_range (offsetT val, int size)
  2014. {
  2015.   addressT mask;
  2016.  
  2017.   switch (size)
  2018.     {
  2019.     case 1: mask = ((addressT) 1 <<  8) - 1; break;
  2020.     case 2: mask = ((addressT) 1 << 16) - 1; break;
  2021.     case 4: mask = ((addressT) 2 << 31) - 1; break;
  2022. #ifdef BFD64
  2023.     case 8: mask = ((addressT) 2 << 63) - 1; break;
  2024. #endif
  2025.     default: abort ();
  2026.     }
  2027.  
  2028. #ifdef BFD64
  2029.   /* If BFD64, sign extend val for 32bit address mode.  */
  2030.   if (flag_code != CODE_64BIT
  2031.       || i.prefix[ADDR_PREFIX])
  2032.     if ((val & ~(((addressT) 2 << 31) - 1)) == 0)
  2033.       val = (val ^ ((addressT) 1 << 31)) - ((addressT) 1 << 31);
  2034. #endif
  2035.  
  2036.   if ((val & ~mask) != 0 && (val & ~mask) != ~mask)
  2037.     {
  2038.       char buf1[40], buf2[40];
  2039.  
  2040.       sprint_value (buf1, val);
  2041.       sprint_value (buf2, val & mask);
  2042.       as_warn (_("%s shortened to %s"), buf1, buf2);
  2043.     }
  2044.   return val & mask;
  2045. }
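
        /* Example (with a 64-bit addressT): val == 0x123456789 with size == 4
           is neither a zero- nor a sign-extended 32-bit value, so it is
           truncated to 0x23456789 and the "shortened to" warning is issued;
           0xffffffff80000000 with size == 4 is masked to 0x80000000 silently,
           since the discarded bits are all ones.  */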
  2046.  
  2047. enum PREFIX_GROUP
  2048. {
  2049.   PREFIX_EXIST = 0,
  2050.   PREFIX_LOCK,
  2051.   PREFIX_REP,
  2052.   PREFIX_OTHER
  2053. };
  2054.  
  2055. /* Returns
  2056.    a. PREFIX_EXIST if attempting to add a prefix where one from the
  2057.    same class already exists.
  2058.    b. PREFIX_LOCK if lock prefix is added.
  2059.    c. PREFIX_REP if rep/repne prefix is added.
  2060.    d. PREFIX_OTHER if other prefix is added.
  2061.  */
  2062.  
  2063. static enum PREFIX_GROUP
  2064. add_prefix (unsigned int prefix)
  2065. {
  2066.   enum PREFIX_GROUP ret = PREFIX_OTHER;
  2067.   unsigned int q;
  2068.  
  2069.   if (prefix >= REX_OPCODE && prefix < REX_OPCODE + 16
  2070.       && flag_code == CODE_64BIT)
  2071.     {
  2072.       if ((i.prefix[REX_PREFIX] & prefix & REX_W)
  2073.           || ((i.prefix[REX_PREFIX] & (REX_R | REX_X | REX_B))
  2074.               && (prefix & (REX_R | REX_X | REX_B))))
  2075.         ret = PREFIX_EXIST;
  2076.       q = REX_PREFIX;
  2077.     }
  2078.   else
  2079.     {
  2080.       switch (prefix)
  2081.         {
  2082.         default:
  2083.           abort ();
  2084.  
  2085.         case CS_PREFIX_OPCODE:
  2086.         case DS_PREFIX_OPCODE:
  2087.         case ES_PREFIX_OPCODE:
  2088.         case FS_PREFIX_OPCODE:
  2089.         case GS_PREFIX_OPCODE:
  2090.         case SS_PREFIX_OPCODE:
  2091.           q = SEG_PREFIX;
  2092.           break;
  2093.  
  2094.         case REPNE_PREFIX_OPCODE:
  2095.         case REPE_PREFIX_OPCODE:
  2096.           q = REP_PREFIX;
  2097.           ret = PREFIX_REP;
  2098.           break;
  2099.  
  2100.         case LOCK_PREFIX_OPCODE:
  2101.           q = LOCK_PREFIX;
  2102.           ret = PREFIX_LOCK;
  2103.           break;
  2104.  
  2105.         case FWAIT_OPCODE:
  2106.           q = WAIT_PREFIX;
  2107.           break;
  2108.  
  2109.         case ADDR_PREFIX_OPCODE:
  2110.           q = ADDR_PREFIX;
  2111.           break;
  2112.  
  2113.         case DATA_PREFIX_OPCODE:
  2114.           q = DATA_PREFIX;
  2115.           break;
  2116.         }
  2117.       if (i.prefix[q] != 0)
  2118.         ret = PREFIX_EXIST;
  2119.     }
  2120.  
  2121.   if (ret)
  2122.     {
  2123.       if (!i.prefix[q])
  2124.         ++i.prefixes;
  2125.       i.prefix[q] |= prefix;
  2126.     }
  2127.   else
  2128.     as_bad (_("same type of prefix used twice"));
  2129.  
  2130.   return ret;
  2131. }
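
        /* Usage sketch: add_prefix (LOCK_PREFIX_OPCODE) returns PREFIX_LOCK and
           records the byte in i.prefix[LOCK_PREFIX]; a second LOCK on the same
           insn yields PREFIX_EXIST (0) and the "same type of prefix used twice"
           error.  REX requests in 64-bit mode are merged instead when their
           bits do not conflict, e.g. REX_W can be combined with REX_B, while
           two separate requests that both carry R/X/B bits are rejected.  */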
  2132.  
  2133. static void
  2134. update_code_flag (int value, int check)
  2135. {
  2136.   PRINTF_LIKE ((*as_error));
  2137.  
  2138.   flag_code = (enum flag_code) value;
  2139.   if (flag_code == CODE_64BIT)
  2140.     {
  2141.       cpu_arch_flags.bitfield.cpu64 = 1;
  2142.       cpu_arch_flags.bitfield.cpuno64 = 0;
  2143.     }
  2144.   else
  2145.     {
  2146.       cpu_arch_flags.bitfield.cpu64 = 0;
  2147.       cpu_arch_flags.bitfield.cpuno64 = 1;
  2148.     }
  2149.   if (value == CODE_64BIT && !cpu_arch_flags.bitfield.cpulm)
  2150.     {
  2151.       if (check)
  2152.         as_error = as_fatal;
  2153.       else
  2154.         as_error = as_bad;
  2155.       (*as_error) (_("64bit mode not supported on `%s'."),
  2156.                    cpu_arch_name ? cpu_arch_name : default_arch);
  2157.     }
  2158.   if (value == CODE_32BIT && !cpu_arch_flags.bitfield.cpui386)
  2159.     {
  2160.       if (check)
  2161.         as_error = as_fatal;
  2162.       else
  2163.         as_error = as_bad;
  2164.       (*as_error) (_("32bit mode not supported on `%s'."),
  2165.                    cpu_arch_name ? cpu_arch_name : default_arch);
  2166.     }
  2167.   stackop_size = '\0';
  2168. }
  2169.  
  2170. static void
  2171. set_code_flag (int value)
  2172. {
  2173.   update_code_flag (value, 0);
  2174. }
  2175.  
  2176. static void
  2177. set_16bit_gcc_code_flag (int new_code_flag)
  2178. {
  2179.   flag_code = (enum flag_code) new_code_flag;
  2180.   if (flag_code != CODE_16BIT)
  2181.     abort ();
  2182.   cpu_arch_flags.bitfield.cpu64 = 0;
  2183.   cpu_arch_flags.bitfield.cpuno64 = 1;
  2184.   stackop_size = LONG_MNEM_SUFFIX;
  2185. }
  2186.  
  2187. static void
  2188. set_intel_syntax (int syntax_flag)
  2189. {
  2190.   /* Find out if register prefixing is specified.  */
  2191.   int ask_naked_reg = 0;
  2192.  
  2193.   SKIP_WHITESPACE ();
  2194.   if (!is_end_of_line[(unsigned char) *input_line_pointer])
  2195.     {
  2196.       char *string;
  2197.       int e = get_symbol_name (&string);
  2198.  
  2199.       if (strcmp (string, "prefix") == 0)
  2200.         ask_naked_reg = 1;
  2201.       else if (strcmp (string, "noprefix") == 0)
  2202.         ask_naked_reg = -1;
  2203.       else
  2204.         as_bad (_("bad argument to syntax directive."));
  2205.       (void) restore_line_pointer (e);
  2206.     }
  2207.   demand_empty_rest_of_line ();
  2208.  
  2209.   intel_syntax = syntax_flag;
  2210.  
  2211.   if (ask_naked_reg == 0)
  2212.     allow_naked_reg = (intel_syntax
  2213.                        && (bfd_get_symbol_leading_char (stdoutput) != '\0'));
  2214.   else
  2215.     allow_naked_reg = (ask_naked_reg < 0);
  2216.  
  2217.   expr_set_rank (O_full_ptr, syntax_flag ? 10 : 0);
  2218.  
  2219.   identifier_chars['%'] = intel_syntax && allow_naked_reg ? '%' : 0;
  2220.   identifier_chars['$'] = intel_syntax ? '$' : 0;
  2221.   register_prefix = allow_naked_reg ? "" : "%";
  2222. }
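
        /* This implements the .intel_syntax directive, e.g.:

               .intel_syntax noprefix
               mov     eax, 1            # rather than AT&T "movl $1, %eax"

           "noprefix" sets allow_naked_reg so registers may be written without
           the '%' prefix; the matching .att_syntax directive takes the same
           path with syntax_flag == 0.  */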
  2223.  
  2224. static void
  2225. set_intel_mnemonic (int mnemonic_flag)
  2226. {
  2227.   intel_mnemonic = mnemonic_flag;
  2228. }
  2229.  
  2230. static void
  2231. set_allow_index_reg (int flag)
  2232. {
  2233.   allow_index_reg = flag;
  2234. }
  2235.  
  2236. static void
  2237. set_check (int what)
  2238. {
  2239.   enum check_kind *kind;
  2240.   const char *str;
  2241.  
  2242.   if (what)
  2243.     {
  2244.       kind = &operand_check;
  2245.       str = "operand";
  2246.     }
  2247.   else
  2248.     {
  2249.       kind = &sse_check;
  2250.       str = "sse";
  2251.     }
  2252.  
  2253.   SKIP_WHITESPACE ();
  2254.  
  2255.   if (!is_end_of_line[(unsigned char) *input_line_pointer])
  2256.     {
  2257.       char *string;
  2258.       int e = get_symbol_name (&string);
  2259.  
  2260.       if (strcmp (string, "none") == 0)
  2261.         *kind = check_none;
  2262.       else if (strcmp (string, "warning") == 0)
  2263.         *kind = check_warning;
  2264.       else if (strcmp (string, "error") == 0)
  2265.         *kind = check_error;
  2266.       else
  2267.         as_bad (_("bad argument to %s_check directive."), str);
  2268.       (void) restore_line_pointer (e);
  2269.     }
  2270.   else
  2271.     as_bad (_("missing argument for %s_check directive"), str);
  2272.  
  2273.   demand_empty_rest_of_line ();
  2274. }
  2275.  
  2276. static void
  2277. check_cpu_arch_compatible (const char *name ATTRIBUTE_UNUSED,
  2278.                            i386_cpu_flags new_flag ATTRIBUTE_UNUSED)
  2279. {
  2280. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  2281.   static const char *arch;
  2282.  
  2283.   /* Intel MCU, L1OM and K1OM are only supported on ELF.  */
  2284.   if (!IS_ELF)
  2285.     return;
  2286.  
  2287.   if (!arch)
  2288.     {
  2289.       /* Use cpu_arch_name if it is set in md_parse_option.  Otherwise
  2290.          use default_arch.  */
  2291.       arch = cpu_arch_name;
  2292.       if (!arch)
  2293.         arch = default_arch;
  2294.     }
  2295.  
  2296.   /* If we are targeting Intel MCU, we must enable it.  */
  2297.   if (get_elf_backend_data (stdoutput)->elf_machine_code != EM_IAMCU
  2298.       || new_flag.bitfield.cpuiamcu)
  2299.     return;
  2300.  
  2301.   /* If we are targeting Intel L1OM, we must enable it.  */
  2302.   if (get_elf_backend_data (stdoutput)->elf_machine_code != EM_L1OM
  2303.       || new_flag.bitfield.cpul1om)
  2304.     return;
  2305.  
  2306.   /* If we are targeting Intel K1OM, we must enable it.  */
  2307.   if (get_elf_backend_data (stdoutput)->elf_machine_code != EM_K1OM
  2308.       || new_flag.bitfield.cpuk1om)
  2309.     return;
  2310.  
  2311.   as_bad (_("`%s' is not supported on `%s'"), name, arch);
  2312. #endif
  2313. }
  2314.  
  2315. static void
  2316. set_cpu_arch (int dummy ATTRIBUTE_UNUSED)
  2317. {
  2318.   SKIP_WHITESPACE ();
  2319.  
  2320.   if (!is_end_of_line[(unsigned char) *input_line_pointer])
  2321.     {
  2322.       char *string;
  2323.       int e = get_symbol_name (&string);
  2324.       unsigned int j;
  2325.       i386_cpu_flags flags;
  2326.  
  2327.       for (j = 0; j < ARRAY_SIZE (cpu_arch); j++)
  2328.         {
  2329.           if (strcmp (string, cpu_arch[j].name) == 0)
  2330.             {
  2331.               check_cpu_arch_compatible (string, cpu_arch[j].flags);
  2332.  
  2333.               if (*string != '.')
  2334.                 {
  2335.                   cpu_arch_name = cpu_arch[j].name;
  2336.                   cpu_sub_arch_name = NULL;
  2337.                   cpu_arch_flags = cpu_arch[j].flags;
  2338.                   if (flag_code == CODE_64BIT)
  2339.                     {
  2340.                       cpu_arch_flags.bitfield.cpu64 = 1;
  2341.                       cpu_arch_flags.bitfield.cpuno64 = 0;
  2342.                     }
  2343.                   else
  2344.                     {
  2345.                       cpu_arch_flags.bitfield.cpu64 = 0;
  2346.                       cpu_arch_flags.bitfield.cpuno64 = 1;
  2347.                     }
  2348.                   cpu_arch_isa = cpu_arch[j].type;
  2349.                   cpu_arch_isa_flags = cpu_arch[j].flags;
  2350.                   if (!cpu_arch_tune_set)
  2351.                     {
  2352.                       cpu_arch_tune = cpu_arch_isa;
  2353.                       cpu_arch_tune_flags = cpu_arch_isa_flags;
  2354.                     }
  2355.                   break;
  2356.                 }
  2357.  
  2358.               if (!cpu_arch[j].negated)
  2359.                 flags = cpu_flags_or (cpu_arch_flags,
  2360.                                       cpu_arch[j].flags);
  2361.               else
  2362.                 flags = cpu_flags_and_not (cpu_arch_flags,
  2363.                                            cpu_arch[j].flags);
  2364.  
  2365.               if (!valid_iamcu_cpu_flags (&flags))
  2366.                 as_fatal (_("`%s' isn't valid for Intel MCU"),
  2367.                           cpu_arch[j].name);
  2368.               else if (!cpu_flags_equal (&flags, &cpu_arch_flags))
  2369.                 {
  2370.                   if (cpu_sub_arch_name)
  2371.                     {
  2372.                       char *name = cpu_sub_arch_name;
  2373.                       cpu_sub_arch_name = concat (name,
  2374.                                                   cpu_arch[j].name,
  2375.                                                   (const char *) NULL);
  2376.                       free (name);
  2377.                     }
  2378.                   else
  2379.                     cpu_sub_arch_name = xstrdup (cpu_arch[j].name);
  2380.                   cpu_arch_flags = flags;
  2381.                   cpu_arch_isa_flags = flags;
  2382.                 }
  2383.               (void) restore_line_pointer (e);
  2384.               demand_empty_rest_of_line ();
  2385.               return;
  2386.             }
  2387.         }
  2388.       if (j >= ARRAY_SIZE (cpu_arch))
  2389.         as_bad (_("no such architecture: `%s'"), string);
  2390.  
  2391.       *input_line_pointer = e;
  2392.     }
  2393.   else
  2394.     as_bad (_("missing cpu architecture"));
  2395.  
  2396.   no_cond_jump_promotion = 0;
  2397.   if (*input_line_pointer == ','
  2398.       && !is_end_of_line[(unsigned char) input_line_pointer[1]])
  2399.     {
  2400.       char *string;
  2401.       char e;
  2402.  
  2403.       ++input_line_pointer;
  2404.       e = get_symbol_name (&string);
  2405.  
  2406.       if (strcmp (string, "nojumps") == 0)
  2407.         no_cond_jump_promotion = 1;
  2408.       else if (strcmp (string, "jumps") == 0)
  2409.         ;
  2410.       else
  2411.         as_bad (_("no such architecture modifier: `%s'"), string);
  2412.  
  2413.       (void) restore_line_pointer (e);
  2414.     }
  2415.  
  2416.   demand_empty_rest_of_line ();
  2417. }
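
        /* Examples of the .arch directive handled here:

               .arch i686                # select a base architecture
               .arch .sse4.2             # a leading '.' enables a sub-arch feature
               .arch i686, nojumps       # also suppress conditional-jump promotion

           Base architectures replace cpu_arch_flags outright, while sub-arch
           entries are OR'd in (or masked out for negated entries such as
           ".noavx").  */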
  2418.  
  2419. enum bfd_architecture
  2420. i386_arch (void)
  2421. {
  2422.   if (cpu_arch_isa == PROCESSOR_L1OM)
  2423.     {
  2424.       if (OUTPUT_FLAVOR != bfd_target_elf_flavour
  2425.           || flag_code != CODE_64BIT)
  2426.         as_fatal (_("Intel L1OM is 64bit ELF only"));
  2427.       return bfd_arch_l1om;
  2428.     }
  2429.   else if (cpu_arch_isa == PROCESSOR_K1OM)
  2430.     {
  2431.       if (OUTPUT_FLAVOR != bfd_target_elf_flavour
  2432.           || flag_code != CODE_64BIT)
  2433.         as_fatal (_("Intel K1OM is 64bit ELF only"));
  2434.       return bfd_arch_k1om;
  2435.     }
  2436.   else if (cpu_arch_isa == PROCESSOR_IAMCU)
  2437.     {
  2438.       if (OUTPUT_FLAVOR != bfd_target_elf_flavour
  2439.           || flag_code == CODE_64BIT)
  2440.         as_fatal (_("Intel MCU is 32bit ELF only"));
  2441.       return bfd_arch_iamcu;
  2442.     }
  2443.   else
  2444.     return bfd_arch_i386;
  2445. }
  2446.  
  2447. unsigned long
  2448. i386_mach (void)
  2449. {
  2450.   if (!strncmp (default_arch, "x86_64", 6))
  2451.     {
  2452.       if (cpu_arch_isa == PROCESSOR_L1OM)
  2453.         {
  2454.           if (OUTPUT_FLAVOR != bfd_target_elf_flavour
  2455.               || default_arch[6] != '\0')
  2456.             as_fatal (_("Intel L1OM is 64bit ELF only"));
  2457.           return bfd_mach_l1om;
  2458.         }
  2459.       else if (cpu_arch_isa == PROCESSOR_K1OM)
  2460.         {
  2461.           if (OUTPUT_FLAVOR != bfd_target_elf_flavour
  2462.               || default_arch[6] != '\0')
  2463.             as_fatal (_("Intel K1OM is 64bit ELF only"));
  2464.           return bfd_mach_k1om;
  2465.         }
  2466.       else if (default_arch[6] == '\0')
  2467.         return bfd_mach_x86_64;
  2468.       else
  2469.         return bfd_mach_x64_32;
  2470.     }
  2471.   else if (!strcmp (default_arch, "i386")
  2472.            || !strcmp (default_arch, "iamcu"))
  2473.     {
  2474.       if (cpu_arch_isa == PROCESSOR_IAMCU)
  2475.         {
  2476.           if (OUTPUT_FLAVOR != bfd_target_elf_flavour)
  2477.             as_fatal (_("Intel MCU is 32bit ELF only"));
  2478.           return bfd_mach_i386_iamcu;
  2479.         }
  2480.       else
  2481.         return bfd_mach_i386_i386;
  2482.     }
  2483.   else
  2484.     as_fatal (_("unknown architecture"));
  2485. }
  2486. void
  2487. md_begin (void)
  2488. {
  2489.   const char *hash_err;
  2490.  
  2491.   /* Initialize op_hash hash table.  */
  2492.   op_hash = hash_new ();
  2493.  
  2494.   {
  2495.     const insn_template *optab;
  2496.     templates *core_optab;
  2497.  
  2498.     /* Setup for loop.  */
  2499.     optab = i386_optab;
  2500.     core_optab = (templates *) xmalloc (sizeof (templates));
  2501.     core_optab->start = optab;
  2502.  
  2503.     while (1)
  2504.       {
  2505.         ++optab;
  2506.         if (optab->name == NULL
  2507.             || strcmp (optab->name, (optab - 1)->name) != 0)
  2508.           {
  2509.             /* different name --> ship out current template list;
  2510.                add to hash table; & begin anew.  */
  2511.             core_optab->end = optab;
  2512.             hash_err = hash_insert (op_hash,
  2513.                                     (optab - 1)->name,
  2514.                                     (void *) core_optab);
  2515.             if (hash_err)
  2516.               {
  2517.                 as_fatal (_("can't hash %s: %s"),
  2518.                           (optab - 1)->name,
  2519.                           hash_err);
  2520.               }
  2521.             if (optab->name == NULL)
  2522.               break;
  2523.             core_optab = (templates *) xmalloc (sizeof (templates));
  2524.             core_optab->start = optab;
  2525.           }
  2526.       }
  2527.   }
  2528.  
  2529.   /* Initialize reg_hash hash table.  */
  2530.   reg_hash = hash_new ();
  2531.   {
  2532.     const reg_entry *regtab;
  2533.     unsigned int regtab_size = i386_regtab_size;
  2534.  
  2535.     for (regtab = i386_regtab; regtab_size--; regtab++)
  2536.       {
  2537.         hash_err = hash_insert (reg_hash, regtab->reg_name, (void *) regtab);
  2538.         if (hash_err)
  2539.           as_fatal (_("can't hash %s: %s"),
  2540.                     regtab->reg_name,
  2541.                     hash_err);
  2542.       }
  2543.   }
  2544.  
  2545.   /* Fill in lexical tables:  mnemonic_chars, operand_chars.  */
  2546.   {
  2547.     int c;
  2548.     char *p;
  2549.  
  2550.     for (c = 0; c < 256; c++)
  2551.       {
  2552.         if (ISDIGIT (c))
  2553.           {
  2554.             digit_chars[c] = c;
  2555.             mnemonic_chars[c] = c;
  2556.             register_chars[c] = c;
  2557.             operand_chars[c] = c;
  2558.           }
  2559.         else if (ISLOWER (c))
  2560.           {
  2561.             mnemonic_chars[c] = c;
  2562.             register_chars[c] = c;
  2563.             operand_chars[c] = c;
  2564.           }
  2565.         else if (ISUPPER (c))
  2566.           {
  2567.             mnemonic_chars[c] = TOLOWER (c);
  2568.             register_chars[c] = mnemonic_chars[c];
  2569.             operand_chars[c] = c;
  2570.           }
  2571.         else if (c == '{' || c == '}')
  2572.           operand_chars[c] = c;
  2573.  
  2574.         if (ISALPHA (c) || ISDIGIT (c))
  2575.           identifier_chars[c] = c;
  2576.         else if (c >= 128)
  2577.           {
  2578.             identifier_chars[c] = c;
  2579.             operand_chars[c] = c;
  2580.           }
  2581.       }
  2582.  
  2583. #ifdef LEX_AT
  2584.     identifier_chars['@'] = '@';
  2585. #endif
  2586. #ifdef LEX_QM
  2587.     identifier_chars['?'] = '?';
  2588.     operand_chars['?'] = '?';
  2589. #endif
  2590.     digit_chars['-'] = '-';
  2591.     mnemonic_chars['_'] = '_';
  2592.     mnemonic_chars['-'] = '-';
  2593.     mnemonic_chars['.'] = '.';
  2594.     identifier_chars['_'] = '_';
  2595.     identifier_chars['.'] = '.';
  2596.  
  2597.     for (p = operand_special_chars; *p != '\0'; p++)
  2598.       operand_chars[(unsigned char) *p] = *p;
  2599.   }
  2600.  
  2601.   if (flag_code == CODE_64BIT)
  2602.     {
  2603. #if defined (OBJ_COFF) && defined (TE_PE)
  2604.       x86_dwarf2_return_column = (OUTPUT_FLAVOR == bfd_target_coff_flavour
  2605.                                   ? 32 : 16);
  2606. #else
  2607.       x86_dwarf2_return_column = 16;
  2608. #endif
  2609.       x86_cie_data_alignment = -8;
  2610.     }
  2611.   else
  2612.     {
  2613.       x86_dwarf2_return_column = 8;
  2614.       x86_cie_data_alignment = -4;
  2615.     }
  2616. }
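
        /* Note on the op_hash layout built above: i386_optab is laid out so
           that all templates for one mnemonic are adjacent, and each hash entry
           keeps a {start, end} range over that run; e.g. every "mov" template
           is reachable from the single "mov" bucket, which the template
           matcher then walks.  */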
  2617.  
  2618. void
  2619. i386_print_statistics (FILE *file)
  2620. {
  2621.   hash_print_statistics (file, "i386 opcode", op_hash);
  2622.   hash_print_statistics (file, "i386 register", reg_hash);
  2623. }
  2624. #ifdef DEBUG386
  2625.  
  2626. /* Debugging routines for md_assemble.  */
  2627. static void pte (insn_template *);
  2628. static void pt (i386_operand_type);
  2629. static void pe (expressionS *);
  2630. static void ps (symbolS *);
  2631.  
  2632. static void
  2633. pi (char *line, i386_insn *x)
  2634. {
  2635.   unsigned int j;
  2636.  
  2637.   fprintf (stdout, "%s: template ", line);
  2638.   pte (&x->tm);
  2639.   fprintf (stdout, "  address: base %s  index %s  scale %x\n",
  2640.            x->base_reg ? x->base_reg->reg_name : "none",
  2641.            x->index_reg ? x->index_reg->reg_name : "none",
  2642.            x->log2_scale_factor);
  2643.   fprintf (stdout, "  modrm:  mode %x  reg %x  reg/mem %x\n",
  2644.            x->rm.mode, x->rm.reg, x->rm.regmem);
  2645.   fprintf (stdout, "  sib:  base %x  index %x  scale %x\n",
  2646.            x->sib.base, x->sib.index, x->sib.scale);
  2647.   fprintf (stdout, "  rex: 64bit %x  extX %x  extY %x  extZ %x\n",
  2648.            (x->rex & REX_W) != 0,
  2649.            (x->rex & REX_R) != 0,
  2650.            (x->rex & REX_X) != 0,
  2651.            (x->rex & REX_B) != 0);
  2652.   for (j = 0; j < x->operands; j++)
  2653.     {
  2654.       fprintf (stdout, "    #%d:  ", j + 1);
  2655.       pt (x->types[j]);
  2656.       fprintf (stdout, "\n");
  2657.       if (x->types[j].bitfield.reg8
  2658.           || x->types[j].bitfield.reg16
  2659.           || x->types[j].bitfield.reg32
  2660.           || x->types[j].bitfield.reg64
  2661.           || x->types[j].bitfield.regmmx
  2662.           || x->types[j].bitfield.regxmm
  2663.           || x->types[j].bitfield.regymm
  2664.           || x->types[j].bitfield.regzmm
  2665.           || x->types[j].bitfield.sreg2
  2666.           || x->types[j].bitfield.sreg3
  2667.           || x->types[j].bitfield.control
  2668.           || x->types[j].bitfield.debug
  2669.           || x->types[j].bitfield.test)
  2670.         fprintf (stdout, "%s\n", x->op[j].regs->reg_name);
  2671.       if (operand_type_check (x->types[j], imm))
  2672.         pe (x->op[j].imms);
  2673.       if (operand_type_check (x->types[j], disp))
  2674.         pe (x->op[j].disps);
  2675.     }
  2676. }
  2677.  
  2678. static void
  2679. pte (insn_template *t)
  2680. {
  2681.   unsigned int j;
  2682.   fprintf (stdout, " %d operands ", t->operands);
  2683.   fprintf (stdout, "opcode %x ", t->base_opcode);
  2684.   if (t->extension_opcode != None)
  2685.     fprintf (stdout, "ext %x ", t->extension_opcode);
  2686.   if (t->opcode_modifier.d)
  2687.     fprintf (stdout, "D");
  2688.   if (t->opcode_modifier.w)
  2689.     fprintf (stdout, "W");
  2690.   fprintf (stdout, "\n");
  2691.   for (j = 0; j < t->operands; j++)
  2692.     {
  2693.       fprintf (stdout, "    #%d type ", j + 1);
  2694.       pt (t->operand_types[j]);
  2695.       fprintf (stdout, "\n");
  2696.     }
  2697. }
  2698.  
  2699. static void
  2700. pe (expressionS *e)
  2701. {
  2702.   fprintf (stdout, "    operation     %d\n", e->X_op);
  2703.   fprintf (stdout, "    add_number    %ld (%lx)\n",
  2704.            (long) e->X_add_number, (long) e->X_add_number);
  2705.   if (e->X_add_symbol)
  2706.     {
  2707.       fprintf (stdout, "    add_symbol    ");
  2708.       ps (e->X_add_symbol);
  2709.       fprintf (stdout, "\n");
  2710.     }
  2711.   if (e->X_op_symbol)
  2712.     {
  2713.       fprintf (stdout, "    op_symbol    ");
  2714.       ps (e->X_op_symbol);
  2715.       fprintf (stdout, "\n");
  2716.     }
  2717. }
  2718.  
  2719. static void
  2720. ps (symbolS *s)
  2721. {
  2722.   fprintf (stdout, "%s type %s%s",
  2723.            S_GET_NAME (s),
  2724.            S_IS_EXTERNAL (s) ? "EXTERNAL " : "",
  2725.            segment_name (S_GET_SEGMENT (s)));
  2726. }
  2727.  
  2728. static struct type_name
  2729.   {
  2730.     i386_operand_type mask;
  2731.     const char *name;
  2732.   }
  2733. const type_names[] =
  2734. {
  2735.   { OPERAND_TYPE_REG8, "r8" },
  2736.   { OPERAND_TYPE_REG16, "r16" },
  2737.   { OPERAND_TYPE_REG32, "r32" },
  2738.   { OPERAND_TYPE_REG64, "r64" },
  2739.   { OPERAND_TYPE_IMM8, "i8" },
  2740.   { OPERAND_TYPE_IMM8S, "i8s" },
  2741.   { OPERAND_TYPE_IMM16, "i16" },
  2742.   { OPERAND_TYPE_IMM32, "i32" },
  2743.   { OPERAND_TYPE_IMM32S, "i32s" },
  2744.   { OPERAND_TYPE_IMM64, "i64" },
  2745.   { OPERAND_TYPE_IMM1, "i1" },
  2746.   { OPERAND_TYPE_BASEINDEX, "BaseIndex" },
  2747.   { OPERAND_TYPE_DISP8, "d8" },
  2748.   { OPERAND_TYPE_DISP16, "d16" },
  2749.   { OPERAND_TYPE_DISP32, "d32" },
  2750.   { OPERAND_TYPE_DISP32S, "d32s" },
  2751.   { OPERAND_TYPE_DISP64, "d64" },
  2752.   { OPERAND_TYPE_VEC_DISP8, "Vector d8" },
  2753.   { OPERAND_TYPE_INOUTPORTREG, "InOutPortReg" },
  2754.   { OPERAND_TYPE_SHIFTCOUNT, "ShiftCount" },
  2755.   { OPERAND_TYPE_CONTROL, "control reg" },
  2756.   { OPERAND_TYPE_TEST, "test reg" },
  2757.   { OPERAND_TYPE_DEBUG, "debug reg" },
  2758.   { OPERAND_TYPE_FLOATREG, "FReg" },
  2759.   { OPERAND_TYPE_FLOATACC, "FAcc" },
  2760.   { OPERAND_TYPE_SREG2, "SReg2" },
  2761.   { OPERAND_TYPE_SREG3, "SReg3" },
  2762.   { OPERAND_TYPE_ACC, "Acc" },
  2763.   { OPERAND_TYPE_JUMPABSOLUTE, "Jump Absolute" },
  2764.   { OPERAND_TYPE_REGMMX, "rMMX" },
  2765.   { OPERAND_TYPE_REGXMM, "rXMM" },
  2766.   { OPERAND_TYPE_REGYMM, "rYMM" },
  2767.   { OPERAND_TYPE_REGZMM, "rZMM" },
  2768.   { OPERAND_TYPE_REGMASK, "Mask reg" },
  2769.   { OPERAND_TYPE_ESSEG, "es" },
  2770. };
  2771.  
  2772. static void
  2773. pt (i386_operand_type t)
  2774. {
  2775.   unsigned int j;
  2776.   i386_operand_type a;
  2777.  
  2778.   for (j = 0; j < ARRAY_SIZE (type_names); j++)
  2779.     {
  2780.       a = operand_type_and (t, type_names[j].mask);
  2781.       if (!operand_type_all_zero (&a))
  2782.         fprintf (stdout, "%s, ",  type_names[j].name);
  2783.     }
  2784.   fflush (stdout);
  2785. }
  2786.  
  2787. #endif /* DEBUG386 */
  2788. static bfd_reloc_code_real_type
  2789. reloc (unsigned int size,
  2790.        int pcrel,
  2791.        int sign,
  2792.        bfd_reloc_code_real_type other)
  2793. {
  2794.   if (other != NO_RELOC)
  2795.     {
  2796.       reloc_howto_type *rel;
  2797.  
  2798.       if (size == 8)
  2799.         switch (other)
  2800.           {
  2801.           case BFD_RELOC_X86_64_GOT32:
  2802.             return BFD_RELOC_X86_64_GOT64;
  2803.             break;
  2804.           case BFD_RELOC_X86_64_GOTPLT64:
  2805.             return BFD_RELOC_X86_64_GOTPLT64;
  2806.             break;
  2807.           case BFD_RELOC_X86_64_PLTOFF64:
  2808.             return BFD_RELOC_X86_64_PLTOFF64;
  2809.             break;
  2810.           case BFD_RELOC_X86_64_GOTPC32:
  2811.             other = BFD_RELOC_X86_64_GOTPC64;
  2812.             break;
  2813.           case BFD_RELOC_X86_64_GOTPCREL:
  2814.             other = BFD_RELOC_X86_64_GOTPCREL64;
  2815.             break;
  2816.           case BFD_RELOC_X86_64_TPOFF32:
  2817.             other = BFD_RELOC_X86_64_TPOFF64;
  2818.             break;
  2819.           case BFD_RELOC_X86_64_DTPOFF32:
  2820.             other = BFD_RELOC_X86_64_DTPOFF64;
  2821.             break;
  2822.           default:
  2823.             break;
  2824.           }
  2825.  
  2826. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  2827.       if (other == BFD_RELOC_SIZE32)
  2828.         {
  2829.           if (size == 8)
  2830.             other = BFD_RELOC_SIZE64;
  2831.           if (pcrel)
  2832.             {
  2833.               as_bad (_("there are no pc-relative size relocations"));
  2834.               return NO_RELOC;
  2835.             }
  2836.         }
  2837. #endif
  2838.  
  2839.       /* Sign-checking 4-byte relocations in 16-/32-bit code is pointless.  */
  2840.       if (size == 4 && (flag_code != CODE_64BIT || disallow_64bit_reloc))
  2841.         sign = -1;
  2842.  
  2843.       rel = bfd_reloc_type_lookup (stdoutput, other);
  2844.       if (!rel)
  2845.         as_bad (_("unknown relocation (%u)"), other);
  2846.       else if (size != bfd_get_reloc_size (rel))
  2847.         as_bad (_("%u-byte relocation cannot be applied to %u-byte field"),
  2848.                 bfd_get_reloc_size (rel),
  2849.                 size);
  2850.       else if (pcrel && !rel->pc_relative)
  2851.         as_bad (_("non-pc-relative relocation for pc-relative field"));
  2852.       else if ((rel->complain_on_overflow == complain_overflow_signed
  2853.                 && !sign)
  2854.                || (rel->complain_on_overflow == complain_overflow_unsigned
  2855.                    && sign > 0))
  2856.         as_bad (_("relocated field and relocation type differ in signedness"));
  2857.       else
  2858.         return other;
  2859.       return NO_RELOC;
  2860.     }
  2861.  
  2862.   if (pcrel)
  2863.     {
  2864.       if (!sign)
  2865.         as_bad (_("there are no unsigned pc-relative relocations"));
  2866.       switch (size)
  2867.         {
  2868.         case 1: return BFD_RELOC_8_PCREL;
  2869.         case 2: return BFD_RELOC_16_PCREL;
  2870.         case 4: return BFD_RELOC_32_PCREL;
  2871.         case 8: return BFD_RELOC_64_PCREL;
  2872.         }
  2873.       as_bad (_("cannot do %u byte pc-relative relocation"), size);
  2874.     }
  2875.   else
  2876.     {
  2877.       if (sign > 0)
  2878.         switch (size)
  2879.           {
  2880.           case 4: return BFD_RELOC_X86_64_32S;
  2881.           }
  2882.       else
  2883.         switch (size)
  2884.           {
  2885.           case 1: return BFD_RELOC_8;
  2886.           case 2: return BFD_RELOC_16;
  2887.           case 4: return BFD_RELOC_32;
  2888.           case 8: return BFD_RELOC_64;
  2889.           }
  2890.       as_bad (_("cannot do %s %u byte relocation"),
  2891.               sign > 0 ? "signed" : "unsigned", size);
  2892.     }
  2893.  
  2894.   return NO_RELOC;
  2895. }
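
        /* Examples with OTHER == NO_RELOC: size 4, pcrel, signed maps to
           BFD_RELOC_32_PCREL; size 4, absolute with sign > 0 maps to
           BFD_RELOC_X86_64_32S; an unsigned pc-relative request is diagnosed,
           since no such relocation exists.  */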
  2896.  
  2897. /* Here we decide which fixups can be adjusted to make them relative to
  2898.    the beginning of the section instead of the symbol.  Basically we need
  2899.    to make sure that the dynamic relocations are done correctly, so in
  2900.    some cases we force the original symbol to be used.  */
  2901.  
  2902. int
  2903. tc_i386_fix_adjustable (fixS *fixP ATTRIBUTE_UNUSED)
  2904. {
  2905. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  2906.   if (!IS_ELF)
  2907.     return 1;
  2908.  
  2909.   /* Don't adjust pc-relative references to merge sections in 64-bit
  2910.      mode.  */
  2911.   if (use_rela_relocations
  2912.       && (S_GET_SEGMENT (fixP->fx_addsy)->flags & SEC_MERGE) != 0
  2913.       && fixP->fx_pcrel)
  2914.     return 0;
  2915.  
  2916.   /* The x86_64 GOTPCREL relocations are represented as 32bit PCrel
  2917.      relocations and changed later by validate_fix.  */
  2918.   if (GOT_symbol && fixP->fx_subsy == GOT_symbol
  2919.       && fixP->fx_r_type == BFD_RELOC_32_PCREL)
  2920.     return 0;
  2921.  
  2922.   /* Adjust_reloc_syms doesn't know about the GOT.  Need to keep symbol
  2923.      for size relocations.  */
  2924.   if (fixP->fx_r_type == BFD_RELOC_SIZE32
  2925.       || fixP->fx_r_type == BFD_RELOC_SIZE64
  2926.       || fixP->fx_r_type == BFD_RELOC_386_GOTOFF
  2927.       || fixP->fx_r_type == BFD_RELOC_386_PLT32
  2928.       || fixP->fx_r_type == BFD_RELOC_386_GOT32
  2929.       || fixP->fx_r_type == BFD_RELOC_386_GOT32X
  2930.       || fixP->fx_r_type == BFD_RELOC_386_TLS_GD
  2931.       || fixP->fx_r_type == BFD_RELOC_386_TLS_LDM
  2932.       || fixP->fx_r_type == BFD_RELOC_386_TLS_LDO_32
  2933.       || fixP->fx_r_type == BFD_RELOC_386_TLS_IE_32
  2934.       || fixP->fx_r_type == BFD_RELOC_386_TLS_IE
  2935.       || fixP->fx_r_type == BFD_RELOC_386_TLS_GOTIE
  2936.       || fixP->fx_r_type == BFD_RELOC_386_TLS_LE_32
  2937.       || fixP->fx_r_type == BFD_RELOC_386_TLS_LE
  2938.       || fixP->fx_r_type == BFD_RELOC_386_TLS_GOTDESC
  2939.       || fixP->fx_r_type == BFD_RELOC_386_TLS_DESC_CALL
  2940.       || fixP->fx_r_type == BFD_RELOC_X86_64_PLT32
  2941.       || fixP->fx_r_type == BFD_RELOC_X86_64_GOT32
  2942.       || fixP->fx_r_type == BFD_RELOC_X86_64_GOTPCREL
  2943.       || fixP->fx_r_type == BFD_RELOC_X86_64_GOTPCRELX
  2944.       || fixP->fx_r_type == BFD_RELOC_X86_64_REX_GOTPCRELX
  2945.       || fixP->fx_r_type == BFD_RELOC_X86_64_TLSGD
  2946.       || fixP->fx_r_type == BFD_RELOC_X86_64_TLSLD
  2947.       || fixP->fx_r_type == BFD_RELOC_X86_64_DTPOFF32
  2948.       || fixP->fx_r_type == BFD_RELOC_X86_64_DTPOFF64
  2949.       || fixP->fx_r_type == BFD_RELOC_X86_64_GOTTPOFF
  2950.       || fixP->fx_r_type == BFD_RELOC_X86_64_TPOFF32
  2951.       || fixP->fx_r_type == BFD_RELOC_X86_64_TPOFF64
  2952.       || fixP->fx_r_type == BFD_RELOC_X86_64_GOTOFF64
  2953.       || fixP->fx_r_type == BFD_RELOC_X86_64_GOTPC32_TLSDESC
  2954.       || fixP->fx_r_type == BFD_RELOC_X86_64_TLSDESC_CALL
  2955.       || fixP->fx_r_type == BFD_RELOC_VTABLE_INHERIT
  2956.       || fixP->fx_r_type == BFD_RELOC_VTABLE_ENTRY)
  2957.     return 0;
  2958. #endif
  2959.   return 1;
  2960. }
  2961.  
  2962. static int
  2963. intel_float_operand (const char *mnemonic)
  2964. {
  2965.   /* Note that the value returned is meaningful only for opcodes with (memory)
  2966.      operands, hence the code here is free to improperly handle opcodes that
  2967.      have no operands (for better performance and smaller code). */
  2968.  
  2969.   if (mnemonic[0] != 'f')
  2970.     return 0; /* non-math */
  2971.  
  2972.   switch (mnemonic[1])
  2973.     {
  2974.     /* fclex, fdecstp, fdisi, femms, feni, fincstp, finit, fsetpm, and
  2975.        the fs segment override prefix are not currently handled, because no
  2976.        call path can reach this point for opcodes without operands.  */
  2977.     case 'i':
  2978.       return 2 /* integer op */;
  2979.     case 'l':
  2980.       if (mnemonic[2] == 'd' && (mnemonic[3] == 'c' || mnemonic[3] == 'e'))
  2981.         return 3; /* fldcw/fldenv */
  2982.       break;
  2983.     case 'n':
  2984.       if (mnemonic[2] != 'o' /* fnop */)
  2985.         return 3; /* non-waiting control op */
  2986.       break;
  2987.     case 'r':
  2988.       if (mnemonic[2] == 's')
  2989.         return 3; /* frstor/frstpm */
  2990.       break;
  2991.     case 's':
  2992.       if (mnemonic[2] == 'a')
  2993.         return 3; /* fsave */
  2994.       if (mnemonic[2] == 't')
  2995.         {
  2996.           switch (mnemonic[3])
  2997.             {
  2998.             case 'c': /* fstcw */
  2999.             case 'd': /* fstdw */
  3000.             case 'e': /* fstenv */
  3001.             case 's': /* fsts[gw] */
  3002.               return 3;
  3003.             }
  3004.         }
  3005.       break;
  3006.     case 'x':
  3007.       if (mnemonic[2] == 'r' || mnemonic[2] == 's')
  3008.         return 0; /* fxsave/fxrstor are not really math ops */
  3009.       break;
  3010.     }
  3011.  
  3012.   return 1;
  3013. }
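
        /* Examples of the classification: "fadd" -> 1 (ordinary FP operation),
           "fild" and "fisttp" -> 2 (integer form), "fldcw", "fnstenv" and
           "fsave" -> 3 (control/state operation), "fxsave" -> 0 (not treated as
           an FP operation here), and any mnemonic not starting with 'f' -> 0.  */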
  3014.  
  3015. /* Build the VEX prefix.  */
  3016.  
  3017. static void
  3018. build_vex_prefix (const insn_template *t)
  3019. {
  3020.   unsigned int register_specifier;
  3021.   unsigned int implied_prefix;
  3022.   unsigned int vector_length;
  3023.  
  3024.   /* Check register specifier.  */
  3025.   if (i.vex.register_specifier)
  3026.     {
  3027.       register_specifier =
  3028.         ~register_number (i.vex.register_specifier) & 0xf;
  3029.       gas_assert ((i.vex.register_specifier->reg_flags & RegVRex) == 0);
  3030.     }
  3031.   else
  3032.     register_specifier = 0xf;
  3033.  
  3034.   /* Use 2-byte VEX prefix by swapping destination and source
  3035.      operand.  */
  3036.   if (!i.swap_operand
  3037.       && i.operands == i.reg_operands
  3038.       && i.tm.opcode_modifier.vexopcode == VEX0F
  3039.       && i.tm.opcode_modifier.s
  3040.       && i.rex == REX_B)
  3041.     {
  3042.       unsigned int xchg = i.operands - 1;
  3043.       union i386_op temp_op;
  3044.       i386_operand_type temp_type;
  3045.  
  3046.       temp_type = i.types[xchg];
  3047.       i.types[xchg] = i.types[0];
  3048.       i.types[0] = temp_type;
  3049.       temp_op = i.op[xchg];
  3050.       i.op[xchg] = i.op[0];
  3051.       i.op[0] = temp_op;
  3052.  
  3053.       gas_assert (i.rm.mode == 3);
  3054.  
  3055.       i.rex = REX_R;
  3056.       xchg = i.rm.regmem;
  3057.       i.rm.regmem = i.rm.reg;
  3058.       i.rm.reg = xchg;
  3059.  
  3060.       /* Use the next insn.  */
  3061.       i.tm = t[1];
  3062.     }
  3063.  
  3064.   if (i.tm.opcode_modifier.vex == VEXScalar)
  3065.     vector_length = avxscalar;
  3066.   else
  3067.     vector_length = i.tm.opcode_modifier.vex == VEX256 ? 1 : 0;
  3068.  
  3069.   switch ((i.tm.base_opcode >> 8) & 0xff)
  3070.     {
  3071.     case 0:
  3072.       implied_prefix = 0;
  3073.       break;
  3074.     case DATA_PREFIX_OPCODE:
  3075.       implied_prefix = 1;
  3076.       break;
  3077.     case REPE_PREFIX_OPCODE:
  3078.       implied_prefix = 2;
  3079.       break;
  3080.     case REPNE_PREFIX_OPCODE:
  3081.       implied_prefix = 3;
  3082.       break;
  3083.     default:
  3084.       abort ();
  3085.     }
  3086.  
  3087.   /* Use 2-byte VEX prefix if possible.  */
  3088.   if (i.tm.opcode_modifier.vexopcode == VEX0F
  3089.       && i.tm.opcode_modifier.vexw != VEXW1
  3090.       && (i.rex & (REX_W | REX_X | REX_B)) == 0)
  3091.     {
  3092.       /* 2-byte VEX prefix.  */
  3093.       unsigned int r;
  3094.  
  3095.       i.vex.length = 2;
  3096.       i.vex.bytes[0] = 0xc5;
  3097.  
  3098.       /* Check the REX.R bit.  */
  3099.       r = (i.rex & REX_R) ? 0 : 1;
  3100.       i.vex.bytes[1] = (r << 7
  3101.                         | register_specifier << 3
  3102.                         | vector_length << 2
  3103.                         | implied_prefix);
  3104.     }
  3105.   else
  3106.     {
  3107.       /* 3-byte VEX prefix.  */
  3108.       unsigned int m, w;
  3109.  
  3110.       i.vex.length = 3;
  3111.  
  3112.       switch (i.tm.opcode_modifier.vexopcode)
  3113.         {
  3114.         case VEX0F:
  3115.           m = 0x1;
  3116.           i.vex.bytes[0] = 0xc4;
  3117.           break;
  3118.         case VEX0F38:
  3119.           m = 0x2;
  3120.           i.vex.bytes[0] = 0xc4;
  3121.           break;
  3122.         case VEX0F3A:
  3123.           m = 0x3;
  3124.           i.vex.bytes[0] = 0xc4;
  3125.           break;
  3126.         case XOP08:
  3127.           m = 0x8;
  3128.           i.vex.bytes[0] = 0x8f;
  3129.           break;
  3130.         case XOP09:
  3131.           m = 0x9;
  3132.           i.vex.bytes[0] = 0x8f;
  3133.           break;
  3134.         case XOP0A:
  3135.           m = 0xa;
  3136.           i.vex.bytes[0] = 0x8f;
  3137.           break;
  3138.         default:
  3139.           abort ();
  3140.         }
  3141.  
  3142.       /* The high 3 bits of the second VEX byte are 1's complement
  3143.          of RXB bits from REX.  */
  3144.       i.vex.bytes[1] = (~i.rex & 0x7) << 5 | m;
  3145.  
  3146.       /* Check the REX.W bit.  */
  3147.       w = (i.rex & REX_W) ? 1 : 0;
  3148.       if (i.tm.opcode_modifier.vexw == VEXW1)
  3149.         w = 1;
  3150.  
  3151.       i.vex.bytes[2] = (w << 7
  3152.                         | register_specifier << 3
  3153.                         | vector_length << 2
  3154.                         | implied_prefix);
  3155.     }
  3156. }
  3157.  
  3158. /* Build the EVEX prefix.  */
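/* For reference, a rough sketch of the 4-byte EVEX layout emitted here,
   with bit fields listed from most to least significant:

     byte 0:  0x62
     byte 1:  ~R(1) ~X(1) ~B(1) ~R'(1) 0 0 mm(2)
     byte 2:  W(1) ~vvvv(4) U(1)=1 pp(2)
     byte 3:  z(1) L'L(2) b(1) ~V'(1) aaa(3)

   When rounding control or SAE is used, L'L carries the RC value and
   the b bit is set; aaa selects the opmask register.  */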
  3159.  
  3160. static void
  3161. build_evex_prefix (void)
  3162. {
  3163.   unsigned int register_specifier;
  3164.   unsigned int implied_prefix;
  3165.   unsigned int m, w;
  3166.   rex_byte vrex_used = 0;
  3167.  
  3168.   /* Check register specifier.  */
  3169.   if (i.vex.register_specifier)
  3170.     {
  3171.       gas_assert ((i.vrex & REX_X) == 0);
  3172.  
  3173.       register_specifier = i.vex.register_specifier->reg_num;
  3174.       if ((i.vex.register_specifier->reg_flags & RegRex))
  3175.         register_specifier += 8;
  3176.       /* The upper 16 registers are encoded in the fourth byte of the
  3177.          EVEX prefix.  */
  3178.       if (!(i.vex.register_specifier->reg_flags & RegVRex))
  3179.         i.vex.bytes[3] = 0x8;
  3180.       register_specifier = ~register_specifier & 0xf;
  3181.     }
  3182.   else
  3183.     {
  3184.       register_specifier = 0xf;
  3185.  
  3186.       /* Encode upper 16 vector index register in the fourth byte of
  3187.          the EVEX prefix.  */
  3188.       if (!(i.vrex & REX_X))
  3189.         i.vex.bytes[3] = 0x8;
  3190.       else
  3191.         vrex_used |= REX_X;
  3192.     }
  3193.  
  3194.   switch ((i.tm.base_opcode >> 8) & 0xff)
  3195.     {
  3196.     case 0:
  3197.       implied_prefix = 0;
  3198.       break;
  3199.     case DATA_PREFIX_OPCODE:
  3200.       implied_prefix = 1;
  3201.       break;
  3202.     case REPE_PREFIX_OPCODE:
  3203.       implied_prefix = 2;
  3204.       break;
  3205.     case REPNE_PREFIX_OPCODE:
  3206.       implied_prefix = 3;
  3207.       break;
  3208.     default:
  3209.       abort ();
  3210.     }
  3211.  
  3212.   /* 4 byte EVEX prefix.  */
  3213.   i.vex.length = 4;
  3214.   i.vex.bytes[0] = 0x62;
  3215.  
  3216.   /* mmmm bits.  */
  3217.   switch (i.tm.opcode_modifier.vexopcode)
  3218.     {
  3219.     case VEX0F:
  3220.       m = 1;
  3221.       break;
  3222.     case VEX0F38:
  3223.       m = 2;
  3224.       break;
  3225.     case VEX0F3A:
  3226.       m = 3;
  3227.       break;
  3228.     default:
  3229.       abort ();
  3230.       break;
  3231.     }
  3232.  
  3233.   /* The high 3 bits of the second EVEX byte are 1's complement of RXB
  3234.      bits from REX.  */
  3235.   i.vex.bytes[1] = (~i.rex & 0x7) << 5 | m;
  3236.  
  3237.   /* The fifth bit of the second EVEX byte is 1's complement of the
  3238.      REX_R bit in VREX.  */
  3239.   if (!(i.vrex & REX_R))
  3240.     i.vex.bytes[1] |= 0x10;
  3241.   else
  3242.     vrex_used |= REX_R;
  3243.  
  3244.   if ((i.reg_operands + i.imm_operands) == i.operands)
  3245.     {
  3246.       /* When all operands are registers, the REX_X bit in REX is not
  3247.          used.  We reuse it to encode the upper 16 registers, which is
  3248.          indicated by the REX_B bit in VREX.  The REX_X bit is encoded
  3249.          as 1's complement.  */
  3250.       if ((i.vrex & REX_B))
  3251.         {
  3252.           vrex_used |= REX_B;
  3253.           i.vex.bytes[1] &= ~0x40;
  3254.         }
  3255.     }
  3256.  
  3257.   /* EVEX instructions shouldn't need the REX prefix.  */
  3258.   i.vrex &= ~vrex_used;
  3259.   gas_assert (i.vrex == 0);
  3260.  
  3261.   /* Check the REX.W bit.  */
  3262.   w = (i.rex & REX_W) ? 1 : 0;
  3263.   if (i.tm.opcode_modifier.vexw)
  3264.     {
  3265.       if (i.tm.opcode_modifier.vexw == VEXW1)
  3266.         w = 1;
  3267.     }
  3268.   /* If w is not set it means we are dealing with a WIG instruction.  */
  3269.   else if (!w)
  3270.     {
  3271.       if (evexwig == evexw1)
  3272.         w = 1;
  3273.     }
  3274.  
  3275.   /* Encode the U bit.  */
  3276.   implied_prefix |= 0x4;
  3277.  
  3278.   /* The third byte of the EVEX prefix.  */
  3279.   i.vex.bytes[2] = (w << 7 | register_specifier << 3 | implied_prefix);
  3280.  
  3281.   /* The fourth byte of the EVEX prefix.  */
  3282.   /* The zeroing-masking bit.  */
  3283.   if (i.mask && i.mask->zeroing)
  3284.     i.vex.bytes[3] |= 0x80;
  3285.  
  3286.   /* Encode the vector length and broadcast bit only when there is no RC/SAE.  */
  3287.   if (!i.rounding)
  3288.     {
  3289.       /* Encode the vector length.  */
  3290.       unsigned int vec_length;
  3291.  
  3292.       switch (i.tm.opcode_modifier.evex)
  3293.         {
  3294.         case EVEXLIG: /* LL' is ignored */
  3295.           vec_length = evexlig << 5;
  3296.           break;
  3297.         case EVEX128:
  3298.           vec_length = 0 << 5;
  3299.           break;
  3300.         case EVEX256:
  3301.           vec_length = 1 << 5;
  3302.           break;
  3303.         case EVEX512:
  3304.           vec_length = 2 << 5;
  3305.           break;
  3306.         default:
  3307.           abort ();
  3308.           break;
  3309.         }
  3310.       i.vex.bytes[3] |= vec_length;
  3311.       /* Encode the broadcast bit.  */
  3312.       if (i.broadcast)
  3313.         i.vex.bytes[3] |= 0x10;
  3314.     }
  3315.   else
  3316.     {
  3317.       if (i.rounding->type != saeonly)
  3318.         i.vex.bytes[3] |= 0x10 | (i.rounding->type << 5);
  3319.       else
  3320.         i.vex.bytes[3] |= 0x10 | (evexrcig << 5);
  3321.     }
  3322.  
  3323.   if (i.mask && i.mask->mask)
  3324.     i.vex.bytes[3] |= i.mask->mask->reg_num;
  3325. }
  3326.  
  3327. static void
  3328. process_immext (void)
  3329. {
  3330.   expressionS *exp;
  3331.  
  3332.   if ((i.tm.cpu_flags.bitfield.cpusse3 || i.tm.cpu_flags.bitfield.cpusvme)
  3333.       && i.operands > 0)
  3334.     {
  3335.       /* MONITOR/MWAIT as well as SVME instructions have fixed operands
  3336.          with an opcode suffix which is coded in the same place as an
  3337.          8-bit immediate field would be.
  3338.          Here we check those operands and remove them afterwards.  */
  3339.       unsigned int x;
  3340.  
  3341.       for (x = 0; x < i.operands; x++)
  3342.         if (register_number (i.op[x].regs) != x)
  3343.           as_bad (_("can't use register '%s%s' as operand %d in '%s'."),
  3344.                   register_prefix, i.op[x].regs->reg_name, x + 1,
  3345.                   i.tm.name);
  3346.  
  3347.       i.operands = 0;
  3348.     }
  3349.  
  3350.   if (i.tm.cpu_flags.bitfield.cpumwaitx && i.operands > 0)
  3351.     {
  3352.       /* MONITORX/MWAITX instructions have fixed operands with an opcode
  3353.          suffix which is coded in the same place as an 8-bit immediate
  3354.          field would be.
  3355.          Here we check those operands and remove them afterwards.  */
  3356.       unsigned int x;
  3357.  
  3358.       if (i.operands != 3)
  3359.         abort();
  3360.  
  3361.       for (x = 0; x < 2; x++)
  3362.         if (register_number (i.op[x].regs) != x)
  3363.           goto bad_register_operand;
  3364.  
  3365.       /* Check for third operand for mwaitx/monitorx insn.  */
  3366.       if (register_number (i.op[x].regs)
  3367.           != (x + (i.tm.extension_opcode == 0xfb)))
  3368.         {
  3369. bad_register_operand:
  3370.           as_bad (_("can't use register '%s%s' as operand %d in '%s'."),
  3371.                   register_prefix, i.op[x].regs->reg_name, x+1,
  3372.                   i.tm.name);
  3373.         }
  3374.  
  3375.       i.operands = 0;
  3376.     }
  3377.  
  3378.   /* These AMD 3DNow! and SSE2 instructions have an opcode suffix
  3379.      which is coded in the same place as an 8-bit immediate field
  3380.      would be.  Here we fake an 8-bit immediate operand from the
  3381.      opcode suffix stored in tm.extension_opcode.
  3382.  
  3383.      AVX instructions also use this encoding for some
  3384.      3-operand instructions.  */
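  /* For example, the AMD 3DNow! instructions are all encoded as
     0f 0f /r followed by a one-byte opcode suffix; that suffix byte is
     what gets faked here as a trailing Imm8 operand.  */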
  3385.  
  3386.   gas_assert (i.imm_operands <= 1
  3387.               && (i.operands <= 2
  3388.                   || ((i.tm.opcode_modifier.vex
  3389.                        || i.tm.opcode_modifier.evex)
  3390.                       && i.operands <= 4)));
  3391.  
  3392.   exp = &im_expressions[i.imm_operands++];
  3393.   i.op[i.operands].imms = exp;
  3394.   i.types[i.operands] = imm8;
  3395.   i.operands++;
  3396.   exp->X_op = O_constant;
  3397.   exp->X_add_number = i.tm.extension_opcode;
  3398.   i.tm.extension_opcode = None;
  3399. }
  3400.  
  3401.  
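/* Check whether the HLE prefix on the current instruction is legal.
   Roughly: "xacquire"/"xrelease" normally require a lockable
   instruction with a "lock" prefix, e.g. "xacquire lock incl (%rdi)",
   while "xrelease" is additionally accepted on plain stores to memory,
   e.g. "xrelease movl $0, (%rdi)".  Returns 0 (after issuing a
   diagnostic) when the combination is not allowed.  */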
  3402. static int
  3403. check_hle (void)
  3404. {
  3405.   switch (i.tm.opcode_modifier.hleprefixok)
  3406.     {
  3407.     default:
  3408.       abort ();
  3409.     case HLEPrefixNone:
  3410.       as_bad (_("invalid instruction `%s' after `%s'"),
  3411.               i.tm.name, i.hle_prefix);
  3412.       return 0;
  3413.     case HLEPrefixLock:
  3414.       if (i.prefix[LOCK_PREFIX])
  3415.         return 1;
  3416.       as_bad (_("missing `lock' with `%s'"), i.hle_prefix);
  3417.       return 0;
  3418.     case HLEPrefixAny:
  3419.       return 1;
  3420.     case HLEPrefixRelease:
  3421.       if (i.prefix[HLE_PREFIX] != XRELEASE_PREFIX_OPCODE)
  3422.         {
  3423.           as_bad (_("instruction `%s' after `xacquire' not allowed"),
  3424.                   i.tm.name);
  3425.           return 0;
  3426.         }
  3427.       if (i.mem_operands == 0
  3428.           || !operand_type_check (i.types[i.operands - 1], anymem))
  3429.         {
  3430.           as_bad (_("memory destination needed for instruction `%s'"
  3431.                     " after `xrelease'"), i.tm.name);
  3432.           return 0;
  3433.         }
  3434.       return 1;
  3435.     }
  3436. }
  3437.  
  3438. /* This is the guts of the machine-dependent assembler.  LINE points to a
  3439.    machine dependent instruction.  This function is supposed to emit
  3440.    the frags/bytes it assembles to.  */
  3441.  
  3442. void
  3443. md_assemble (char *line)
  3444. {
  3445.   unsigned int j;
  3446.   char mnemonic[MAX_MNEM_SIZE];
  3447.   const insn_template *t;
  3448.  
  3449.   /* Initialize globals.  */
  3450.   memset (&i, '\0', sizeof (i));
  3451.   for (j = 0; j < MAX_OPERANDS; j++)
  3452.     i.reloc[j] = NO_RELOC;
  3453.   memset (disp_expressions, '\0', sizeof (disp_expressions));
  3454.   memset (im_expressions, '\0', sizeof (im_expressions));
  3455.   save_stack_p = save_stack;
  3456.  
  3457.   /* First parse an instruction mnemonic & call i386_operand for the operands.
  3458.      We assume that the scrubber has arranged it so that line[0] is the valid
  3459.      start of a (possibly prefixed) mnemonic.  */
  3460.  
  3461.   line = parse_insn (line, mnemonic);
  3462.   if (line == NULL)
  3463.     return;
  3464.  
  3465.   line = parse_operands (line, mnemonic);
  3466.   this_operand = -1;
  3467.   if (line == NULL)
  3468.     return;
  3469.  
  3470.   /* Now we've parsed the mnemonic into a set of templates, and have the
  3471.      operands at hand.  */
  3472.  
  3473.   /* All intel opcodes have reversed operands except for "bound" and
  3474.      "enter".  We also don't reverse intersegment "jmp" and "call"
  3475.      instructions with 2 immediate operands so that the immediate segment
  3476.      precedes the offset, as it does when in AT&T mode. */
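  /* E.g. Intel "mov eax, ebx" is stored internally in AT&T order,
     source first, as if it had been written "mov %ebx, %eax".  */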
  3477.   if (intel_syntax
  3478.       && i.operands > 1
  3479.       && (strcmp (mnemonic, "bound") != 0)
  3480.       && (strcmp (mnemonic, "invlpga") != 0)
  3481.       && !(operand_type_check (i.types[0], imm)
  3482.            && operand_type_check (i.types[1], imm)))
  3483.     swap_operands ();
  3484.  
  3485.   /* The order of the immediates should be reversed
  3486.      for 2 immediates extrq and insertq instructions */
  3487.   if (i.imm_operands == 2
  3488.       && (strcmp (mnemonic, "extrq") == 0
  3489.           || strcmp (mnemonic, "insertq") == 0))
  3490.       swap_2_operands (0, 1);
  3491.  
  3492.   if (i.imm_operands)
  3493.     optimize_imm ();
  3494.  
  3495.   /* Don't optimize displacement for movabs since it only takes 64bit
  3496.      displacement.  */
  3497.   if (i.disp_operands
  3498.       && i.disp_encoding != disp_encoding_32bit
  3499.       && (flag_code != CODE_64BIT
  3500.           || strcmp (mnemonic, "movabs") != 0))
  3501.     optimize_disp ();
  3502.  
  3503.   /* Next, we find a template that matches the given insn,
  3504.      making sure the overlap of the given operands types is consistent
  3505.      with the template operand types.  */
  3506.  
  3507.   if (!(t = match_template ()))
  3508.     return;
  3509.  
  3510.   if (sse_check != check_none
  3511.       && !i.tm.opcode_modifier.noavx
  3512.       && (i.tm.cpu_flags.bitfield.cpusse
  3513.           || i.tm.cpu_flags.bitfield.cpusse2
  3514.           || i.tm.cpu_flags.bitfield.cpusse3
  3515.           || i.tm.cpu_flags.bitfield.cpussse3
  3516.           || i.tm.cpu_flags.bitfield.cpusse4_1
  3517.           || i.tm.cpu_flags.bitfield.cpusse4_2))
  3518.     {
  3519.       (sse_check == check_warning
  3520.        ? as_warn
  3521.        : as_bad) (_("SSE instruction `%s' is used"), i.tm.name);
  3522.     }
  3523.  
  3524.   /* Zap movzx and movsx suffix.  The suffix has been set from
  3525.      "word ptr" or "byte ptr" on the source operand in Intel syntax
  3526.      or extracted from mnemonic in AT&T syntax.  But we'll use
  3527.      the destination register to choose the suffix for encoding.  */
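  /* The mask below matches 0f b6/b7 (movzx) and 0f be/bf (movsx).  */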
  3528.   if ((i.tm.base_opcode & ~9) == 0x0fb6)
  3529.     {
  3530.       /* In Intel syntax, there must be a suffix.  In AT&T syntax, if
  3531.          there is no suffix, the default will be byte extension.  */
  3532.       if (i.reg_operands != 2
  3533.           && !i.suffix
  3534.           && intel_syntax)
  3535.         as_bad (_("ambiguous operand size for `%s'"), i.tm.name);
  3536.  
  3537.       i.suffix = 0;
  3538.     }
  3539.  
  3540.   if (i.tm.opcode_modifier.fwait)
  3541.     if (!add_prefix (FWAIT_OPCODE))
  3542.       return;
  3543.  
  3544.   /* Check if REP prefix is OK.  */
  3545.   if (i.rep_prefix && !i.tm.opcode_modifier.repprefixok)
  3546.     {
  3547.       as_bad (_("invalid instruction `%s' after `%s'"),
  3548.                 i.tm.name, i.rep_prefix);
  3549.       return;
  3550.     }
  3551.  
  3552.   /* Check for lock without a lockable instruction.  Destination operand
  3553.      must be memory unless it is xchg (0x86).  */
  3554.   if (i.prefix[LOCK_PREFIX]
  3555.       && (!i.tm.opcode_modifier.islockable
  3556.           || i.mem_operands == 0
  3557.           || (i.tm.base_opcode != 0x86
  3558.               && !operand_type_check (i.types[i.operands - 1], anymem))))
  3559.     {
  3560.       as_bad (_("expecting lockable instruction after `lock'"));
  3561.       return;
  3562.     }
  3563.  
  3564.   /* Check if HLE prefix is OK.  */
  3565.   if (i.hle_prefix && !check_hle ())
  3566.     return;
  3567.  
  3568.   /* Check BND prefix.  */
  3569.   if (i.bnd_prefix && !i.tm.opcode_modifier.bndprefixok)
  3570.     as_bad (_("expecting valid branch instruction after `bnd'"));
  3571.  
  3572.   if (i.tm.cpu_flags.bitfield.cpumpx
  3573.       && flag_code == CODE_64BIT
  3574.       && i.prefix[ADDR_PREFIX])
  3575.     as_bad (_("32-bit address isn't allowed in 64-bit MPX instructions."));
  3576.  
  3577.   /* Insert BND prefix.  */
  3578.   if (add_bnd_prefix
  3579.       && i.tm.opcode_modifier.bndprefixok
  3580.       && !i.prefix[BND_PREFIX])
  3581.     add_prefix (BND_PREFIX_OPCODE);
  3582.  
  3583.   /* Check string instruction segment overrides.  */
  3584.   if (i.tm.opcode_modifier.isstring && i.mem_operands != 0)
  3585.     {
  3586.       if (!check_string ())
  3587.         return;
  3588.       i.disp_operands = 0;
  3589.     }
  3590.  
  3591.   if (!process_suffix ())
  3592.     return;
  3593.  
  3594.   /* Update operand types.  */
  3595.   for (j = 0; j < i.operands; j++)
  3596.     i.types[j] = operand_type_and (i.types[j], i.tm.operand_types[j]);
  3597.  
  3598.   /* Make any still-unresolved immediate matches conform to the immediate
  3599.      size given by i.suffix.  */
  3600.   if (!finalize_imm ())
  3601.     return;
  3602.  
  3603.   if (i.types[0].bitfield.imm1)
  3604.     i.imm_operands = 0; /* kludge for shift insns.  */
  3605.  
  3606.   /* We only need to check those implicit registers for instructions
  3607.      with 3 operands or less.  */
  3608.   if (i.operands <= 3)
  3609.     for (j = 0; j < i.operands; j++)
  3610.       if (i.types[j].bitfield.inoutportreg
  3611.           || i.types[j].bitfield.shiftcount
  3612.           || i.types[j].bitfield.acc
  3613.           || i.types[j].bitfield.floatacc)
  3614.         i.reg_operands--;
  3615.  
  3616.   /* ImmExt should be processed after SSE2AVX.  */
  3617.   if (!i.tm.opcode_modifier.sse2avx
  3618.       && i.tm.opcode_modifier.immext)
  3619.     process_immext ();
  3620.  
  3621.   /* For insns with operands there are more diddles to do to the opcode.  */
  3622.   if (i.operands)
  3623.     {
  3624.       if (!process_operands ())
  3625.         return;
  3626.     }
  3627.   else if (!quiet_warnings && i.tm.opcode_modifier.ugh)
  3628.     {
  3629.       /* UnixWare fsub no args is alias for fsubp, fadd -> faddp, etc.  */
  3630.       as_warn (_("translating to `%sp'"), i.tm.name);
  3631.     }
  3632.  
  3633.   if (i.tm.opcode_modifier.vex || i.tm.opcode_modifier.evex)
  3634.     {
  3635.       if (flag_code == CODE_16BIT)
  3636.         {
  3637.           as_bad (_("instruction `%s' isn't supported in 16-bit mode."),
  3638.                   i.tm.name);
  3639.           return;
  3640.         }
  3641.  
  3642.       if (i.tm.opcode_modifier.vex)
  3643.         build_vex_prefix (t);
  3644.       else
  3645.         build_evex_prefix ();
  3646.     }
  3647.  
  3648.   /* Handle conversion of 'int $3' --> special int3 insn.  XOP or FMA4
  3649.      instructions may define INT_OPCODE as well, so avoid this corner
  3650.      case for those instructions that use MODRM.  */
  3651.   if (i.tm.base_opcode == INT_OPCODE
  3652.       && !i.tm.opcode_modifier.modrm
  3653.       && i.op[0].imms->X_add_number == 3)
  3654.     {
  3655.       i.tm.base_opcode = INT3_OPCODE;
  3656.       i.imm_operands = 0;
  3657.     }
  3658.  
  3659.   if ((i.tm.opcode_modifier.jump
  3660.        || i.tm.opcode_modifier.jumpbyte
  3661.        || i.tm.opcode_modifier.jumpdword)
  3662.       && i.op[0].disps->X_op == O_constant)
  3663.     {
  3664.       /* Convert "jmp constant" (and "call constant") to a jump (call) to
  3665.          the absolute address given by the constant.  Since ix86 jumps and
  3666.          calls are pc relative, we need to generate a reloc.  */
  3667.       i.op[0].disps->X_add_symbol = &abs_symbol;
  3668.       i.op[0].disps->X_op = O_symbol;
  3669.     }
  3670.  
  3671.   if (i.tm.opcode_modifier.rex64)
  3672.     i.rex |= REX_W;
  3673.  
  3674.   /* For 8 bit registers we need an empty rex prefix.  Also if the
  3675.      instruction already has a prefix, we need to convert old
  3676.      registers to new ones.  */
  3677.  
  3678.   if ((i.types[0].bitfield.reg8
  3679.        && (i.op[0].regs->reg_flags & RegRex64) != 0)
  3680.       || (i.types[1].bitfield.reg8
  3681.           && (i.op[1].regs->reg_flags & RegRex64) != 0)
  3682.       || ((i.types[0].bitfield.reg8
  3683.            || i.types[1].bitfield.reg8)
  3684.           && i.rex != 0))
  3685.     {
  3686.       int x;
  3687.  
  3688.       i.rex |= REX_OPCODE;
  3689.       for (x = 0; x < 2; x++)
  3690.         {
  3691.           /* Look for 8 bit operand that uses old registers.  */
  3692.           if (i.types[x].bitfield.reg8
  3693.               && (i.op[x].regs->reg_flags & RegRex64) == 0)
  3694.             {
  3695.               /* In case it is "hi" register, give up.  */
  3696.               if (i.op[x].regs->reg_num > 3)
  3697.                 as_bad (_("can't encode register '%s%s' in an "
  3698.                           "instruction requiring REX prefix."),
  3699.                         register_prefix, i.op[x].regs->reg_name);
  3700.  
  3701.               /* Otherwise it is equivalent to the extended register.
  3702.                  Since the encoding doesn't change this is merely
  3703.                  cosmetic cleanup for debug output.  */
  3704.  
  3705.               i.op[x].regs = i.op[x].regs + 8;
  3706.             }
  3707.         }
  3708.     }
  3709.  
  3710.   if (i.rex != 0)
  3711.     add_prefix (REX_OPCODE | i.rex);
  3712.  
  3713.   /* We are ready to output the insn.  */
  3714.   output_insn ();
  3715. }
  3716.  
  3717. static char *
  3718. parse_insn (char *line, char *mnemonic)
  3719. {
  3720.   char *l = line;
  3721.   char *token_start = l;
  3722.   char *mnem_p;
  3723.   int supported;
  3724.   const insn_template *t;
  3725.   char *dot_p = NULL;
  3726.  
  3727.   while (1)
  3728.     {
  3729.       mnem_p = mnemonic;
  3730.       while ((*mnem_p = mnemonic_chars[(unsigned char) *l]) != 0)
  3731.         {
  3732.           if (*mnem_p == '.')
  3733.             dot_p = mnem_p;
  3734.           mnem_p++;
  3735.           if (mnem_p >= mnemonic + MAX_MNEM_SIZE)
  3736.             {
  3737.               as_bad (_("no such instruction: `%s'"), token_start);
  3738.               return NULL;
  3739.             }
  3740.           l++;
  3741.         }
  3742.       if (!is_space_char (*l)
  3743.           && *l != END_OF_INSN
  3744.           && (intel_syntax
  3745.               || (*l != PREFIX_SEPARATOR
  3746.                   && *l != ',')))
  3747.         {
  3748.           as_bad (_("invalid character %s in mnemonic"),
  3749.                   output_invalid (*l));
  3750.           return NULL;
  3751.         }
  3752.       if (token_start == l)
  3753.         {
  3754.           if (!intel_syntax && *l == PREFIX_SEPARATOR)
  3755.             as_bad (_("expecting prefix; got nothing"));
  3756.           else
  3757.             as_bad (_("expecting mnemonic; got nothing"));
  3758.           return NULL;
  3759.         }
  3760.  
  3761.       /* Look up instruction (or prefix) via hash table.  */
  3762.       current_templates = (const templates *) hash_find (op_hash, mnemonic);
  3763.  
  3764.       if (*l != END_OF_INSN
  3765.           && (!is_space_char (*l) || l[1] != END_OF_INSN)
  3766.           && current_templates
  3767.           && current_templates->start->opcode_modifier.isprefix)
  3768.         {
  3769.           if (!cpu_flags_check_cpu64 (current_templates->start->cpu_flags))
  3770.             {
  3771.               as_bad ((flag_code != CODE_64BIT
  3772.                        ? _("`%s' is only supported in 64-bit mode")
  3773.                        : _("`%s' is not supported in 64-bit mode")),
  3774.                       current_templates->start->name);
  3775.               return NULL;
  3776.             }
  3777.           /* If we are in 16-bit mode, do not allow addr16 or data16.
  3778.              Similarly, in 32-bit mode, do not allow addr32 or data32.  */
  3779.           if ((current_templates->start->opcode_modifier.size16
  3780.                || current_templates->start->opcode_modifier.size32)
  3781.               && flag_code != CODE_64BIT
  3782.               && (current_templates->start->opcode_modifier.size32
  3783.                   ^ (flag_code == CODE_16BIT)))
  3784.             {
  3785.               as_bad (_("redundant %s prefix"),
  3786.                       current_templates->start->name);
  3787.               return NULL;
  3788.             }
  3789.           /* Add prefix, checking for repeated prefixes.  */
  3790.           switch (add_prefix (current_templates->start->base_opcode))
  3791.             {
  3792.             case PREFIX_EXIST:
  3793.               return NULL;
  3794.             case PREFIX_REP:
  3795.               if (current_templates->start->cpu_flags.bitfield.cpuhle)
  3796.                 i.hle_prefix = current_templates->start->name;
  3797.               else if (current_templates->start->cpu_flags.bitfield.cpumpx)
  3798.                 i.bnd_prefix = current_templates->start->name;
  3799.               else
  3800.                 i.rep_prefix = current_templates->start->name;
  3801.               break;
  3802.             default:
  3803.               break;
  3804.             }
  3805.           /* Skip past PREFIX_SEPARATOR and reset token_start.  */
  3806.           token_start = ++l;
  3807.         }
  3808.       else
  3809.         break;
  3810.     }
  3811.  
  3812.   if (!current_templates)
  3813.     {
  3814.       /* Check if we should swap operand or force 32bit displacement in
  3815.          encoding.  */
  3816.       if (mnem_p - 2 == dot_p && dot_p[1] == 's')
  3817.         i.swap_operand = 1;
  3818.       else if (mnem_p - 3 == dot_p
  3819.                && dot_p[1] == 'd'
  3820.                && dot_p[2] == '8')
  3821.         i.disp_encoding = disp_encoding_8bit;
  3822.       else if (mnem_p - 4 == dot_p
  3823.                && dot_p[1] == 'd'
  3824.                && dot_p[2] == '3'
  3825.                && dot_p[3] == '2')
  3826.         i.disp_encoding = disp_encoding_32bit;
  3827.       else
  3828.         goto check_suffix;
  3829.       mnem_p = dot_p;
  3830.       *dot_p = '\0';
  3831.       current_templates = (const templates *) hash_find (op_hash, mnemonic);
  3832.     }
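  /* For example, "mov.s %eax, %ebx" selects the non-default encoding of
     an instruction that has two equivalent ModRM forms, while the ".d8"
     and ".d32" pseudo-suffixes force an 8-bit or 32-bit displacement.  */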
  3833.  
  3834.   if (!current_templates)
  3835.     {
  3836. check_suffix:
  3837.       /* See if we can get a match by trimming off a suffix.  */
  3838.       switch (mnem_p[-1])
  3839.         {
  3840.         case WORD_MNEM_SUFFIX:
  3841.           if (intel_syntax && (intel_float_operand (mnemonic) & 2))
  3842.             i.suffix = SHORT_MNEM_SUFFIX;
  3843.           else
  3844.         case BYTE_MNEM_SUFFIX:
  3845.         case QWORD_MNEM_SUFFIX:
  3846.           i.suffix = mnem_p[-1];
  3847.           mnem_p[-1] = '\0';
  3848.           current_templates = (const templates *) hash_find (op_hash,
  3849.                                                              mnemonic);
  3850.           break;
  3851.         case SHORT_MNEM_SUFFIX:
  3852.         case LONG_MNEM_SUFFIX:
  3853.           if (!intel_syntax)
  3854.             {
  3855.               i.suffix = mnem_p[-1];
  3856.               mnem_p[-1] = '\0';
  3857.               current_templates = (const templates *) hash_find (op_hash,
  3858.                                                                  mnemonic);
  3859.             }
  3860.           break;
  3861.  
  3862.           /* Intel Syntax.  */
  3863.         case 'd':
  3864.           if (intel_syntax)
  3865.             {
  3866.               if (intel_float_operand (mnemonic) == 1)
  3867.                 i.suffix = SHORT_MNEM_SUFFIX;
  3868.               else
  3869.                 i.suffix = LONG_MNEM_SUFFIX;
  3870.               mnem_p[-1] = '\0';
  3871.               current_templates = (const templates *) hash_find (op_hash,
  3872.                                                                  mnemonic);
  3873.             }
  3874.           break;
  3875.         }
  3876.       if (!current_templates)
  3877.         {
  3878.           as_bad (_("no such instruction: `%s'"), token_start);
  3879.           return NULL;
  3880.         }
  3881.     }
  3882.  
  3883.   if (current_templates->start->opcode_modifier.jump
  3884.       || current_templates->start->opcode_modifier.jumpbyte)
  3885.     {
  3886.       /* Check for a branch hint.  We allow ",pt" and ",pn" for
  3887.          predict taken and predict not taken respectively.
  3888.          I'm not sure that branch hints actually do anything on loop
  3889.          and jcxz insns (JumpByte) for current Pentium4 chips.  They
  3890.          may work in the future and it doesn't hurt to accept them
  3891.          now.  */
  3892.       if (l[0] == ',' && l[1] == 'p')
  3893.         {
  3894.           if (l[2] == 't')
  3895.             {
  3896.               if (!add_prefix (DS_PREFIX_OPCODE))
  3897.                 return NULL;
  3898.               l += 3;
  3899.             }
  3900.           else if (l[2] == 'n')
  3901.             {
  3902.               if (!add_prefix (CS_PREFIX_OPCODE))
  3903.                 return NULL;
  3904.               l += 3;
  3905.             }
  3906.         }
  3907.     }
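  /* For example, "jne target,pt" adds a DS segment prefix byte (0x3e)
     as a "predict taken" hint, and ",pn" adds a CS prefix (0x2e) for
     "predict not taken".  */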
  3908.   /* Any other comma loses.  */
  3909.   if (*l == ',')
  3910.     {
  3911.       as_bad (_("invalid character %s in mnemonic"),
  3912.               output_invalid (*l));
  3913.       return NULL;
  3914.     }
  3915.  
  3916.   /* Check if instruction is supported on specified architecture.  */
  3917.   supported = 0;
  3918.   for (t = current_templates->start; t < current_templates->end; ++t)
  3919.     {
  3920.       supported |= cpu_flags_match (t);
  3921.       if (supported == CPU_FLAGS_PERFECT_MATCH)
  3922.         goto skip;
  3923.     }
  3924.  
  3925.   if (!(supported & CPU_FLAGS_64BIT_MATCH))
  3926.     {
  3927.       as_bad (flag_code == CODE_64BIT
  3928.               ? _("`%s' is not supported in 64-bit mode")
  3929.               : _("`%s' is only supported in 64-bit mode"),
  3930.               current_templates->start->name);
  3931.       return NULL;
  3932.     }
  3933.   if (supported != CPU_FLAGS_PERFECT_MATCH)
  3934.     {
  3935.       as_bad (_("`%s' is not supported on `%s%s'"),
  3936.               current_templates->start->name,
  3937.               cpu_arch_name ? cpu_arch_name : default_arch,
  3938.               cpu_sub_arch_name ? cpu_sub_arch_name : "");
  3939.       return NULL;
  3940.     }
  3941.  
  3942. skip:
  3943.   if (!cpu_arch_flags.bitfield.cpui386
  3944.       && (flag_code != CODE_16BIT))
  3945.     {
  3946.       as_warn (_("use .code16 to ensure correct addressing mode"));
  3947.     }
  3948.  
  3949.   return l;
  3950. }
  3951.  
  3952. static char *
  3953. parse_operands (char *l, const char *mnemonic)
  3954. {
  3955.   char *token_start;
  3956.  
  3957.   /* 1 if operand is pending after ','.  */
  3958.   unsigned int expecting_operand = 0;
  3959.  
  3960.   /* Non-zero if operand parens not balanced.  */
  3961.   unsigned int paren_not_balanced;
  3962.  
  3963.   while (*l != END_OF_INSN)
  3964.     {
  3965.       /* Skip optional white space before operand.  */
  3966.       if (is_space_char (*l))
  3967.         ++l;
  3968.       if (!is_operand_char (*l) && *l != END_OF_INSN && *l != '"')
  3969.         {
  3970.           as_bad (_("invalid character %s before operand %d"),
  3971.                   output_invalid (*l),
  3972.                   i.operands + 1);
  3973.           return NULL;
  3974.         }
  3975.       token_start = l;  /* After white space.  */
  3976.       paren_not_balanced = 0;
  3977.       while (paren_not_balanced || *l != ',')
  3978.         {
  3979.           if (*l == END_OF_INSN)
  3980.             {
  3981.               if (paren_not_balanced)
  3982.                 {
  3983.                   if (!intel_syntax)
  3984.                     as_bad (_("unbalanced parenthesis in operand %d."),
  3985.                             i.operands + 1);
  3986.                   else
  3987.                     as_bad (_("unbalanced brackets in operand %d."),
  3988.                             i.operands + 1);
  3989.                   return NULL;
  3990.                 }
  3991.               else
  3992.                 break;  /* we are done */
  3993.             }
  3994.           else if (!is_operand_char (*l) && !is_space_char (*l) && *l != '"')
  3995.             {
  3996.               as_bad (_("invalid character %s in operand %d"),
  3997.                       output_invalid (*l),
  3998.                       i.operands + 1);
  3999.               return NULL;
  4000.             }
  4001.           if (!intel_syntax)
  4002.             {
  4003.               if (*l == '(')
  4004.                 ++paren_not_balanced;
  4005.               if (*l == ')')
  4006.                 --paren_not_balanced;
  4007.             }
  4008.           else
  4009.             {
  4010.               if (*l == '[')
  4011.                 ++paren_not_balanced;
  4012.               if (*l == ']')
  4013.                 --paren_not_balanced;
  4014.             }
  4015.           l++;
  4016.         }
  4017.       if (l != token_start)
  4018.         {                       /* Yes, we've read in another operand.  */
  4019.           unsigned int operand_ok;
  4020.           this_operand = i.operands++;
  4021.           i.types[this_operand].bitfield.unspecified = 1;
  4022.           if (i.operands > MAX_OPERANDS)
  4023.             {
  4024.               as_bad (_("spurious operands; (%d operands/instruction max)"),
  4025.                       MAX_OPERANDS);
  4026.               return NULL;
  4027.             }
  4028.           /* Now parse operand adding info to 'i' as we go along.  */
  4029.           END_STRING_AND_SAVE (l);
  4030.  
  4031.           if (intel_syntax)
  4032.             operand_ok =
  4033.               i386_intel_operand (token_start,
  4034.                                   intel_float_operand (mnemonic));
  4035.           else
  4036.             operand_ok = i386_att_operand (token_start);
  4037.  
  4038.           RESTORE_END_STRING (l);
  4039.           if (!operand_ok)
  4040.             return NULL;
  4041.         }
  4042.       else
  4043.         {
  4044.           if (expecting_operand)
  4045.             {
  4046.             expecting_operand_after_comma:
  4047.               as_bad (_("expecting operand after ','; got nothing"));
  4048.               return NULL;
  4049.             }
  4050.           if (*l == ',')
  4051.             {
  4052.               as_bad (_("expecting operand before ','; got nothing"));
  4053.               return NULL;
  4054.             }
  4055.         }
  4056.  
  4057.       /* Now *l must be either ',' or END_OF_INSN.  */
  4058.       if (*l == ',')
  4059.         {
  4060.           if (*++l == END_OF_INSN)
  4061.             {
  4062.               /* Nothing follows the comma: complain.  */
  4063.               goto expecting_operand_after_comma;
  4064.             }
  4065.           expecting_operand = 1;
  4066.         }
  4067.     }
  4068.   return l;
  4069. }
  4070.  
  4071. static void
  4072. swap_2_operands (int xchg1, int xchg2)
  4073. {
  4074.   union i386_op temp_op;
  4075.   i386_operand_type temp_type;
  4076.   enum bfd_reloc_code_real temp_reloc;
  4077.  
  4078.   temp_type = i.types[xchg2];
  4079.   i.types[xchg2] = i.types[xchg1];
  4080.   i.types[xchg1] = temp_type;
  4081.   temp_op = i.op[xchg2];
  4082.   i.op[xchg2] = i.op[xchg1];
  4083.   i.op[xchg1] = temp_op;
  4084.   temp_reloc = i.reloc[xchg2];
  4085.   i.reloc[xchg2] = i.reloc[xchg1];
  4086.   i.reloc[xchg1] = temp_reloc;
  4087.  
  4088.   if (i.mask)
  4089.     {
  4090.       if (i.mask->operand == xchg1)
  4091.         i.mask->operand = xchg2;
  4092.       else if (i.mask->operand == xchg2)
  4093.         i.mask->operand = xchg1;
  4094.     }
  4095.   if (i.broadcast)
  4096.     {
  4097.       if (i.broadcast->operand == xchg1)
  4098.         i.broadcast->operand = xchg2;
  4099.       else if (i.broadcast->operand == xchg2)
  4100.         i.broadcast->operand = xchg1;
  4101.     }
  4102.   if (i.rounding)
  4103.     {
  4104.       if (i.rounding->operand == xchg1)
  4105.         i.rounding->operand = xchg2;
  4106.       else if (i.rounding->operand == xchg2)
  4107.         i.rounding->operand = xchg1;
  4108.     }
  4109. }
  4110.  
  4111. static void
  4112. swap_operands (void)
  4113. {
  4114.   switch (i.operands)
  4115.     {
  4116.     case 5:
  4117.     case 4:
  4118.       swap_2_operands (1, i.operands - 2);
  4119.     case 3:
  4120.     case 2:
  4121.       swap_2_operands (0, i.operands - 1);
  4122.       break;
  4123.     default:
  4124.       abort ();
  4125.     }
  4126.  
  4127.   if (i.mem_operands == 2)
  4128.     {
  4129.       const seg_entry *temp_seg;
  4130.       temp_seg = i.seg[0];
  4131.       i.seg[0] = i.seg[1];
  4132.       i.seg[1] = temp_seg;
  4133.     }
  4134. }
  4135.  
  4136. /* Try to ensure constant immediates are represented in the smallest
  4137.    opcode possible.  */
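/* For example, with a dword-sized register operand "add $3, %eax" can
   use the sign-extended imm8 form (opcode 0x83) rather than a full
   32-bit immediate (opcode 0x81).  */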
  4138. static void
  4139. optimize_imm (void)
  4140. {
  4141.   char guess_suffix = 0;
  4142.   int op;
  4143.  
  4144.   if (i.suffix)
  4145.     guess_suffix = i.suffix;
  4146.   else if (i.reg_operands)
  4147.     {
  4148.       /* Figure out a suffix from the last register operand specified.
  4149.          We can't do this properly yet, i.e. excluding InOutPortReg,
  4150.          but the following works for instructions with immediates.
  4151.          In any case, we can't set i.suffix yet.  */
  4152.       for (op = i.operands; --op >= 0;)
  4153.         if (i.types[op].bitfield.reg8)
  4154.           {
  4155.             guess_suffix = BYTE_MNEM_SUFFIX;
  4156.             break;
  4157.           }
  4158.         else if (i.types[op].bitfield.reg16)
  4159.           {
  4160.             guess_suffix = WORD_MNEM_SUFFIX;
  4161.             break;
  4162.           }
  4163.         else if (i.types[op].bitfield.reg32)
  4164.           {
  4165.             guess_suffix = LONG_MNEM_SUFFIX;
  4166.             break;
  4167.           }
  4168.         else if (i.types[op].bitfield.reg64)
  4169.           {
  4170.             guess_suffix = QWORD_MNEM_SUFFIX;
  4171.             break;
  4172.           }
  4173.     }
  4174.   else if ((flag_code == CODE_16BIT) ^ (i.prefix[DATA_PREFIX] != 0))
  4175.     guess_suffix = WORD_MNEM_SUFFIX;
  4176.  
  4177.   for (op = i.operands; --op >= 0;)
  4178.     if (operand_type_check (i.types[op], imm))
  4179.       {
  4180.         switch (i.op[op].imms->X_op)
  4181.           {
  4182.           case O_constant:
  4183.             /* If a suffix is given, this operand may be shortened.  */
  4184.             switch (guess_suffix)
  4185.               {
  4186.               case LONG_MNEM_SUFFIX:
  4187.                 i.types[op].bitfield.imm32 = 1;
  4188.                 i.types[op].bitfield.imm64 = 1;
  4189.                 break;
  4190.               case WORD_MNEM_SUFFIX:
  4191.                 i.types[op].bitfield.imm16 = 1;
  4192.                 i.types[op].bitfield.imm32 = 1;
  4193.                 i.types[op].bitfield.imm32s = 1;
  4194.                 i.types[op].bitfield.imm64 = 1;
  4195.                 break;
  4196.               case BYTE_MNEM_SUFFIX:
  4197.                 i.types[op].bitfield.imm8 = 1;
  4198.                 i.types[op].bitfield.imm8s = 1;
  4199.                 i.types[op].bitfield.imm16 = 1;
  4200.                 i.types[op].bitfield.imm32 = 1;
  4201.                 i.types[op].bitfield.imm32s = 1;
  4202.                 i.types[op].bitfield.imm64 = 1;
  4203.                 break;
  4204.               }
  4205.  
  4206.             /* If this operand is at most 16 bits, convert it
  4207.                to a signed 16 bit number before trying to see
  4208.                whether it will fit in an even smaller size.
  4209.                This allows a 16-bit operand such as $0xffe0 to
  4210.                be recognised as within Imm8S range.  */
  4211.             if ((i.types[op].bitfield.imm16)
  4212.                 && (i.op[op].imms->X_add_number & ~(offsetT) 0xffff) == 0)
  4213.               {
  4214.                 i.op[op].imms->X_add_number =
  4215.                   (((i.op[op].imms->X_add_number & 0xffff) ^ 0x8000) - 0x8000);
  4216.               }
  4217.             if ((i.types[op].bitfield.imm32)
  4218.                 && ((i.op[op].imms->X_add_number & ~(((offsetT) 2 << 31) - 1))
  4219.                     == 0))
  4220.               {
  4221.                 i.op[op].imms->X_add_number = ((i.op[op].imms->X_add_number
  4222.                                                 ^ ((offsetT) 1 << 31))
  4223.                                                - ((offsetT) 1 << 31));
  4224.               }
  4225.             i.types[op]
  4226.               = operand_type_or (i.types[op],
  4227.                                  smallest_imm_type (i.op[op].imms->X_add_number));
  4228.  
  4229.             /* We must avoid matching of Imm32 templates when 64bit
  4230.                only immediate is available.  */
  4231.             if (guess_suffix == QWORD_MNEM_SUFFIX)
  4232.               i.types[op].bitfield.imm32 = 0;
  4233.             break;
  4234.  
  4235.           case O_absent:
  4236.           case O_register:
  4237.             abort ();
  4238.  
  4239.             /* Symbols and expressions.  */
  4240.           default:
  4241.             /* Convert symbolic operand to proper sizes for matching, but don't
  4242.                prevent matching a set of insns that only supports sizes other
  4243.                than those matching the insn suffix.  */
  4244.             {
  4245.               i386_operand_type mask, allowed;
  4246.               const insn_template *t;
  4247.  
  4248.               operand_type_set (&mask, 0);
  4249.               operand_type_set (&allowed, 0);
  4250.  
  4251.               for (t = current_templates->start;
  4252.                    t < current_templates->end;
  4253.                    ++t)
  4254.                 allowed = operand_type_or (allowed,
  4255.                                            t->operand_types[op]);
  4256.               switch (guess_suffix)
  4257.                 {
  4258.                 case QWORD_MNEM_SUFFIX:
  4259.                   mask.bitfield.imm64 = 1;
  4260.                   mask.bitfield.imm32s = 1;
  4261.                   break;
  4262.                 case LONG_MNEM_SUFFIX:
  4263.                   mask.bitfield.imm32 = 1;
  4264.                   break;
  4265.                 case WORD_MNEM_SUFFIX:
  4266.                   mask.bitfield.imm16 = 1;
  4267.                   break;
  4268.                 case BYTE_MNEM_SUFFIX:
  4269.                   mask.bitfield.imm8 = 1;
  4270.                   break;
  4271.                 default:
  4272.                   break;
  4273.                 }
  4274.               allowed = operand_type_and (mask, allowed);
  4275.               if (!operand_type_all_zero (&allowed))
  4276.                 i.types[op] = operand_type_and (i.types[op], mask);
  4277.             }
  4278.             break;
  4279.           }
  4280.       }
  4281. }
  4282.  
  4283. /* Try to use the smallest displacement type too.  */
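/* For example, "movl %eax, 8(%rbp)" needs only a disp8, not a disp32.  */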
  4284. static void
  4285. optimize_disp (void)
  4286. {
  4287.   int op;
  4288.  
  4289.   for (op = i.operands; --op >= 0;)
  4290.     if (operand_type_check (i.types[op], disp))
  4291.       {
  4292.         if (i.op[op].disps->X_op == O_constant)
  4293.           {
  4294.             offsetT op_disp = i.op[op].disps->X_add_number;
  4295.  
  4296.             if (i.types[op].bitfield.disp16
  4297.                 && (op_disp & ~(offsetT) 0xffff) == 0)
  4298.               {
  4299.                 /* If this operand is at most 16 bits, convert
  4300.                    to a signed 16 bit number and don't use 64bit
  4301.                    displacement.  */
  4302.                 op_disp = (((op_disp & 0xffff) ^ 0x8000) - 0x8000);
  4303.                 i.types[op].bitfield.disp64 = 0;
  4304.               }
  4305.             if (i.types[op].bitfield.disp32
  4306.                 && (op_disp & ~(((offsetT) 2 << 31) - 1)) == 0)
  4307.               {
  4308.                 /* If this operand is at most 32 bits, convert
  4309.                    to a signed 32 bit number and don't use 64bit
  4310.                    displacement.  */
  4311.                 op_disp &= (((offsetT) 2 << 31) - 1);
  4312.                 op_disp = (op_disp ^ ((offsetT) 1 << 31)) - ((addressT) 1 << 31);
  4313.                 i.types[op].bitfield.disp64 = 0;
  4314.               }
  4315.             if (!op_disp && i.types[op].bitfield.baseindex)
  4316.               {
  4317.                 i.types[op].bitfield.disp8 = 0;
  4318.                 i.types[op].bitfield.disp16 = 0;
  4319.                 i.types[op].bitfield.disp32 = 0;
  4320.                 i.types[op].bitfield.disp32s = 0;
  4321.                 i.types[op].bitfield.disp64 = 0;
  4322.                 i.op[op].disps = 0;
  4323.                 i.disp_operands--;
  4324.               }
  4325.             else if (flag_code == CODE_64BIT)
  4326.               {
  4327.                 if (fits_in_signed_long (op_disp))
  4328.                   {
  4329.                     i.types[op].bitfield.disp64 = 0;
  4330.                     i.types[op].bitfield.disp32s = 1;
  4331.                   }
  4332.                 if (i.prefix[ADDR_PREFIX]
  4333.                     && fits_in_unsigned_long (op_disp))
  4334.                   i.types[op].bitfield.disp32 = 1;
  4335.               }
  4336.             if ((i.types[op].bitfield.disp32
  4337.                  || i.types[op].bitfield.disp32s
  4338.                  || i.types[op].bitfield.disp16)
  4339.                 && fits_in_signed_byte (op_disp))
  4340.               i.types[op].bitfield.disp8 = 1;
  4341.           }
  4342.         else if (i.reloc[op] == BFD_RELOC_386_TLS_DESC_CALL
  4343.                  || i.reloc[op] == BFD_RELOC_X86_64_TLSDESC_CALL)
  4344.           {
  4345.             fix_new_exp (frag_now, frag_more (0) - frag_now->fr_literal, 0,
  4346.                          i.op[op].disps, 0, i.reloc[op]);
  4347.             i.types[op].bitfield.disp8 = 0;
  4348.             i.types[op].bitfield.disp16 = 0;
  4349.             i.types[op].bitfield.disp32 = 0;
  4350.             i.types[op].bitfield.disp32s = 0;
  4351.             i.types[op].bitfield.disp64 = 0;
  4352.           }
  4353.         else
  4354.           /* We only support 64bit displacement on constants.  */
  4355.           i.types[op].bitfield.disp64 = 0;
  4356.       }
  4357. }
  4358.  
  4359. /* Check if operands are valid for the instruction.  */
  4360.  
  4361. static int
  4362. check_VecOperands (const insn_template *t)
  4363. {
  4364.   unsigned int op;
  4365.  
  4366.   /* Without VSIB byte, we can't have a vector register for index.  */
  4367.   if (!t->opcode_modifier.vecsib
  4368.       && i.index_reg
  4369.       && (i.index_reg->reg_type.bitfield.regxmm
  4370.           || i.index_reg->reg_type.bitfield.regymm
  4371.           || i.index_reg->reg_type.bitfield.regzmm))
  4372.     {
  4373.       i.error = unsupported_vector_index_register;
  4374.       return 1;
  4375.     }
  4376.  
  4377.   /* Check if default mask is allowed.  */
  4378.   if (t->opcode_modifier.nodefmask
  4379.       && (!i.mask || i.mask->mask->reg_num == 0))
  4380.     {
  4381.       i.error = no_default_mask;
  4382.       return 1;
  4383.     }
  4384.  
  4385.   /* For VSIB byte, we need a vector register for index, and all vector
  4386.      registers must be distinct.  */
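  /* E.g. for "vgatherdps %xmm2, (%rax,%xmm1,4), %xmm0" the mask (%xmm2),
     index (%xmm1) and destination (%xmm0) registers must all differ.  */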
  4387.   if (t->opcode_modifier.vecsib)
  4388.     {
  4389.       if (!i.index_reg
  4390.           || !((t->opcode_modifier.vecsib == VecSIB128
  4391.                 && i.index_reg->reg_type.bitfield.regxmm)
  4392.                || (t->opcode_modifier.vecsib == VecSIB256
  4393.                    && i.index_reg->reg_type.bitfield.regymm)
  4394.                || (t->opcode_modifier.vecsib == VecSIB512
  4395.                    && i.index_reg->reg_type.bitfield.regzmm)))
  4396.       {
  4397.         i.error = invalid_vsib_address;
  4398.         return 1;
  4399.       }
  4400.  
  4401.       gas_assert (i.reg_operands == 2 || i.mask);
  4402.       if (i.reg_operands == 2 && !i.mask)
  4403.         {
  4404.           gas_assert (i.types[0].bitfield.regxmm
  4405.                       || i.types[0].bitfield.regymm);
  4406.           gas_assert (i.types[2].bitfield.regxmm
  4407.                       || i.types[2].bitfield.regymm);
  4408.           if (operand_check == check_none)
  4409.             return 0;
  4410.           if (register_number (i.op[0].regs)
  4411.               != register_number (i.index_reg)
  4412.               && register_number (i.op[2].regs)
  4413.                  != register_number (i.index_reg)
  4414.               && register_number (i.op[0].regs)
  4415.                  != register_number (i.op[2].regs))
  4416.             return 0;
  4417.           if (operand_check == check_error)
  4418.             {
  4419.               i.error = invalid_vector_register_set;
  4420.               return 1;
  4421.             }
  4422.           as_warn (_("mask, index, and destination registers should be distinct"));
  4423.         }
  4424.       else if (i.reg_operands == 1 && i.mask)
  4425.         {
  4426.           if ((i.types[1].bitfield.regymm
  4427.                || i.types[1].bitfield.regzmm)
  4428.               && (register_number (i.op[1].regs)
  4429.                   == register_number (i.index_reg)))
  4430.             {
  4431.               if (operand_check == check_error)
  4432.                 {
  4433.                   i.error = invalid_vector_register_set;
  4434.                   return 1;
  4435.                 }
  4436.               if (operand_check != check_none)
  4437.                 as_warn (_("index and destination registers should be distinct"));
  4438.             }
  4439.         }
  4440.     }
  4441.  
  4442.   /* Check if broadcast is supported by the instruction and is applied
  4443.      to the memory operand.  */
  4444.   if (i.broadcast)
  4445.     {
  4446.       int broadcasted_opnd_size;
  4447.  
  4448.       /* Check if specified broadcast is supported in this instruction,
  4449.          and it's applied to memory operand of DWORD or QWORD type,
  4450.          depending on VecESize.  */
  4451.       if (i.broadcast->type != t->opcode_modifier.broadcast
  4452.           || !i.types[i.broadcast->operand].bitfield.mem
  4453.           || (t->opcode_modifier.vecesize == 0
  4454.               && !i.types[i.broadcast->operand].bitfield.dword
  4455.               && !i.types[i.broadcast->operand].bitfield.unspecified)
  4456.           || (t->opcode_modifier.vecesize == 1
  4457.               && !i.types[i.broadcast->operand].bitfield.qword
  4458.               && !i.types[i.broadcast->operand].bitfield.unspecified))
  4459.         goto bad_broadcast;
  4460.  
  4461.       broadcasted_opnd_size = t->opcode_modifier.vecesize ? 64 : 32;
  4462.       if (i.broadcast->type == BROADCAST_1TO16)
  4463.         broadcasted_opnd_size <<= 4; /* Broadcast 1to16.  */
  4464.       else if (i.broadcast->type == BROADCAST_1TO8)
  4465.         broadcasted_opnd_size <<= 3; /* Broadcast 1to8.  */
  4466.       else if (i.broadcast->type == BROADCAST_1TO4)
  4467.         broadcasted_opnd_size <<= 2; /* Broadcast 1to4.  */
  4468.       else if (i.broadcast->type == BROADCAST_1TO2)
  4469.         broadcasted_opnd_size <<= 1; /* Broadcast 1to2.  */
  4470.       else
  4471.         goto bad_broadcast;
  4472.  
  4473.       if ((broadcasted_opnd_size == 256
  4474.            && !t->operand_types[i.broadcast->operand].bitfield.ymmword)
  4475.           || (broadcasted_opnd_size == 512
  4476.               && !t->operand_types[i.broadcast->operand].bitfield.zmmword))
  4477.         {
  4478.         bad_broadcast:
  4479.           i.error = unsupported_broadcast;
  4480.           return 1;
  4481.         }
  4482.     }
  4483.   /* If broadcast is supported by this instruction, check that a memory
  4484.      operand of one-element size isn't specified without broadcast.  */
  4485.   else if (t->opcode_modifier.broadcast && i.mem_operands)
  4486.     {
  4487.       /* Find memory operand.  */
  4488.       for (op = 0; op < i.operands; op++)
  4489.         if (operand_type_check (i.types[op], anymem))
  4490.           break;
  4491.       gas_assert (op < i.operands);
  4492.       /* Check size of the memory operand.  */
  4493.       if ((t->opcode_modifier.vecesize == 0
  4494.            && i.types[op].bitfield.dword)
  4495.           || (t->opcode_modifier.vecesize == 1
  4496.               && i.types[op].bitfield.qword))
  4497.         {
  4498.           i.error = broadcast_needed;
  4499.           return 1;
  4500.         }
  4501.     }
  4502.  
  4503.   /* Check if requested masking is supported.  */
  4504.   if (i.mask
  4505.       && (!t->opcode_modifier.masking
  4506.           || (i.mask->zeroing
  4507.               && t->opcode_modifier.masking == MERGING_MASKING)))
  4508.     {
  4509.       i.error = unsupported_masking;
  4510.       return 1;
  4511.     }
  4512.  
  4513.   /* Check if masking is applied to dest operand.  */
  4514.   if (i.mask && (i.mask->operand != (int) (i.operands - 1)))
  4515.     {
  4516.       i.error = mask_not_on_destination;
  4517.       return 1;
  4518.     }
  4519.  
  4520.   /* Check RC/SAE.  */
  4521.   if (i.rounding)
  4522.     {
  4523.       if ((i.rounding->type != saeonly
  4524.            && !t->opcode_modifier.staticrounding)
  4525.           || (i.rounding->type == saeonly
  4526.               && (t->opcode_modifier.staticrounding
  4527.                   || !t->opcode_modifier.sae)))
  4528.         {
  4529.           i.error = unsupported_rc_sae;
  4530.           return 1;
  4531.         }
  4532.       /* If the instruction has several immediate operands and one of
  4533.          them is rounding, the rounding operand should be the last
  4534.          immediate operand.  */
  4535.       if (i.imm_operands > 1
  4536.           && i.rounding->operand != (int) (i.imm_operands - 1))
  4537.         {
  4538.           i.error = rc_sae_operand_not_last_imm;
  4539.           return 1;
  4540.         }
  4541.     }
  4542.  
  4543.   /* Check vector Disp8 operand.  */
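  /* EVEX encodes a compressed 8-bit displacement that is implicitly
     scaled by the memory element size (disp8*N); memshift below holds
     log2 of that scale factor.  */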
  4544.   if (t->opcode_modifier.disp8memshift)
  4545.     {
  4546.       if (i.broadcast)
  4547.         i.memshift = t->opcode_modifier.vecesize ? 3 : 2;
  4548.       else
  4549.         i.memshift = t->opcode_modifier.disp8memshift;
  4550.  
  4551.       for (op = 0; op < i.operands; op++)
  4552.         if (operand_type_check (i.types[op], disp)
  4553.             && i.op[op].disps->X_op == O_constant)
  4554.           {
  4555.             offsetT value = i.op[op].disps->X_add_number;
  4556.             int vec_disp8_ok = fits_in_vec_disp8 (value);
  4557.             if (t->operand_types [op].bitfield.vec_disp8)
  4558.               {
  4559.                 if (vec_disp8_ok)
  4560.                   i.types[op].bitfield.vec_disp8 = 1;
  4561.                 else
  4562.                   {
  4563.                     /* Vector insn can only have Vec_Disp8/Disp32 in
  4564.                        32/64bit modes, and Vec_Disp8/Disp16 in 16bit
  4565.                        mode.  */
  4566.                     i.types[op].bitfield.disp8 = 0;
  4567.                     if (flag_code != CODE_16BIT)
  4568.                       i.types[op].bitfield.disp16 = 0;
  4569.                   }
  4570.               }
  4571.             else if (flag_code != CODE_16BIT)
  4572.               {
  4573.                 /* One form of this instruction supports vector Disp8.
  4574.                    Try vector Disp8 if we need to use Disp32.  */
  4575.                 if (vec_disp8_ok && !fits_in_signed_byte (value))
  4576.                   {
  4577.                     i.error = try_vector_disp8;
  4578.                     return 1;
  4579.                   }
  4580.               }
  4581.           }
  4582.     }
  4583.   else
  4584.     i.memshift = -1;
  4585.  
  4586.   return 0;
  4587. }
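/* A rough sketch (AT&T syntax) of what the EVEX checks above accept or
   reject -- illustrative only, not an exhaustive list:

     vaddps (%rax){1to16}, %zmm1, %zmm2    16 x 32 bits fill a zmmword; OK.
     vaddps (%rax){1to8}, %zmm1, %zmm2     only 256 bits for a zmmword
                                           operand -> unsupported_broadcast.
     vaddps %zmm1, %zmm2, %zmm3{%k1}{z}    mask on the destination (last
                                           operand) with zeroing; OK only if
                                           the template allows zeroing-masking.
     vaddps {rn-sae}, %zmm1, %zmm2, %zmm3  static rounding; needs
                                           StaticRounding in the template.
     vmovaps 64(%rax), %zmm1               with Disp8MemShift the displacement
                                           64 can be encoded as a compressed
                                           disp8 of 1.  */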
  4588.  
  4589. /* Check if operands are valid for the instruction.  Update VEX
  4590.    operand types.  */
  4591.  
  4592. static int
  4593. VEX_check_operands (const insn_template *t)
  4594. {
  4595.   /* VREX is only valid with EVEX prefix.  */
  4596.   if (i.need_vrex && !t->opcode_modifier.evex)
  4597.     {
  4598.       i.error = invalid_register_operand;
  4599.       return 1;
  4600.     }
  4601.  
  4602.   if (!t->opcode_modifier.vex)
  4603.     return 0;
  4604.  
  4605.   /* Only check VEX_Imm4, which must be the first operand.  */
  4606.   if (t->operand_types[0].bitfield.vec_imm4)
  4607.     {
  4608.       if (i.op[0].imms->X_op != O_constant
  4609.           || !fits_in_imm4 (i.op[0].imms->X_add_number))
  4610.         {
  4611.           i.error = bad_imm4;
  4612.           return 1;
  4613.         }
  4614.  
  4615.       /* Turn off Imm8 so that update_imm won't complain.  */
  4616.       i.types[0] = vec_imm4;
  4617.     }
  4618.  
  4619.   return 0;
  4620. }
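/* Loosely: Vec_Imm4 marks the explicit immediate of VEX insns that also
   encode a fourth register in imm8[7:4] (the XOP/FMA4 family), so only
   values 0..15 fit there; anything larger is rejected as bad_imm4.  */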
  4621.  
  4622. static const insn_template *
  4623. match_template (void)
  4624. {
  4625.   /* Points to template once we've found it.  */
  4626.   const insn_template *t;
  4627.   i386_operand_type overlap0, overlap1, overlap2, overlap3;
  4628.   i386_operand_type overlap4;
  4629.   unsigned int found_reverse_match;
  4630.   i386_opcode_modifier suffix_check;
  4631.   i386_operand_type operand_types [MAX_OPERANDS];
  4632.   int addr_prefix_disp;
  4633.   unsigned int j;
  4634.   unsigned int found_cpu_match;
  4635.   unsigned int check_register;
  4636.   enum i386_error specific_error = 0;
  4637.  
  4638. #if MAX_OPERANDS != 5
  4639. # error "MAX_OPERANDS must be 5."
  4640. #endif
  4641.  
  4642.   found_reverse_match = 0;
  4643.   addr_prefix_disp = -1;
  4644.  
  4645.   memset (&suffix_check, 0, sizeof (suffix_check));
  4646.   if (i.suffix == BYTE_MNEM_SUFFIX)
  4647.     suffix_check.no_bsuf = 1;
  4648.   else if (i.suffix == WORD_MNEM_SUFFIX)
  4649.     suffix_check.no_wsuf = 1;
  4650.   else if (i.suffix == SHORT_MNEM_SUFFIX)
  4651.     suffix_check.no_ssuf = 1;
  4652.   else if (i.suffix == LONG_MNEM_SUFFIX)
  4653.     suffix_check.no_lsuf = 1;
  4654.   else if (i.suffix == QWORD_MNEM_SUFFIX)
  4655.     suffix_check.no_qsuf = 1;
  4656.   else if (i.suffix == LONG_DOUBLE_MNEM_SUFFIX)
  4657.     suffix_check.no_ldsuf = 1;
  4658.  
  4659.   /* Must have right number of operands.  */
  4660.   i.error = number_of_operands_mismatch;
  4661.  
  4662.   for (t = current_templates->start; t < current_templates->end; t++)
  4663.     {
  4664.       addr_prefix_disp = -1;
  4665.  
  4666.       if (i.operands != t->operands)
  4667.         continue;
  4668.  
  4669.       /* Check processor support.  */
  4670.       i.error = unsupported;
  4671.       found_cpu_match = (cpu_flags_match (t)
  4672.                          == CPU_FLAGS_PERFECT_MATCH);
  4673.       if (!found_cpu_match)
  4674.         continue;
  4675.  
  4676.       /* Check old gcc support. */
  4677.       i.error = old_gcc_only;
  4678.       if (!old_gcc && t->opcode_modifier.oldgcc)
  4679.         continue;
  4680.  
  4681.       /* Check AT&T mnemonic.   */
  4682.       i.error = unsupported_with_intel_mnemonic;
  4683.       if (intel_mnemonic && t->opcode_modifier.attmnemonic)
  4684.         continue;
  4685.  
  4686.       /* Check AT&T/Intel syntax.   */
  4687.       i.error = unsupported_syntax;
  4688.       if ((intel_syntax && t->opcode_modifier.attsyntax)
  4689.           || (!intel_syntax && t->opcode_modifier.intelsyntax))
  4690.         continue;
  4691.  
  4692.       /* Check the suffix, except for some instructions in intel mode.  */
  4693.       i.error = invalid_instruction_suffix;
  4694.       if ((!intel_syntax || !t->opcode_modifier.ignoresize)
  4695.           && ((t->opcode_modifier.no_bsuf && suffix_check.no_bsuf)
  4696.               || (t->opcode_modifier.no_wsuf && suffix_check.no_wsuf)
  4697.               || (t->opcode_modifier.no_lsuf && suffix_check.no_lsuf)
  4698.               || (t->opcode_modifier.no_ssuf && suffix_check.no_ssuf)
  4699.               || (t->opcode_modifier.no_qsuf && suffix_check.no_qsuf)
  4700.               || (t->opcode_modifier.no_ldsuf && suffix_check.no_ldsuf)))
  4701.         continue;
  4702.  
  4703.       if (!operand_size_match (t))
  4704.         continue;
  4705.  
  4706.       for (j = 0; j < MAX_OPERANDS; j++)
  4707.         operand_types[j] = t->operand_types[j];
  4708.  
  4709.       /* In general, don't allow 64-bit operands in 32-bit mode.  */
  4710.       if (i.suffix == QWORD_MNEM_SUFFIX
  4711.           && flag_code != CODE_64BIT
  4712.           && (intel_syntax
  4713.               ? (!t->opcode_modifier.ignoresize
  4714.                  && !intel_float_operand (t->name))
  4715.               : intel_float_operand (t->name) != 2)
  4716.           && ((!operand_types[0].bitfield.regmmx
  4717.                && !operand_types[0].bitfield.regxmm
  4718.                && !operand_types[0].bitfield.regymm
  4719.                && !operand_types[0].bitfield.regzmm)
  4720.               || (!operand_types[t->operands > 1].bitfield.regmmx
  4721.                   && operand_types[t->operands > 1].bitfield.regxmm
  4722.                   && operand_types[t->operands > 1].bitfield.regymm
  4723.                   && operand_types[t->operands > 1].bitfield.regzmm))
  4724.           && (t->base_opcode != 0x0fc7
  4725.               || t->extension_opcode != 1 /* cmpxchg8b */))
  4726.         continue;
  4727.  
  4728.       /* In general, don't allow 32-bit operands on pre-386.  */
  4729.       else if (i.suffix == LONG_MNEM_SUFFIX
  4730.                && !cpu_arch_flags.bitfield.cpui386
  4731.                && (intel_syntax
  4732.                    ? (!t->opcode_modifier.ignoresize
  4733.                       && !intel_float_operand (t->name))
  4734.                    : intel_float_operand (t->name) != 2)
  4735.                && ((!operand_types[0].bitfield.regmmx
  4736.                     && !operand_types[0].bitfield.regxmm)
  4737.                    || (!operand_types[t->operands > 1].bitfield.regmmx
  4738.                        && operand_types[t->operands > 1].bitfield.regxmm)))
  4739.         continue;
  4740.  
  4741.       /* Do not verify operands when there are none.  */
  4742.       else
  4743.         {
  4744.           if (!t->operands)
  4745.             /* We've found a match; break out of loop.  */
  4746.             break;
  4747.         }
  4748.  
  4749.       /* Address size prefix will turn Disp64/Disp32/Disp16 operand
  4750.          into Disp32/Disp16/Disp32 operand.  */
  4751.       if (i.prefix[ADDR_PREFIX] != 0)
  4752.           {
  4753.             /* There should be only one Disp operand.  */
  4754.             switch (flag_code)
  4755.             {
  4756.             case CODE_16BIT:
  4757.               for (j = 0; j < MAX_OPERANDS; j++)
  4758.                 {
  4759.                   if (operand_types[j].bitfield.disp16)
  4760.                     {
  4761.                       addr_prefix_disp = j;
  4762.                       operand_types[j].bitfield.disp32 = 1;
  4763.                       operand_types[j].bitfield.disp16 = 0;
  4764.                       break;
  4765.                     }
  4766.                 }
  4767.               break;
  4768.             case CODE_32BIT:
  4769.               for (j = 0; j < MAX_OPERANDS; j++)
  4770.                 {
  4771.                   if (operand_types[j].bitfield.disp32)
  4772.                     {
  4773.                       addr_prefix_disp = j;
  4774.                       operand_types[j].bitfield.disp32 = 0;
  4775.                       operand_types[j].bitfield.disp16 = 1;
  4776.                       break;
  4777.                     }
  4778.                 }
  4779.               break;
  4780.             case CODE_64BIT:
  4781.               for (j = 0; j < MAX_OPERANDS; j++)
  4782.                 {
  4783.                   if (operand_types[j].bitfield.disp64)
  4784.                     {
  4785.                       addr_prefix_disp = j;
  4786.                       operand_types[j].bitfield.disp64 = 0;
  4787.                       operand_types[j].bitfield.disp32 = 1;
  4788.                       break;
  4789.                     }
  4790.                 }
  4791.               break;
  4792.             }
  4793.           }
  4794.  
  4795.       /* Force 0x8b encoding for "mov foo@GOT, %eax".  */
  4796.       if (i.reloc[0] == BFD_RELOC_386_GOT32 && t->base_opcode == 0xa0)
  4797.         continue;
  4798.  
  4799.       /* We check register size if needed.  */
  4800.       check_register = t->opcode_modifier.checkregsize;
  4801.       overlap0 = operand_type_and (i.types[0], operand_types[0]);
  4802.       switch (t->operands)
  4803.         {
  4804.         case 1:
  4805.           if (!operand_type_match (overlap0, i.types[0]))
  4806.             continue;
  4807.           break;
  4808.         case 2:
  4809.           /* xchg %eax, %eax is a special case. It is an alias for nop
  4810.              only in 32bit mode and we can use opcode 0x90.  In 64bit
  4811.              mode, we can't use 0x90 for xchg %eax, %eax since it should
  4812.              zero-extend %eax to %rax.  */
  4813.           if (flag_code == CODE_64BIT
  4814.               && t->base_opcode == 0x90
  4815.               && operand_type_equal (&i.types [0], &acc32)
  4816.               && operand_type_equal (&i.types [1], &acc32))
  4817.             continue;
  4818.           if (i.swap_operand)
  4819.             {
  4820.               /* If we swap operand in encoding, we either match
  4821.                  the next one or reverse direction of operands.  */
  4822.               if (t->opcode_modifier.s)
  4823.                 continue;
  4824.               else if (t->opcode_modifier.d)
  4825.                 goto check_reverse;
  4826.             }
  4827.  
  4828.         case 3:
  4829.           /* If we swap operand in encoding, we match the next one.  */
  4830.           if (i.swap_operand && t->opcode_modifier.s)
  4831.             continue;
  4832.         case 4:
  4833.         case 5:
  4834.           overlap1 = operand_type_and (i.types[1], operand_types[1]);
  4835.           if (!operand_type_match (overlap0, i.types[0])
  4836.               || !operand_type_match (overlap1, i.types[1])
  4837.               || (check_register
  4838.                   && !operand_type_register_match (overlap0, i.types[0],
  4839.                                                    operand_types[0],
  4840.                                                    overlap1, i.types[1],
  4841.                                                    operand_types[1])))
  4842.             {
  4843.               /* Check if other direction is valid ...  */
  4844.               if (!t->opcode_modifier.d && !t->opcode_modifier.floatd)
  4845.                 continue;
  4846.  
  4847. check_reverse:
  4848.               /* Try reversing direction of operands.  */
  4849.               overlap0 = operand_type_and (i.types[0], operand_types[1]);
  4850.               overlap1 = operand_type_and (i.types[1], operand_types[0]);
  4851.               if (!operand_type_match (overlap0, i.types[0])
  4852.                   || !operand_type_match (overlap1, i.types[1])
  4853.                   || (check_register
  4854.                       && !operand_type_register_match (overlap0,
  4855.                                                        i.types[0],
  4856.                                                        operand_types[1],
  4857.                                                        overlap1,
  4858.                                                        i.types[1],
  4859.                                                        operand_types[0])))
  4860.                 {
  4861.                   /* Does not match either direction.  */
  4862.                   continue;
  4863.                 }
  4864.               /* found_reverse_match holds which of D or FloatDR
  4865.                  we've found.  */
  4866.               if (t->opcode_modifier.d)
  4867.                 found_reverse_match = Opcode_D;
  4868.               else if (t->opcode_modifier.floatd)
  4869.                 found_reverse_match = Opcode_FloatD;
  4870.               else
  4871.                 found_reverse_match = 0;
  4872.               if (t->opcode_modifier.floatr)
  4873.                 found_reverse_match |= Opcode_FloatR;
  4874.             }
  4875.           else
  4876.             {
  4877.               /* Found a forward 2 operand match here.  */
  4878.               switch (t->operands)
  4879.                 {
  4880.                 case 5:
  4881.                   overlap4 = operand_type_and (i.types[4],
  4882.                                                operand_types[4]);
  4883.                 case 4:
  4884.                   overlap3 = operand_type_and (i.types[3],
  4885.                                                operand_types[3]);
  4886.                 case 3:
  4887.                   overlap2 = operand_type_and (i.types[2],
  4888.                                                operand_types[2]);
  4889.                   break;
  4890.                 }
  4891.  
  4892.               switch (t->operands)
  4893.                 {
  4894.                 case 5:
  4895.                   if (!operand_type_match (overlap4, i.types[4])
  4896.                       || !operand_type_register_match (overlap3,
  4897.                                                        i.types[3],
  4898.                                                        operand_types[3],
  4899.                                                        overlap4,
  4900.                                                        i.types[4],
  4901.                                                        operand_types[4]))
  4902.                     continue;
  4903.                 case 4:
  4904.                   if (!operand_type_match (overlap3, i.types[3])
  4905.                       || (check_register
  4906.                           && !operand_type_register_match (overlap2,
  4907.                                                            i.types[2],
  4908.                                                            operand_types[2],
  4909.                                                            overlap3,
  4910.                                                            i.types[3],
  4911.                                                            operand_types[3])))
  4912.                     continue;
  4913.                 case 3:
  4914.                   /* Here we make use of the fact that there are no
  4915.                      reverse match 3 operand instructions, and all 3
  4916.                      operand instructions only need to be checked for
  4917.                      register consistency between operands 2 and 3.  */
  4918.                   if (!operand_type_match (overlap2, i.types[2])
  4919.                       || (check_register
  4920.                           && !operand_type_register_match (overlap1,
  4921.                                                            i.types[1],
  4922.                                                            operand_types[1],
  4923.                                                            overlap2,
  4924.                                                            i.types[2],
  4925.                                                            operand_types[2])))
  4926.                     continue;
  4927.                   break;
  4928.                 }
  4929.             }
  4930.           /* Found either forward/reverse 2, 3 or 4 operand match here:
  4931.              slip through to break.  */
  4932.         }
  4933.       if (!found_cpu_match)
  4934.         {
  4935.           found_reverse_match = 0;
  4936.           continue;
  4937.         }
  4938.  
  4939.       /* Check if vector and VEX operands are valid.  */
  4940.       if (check_VecOperands (t) || VEX_check_operands (t))
  4941.         {
  4942.           specific_error = i.error;
  4943.           continue;
  4944.         }
  4945.  
  4946.       /* We've found a match; break out of loop.  */
  4947.       break;
  4948.     }
  4949.  
  4950.   if (t == current_templates->end)
  4951.     {
  4952.       /* We found no match.  */
  4953.       const char *err_msg;
  4954.       switch (specific_error ? specific_error : i.error)
  4955.         {
  4956.         default:
  4957.           abort ();
  4958.         case operand_size_mismatch:
  4959.           err_msg = _("operand size mismatch");
  4960.           break;
  4961.         case operand_type_mismatch:
  4962.           err_msg = _("operand type mismatch");
  4963.           break;
  4964.         case register_type_mismatch:
  4965.           err_msg = _("register type mismatch");
  4966.           break;
  4967.         case number_of_operands_mismatch:
  4968.           err_msg = _("number of operands mismatch");
  4969.           break;
  4970.         case invalid_instruction_suffix:
  4971.           err_msg = _("invalid instruction suffix");
  4972.           break;
  4973.         case bad_imm4:
  4974.           err_msg = _("constant doesn't fit in 4 bits");
  4975.           break;
  4976.         case old_gcc_only:
  4977.           err_msg = _("only supported with old gcc");
  4978.           break;
  4979.         case unsupported_with_intel_mnemonic:
  4980.           err_msg = _("unsupported with Intel mnemonic");
  4981.           break;
  4982.         case unsupported_syntax:
  4983.           err_msg = _("unsupported syntax");
  4984.           break;
  4985.         case unsupported:
  4986.           as_bad (_("unsupported instruction `%s'"),
  4987.                   current_templates->start->name);
  4988.           return NULL;
  4989.         case invalid_vsib_address:
  4990.           err_msg = _("invalid VSIB address");
  4991.           break;
  4992.         case invalid_vector_register_set:
  4993.           err_msg = _("mask, index, and destination registers must be distinct");
  4994.           break;
  4995.         case unsupported_vector_index_register:
  4996.           err_msg = _("unsupported vector index register");
  4997.           break;
  4998.         case unsupported_broadcast:
  4999.           err_msg = _("unsupported broadcast");
  5000.           break;
  5001.         case broadcast_not_on_src_operand:
  5002.           err_msg = _("broadcast not on source memory operand");
  5003.           break;
  5004.         case broadcast_needed:
  5005.           err_msg = _("broadcast is needed for operand of such type");
  5006.           break;
  5007.         case unsupported_masking:
  5008.           err_msg = _("unsupported masking");
  5009.           break;
  5010.         case mask_not_on_destination:
  5011.           err_msg = _("mask not on destination operand");
  5012.           break;
  5013.         case no_default_mask:
  5014.           err_msg = _("default mask isn't allowed");
  5015.           break;
  5016.         case unsupported_rc_sae:
  5017.           err_msg = _("unsupported static rounding/sae");
  5018.           break;
  5019.         case rc_sae_operand_not_last_imm:
  5020.           if (intel_syntax)
  5021.             err_msg = _("RC/SAE operand must precede immediate operands");
  5022.           else
  5023.             err_msg = _("RC/SAE operand must follow immediate operands");
  5024.           break;
  5025.         case invalid_register_operand:
  5026.           err_msg = _("invalid register operand");
  5027.           break;
  5028.         }
  5029.       as_bad (_("%s for `%s'"), err_msg,
  5030.               current_templates->start->name);
  5031.       return NULL;
  5032.     }
  5033.  
  5034.   if (!quiet_warnings)
  5035.     {
  5036.       if (!intel_syntax
  5037.           && (i.types[0].bitfield.jumpabsolute
  5038.               != operand_types[0].bitfield.jumpabsolute))
  5039.         {
  5040.           as_warn (_("indirect %s without `*'"), t->name);
  5041.         }
  5042.  
  5043.       if (t->opcode_modifier.isprefix
  5044.           && t->opcode_modifier.ignoresize)
  5045.         {
  5046.           /* Warn them that a data or address size prefix doesn't
  5047.              affect assembly of the next line of code.  */
  5048.           as_warn (_("stand-alone `%s' prefix"), t->name);
  5049.         }
  5050.     }
  5051.  
  5052.   /* Copy the template we found.  */
  5053.   i.tm = *t;
  5054.  
  5055.   if (addr_prefix_disp != -1)
  5056.     i.tm.operand_types[addr_prefix_disp]
  5057.       = operand_types[addr_prefix_disp];
  5058.  
  5059.   if (found_reverse_match)
  5060.     {
  5061.       /* If we found a reverse match we must alter the opcode
  5062.          direction bit.  found_reverse_match holds bits to change
  5063.          (different for int & float insns).  */
  5064.  
  5065.       i.tm.base_opcode ^= found_reverse_match;
  5066.  
  5067.       i.tm.operand_types[0] = operand_types[1];
  5068.       i.tm.operand_types[1] = operand_types[0];
  5069.     }
  5070.  
  5071.   return t;
  5072. }
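/* A concrete sketch of the direction-bit reversal above: "movl %eax, (%ebx)"
   matches the 0x88 D|W template in its given operand order and, once the W
   bit is applied, assembles to opcode 0x89, while "movl (%ebx), %eax" only
   matches with the operands reversed, so Opcode_D (0x02) is XORed into the
   base opcode and 0x8b is emitted instead.  */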
  5073.  
  5074. static int
  5075. check_string (void)
  5076. {
  5077.   int mem_op = operand_type_check (i.types[0], anymem) ? 0 : 1;
  5078.   if (i.tm.operand_types[mem_op].bitfield.esseg)
  5079.     {
  5080.       if (i.seg[0] != NULL && i.seg[0] != &es)
  5081.         {
  5082.           as_bad (_("`%s' operand %d must use `%ses' segment"),
  5083.                   i.tm.name,
  5084.                   mem_op + 1,
  5085.                   register_prefix);
  5086.           return 0;
  5087.         }
  5088.       /* There's only ever one segment override allowed per instruction.
  5089.          This instruction possibly has a legal segment override on the
  5090.          second operand, so copy the segment to where non-string
  5091.          instructions store it, allowing common code.  */
  5092.       i.seg[0] = i.seg[1];
  5093.     }
  5094.   else if (i.tm.operand_types[mem_op + 1].bitfield.esseg)
  5095.     {
  5096.       if (i.seg[1] != NULL && i.seg[1] != &es)
  5097.         {
  5098.           as_bad (_("`%s' operand %d must use `%ses' segment"),
  5099.                   i.tm.name,
  5100.                   mem_op + 2,
  5101.                   register_prefix);
  5102.           return 0;
  5103.         }
  5104.     }
  5105.   return 1;
  5106. }
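/* For example, "movsb %ds:(%esi), %fs:(%edi)" is rejected here because the
   destination of the string moves is hard-wired to %es:(%edi), while an
   override on the source operand is legal and is simply copied into
   i.seg[0] for the common segment-prefix handling.  */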
  5107.  
  5108. static int
  5109. process_suffix (void)
  5110. {
  5111.   /* If matched instruction specifies an explicit instruction mnemonic
  5112.      suffix, use it.  */
  5113.   if (i.tm.opcode_modifier.size16)
  5114.     i.suffix = WORD_MNEM_SUFFIX;
  5115.   else if (i.tm.opcode_modifier.size32)
  5116.     i.suffix = LONG_MNEM_SUFFIX;
  5117.   else if (i.tm.opcode_modifier.size64)
  5118.     i.suffix = QWORD_MNEM_SUFFIX;
  5119.   else if (i.reg_operands)
  5120.     {
  5121.       /* If there's no instruction mnemonic suffix we try to invent one
  5122.          based on register operands.  */
  5123.       if (!i.suffix)
  5124.         {
  5125.           /* We take i.suffix from the last register operand specified.
  5126.              Destination register type is more significant than source
  5127.              register type.  crc32 in SSE4.2 prefers source register
  5128.              type. */
  5129.           if (i.tm.base_opcode == 0xf20f38f1)
  5130.             {
  5131.               if (i.types[0].bitfield.reg16)
  5132.                 i.suffix = WORD_MNEM_SUFFIX;
  5133.               else if (i.types[0].bitfield.reg32)
  5134.                 i.suffix = LONG_MNEM_SUFFIX;
  5135.               else if (i.types[0].bitfield.reg64)
  5136.                 i.suffix = QWORD_MNEM_SUFFIX;
  5137.             }
  5138.           else if (i.tm.base_opcode == 0xf20f38f0)
  5139.             {
  5140.               if (i.types[0].bitfield.reg8)
  5141.                 i.suffix = BYTE_MNEM_SUFFIX;
  5142.             }
  5143.  
  5144.           if (!i.suffix)
  5145.             {
  5146.               int op;
  5147.  
  5148.               if (i.tm.base_opcode == 0xf20f38f1
  5149.                   || i.tm.base_opcode == 0xf20f38f0)
  5150.                 {
  5151.                   /* We have to know the operand size for crc32.  */
  5152.                   as_bad (_("ambiguous memory operand size for `%s'"),
  5153.                           i.tm.name);
  5154.                   return 0;
  5155.                 }
  5156.  
  5157.               for (op = i.operands; --op >= 0;)
  5158.                 if (!i.tm.operand_types[op].bitfield.inoutportreg)
  5159.                   {
  5160.                     if (i.types[op].bitfield.reg8)
  5161.                       {
  5162.                         i.suffix = BYTE_MNEM_SUFFIX;
  5163.                         break;
  5164.                       }
  5165.                     else if (i.types[op].bitfield.reg16)
  5166.                       {
  5167.                         i.suffix = WORD_MNEM_SUFFIX;
  5168.                         break;
  5169.                       }
  5170.                     else if (i.types[op].bitfield.reg32)
  5171.                       {
  5172.                         i.suffix = LONG_MNEM_SUFFIX;
  5173.                         break;
  5174.                       }
  5175.                     else if (i.types[op].bitfield.reg64)
  5176.                       {
  5177.                         i.suffix = QWORD_MNEM_SUFFIX;
  5178.                         break;
  5179.                       }
  5180.                   }
  5181.             }
  5182.         }
  5183.       else if (i.suffix == BYTE_MNEM_SUFFIX)
  5184.         {
  5185.           if (intel_syntax
  5186.               && i.tm.opcode_modifier.ignoresize
  5187.               && i.tm.opcode_modifier.no_bsuf)
  5188.             i.suffix = 0;
  5189.           else if (!check_byte_reg ())
  5190.             return 0;
  5191.         }
  5192.       else if (i.suffix == LONG_MNEM_SUFFIX)
  5193.         {
  5194.           if (intel_syntax
  5195.               && i.tm.opcode_modifier.ignoresize
  5196.               && i.tm.opcode_modifier.no_lsuf)
  5197.             i.suffix = 0;
  5198.           else if (!check_long_reg ())
  5199.             return 0;
  5200.         }
  5201.       else if (i.suffix == QWORD_MNEM_SUFFIX)
  5202.         {
  5203.           if (intel_syntax
  5204.               && i.tm.opcode_modifier.ignoresize
  5205.               && i.tm.opcode_modifier.no_qsuf)
  5206.             i.suffix = 0;
  5207.           else if (!check_qword_reg ())
  5208.             return 0;
  5209.         }
  5210.       else if (i.suffix == WORD_MNEM_SUFFIX)
  5211.         {
  5212.           if (intel_syntax
  5213.               && i.tm.opcode_modifier.ignoresize
  5214.               && i.tm.opcode_modifier.no_wsuf)
  5215.             i.suffix = 0;
  5216.           else if (!check_word_reg ())
  5217.             return 0;
  5218.         }
  5219.       else if (i.suffix == XMMWORD_MNEM_SUFFIX
  5220.                || i.suffix == YMMWORD_MNEM_SUFFIX
  5221.                || i.suffix == ZMMWORD_MNEM_SUFFIX)
  5222.         {
  5223.           /* Skip if the instruction has x/y/z suffix.  match_template
  5224.              should check if it is a valid suffix.  */
  5225.         }
  5226.       else if (intel_syntax && i.tm.opcode_modifier.ignoresize)
  5227.         /* Do nothing if the instruction is going to ignore the prefix.  */
  5228.         ;
  5229.       else
  5230.         abort ();
  5231.     }
  5232.   else if (i.tm.opcode_modifier.defaultsize
  5233.            && !i.suffix
  5234.            /* exclude fldenv/frstor/fsave/fstenv */
  5235.            && i.tm.opcode_modifier.no_ssuf)
  5236.     {
  5237.       i.suffix = stackop_size;
  5238.     }
  5239.   else if (intel_syntax
  5240.            && !i.suffix
  5241.            && (i.tm.operand_types[0].bitfield.jumpabsolute
  5242.                || i.tm.opcode_modifier.jumpbyte
  5243.                || i.tm.opcode_modifier.jumpintersegment
  5244.                || (i.tm.base_opcode == 0x0f01 /* [ls][gi]dt */
  5245.                    && i.tm.extension_opcode <= 3)))
  5246.     {
  5247.       switch (flag_code)
  5248.         {
  5249.         case CODE_64BIT:
  5250.           if (!i.tm.opcode_modifier.no_qsuf)
  5251.             {
  5252.               i.suffix = QWORD_MNEM_SUFFIX;
  5253.               break;
  5254.             }
  5255.         case CODE_32BIT:
  5256.           if (!i.tm.opcode_modifier.no_lsuf)
  5257.             i.suffix = LONG_MNEM_SUFFIX;
  5258.           break;
  5259.         case CODE_16BIT:
  5260.           if (!i.tm.opcode_modifier.no_wsuf)
  5261.             i.suffix = WORD_MNEM_SUFFIX;
  5262.           break;
  5263.         }
  5264.     }
  5265.  
  5266.   if (!i.suffix)
  5267.     {
  5268.       if (!intel_syntax)
  5269.         {
  5270.           if (i.tm.opcode_modifier.w)
  5271.             {
  5272.               as_bad (_("no instruction mnemonic suffix given and "
  5273.                         "no register operands; can't size instruction"));
  5274.               return 0;
  5275.             }
  5276.         }
  5277.       else
  5278.         {
  5279.           unsigned int suffixes;
  5280.  
  5281.           suffixes = !i.tm.opcode_modifier.no_bsuf;
  5282.           if (!i.tm.opcode_modifier.no_wsuf)
  5283.             suffixes |= 1 << 1;
  5284.           if (!i.tm.opcode_modifier.no_lsuf)
  5285.             suffixes |= 1 << 2;
  5286.           if (!i.tm.opcode_modifier.no_ldsuf)
  5287.             suffixes |= 1 << 3;
  5288.           if (!i.tm.opcode_modifier.no_ssuf)
  5289.             suffixes |= 1 << 4;
  5290.           if (!i.tm.opcode_modifier.no_qsuf)
  5291.             suffixes |= 1 << 5;
  5292.  
  5293.           /* There is more than one possible suffix match.  */
  5294.           if (i.tm.opcode_modifier.w
  5295.               || ((suffixes & (suffixes - 1))
  5296.                   && !i.tm.opcode_modifier.defaultsize
  5297.                   && !i.tm.opcode_modifier.ignoresize))
  5298.             {
  5299.               as_bad (_("ambiguous operand size for `%s'"), i.tm.name);
  5300.               return 0;
  5301.             }
  5302.         }
  5303.     }
  5304.  
  5305.   /* Change the opcode based on the operand size given by i.suffix;
  5306.      We don't need to change things for byte insns.  */
  5307.  
  5308.   if (i.suffix
  5309.       && i.suffix != BYTE_MNEM_SUFFIX
  5310.       && i.suffix != XMMWORD_MNEM_SUFFIX
  5311.       && i.suffix != YMMWORD_MNEM_SUFFIX
  5312.       && i.suffix != ZMMWORD_MNEM_SUFFIX)
  5313.     {
  5314.       /* It's not a byte, select word/dword operation.  */
  5315.       if (i.tm.opcode_modifier.w)
  5316.         {
  5317.           if (i.tm.opcode_modifier.shortform)
  5318.             i.tm.base_opcode |= 8;
  5319.           else
  5320.             i.tm.base_opcode |= 1;
  5321.         }
  5322.  
  5323.       /* Now select between word & dword operations via the operand
  5324.          size prefix, except for instructions that will ignore this
  5325.          prefix anyway.  */
  5326.       if (i.tm.opcode_modifier.addrprefixop0)
  5327.         {
  5328.           /* The address size override prefix changes the size of the
  5329.              first operand.  */
  5330.           if ((flag_code == CODE_32BIT
  5331.                && i.op->regs[0].reg_type.bitfield.reg16)
  5332.               || (flag_code != CODE_32BIT
  5333.                   && i.op->regs[0].reg_type.bitfield.reg32))
  5334.             if (!add_prefix (ADDR_PREFIX_OPCODE))
  5335.               return 0;
  5336.         }
  5337.       else if (i.suffix != QWORD_MNEM_SUFFIX
  5338.                && i.suffix != LONG_DOUBLE_MNEM_SUFFIX
  5339.                && !i.tm.opcode_modifier.ignoresize
  5340.                && !i.tm.opcode_modifier.floatmf
  5341.                && ((i.suffix == LONG_MNEM_SUFFIX) == (flag_code == CODE_16BIT)
  5342.                    || (flag_code == CODE_64BIT
  5343.                        && i.tm.opcode_modifier.jumpbyte)))
  5344.         {
  5345.           unsigned int prefix = DATA_PREFIX_OPCODE;
  5346.  
  5347.           if (i.tm.opcode_modifier.jumpbyte) /* jcxz, loop */
  5348.             prefix = ADDR_PREFIX_OPCODE;
  5349.  
  5350.           if (!add_prefix (prefix))
  5351.             return 0;
  5352.         }
  5353.  
  5354.       /* Set mode64 for an operand.  */
  5355.       if (i.suffix == QWORD_MNEM_SUFFIX
  5356.           && flag_code == CODE_64BIT
  5357.           && !i.tm.opcode_modifier.norex64)
  5358.         {
  5359.           /* Special case for xchg %rax,%rax.  It is NOP and doesn't
  5360.              need rex64.  cmpxchg8b is also a special case. */
  5361.           if (! (i.operands == 2
  5362.                  && i.tm.base_opcode == 0x90
  5363.                  && i.tm.extension_opcode == None
  5364.                  && operand_type_equal (&i.types [0], &acc64)
  5365.                  && operand_type_equal (&i.types [1], &acc64))
  5366.               && ! (i.operands == 1
  5367.                     && i.tm.base_opcode == 0xfc7
  5368.                     && i.tm.extension_opcode == 1
  5369.                     && !operand_type_check (i.types [0], reg)
  5370.                     && operand_type_check (i.types [0], anymem)))
  5371.             i.rex |= REX_W;
  5372.         }
  5373.  
  5374.       /* Size floating point instruction.  */
  5375.       if (i.suffix == LONG_MNEM_SUFFIX)
  5376.         if (i.tm.opcode_modifier.floatmf)
  5377.           i.tm.base_opcode ^= 4;
  5378.     }
  5379.  
  5380.   return 1;
  5381. }
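/* Sketch of the suffix handling above: "mov %ax, %bx" picks up a `w' suffix
   from its register operands, while "inc (%eax)" has neither a suffix nor a
   register operand and is rejected ("no instruction mnemonic suffix given").
   The suffix then drives the W bit and the size prefixes, e.g.
   "movw $1, %ax" in 32-bit mode gets a 0x66 data-size prefix.  */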
  5382.  
  5383. static int
  5384. check_byte_reg (void)
  5385. {
  5386.   int op;
  5387.  
  5388.   for (op = i.operands; --op >= 0;)
  5389.     {
  5390.       /* If this is an eight bit register, it's OK.  If it's the 16 or
  5391.          32 bit version of an eight bit register, we will just use the
  5392.          low portion, and that's OK too.  */
  5393.       if (i.types[op].bitfield.reg8)
  5394.         continue;
  5395.  
  5396.       /* I/O port address operands are OK too.  */
  5397.       if (i.tm.operand_types[op].bitfield.inoutportreg)
  5398.         continue;
  5399.  
  5400.       /* crc32 doesn't generate this warning.  */
  5401.       if (i.tm.base_opcode == 0xf20f38f0)
  5402.         continue;
  5403.  
  5404.       if ((i.types[op].bitfield.reg16
  5405.            || i.types[op].bitfield.reg32
  5406.            || i.types[op].bitfield.reg64)
  5407.           && i.op[op].regs->reg_num < 4
  5408.           /* Prohibit these changes in 64bit mode, since the lowering
  5409.              would be more complicated.  */
  5410.           && flag_code != CODE_64BIT)
  5411.         {
  5412. #if REGISTER_WARNINGS
  5413.           if (!quiet_warnings)
  5414.             as_warn (_("using `%s%s' instead of `%s%s' due to `%c' suffix"),
  5415.                      register_prefix,
  5416.                      (i.op[op].regs + (i.types[op].bitfield.reg16
  5417.                                        ? REGNAM_AL - REGNAM_AX
  5418.                                        : REGNAM_AL - REGNAM_EAX))->reg_name,
  5419.                      register_prefix,
  5420.                      i.op[op].regs->reg_name,
  5421.                      i.suffix);
  5422. #endif
  5423.           continue;
  5424.         }
  5425.       /* Any other register is bad.  */
  5426.       if (i.types[op].bitfield.reg16
  5427.           || i.types[op].bitfield.reg32
  5428.           || i.types[op].bitfield.reg64
  5429.           || i.types[op].bitfield.regmmx
  5430.           || i.types[op].bitfield.regxmm
  5431.           || i.types[op].bitfield.regymm
  5432.           || i.types[op].bitfield.regzmm
  5433.           || i.types[op].bitfield.sreg2
  5434.           || i.types[op].bitfield.sreg3
  5435.           || i.types[op].bitfield.control
  5436.           || i.types[op].bitfield.debug
  5437.           || i.types[op].bitfield.test
  5438.           || i.types[op].bitfield.floatreg
  5439.           || i.types[op].bitfield.floatacc)
  5440.         {
  5441.           as_bad (_("`%s%s' not allowed with `%s%c'"),
  5442.                   register_prefix,
  5443.                   i.op[op].regs->reg_name,
  5444.                   i.tm.name,
  5445.                   i.suffix);
  5446.           return 0;
  5447.         }
  5448.     }
  5449.   return 1;
  5450. }
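/* For instance, in 32-bit mode "incb %eax" is accepted with a warning and
   assembles as if %al had been written, whereas in 64-bit mode (or for
   registers without an 8-bit low part) it is an error.  */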
  5451.  
  5452. static int
  5453. check_long_reg (void)
  5454. {
  5455.   int op;
  5456.  
  5457.   for (op = i.operands; --op >= 0;)
  5458.     /* Reject eight bit registers, except where the template requires
  5459.        them. (e.g. movzb)  */
  5460.     if (i.types[op].bitfield.reg8
  5461.         && (i.tm.operand_types[op].bitfield.reg16
  5462.             || i.tm.operand_types[op].bitfield.reg32
  5463.             || i.tm.operand_types[op].bitfield.acc))
  5464.       {
  5465.         as_bad (_("`%s%s' not allowed with `%s%c'"),
  5466.                 register_prefix,
  5467.                 i.op[op].regs->reg_name,
  5468.                 i.tm.name,
  5469.                 i.suffix);
  5470.         return 0;
  5471.       }
  5472.     /* Warn if the e prefix on a general reg is missing.  */
  5473.     else if ((!quiet_warnings || flag_code == CODE_64BIT)
  5474.              && i.types[op].bitfield.reg16
  5475.              && (i.tm.operand_types[op].bitfield.reg32
  5476.                  || i.tm.operand_types[op].bitfield.acc))
  5477.       {
  5478.         /* Prohibit these changes in the 64bit mode, since the
  5479.            lowering is more complicated.  */
  5480.         if (flag_code == CODE_64BIT)
  5481.           {
  5482.             as_bad (_("incorrect register `%s%s' used with `%c' suffix"),
  5483.                     register_prefix, i.op[op].regs->reg_name,
  5484.                     i.suffix);
  5485.             return 0;
  5486.           }
  5487. #if REGISTER_WARNINGS
  5488.         as_warn (_("using `%s%s' instead of `%s%s' due to `%c' suffix"),
  5489.                  register_prefix,
  5490.                  (i.op[op].regs + REGNAM_EAX - REGNAM_AX)->reg_name,
  5491.                  register_prefix, i.op[op].regs->reg_name, i.suffix);
  5492. #endif
  5493.       }
  5494.     /* Warn if the r prefix on a general reg is present.  */
  5495.     else if (i.types[op].bitfield.reg64
  5496.              && (i.tm.operand_types[op].bitfield.reg32
  5497.                  || i.tm.operand_types[op].bitfield.acc))
  5498.       {
  5499.         if (intel_syntax
  5500.             && i.tm.opcode_modifier.toqword
  5501.             && !i.types[0].bitfield.regxmm)
  5502.           {
  5503.             /* Convert to QWORD.  We want REX byte. */
  5504.             i.suffix = QWORD_MNEM_SUFFIX;
  5505.           }
  5506.         else
  5507.           {
  5508.             as_bad (_("incorrect register `%s%s' used with `%c' suffix"),
  5509.                     register_prefix, i.op[op].regs->reg_name,
  5510.                     i.suffix);
  5511.             return 0;
  5512.           }
  5513.       }
  5514.   return 1;
  5515. }
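/* E.g. "incl %ax": in 32-bit mode this warns and uses %eax instead; in
   64-bit mode the 16-bit register is rejected outright.  An `l' suffix on a
   64-bit register ("incl %rax") is likewise an error unless an Intel-syntax
   template explicitly allows promotion to QWORD (ToQword).  */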
  5516.  
  5517. static int
  5518. check_qword_reg (void)
  5519. {
  5520.   int op;
  5521.  
  5522.   for (op = i.operands; --op >= 0; )
  5523.     /* Reject eight bit registers, except where the template requires
  5524.        them. (e.g. movzb)  */
  5525.     if (i.types[op].bitfield.reg8
  5526.         && (i.tm.operand_types[op].bitfield.reg16
  5527.             || i.tm.operand_types[op].bitfield.reg32
  5528.             || i.tm.operand_types[op].bitfield.acc))
  5529.       {
  5530.         as_bad (_("`%s%s' not allowed with `%s%c'"),
  5531.                 register_prefix,
  5532.                 i.op[op].regs->reg_name,
  5533.                 i.tm.name,
  5534.                 i.suffix);
  5535.         return 0;
  5536.       }
  5537.     /* Warn if the r prefix on a general reg is missing.  */
  5538.     else if ((i.types[op].bitfield.reg16
  5539.               || i.types[op].bitfield.reg32)
  5540.              && (i.tm.operand_types[op].bitfield.reg32
  5541.                  || i.tm.operand_types[op].bitfield.acc))
  5542.       {
  5543.         /* Some Intel-syntax templates allow converting the operand to
  5544.            DWORD instead (no REX byte wanted); otherwise it is an error.  */
  5545.         if (intel_syntax
  5546.             && i.tm.opcode_modifier.todword
  5547.             && !i.types[0].bitfield.regxmm)
  5548.           {
  5549.             /* Convert to DWORD.  We don't want REX byte. */
  5550.             i.suffix = LONG_MNEM_SUFFIX;
  5551.           }
  5552.         else
  5553.           {
  5554.             as_bad (_("incorrect register `%s%s' used with `%c' suffix"),
  5555.                     register_prefix, i.op[op].regs->reg_name,
  5556.                     i.suffix);
  5557.             return 0;
  5558.           }
  5559.       }
  5560.   return 1;
  5561. }
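/* E.g. "incq %eax" is rejected ("incorrect register ... used with `q'
   suffix"); the only escape is an Intel-syntax template marked ToDword,
   where the operand is quietly downgraded to a DWORD so no REX prefix is
   emitted.  */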
  5562.  
  5563. static int
  5564. check_word_reg (void)
  5565. {
  5566.   int op;
  5567.   for (op = i.operands; --op >= 0;)
  5568.     /* Reject eight bit registers, except where the template requires
  5569.        them. (e.g. movzb)  */
  5570.     if (i.types[op].bitfield.reg8
  5571.         && (i.tm.operand_types[op].bitfield.reg16
  5572.             || i.tm.operand_types[op].bitfield.reg32
  5573.             || i.tm.operand_types[op].bitfield.acc))
  5574.       {
  5575.         as_bad (_("`%s%s' not allowed with `%s%c'"),
  5576.                 register_prefix,
  5577.                 i.op[op].regs->reg_name,
  5578.                 i.tm.name,
  5579.                 i.suffix);
  5580.         return 0;
  5581.       }
  5582.     /* Warn if the e or r prefix on a general reg is present.  */
  5583.     else if ((!quiet_warnings || flag_code == CODE_64BIT)
  5584.              && (i.types[op].bitfield.reg32
  5585.                  || i.types[op].bitfield.reg64)
  5586.              && (i.tm.operand_types[op].bitfield.reg16
  5587.                  || i.tm.operand_types[op].bitfield.acc))
  5588.       {
  5589.         /* Prohibit these changes in the 64bit mode, since the
  5590.            lowering is more complicated.  */
  5591.         if (flag_code == CODE_64BIT)
  5592.           {
  5593.             as_bad (_("incorrect register `%s%s' used with `%c' suffix"),
  5594.                     register_prefix, i.op[op].regs->reg_name,
  5595.                     i.suffix);
  5596.             return 0;
  5597.           }
  5598. #if REGISTER_WARNINGS
  5599.         as_warn (_("using `%s%s' instead of `%s%s' due to `%c' suffix"),
  5600.                  register_prefix,
  5601.                  (i.op[op].regs + REGNAM_AX - REGNAM_EAX)->reg_name,
  5602.                  register_prefix, i.op[op].regs->reg_name, i.suffix);
  5603. #endif
  5604.       }
  5605.   return 1;
  5606. }
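/* E.g. "incw %eax" warns and uses %ax instead in 32-bit mode, while in
   64-bit mode the mismatch is a hard error.  */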
  5607.  
  5608. static int
  5609. update_imm (unsigned int j)
  5610. {
  5611.   i386_operand_type overlap = i.types[j];
  5612.   if ((overlap.bitfield.imm8
  5613.        || overlap.bitfield.imm8s
  5614.        || overlap.bitfield.imm16
  5615.        || overlap.bitfield.imm32
  5616.        || overlap.bitfield.imm32s
  5617.        || overlap.bitfield.imm64)
  5618.       && !operand_type_equal (&overlap, &imm8)
  5619.       && !operand_type_equal (&overlap, &imm8s)
  5620.       && !operand_type_equal (&overlap, &imm16)
  5621.       && !operand_type_equal (&overlap, &imm32)
  5622.       && !operand_type_equal (&overlap, &imm32s)
  5623.       && !operand_type_equal (&overlap, &imm64))
  5624.     {
  5625.       if (i.suffix)
  5626.         {
  5627.           i386_operand_type temp;
  5628.  
  5629.           operand_type_set (&temp, 0);
  5630.           if (i.suffix == BYTE_MNEM_SUFFIX)
  5631.             {
  5632.               temp.bitfield.imm8 = overlap.bitfield.imm8;
  5633.               temp.bitfield.imm8s = overlap.bitfield.imm8s;
  5634.             }
  5635.           else if (i.suffix == WORD_MNEM_SUFFIX)
  5636.             temp.bitfield.imm16 = overlap.bitfield.imm16;
  5637.           else if (i.suffix == QWORD_MNEM_SUFFIX)
  5638.             {
  5639.               temp.bitfield.imm64 = overlap.bitfield.imm64;
  5640.               temp.bitfield.imm32s = overlap.bitfield.imm32s;
  5641.             }
  5642.           else
  5643.             temp.bitfield.imm32 = overlap.bitfield.imm32;
  5644.           overlap = temp;
  5645.         }
  5646.       else if (operand_type_equal (&overlap, &imm16_32_32s)
  5647.                || operand_type_equal (&overlap, &imm16_32)
  5648.                || operand_type_equal (&overlap, &imm16_32s))
  5649.         {
  5650.           if ((flag_code == CODE_16BIT) ^ (i.prefix[DATA_PREFIX] != 0))
  5651.             overlap = imm16;
  5652.           else
  5653.             overlap = imm32s;
  5654.         }
  5655.       if (!operand_type_equal (&overlap, &imm8)
  5656.           && !operand_type_equal (&overlap, &imm8s)
  5657.           && !operand_type_equal (&overlap, &imm16)
  5658.           && !operand_type_equal (&overlap, &imm32)
  5659.           && !operand_type_equal (&overlap, &imm32s)
  5660.           && !operand_type_equal (&overlap, &imm64))
  5661.         {
  5662.           as_bad (_("no instruction mnemonic suffix given; "
  5663.                     "can't determine immediate size"));
  5664.           return 0;
  5665.         }
  5666.     }
  5667.   i.types[j] = overlap;
  5668.  
  5669.   return 1;
  5670. }
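/* Roughly: with a `w' suffix only the Imm16 bit is kept, with `q' the
   Imm32S/Imm64 bits, and so on; e.g. "addw $0x1234, %ax" ends up with a
   16-bit immediate while "addl $0x1234, %eax" keeps a 32-bit one.  */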
  5671.  
  5672. static int
  5673. finalize_imm (void)
  5674. {
  5675.   unsigned int j, n;
  5676.  
  5677.   /* Update the first 2 immediate operands.  */
  5678.   n = i.operands > 2 ? 2 : i.operands;
  5679.   if (n)
  5680.     {
  5681.       for (j = 0; j < n; j++)
  5682.         if (update_imm (j) == 0)
  5683.           return 0;
  5684.  
  5685.       /* The 3rd operand can't be an immediate operand.  */
  5686.       gas_assert (operand_type_check (i.types[2], imm) == 0);
  5687.     }
  5688.  
  5689.   return 1;
  5690. }
  5691.  
  5692. static int
  5693. bad_implicit_operand (int xmm)
  5694. {
  5695.   const char *ireg = xmm ? "xmm0" : "ymm0";
  5696.  
  5697.   if (intel_syntax)
  5698.     as_bad (_("the last operand of `%s' must be `%s%s'"),
  5699.             i.tm.name, register_prefix, ireg);
  5700.   else
  5701.     as_bad (_("the first operand of `%s' must be `%s%s'"),
  5702.             i.tm.name, register_prefix, ireg);
  5703.   return 0;
  5704. }
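/* Used e.g. for the SSE4.1 blend variants: in "blendvps %xmm0, %xmm1, %xmm2"
   the %xmm0 operand is implicit in the encoding, so naming any other
   register there produces this diagnostic.  */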
  5705.  
  5706. static int
  5707. process_operands (void)
  5708. {
  5709.   /* Default segment register this instruction will use for memory
  5710.      accesses.  0 means unknown.  This is only for optimizing out
  5711.      unnecessary segment overrides.  */
  5712.   const seg_entry *default_seg = 0;
  5713.  
  5714.   if (i.tm.opcode_modifier.sse2avx && i.tm.opcode_modifier.vexvvvv)
  5715.     {
  5716.       unsigned int dupl = i.operands;
  5717.       unsigned int dest = dupl - 1;
  5718.       unsigned int j;
  5719.  
  5720.       /* The destination must be an xmm register.  */
  5721.       gas_assert (i.reg_operands
  5722.                   && MAX_OPERANDS > dupl
  5723.                   && operand_type_equal (&i.types[dest], &regxmm));
  5724.  
  5725.       if (i.tm.opcode_modifier.firstxmm0)
  5726.         {
  5727.           /* The first operand is implicit and must be xmm0.  */
  5728.           gas_assert (operand_type_equal (&i.types[0], &regxmm));
  5729.           if (register_number (i.op[0].regs) != 0)
  5730.             return bad_implicit_operand (1);
  5731.  
  5732.           if (i.tm.opcode_modifier.vexsources == VEX3SOURCES)
  5733.             {
  5734.               /* Keep xmm0 for instructions with VEX prefix and 3
  5735.                  sources.  */
  5736.               goto duplicate;
  5737.             }
  5738.           else
  5739.             {
  5740.               /* We remove the first xmm0 and keep the number of
  5741.                  operands unchanged, which in fact duplicates the
  5742.                  destination.  */
  5743.               for (j = 1; j < i.operands; j++)
  5744.                 {
  5745.                   i.op[j - 1] = i.op[j];
  5746.                   i.types[j - 1] = i.types[j];
  5747.                   i.tm.operand_types[j - 1] = i.tm.operand_types[j];
  5748.                 }
  5749.             }
  5750.         }
  5751.       else if (i.tm.opcode_modifier.implicit1stxmm0)
  5752.         {
  5753.           gas_assert ((MAX_OPERANDS - 1) > dupl
  5754.                       && (i.tm.opcode_modifier.vexsources
  5755.                           == VEX3SOURCES));
  5756.  
  5757.           /* Add the implicit xmm0 for instructions with VEX prefix
  5758.              and 3 sources.  */
  5759.           for (j = i.operands; j > 0; j--)
  5760.             {
  5761.               i.op[j] = i.op[j - 1];
  5762.               i.types[j] = i.types[j - 1];
  5763.               i.tm.operand_types[j] = i.tm.operand_types[j - 1];
  5764.             }
  5765.           i.op[0].regs
  5766.             = (const reg_entry *) hash_find (reg_hash, "xmm0");
  5767.           i.types[0] = regxmm;
  5768.           i.tm.operand_types[0] = regxmm;
  5769.  
  5770.           i.operands += 2;
  5771.           i.reg_operands += 2;
  5772.           i.tm.operands += 2;
  5773.  
  5774.           dupl++;
  5775.           dest++;
  5776.           i.op[dupl] = i.op[dest];
  5777.           i.types[dupl] = i.types[dest];
  5778.           i.tm.operand_types[dupl] = i.tm.operand_types[dest];
  5779.         }
  5780.       else
  5781.         {
  5782. duplicate:
  5783.           i.operands++;
  5784.           i.reg_operands++;
  5785.           i.tm.operands++;
  5786.  
  5787.           i.op[dupl] = i.op[dest];
  5788.           i.types[dupl] = i.types[dest];
  5789.           i.tm.operand_types[dupl] = i.tm.operand_types[dest];
  5790.         }
  5791.  
  5792.        if (i.tm.opcode_modifier.immext)
  5793.          process_immext ();
  5794.     }
  5795.   else if (i.tm.opcode_modifier.firstxmm0)
  5796.     {
  5797.       unsigned int j;
  5798.  
  5799.       /* The first operand is implicit and must be xmm0/ymm0/zmm0.  */
  5800.       gas_assert (i.reg_operands
  5801.                   && (operand_type_equal (&i.types[0], &regxmm)
  5802.                       || operand_type_equal (&i.types[0], &regymm)
  5803.                       || operand_type_equal (&i.types[0], &regzmm)));
  5804.       if (register_number (i.op[0].regs) != 0)
  5805.         return bad_implicit_operand (i.types[0].bitfield.regxmm);
  5806.  
  5807.       for (j = 1; j < i.operands; j++)
  5808.         {
  5809.           i.op[j - 1] = i.op[j];
  5810.           i.types[j - 1] = i.types[j];
  5811.  
  5812.           /* We need to adjust fields in i.tm since they are used by
  5813.              build_modrm_byte.  */
  5814.           i.tm.operand_types [j - 1] = i.tm.operand_types [j];
  5815.         }
  5816.  
  5817.       i.operands--;
  5818.       i.reg_operands--;
  5819.       i.tm.operands--;
  5820.     }
  5821.   else if (i.tm.opcode_modifier.regkludge)
  5822.     {
  5823.       /* The imul $imm, %reg instruction is converted into
  5824.          imul $imm, %reg, %reg, and the clr %reg instruction
  5825.          is converted into xor %reg, %reg.  */
  5826.  
  5827.       unsigned int first_reg_op;
  5828.  
  5829.       if (operand_type_check (i.types[0], reg))
  5830.         first_reg_op = 0;
  5831.       else
  5832.         first_reg_op = 1;
  5833.       /* Pretend we saw the extra register operand.  */
  5834.       gas_assert (i.reg_operands == 1
  5835.                   && i.op[first_reg_op + 1].regs == 0);
  5836.       i.op[first_reg_op + 1].regs = i.op[first_reg_op].regs;
  5837.       i.types[first_reg_op + 1] = i.types[first_reg_op];
  5838.       i.operands++;
  5839.       i.reg_operands++;
  5840.     }
  5841.  
  5842.   if (i.tm.opcode_modifier.shortform)
  5843.     {
  5844.       if (i.types[0].bitfield.sreg2
  5845.           || i.types[0].bitfield.sreg3)
  5846.         {
  5847.           if (i.tm.base_opcode == POP_SEG_SHORT
  5848.               && i.op[0].regs->reg_num == 1)
  5849.             {
  5850.               as_bad (_("you can't `pop %scs'"), register_prefix);
  5851.               return 0;
  5852.             }
  5853.           i.tm.base_opcode |= (i.op[0].regs->reg_num << 3);
  5854.           if ((i.op[0].regs->reg_flags & RegRex) != 0)
  5855.             i.rex |= REX_B;
  5856.         }
  5857.       else
  5858.         {
  5859.           /* The register or float register operand is in operand
  5860.              0 or 1.  */
  5861.           unsigned int op;
  5862.  
  5863.           if (i.types[0].bitfield.floatreg
  5864.               || operand_type_check (i.types[0], reg))
  5865.             op = 0;
  5866.           else
  5867.             op = 1;
  5868.           /* Register goes in low 3 bits of opcode.  */
  5869.           i.tm.base_opcode |= i.op[op].regs->reg_num;
  5870.           if ((i.op[op].regs->reg_flags & RegRex) != 0)
  5871.             i.rex |= REX_B;
  5872.           if (!quiet_warnings && i.tm.opcode_modifier.ugh)
  5873.             {
  5874.               /* Warn about some common errors, but press on regardless.
  5875.                  The first case can be generated by gcc (<= 2.8.1).  */
  5876.               if (i.operands == 2)
  5877.                 {
  5878.                   /* Reversed arguments on faddp, fsubp, etc.  */
  5879.                   as_warn (_("translating to `%s %s%s,%s%s'"), i.tm.name,
  5880.                            register_prefix, i.op[!intel_syntax].regs->reg_name,
  5881.                            register_prefix, i.op[intel_syntax].regs->reg_name);
  5882.                 }
  5883.               else
  5884.                 {
  5885.                   /* Extraneous `l' suffix on fp insn.  */
  5886.                   as_warn (_("translating to `%s %s%s'"), i.tm.name,
  5887.                            register_prefix, i.op[0].regs->reg_name);
  5888.                 }
  5889.             }
  5890.         }
  5891.     }
  5892.   else if (i.tm.opcode_modifier.modrm)
  5893.     {
  5894.       /* The opcode is completed (modulo i.tm.extension_opcode which
  5895.          must be put into the modrm byte).  Now, we make the modrm and
  5896.          index base bytes based on all the info we've collected.  */
  5897.  
  5898.       default_seg = build_modrm_byte ();
  5899.     }
  5900.   else if ((i.tm.base_opcode & ~0x3) == MOV_AX_DISP32)
  5901.     {
  5902.       default_seg = &ds;
  5903.     }
  5904.   else if (i.tm.opcode_modifier.isstring)
  5905.     {
  5906.       /* For the string instructions that allow a segment override
  5907.          on one of their operands, the default segment is ds.  */
  5908.       default_seg = &ds;
  5909.     }
  5910.  
  5911.   if (i.tm.base_opcode == 0x8d /* lea */
  5912.       && i.seg[0]
  5913.       && !quiet_warnings)
  5914.     as_warn (_("segment override on `%s' is ineffectual"), i.tm.name);
  5915.  
  5916.   /* If a segment was explicitly specified, and the specified segment
  5917.      is not the default, use an opcode prefix to select it.  If we
  5918.      never figured out what the default segment is, then default_seg
  5919.      will be zero at this point, and the specified segment prefix will
  5920.      always be used.  */
  5921.   if ((i.seg[0]) && (i.seg[0] != default_seg))
  5922.     {
  5923.       if (!add_prefix (i.seg[0]->seg_prefix))
  5924.         return 0;
  5925.     }
  5926.   return 1;
  5927. }
  5928.  
  5929. static const seg_entry *
  5930. build_modrm_byte (void)
  5931. {
  5932.   const seg_entry *default_seg = 0;
  5933.   unsigned int source, dest;
  5934.   int vex_3_sources;
  5935.  
  5936.   /* The first operand of instructions with VEX prefix and 3 sources
  5937.      must be VEX_Imm4.  */
  5938.   vex_3_sources = i.tm.opcode_modifier.vexsources == VEX3SOURCES;
  5939.   if (vex_3_sources)
  5940.     {
  5941.       unsigned int nds, reg_slot;
  5942.       expressionS *exp;
  5943.  
  5944.       if (i.tm.opcode_modifier.veximmext
  5945.           && i.tm.opcode_modifier.immext)
  5946.         {
  5947.           dest = i.operands - 2;
  5948.           gas_assert (dest == 3);
  5949.         }
  5950.       else
  5951.         dest = i.operands - 1;
  5952.       nds = dest - 1;
  5953.  
  5954.       /* There are 2 kinds of instructions:
  5955.          1. 5 operands: 4 register operands or 3 register operands
  5956.          plus 1 memory operand plus one Vec_Imm4 operand, VexXDS, and
  5957.          VexW0 or VexW1.  The destination must be either XMM, YMM or
  5958.          ZMM register.
  5959.          2. 4 operands: 4 register operands or 3 register operands
  5960.          plus 1 memory operand, VexXDS, and VexImmExt  */
  5961.       gas_assert ((i.reg_operands == 4
  5962.                    || (i.reg_operands == 3 && i.mem_operands == 1))
  5963.                   && i.tm.opcode_modifier.vexvvvv == VEXXDS
  5964.                   && (i.tm.opcode_modifier.veximmext
  5965.                       || (i.imm_operands == 1
  5966.                           && i.types[0].bitfield.vec_imm4
  5967.                           && (i.tm.opcode_modifier.vexw == VEXW0
  5968.                               || i.tm.opcode_modifier.vexw == VEXW1)
  5969.                           && (operand_type_equal (&i.tm.operand_types[dest], &regxmm)
  5970.                               || operand_type_equal (&i.tm.operand_types[dest], &regymm)
  5971.                               || operand_type_equal (&i.tm.operand_types[dest], &regzmm)))));
  5972.  
  5973.       if (i.imm_operands == 0)
  5974.         {
  5975.           /* When there is no immediate operand, generate an 8bit
  5976.              immediate operand to encode the first operand.  */
  5977.           exp = &im_expressions[i.imm_operands++];
  5978.           i.op[i.operands].imms = exp;
  5979.           i.types[i.operands] = imm8;
  5980.           i.operands++;
  5981.           /* If VexW1 is set, the first operand is the source and
  5982.              the second operand is encoded in the immediate operand.  */
  5983.           if (i.tm.opcode_modifier.vexw == VEXW1)
  5984.             {
  5985.               source = 0;
  5986.               reg_slot = 1;
  5987.             }
  5988.           else
  5989.             {
  5990.               source = 1;
  5991.               reg_slot = 0;
  5992.             }
  5993.  
  5994.           /* FMA swaps REG and NDS.  */
  5995.           if (i.tm.cpu_flags.bitfield.cpufma)
  5996.             {
  5997.               unsigned int tmp;
  5998.               tmp = reg_slot;
  5999.               reg_slot = nds;
  6000.               nds = tmp;
  6001.             }
  6002.  
  6003.           gas_assert (operand_type_equal (&i.tm.operand_types[reg_slot],
  6004.                                           &regxmm)
  6005.                       || operand_type_equal (&i.tm.operand_types[reg_slot],
  6006.                                              &regymm)
  6007.                       || operand_type_equal (&i.tm.operand_types[reg_slot],
  6008.                                              &regzmm));
  6009.           exp->X_op = O_constant;
  6010.           exp->X_add_number = register_number (i.op[reg_slot].regs) << 4;
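                  /* The register number lands in bits 7:4 of the trailing
                     imm8 (the "is4" byte); register 5, for instance, becomes
                     0x50.  */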
  6011.           gas_assert ((i.op[reg_slot].regs->reg_flags & RegVRex) == 0);
  6012.         }
  6013.       else
  6014.         {
  6015.           unsigned int imm_slot;
  6016.  
  6017.           if (i.tm.opcode_modifier.vexw == VEXW0)
  6018.             {
  6019.               /* If VexW0 is set, the third operand is the source and
  6020.                  the second operand is encoded in the immediate
  6021.                  operand.  */
  6022.               source = 2;
  6023.               reg_slot = 1;
  6024.             }
  6025.           else
  6026.             {
  6027.               /* VexW1 is set, the second operand is the source and
  6028.                  the third operand is encoded in the immediate
  6029.                  operand.  */
  6030.               source = 1;
  6031.               reg_slot = 2;
  6032.             }
  6033.  
  6034.           if (i.tm.opcode_modifier.immext)
  6035.             {
  6036.               /* When ImmExt is set, the immediate byte is the last
  6037.                  operand.  */
  6038.               imm_slot = i.operands - 1;
  6039.               source--;
  6040.               reg_slot--;
  6041.             }
  6042.           else
  6043.             {
  6044.               imm_slot = 0;
  6045.  
  6046.               /* Turn on Imm8 so that output_imm will generate it.  */
  6047.               i.types[imm_slot].bitfield.imm8 = 1;
  6048.             }
  6049.  
  6050.           gas_assert (operand_type_equal (&i.tm.operand_types[reg_slot],
  6051.                                           &regxmm)
  6052.                       || operand_type_equal (&i.tm.operand_types[reg_slot],
  6053.                                              &regymm)
  6054.                       || operand_type_equal (&i.tm.operand_types[reg_slot],
  6055.                                              &regzmm));
  6056.           i.op[imm_slot].imms->X_add_number
  6057.               |= register_number (i.op[reg_slot].regs) << 4;
  6058.           gas_assert ((i.op[reg_slot].regs->reg_flags & RegVRex) == 0);
  6059.         }
  6060.  
  6061.       gas_assert (operand_type_equal (&i.tm.operand_types[nds], &regxmm)
  6062.                   || operand_type_equal (&i.tm.operand_types[nds],
  6063.                                          &regymm)
  6064.                   || operand_type_equal (&i.tm.operand_types[nds],
  6065.                                          &regzmm));
  6066.       i.vex.register_specifier = i.op[nds].regs;
  6067.     }
  6068.   else
  6069.     source = dest = 0;
  6070.  
  6071.   /* i.reg_operands MUST be the number of real register operands;
  6072.      implicit registers do not count.  If there are 3 register
  6073.      operands, it must be an instruction with VexNDS.  For an
  6074.      instruction with VexNDD, the destination register is encoded
  6075.      in the VEX prefix.  If there are 4 register operands, it must
  6076.      be an instruction with a VEX prefix and 3 sources.  */
  6077.   if (i.mem_operands == 0
  6078.       && ((i.reg_operands == 2
  6079.            && i.tm.opcode_modifier.vexvvvv <= VEXXDS)
  6080.           || (i.reg_operands == 3
  6081.               && i.tm.opcode_modifier.vexvvvv == VEXXDS)
  6082.           || (i.reg_operands == 4 && vex_3_sources)))
  6083.     {
  6084.       switch (i.operands)
  6085.         {
  6086.         case 2:
  6087.           source = 0;
  6088.           break;
  6089.         case 3:
  6090.           /* When there are 3 operands, one of them may be an immediate,
  6091.              which may be the first or the last operand.  Otherwise,
  6092.              the first operand must be the shift count register (cl)
  6093.              or it is an instruction with VexNDS. */
  6094.           gas_assert (i.imm_operands == 1
  6095.                       || (i.imm_operands == 0
  6096.                           && (i.tm.opcode_modifier.vexvvvv == VEXXDS
  6097.                               || i.types[0].bitfield.shiftcount)));
  6098.           if (operand_type_check (i.types[0], imm)
  6099.               || i.types[0].bitfield.shiftcount)
  6100.             source = 1;
  6101.           else
  6102.             source = 0;
  6103.           break;
  6104.         case 4:
  6105.           /* When there are 4 operands, the first two must be 8bit
  6106.              immediate operands. The source operand will be the 3rd
  6107.              one.
  6108.  
  6109.              For instructions with VexNDS, if the first operand is
  6110.              an imm8, the source operand is the 2nd one.  If the last
  6111.              operand is an imm8, the source operand is the first one.  */
  6112.           gas_assert ((i.imm_operands == 2
  6113.                        && i.types[0].bitfield.imm8
  6114.                        && i.types[1].bitfield.imm8)
  6115.                       || (i.tm.opcode_modifier.vexvvvv == VEXXDS
  6116.                           && i.imm_operands == 1
  6117.                           && (i.types[0].bitfield.imm8
  6118.                               || i.types[i.operands - 1].bitfield.imm8
  6119.                               || i.rounding)));
  6120.           if (i.imm_operands == 2)
  6121.             source = 2;
  6122.           else
  6123.             {
  6124.               if (i.types[0].bitfield.imm8)
  6125.                 source = 1;
  6126.               else
  6127.                 source = 0;
  6128.             }
  6129.           break;
  6130.         case 5:
  6131.           if (i.tm.opcode_modifier.evex)
  6132.             {
  6133.               /* For EVEX instructions, when there are 5 operands, the
  6134.                  first one must be an immediate operand.  If the second
  6135.                  one is an immediate operand, the source operand is the
  6136.                  3rd one.  If the last one is an immediate operand, the
  6137.                  source operand is the 2nd one.  */
  6138.               gas_assert (i.imm_operands == 2
  6139.                           && i.tm.opcode_modifier.sae
  6140.                           && operand_type_check (i.types[0], imm));
  6141.               if (operand_type_check (i.types[1], imm))
  6142.                 source = 2;
  6143.               else if (operand_type_check (i.types[4], imm))
  6144.                 source = 1;
  6145.               else
  6146.                 abort ();
  6147.             }
  6148.           break;
  6149.         default:
  6150.           abort ();
  6151.         }
  6152.  
  6153.       if (!vex_3_sources)
  6154.         {
  6155.           dest = source + 1;
  6156.  
  6157.           /* RC/SAE operand could be between DEST and SRC.  That happens
  6158.              when one operand is GPR and the other one is XMM/YMM/ZMM
  6159.              register.  */
  6160.           if (i.rounding && i.rounding->operand == (int) dest)
  6161.             dest++;
  6162.  
  6163.           if (i.tm.opcode_modifier.vexvvvv == VEXXDS)
  6164.             {
  6165.               /* For instructions with VexNDS, the register-only source
  6166.                  operand must be a 32/64bit integer, XMM, YMM or ZMM
  6167.                  register.  It is encoded in the VEX prefix.  We need to
  6168.                  clear the RegMem bit before calling operand_type_equal.  */
  6169.  
  6170.               i386_operand_type op;
  6171.               unsigned int vvvv;
  6172.  
  6173.               /* Check register-only source operand when two source
  6174.                  operands are swapped.  */
  6175.               if (!i.tm.operand_types[source].bitfield.baseindex
  6176.                   && i.tm.operand_types[dest].bitfield.baseindex)
  6177.                 {
  6178.                   vvvv = source;
  6179.                   source = dest;
  6180.                 }
  6181.               else
  6182.                 vvvv = dest;
  6183.  
  6184.               op = i.tm.operand_types[vvvv];
  6185.               op.bitfield.regmem = 0;
  6186.               if ((dest + 1) >= i.operands
  6187.                   || (!op.bitfield.reg32
  6188.                       && op.bitfield.reg64
  6189.                       && !operand_type_equal (&op, &regxmm)
  6190.                       && !operand_type_equal (&op, &regymm)
  6191.                       && !operand_type_equal (&op, &regzmm)
  6192.                       && !operand_type_equal (&op, &regmask)))
  6193.                 abort ();
  6194.               i.vex.register_specifier = i.op[vvvv].regs;
  6195.               dest++;
  6196.             }
  6197.         }
  6198.  
  6199.       i.rm.mode = 3;
  6200.       /* One of the register operands will be encoded in the i.rm.reg
  6201.          field, the other in the combined i.rm.mode and i.rm.regmem
  6202.          fields.  If no form of this instruction supports a memory
  6203.          destination operand, then we assume the source operand may
  6204.          sometimes be a memory operand and so we need to store the
  6205.          destination in the i.rm.reg field.  */
  6206.       if (!i.tm.operand_types[dest].bitfield.regmem
  6207.           && operand_type_check (i.tm.operand_types[dest], anymem) == 0)
  6208.         {
  6209.           i.rm.reg = i.op[dest].regs->reg_num;
  6210.           i.rm.regmem = i.op[source].regs->reg_num;
  6211.           if ((i.op[dest].regs->reg_flags & RegRex) != 0)
  6212.             i.rex |= REX_R;
  6213.           if ((i.op[dest].regs->reg_flags & RegVRex) != 0)
  6214.             i.vrex |= REX_R;
  6215.           if ((i.op[source].regs->reg_flags & RegRex) != 0)
  6216.             i.rex |= REX_B;
  6217.           if ((i.op[source].regs->reg_flags & RegVRex) != 0)
  6218.             i.vrex |= REX_B;
  6219.         }
  6220.       else
  6221.         {
  6222.           i.rm.reg = i.op[source].regs->reg_num;
  6223.           i.rm.regmem = i.op[dest].regs->reg_num;
  6224.           if ((i.op[dest].regs->reg_flags & RegRex) != 0)
  6225.             i.rex |= REX_B;
  6226.           if ((i.op[dest].regs->reg_flags & RegVRex) != 0)
  6227.             i.vrex |= REX_B;
  6228.           if ((i.op[source].regs->reg_flags & RegRex) != 0)
  6229.             i.rex |= REX_R;
  6230.           if ((i.op[source].regs->reg_flags & RegVRex) != 0)
  6231.             i.vrex |= REX_R;
  6232.         }
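              /* Worked example (conventional encodings, quoted only for
                 illustration): `addl %eax,%ebx' takes this path with mode 3,
                 reg 0 (%eax) and regmem 3 (%ebx), so the ModRM byte is
                 (3 << 6) | (0 << 3) | 3 == 0xc3 and the instruction
                 assembles to 01 c3.  With an extended register, e.g.
                 `addq %r8,%rbx', the extra register bit moves into REX.R,
                 giving 4c 01 c3.  */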
  6233.       if (flag_code != CODE_64BIT && (i.rex & (REX_R | REX_B)))
  6234.         {
  6235.           if (!i.types[0].bitfield.control
  6236.               && !i.types[1].bitfield.control)
  6237.             abort ();
  6238.           i.rex &= ~(REX_R | REX_B);
  6239.           add_prefix (LOCK_PREFIX_OPCODE);
  6240.         }
  6241.     }
  6242.   else
  6243.     {                   /* If it's not 2 reg operands...  */
  6244.       unsigned int mem;
  6245.  
  6246.       if (i.mem_operands)
  6247.         {
  6248.           unsigned int fake_zero_displacement = 0;
  6249.           unsigned int op;
  6250.  
  6251.           for (op = 0; op < i.operands; op++)
  6252.             if (operand_type_check (i.types[op], anymem))
  6253.               break;
  6254.           gas_assert (op < i.operands);
  6255.  
  6256.           if (i.tm.opcode_modifier.vecsib)
  6257.             {
  6258.               if (i.index_reg->reg_num == RegEiz
  6259.                   || i.index_reg->reg_num == RegRiz)
  6260.                 abort ();
  6261.  
  6262.               i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING;
  6263.               if (!i.base_reg)
  6264.                 {
  6265.                   i.sib.base = NO_BASE_REGISTER;
  6266.                   i.sib.scale = i.log2_scale_factor;
  6267.                   /* No Vec_Disp8 if there is no base.  */
  6268.                   i.types[op].bitfield.vec_disp8 = 0;
  6269.                   i.types[op].bitfield.disp8 = 0;
  6270.                   i.types[op].bitfield.disp16 = 0;
  6271.                   i.types[op].bitfield.disp64 = 0;
  6272.                   if (flag_code != CODE_64BIT)
  6273.                     {
  6274.                       /* Must be 32 bit */
  6275.                       i.types[op].bitfield.disp32 = 1;
  6276.                       i.types[op].bitfield.disp32s = 0;
  6277.                     }
  6278.                   else
  6279.                     {
  6280.                       i.types[op].bitfield.disp32 = 0;
  6281.                       i.types[op].bitfield.disp32s = 1;
  6282.                     }
  6283.                 }
  6284.               i.sib.index = i.index_reg->reg_num;
  6285.               if ((i.index_reg->reg_flags & RegRex) != 0)
  6286.                 i.rex |= REX_X;
  6287.               if ((i.index_reg->reg_flags & RegVRex) != 0)
  6288.                 i.vrex |= REX_X;
  6289.             }
  6290.  
  6291.           default_seg = &ds;
  6292.  
  6293.           if (i.base_reg == 0)
  6294.             {
  6295.               i.rm.mode = 0;
  6296.               if (!i.disp_operands)
  6297.                 {
  6298.                   fake_zero_displacement = 1;
                  6299.                   /* Instructions with a VSIB byte need a 32bit
  6300.                      displacement if there is no base register.  */
  6301.                   if (i.tm.opcode_modifier.vecsib)
  6302.                     i.types[op].bitfield.disp32 = 1;
  6303.                 }
  6304.               if (i.index_reg == 0)
  6305.                 {
  6306.                   gas_assert (!i.tm.opcode_modifier.vecsib);
  6307.                   /* Operand is just <disp>  */
  6308.                   if (flag_code == CODE_64BIT)
  6309.                     {
  6310.                       /* 64bit mode overwrites the 32bit absolute
  6311.                          addressing by RIP relative addressing and
  6312.                          absolute addressing is encoded by one of the
  6313.                          redundant SIB forms.  */
  6314.                       i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING;
  6315.                       i.sib.base = NO_BASE_REGISTER;
  6316.                       i.sib.index = NO_INDEX_REGISTER;
  6317.                       i.types[op] = ((i.prefix[ADDR_PREFIX] == 0)
  6318.                                      ? disp32s : disp32);
  6319.                     }
  6320.                   else if ((flag_code == CODE_16BIT)
  6321.                            ^ (i.prefix[ADDR_PREFIX] != 0))
  6322.                     {
  6323.                       i.rm.regmem = NO_BASE_REGISTER_16;
  6324.                       i.types[op] = disp16;
  6325.                     }
  6326.                   else
  6327.                     {
  6328.                       i.rm.regmem = NO_BASE_REGISTER;
  6329.                       i.types[op] = disp32;
  6330.                     }
  6331.                 }
  6332.               else if (!i.tm.opcode_modifier.vecsib)
  6333.                 {
  6334.                   /* !i.base_reg && i.index_reg  */
  6335.                   if (i.index_reg->reg_num == RegEiz
  6336.                       || i.index_reg->reg_num == RegRiz)
  6337.                     i.sib.index = NO_INDEX_REGISTER;
  6338.                   else
  6339.                     i.sib.index = i.index_reg->reg_num;
  6340.                   i.sib.base = NO_BASE_REGISTER;
  6341.                   i.sib.scale = i.log2_scale_factor;
  6342.                   i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING;
  6343.                   /* No Vec_Disp8 if there is no base.  */
  6344.                   i.types[op].bitfield.vec_disp8 = 0;
  6345.                   i.types[op].bitfield.disp8 = 0;
  6346.                   i.types[op].bitfield.disp16 = 0;
  6347.                   i.types[op].bitfield.disp64 = 0;
  6348.                   if (flag_code != CODE_64BIT)
  6349.                     {
  6350.                       /* Must be 32 bit */
  6351.                       i.types[op].bitfield.disp32 = 1;
  6352.                       i.types[op].bitfield.disp32s = 0;
  6353.                     }
  6354.                   else
  6355.                     {
  6356.                       i.types[op].bitfield.disp32 = 0;
  6357.                       i.types[op].bitfield.disp32s = 1;
  6358.                     }
  6359.                   if ((i.index_reg->reg_flags & RegRex) != 0)
  6360.                     i.rex |= REX_X;
  6361.                 }
  6362.             }
  6363.           /* RIP addressing for 64bit mode.  */
  6364.           else if (i.base_reg->reg_num == RegRip ||
  6365.                    i.base_reg->reg_num == RegEip)
  6366.             {
  6367.               gas_assert (!i.tm.opcode_modifier.vecsib);
  6368.               i.rm.regmem = NO_BASE_REGISTER;
  6369.               i.types[op].bitfield.disp8 = 0;
  6370.               i.types[op].bitfield.disp16 = 0;
  6371.               i.types[op].bitfield.disp32 = 0;
  6372.               i.types[op].bitfield.disp32s = 1;
  6373.               i.types[op].bitfield.disp64 = 0;
  6374.               i.types[op].bitfield.vec_disp8 = 0;
  6375.               i.flags[op] |= Operand_PCrel;
  6376.               if (! i.disp_operands)
  6377.                 fake_zero_displacement = 1;
  6378.             }
  6379.           else if (i.base_reg->reg_type.bitfield.reg16)
  6380.             {
  6381.               gas_assert (!i.tm.opcode_modifier.vecsib);
  6382.               switch (i.base_reg->reg_num)
  6383.                 {
  6384.                 case 3: /* (%bx)  */
  6385.                   if (i.index_reg == 0)
  6386.                     i.rm.regmem = 7;
  6387.                   else /* (%bx,%si) -> 0, or (%bx,%di) -> 1  */
  6388.                     i.rm.regmem = i.index_reg->reg_num - 6;
  6389.                   break;
  6390.                 case 5: /* (%bp)  */
  6391.                   default_seg = &ss;
  6392.                   if (i.index_reg == 0)
  6393.                     {
  6394.                       i.rm.regmem = 6;
  6395.                       if (operand_type_check (i.types[op], disp) == 0)
  6396.                         {
  6397.                           /* fake (%bp) into 0(%bp)  */
  6398.                           if (i.tm.operand_types[op].bitfield.vec_disp8)
  6399.                             i.types[op].bitfield.vec_disp8 = 1;
  6400.                           else
  6401.                             i.types[op].bitfield.disp8 = 1;
  6402.                           fake_zero_displacement = 1;
  6403.                         }
  6404.                     }
  6405.                   else /* (%bp,%si) -> 2, or (%bp,%di) -> 3  */
  6406.                     i.rm.regmem = i.index_reg->reg_num - 6 + 2;
  6407.                   break;
  6408.                 default: /* (%si) -> 4 or (%di) -> 5  */
  6409.                   i.rm.regmem = i.base_reg->reg_num - 6 + 4;
  6410.                 }
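                      /* The regmem values above follow the classic 16-bit
                         ModRM table: 0 (%bx,%si), 1 (%bx,%di), 2 (%bp,%si),
                         3 (%bp,%di), 4 (%si), 5 (%di), 6 (%bp) or bare
                         disp16, 7 (%bx).  */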
  6411.               i.rm.mode = mode_from_disp_size (i.types[op]);
  6412.             }
  6413.           else /* i.base_reg and 32/64 bit mode  */
  6414.             {
  6415.               if (flag_code == CODE_64BIT
  6416.                   && operand_type_check (i.types[op], disp))
  6417.                 {
  6418.                   i386_operand_type temp;
  6419.                   operand_type_set (&temp, 0);
  6420.                   temp.bitfield.disp8 = i.types[op].bitfield.disp8;
  6421.                   temp.bitfield.vec_disp8
  6422.                     = i.types[op].bitfield.vec_disp8;
  6423.                   i.types[op] = temp;
  6424.                   if (i.prefix[ADDR_PREFIX] == 0)
  6425.                     i.types[op].bitfield.disp32s = 1;
  6426.                   else
  6427.                     i.types[op].bitfield.disp32 = 1;
  6428.                 }
  6429.  
  6430.               if (!i.tm.opcode_modifier.vecsib)
  6431.                 i.rm.regmem = i.base_reg->reg_num;
  6432.               if ((i.base_reg->reg_flags & RegRex) != 0)
  6433.                 i.rex |= REX_B;
  6434.               i.sib.base = i.base_reg->reg_num;
  6435.               /* x86-64 ignores REX prefix bit here to avoid decoder
  6436.                  complications.  */
  6437.               if (!(i.base_reg->reg_flags & RegRex)
  6438.                   && (i.base_reg->reg_num == EBP_REG_NUM
  6439.                    || i.base_reg->reg_num == ESP_REG_NUM))
  6440.                   default_seg = &ss;
  6441.               if (i.base_reg->reg_num == 5 && i.disp_operands == 0)
  6442.                 {
  6443.                   fake_zero_displacement = 1;
  6444.                   if (i.tm.operand_types [op].bitfield.vec_disp8)
  6445.                     i.types[op].bitfield.vec_disp8 = 1;
  6446.                   else
  6447.                     i.types[op].bitfield.disp8 = 1;
  6448.                 }
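                      /* E.g. `movl (%ebp),%eax' has no explicit displacement,
                         but base 5 with mode 0 would mean disp32-only, so a
                         one-byte zero displacement is faked and the insn
                         becomes 8b 45 00.  */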
  6449.               i.sib.scale = i.log2_scale_factor;
  6450.               if (i.index_reg == 0)
  6451.                 {
  6452.                   gas_assert (!i.tm.opcode_modifier.vecsib);
  6453.                   /* <disp>(%esp) becomes two byte modrm with no index
  6454.                      register.  We've already stored the code for esp
  6455.                      in i.rm.regmem, i.e. ESCAPE_TO_TWO_BYTE_ADDRESSING.
  6456.                      Any base register besides %esp will not use the
  6457.                      extra modrm byte.  */
  6458.                   i.sib.index = NO_INDEX_REGISTER;
  6459.                 }
  6460.               else if (!i.tm.opcode_modifier.vecsib)
  6461.                 {
  6462.                   if (i.index_reg->reg_num == RegEiz
  6463.                       || i.index_reg->reg_num == RegRiz)
  6464.                     i.sib.index = NO_INDEX_REGISTER;
  6465.                   else
  6466.                     i.sib.index = i.index_reg->reg_num;
  6467.                   i.rm.regmem = ESCAPE_TO_TWO_BYTE_ADDRESSING;
  6468.                   if ((i.index_reg->reg_flags & RegRex) != 0)
  6469.                     i.rex |= REX_X;
  6470.                 }
  6471.  
  6472.               if (i.disp_operands
  6473.                   && (i.reloc[op] == BFD_RELOC_386_TLS_DESC_CALL
  6474.                       || i.reloc[op] == BFD_RELOC_X86_64_TLSDESC_CALL))
  6475.                 i.rm.mode = 0;
  6476.               else
  6477.                 {
  6478.                   if (!fake_zero_displacement
  6479.                       && !i.disp_operands
  6480.                       && i.disp_encoding)
  6481.                     {
  6482.                       fake_zero_displacement = 1;
  6483.                       if (i.disp_encoding == disp_encoding_8bit)
  6484.                         i.types[op].bitfield.disp8 = 1;
  6485.                       else
  6486.                         i.types[op].bitfield.disp32 = 1;
  6487.                     }
  6488.                   i.rm.mode = mode_from_disp_size (i.types[op]);
  6489.                 }
  6490.             }
  6491.  
  6492.           if (fake_zero_displacement)
  6493.             {
  6494.               /* Fakes a zero displacement assuming that i.types[op]
  6495.                  holds the correct displacement size.  */
  6496.               expressionS *exp;
  6497.  
  6498.               gas_assert (i.op[op].disps == 0);
  6499.               exp = &disp_expressions[i.disp_operands++];
  6500.               i.op[op].disps = exp;
  6501.               exp->X_op = O_constant;
  6502.               exp->X_add_number = 0;
  6503.               exp->X_add_symbol = (symbolS *) 0;
  6504.               exp->X_op_symbol = (symbolS *) 0;
  6505.             }
  6506.  
  6507.           mem = op;
  6508.         }
  6509.       else
  6510.         mem = ~0;
  6511.  
  6512.       if (i.tm.opcode_modifier.vexsources == XOP2SOURCES)
  6513.         {
  6514.           if (operand_type_check (i.types[0], imm))
  6515.             i.vex.register_specifier = NULL;
  6516.           else
  6517.             {
  6518.               /* VEX.vvvv encodes one of the sources when the first
  6519.                  operand is not an immediate.  */
  6520.               if (i.tm.opcode_modifier.vexw == VEXW0)
  6521.                 i.vex.register_specifier = i.op[0].regs;
  6522.               else
  6523.                 i.vex.register_specifier = i.op[1].regs;
  6524.             }
  6525.  
  6526.           /* Destination is an XMM register encoded in the ModRM.reg
  6527.              and VEX.R bit.  */
  6528.           i.rm.reg = i.op[2].regs->reg_num;
  6529.           if ((i.op[2].regs->reg_flags & RegRex) != 0)
  6530.             i.rex |= REX_R;
  6531.  
  6532.           /* ModRM.rm and VEX.B encodes the other source.  */
  6533.           if (!i.mem_operands)
  6534.             {
  6535.               i.rm.mode = 3;
  6536.  
  6537.               if (i.tm.opcode_modifier.vexw == VEXW0)
  6538.                 i.rm.regmem = i.op[1].regs->reg_num;
  6539.               else
  6540.                 i.rm.regmem = i.op[0].regs->reg_num;
  6541.  
  6542.               if ((i.op[1].regs->reg_flags & RegRex) != 0)
  6543.                 i.rex |= REX_B;
  6544.             }
  6545.         }
  6546.       else if (i.tm.opcode_modifier.vexvvvv == VEXLWP)
  6547.         {
  6548.           i.vex.register_specifier = i.op[2].regs;
  6549.           if (!i.mem_operands)
  6550.             {
  6551.               i.rm.mode = 3;
  6552.               i.rm.regmem = i.op[1].regs->reg_num;
  6553.               if ((i.op[1].regs->reg_flags & RegRex) != 0)
  6554.                 i.rex |= REX_B;
  6555.             }
  6556.         }
  6557.       /* Fill in i.rm.reg or i.rm.regmem field with register operand
  6558.          (if any) based on i.tm.extension_opcode.  Again, we must be
  6559.          careful to make sure that segment/control/debug/test/MMX
  6560.          registers are coded into the i.rm.reg field.  */
  6561.       else if (i.reg_operands)
  6562.         {
  6563.           unsigned int op;
  6564.           unsigned int vex_reg = ~0;
  6565.  
  6566.           for (op = 0; op < i.operands; op++)
  6567.             if (i.types[op].bitfield.reg8
  6568.                 || i.types[op].bitfield.reg16
  6569.                 || i.types[op].bitfield.reg32
  6570.                 || i.types[op].bitfield.reg64
  6571.                 || i.types[op].bitfield.regmmx
  6572.                 || i.types[op].bitfield.regxmm
  6573.                 || i.types[op].bitfield.regymm
  6574.                 || i.types[op].bitfield.regbnd
  6575.                 || i.types[op].bitfield.regzmm
  6576.                 || i.types[op].bitfield.regmask
  6577.                 || i.types[op].bitfield.sreg2
  6578.                 || i.types[op].bitfield.sreg3
  6579.                 || i.types[op].bitfield.control
  6580.                 || i.types[op].bitfield.debug
  6581.                 || i.types[op].bitfield.test)
  6582.               break;
  6583.  
  6584.           if (vex_3_sources)
  6585.             op = dest;
  6586.           else if (i.tm.opcode_modifier.vexvvvv == VEXXDS)
  6587.             {
  6588.               /* For instructions with VexNDS, the register-only
  6589.                  source operand is encoded in the VEX prefix. */
  6590.               gas_assert (mem != (unsigned int) ~0);
  6591.  
  6592.               if (op > mem)
  6593.                 {
  6594.                   vex_reg = op++;
  6595.                   gas_assert (op < i.operands);
  6596.                 }
  6597.               else
  6598.                 {
  6599.                   /* Check register-only source operand when two source
  6600.                      operands are swapped.  */
  6601.                   if (!i.tm.operand_types[op].bitfield.baseindex
  6602.                       && i.tm.operand_types[op + 1].bitfield.baseindex)
  6603.                     {
  6604.                       vex_reg = op;
  6605.                       op += 2;
  6606.                       gas_assert (mem == (vex_reg + 1)
  6607.                                   && op < i.operands);
  6608.                     }
  6609.                   else
  6610.                     {
  6611.                       vex_reg = op + 1;
  6612.                       gas_assert (vex_reg < i.operands);
  6613.                     }
  6614.                 }
  6615.             }
  6616.           else if (i.tm.opcode_modifier.vexvvvv == VEXNDD)
  6617.             {
  6618.               /* For instructions with VexNDD, the register destination
  6619.                  is encoded in the VEX prefix.  */
  6620.               if (i.mem_operands == 0)
  6621.                 {
  6622.                   /* There is no memory operand.  */
  6623.                   gas_assert ((op + 2) == i.operands);
  6624.                   vex_reg = op + 1;
  6625.                 }
  6626.               else
  6627.                 {
  6628.                   /* There are only 2 operands.  */
  6629.                   gas_assert (op < 2 && i.operands == 2);
  6630.                   vex_reg = 1;
  6631.                 }
  6632.             }
  6633.           else
  6634.             gas_assert (op < i.operands);
  6635.  
  6636.           if (vex_reg != (unsigned int) ~0)
  6637.             {
  6638.               i386_operand_type *type = &i.tm.operand_types[vex_reg];
  6639.  
  6640.               if (type->bitfield.reg32 != 1
  6641.                   && type->bitfield.reg64 != 1
  6642.                   && !operand_type_equal (type, &regxmm)
  6643.                   && !operand_type_equal (type, &regymm)
  6644.                   && !operand_type_equal (type, &regzmm)
  6645.                   && !operand_type_equal (type, &regmask))
  6646.                 abort ();
  6647.  
  6648.               i.vex.register_specifier = i.op[vex_reg].regs;
  6649.             }
  6650.  
  6651.           /* Don't set OP operand twice.  */
  6652.           if (vex_reg != op)
  6653.             {
  6654.               /* If there is an extension opcode to put here, the
  6655.                  register number must be put into the regmem field.  */
  6656.               if (i.tm.extension_opcode != None)
  6657.                 {
  6658.                   i.rm.regmem = i.op[op].regs->reg_num;
  6659.                   if ((i.op[op].regs->reg_flags & RegRex) != 0)
  6660.                     i.rex |= REX_B;
  6661.                   if ((i.op[op].regs->reg_flags & RegVRex) != 0)
  6662.                     i.vrex |= REX_B;
  6663.                 }
  6664.               else
  6665.                 {
  6666.                   i.rm.reg = i.op[op].regs->reg_num;
  6667.                   if ((i.op[op].regs->reg_flags & RegRex) != 0)
  6668.                     i.rex |= REX_R;
  6669.                   if ((i.op[op].regs->reg_flags & RegVRex) != 0)
  6670.                     i.vrex |= REX_R;
  6671.                 }
  6672.             }
  6673.  
  6674.           /* Now, if no memory operand has set i.rm.mode = 0, 1, 2 we
  6675.              must set it to 3 to indicate this is a register operand
  6676.              in the regmem field.  */
  6677.           if (!i.mem_operands)
  6678.             i.rm.mode = 3;
  6679.         }
  6680.  
  6681.       /* Fill in i.rm.reg field with extension opcode (if any).  */
  6682.       if (i.tm.extension_opcode != None)
  6683.         i.rm.reg = i.tm.extension_opcode;
  6684.     }
  6685.   return default_seg;
  6686. }
  6687.  
  6688. static void
  6689. output_branch (void)
  6690. {
  6691.   char *p;
  6692.   int size;
  6693.   int code16;
  6694.   int prefix;
  6695.   relax_substateT subtype;
  6696.   symbolS *sym;
  6697.   offsetT off;
  6698.  
  6699.   code16 = flag_code == CODE_16BIT ? CODE16 : 0;
  6700.   size = i.disp_encoding == disp_encoding_32bit ? BIG : SMALL;
  6701.  
  6702.   prefix = 0;
  6703.   if (i.prefix[DATA_PREFIX] != 0)
  6704.     {
  6705.       prefix = 1;
  6706.       i.prefixes -= 1;
  6707.       code16 ^= CODE16;
  6708.     }
  6709.   /* Pentium4 branch hints.  */
  6710.   if (i.prefix[SEG_PREFIX] == CS_PREFIX_OPCODE /* not taken */
  6711.       || i.prefix[SEG_PREFIX] == DS_PREFIX_OPCODE /* taken */)
  6712.     {
  6713.       prefix++;
  6714.       i.prefixes--;
  6715.     }
  6716.   if (i.prefix[REX_PREFIX] != 0)
  6717.     {
  6718.       prefix++;
  6719.       i.prefixes--;
  6720.     }
  6721.  
  6722.   /* BND prefixed jump.  */
  6723.   if (i.prefix[BND_PREFIX] != 0)
  6724.     {
  6725.       FRAG_APPEND_1_CHAR (i.prefix[BND_PREFIX]);
  6726.       i.prefixes -= 1;
  6727.     }
  6728.  
  6729.   if (i.prefixes != 0 && !intel_syntax)
  6730.     as_warn (_("skipping prefixes on this instruction"));
  6731.  
  6732.   /* It's always a symbol; end the frag and set up for relaxation.
  6733.      Make sure there is enough room in this frag for the largest
  6734.      instruction we may generate in md_convert_frag.  This is 2
  6735.      bytes for the opcode and room for the prefix and largest
  6736.      displacement.  */
  6737.   frag_grow (prefix + 2 + 4);
  6738.   /* Prefix and 1 opcode byte go in fr_fix.  */
  6739.   p = frag_more (prefix + 1);
  6740.   if (i.prefix[DATA_PREFIX] != 0)
  6741.     *p++ = DATA_PREFIX_OPCODE;
  6742.   if (i.prefix[SEG_PREFIX] == CS_PREFIX_OPCODE
  6743.       || i.prefix[SEG_PREFIX] == DS_PREFIX_OPCODE)
  6744.     *p++ = i.prefix[SEG_PREFIX];
  6745.   if (i.prefix[REX_PREFIX] != 0)
  6746.     *p++ = i.prefix[REX_PREFIX];
  6747.   *p = i.tm.base_opcode;
  6748.  
  6749.   if ((unsigned char) *p == JUMP_PC_RELATIVE)
  6750.     subtype = ENCODE_RELAX_STATE (UNCOND_JUMP, size);
  6751.   else if (cpu_arch_flags.bitfield.cpui386)
  6752.     subtype = ENCODE_RELAX_STATE (COND_JUMP, size);
  6753.   else
  6754.     subtype = ENCODE_RELAX_STATE (COND_JUMP86, size);
  6755.   subtype |= code16;
  6756.  
  6757.   sym = i.op[0].disps->X_add_symbol;
  6758.   off = i.op[0].disps->X_add_number;
  6759.  
  6760.   if (i.op[0].disps->X_op != O_constant
  6761.       && i.op[0].disps->X_op != O_symbol)
  6762.     {
  6763.       /* Handle complex expressions.  */
  6764.       sym = make_expr_symbol (i.op[0].disps);
  6765.       off = 0;
  6766.     }
  6767.  
  6768.   /* 1 possible extra opcode + 4 byte displacement go in var part.
  6769.      Pass reloc in fr_var.  */
  6770.   frag_var (rs_machine_dependent, 5, i.reloc[0], subtype, sym, off, p);
  6771. }
  6772.  
  6773. static void
  6774. output_jump (void)
  6775. {
  6776.   char *p;
  6777.   int size;
  6778.   fixS *fixP;
  6779.  
  6780.   if (i.tm.opcode_modifier.jumpbyte)
  6781.     {
  6782.       /* This is a loop or jecxz type instruction.  */
  6783.       size = 1;
  6784.       if (i.prefix[ADDR_PREFIX] != 0)
  6785.         {
  6786.           FRAG_APPEND_1_CHAR (ADDR_PREFIX_OPCODE);
  6787.           i.prefixes -= 1;
  6788.         }
  6789.       /* Pentium4 branch hints.  */
  6790.       if (i.prefix[SEG_PREFIX] == CS_PREFIX_OPCODE /* not taken */
  6791.           || i.prefix[SEG_PREFIX] == DS_PREFIX_OPCODE /* taken */)
  6792.         {
  6793.           FRAG_APPEND_1_CHAR (i.prefix[SEG_PREFIX]);
  6794.           i.prefixes--;
  6795.         }
  6796.     }
  6797.   else
  6798.     {
  6799.       int code16;
  6800.  
  6801.       code16 = 0;
  6802.       if (flag_code == CODE_16BIT)
  6803.         code16 = CODE16;
  6804.  
  6805.       if (i.prefix[DATA_PREFIX] != 0)
  6806.         {
  6807.           FRAG_APPEND_1_CHAR (DATA_PREFIX_OPCODE);
  6808.           i.prefixes -= 1;
  6809.           code16 ^= CODE16;
  6810.         }
  6811.  
  6812.       size = 4;
  6813.       if (code16)
  6814.         size = 2;
  6815.     }
  6816.  
  6817.   if (i.prefix[REX_PREFIX] != 0)
  6818.     {
  6819.       FRAG_APPEND_1_CHAR (i.prefix[REX_PREFIX]);
  6820.       i.prefixes -= 1;
  6821.     }
  6822.  
  6823.   /* BND prefixed jump.  */
  6824.   if (i.prefix[BND_PREFIX] != 0)
  6825.     {
  6826.       FRAG_APPEND_1_CHAR (i.prefix[BND_PREFIX]);
  6827.       i.prefixes -= 1;
  6828.     }
  6829.  
  6830.   if (i.prefixes != 0 && !intel_syntax)
  6831.     as_warn (_("skipping prefixes on this instruction"));
  6832.  
  6833.   p = frag_more (i.tm.opcode_length + size);
  6834.   switch (i.tm.opcode_length)
  6835.     {
  6836.     case 2:
  6837.       *p++ = i.tm.base_opcode >> 8;
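              /* Fall through: the low opcode byte is emitted by the next case.  */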
  6838.     case 1:
  6839.       *p++ = i.tm.base_opcode;
  6840.       break;
  6841.     default:
  6842.       abort ();
  6843.     }
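          /* For instance, the rel32 form of `jz' (opcode bytes 0f 84) is
             emitted above high byte first, and the `size'-byte displacement
             is handled by the fixup created below.  */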
  6844.  
  6845.   fixP = fix_new_exp (frag_now, p - frag_now->fr_literal, size,
  6846.                       i.op[0].disps, 1, reloc (size, 1, 1, i.reloc[0]));
  6847.  
  6848.   /* All jumps handled here are signed, but don't use a signed limit
  6849.      check for 32 and 16 bit jumps as we want to allow wrap around at
  6850.      4G and 64k respectively.  */
  6851.   if (size == 1)
  6852.     fixP->fx_signed = 1;
  6853. }
  6854.  
  6855. static void
  6856. output_interseg_jump (void)
  6857. {
  6858.   char *p;
  6859.   int size;
  6860.   int prefix;
  6861.   int code16;
  6862.  
  6863.   code16 = 0;
  6864.   if (flag_code == CODE_16BIT)
  6865.     code16 = CODE16;
  6866.  
  6867.   prefix = 0;
  6868.   if (i.prefix[DATA_PREFIX] != 0)
  6869.     {
  6870.       prefix = 1;
  6871.       i.prefixes -= 1;
  6872.       code16 ^= CODE16;
  6873.     }
  6874.   if (i.prefix[REX_PREFIX] != 0)
  6875.     {
  6876.       prefix++;
  6877.       i.prefixes -= 1;
  6878.     }
  6879.  
  6880.   size = 4;
  6881.   if (code16)
  6882.     size = 2;
  6883.  
  6884.   if (i.prefixes != 0 && !intel_syntax)
  6885.     as_warn (_("skipping prefixes on this instruction"));
  6886.  
  6887.   /* 1 byte opcode; 2 bytes segment; `size' bytes offset.  */
  6888.   p = frag_more (prefix + 1 + 2 + size);
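          /* Byte-layout sketch (standard encoding, for illustration only):
             in 16-bit code `ljmp $0x10,$0x1000' comes out as ea 00 10 10 00,
             i.e. the opcode, the 2-byte offset, then the 2-byte segment
             selector, each little-endian.  */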
  6889.  
  6890.   if (i.prefix[DATA_PREFIX] != 0)
  6891.     *p++ = DATA_PREFIX_OPCODE;
  6892.  
  6893.   if (i.prefix[REX_PREFIX] != 0)
  6894.     *p++ = i.prefix[REX_PREFIX];
  6895.  
  6896.   *p++ = i.tm.base_opcode;
  6897.   if (i.op[1].imms->X_op == O_constant)
  6898.     {
  6899.       offsetT n = i.op[1].imms->X_add_number;
  6900.  
  6901.       if (size == 2
  6902.           && !fits_in_unsigned_word (n)
  6903.           && !fits_in_signed_word (n))
  6904.         {
  6905.           as_bad (_("16-bit jump out of range"));
  6906.           return;
  6907.         }
  6908.       md_number_to_chars (p, n, size);
  6909.     }
  6910.   else
  6911.     fix_new_exp (frag_now, p - frag_now->fr_literal, size,
  6912.                  i.op[1].imms, 0, reloc (size, 0, 0, i.reloc[1]));
  6913.   if (i.op[0].imms->X_op != O_constant)
  6914.     as_bad (_("can't handle non absolute segment in `%s'"),
  6915.             i.tm.name);
  6916.   md_number_to_chars (p + size, (valueT) i.op[0].imms->X_add_number, 2);
  6917. }
  6918.  
  6919. static void
  6920. output_insn (void)
  6921. {
  6922.   fragS *insn_start_frag;
  6923.   offsetT insn_start_off;
  6924.  
  6925.   /* Tie dwarf2 debug info to the address at the start of the insn.
  6926.      We can't do this after the insn has been output as the current
  6927.      frag may have been closed off.  eg. by frag_var.  */
  6928.   dwarf2_emit_insn (0);
  6929.  
  6930.   insn_start_frag = frag_now;
  6931.   insn_start_off = frag_now_fix ();
  6932.  
  6933.   /* Output jumps.  */
  6934.   if (i.tm.opcode_modifier.jump)
  6935.     output_branch ();
  6936.   else if (i.tm.opcode_modifier.jumpbyte
  6937.            || i.tm.opcode_modifier.jumpdword)
  6938.     output_jump ();
  6939.   else if (i.tm.opcode_modifier.jumpintersegment)
  6940.     output_interseg_jump ();
  6941.   else
  6942.     {
  6943.       /* Output normal instructions here.  */
  6944.       char *p;
  6945.       unsigned char *q;
  6946.       unsigned int j;
  6947.       unsigned int prefix;
  6948.  
  6949.       /* Some processors fail on the LOCK prefix.  This option makes the
  6950.          assembler ignore the LOCK prefix and serves as a workaround.  */
  6951.       if (omit_lock_prefix)
  6952.         {
  6953.           if (i.tm.base_opcode == LOCK_PREFIX_OPCODE)
  6954.             return;
  6955.           i.prefix[LOCK_PREFIX] = 0;
  6956.         }
  6957.  
  6958.       /* Since the VEX/EVEX prefix contains the implicit prefix, we
  6959.          don't need the explicit prefix.  */
  6960.       if (!i.tm.opcode_modifier.vex && !i.tm.opcode_modifier.evex)
  6961.         {
  6962.           switch (i.tm.opcode_length)
  6963.             {
  6964.             case 3:
  6965.               if (i.tm.base_opcode & 0xff000000)
  6966.                 {
  6967.                   prefix = (i.tm.base_opcode >> 24) & 0xff;
  6968.                   goto check_prefix;
  6969.                 }
  6970.               break;
  6971.             case 2:
  6972.               if ((i.tm.base_opcode & 0xff0000) != 0)
  6973.                 {
  6974.                   prefix = (i.tm.base_opcode >> 16) & 0xff;
  6975.                   if (i.tm.cpu_flags.bitfield.cpupadlock)
  6976.                     {
  6977. check_prefix:
  6978.                       if (prefix != REPE_PREFIX_OPCODE
  6979.                           || (i.prefix[REP_PREFIX]
  6980.                               != REPE_PREFIX_OPCODE))
  6981.                         add_prefix (prefix);
  6982.                     }
  6983.                   else
  6984.                     add_prefix (prefix);
  6985.                 }
  6986.               break;
  6987.             case 1:
  6988.               break;
  6989.             default:
  6990.               abort ();
  6991.             }
  6992.  
  6993. #if defined (OBJ_MAYBE_ELF) || defined (OBJ_ELF)
  6994.           /* For x32, add a dummy REX_OPCODE prefix for mov/add with
  6995.              R_X86_64_GOTTPOFF relocation so that linker can safely
  6996.              perform IE->LE optimization.  */
  6997.           if (x86_elf_abi == X86_64_X32_ABI
  6998.               && i.operands == 2
  6999.               && i.reloc[0] == BFD_RELOC_X86_64_GOTTPOFF
  7000.               && i.prefix[REX_PREFIX] == 0)
  7001.             add_prefix (REX_OPCODE);
  7002. #endif
  7003.  
  7004.           /* The prefix bytes.  */
  7005.           for (j = ARRAY_SIZE (i.prefix), q = i.prefix; j > 0; j--, q++)
  7006.             if (*q)
  7007.               FRAG_APPEND_1_CHAR (*q);
  7008.         }
  7009.       else
  7010.         {
  7011.           for (j = 0, q = i.prefix; j < ARRAY_SIZE (i.prefix); j++, q++)
  7012.             if (*q)
  7013.               switch (j)
  7014.                 {
  7015.                 case REX_PREFIX:
  7016.                   /* REX byte is encoded in VEX prefix.  */
  7017.                   break;
  7018.                 case SEG_PREFIX:
  7019.                 case ADDR_PREFIX:
  7020.                   FRAG_APPEND_1_CHAR (*q);
  7021.                   break;
  7022.                 default:
  7023.                   /* There should be no other prefixes for instructions
  7024.                      with VEX prefix.  */
  7025.                   abort ();
  7026.                 }
  7027.  
  7028.           /* For EVEX instructions, i.vrex should become 0 after
  7029.              build_evex_prefix.  For VEX instructions the upper 16
  7030.              registers aren't available, so VREX should be 0.  */
  7031.           if (i.vrex)
  7032.             abort ();
  7033.           /* Now the VEX prefix.  */
  7034.           p = frag_more (i.vex.length);
  7035.           for (j = 0; j < i.vex.length; j++)
  7036.             p[j] = i.vex.bytes[j];
  7037.         }
  7038.  
  7039.       /* Now the opcode; be careful about word order here!  */
  7040.       if (i.tm.opcode_length == 1)
  7041.         {
  7042.           FRAG_APPEND_1_CHAR (i.tm.base_opcode);
  7043.         }
  7044.       else
  7045.         {
  7046.           switch (i.tm.opcode_length)
  7047.             {
  7048.             case 4:
  7049.               p = frag_more (4);
  7050.               *p++ = (i.tm.base_opcode >> 24) & 0xff;
  7051.               *p++ = (i.tm.base_opcode >> 16) & 0xff;
  7052.               break;
  7053.             case 3:
  7054.               p = frag_more (3);
  7055.               *p++ = (i.tm.base_opcode >> 16) & 0xff;
  7056.               break;
  7057.             case 2:
  7058.               p = frag_more (2);
  7059.               break;
  7060.             default:
  7061.               abort ();
  7062.               break;
  7063.             }
  7064.  
  7065.           /* Put out high byte first: can't use md_number_to_chars!  */
  7066.           *p++ = (i.tm.base_opcode >> 8) & 0xff;
  7067.           *p = i.tm.base_opcode & 0xff;
  7068.         }
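              /* For example, a two-byte opcode such as 0x0faf (`imul'
                 r32,r/m32) comes out above as 0f af; three- and four-byte
                 opcodes emit their extra leading bytes inside the switch
                 first.  */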
  7069.  
  7070.       /* Now the modrm byte and sib byte (if present).  */
  7071.       if (i.tm.opcode_modifier.modrm)
  7072.         {
  7073.           FRAG_APPEND_1_CHAR ((i.rm.regmem << 0
  7074.                                | i.rm.reg << 3
  7075.                                | i.rm.mode << 6));
  7076.           /* If i.rm.regmem == ESP (4)
  7077.              && i.rm.mode != (Register mode)
  7078.              && not 16 bit
  7079.              ==> need second modrm byte.  */
  7080.           if (i.rm.regmem == ESCAPE_TO_TWO_BYTE_ADDRESSING
  7081.               && i.rm.mode != 3
  7082.               && !(i.base_reg && i.base_reg->reg_type.bitfield.reg16))
  7083.             FRAG_APPEND_1_CHAR ((i.sib.base << 0
  7084.                                  | i.sib.index << 3
  7085.                                  | i.sib.scale << 6));
  7086.         }
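              /* E.g. `movl (%eax,%ebx,4),%ecx' emits ModRM 0x0c (mode 0,
                 reg %ecx, regmem 4 meaning a SIB byte follows) and SIB 0x98
                 (scale 2, index %ebx, base %eax), so the whole instruction
                 is 8b 0c 98.  */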
  7087.  
  7088.       if (i.disp_operands)
  7089.         output_disp (insn_start_frag, insn_start_off);
  7090.  
  7091.       if (i.imm_operands)
  7092.         output_imm (insn_start_frag, insn_start_off);
  7093.     }
  7094.  
  7095. #ifdef DEBUG386
  7096.   if (flag_debug)
  7097.     {
  7098.       pi ("" /*line*/, &i);
  7099.     }
  7100. #endif /* DEBUG386  */
  7101. }
  7102.  
  7103. /* Return the size of the displacement operand N.  */
  7104.  
  7105. static int
  7106. disp_size (unsigned int n)
  7107. {
  7108.   int size = 4;
  7109.  
  7110.   /* Vec_Disp8 has to be 8bit.  */
  7111.   if (i.types[n].bitfield.vec_disp8)
  7112.     size = 1;
  7113.   else if (i.types[n].bitfield.disp64)
  7114.     size = 8;
  7115.   else if (i.types[n].bitfield.disp8)
  7116.     size = 1;
  7117.   else if (i.types[n].bitfield.disp16)
  7118.     size = 2;
  7119.   return size;
  7120. }
  7121.  
  7122. /* Return the size of the immediate operand N.  */
  7123.  
  7124. static int
  7125. imm_size (unsigned int n)
  7126. {
  7127.   int size = 4;
  7128.   if (i.types[n].bitfield.imm64)
  7129.     size = 8;
  7130.   else if (i.types[n].bitfield.imm8 || i.types[n].bitfield.imm8s)
  7131.     size = 1;
  7132.   else if (i.types[n].bitfield.imm16)
  7133.     size = 2;
  7134.   return size;
  7135. }
  7136.  
  7137. static void
  7138. output_disp (fragS *insn_start_frag, offsetT insn_start_off)
  7139. {
  7140.   char *p;
  7141.   unsigned int n;
  7142.  
  7143.   for (n = 0; n < i.operands; n++)
  7144.     {
  7145.       if (i.types[n].bitfield.vec_disp8
  7146.           || operand_type_check (i.types[n], disp))
  7147.         {
  7148.           if (i.op[n].disps->X_op == O_constant)
  7149.             {
  7150.               int size = disp_size (n);
  7151.               offsetT val = i.op[n].disps->X_add_number;
  7152.  
  7153.               if (i.types[n].bitfield.vec_disp8)
  7154.                 val >>= i.memshift;
  7155.               val = offset_in_range (val, size);
  7156.               p = frag_more (size);
  7157.               md_number_to_chars (p, val, size);
  7158.             }
  7159.           else
  7160.             {
  7161.               enum bfd_reloc_code_real reloc_type;
  7162.               int size = disp_size (n);
  7163.               int sign = i.types[n].bitfield.disp32s;
  7164.               int pcrel = (i.flags[n] & Operand_PCrel) != 0;
  7165.               fixS *fixP;
  7166.  
  7167.               /* We can't have 8 bit displacement here.  */
  7168.               gas_assert (!i.types[n].bitfield.disp8);
  7169.  
  7170.               /* The PC relative address is computed relative
  7171.                  to the instruction boundary, so in case immediate
  7172.                  fields follow, we need to adjust the value.  */
  7173.               if (pcrel && i.imm_operands)
  7174.                 {
  7175.                   unsigned int n1;
  7176.                   int sz = 0;
  7177.  
  7178.                   for (n1 = 0; n1 < i.operands; n1++)
  7179.                     if (operand_type_check (i.types[n1], imm))
  7180.                       {
  7181.                         /* Only one immediate is allowed for PC
  7182.                            relative address.  */
  7183.                         gas_assert (sz == 0);
  7184.                         sz = imm_size (n1);
  7185.                         i.op[n].disps->X_add_number -= sz;
  7186.                       }
  7187.                   /* We should find the immediate.  */
  7188.                   gas_assert (sz != 0);
  7189.                 }
  7190.  
  7191.               p = frag_more (size);
  7192.               reloc_type = reloc (size, pcrel, sign, i.reloc[n]);
  7193.               if (GOT_symbol
  7194.                   && GOT_symbol == i.op[n].disps->X_add_symbol
  7195.                   && (((reloc_type == BFD_RELOC_32
  7196.                         || reloc_type == BFD_RELOC_X86_64_32S
  7197.                         || (reloc_type == BFD_RELOC_64
  7198.                             && object_64bit))
  7199.                        && (i.op[n].disps->X_op == O_symbol
  7200.                            || (i.op[n].disps->X_op == O_add
  7201.                                && ((symbol_get_value_expression
  7202.                                     (i.op[n].disps->X_op_symbol)->X_op)
  7203.                                    == O_subtract))))
  7204.                       || reloc_type == BFD_RELOC_32_PCREL))
  7205.                 {
  7206.                   offsetT add;
  7207.  
  7208.                   if (insn_start_frag == frag_now)
  7209.                     add = (p - frag_now->fr_literal) - insn_start_off;
  7210.                   else
  7211.                     {
  7212.                       fragS *fr;
  7213.  
  7214.                       add = insn_start_frag->fr_fix - insn_start_off;
  7215.                       for (fr = insn_start_frag->fr_next;
  7216.                            fr && fr != frag_now; fr = fr->fr_next)
  7217.                         add += fr->fr_fix;
  7218.                       add += p - frag_now->fr_literal;
  7219.                     }
  7220.  
  7221.                   if (!object_64bit)
  7222.                     {
  7223.                       reloc_type = BFD_RELOC_386_GOTPC;
  7224.                       i.op[n].imms->X_add_number += add;
  7225.                     }
  7226.                   else if (reloc_type == BFD_RELOC_64)
  7227.                     reloc_type = BFD_RELOC_X86_64_GOTPC64;
  7228.                   else
  7229.                     /* Don't do the adjustment for x86-64, as there
  7230.                        the pcrel addressing is relative to the _next_
  7231.                        insn, and that is taken care of in other code.  */
  7232.                     reloc_type = BFD_RELOC_X86_64_GOTPC32;
  7233.                 }
  7234.               fixP = fix_new_exp (frag_now, p - frag_now->fr_literal,
  7235.                                   size, i.op[n].disps, pcrel,
  7236.                                   reloc_type);
  7237.               /* Check for "call/jmp *mem", "mov mem, %reg",
  7238.                  "test %reg, mem" and "binop mem, %reg" where binop
  7239.                  is one of adc, add, and, cmp, or, sbb, sub, xor
  7240.                  instructions.  */
  7241.               if ((i.rm.mode == 2
  7242.                    || (i.rm.mode == 0 && i.rm.regmem == 5))
  7243.                   && ((i.operands == 1
  7244.                        && i.tm.base_opcode == 0xff
  7245.                        && (i.rm.reg == 2 || i.rm.reg == 4))
  7246.                       || (i.operands == 2
  7247.                           && (i.tm.base_opcode == 0x8b
  7248.                               || i.tm.base_opcode == 0x85
  7249.                               || (i.tm.base_opcode & 0xc7) == 0x03))))
  7250.                 {
  7251.                   if (object_64bit)
  7252.                     {
  7253.                       fixP->fx_tcbit = i.rex != 0;
  7254.                       if (i.base_reg
  7255.                           && (i.base_reg->reg_num == RegRip
  7256.                               || i.base_reg->reg_num == RegEip))
  7257.                         fixP->fx_tcbit2 = 1;
  7258.                     }
  7259.                   else
  7260.                     fixP->fx_tcbit2 = 1;
  7261.                 }
  7262.             }
  7263.         }
  7264.     }
  7265. }
  7266.  
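        /* Added summary comment (not in the original source): output_imm emits
           the immediate operand fields of the current instruction.  Constant
           immediates are written directly with md_number_to_chars; anything
           else gets a fixup, with the special GOT handling explained in the
           long comment further below.  */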
  7267. static void
  7268. output_imm (fragS *insn_start_frag, offsetT insn_start_off)
  7269. {
  7270.   char *p;
  7271.   unsigned int n;
  7272.  
  7273.   for (n = 0; n < i.operands; n++)
  7274.     {
  7275.       /* Skip the SAE/RC Imm operand in EVEX.  It is already handled.  */
  7276.       if (i.rounding && (int) n == i.rounding->operand)
  7277.         continue;
  7278.  
  7279.       if (operand_type_check (i.types[n], imm))
  7280.         {
  7281.           if (i.op[n].imms->X_op == O_constant)
  7282.             {
  7283.               int size = imm_size (n);
  7284.               offsetT val;
  7285.  
  7286.               val = offset_in_range (i.op[n].imms->X_add_number,
  7287.                                      size);
  7288.               p = frag_more (size);
  7289.               md_number_to_chars (p, val, size);
  7290.             }
  7291.           else
  7292.             {
  7293.               /* Not absolute_section.
  7294.                  Need a 32-bit fixup (don't support 8bit
  7295.                  non-absolute imms).  Try to support other
  7296.                  sizes ...  */
  7297.               enum bfd_reloc_code_real reloc_type;
  7298.               int size = imm_size (n);
  7299.               int sign;
  7300.  
  7301.               if (i.types[n].bitfield.imm32s
  7302.                   && (i.suffix == QWORD_MNEM_SUFFIX
  7303.                       || (!i.suffix && i.tm.opcode_modifier.no_lsuf)))
  7304.                 sign = 1;
  7305.               else
  7306.                 sign = 0;
  7307.  
  7308.               p = frag_more (size);
  7309.               reloc_type = reloc (size, 0, sign, i.reloc[n]);
  7310.  
  7311.               /*   This is tough to explain.  We end up with this one if we
  7312.                * have operands that look like
  7313.                * "_GLOBAL_OFFSET_TABLE_+[.-.L284]".  The goal here is to
  7314.                * obtain the absolute address of the GOT, and it is strongly
  7315.                * preferable from a performance point of view to avoid using
  7316.                * a runtime relocation for this.  The actual sequence of
  7317.                * instructions often looks something like:
  7318.                *
  7319.                *        call    .L66
  7320.                * .L66:
  7321.                *        popl    %ebx
  7322.                *        addl    $_GLOBAL_OFFSET_TABLE_+[.-.L66],%ebx
  7323.                *
  7324.                *   The call and pop essentially return the absolute address
  7325.                * of the label .L66 and store it in %ebx.  The linker itself
  7326.                * will ultimately change the first operand of the addl so
  7327.                * that %ebx points to the GOT, but to keep things simple, the
  7328.                * .o file must have this operand set so that it generates not
  7329.                * the absolute address of .L66, but the absolute address of
  7330.                * itself.  This allows the linker itself to simply treat a GOTPC
  7331.                * relocation as asking for a pcrel offset to the GOT to be
  7332.                * added in, and the addend of the relocation is stored in the
  7333.                * operand field for the instruction itself.
  7334.                *
  7335.                *   Our job here is to fix the operand so that it would add
  7336.                * the correct offset so that %ebx would point to itself.  The
  7337.                * thing that is tricky is that .-.L66 will point to the
  7338.                * beginning of the instruction, so we need to further modify
  7339.                * the operand so that it will point to itself.  There are
  7340.                * other cases where you have something like:
  7341.                *
  7342.                *        .long   $_GLOBAL_OFFSET_TABLE_+[.-.L66]
  7343.                *
  7344.                * and here no correction would be required.  Internally in
  7345.                * the assembler we treat operands of this form as not being
  7346.                * pcrel since the '.' is explicitly mentioned, and I wonder
  7347.                * whether it would simplify matters to do it this way.  Who
  7348.                * knows.  In earlier versions of the PIC patches, the
  7349.                * pcrel_adjust field was used to store the correction, but
  7350.                * since the expression is not pcrel, I felt it would be
  7351.                * confusing to do it this way.  */
  7352.  
  7353.               if ((reloc_type == BFD_RELOC_32
  7354.                    || reloc_type == BFD_RELOC_X86_64_32S
  7355.                    || reloc_type == BFD_RELOC_64)
  7356.                   && GOT_symbol
  7357.                   && GOT_symbol == i.op[n].imms->X_add_symbol
  7358.                   && (i.op[n].imms->X_op == O_symbol
  7359.                       || (i.op[n].imms->X_op == O_add
  7360.                           && ((symbol_get_value_expression
  7361.                                (i.op[n].imms->X_op_symbol)->X_op)
  7362.                               == O_subtract))))
  7363.                 {
  7364.                   offsetT add;
  7365.  
  7366.                   if (insn_start_frag == frag_now)
  7367.                     add = (p - frag_now->fr_literal) - insn_start_off;
  7368.                   else
  7369.                     {
  7370.                       fragS *fr;
  7371.  
  7372.                       add = insn_start_frag->fr_fix - insn_start_off;
  7373.                       for (fr = insn_start_frag->fr_next;
  7374.                            fr && fr != frag_now; fr = fr->fr_next)
  7375.                         add += fr->fr_fix;
  7376.                       add += p - frag_now->fr_literal;
  7377.                     }
  7378.  
  7379.                   if (!object_64bit)
  7380.                     reloc_type = BFD_RELOC_386_GOTPC;
  7381.                   else if (size == 4)
  7382.                     reloc_type = BFD_RELOC_X86_64_GOTPC32;
  7383.                   else if (size == 8)
  7384.                     reloc_type = BFD_RELOC_X86_64_GOTPC64;
  7385.                   i.op[n].imms->X_add_number += add;
  7386.                 }
  7387.               fix_new_exp (frag_now, p - frag_now->fr_literal, size,
  7388.                            i.op[n].imms, 0, reloc_type);
  7389.             }
  7390.         }
  7391.     }
  7392. }
  7393. /* x86_cons_fix_new is called via the expression parsing code when a
  7394.    reloc is needed.  We use this hook to get the correct .got reloc.  */
  7395. static int cons_sign = -1;
  7396.  
  7397. void
  7398. x86_cons_fix_new (fragS *frag, unsigned int off, unsigned int len,
  7399.                   expressionS *exp, bfd_reloc_code_real_type r)
  7400. {
  7401.   r = reloc (len, 0, cons_sign, r);
  7402.  
  7403. #ifdef TE_PE
  7404.   if (exp->X_op == O_secrel)
  7405.     {
  7406.       exp->X_op = O_symbol;
  7407.       r = BFD_RELOC_32_SECREL;
  7408.     }
  7409. #endif
  7410.  
  7411.   fix_new_exp (frag, off, len, exp, 0, r);
  7412. }
  7413.  
  7414. /* Export the ABI address size for use by TC_ADDRESS_BYTES for the
  7415.    purpose of the `.dc.a' internal pseudo-op.  */
  7416.  
  7417. int
  7418. x86_address_bytes (void)
  7419. {
  7420.   if ((stdoutput->arch_info->mach & bfd_mach_x64_32))
  7421.     return 4;
  7422.   return stdoutput->arch_info->bits_per_address / 8;
  7423. }
  7424.  
  7425. #if !(defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF) || defined (OBJ_MACH_O)) \
  7426.     || defined (LEX_AT)
  7427. # define lex_got(reloc, adjust, types) NULL
  7428. #else
  7429. /* Parse operands of the form
  7430.    <symbol>@GOTOFF+<nnn>
  7431.    and similar .plt or .got references.
  7432.  
  7433.    If we find one, set up the correct relocation in RELOC and copy the
  7434.    input string, minus the `@GOTOFF' into a malloc'd buffer for
  7435.    parsing by the calling routine.  Return this buffer, and if ADJUST
  7436.    is non-null set it to the length of the string we removed from the
  7437.    input line.  Otherwise return NULL.  */
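        /* Added illustrative note (not in the original source): for the input
           "foo@GOTOFF+4, %eax" in 32-bit ELF mode, the loop below matches the
           "GOTOFF" entry, sets *rel to BFD_RELOC_386_GOTOFF and *adjust to 6
           (the token length), and returns a buffer holding "foo +4," -- the
           reloc token is replaced by a space so the caller can re-parse the
           expression.  */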
  7438. static char *
  7439. lex_got (enum bfd_reloc_code_real *rel,
  7440.          int *adjust,
  7441.          i386_operand_type *types)
  7442. {
  7443.   /* Some of the relocations depend on the size of the field to be
  7444.      relocated.  But in our callers i386_immediate and i386_displacement
  7445.      we don't yet know the operand size (this will be set by insn
  7446.      matching).  Hence we record the word32 relocation here,
  7447.      and adjust the reloc according to the real size in reloc().  */
  7448.   static const struct {
  7449.     const char *str;
  7450.     int len;
  7451.     const enum bfd_reloc_code_real rel[2];
  7452.     const i386_operand_type types64;
  7453.   } gotrel[] = {
  7454. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  7455.     { STRING_COMMA_LEN ("SIZE"),      { BFD_RELOC_SIZE32,
  7456.                                         BFD_RELOC_SIZE32 },
  7457.       OPERAND_TYPE_IMM32_64 },
  7458. #endif
  7459.     { STRING_COMMA_LEN ("PLTOFF"),   { _dummy_first_bfd_reloc_code_real,
  7460.                                        BFD_RELOC_X86_64_PLTOFF64 },
  7461.       OPERAND_TYPE_IMM64 },
  7462.     { STRING_COMMA_LEN ("PLT"),      { BFD_RELOC_386_PLT32,
  7463.                                        BFD_RELOC_X86_64_PLT32    },
  7464.       OPERAND_TYPE_IMM32_32S_DISP32 },
  7465.     { STRING_COMMA_LEN ("GOTPLT"),   { _dummy_first_bfd_reloc_code_real,
  7466.                                        BFD_RELOC_X86_64_GOTPLT64 },
  7467.       OPERAND_TYPE_IMM64_DISP64 },
  7468.     { STRING_COMMA_LEN ("GOTOFF"),   { BFD_RELOC_386_GOTOFF,
  7469.                                        BFD_RELOC_X86_64_GOTOFF64 },
  7470.       OPERAND_TYPE_IMM64_DISP64 },
  7471.     { STRING_COMMA_LEN ("GOTPCREL"), { _dummy_first_bfd_reloc_code_real,
  7472.                                        BFD_RELOC_X86_64_GOTPCREL },
  7473.       OPERAND_TYPE_IMM32_32S_DISP32 },
  7474.     { STRING_COMMA_LEN ("TLSGD"),    { BFD_RELOC_386_TLS_GD,
  7475.                                        BFD_RELOC_X86_64_TLSGD    },
  7476.       OPERAND_TYPE_IMM32_32S_DISP32 },
  7477.     { STRING_COMMA_LEN ("TLSLDM"),   { BFD_RELOC_386_TLS_LDM,
  7478.                                        _dummy_first_bfd_reloc_code_real },
  7479.       OPERAND_TYPE_NONE },
  7480.     { STRING_COMMA_LEN ("TLSLD"),    { _dummy_first_bfd_reloc_code_real,
  7481.                                        BFD_RELOC_X86_64_TLSLD    },
  7482.       OPERAND_TYPE_IMM32_32S_DISP32 },
  7483.     { STRING_COMMA_LEN ("GOTTPOFF"), { BFD_RELOC_386_TLS_IE_32,
  7484.                                        BFD_RELOC_X86_64_GOTTPOFF },
  7485.       OPERAND_TYPE_IMM32_32S_DISP32 },
  7486.     { STRING_COMMA_LEN ("TPOFF"),    { BFD_RELOC_386_TLS_LE_32,
  7487.                                        BFD_RELOC_X86_64_TPOFF32  },
  7488.       OPERAND_TYPE_IMM32_32S_64_DISP32_64 },
  7489.     { STRING_COMMA_LEN ("NTPOFF"),   { BFD_RELOC_386_TLS_LE,
  7490.                                        _dummy_first_bfd_reloc_code_real },
  7491.       OPERAND_TYPE_NONE },
  7492.     { STRING_COMMA_LEN ("DTPOFF"),   { BFD_RELOC_386_TLS_LDO_32,
  7493.                                        BFD_RELOC_X86_64_DTPOFF32 },
  7494.       OPERAND_TYPE_IMM32_32S_64_DISP32_64 },
  7495.     { STRING_COMMA_LEN ("GOTNTPOFF"),{ BFD_RELOC_386_TLS_GOTIE,
  7496.                                        _dummy_first_bfd_reloc_code_real },
  7497.       OPERAND_TYPE_NONE },
  7498.     { STRING_COMMA_LEN ("INDNTPOFF"),{ BFD_RELOC_386_TLS_IE,
  7499.                                        _dummy_first_bfd_reloc_code_real },
  7500.       OPERAND_TYPE_NONE },
  7501.     { STRING_COMMA_LEN ("GOT"),      { BFD_RELOC_386_GOT32,
  7502.                                        BFD_RELOC_X86_64_GOT32    },
  7503.       OPERAND_TYPE_IMM32_32S_64_DISP32 },
  7504.     { STRING_COMMA_LEN ("TLSDESC"),  { BFD_RELOC_386_TLS_GOTDESC,
  7505.                                        BFD_RELOC_X86_64_GOTPC32_TLSDESC },
  7506.       OPERAND_TYPE_IMM32_32S_DISP32 },
  7507.     { STRING_COMMA_LEN ("TLSCALL"),  { BFD_RELOC_386_TLS_DESC_CALL,
  7508.                                        BFD_RELOC_X86_64_TLSDESC_CALL },
  7509.       OPERAND_TYPE_IMM32_32S_DISP32 },
  7510.   };
  7511.   char *cp;
  7512.   unsigned int j;
  7513.  
  7514. #if defined (OBJ_MAYBE_ELF)
  7515.   if (!IS_ELF)
  7516.     return NULL;
  7517. #endif
  7518.  
  7519.   for (cp = input_line_pointer; *cp != '@'; cp++)
  7520.     if (is_end_of_line[(unsigned char) *cp] || *cp == ',')
  7521.       return NULL;
  7522.  
  7523.   for (j = 0; j < ARRAY_SIZE (gotrel); j++)
  7524.     {
  7525.       int len = gotrel[j].len;
  7526.       if (strncasecmp (cp + 1, gotrel[j].str, len) == 0)
  7527.         {
  7528.           if (gotrel[j].rel[object_64bit] != 0)
  7529.             {
  7530.               int first, second;
  7531.               char *tmpbuf, *past_reloc;
  7532.  
  7533.               *rel = gotrel[j].rel[object_64bit];
  7534.  
  7535.               if (types)
  7536.                 {
  7537.                   if (flag_code != CODE_64BIT)
  7538.                     {
  7539.                       types->bitfield.imm32 = 1;
  7540.                       types->bitfield.disp32 = 1;
  7541.                     }
  7542.                   else
  7543.                     *types = gotrel[j].types64;
  7544.                 }
  7545.  
  7546.               if (j != 0 && GOT_symbol == NULL)
  7547.                 GOT_symbol = symbol_find_or_make (GLOBAL_OFFSET_TABLE_NAME);
  7548.  
  7549.               /* The length of the first part of our input line.  */
  7550.               first = cp - input_line_pointer;
  7551.  
  7552.               /* The second part goes from after the reloc token until
  7553.                  (and including) an end_of_line char or comma.  */
  7554.               past_reloc = cp + 1 + len;
  7555.               cp = past_reloc;
  7556.               while (!is_end_of_line[(unsigned char) *cp] && *cp != ',')
  7557.                 ++cp;
  7558.               second = cp + 1 - past_reloc;
  7559.  
  7560.               /* Allocate and copy string.  The trailing NUL shouldn't
  7561.                  be necessary, but be safe.  */
  7562.               tmpbuf = (char *) xmalloc (first + second + 2);
  7563.               memcpy (tmpbuf, input_line_pointer, first);
  7564.               if (second != 0 && *past_reloc != ' ')
  7565.                 /* Replace the relocation token with ' ', so that
  7566.                    errors like foo@GOTOFF1 will be detected.  */
  7567.                 tmpbuf[first++] = ' ';
  7568.               else
  7569.                 /* Increment length by 1 if the relocation token is
  7570.                    removed.  */
  7571.                 len++;
  7572.               if (adjust)
  7573.                 *adjust = len;
  7574.               memcpy (tmpbuf + first, past_reloc, second);
  7575.               tmpbuf[first + second] = '\0';
  7576.               return tmpbuf;
  7577.             }
  7578.  
  7579.           as_bad (_("@%s reloc is not supported with %d-bit output format"),
  7580.                   gotrel[j].str, 1 << (5 + object_64bit));
  7581.           return NULL;
  7582.         }
  7583.     }
  7584.  
  7585.   /* Might be a symbol version string.  Don't as_bad here.  */
  7586.   return NULL;
  7587. }
  7588. #endif
  7589.  
  7590. #ifdef TE_PE
  7591. #ifdef lex_got
  7592. #undef lex_got
  7593. #endif
  7594. /* Parse operands of the form
  7595.    <symbol>@SECREL32+<nnn>
  7596.  
  7597.    If we find one, set up the correct relocation in RELOC and copy the
  7598.    input string, minus the `@SECREL32' into a malloc'd buffer for
  7599.    parsing by the calling routine.  Return this buffer, and if ADJUST
  7600.    is non-null set it to the length of the string we removed from the
  7601.    input line.  Otherwise return NULL.
  7602.  
  7603.    This function is copied from the ELF version above, adjusted for PE targets.  */
  7604.  
  7605. static char *
  7606. lex_got (enum bfd_reloc_code_real *rel ATTRIBUTE_UNUSED,
  7607.          int *adjust ATTRIBUTE_UNUSED,
  7608.          i386_operand_type *types)
  7609. {
  7610.   static const struct
  7611.   {
  7612.     const char *str;
  7613.     int len;
  7614.     const enum bfd_reloc_code_real rel[2];
  7615.     const i386_operand_type types64;
  7616.   }
  7617.   gotrel[] =
  7618.   {
  7619.     { STRING_COMMA_LEN ("SECREL32"),    { BFD_RELOC_32_SECREL,
  7620.                                           BFD_RELOC_32_SECREL },
  7621.       OPERAND_TYPE_IMM32_32S_64_DISP32_64 },
  7622.   };
  7623.  
  7624.   char *cp;
  7625.   unsigned j;
  7626.  
  7627.   for (cp = input_line_pointer; *cp != '@'; cp++)
  7628.     if (is_end_of_line[(unsigned char) *cp] || *cp == ',')
  7629.       return NULL;
  7630.  
  7631.   for (j = 0; j < ARRAY_SIZE (gotrel); j++)
  7632.     {
  7633.       int len = gotrel[j].len;
  7634.  
  7635.       if (strncasecmp (cp + 1, gotrel[j].str, len) == 0)
  7636.         {
  7637.           if (gotrel[j].rel[object_64bit] != 0)
  7638.             {
  7639.               int first, second;
  7640.               char *tmpbuf, *past_reloc;
  7641.  
  7642.               *rel = gotrel[j].rel[object_64bit];
  7643.               if (adjust)
  7644.                 *adjust = len;
  7645.  
  7646.               if (types)
  7647.                 {
  7648.                   if (flag_code != CODE_64BIT)
  7649.                     {
  7650.                       types->bitfield.imm32 = 1;
  7651.                       types->bitfield.disp32 = 1;
  7652.                     }
  7653.                   else
  7654.                     *types = gotrel[j].types64;
  7655.                 }
  7656.  
  7657.               /* The length of the first part of our input line.  */
  7658.               first = cp - input_line_pointer;
  7659.  
  7660.               /* The second part goes from after the reloc token until
  7661.                  (and including) an end_of_line char or comma.  */
  7662.               past_reloc = cp + 1 + len;
  7663.               cp = past_reloc;
  7664.               while (!is_end_of_line[(unsigned char) *cp] && *cp != ',')
  7665.                 ++cp;
  7666.               second = cp + 1 - past_reloc;
  7667.  
  7668.               /* Allocate and copy string.  The trailing NUL shouldn't
  7669.                  be necessary, but be safe.  */
  7670.               tmpbuf = (char *) xmalloc (first + second + 2);
  7671.               memcpy (tmpbuf, input_line_pointer, first);
  7672.               if (second != 0 && *past_reloc != ' ')
  7673.                 /* Replace the relocation token with ' ', so that
  7674.                    errors like foo@SECREL321 will be detected.  */
  7675.                 tmpbuf[first++] = ' ';
  7676.               memcpy (tmpbuf + first, past_reloc, second);
  7677.               tmpbuf[first + second] = '\0';
  7678.               return tmpbuf;
  7679.             }
  7680.  
  7681.           as_bad (_("@%s reloc is not supported with %d-bit output format"),
  7682.                   gotrel[j].str, 1 << (5 + object_64bit));
  7683.           return NULL;
  7684.         }
  7685.     }
  7686.  
  7687.   /* Might be a symbol version string.  Don't as_bad here.  */
  7688.   return NULL;
  7689. }
  7690.  
  7691. #endif /* TE_PE */
  7692.  
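        /* Added illustrative note (not in the original source): this is the
           expression parser used for data directives.  For ".long foo@GOTOFF"
           in 32-bit ELF output, lex_got strips the "@GOTOFF" token and
           BFD_RELOC_386_GOTOFF is returned so the caller can emit the matching
           fixup.  */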
  7693. bfd_reloc_code_real_type
  7694. x86_cons (expressionS *exp, int size)
  7695. {
  7696.   bfd_reloc_code_real_type got_reloc = NO_RELOC;
  7697.  
  7698.   intel_syntax = -intel_syntax;
  7699.  
  7700.   exp->X_md = 0;
  7701.   if (size == 4 || (object_64bit && size == 8))
  7702.     {
  7703.       /* Handle @GOTOFF and the like in an expression.  */
  7704.       char *save;
  7705.       char *gotfree_input_line;
  7706.       int adjust = 0;
  7707.  
  7708.       save = input_line_pointer;
  7709.       gotfree_input_line = lex_got (&got_reloc, &adjust, NULL);
  7710.       if (gotfree_input_line)
  7711.         input_line_pointer = gotfree_input_line;
  7712.  
  7713.       expression (exp);
  7714.  
  7715.       if (gotfree_input_line)
  7716.         {
  7717.           /* expression () has merrily parsed up to the end of line,
  7718.              or a comma - in the wrong buffer.  Transfer how far
  7719.              input_line_pointer has moved to the right buffer.  */
  7720.           input_line_pointer = (save
  7721.                                 + (input_line_pointer - gotfree_input_line)
  7722.                                 + adjust);
  7723.           free (gotfree_input_line);
  7724.           if (exp->X_op == O_constant
  7725.               || exp->X_op == O_absent
  7726.               || exp->X_op == O_illegal
  7727.               || exp->X_op == O_register
  7728.               || exp->X_op == O_big)
  7729.             {
  7730.               char c = *input_line_pointer;
  7731.               *input_line_pointer = 0;
  7732.               as_bad (_("missing or invalid expression `%s'"), save);
  7733.               *input_line_pointer = c;
  7734.             }
  7735.         }
  7736.     }
  7737.   else
  7738.     expression (exp);
  7739.  
  7740.   intel_syntax = -intel_syntax;
  7741.  
  7742.   if (intel_syntax)
  7743.     i386_intel_simplify (exp);
  7744.  
  7745.   return got_reloc;
  7746. }
  7747.  
  7748. static void
  7749. signed_cons (int size)
  7750. {
  7751.   if (flag_code == CODE_64BIT)
  7752.     cons_sign = 1;
  7753.   cons (size);
  7754.   cons_sign = -1;
  7755. }
  7756.  
  7757. #ifdef TE_PE
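        /* Added illustrative note (not in the original source): this handler
           backs the PE-only ".secrel32" pseudo-op.  Each expression, as in
           ".secrel32 sym", is marked O_secrel so that x86_cons_fix_new above
           turns the 4-byte fixup into BFD_RELOC_32_SECREL.  */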
  7758. static void
  7759. pe_directive_secrel (int dummy ATTRIBUTE_UNUSED)
  7760. {
  7761.   expressionS exp;
  7762.  
  7763.   do
  7764.     {
  7765.       expression (&exp);
  7766.       if (exp.X_op == O_symbol)
  7767.         exp.X_op = O_secrel;
  7768.  
  7769.       emit_expr (&exp, 4);
  7770.     }
  7771.   while (*input_line_pointer++ == ',');
  7772.  
  7773.   input_line_pointer--;
  7774.   demand_empty_rest_of_line ();
  7775. }
  7776. #endif
  7777.  
  7778. /* Handle Vector operations.  */
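        /* Added illustrative note (not in the original source): this parser
           handles the EVEX decorations that may trail an operand in AT&T
           syntax, for example

               vaddps (%rax){1to16}, %zmm2, %zmm3       broadcast
               vaddpd %zmm1, %zmm2, %zmm3{%k1}{z}       write mask + zeroing

           A "1toN" token records a broadcast in i.broadcast, "%kN" records a
           write mask (k0 is rejected) and "z" records zeroing-masking.  */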
  7779.  
  7780. static char *
  7781. check_VecOperations (char *op_string, char *op_end)
  7782. {
  7783.   const reg_entry *mask;
  7784.   const char *saved;
  7785.   char *end_op;
  7786.  
  7787.   while (*op_string
  7788.          && (op_end == NULL || op_string < op_end))
  7789.     {
  7790.       saved = op_string;
  7791.       if (*op_string == '{')
  7792.         {
  7793.           op_string++;
  7794.  
  7795.           /* Check broadcasts.  */
  7796.           if (strncmp (op_string, "1to", 3) == 0)
  7797.             {
  7798.               int bcst_type;
  7799.  
  7800.               if (i.broadcast)
  7801.                 goto duplicated_vec_op;
  7802.  
  7803.               op_string += 3;
  7804.               if (*op_string == '8')
  7805.                 bcst_type = BROADCAST_1TO8;
  7806.               else if (*op_string == '4')
  7807.                 bcst_type = BROADCAST_1TO4;
  7808.               else if (*op_string == '2')
  7809.                 bcst_type = BROADCAST_1TO2;
  7810.               else if (*op_string == '1'
  7811.                        && *(op_string+1) == '6')
  7812.                 {
  7813.                   bcst_type = BROADCAST_1TO16;
  7814.                   op_string++;
  7815.                 }
  7816.               else
  7817.                 {
  7818.                   as_bad (_("Unsupported broadcast: `%s'"), saved);
  7819.                   return NULL;
  7820.                 }
  7821.               op_string++;
  7822.  
  7823.               broadcast_op.type = bcst_type;
  7824.               broadcast_op.operand = this_operand;
  7825.               i.broadcast = &broadcast_op;
  7826.             }
  7827.           /* Check masking operation.  */
  7828.           else if ((mask = parse_register (op_string, &end_op)) != NULL)
  7829.             {
  7830.               /* k0 can't be used for write mask.  */
  7831.               if (mask->reg_num == 0)
  7832.                 {
  7833.                   as_bad (_("`%s' can't be used for write mask"),
  7834.                           op_string);
  7835.                   return NULL;
  7836.                 }
  7837.  
  7838.               if (!i.mask)
  7839.                 {
  7840.                   mask_op.mask = mask;
  7841.                   mask_op.zeroing = 0;
  7842.                   mask_op.operand = this_operand;
  7843.                   i.mask = &mask_op;
  7844.                 }
  7845.               else
  7846.                 {
  7847.                   if (i.mask->mask)
  7848.                     goto duplicated_vec_op;
  7849.  
  7850.                   i.mask->mask = mask;
  7851.  
  7852.                   /* Only "{z}" is allowed here.  No need to check
  7853.                      zeroing mask explicitly.  */
  7854.                   if (i.mask->operand != this_operand)
  7855.                     {
  7856.                       as_bad (_("invalid write mask `%s'"), saved);
  7857.                       return NULL;
  7858.                     }
  7859.                 }
  7860.  
  7861.               op_string = end_op;
  7862.             }
  7863.           /* Check zeroing-flag for masking operation.  */
  7864.           else if (*op_string == 'z')
  7865.             {
  7866.               if (!i.mask)
  7867.                 {
  7868.                   mask_op.mask = NULL;
  7869.                   mask_op.zeroing = 1;
  7870.                   mask_op.operand = this_operand;
  7871.                   i.mask = &mask_op;
  7872.                 }
  7873.               else
  7874.                 {
  7875.                   if (i.mask->zeroing)
  7876.                     {
  7877.                     duplicated_vec_op:
  7878.                       as_bad (_("duplicated `%s'"), saved);
  7879.                       return NULL;
  7880.                     }
  7881.  
  7882.                   i.mask->zeroing = 1;
  7883.  
  7884.                   /* Only "{%k}" is allowed here.  No need to check mask
  7885.                      register explicitly.  */
  7886.                   if (i.mask->operand != this_operand)
  7887.                     {
  7888.                       as_bad (_("invalid zeroing-masking `%s'"),
  7889.                               saved);
  7890.                       return NULL;
  7891.                     }
  7892.                 }
  7893.  
  7894.               op_string++;
  7895.             }
  7896.           else
  7897.             goto unknown_vec_op;
  7898.  
  7899.           if (*op_string != '}')
  7900.             {
  7901.               as_bad (_("missing `}' in `%s'"), saved);
  7902.               return NULL;
  7903.             }
  7904.           op_string++;
  7905.           continue;
  7906.         }
  7907.     unknown_vec_op:
  7908.       /* We don't know this one.  */
  7909.       as_bad (_("unknown vector operation: `%s'"), saved);
  7910.       return NULL;
  7911.     }
  7912.  
  7913.   return op_string;
  7914. }
  7915.  
  7916. static int
  7917. i386_immediate (char *imm_start)
  7918. {
  7919.   char *save_input_line_pointer;
  7920.   char *gotfree_input_line;
  7921.   segT exp_seg = 0;
  7922.   expressionS *exp;
  7923.   i386_operand_type types;
  7924.  
  7925.   operand_type_set (&types, ~0);
  7926.  
  7927.   if (i.imm_operands == MAX_IMMEDIATE_OPERANDS)
  7928.     {
  7929.       as_bad (_("at most %d immediate operands are allowed"),
  7930.               MAX_IMMEDIATE_OPERANDS);
  7931.       return 0;
  7932.     }
  7933.  
  7934.   exp = &im_expressions[i.imm_operands++];
  7935.   i.op[this_operand].imms = exp;
  7936.  
  7937.   if (is_space_char (*imm_start))
  7938.     ++imm_start;
  7939.  
  7940.   save_input_line_pointer = input_line_pointer;
  7941.   input_line_pointer = imm_start;
  7942.  
  7943.   gotfree_input_line = lex_got (&i.reloc[this_operand], NULL, &types);
  7944.   if (gotfree_input_line)
  7945.     input_line_pointer = gotfree_input_line;
  7946.  
  7947.   exp_seg = expression (exp);
  7948.  
  7949.   SKIP_WHITESPACE ();
  7950.  
  7951.   /* Handle vector operations.  */
  7952.   if (*input_line_pointer == '{')
  7953.     {
  7954.       input_line_pointer = check_VecOperations (input_line_pointer,
  7955.                                                 NULL);
  7956.       if (input_line_pointer == NULL)
  7957.         return 0;
  7958.     }
  7959.  
  7960.   if (*input_line_pointer)
  7961.     as_bad (_("junk `%s' after expression"), input_line_pointer);
  7962.  
  7963.   input_line_pointer = save_input_line_pointer;
  7964.   if (gotfree_input_line)
  7965.     {
  7966.       free (gotfree_input_line);
  7967.  
  7968.       if (exp->X_op == O_constant || exp->X_op == O_register)
  7969.         exp->X_op = O_illegal;
  7970.     }
  7971.  
  7972.   return i386_finalize_immediate (exp_seg, exp, types, imm_start);
  7973. }
  7974.  
  7975. static int
  7976. i386_finalize_immediate (segT exp_seg ATTRIBUTE_UNUSED, expressionS *exp,
  7977.                          i386_operand_type types, const char *imm_start)
  7978. {
  7979.   if (exp->X_op == O_absent || exp->X_op == O_illegal || exp->X_op == O_big)
  7980.     {
  7981.       if (imm_start)
  7982.         as_bad (_("missing or invalid immediate expression `%s'"),
  7983.                 imm_start);
  7984.       return 0;
  7985.     }
  7986.   else if (exp->X_op == O_constant)
  7987.     {
  7988.       /* Size it properly later.  */
  7989.       i.types[this_operand].bitfield.imm64 = 1;
  7990.       /* If not 64bit, sign extend val.  */
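              /* (Added illustrative note: a 32-bit input such as 0xfffffffe is
                 folded to -2 here, so later width checks see a signed value.)  */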
  7991.       if (flag_code != CODE_64BIT
  7992.           && (exp->X_add_number & ~(((addressT) 2 << 31) - 1)) == 0)
  7993.         exp->X_add_number
  7994.           = (exp->X_add_number ^ ((addressT) 1 << 31)) - ((addressT) 1 << 31);
  7995.     }
  7996. #if (defined (OBJ_AOUT) || defined (OBJ_MAYBE_AOUT))
  7997.   else if (OUTPUT_FLAVOR == bfd_target_aout_flavour
  7998.            && exp_seg != absolute_section
  7999.            && exp_seg != text_section
  8000.            && exp_seg != data_section
  8001.            && exp_seg != bss_section
  8002.            && exp_seg != undefined_section
  8003.            && !bfd_is_com_section (exp_seg))
  8004.     {
  8005.       as_bad (_("unimplemented segment %s in operand"), exp_seg->name);
  8006.       return 0;
  8007.     }
  8008. #endif
  8009.   else if (!intel_syntax && exp_seg == reg_section)
  8010.     {
  8011.       if (imm_start)
  8012.         as_bad (_("illegal immediate register operand %s"), imm_start);
  8013.       return 0;
  8014.     }
  8015.   else
  8016.     {
  8017.       /* This is an address.  The size of the address will be
  8018.          determined later, depending on destination register,
  8019.          suffix, or the default for the section.  */
  8020.       i.types[this_operand].bitfield.imm8 = 1;
  8021.       i.types[this_operand].bitfield.imm16 = 1;
  8022.       i.types[this_operand].bitfield.imm32 = 1;
  8023.       i.types[this_operand].bitfield.imm32s = 1;
  8024.       i.types[this_operand].bitfield.imm64 = 1;
  8025.       i.types[this_operand] = operand_type_and (i.types[this_operand],
  8026.                                                 types);
  8027.     }
  8028.  
  8029.   return 1;
  8030. }
  8031.  
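        /* Added illustrative note (not in the original source): the scale
           component of an AT&T memory operand is parsed here, e.g. the "4" in
           "(%ebx,%ecx,4)", which sets i.log2_scale_factor to 2; only 1, 2, 4
           and 8 are accepted.  */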
  8032. static char *
  8033. i386_scale (char *scale)
  8034. {
  8035.   offsetT val;
  8036.   char *save = input_line_pointer;
  8037.  
  8038.   input_line_pointer = scale;
  8039.   val = get_absolute_expression ();
  8040.  
  8041.   switch (val)
  8042.     {
  8043.     case 1:
  8044.       i.log2_scale_factor = 0;
  8045.       break;
  8046.     case 2:
  8047.       i.log2_scale_factor = 1;
  8048.       break;
  8049.     case 4:
  8050.       i.log2_scale_factor = 2;
  8051.       break;
  8052.     case 8:
  8053.       i.log2_scale_factor = 3;
  8054.       break;
  8055.     default:
  8056.       {
  8057.         char sep = *input_line_pointer;
  8058.  
  8059.         *input_line_pointer = '\0';
  8060.         as_bad (_("expecting scale factor of 1, 2, 4, or 8: got `%s'"),
  8061.                 scale);
  8062.         *input_line_pointer = sep;
  8063.         input_line_pointer = save;
  8064.         return NULL;
  8065.       }
  8066.     }
  8067.   if (i.log2_scale_factor != 0 && i.index_reg == 0)
  8068.     {
  8069.       as_warn (_("scale factor of %d without an index register"),
  8070.                1 << i.log2_scale_factor);
  8071.       i.log2_scale_factor = 0;
  8072.     }
  8073.   scale = input_line_pointer;
  8074.   input_line_pointer = save;
  8075.   return scale;
  8076. }
  8077.  
  8078. static int
  8079. i386_displacement (char *disp_start, char *disp_end)
  8080. {
  8081.   expressionS *exp;
  8082.   segT exp_seg = 0;
  8083.   char *save_input_line_pointer;
  8084.   char *gotfree_input_line;
  8085.   int override;
  8086.   i386_operand_type bigdisp, types = anydisp;
  8087.   int ret;
  8088.  
  8089.   if (i.disp_operands == MAX_MEMORY_OPERANDS)
  8090.     {
  8091.       as_bad (_("at most %d displacement operands are allowed"),
  8092.               MAX_MEMORY_OPERANDS);
  8093.       return 0;
  8094.     }
  8095.  
  8096.   operand_type_set (&bigdisp, 0);
  8097.   if ((i.types[this_operand].bitfield.jumpabsolute)
  8098.       || (!current_templates->start->opcode_modifier.jump
  8099.           && !current_templates->start->opcode_modifier.jumpdword))
  8100.     {
  8101.       bigdisp.bitfield.disp32 = 1;
  8102.       override = (i.prefix[ADDR_PREFIX] != 0);
  8103.       if (flag_code == CODE_64BIT)
  8104.         {
  8105.           if (!override)
  8106.             {
  8107.               bigdisp.bitfield.disp32s = 1;
  8108.               bigdisp.bitfield.disp64 = 1;
  8109.             }
  8110.         }
  8111.       else if ((flag_code == CODE_16BIT) ^ override)
  8112.         {
  8113.           bigdisp.bitfield.disp32 = 0;
  8114.           bigdisp.bitfield.disp16 = 1;
  8115.         }
  8116.     }
  8117.   else
  8118.     {
  8119.       /* For PC-relative branches, the width of the displacement
  8120.          is dependent upon data size, not address size.  */
  8121.       override = (i.prefix[DATA_PREFIX] != 0);
  8122.       if (flag_code == CODE_64BIT)
  8123.         {
  8124.           if (override || i.suffix == WORD_MNEM_SUFFIX)
  8125.             bigdisp.bitfield.disp16 = 1;
  8126.           else
  8127.             {
  8128.               bigdisp.bitfield.disp32 = 1;
  8129.               bigdisp.bitfield.disp32s = 1;
  8130.             }
  8131.         }
  8132.       else
  8133.         {
  8134.           if (!override)
  8135.             override = (i.suffix == (flag_code != CODE_16BIT
  8136.                                      ? WORD_MNEM_SUFFIX
  8137.                                      : LONG_MNEM_SUFFIX));
  8138.           bigdisp.bitfield.disp32 = 1;
  8139.           if ((flag_code == CODE_16BIT) ^ override)
  8140.             {
  8141.               bigdisp.bitfield.disp32 = 0;
  8142.               bigdisp.bitfield.disp16 = 1;
  8143.             }
  8144.         }
  8145.     }
  8146.   i.types[this_operand] = operand_type_or (i.types[this_operand],
  8147.                                            bigdisp);
  8148.  
  8149.   exp = &disp_expressions[i.disp_operands];
  8150.   i.op[this_operand].disps = exp;
  8151.   i.disp_operands++;
  8152.   save_input_line_pointer = input_line_pointer;
  8153.   input_line_pointer = disp_start;
  8154.   END_STRING_AND_SAVE (disp_end);
  8155.  
  8156. #ifndef GCC_ASM_O_HACK
  8157. #define GCC_ASM_O_HACK 0
  8158. #endif
  8159. #if GCC_ASM_O_HACK
  8160.   END_STRING_AND_SAVE (disp_end + 1);
  8161.   if (i.types[this_operand].bitfield.baseIndex
  8162.       && displacement_string_end[-1] == '+')
  8163.     {
  8164.       /* This hack is to avoid a warning when using the "o"
  8165.          constraint within gcc asm statements.
  8166.          For instance:
  8167.  
  8168.          #define _set_tssldt_desc(n,addr,limit,type) \
  8169.          __asm__ __volatile__ ( \
  8170.          "movw %w2,%0\n\t" \
  8171.          "movw %w1,2+%0\n\t" \
  8172.          "rorl $16,%1\n\t" \
  8173.          "movb %b1,4+%0\n\t" \
  8174.          "movb %4,5+%0\n\t" \
  8175.          "movb $0,6+%0\n\t" \
  8176.          "movb %h1,7+%0\n\t" \
  8177.          "rorl $16,%1" \
  8178.          : "=o"(*(n)) : "q" (addr), "ri"(limit), "i"(type))
  8179.  
  8180.          This works great except that the assembler output ends
  8181.          up looking a bit weird if it turns out that there is
  8182.          no offset.  You end up producing code that looks like:
  8183.  
  8184.          #APP
  8185.          movw $235,(%eax)
  8186.          movw %dx,2+(%eax)
  8187.          rorl $16,%edx
  8188.          movb %dl,4+(%eax)
  8189.          movb $137,5+(%eax)
  8190.          movb $0,6+(%eax)
  8191.          movb %dh,7+(%eax)
  8192.          rorl $16,%edx
  8193.          #NO_APP
  8194.  
  8195.          So here we provide the missing zero.  */
  8196.  
  8197.       *displacement_string_end = '0';
  8198.     }
  8199. #endif
  8200.   gotfree_input_line = lex_got (&i.reloc[this_operand], NULL, &types);
  8201.   if (gotfree_input_line)
  8202.     input_line_pointer = gotfree_input_line;
  8203.  
  8204.   exp_seg = expression (exp);
  8205.  
  8206.   SKIP_WHITESPACE ();
  8207.   if (*input_line_pointer)
  8208.     as_bad (_("junk `%s' after expression"), input_line_pointer);
  8209. #if GCC_ASM_O_HACK
  8210.   RESTORE_END_STRING (disp_end + 1);
  8211. #endif
  8212.   input_line_pointer = save_input_line_pointer;
  8213.   if (gotfree_input_line)
  8214.     {
  8215.       free (gotfree_input_line);
  8216.  
  8217.       if (exp->X_op == O_constant || exp->X_op == O_register)
  8218.         exp->X_op = O_illegal;
  8219.     }
  8220.  
  8221.   ret = i386_finalize_displacement (exp_seg, exp, types, disp_start);
  8222.  
  8223.   RESTORE_END_STRING (disp_end);
  8224.  
  8225.   return ret;
  8226. }
  8227.  
  8228. static int
  8229. i386_finalize_displacement (segT exp_seg ATTRIBUTE_UNUSED, expressionS *exp,
  8230.                             i386_operand_type types, const char *disp_start)
  8231. {
  8232.   i386_operand_type bigdisp;
  8233.   int ret = 1;
  8234.  
  8235.   /* We do this to make sure that the section symbol is in
  8236.      the symbol table.  We will ultimately change the relocation
  8237.      to be relative to the beginning of the section.  */
  8238.   if (i.reloc[this_operand] == BFD_RELOC_386_GOTOFF
  8239.       || i.reloc[this_operand] == BFD_RELOC_X86_64_GOTPCREL
  8240.       || i.reloc[this_operand] == BFD_RELOC_X86_64_GOTOFF64)
  8241.     {
  8242.       if (exp->X_op != O_symbol)
  8243.         goto inv_disp;
  8244.  
  8245.       if (S_IS_LOCAL (exp->X_add_symbol)
  8246.           && S_GET_SEGMENT (exp->X_add_symbol) != undefined_section
  8247.           && S_GET_SEGMENT (exp->X_add_symbol) != expr_section)
  8248.         section_symbol (S_GET_SEGMENT (exp->X_add_symbol));
  8249.       exp->X_op = O_subtract;
  8250.       exp->X_op_symbol = GOT_symbol;
  8251.       if (i.reloc[this_operand] == BFD_RELOC_X86_64_GOTPCREL)
  8252.         i.reloc[this_operand] = BFD_RELOC_32_PCREL;
  8253.       else if (i.reloc[this_operand] == BFD_RELOC_X86_64_GOTOFF64)
  8254.         i.reloc[this_operand] = BFD_RELOC_64;
  8255.       else
  8256.         i.reloc[this_operand] = BFD_RELOC_32;
  8257.     }
  8258.  
  8259.   else if (exp->X_op == O_absent
  8260.            || exp->X_op == O_illegal
  8261.            || exp->X_op == O_big)
  8262.     {
  8263.     inv_disp:
  8264.       as_bad (_("missing or invalid displacement expression `%s'"),
  8265.               disp_start);
  8266.       ret = 0;
  8267.     }
  8268.  
  8269.   else if (flag_code == CODE_64BIT
  8270.            && !i.prefix[ADDR_PREFIX]
  8271.            && exp->X_op == O_constant)
  8272.     {
  8273.       /* Since the displacement is sign-extended to 64bit, don't allow
  8274.          disp32 and turn off disp32s if they are out of range.  */
  8275.       i.types[this_operand].bitfield.disp32 = 0;
  8276.       if (!fits_in_signed_long (exp->X_add_number))
  8277.         {
  8278.           i.types[this_operand].bitfield.disp32s = 0;
  8279.           if (i.types[this_operand].bitfield.baseindex)
  8280.             {
  8281.               as_bad (_("0x%lx out of range of signed 32bit displacement"),
  8282.                       (long) exp->X_add_number);
  8283.               ret = 0;
  8284.             }
  8285.         }
  8286.     }
  8287.  
  8288. #if (defined (OBJ_AOUT) || defined (OBJ_MAYBE_AOUT))
  8289.   else if (exp->X_op != O_constant
  8290.            && OUTPUT_FLAVOR == bfd_target_aout_flavour
  8291.            && exp_seg != absolute_section
  8292.            && exp_seg != text_section
  8293.            && exp_seg != data_section
  8294.            && exp_seg != bss_section
  8295.            && exp_seg != undefined_section
  8296.            && !bfd_is_com_section (exp_seg))
  8297.     {
  8298.       as_bad (_("unimplemented segment %s in operand"), exp_seg->name);
  8299.       ret = 0;
  8300.     }
  8301. #endif
  8302.  
  8303.   /* Check if this is a displacement only operand.  */
  8304.   bigdisp = i.types[this_operand];
  8305.   bigdisp.bitfield.disp8 = 0;
  8306.   bigdisp.bitfield.disp16 = 0;
  8307.   bigdisp.bitfield.disp32 = 0;
  8308.   bigdisp.bitfield.disp32s = 0;
  8309.   bigdisp.bitfield.disp64 = 0;
  8310.   if (operand_type_all_zero (&bigdisp))
  8311.     i.types[this_operand] = operand_type_and (i.types[this_operand],
  8312.                                               types);
  8313.  
  8314.   return ret;
  8315. }
  8316.  
  8317. /* Make sure the memory operand we've been dealt is valid.
  8318.    Return 1 on success, 0 on a failure.  */
  8319.  
  8320. static int
  8321. i386_index_check (const char *operand_string)
  8322. {
  8323.   const char *kind = "base/index";
  8324.   enum flag_code addr_mode;
  8325.  
  8326.   if (i.prefix[ADDR_PREFIX])
  8327.     addr_mode = flag_code == CODE_32BIT ? CODE_16BIT : CODE_32BIT;
  8328.   else
  8329.     {
  8330.       addr_mode = flag_code;
  8331.  
  8332. #if INFER_ADDR_PREFIX
  8333.       if (i.mem_operands == 0)
  8334.         {
  8335.           /* Infer address prefix from the first memory operand.  */
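                  /* (Added illustrative note: in .code16, "mov (%eax),%bx"
                     uses a 32-bit address register, so ADDR_PREFIX_OPCODE is
                     added below to switch the address size.)  */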
  8336.           const reg_entry *addr_reg = i.base_reg;
  8337.  
  8338.           if (addr_reg == NULL)
  8339.             addr_reg = i.index_reg;
  8340.  
  8341.           if (addr_reg)
  8342.             {
  8343.               if (addr_reg->reg_num == RegEip
  8344.                   || addr_reg->reg_num == RegEiz
  8345.                   || addr_reg->reg_type.bitfield.reg32)
  8346.                 addr_mode = CODE_32BIT;
  8347.               else if (flag_code != CODE_64BIT
  8348.                        && addr_reg->reg_type.bitfield.reg16)
  8349.                 addr_mode = CODE_16BIT;
  8350.  
  8351.               if (addr_mode != flag_code)
  8352.                 {
  8353.                   i.prefix[ADDR_PREFIX] = ADDR_PREFIX_OPCODE;
  8354.                   i.prefixes += 1;
  8355.                   /* Change the size of any displacement too.  At most one
  8356.                      of Disp16 or Disp32 is set.
  8357.                      FIXME.  There doesn't seem to be any real need for
  8358.                      separate Disp16 and Disp32 flags.  The same goes for
  8359.                      Imm16 and Imm32.  Removing them would probably clean
  8360.                      up the code quite a lot.  */
  8361.                   if (flag_code != CODE_64BIT
  8362.                       && (i.types[this_operand].bitfield.disp16
  8363.                           || i.types[this_operand].bitfield.disp32))
  8364.                     i.types[this_operand]
  8365.                       = operand_type_xor (i.types[this_operand], disp16_32);
  8366.                 }
  8367.             }
  8368.         }
  8369. #endif
  8370.     }
  8371.  
  8372.   if (current_templates->start->opcode_modifier.isstring
  8373.       && !current_templates->start->opcode_modifier.immext
  8374.       && (current_templates->end[-1].opcode_modifier.isstring
  8375.           || i.mem_operands))
  8376.     {
  8377.       /* Memory operands of string insns are special in that they only allow
  8378.          a single register (rDI, rSI, or rBX) as their memory address.  */
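              /* (Added illustrative note: for 32-bit "movs" the source must be
                 (%esi) and the destination (%edi); a base register such as
                 %ebx is diagnosed below.)  */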
  8379.       const reg_entry *expected_reg;
  8380.       static const char *di_si[][2] =
  8381.         {
  8382.           { "esi", "edi" },
  8383.           { "si", "di" },
  8384.           { "rsi", "rdi" }
  8385.         };
  8386.       static const char *bx[] = { "ebx", "bx", "rbx" };
  8387.  
  8388.       kind = "string address";
  8389.  
  8390.       if (current_templates->start->opcode_modifier.w)
  8391.         {
  8392.           i386_operand_type type = current_templates->end[-1].operand_types[0];
  8393.  
  8394.           if (!type.bitfield.baseindex
  8395.               || ((!i.mem_operands != !intel_syntax)
  8396.                   && current_templates->end[-1].operand_types[1]
  8397.                      .bitfield.baseindex))
  8398.             type = current_templates->end[-1].operand_types[1];
  8399.           expected_reg = hash_find (reg_hash,
  8400.                                     di_si[addr_mode][type.bitfield.esseg]);
  8401.  
  8402.         }
  8403.       else
  8404.         expected_reg = hash_find (reg_hash, bx[addr_mode]);
  8405.  
  8406.       if (i.base_reg != expected_reg
  8407.           || i.index_reg
  8408.           || operand_type_check (i.types[this_operand], disp))
  8409.         {
  8410.           /* The second memory operand must have the same size as
  8411.              the first one.  */
  8412.           if (i.mem_operands
  8413.               && i.base_reg
  8414.               && !((addr_mode == CODE_64BIT
  8415.                     && i.base_reg->reg_type.bitfield.reg64)
  8416.                    || (addr_mode == CODE_32BIT
  8417.                        ? i.base_reg->reg_type.bitfield.reg32
  8418.                        : i.base_reg->reg_type.bitfield.reg16)))
  8419.             goto bad_address;
  8420.  
  8421.           as_warn (_("`%s' is not valid here (expected `%c%s%s%c')"),
  8422.                    operand_string,
  8423.                    intel_syntax ? '[' : '(',
  8424.                    register_prefix,
  8425.                    expected_reg->reg_name,
  8426.                    intel_syntax ? ']' : ')');
  8427.           return 1;
  8428.         }
  8429.       else
  8430.         return 1;
  8431.  
  8432. bad_address:
  8433.       as_bad (_("`%s' is not a valid %s expression"),
  8434.               operand_string, kind);
  8435.       return 0;
  8436.     }
  8437.   else
  8438.     {
  8439.       if (addr_mode != CODE_16BIT)
  8440.         {
  8441.           /* 32-bit/64-bit checks.  */
  8442.           if ((i.base_reg
  8443.                && (addr_mode == CODE_64BIT
  8444.                    ? !i.base_reg->reg_type.bitfield.reg64
  8445.                    : !i.base_reg->reg_type.bitfield.reg32)
  8446.                && (i.index_reg
  8447.                    || (i.base_reg->reg_num
  8448.                        != (addr_mode == CODE_64BIT ? RegRip : RegEip))))
  8449.               || (i.index_reg
  8450.                   && !i.index_reg->reg_type.bitfield.regxmm
  8451.                   && !i.index_reg->reg_type.bitfield.regymm
  8452.                   && !i.index_reg->reg_type.bitfield.regzmm
  8453.                   && ((addr_mode == CODE_64BIT
  8454.                        ? !(i.index_reg->reg_type.bitfield.reg64
  8455.                            || i.index_reg->reg_num == RegRiz)
  8456.                        : !(i.index_reg->reg_type.bitfield.reg32
  8457.                            || i.index_reg->reg_num == RegEiz))
  8458.                       || !i.index_reg->reg_type.bitfield.baseindex)))
  8459.             goto bad_address;
  8460.         }
  8461.       else
  8462.         {
  8463.           /* 16-bit checks.  */
  8464.           if ((i.base_reg
  8465.                && (!i.base_reg->reg_type.bitfield.reg16
  8466.                    || !i.base_reg->reg_type.bitfield.baseindex))
  8467.               || (i.index_reg
  8468.                   && (!i.index_reg->reg_type.bitfield.reg16
  8469.                       || !i.index_reg->reg_type.bitfield.baseindex
  8470.                       || !(i.base_reg
  8471.                            && i.base_reg->reg_num < 6
  8472.                            && i.index_reg->reg_num >= 6
  8473.                            && i.log2_scale_factor == 0))))
  8474.             goto bad_address;
  8475.         }
  8476.     }
  8477.   return 1;
  8478. }
  8479.  
  8480. /* Handle vector immediates.  */
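        /* Added illustrative note (not in the original source): this recognizes
           the EVEX rounding-control / suppress-all-exceptions pseudo-operands
           written in braces, e.g. the "{rn-sae}" in
           "vaddpd {rn-sae}, %zmm1, %zmm2, %zmm3"; the matched entry from
           RC_NamesTable is recorded in i.rounding.  */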
  8481.  
  8482. static int
  8483. RC_SAE_immediate (const char *imm_start)
  8484. {
  8485.   unsigned int match_found, j;
  8486.   const char *pstr = imm_start;
  8487.   expressionS *exp;
  8488.  
  8489.   if (*pstr != '{')
  8490.     return 0;
  8491.  
  8492.   pstr++;
  8493.   match_found = 0;
  8494.   for (j = 0; j < ARRAY_SIZE (RC_NamesTable); j++)
  8495.     {
  8496.       if (!strncmp (pstr, RC_NamesTable[j].name, RC_NamesTable[j].len))
  8497.         {
  8498.           if (!i.rounding)
  8499.             {
  8500.               rc_op.type = RC_NamesTable[j].type;
  8501.               rc_op.operand = this_operand;
  8502.               i.rounding = &rc_op;
  8503.             }
  8504.           else
  8505.             {
  8506.               as_bad (_("duplicated `%s'"), imm_start);
  8507.               return 0;
  8508.             }
  8509.           pstr += RC_NamesTable[j].len;
  8510.           match_found = 1;
  8511.           break;
  8512.         }
  8513.     }
  8514.   if (!match_found)
  8515.     return 0;
  8516.  
  8517.   if (*pstr++ != '}')
  8518.     {
  8519.       as_bad (_("Missing '}': '%s'"), imm_start);
  8520.       return 0;
  8521.     }
  8522.   /* RC/SAE immediate string should contain nothing more.  */
  8523.   if (*pstr != 0)
  8524.     {
  8525.       as_bad (_("Junk after '}': '%s'"), imm_start);
  8526.       return 0;
  8527.     }
  8528.  
  8529.   exp = &im_expressions[i.imm_operands++];
  8530.   i.op[this_operand].imms = exp;
  8531.  
  8532.   exp->X_op = O_constant;
  8533.   exp->X_add_number = 0;
  8534.   exp->X_add_symbol = (symbolS *) 0;
  8535.   exp->X_op_symbol = (symbolS *) 0;
  8536.  
  8537.   i.types[this_operand].bitfield.imm8 = 1;
  8538.   return 1;
  8539. }
  8540.  
  8541. /* Parse OPERAND_STRING into the i386_insn structure I.  Returns zero
  8542.    on error.  */
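        /* Added illustrative note (not in the original source): typical AT&T
           operand forms accepted here are "%eax" (register), "$4" (immediate),
           "*%eax" (absolute jump target), "%fs:16" (segment override) and
           "-8(%ebp,%esi,4)" (base/index memory reference); the last form is
           parsed with the displacement, scale and index-check helpers above.  */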
  8543.  
  8544. static int
  8545. i386_att_operand (char *operand_string)
  8546. {
  8547.   const reg_entry *r;
  8548.   char *end_op;
  8549.   char *op_string = operand_string;
  8550.  
  8551.   if (is_space_char (*op_string))
  8552.     ++op_string;
  8553.  
  8554.   /* We check for an absolute prefix (differentiating,
  8555.      for example, 'jmp pc_relative_label' from 'jmp *absolute_label').  */
  8556.   if (*op_string == ABSOLUTE_PREFIX)
  8557.     {
  8558.       ++op_string;
  8559.       if (is_space_char (*op_string))
  8560.         ++op_string;
  8561.       i.types[this_operand].bitfield.jumpabsolute = 1;
  8562.     }
  8563.  
  8564.   /* Check if operand is a register.  */
  8565.   if ((r = parse_register (op_string, &end_op)) != NULL)
  8566.     {
  8567.       i386_operand_type temp;
  8568.  
  8569.       /* Check for a segment override by searching for ':' after a
  8570.          segment register.  */
  8571.       op_string = end_op;
  8572.       if (is_space_char (*op_string))
  8573.         ++op_string;
  8574.       if (*op_string == ':'
  8575.           && (r->reg_type.bitfield.sreg2
  8576.               || r->reg_type.bitfield.sreg3))
  8577.         {
  8578.           switch (r->reg_num)
  8579.             {
  8580.             case 0:
  8581.               i.seg[i.mem_operands] = &es;
  8582.               break;
  8583.             case 1:
  8584.               i.seg[i.mem_operands] = &cs;
  8585.               break;
  8586.             case 2:
  8587.               i.seg[i.mem_operands] = &ss;
  8588.               break;
  8589.             case 3:
  8590.               i.seg[i.mem_operands] = &ds;
  8591.               break;
  8592.             case 4:
  8593.               i.seg[i.mem_operands] = &fs;
  8594.               break;
  8595.             case 5:
  8596.               i.seg[i.mem_operands] = &gs;
  8597.               break;
  8598.             }
  8599.  
  8600.           /* Skip the ':' and whitespace.  */
  8601.           ++op_string;
  8602.           if (is_space_char (*op_string))
  8603.             ++op_string;
  8604.  
  8605.           if (!is_digit_char (*op_string)
  8606.               && !is_identifier_char (*op_string)
  8607.               && *op_string != '('
  8608.               && *op_string != ABSOLUTE_PREFIX)
  8609.             {
  8610.               as_bad (_("bad memory operand `%s'"), op_string);
  8611.               return 0;
  8612.             }
  8613.           /* Handle case of %es:*foo.  */
  8614.           if (*op_string == ABSOLUTE_PREFIX)
  8615.             {
  8616.               ++op_string;
  8617.               if (is_space_char (*op_string))
  8618.                 ++op_string;
  8619.               i.types[this_operand].bitfield.jumpabsolute = 1;
  8620.             }
  8621.           goto do_memory_reference;
  8622.         }
  8623.  
  8624.       /* Handle vector operations.  */
  8625.       if (*op_string == '{')
  8626.         {
  8627.           op_string = check_VecOperations (op_string, NULL);
  8628.           if (op_string == NULL)
  8629.             return 0;
  8630.         }
  8631.  
  8632.       if (*op_string)
  8633.         {
  8634.           as_bad (_("junk `%s' after register"), op_string);
  8635.           return 0;
  8636.         }
  8637.       temp = r->reg_type;
  8638.       temp.bitfield.baseindex = 0;
  8639.       i.types[this_operand] = operand_type_or (i.types[this_operand],
  8640.                                                temp);
  8641.       i.types[this_operand].bitfield.unspecified = 0;
  8642.       i.op[this_operand].regs = r;
  8643.       i.reg_operands++;
  8644.     }
  8645.   else if (*op_string == REGISTER_PREFIX)
  8646.     {
  8647.       as_bad (_("bad register name `%s'"), op_string);
  8648.       return 0;
  8649.     }
  8650.   else if (*op_string == IMMEDIATE_PREFIX)
  8651.     {
  8652.       ++op_string;
  8653.       if (i.types[this_operand].bitfield.jumpabsolute)
  8654.         {
  8655.           as_bad (_("immediate operand illegal with absolute jump"));
  8656.           return 0;
  8657.         }
  8658.       if (!i386_immediate (op_string))
  8659.         return 0;
  8660.     }
  8661.   else if (RC_SAE_immediate (operand_string))
  8662.     {
  8663.       /* If it is a RC or SAE immediate, do nothing.  */
  8664.       ;
  8665.     }
  8666.   else if (is_digit_char (*op_string)
  8667.            || is_identifier_char (*op_string)
  8668.            || *op_string == '"'
  8669.            || *op_string == '(')
  8670.     {
  8671.       /* This is a memory reference of some sort.  */
  8672.       char *base_string;
  8673.  
  8674.       /* Start and end of displacement string expression (if found).  */
  8675.       char *displacement_string_start;
  8676.       char *displacement_string_end;
  8677.       char *vop_start;
  8678.  
  8679.     do_memory_reference:
  8680.       if ((i.mem_operands == 1
  8681.            && !current_templates->start->opcode_modifier.isstring)
  8682.           || i.mem_operands == 2)
  8683.         {
  8684.           as_bad (_("too many memory references for `%s'"),
  8685.                   current_templates->start->name);
  8686.           return 0;
  8687.         }
  8688.  
  8689.       /* Check for base index form.  We detect the base index form by
  8690.          looking for an ')' at the end of the operand, searching
  8691.          for the '(' matching it, and finding a REGISTER_PREFIX or ','
  8692.          after the '('.  */
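      /* For illustration (hypothetical operand): in `-8(%ebp,%esi,4)' the
         trailing ')' marks base-index form; scanning back to the matching
         '(' leaves `-8' as the displacement and `%ebp', `%esi' and `4' as
         base register, index register and scale factor.  */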
  8693.       base_string = op_string + strlen (op_string);
  8694.  
  8695.       /* Handle vector operations.  */
  8696.       vop_start = strchr (op_string, '{');
  8697.       if (vop_start && vop_start < base_string)
  8698.         {
  8699.           if (check_VecOperations (vop_start, base_string) == NULL)
  8700.             return 0;
  8701.           base_string = vop_start;
  8702.         }
  8703.  
  8704.       --base_string;
  8705.       if (is_space_char (*base_string))
  8706.         --base_string;
  8707.  
  8708.       /* If we only have a displacement, set up for it to be parsed later.  */
  8709.       displacement_string_start = op_string;
  8710.       displacement_string_end = base_string + 1;
  8711.  
  8712.       if (*base_string == ')')
  8713.         {
  8714.           char *temp_string;
  8715.           unsigned int parens_balanced = 1;
  8716.           /* We've already checked that the numbers of left & right ()'s are
  8717.              equal, so this loop will not be infinite.  */
  8718.           do
  8719.             {
  8720.               base_string--;
  8721.               if (*base_string == ')')
  8722.                 parens_balanced++;
  8723.               if (*base_string == '(')
  8724.                 parens_balanced--;
  8725.             }
  8726.           while (parens_balanced);
  8727.  
  8728.           temp_string = base_string;
  8729.  
  8730.           /* Skip past '(' and whitespace.  */
  8731.           ++base_string;
  8732.           if (is_space_char (*base_string))
  8733.             ++base_string;
  8734.  
  8735.           if (*base_string == ','
  8736.               || ((i.base_reg = parse_register (base_string, &end_op))
  8737.                   != NULL))
  8738.             {
  8739.               displacement_string_end = temp_string;
  8740.  
  8741.               i.types[this_operand].bitfield.baseindex = 1;
  8742.  
  8743.               if (i.base_reg)
  8744.                 {
  8745.                   base_string = end_op;
  8746.                   if (is_space_char (*base_string))
  8747.                     ++base_string;
  8748.                 }
  8749.  
  8750.               /* There may be an index reg or scale factor here.  */
  8751.               if (*base_string == ',')
  8752.                 {
  8753.                   ++base_string;
  8754.                   if (is_space_char (*base_string))
  8755.                     ++base_string;
  8756.  
  8757.                   if ((i.index_reg = parse_register (base_string, &end_op))
  8758.                       != NULL)
  8759.                     {
  8760.                       base_string = end_op;
  8761.                       if (is_space_char (*base_string))
  8762.                         ++base_string;
  8763.                       if (*base_string == ',')
  8764.                         {
  8765.                           ++base_string;
  8766.                           if (is_space_char (*base_string))
  8767.                             ++base_string;
  8768.                         }
  8769.                       else if (*base_string != ')')
  8770.                         {
  8771.                           as_bad (_("expecting `,' or `)' "
  8772.                                     "after index register in `%s'"),
  8773.                                   operand_string);
  8774.                           return 0;
  8775.                         }
  8776.                     }
  8777.                   else if (*base_string == REGISTER_PREFIX)
  8778.                     {
  8779.                       end_op = strchr (base_string, ',');
  8780.                       if (end_op)
  8781.                         *end_op = '\0';
  8782.                       as_bad (_("bad register name `%s'"), base_string);
  8783.                       return 0;
  8784.                     }
  8785.  
  8786.                   /* Check for scale factor.  */
  8787.                   if (*base_string != ')')
  8788.                     {
  8789.                       char *end_scale = i386_scale (base_string);
  8790.  
  8791.                       if (!end_scale)
  8792.                         return 0;
  8793.  
  8794.                       base_string = end_scale;
  8795.                       if (is_space_char (*base_string))
  8796.                         ++base_string;
  8797.                       if (*base_string != ')')
  8798.                         {
  8799.                           as_bad (_("expecting `)' "
  8800.                                     "after scale factor in `%s'"),
  8801.                                   operand_string);
  8802.                           return 0;
  8803.                         }
  8804.                     }
  8805.                   else if (!i.index_reg)
  8806.                     {
  8807.                       as_bad (_("expecting index register or scale factor "
  8808.                                 "after `,'; got '%c'"),
  8809.                               *base_string);
  8810.                       return 0;
  8811.                     }
  8812.                 }
  8813.               else if (*base_string != ')')
  8814.                 {
  8815.                   as_bad (_("expecting `,' or `)' "
  8816.                             "after base register in `%s'"),
  8817.                           operand_string);
  8818.                   return 0;
  8819.                 }
  8820.             }
  8821.           else if (*base_string == REGISTER_PREFIX)
  8822.             {
  8823.               end_op = strchr (base_string, ',');
  8824.               if (end_op)
  8825.                 *end_op = '\0';
  8826.               as_bad (_("bad register name `%s'"), base_string);
  8827.               return 0;
  8828.             }
  8829.         }
  8830.  
  8831.       /* If there's an expression beginning the operand, parse it,
  8832.          assuming displacement_string_start and
  8833.          displacement_string_end are meaningful.  */
  8834.       if (displacement_string_start != displacement_string_end)
  8835.         {
  8836.           if (!i386_displacement (displacement_string_start,
  8837.                                   displacement_string_end))
  8838.             return 0;
  8839.         }
  8840.  
  8841.       /* Special case for (%dx) while doing input/output op.  */
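      /* For illustration (hypothetical instructions): `inb (%dx), %al' and
         `outb %al, (%dx)' use (%dx) as a port number, not as a memory
         reference, so the operand type is rewritten below.  */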
  8842.       if (i.base_reg
  8843.           && operand_type_equal (&i.base_reg->reg_type,
  8844.                                  &reg16_inoutportreg)
  8845.           && i.index_reg == 0
  8846.           && i.log2_scale_factor == 0
  8847.           && i.seg[i.mem_operands] == 0
  8848.           && !operand_type_check (i.types[this_operand], disp))
  8849.         {
  8850.           i.types[this_operand] = inoutportreg;
  8851.           return 1;
  8852.         }
  8853.  
  8854.       if (i386_index_check (operand_string) == 0)
  8855.         return 0;
  8856.       i.types[this_operand].bitfield.mem = 1;
  8857.       i.mem_operands++;
  8858.     }
  8859.   else
  8860.     {
  8861.       /* It's not a memory operand; argh!  */
  8862.       as_bad (_("invalid char %s beginning operand %d `%s'"),
  8863.               output_invalid (*op_string),
  8864.               this_operand + 1,
  8865.               op_string);
  8866.       return 0;
  8867.     }
  8868.   return 1;                     /* Normal return.  */
  8869. }
  8870. /* Calculate the maximum variable size (i.e., excluding fr_fix)
  8871.    that an rs_machine_dependent frag may reach.  */
  8872.  
  8873. unsigned int
  8874. i386_frag_max_var (fragS *frag)
  8875. {
  8876.   /* The only relaxable frags are for jumps.
  8877.      Unconditional jumps can grow by 4 bytes and others by 5 bytes.  */
  8878.   gas_assert (frag->fr_type == rs_machine_dependent);
  8879.   return TYPE_FROM_RELAX_STATE (frag->fr_subtype) == UNCOND_JUMP ? 4 : 5;
  8880. }
  8881.  
  8882. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
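/* Presumed contract (inferred from the body and its caller): return
   non-zero if FR_SYMBOL can safely be assumed to resolve within its own
   segment, so a branch to it may be relaxed; return zero if the
   definition may be preempted (IFUNC symbols, weak symbols, or
   preemptible globals reached through a PLT relocation when assembling
   shared code).  */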
  8883. static int
  8884. elf_symbol_resolved_in_segment_p (symbolS *fr_symbol, offsetT fr_var)
  8885. {
  8886.   /* STT_GNU_IFUNC symbol must go through PLT.  */
  8887.   if ((symbol_get_bfdsym (fr_symbol)->flags
  8888.        & BSF_GNU_INDIRECT_FUNCTION) != 0)
  8889.     return 0;
  8890.  
  8891.   if (!S_IS_EXTERNAL (fr_symbol))
  8892.     /* Symbol may be weak or local.  */
  8893.     return !S_IS_WEAK (fr_symbol);
  8894.  
  8895.   /* Global symbols with non-default visibility can't be preempted. */
  8896.   if (ELF_ST_VISIBILITY (S_GET_OTHER (fr_symbol)) != STV_DEFAULT)
  8897.     return 1;
  8898.  
  8899.   if (fr_var != NO_RELOC)
  8900.     switch ((enum bfd_reloc_code_real) fr_var)
  8901.       {
  8902.       case BFD_RELOC_386_PLT32:
  8903.       case BFD_RELOC_X86_64_PLT32:
  8904.         /* Symbol with PLT relocation may be preempted.  */
  8905.         return 0;
  8906.       default:
  8907.         abort ();
  8908.       }
  8909.  
  8910.   /* Global symbols with default visibility in a shared library may be
  8911.      preempted by another definition.  */
  8912.   return !shared;
  8913. }
  8914. #endif
  8915.  
  8916. /* md_estimate_size_before_relax()
  8917.  
  8918.    Called just before relax() for rs_machine_dependent frags.  The x86
  8919.    assembler uses these frags to handle variable size jump
  8920.    instructions.
  8921.  
  8922.    Any symbol that is now undefined will not become defined.
  8923.    Return the correct fr_subtype in the frag.
  8924.    Return the initial "guess for variable size of frag" to caller.
  8925.    The guess is actually the growth beyond the fixed part.  Whatever
  8926.    we do to grow the fixed or variable part contributes to our
  8927.    returned value.  */
  8928.  
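/* A sketch of the encodings involved (illustrative, derived from the code
   below): a short `jmp' is EB rel8 and its near form is E9 rel16/32; a
   short `jcc' is 7x rel8 and its near form is 0F 8x rel16/32.  On pre-386
   CPUs (COND_JUMP86) there is no near `jcc', so the condition is inverted
   and made to branch over an E9 near jump instead.  */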
  8929. int
  8930. md_estimate_size_before_relax (fragS *fragP, segT segment)
  8931. {
  8932.   /* We've already got fragP->fr_subtype right;  all we have to do is
  8933.      check for un-relaxable symbols.  On an ELF system, we can't relax
  8934.      an externally visible symbol, because it may be overridden by a
  8935.      shared library.  */
  8936.   if (S_GET_SEGMENT (fragP->fr_symbol) != segment
  8937. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  8938.       || (IS_ELF
  8939.           && !elf_symbol_resolved_in_segment_p (fragP->fr_symbol,
  8940.                                                 fragP->fr_var))
  8941. #endif
  8942. #if defined (OBJ_COFF) && defined (TE_PE)
  8943.       || (OUTPUT_FLAVOR == bfd_target_coff_flavour
  8944.           && S_IS_WEAK (fragP->fr_symbol))
  8945. #endif
  8946.       )
  8947.     {
  8948.       /* Symbol is undefined in this segment, or we need to keep a
  8949.          reloc so that weak symbols can be overridden.  */
  8950.       int size = (fragP->fr_subtype & CODE16) ? 2 : 4;
  8951.       enum bfd_reloc_code_real reloc_type;
  8952.       unsigned char *opcode;
  8953.       int old_fr_fix;
  8954.  
  8955.       if (fragP->fr_var != NO_RELOC)
  8956.         reloc_type = (enum bfd_reloc_code_real) fragP->fr_var;
  8957.       else if (size == 2)
  8958.         reloc_type = BFD_RELOC_16_PCREL;
  8959.       else
  8960.         reloc_type = BFD_RELOC_32_PCREL;
  8961.  
  8962.       old_fr_fix = fragP->fr_fix;
  8963.       opcode = (unsigned char *) fragP->fr_opcode;
  8964.  
  8965.       switch (TYPE_FROM_RELAX_STATE (fragP->fr_subtype))
  8966.         {
  8967.         case UNCOND_JUMP:
  8968.           /* Make jmp (0xeb) a (d)word displacement jump.  */
  8969.           opcode[0] = 0xe9;
  8970.           fragP->fr_fix += size;
  8971.           fix_new (fragP, old_fr_fix, size,
  8972.                    fragP->fr_symbol,
  8973.                    fragP->fr_offset, 1,
  8974.                    reloc_type);
  8975.           break;
  8976.  
  8977.         case COND_JUMP86:
  8978.           if (size == 2
  8979.               && (!no_cond_jump_promotion || fragP->fr_var != NO_RELOC))
  8980.             {
  8981.               /* Negate the condition, and branch past an
  8982.                  unconditional jump.  */
  8983.               opcode[0] ^= 1;
  8984.               opcode[1] = 3;
  8985.               /* Insert an unconditional jump.  */
  8986.               opcode[2] = 0xe9;
  8987.               /* We added two extra opcode bytes, and have a two byte
  8988.                  offset.  */
  8989.               fragP->fr_fix += 2 + 2;
  8990.               fix_new (fragP, old_fr_fix + 2, 2,
  8991.                        fragP->fr_symbol,
  8992.                        fragP->fr_offset, 1,
  8993.                        reloc_type);
  8994.               break;
  8995.             }
  8996.           /* Fall through.  */
  8997.  
  8998.         case COND_JUMP:
  8999.           if (no_cond_jump_promotion && fragP->fr_var == NO_RELOC)
  9000.             {
  9001.               fixS *fixP;
  9002.  
  9003.               fragP->fr_fix += 1;
  9004.               fixP = fix_new (fragP, old_fr_fix, 1,
  9005.                               fragP->fr_symbol,
  9006.                               fragP->fr_offset, 1,
  9007.                               BFD_RELOC_8_PCREL);
  9008.               fixP->fx_signed = 1;
  9009.               break;
  9010.             }
  9011.  
  9012.           /* This changes the byte-displacement jump 0x7N
  9013.              to the (d)word-displacement jump 0x0f,0x8N.  */
  9014.           opcode[1] = opcode[0] + 0x10;
  9015.           opcode[0] = TWO_BYTE_OPCODE_ESCAPE;
  9016.           /* We've added an opcode byte.  */
  9017.           fragP->fr_fix += 1 + size;
  9018.           fix_new (fragP, old_fr_fix + 1, size,
  9019.                    fragP->fr_symbol,
  9020.                    fragP->fr_offset, 1,
  9021.                    reloc_type);
  9022.           break;
  9023.  
  9024.         default:
  9025.           BAD_CASE (fragP->fr_subtype);
  9026.           break;
  9027.         }
  9028.       frag_wane (fragP);
  9029.       return fragP->fr_fix - old_fr_fix;
  9030.     }
  9031.  
  9032.   /* Guess size depending on current relax state.  Initially the relax
  9033.      state will correspond to a short jump and we return 1, because
  9034.      the variable part of the frag (the branch offset) is one byte
  9035.      long.  However, we can relax a section more than once and in that
  9036.      case we must either set fr_subtype back to the unrelaxed state,
  9037.      or return the value for the appropriate branch.  */
  9038.   return md_relax_table[fragP->fr_subtype].rlx_length;
  9039. }
  9040.  
  9041. /* Called after relax() is finished.
  9042.  
  9043.    In:  Address of frag.
  9044.         fr_type == rs_machine_dependent.
  9045.         fr_subtype is what the address relaxed to.
  9046.  
  9047.    Out: Any fixSs and constants are set up.
  9048.         Caller will turn frag into a ".space 0".  */
  9049.  
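/* Illustrative mapping (taken from the switch below): UNCOND_JUMP/BIG uses
   1 opcode byte (E9) plus a 4-byte displacement, COND_JUMP/BIG uses 2
   opcode bytes (0F 8x) plus 4 displacement bytes, and COND_JUMP86/BIG16
   inverts the condition and branches over an E9 rel16 jump (extension 4)
   in 16-bit code.  */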
  9050. void
  9051. md_convert_frag (bfd *abfd ATTRIBUTE_UNUSED, segT sec ATTRIBUTE_UNUSED,
  9052.                  fragS *fragP)
  9053. {
  9054.   unsigned char *opcode;
  9055.   unsigned char *where_to_put_displacement = NULL;
  9056.   offsetT target_address;
  9057.   offsetT opcode_address;
  9058.   unsigned int extension = 0;
  9059.   offsetT displacement_from_opcode_start;
  9060.  
  9061.   opcode = (unsigned char *) fragP->fr_opcode;
  9062.  
  9063.   /* Address we want to reach in file space.  */
  9064.   target_address = S_GET_VALUE (fragP->fr_symbol) + fragP->fr_offset;
  9065.  
  9066.   /* Address opcode resides at in file space.  */
  9067.   opcode_address = fragP->fr_address + fragP->fr_fix;
  9068.  
  9069.   /* Displacement from opcode start to fill into instruction.  */
  9070.   displacement_from_opcode_start = target_address - opcode_address;
  9071.  
  9072.   if ((fragP->fr_subtype & BIG) == 0)
  9073.     {
  9074.       /* Don't have to change opcode.  */
  9075.       extension = 1;            /* 1 opcode + 1 displacement  */
  9076.       where_to_put_displacement = &opcode[1];
  9077.     }
  9078.   else
  9079.     {
  9080.       if (no_cond_jump_promotion
  9081.           && TYPE_FROM_RELAX_STATE (fragP->fr_subtype) != UNCOND_JUMP)
  9082.         as_warn_where (fragP->fr_file, fragP->fr_line,
  9083.                        _("long jump required"));
  9084.  
  9085.       switch (fragP->fr_subtype)
  9086.         {
  9087.         case ENCODE_RELAX_STATE (UNCOND_JUMP, BIG):
  9088.           extension = 4;                /* 1 opcode + 4 displacement  */
  9089.           opcode[0] = 0xe9;
  9090.           where_to_put_displacement = &opcode[1];
  9091.           break;
  9092.  
  9093.         case ENCODE_RELAX_STATE (UNCOND_JUMP, BIG16):
  9094.           extension = 2;                /* 1 opcode + 2 displacement  */
  9095.           opcode[0] = 0xe9;
  9096.           where_to_put_displacement = &opcode[1];
  9097.           break;
  9098.  
  9099.         case ENCODE_RELAX_STATE (COND_JUMP, BIG):
  9100.         case ENCODE_RELAX_STATE (COND_JUMP86, BIG):
  9101.           extension = 5;                /* 2 opcode + 4 displacement  */
  9102.           opcode[1] = opcode[0] + 0x10;
  9103.           opcode[0] = TWO_BYTE_OPCODE_ESCAPE;
  9104.           where_to_put_displacement = &opcode[2];
  9105.           break;
  9106.  
  9107.         case ENCODE_RELAX_STATE (COND_JUMP, BIG16):
  9108.           extension = 3;                /* 2 opcode + 2 displacement  */
  9109.           opcode[1] = opcode[0] + 0x10;
  9110.           opcode[0] = TWO_BYTE_OPCODE_ESCAPE;
  9111.           where_to_put_displacement = &opcode[2];
  9112.           break;
  9113.  
  9114.         case ENCODE_RELAX_STATE (COND_JUMP86, BIG16):
  9115.           extension = 4;
  9116.           opcode[0] ^= 1;
  9117.           opcode[1] = 3;
  9118.           opcode[2] = 0xe9;
  9119.           where_to_put_displacement = &opcode[3];
  9120.           break;
  9121.  
  9122.         default:
  9123.           BAD_CASE (fragP->fr_subtype);
  9124.           break;
  9125.         }
  9126.     }
  9127.  
  9128.   /* If the size is less than four we are sure that the operand fits,
  9129.      but if it's 4, then it could be that the displacement is larger
  9130.      than +/- 2GB.  */
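  /* Worked example of the check (illustrative): (addressT) 1 << 31 is 2^31
     and ((addressT) 2 << 31) - 1 is 2^32 - 1, so the unsigned comparison
     below rejects any displacement-minus-extension outside
     [-2^31, 2^31 - 1].  */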
  9131.   if (DISP_SIZE_FROM_RELAX_STATE (fragP->fr_subtype) == 4
  9132.       && object_64bit
  9133.       && ((addressT) (displacement_from_opcode_start - extension
  9134.                       + ((addressT) 1 << 31))
  9135.           > (((addressT) 2 << 31) - 1)))
  9136.     {
  9137.       as_bad_where (fragP->fr_file, fragP->fr_line,
  9138.                     _("jump target out of range"));
  9139.       /* Make us emit 0.  */
  9140.       displacement_from_opcode_start = extension;
  9141.     }
  9142.   /* Now put displacement after opcode.  */
  9143.   md_number_to_chars ((char *) where_to_put_displacement,
  9144.                       (valueT) (displacement_from_opcode_start - extension),
  9145.                       DISP_SIZE_FROM_RELAX_STATE (fragP->fr_subtype));
  9146.   fragP->fr_fix += extension;
  9147. }
  9148. /* Apply a fixup (fixP) to segment data, once it has been determined
  9149.    by our caller that we have all the info we need to fix it up.
  9150.  
  9151.    Parameter valP is the pointer to the value of the bits.
  9152.  
  9153.    On the 386, immediates, displacements, and data pointers are all in
  9154.    the same (little-endian) format, so we don't need to care about which
  9155.    we are handling.  */
  9156.  
  9157. void
  9158. md_apply_fix (fixS *fixP, valueT *valP, segT seg ATTRIBUTE_UNUSED)
  9159. {
  9160.   char *p = fixP->fx_where + fixP->fx_frag->fr_literal;
  9161.   valueT value = *valP;
  9162.  
  9163. #if !defined (TE_Mach)
  9164.   if (fixP->fx_pcrel)
  9165.     {
  9166.       switch (fixP->fx_r_type)
  9167.         {
  9168.         default:
  9169.           break;
  9170.  
  9171.         case BFD_RELOC_64:
  9172.           fixP->fx_r_type = BFD_RELOC_64_PCREL;
  9173.           break;
  9174.         case BFD_RELOC_32:
  9175.         case BFD_RELOC_X86_64_32S:
  9176.           fixP->fx_r_type = BFD_RELOC_32_PCREL;
  9177.           break;
  9178.         case BFD_RELOC_16:
  9179.           fixP->fx_r_type = BFD_RELOC_16_PCREL;
  9180.           break;
  9181.         case BFD_RELOC_8:
  9182.           fixP->fx_r_type = BFD_RELOC_8_PCREL;
  9183.           break;
  9184.         }
  9185.     }
  9186.  
  9187.   if (fixP->fx_addsy != NULL
  9188.       && (fixP->fx_r_type == BFD_RELOC_32_PCREL
  9189.           || fixP->fx_r_type == BFD_RELOC_64_PCREL
  9190.           || fixP->fx_r_type == BFD_RELOC_16_PCREL
  9191.           || fixP->fx_r_type == BFD_RELOC_8_PCREL)
  9192.       && !use_rela_relocations)
  9193.     {
  9194.       /* This is a hack.  There should be a better way to handle this.
  9195.          This covers for the fact that bfd_install_relocation will
  9196.          subtract the current location (for partial_inplace, PC relative
  9197.          relocations); see more below.  */
  9198. #ifndef OBJ_AOUT
  9199.       if (IS_ELF
  9200. #ifdef TE_PE
  9201.           || OUTPUT_FLAVOR == bfd_target_coff_flavour
  9202. #endif
  9203.           )
  9204.         value += fixP->fx_where + fixP->fx_frag->fr_address;
  9205. #endif
  9206. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  9207.       if (IS_ELF)
  9208.         {
  9209.           segT sym_seg = S_GET_SEGMENT (fixP->fx_addsy);
  9210.  
  9211.           if ((sym_seg == seg
  9212.                || (symbol_section_p (fixP->fx_addsy)
  9213.                    && sym_seg != absolute_section))
  9214.               && !generic_force_reloc (fixP))
  9215.             {
  9216.               /* Yes, we add the values in twice.  This is because
  9217.                  bfd_install_relocation subtracts them out again.  I think
  9218.                  bfd_install_relocation is broken, but I don't dare change
  9219.                  it.  FIXME.  */
  9220.               value += fixP->fx_where + fixP->fx_frag->fr_address;
  9221.             }
  9222.         }
  9223. #endif
  9224. #if defined (OBJ_COFF) && defined (TE_PE)
  9225.       /* For some reason, the PE format does not store a
  9226.          section address offset for a PC relative symbol.  */
  9227.       if (S_GET_SEGMENT (fixP->fx_addsy) != seg
  9228.           || S_IS_WEAK (fixP->fx_addsy))
  9229.         value += md_pcrel_from (fixP);
  9230. #endif
  9231.     }
  9232. #if defined (OBJ_COFF) && defined (TE_PE)
  9233.   if (fixP->fx_addsy != NULL
  9234.       && S_IS_WEAK (fixP->fx_addsy)
  9235.       /* PR 16858: Do not modify weak function references.  */
  9236.       && ! fixP->fx_pcrel)
  9237.     {
  9238. #if !defined (TE_PEP)
  9239.       /* For x86 PE weak function symbols are neither PC-relative
  9240.          nor do they set S_IS_FUNCTION.  So the only reliable way
  9241.          to detect them is to check the flags of their containing
  9242.          section.  */
  9243.       if (S_GET_SEGMENT (fixP->fx_addsy) != NULL
  9244.           && S_GET_SEGMENT (fixP->fx_addsy)->flags & SEC_CODE)
  9245.         ;
  9246.       else
  9247. #endif
  9248.       value -= S_GET_VALUE (fixP->fx_addsy);
  9249.     }
  9250. #endif
  9251.  
  9252.   /* Fix a few things - the dynamic linker expects certain values here,
  9253.      and we must not disappoint it.  */
  9254. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  9255.   if (IS_ELF && fixP->fx_addsy)
  9256.     switch (fixP->fx_r_type)
  9257.       {
  9258.       case BFD_RELOC_386_PLT32:
  9259.       case BFD_RELOC_X86_64_PLT32:
  9260.         /* Make the jump instruction point to the address of the operand.  At
  9261.            runtime we merely add the offset to the actual PLT entry.  */
  9262.         value = -4;
  9263.         break;
  9264.  
  9265.       case BFD_RELOC_386_TLS_GD:
  9266.       case BFD_RELOC_386_TLS_LDM:
  9267.       case BFD_RELOC_386_TLS_IE_32:
  9268.       case BFD_RELOC_386_TLS_IE:
  9269.       case BFD_RELOC_386_TLS_GOTIE:
  9270.       case BFD_RELOC_386_TLS_GOTDESC:
  9271.       case BFD_RELOC_X86_64_TLSGD:
  9272.       case BFD_RELOC_X86_64_TLSLD:
  9273.       case BFD_RELOC_X86_64_GOTTPOFF:
  9274.       case BFD_RELOC_X86_64_GOTPC32_TLSDESC:
  9275.         value = 0; /* Fully resolved at runtime.  No addend.  */
  9276.         /* Fallthrough */
  9277.       case BFD_RELOC_386_TLS_LE:
  9278.       case BFD_RELOC_386_TLS_LDO_32:
  9279.       case BFD_RELOC_386_TLS_LE_32:
  9280.       case BFD_RELOC_X86_64_DTPOFF32:
  9281.       case BFD_RELOC_X86_64_DTPOFF64:
  9282.       case BFD_RELOC_X86_64_TPOFF32:
  9283.       case BFD_RELOC_X86_64_TPOFF64:
  9284.         S_SET_THREAD_LOCAL (fixP->fx_addsy);
  9285.         break;
  9286.  
  9287.       case BFD_RELOC_386_TLS_DESC_CALL:
  9288.       case BFD_RELOC_X86_64_TLSDESC_CALL:
  9289.         value = 0; /* Fully resolved at runtime.  No addend.  */
  9290.         S_SET_THREAD_LOCAL (fixP->fx_addsy);
  9291.         fixP->fx_done = 0;
  9292.         return;
  9293.  
  9294.       case BFD_RELOC_386_GOT32:
  9295.       case BFD_RELOC_X86_64_GOT32:
  9296.         value = 0; /* Fully resolved at runtime.  No addend.  */
  9297.         break;
  9298.  
  9299.       case BFD_RELOC_VTABLE_INHERIT:
  9300.       case BFD_RELOC_VTABLE_ENTRY:
  9301.         fixP->fx_done = 0;
  9302.         return;
  9303.  
  9304.       default:
  9305.         break;
  9306.       }
  9307. #endif /* defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)  */
  9308.   *valP = value;
  9309. #endif /* !defined (TE_Mach)  */
  9310.  
  9311.   /* Are we finished with this relocation now?  */
  9312.   if (fixP->fx_addsy == NULL)
  9313.     fixP->fx_done = 1;
  9314. #if defined (OBJ_COFF) && defined (TE_PE)
  9315.   else if (fixP->fx_addsy != NULL && S_IS_WEAK (fixP->fx_addsy))
  9316.     {
  9317.       fixP->fx_done = 0;
  9318.       /* Remember value for tc_gen_reloc.  */
  9319.       fixP->fx_addnumber = value;
  9320.       /* Clear out the frag for now.  */
  9321.       value = 0;
  9322.     }
  9323. #endif
  9324.   else if (use_rela_relocations)
  9325.     {
  9326.       fixP->fx_no_overflow = 1;
  9327.       /* Remember value for tc_gen_reloc.  */
  9328.       fixP->fx_addnumber = value;
  9329.       value = 0;
  9330.     }
  9331.  
  9332.   md_number_to_chars (p, value, fixP->fx_size);
  9333. }
  9334. char *
  9335. md_atof (int type, char *litP, int *sizeP)
  9336. {
  9337.   /* This outputs the LITTLENUMs in REVERSE order;
  9338.      to match the little-endian 386.  */
  9339.   return ieee_md_atof (type, litP, sizeP, FALSE);
  9340. }
  9341. static char output_invalid_buf[sizeof (unsigned char) * 2 + 6];
  9342.  
  9343. static char *
  9344. output_invalid (int c)
  9345. {
  9346.   if (ISPRINT (c))
  9347.     snprintf (output_invalid_buf, sizeof (output_invalid_buf),
  9348.               "'%c'", c);
  9349.   else
  9350.     snprintf (output_invalid_buf, sizeof (output_invalid_buf),
  9351.               "(0x%x)", (unsigned char) c);
  9352.   return output_invalid_buf;
  9353. }
  9354.  
  9355. /* REG_STRING starts *before* REGISTER_PREFIX.  */
  9356.  
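/* For illustration (hypothetical inputs): this accepts `%eax' (and plain
   `eax' when -mnaked-reg is in effect), floating point stack registers
   written as `%st(3)' with optional spaces inside the parentheses, and it
   refuses identifiers such as `eax_var' so naked register parsing does not
   swallow ordinary symbol names.  */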
  9357. static const reg_entry *
  9358. parse_real_register (char *reg_string, char **end_op)
  9359. {
  9360.   char *s = reg_string;
  9361.   char *p;
  9362.   char reg_name_given[MAX_REG_NAME_SIZE + 1];
  9363.   const reg_entry *r;
  9364.  
  9365.   /* Skip possible REGISTER_PREFIX and possible whitespace.  */
  9366.   if (*s == REGISTER_PREFIX)
  9367.     ++s;
  9368.  
  9369.   if (is_space_char (*s))
  9370.     ++s;
  9371.  
  9372.   p = reg_name_given;
  9373.   while ((*p++ = register_chars[(unsigned char) *s]) != '\0')
  9374.     {
  9375.       if (p >= reg_name_given + MAX_REG_NAME_SIZE)
  9376.         return (const reg_entry *) NULL;
  9377.       s++;
  9378.     }
  9379.  
  9380.   /* For naked regs, make sure that we are not dealing with an identifier.
  9381.      This prevents confusing an identifier like `eax_var' with register
  9382.      `eax'.  */
  9383.   if (allow_naked_reg && identifier_chars[(unsigned char) *s])
  9384.     return (const reg_entry *) NULL;
  9385.  
  9386.   *end_op = s;
  9387.  
  9388.   r = (const reg_entry *) hash_find (reg_hash, reg_name_given);
  9389.  
  9390.   /* Handle floating point regs, allowing spaces in the (i) part.  */
  9391.   if (r == i386_regtab /* %st is first entry of table  */)
  9392.     {
  9393.       if (is_space_char (*s))
  9394.         ++s;
  9395.       if (*s == '(')
  9396.         {
  9397.           ++s;
  9398.           if (is_space_char (*s))
  9399.             ++s;
  9400.           if (*s >= '0' && *s <= '7')
  9401.             {
  9402.               int fpr = *s - '0';
  9403.               ++s;
  9404.               if (is_space_char (*s))
  9405.                 ++s;
  9406.               if (*s == ')')
  9407.                 {
  9408.                   *end_op = s + 1;
  9409.                   r = (const reg_entry *) hash_find (reg_hash, "st(0)");
  9410.                   know (r);
  9411.                   return r + fpr;
  9412.                 }
  9413.             }
  9414.           /* We have "%st(" then garbage.  */
  9415.           return (const reg_entry *) NULL;
  9416.         }
  9417.     }
  9418.  
  9419.   if (r == NULL || allow_pseudo_reg)
  9420.     return r;
  9421.  
  9422.   if (operand_type_all_zero (&r->reg_type))
  9423.     return (const reg_entry *) NULL;
  9424.  
  9425.   if ((r->reg_type.bitfield.reg32
  9426.        || r->reg_type.bitfield.sreg3
  9427.        || r->reg_type.bitfield.control
  9428.        || r->reg_type.bitfield.debug
  9429.        || r->reg_type.bitfield.test)
  9430.       && !cpu_arch_flags.bitfield.cpui386)
  9431.     return (const reg_entry *) NULL;
  9432.  
  9433.   if (r->reg_type.bitfield.floatreg
  9434.       && !cpu_arch_flags.bitfield.cpu8087
  9435.       && !cpu_arch_flags.bitfield.cpu287
  9436.       && !cpu_arch_flags.bitfield.cpu387)
  9437.     return (const reg_entry *) NULL;
  9438.  
  9439.   if (r->reg_type.bitfield.regmmx && !cpu_arch_flags.bitfield.cpummx)
  9440.     return (const reg_entry *) NULL;
  9441.  
  9442.   if (r->reg_type.bitfield.regxmm && !cpu_arch_flags.bitfield.cpusse)
  9443.     return (const reg_entry *) NULL;
  9444.  
  9445.   if (r->reg_type.bitfield.regymm && !cpu_arch_flags.bitfield.cpuavx)
  9446.     return (const reg_entry *) NULL;
  9447.  
  9448.   if ((r->reg_type.bitfield.regzmm || r->reg_type.bitfield.regmask)
  9449.        && !cpu_arch_flags.bitfield.cpuavx512f)
  9450.     return (const reg_entry *) NULL;
  9451.  
  9452.   /* Don't allow fake index registers unless allow_index_reg is non-zero.  */
  9453.   if (!allow_index_reg
  9454.       && (r->reg_num == RegEiz || r->reg_num == RegRiz))
  9455.     return (const reg_entry *) NULL;
  9456.  
  9457.   /* The upper 16 vector registers are only available with VREX in 64bit
  9458.      mode.  */
  9459.   if ((r->reg_flags & RegVRex))
  9460.     {
  9461.       if (!cpu_arch_flags.bitfield.cpuvrex
  9462.           || flag_code != CODE_64BIT)
  9463.         return (const reg_entry *) NULL;
  9464.  
  9465.       i.need_vrex = 1;
  9466.     }
  9467.  
  9468.   if (((r->reg_flags & (RegRex64 | RegRex))
  9469.        || r->reg_type.bitfield.reg64)
  9470.       && (!cpu_arch_flags.bitfield.cpulm
  9471.           || !operand_type_equal (&r->reg_type, &control))
  9472.       && flag_code != CODE_64BIT)
  9473.     return (const reg_entry *) NULL;
  9474.  
  9475.   if (r->reg_type.bitfield.sreg3 && r->reg_num == RegFlat && !intel_syntax)
  9476.     return (const reg_entry *) NULL;
  9477.  
  9478.   return r;
  9479. }
  9480.  
  9481. /* REG_STRING starts *before* REGISTER_PREFIX.  */
  9482.  
  9483. static const reg_entry *
  9484. parse_register (char *reg_string, char **end_op)
  9485. {
  9486.   const reg_entry *r;
  9487.  
  9488.   if (*reg_string == REGISTER_PREFIX || allow_naked_reg)
  9489.     r = parse_real_register (reg_string, end_op);
  9490.   else
  9491.     r = NULL;
  9492.   if (!r)
  9493.     {
  9494.       char *save = input_line_pointer;
  9495.       char c;
  9496.       symbolS *symbolP;
  9497.  
  9498.       input_line_pointer = reg_string;
  9499.       c = get_symbol_name (&reg_string);
  9500.       symbolP = symbol_find (reg_string);
  9501.       if (symbolP && S_GET_SEGMENT (symbolP) == reg_section)
  9502.         {
  9503.           const expressionS *e = symbol_get_value_expression (symbolP);
  9504.  
  9505.           know (e->X_op == O_register);
  9506.           know (e->X_add_number >= 0
  9507.                 && (valueT) e->X_add_number < i386_regtab_size);
  9508.           r = i386_regtab + e->X_add_number;
  9509.           if ((r->reg_flags & RegVRex))
  9510.             i.need_vrex = 1;
  9511.           *end_op = input_line_pointer;
  9512.         }
  9513.       *input_line_pointer = c;
  9514.       input_line_pointer = save;
  9515.     }
  9516.   return r;
  9517. }
  9518.  
  9519. int
  9520. i386_parse_name (char *name, expressionS *e, char *nextcharP)
  9521. {
  9522.   const reg_entry *r;
  9523.   char *end = input_line_pointer;
  9524.  
  9525.   *end = *nextcharP;
  9526.   r = parse_register (name, &input_line_pointer);
  9527.   if (r && end <= input_line_pointer)
  9528.     {
  9529.       *nextcharP = *input_line_pointer;
  9530.       *input_line_pointer = 0;
  9531.       e->X_op = O_register;
  9532.       e->X_add_number = r - i386_regtab;
  9533.       return 1;
  9534.     }
  9535.   input_line_pointer = end;
  9536.   *end = 0;
  9537.   return intel_syntax ? i386_intel_parse_name (name, e) : 0;
  9538. }
  9539.  
  9540. void
  9541. md_operand (expressionS *e)
  9542. {
  9543.   char *end;
  9544.   const reg_entry *r;
  9545.  
  9546.   switch (*input_line_pointer)
  9547.     {
  9548.     case REGISTER_PREFIX:
  9549.       r = parse_real_register (input_line_pointer, &end);
  9550.       if (r)
  9551.         {
  9552.           e->X_op = O_register;
  9553.           e->X_add_number = r - i386_regtab;
  9554.           input_line_pointer = end;
  9555.         }
  9556.       break;
  9557.  
  9558.     case '[':
  9559.       gas_assert (intel_syntax);
  9560.       end = input_line_pointer++;
  9561.       expression (e);
  9562.       if (*input_line_pointer == ']')
  9563.         {
  9564.           ++input_line_pointer;
  9565.           e->X_op_symbol = make_expr_symbol (e);
  9566.           e->X_add_symbol = NULL;
  9567.           e->X_add_number = 0;
  9568.           e->X_op = O_index;
  9569.         }
  9570.       else
  9571.         {
  9572.           e->X_op = O_absent;
  9573.           input_line_pointer = end;
  9574.         }
  9575.       break;
  9576.     }
  9577. }
  9578.  
  9579. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  9580. const char *md_shortopts = "kVQ:sqn";
  9581. #else
  9582. const char *md_shortopts = "qn";
  9583. #endif
  9584.  
  9585. #define OPTION_32 (OPTION_MD_BASE + 0)
  9586. #define OPTION_64 (OPTION_MD_BASE + 1)
  9587. #define OPTION_DIVIDE (OPTION_MD_BASE + 2)
  9588. #define OPTION_MARCH (OPTION_MD_BASE + 3)
  9589. #define OPTION_MTUNE (OPTION_MD_BASE + 4)
  9590. #define OPTION_MMNEMONIC (OPTION_MD_BASE + 5)
  9591. #define OPTION_MSYNTAX (OPTION_MD_BASE + 6)
  9592. #define OPTION_MINDEX_REG (OPTION_MD_BASE + 7)
  9593. #define OPTION_MNAKED_REG (OPTION_MD_BASE + 8)
  9594. #define OPTION_MOLD_GCC (OPTION_MD_BASE + 9)
  9595. #define OPTION_MSSE2AVX (OPTION_MD_BASE + 10)
  9596. #define OPTION_MSSE_CHECK (OPTION_MD_BASE + 11)
  9597. #define OPTION_MOPERAND_CHECK (OPTION_MD_BASE + 12)
  9598. #define OPTION_MAVXSCALAR (OPTION_MD_BASE + 13)
  9599. #define OPTION_X32 (OPTION_MD_BASE + 14)
  9600. #define OPTION_MADD_BND_PREFIX (OPTION_MD_BASE + 15)
  9601. #define OPTION_MEVEXLIG (OPTION_MD_BASE + 16)
  9602. #define OPTION_MEVEXWIG (OPTION_MD_BASE + 17)
  9603. #define OPTION_MBIG_OBJ (OPTION_MD_BASE + 18)
  9604. #define OPTION_OMIT_LOCK_PREFIX (OPTION_MD_BASE + 19)
  9605. #define OPTION_MEVEXRCIG (OPTION_MD_BASE + 20)
  9606. #define OPTION_MSHARED (OPTION_MD_BASE + 21)
  9607. #define OPTION_MAMD64 (OPTION_MD_BASE + 22)
  9608. #define OPTION_MINTEL64 (OPTION_MD_BASE + 23)
  9609.  
  9610. struct option md_longopts[] =
  9611. {
  9612.   {"32", no_argument, NULL, OPTION_32},
  9613. #if (defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF) \
  9614.      || defined (TE_PE) || defined (TE_PEP) || defined (OBJ_MACH_O))
  9615.   {"64", no_argument, NULL, OPTION_64},
  9616. #endif
  9617. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  9618.   {"x32", no_argument, NULL, OPTION_X32},
  9619.   {"mshared", no_argument, NULL, OPTION_MSHARED},
  9620. #endif
  9621.   {"divide", no_argument, NULL, OPTION_DIVIDE},
  9622.   {"march", required_argument, NULL, OPTION_MARCH},
  9623.   {"mtune", required_argument, NULL, OPTION_MTUNE},
  9624.   {"mmnemonic", required_argument, NULL, OPTION_MMNEMONIC},
  9625.   {"msyntax", required_argument, NULL, OPTION_MSYNTAX},
  9626.   {"mindex-reg", no_argument, NULL, OPTION_MINDEX_REG},
  9627.   {"mnaked-reg", no_argument, NULL, OPTION_MNAKED_REG},
  9628.   {"mold-gcc", no_argument, NULL, OPTION_MOLD_GCC},
  9629.   {"msse2avx", no_argument, NULL, OPTION_MSSE2AVX},
  9630.   {"msse-check", required_argument, NULL, OPTION_MSSE_CHECK},
  9631.   {"moperand-check", required_argument, NULL, OPTION_MOPERAND_CHECK},
  9632.   {"mavxscalar", required_argument, NULL, OPTION_MAVXSCALAR},
  9633.   {"madd-bnd-prefix", no_argument, NULL, OPTION_MADD_BND_PREFIX},
  9634.   {"mevexlig", required_argument, NULL, OPTION_MEVEXLIG},
  9635.   {"mevexwig", required_argument, NULL, OPTION_MEVEXWIG},
  9636. # if defined (TE_PE) || defined (TE_PEP)
  9637.   {"mbig-obj", no_argument, NULL, OPTION_MBIG_OBJ},
  9638. #endif
  9639.   {"momit-lock-prefix", required_argument, NULL, OPTION_OMIT_LOCK_PREFIX},
  9640.   {"mevexrcig", required_argument, NULL, OPTION_MEVEXRCIG},
  9641.   {"mamd64", no_argument, NULL, OPTION_MAMD64},
  9642.   {"mintel64", no_argument, NULL, OPTION_MINTEL64},
  9643.   {NULL, no_argument, NULL, 0}
  9644. };
  9645. size_t md_longopts_size = sizeof (md_longopts);
  9646.  
  9647. int
  9648. md_parse_option (int c, char *arg)
  9649. {
  9650.   unsigned int j;
  9651.   char *arch, *next;
  9652.  
  9653.   switch (c)
  9654.     {
  9655.     case 'n':
  9656.       optimize_align_code = 0;
  9657.       break;
  9658.  
  9659.     case 'q':
  9660.       quiet_warnings = 1;
  9661.       break;
  9662.  
  9663. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  9664.       /* -Qy, -Qn: SVR4 arguments controlling whether a .comment section
  9665.          should be emitted or not.  FIXME: Not implemented.  */
  9666.     case 'Q':
  9667.       break;
  9668.  
  9669.       /* -V: SVR4 argument to print version ID.  */
  9670.     case 'V':
  9671.       print_version_id ();
  9672.       break;
  9673.  
  9674.       /* -k: Ignore for FreeBSD compatibility.  */
  9675.     case 'k':
  9676.       break;
  9677.  
  9678.     case 's':
  9679.       /* -s: On i386 Solaris, this tells the native assembler to use
  9680.          .stab instead of .stab.excl.  We always use .stab anyhow.  */
  9681.       break;
  9682.  
  9683.     case OPTION_MSHARED:
  9684.       shared = 1;
  9685.       break;
  9686. #endif
  9687. #if (defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF) \
  9688.      || defined (TE_PE) || defined (TE_PEP) || defined (OBJ_MACH_O))
  9689.     case OPTION_64:
  9690.       {
  9691.         const char **list, **l;
  9692.  
  9693.         list = bfd_target_list ();
  9694.         for (l = list; *l != NULL; l++)
  9695.           if (CONST_STRNEQ (*l, "elf64-x86-64")
  9696.               || strcmp (*l, "coff-x86-64") == 0
  9697.               || strcmp (*l, "pe-x86-64") == 0
  9698.               || strcmp (*l, "pei-x86-64") == 0
  9699.               || strcmp (*l, "mach-o-x86-64") == 0)
  9700.             {
  9701.               default_arch = "x86_64";
  9702.               break;
  9703.             }
  9704.         if (*l == NULL)
  9705.           as_fatal (_("no compiled in support for x86_64"));
  9706.         free (list);
  9707.       }
  9708.       break;
  9709. #endif
  9710.  
  9711. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  9712.     case OPTION_X32:
  9713.       if (IS_ELF)
  9714.         {
  9715.           const char **list, **l;
  9716.  
  9717.           list = bfd_target_list ();
  9718.           for (l = list; *l != NULL; l++)
  9719.             if (CONST_STRNEQ (*l, "elf32-x86-64"))
  9720.               {
  9721.                 default_arch = "x86_64:32";
  9722.                 break;
  9723.               }
  9724.           if (*l == NULL)
  9725.             as_fatal (_("no compiled in support for 32bit x86_64"));
  9726.           free (list);
  9727.         }
  9728.       else
  9729.         as_fatal (_("32bit x86_64 is only supported for ELF"));
  9730.       break;
  9731. #endif
  9732.  
  9733.     case OPTION_32:
  9734.       default_arch = "i386";
  9735.       break;
  9736.  
  9737.     case OPTION_DIVIDE:
  9738. #ifdef SVR4_COMMENT_CHARS
  9739.       {
  9740.         char *n, *t;
  9741.         const char *s;
  9742.  
  9743.         n = (char *) xmalloc (strlen (i386_comment_chars) + 1);
  9744.         t = n;
  9745.         for (s = i386_comment_chars; *s != '\0'; s++)
  9746.           if (*s != '/')
  9747.             *t++ = *s;
  9748.         *t = '\0';
  9749.         i386_comment_chars = n;
  9750.       }
  9751. #endif
  9752.       break;
  9753.  
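    /* For illustration (hypothetical arguments): the value is a processor
       name optionally followed by `+'-separated extensions, e.g.
       `-march=i686+sse2'; cpu_arch entries whose names begin with `.' are
       extensions, and negated entries subtract the corresponding flags.  */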
  9754.     case OPTION_MARCH:
  9755.       arch = xstrdup (arg);
  9756.       do
  9757.         {
  9758.           if (*arch == '.')
  9759.             as_fatal (_("invalid -march= option: `%s'"), arg);
  9760.           next = strchr (arch, '+');
  9761.           if (next)
  9762.             *next++ = '\0';
  9763.           for (j = 0; j < ARRAY_SIZE (cpu_arch); j++)
  9764.             {
  9765.               if (strcmp (arch, cpu_arch [j].name) == 0)
  9766.                 {
  9767.                   /* Processor.  */
  9768.                   if (! cpu_arch[j].flags.bitfield.cpui386)
  9769.                     continue;
  9770.  
  9771.                   cpu_arch_name = cpu_arch[j].name;
  9772.                   cpu_sub_arch_name = NULL;
  9773.                   cpu_arch_flags = cpu_arch[j].flags;
  9774.                   cpu_arch_isa = cpu_arch[j].type;
  9775.                   cpu_arch_isa_flags = cpu_arch[j].flags;
  9776.                   if (!cpu_arch_tune_set)
  9777.                     {
  9778.                       cpu_arch_tune = cpu_arch_isa;
  9779.                       cpu_arch_tune_flags = cpu_arch_isa_flags;
  9780.                     }
  9781.                   break;
  9782.                 }
  9783.               else if (*cpu_arch [j].name == '.'
  9784.                        && strcmp (arch, cpu_arch [j].name + 1) == 0)
  9785.                 {
  9786.                   /* ISA extension.  */
  9787.                   i386_cpu_flags flags;
  9788.  
  9789.                   if (!cpu_arch[j].negated)
  9790.                     flags = cpu_flags_or (cpu_arch_flags,
  9791.                                           cpu_arch[j].flags);
  9792.                   else
  9793.                     flags = cpu_flags_and_not (cpu_arch_flags,
  9794.                                                cpu_arch[j].flags);
  9795.  
  9796.                   if (!valid_iamcu_cpu_flags (&flags))
  9797.                     as_fatal (_("`%s' isn't valid for Intel MCU"), arch);
  9798.                   else if (!cpu_flags_equal (&flags, &cpu_arch_flags))
  9799.                     {
  9800.                       if (cpu_sub_arch_name)
  9801.                         {
  9802.                           char *name = cpu_sub_arch_name;
  9803.                           cpu_sub_arch_name = concat (name,
  9804.                                                       cpu_arch[j].name,
  9805.                                                       (const char *) NULL);
  9806.                           free (name);
  9807.                         }
  9808.                       else
  9809.                         cpu_sub_arch_name = xstrdup (cpu_arch[j].name);
  9810.                       cpu_arch_flags = flags;
  9811.                       cpu_arch_isa_flags = flags;
  9812.                     }
  9813.                   break;
  9814.                 }
  9815.             }
  9816.  
  9817.           if (j >= ARRAY_SIZE (cpu_arch))
  9818.             as_fatal (_("invalid -march= option: `%s'"), arg);
  9819.  
  9820.           arch = next;
  9821.         }
  9822.       while (next != NULL);
  9823.       break;
  9824.  
  9825.     case OPTION_MTUNE:
  9826.       if (*arg == '.')
  9827.         as_fatal (_("invalid -mtune= option: `%s'"), arg);
  9828.       for (j = 0; j < ARRAY_SIZE (cpu_arch); j++)
  9829.         {
  9830.           if (strcmp (arg, cpu_arch [j].name) == 0)
  9831.             {
  9832.               cpu_arch_tune_set = 1;
  9833.               cpu_arch_tune = cpu_arch [j].type;
  9834.               cpu_arch_tune_flags = cpu_arch[j].flags;
  9835.               break;
  9836.             }
  9837.         }
  9838.       if (j >= ARRAY_SIZE (cpu_arch))
  9839.         as_fatal (_("invalid -mtune= option: `%s'"), arg);
  9840.       break;
  9841.  
  9842.     case OPTION_MMNEMONIC:
  9843.       if (strcasecmp (arg, "att") == 0)
  9844.         intel_mnemonic = 0;
  9845.       else if (strcasecmp (arg, "intel") == 0)
  9846.         intel_mnemonic = 1;
  9847.       else
  9848.         as_fatal (_("invalid -mmnemonic= option: `%s'"), arg);
  9849.       break;
  9850.  
  9851.     case OPTION_MSYNTAX:
  9852.       if (strcasecmp (arg, "att") == 0)
  9853.         intel_syntax = 0;
  9854.       else if (strcasecmp (arg, "intel") == 0)
  9855.         intel_syntax = 1;
  9856.       else
  9857.         as_fatal (_("invalid -msyntax= option: `%s'"), arg);
  9858.       break;
  9859.  
  9860.     case OPTION_MINDEX_REG:
  9861.       allow_index_reg = 1;
  9862.       break;
  9863.  
  9864.     case OPTION_MNAKED_REG:
  9865.       allow_naked_reg = 1;
  9866.       break;
  9867.  
  9868.     case OPTION_MOLD_GCC:
  9869.       old_gcc = 1;
  9870.       break;
  9871.  
  9872.     case OPTION_MSSE2AVX:
  9873.       sse2avx = 1;
  9874.       break;
  9875.  
  9876.     case OPTION_MSSE_CHECK:
  9877.       if (strcasecmp (arg, "error") == 0)
  9878.         sse_check = check_error;
  9879.       else if (strcasecmp (arg, "warning") == 0)
  9880.         sse_check = check_warning;
  9881.       else if (strcasecmp (arg, "none") == 0)
  9882.         sse_check = check_none;
  9883.       else
  9884.         as_fatal (_("invalid -msse-check= option: `%s'"), arg);
  9885.       break;
  9886.  
  9887.     case OPTION_MOPERAND_CHECK:
  9888.       if (strcasecmp (arg, "error") == 0)
  9889.         operand_check = check_error;
  9890.       else if (strcasecmp (arg, "warning") == 0)
  9891.         operand_check = check_warning;
  9892.       else if (strcasecmp (arg, "none") == 0)
  9893.         operand_check = check_none;
  9894.       else
  9895.         as_fatal (_("invalid -moperand-check= option: `%s'"), arg);
  9896.       break;
  9897.  
  9898.     case OPTION_MAVXSCALAR:
  9899.       if (strcasecmp (arg, "128") == 0)
  9900.         avxscalar = vex128;
  9901.       else if (strcasecmp (arg, "256") == 0)
  9902.         avxscalar = vex256;
  9903.       else
  9904.         as_fatal (_("invalid -mavxscalar= option: `%s'"), arg);
  9905.       break;
  9906.  
  9907.     case OPTION_MADD_BND_PREFIX:
  9908.       add_bnd_prefix = 1;
  9909.       break;
  9910.  
  9911.     case OPTION_MEVEXLIG:
  9912.       if (strcmp (arg, "128") == 0)
  9913.         evexlig = evexl128;
  9914.       else if (strcmp (arg, "256") == 0)
  9915.         evexlig = evexl256;
  9916.       else  if (strcmp (arg, "512") == 0)
  9917.         evexlig = evexl512;
  9918.       else
  9919.         as_fatal (_("invalid -mevexlig= option: `%s'"), arg);
  9920.       break;
  9921.  
  9922.     case OPTION_MEVEXRCIG:
  9923.       if (strcmp (arg, "rne") == 0)
  9924.         evexrcig = rne;
  9925.       else if (strcmp (arg, "rd") == 0)
  9926.         evexrcig = rd;
  9927.       else if (strcmp (arg, "ru") == 0)
  9928.         evexrcig = ru;
  9929.       else if (strcmp (arg, "rz") == 0)
  9930.         evexrcig = rz;
  9931.       else
  9932.         as_fatal (_("invalid -mevexrcig= option: `%s'"), arg);
  9933.       break;
  9934.  
  9935.     case OPTION_MEVEXWIG:
  9936.       if (strcmp (arg, "0") == 0)
  9937.         evexwig = evexw0;
  9938.       else if (strcmp (arg, "1") == 0)
  9939.         evexwig = evexw1;
  9940.       else
  9941.         as_fatal (_("invalid -mevexwig= option: `%s'"), arg);
  9942.       break;
  9943.  
  9944. # if defined (TE_PE) || defined (TE_PEP)
  9945.     case OPTION_MBIG_OBJ:
  9946.       use_big_obj = 1;
  9947.       break;
  9948. #endif
  9949.  
  9950.     case OPTION_OMIT_LOCK_PREFIX:
  9951.       if (strcasecmp (arg, "yes") == 0)
  9952.         omit_lock_prefix = 1;
  9953.       else if (strcasecmp (arg, "no") == 0)
  9954.         omit_lock_prefix = 0;
  9955.       else
  9956.         as_fatal (_("invalid -momit-lock-prefix= option: `%s'"), arg);
  9957.       break;
  9958.  
  9959.     case OPTION_MAMD64:
  9960.       cpu_arch_flags.bitfield.cpuamd64 = 1;
  9961.       cpu_arch_flags.bitfield.cpuintel64 = 0;
  9962.       cpu_arch_isa_flags.bitfield.cpuamd64 = 1;
  9963.       cpu_arch_isa_flags.bitfield.cpuintel64 = 0;
  9964.       break;
  9965.  
  9966.     case OPTION_MINTEL64:
  9967.       cpu_arch_flags.bitfield.cpuamd64 = 0;
  9968.       cpu_arch_flags.bitfield.cpuintel64 = 1;
  9969.       cpu_arch_isa_flags.bitfield.cpuamd64 = 0;
  9970.       cpu_arch_isa_flags.bitfield.cpuintel64 = 1;
  9971.       break;
  9972.  
  9973.     default:
  9974.       return 0;
  9975.     }
  9976.   return 1;
  9977. }
  9978.  
  9979. #define MESSAGE_TEMPLATE \
  9980. "                                                                                "
  9981.  
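/* A note on the routine below (assumption about intent): show_arch packs
   architecture names into lines no wider than MESSAGE_TEMPLATE, starting
   at column 27, presumably so continuation lines line up under the option
   text printed by md_show_usage.  */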
  9982. static void
  9983. show_arch (FILE *stream, int ext, int check)
  9984. {
  9985.   static char message[] = MESSAGE_TEMPLATE;
  9986.   char *start = message + 27;
  9987.   char *p;
  9988.   int size = sizeof (MESSAGE_TEMPLATE);
  9989.   int left;
  9990.   const char *name;
  9991.   int len;
  9992.   unsigned int j;
  9993.  
  9994.   p = start;
  9995.   left = size - (start - message);
  9996.   for (j = 0; j < ARRAY_SIZE (cpu_arch); j++)
  9997.     {
  9998.       /* Should it be skipped?  */
  9999.       if (cpu_arch [j].skip)
  10000.         continue;
  10001.  
  10002.       name = cpu_arch [j].name;
  10003.       len = cpu_arch [j].len;
  10004.       if (*name == '.')
  10005.         {
  10006.           /* It is an extension.  Skip if we aren't asked to show it.  */
  10007.           if (ext)
  10008.             {
  10009.               name++;
  10010.               len--;
  10011.             }
  10012.           else
  10013.             continue;
  10014.         }
  10015.       else if (ext)
  10016.         {
  10017.           /* It is a processor.  Skip if we only show extensions.  */
  10018.           continue;
  10019.         }
  10020.       else if (check && ! cpu_arch[j].flags.bitfield.cpui386)
  10021.         {
  10022.           /* It is an impossible processor - skip.  */
  10023.           continue;
  10024.         }
  10025.  
  10026.       /* Reserve 2 spaces for ", " or ",\0" */
  10027.       left -= len + 2;
  10028.  
  10029.       /* Check if there is any room.  */
  10030.       if (left >= 0)
  10031.         {
  10032.           if (p != start)
  10033.             {
  10034.               *p++ = ',';
  10035.               *p++ = ' ';
  10036.             }
  10037.           p = mempcpy (p, name, len);
  10038.         }
  10039.       else
  10040.         {
  10041.           /* Output the current message now and start a new one.  */
  10042.           *p++ = ',';
  10043.           *p = '\0';
  10044.           fprintf (stream, "%s\n", message);
  10045.           p = start;
  10046.           left = size - (start - message) - len - 2;
  10047.  
  10048.           gas_assert (left >= 0);
  10049.  
  10050.           p = mempcpy (p, name, len);
  10051.         }
  10052.     }
  10053.  
  10054.   *p = '\0';
  10055.   fprintf (stream, "%s\n", message);
  10056. }
  10057.  
  10058. void
  10059. md_show_usage (FILE *stream)
  10060. {
  10061. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  10062.   fprintf (stream, _("\
  10063.  -Q                      ignored\n\
  10064.  -V                      print assembler version number\n\
  10065.  -k                      ignored\n"));
  10066. #endif
  10067.   fprintf (stream, _("\
  10068.  -n                      Do not optimize code alignment\n\
  10069.  -q                      quieten some warnings\n"));
  10070. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  10071.   fprintf (stream, _("\
  10072.  -s                      ignored\n"));
  10073. #endif
  10074. #if (defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF) \
  10075.      || defined (TE_PE) || defined (TE_PEP))
  10076.   fprintf (stream, _("\
  10077.  --32/--64/--x32         generate 32bit/64bit/x32 code\n"));
  10078. #endif
  10079. #ifdef SVR4_COMMENT_CHARS
  10080.   fprintf (stream, _("\
  10081.  --divide                do not treat `/' as a comment character\n"));
  10082. #else
  10083.   fprintf (stream, _("\
  10084.  --divide                ignored\n"));
  10085. #endif
  10086.   fprintf (stream, _("\
  10087.  -march=CPU[,+EXTENSION...]\n\
  10088.                          generate code for CPU and EXTENSION, CPU is one of:\n"));
  10089.   show_arch (stream, 0, 1);
  10090.   fprintf (stream, _("\
  10091.                          EXTENSION is combination of:\n"));
  10092.   show_arch (stream, 1, 0);
  10093.   fprintf (stream, _("\
  10094.  -mtune=CPU              optimize for CPU, CPU is one of:\n"));
  10095.   show_arch (stream, 0, 0);
  10096.   fprintf (stream, _("\
  10097.  -msse2avx               encode SSE instructions with VEX prefix\n"));
  10098.   fprintf (stream, _("\
  10099.  -msse-check=[none|error|warning]\n\
  10100.                          check SSE instructions\n"));
  10101.   fprintf (stream, _("\
  10102.  -moperand-check=[none|error|warning]\n\
  10103.                          check operand combinations for validity\n"));
  10104.   fprintf (stream, _("\
  10105.  -mavxscalar=[128|256]   encode scalar AVX instructions with specific vector\n\
  10106.                           length\n"));
  10107.   fprintf (stream, _("\
  10108.  -mevexlig=[128|256|512] encode scalar EVEX instructions with specific vector\n\
  10109.                           length\n"));
  10110.   fprintf (stream, _("\
  10111.  -mevexwig=[0|1]         encode EVEX instructions with specific EVEX.W value\n\
  10112.                           for instructions that ignore the EVEX.W bit\n"));
  10113.   fprintf (stream, _("\
  10114.  -mevexrcig=[rne|rd|ru|rz]\n\
  10115.                          encode EVEX instructions with specific EVEX.RC value\n\
  10116.                           for SAE-only instructions, which ignore EVEX.RC\n"));
  10117.   fprintf (stream, _("\
  10118.  -mmnemonic=[att|intel]  use AT&T/Intel mnemonic\n"));
  10119.   fprintf (stream, _("\
  10120.  -msyntax=[att|intel]    use AT&T/Intel syntax\n"));
  10121.   fprintf (stream, _("\
  10122.  -mindex-reg             support pseudo index registers\n"));
  10123.   fprintf (stream, _("\
  10124.  -mnaked-reg             don't require `%%' prefix for registers\n"));
  10125.   fprintf (stream, _("\
  10126.  -mold-gcc               support old (<= 2.8.1) versions of gcc\n"));
  10127.   fprintf (stream, _("\
  10128.  -madd-bnd-prefix        add BND prefix for all valid branches\n"));
  10129.   fprintf (stream, _("\
  10130.  -mshared                disable branch optimization for shared code\n"));
  10131. #if defined (TE_PE) || defined (TE_PEP)
  10132.   fprintf (stream, _("\
  10133.  -mbig-obj               generate big object files\n"));
  10134. #endif
  10135.   fprintf (stream, _("\
  10136.  -momit-lock-prefix=[no|yes]\n\
  10137.                          strip all lock prefixes\n"));
  10138.   fprintf (stream, _("\
  10139.  -mamd64                 accept only AMD64 ISA\n"));
  10140.   fprintf (stream, _("\
  10141.  -mintel64               accept only Intel64 ISA\n"));
  10142. }
  10143.  
  10144. #if ((defined (OBJ_MAYBE_COFF) && defined (OBJ_MAYBE_AOUT)) \
  10145.      || defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF) \
  10146.      || defined (TE_PE) || defined (TE_PEP) || defined (OBJ_MACH_O))
  10147.  
  10148. /* Pick the target format to use.  */
  10149.  
  10150. const char *
  10151. i386_target_format (void)
  10152. {
  10153.   if (!strncmp (default_arch, "x86_64", 6))
  10154.     {
  10155.       update_code_flag (CODE_64BIT, 1);
  10156.       if (default_arch[6] == '\0')
  10157.         x86_elf_abi = X86_64_ABI;
  10158.       else
  10159.         x86_elf_abi = X86_64_X32_ABI;
  10160.     }
  10161.   else if (!strcmp (default_arch, "i386"))
  10162.     update_code_flag (CODE_32BIT, 1);
  10163.   else if (!strcmp (default_arch, "iamcu"))
  10164.     {
  10165.       update_code_flag (CODE_32BIT, 1);
  10166.       if (cpu_arch_isa == PROCESSOR_UNKNOWN)
  10167.         {
  10168.           static const i386_cpu_flags iamcu_flags = CPU_IAMCU_FLAGS;
  10169.           cpu_arch_name = "iamcu";
  10170.           cpu_sub_arch_name = NULL;
  10171.           cpu_arch_flags = iamcu_flags;
  10172.           cpu_arch_isa = PROCESSOR_IAMCU;
  10173.           cpu_arch_isa_flags = iamcu_flags;
  10174.           if (!cpu_arch_tune_set)
  10175.             {
  10176.               cpu_arch_tune = cpu_arch_isa;
  10177.               cpu_arch_tune_flags = cpu_arch_isa_flags;
  10178.             }
  10179.         }
  10180.       else
  10181.         as_fatal (_("Intel MCU doesn't support `%s' architecture"),
  10182.                   cpu_arch_name);
  10183.     }
  10184.   else
  10185.     as_fatal (_("unknown architecture"));
  10186.  
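           /* Fall back to the default flags; cpu_arch[0] and cpu_arch[1] are
              expected to be the generic 32-bit and 64-bit entries, in that
              order.  */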
  10187.   if (cpu_flags_all_zero (&cpu_arch_isa_flags))
  10188.     cpu_arch_isa_flags = cpu_arch[flag_code == CODE_64BIT].flags;
  10189.   if (cpu_flags_all_zero (&cpu_arch_tune_flags))
  10190.     cpu_arch_tune_flags = cpu_arch[flag_code == CODE_64BIT].flags;
  10191.  
  10192.   switch (OUTPUT_FLAVOR)
  10193.     {
  10194. #if defined (OBJ_MAYBE_AOUT) || defined (OBJ_AOUT)
  10195.     case bfd_target_aout_flavour:
  10196.       return AOUT_TARGET_FORMAT;
  10197. #endif
  10198. #if defined (OBJ_MAYBE_COFF) || defined (OBJ_COFF)
  10199. # if defined (TE_PE) || defined (TE_PEP)
  10200.     case bfd_target_coff_flavour:
  10201.       if (flag_code == CODE_64BIT)
  10202.         return use_big_obj ? "pe-bigobj-x86-64" : "pe-x86-64";
  10203.       else
  10204.         return "pe-i386";
  10205. # elif defined (TE_GO32)
  10206.     case bfd_target_coff_flavour:
  10207.       return "coff-go32";
  10208. # else
  10209.     case bfd_target_coff_flavour:
  10210.       return "coff-i386";
  10211. # endif
  10212. #endif
  10213. #if defined (OBJ_MAYBE_ELF) || defined (OBJ_ELF)
  10214.     case bfd_target_elf_flavour:
  10215.       {
  10216.         const char *format;
  10217.  
  10218.         switch (x86_elf_abi)
  10219.           {
  10220.           default:
  10221.             format = ELF_TARGET_FORMAT;
  10222.             break;
  10223.           case X86_64_ABI:
  10224.             use_rela_relocations = 1;
  10225.             object_64bit = 1;
  10226.             format = ELF_TARGET_FORMAT64;
  10227.             break;
  10228.           case X86_64_X32_ABI:
  10229.             use_rela_relocations = 1;
  10230.             object_64bit = 1;
  10231.             disallow_64bit_reloc = 1;
  10232.             format = ELF_TARGET_FORMAT32;
  10233.             break;
  10234.           }
  10235.         if (cpu_arch_isa == PROCESSOR_L1OM)
  10236.           {
  10237.             if (x86_elf_abi != X86_64_ABI)
  10238.               as_fatal (_("Intel L1OM is 64bit only"));
  10239.             return ELF_TARGET_L1OM_FORMAT;
  10240.           }
  10241.         else if (cpu_arch_isa == PROCESSOR_K1OM)
  10242.           {
  10243.             if (x86_elf_abi != X86_64_ABI)
  10244.               as_fatal (_("Intel K1OM is 64bit only"));
  10245.             return ELF_TARGET_K1OM_FORMAT;
  10246.           }
  10247.         else if (cpu_arch_isa == PROCESSOR_IAMCU)
  10248.           {
  10249.             if (x86_elf_abi != I386_ABI)
  10250.               as_fatal (_("Intel MCU is 32bit only"));
  10251.             return ELF_TARGET_IAMCU_FORMAT;
  10252.           }
  10253.         else
  10254.           return format;
  10255.       }
  10256. #endif
  10257. #if defined (OBJ_MACH_O)
  10258.     case bfd_target_mach_o_flavour:
  10259.       if (flag_code == CODE_64BIT)
  10260.         {
  10261.           use_rela_relocations = 1;
  10262.           object_64bit = 1;
  10263.           return "mach-o-x86-64";
  10264.         }
  10265.       else
  10266.         return "mach-o-i386";
  10267. #endif
  10268.     default:
  10269.       abort ();
  10270.       return NULL;
  10271.     }
  10272. }
  10273.  
  10274. #endif /* OBJ_MAYBE_ more than one  */
  10275. symbolS *
  10276. md_undefined_symbol (char *name)
  10277. {
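           /* Compare the first few characters before paying for a full
              strcmp, so the common non-GOT case stays cheap.  */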
  10278.   if (name[0] == GLOBAL_OFFSET_TABLE_NAME[0]
  10279.       && name[1] == GLOBAL_OFFSET_TABLE_NAME[1]
  10280.       && name[2] == GLOBAL_OFFSET_TABLE_NAME[2]
  10281.       && strcmp (name, GLOBAL_OFFSET_TABLE_NAME) == 0)
  10282.     {
  10283.       if (!GOT_symbol)
  10284.         {
  10285.           if (symbol_find (name))
  10286.             as_bad (_("GOT already in symbol table"));
  10287.           GOT_symbol = symbol_new (name, undefined_section,
  10288.                                    (valueT) 0, &zero_address_frag);
  10289.         }
  10290.       return GOT_symbol;
  10291.     }
  10292.   return 0;
  10293. }
  10294.  
  10295. /* Round up a section size to the appropriate boundary.  */
  10296.  
  10297. valueT
  10298. md_section_align (segT segment ATTRIBUTE_UNUSED, valueT size)
  10299. {
  10300. #if (defined (OBJ_AOUT) || defined (OBJ_MAYBE_AOUT))
  10301.   if (OUTPUT_FLAVOR == bfd_target_aout_flavour)
  10302.     {
  10303.       /* For a.out, force the section size to be aligned.  If we don't do
  10304.          this, BFD will align it for us, but it will not write out the
  10305.          final bytes of the section.  This may be a bug in BFD, but it is
  10306.          easier to fix it here since that is how the other a.out targets
  10307.          work.  */
  10308.       int align;
  10309.  
  10310.       align = bfd_get_section_alignment (stdoutput, segment);
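               /* Round SIZE up to a multiple of (1 << align); e.g. with
                  align == 4 (a 16-byte boundary) a size of 0x23 becomes
                  (0x23 + 0xf) & ~0xf == 0x30.  */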
  10311.       size = ((size + (1 << align) - 1) & (-((valueT) 1 << align)));
  10312.     }
  10313. #endif
  10314.  
  10315.   return size;
  10316. }
  10317.  
  10318. /* On the i386, PC-relative offsets are relative to the start of the
  10319.    next instruction.  That is, the address of the offset, plus its
  10320.    size, since the offset is always the last part of the insn.  */
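         /* For example, a `call target' assembled at address 0x10 is encoded
            as 0xe8 followed by a 4-byte displacement starting at 0x11, so
            this routine returns 0x11 + 4 = 0x15, the address of the next
            instruction, and the displacement is computed relative to it.  */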
  10321.  
  10322. long
  10323. md_pcrel_from (fixS *fixP)
  10324. {
  10325.   return fixP->fx_size + fixP->fx_where + fixP->fx_frag->fr_address;
  10326. }
  10327.  
  10328. #ifndef I386COFF
  10329.  
  10330. static void
  10331. s_bss (int ignore ATTRIBUTE_UNUSED)
  10332. {
  10333.   int temp;
  10334.  
  10335. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  10336.   if (IS_ELF)
  10337.     obj_elf_section_change_hook ();
  10338. #endif
  10339.   temp = get_absolute_expression ();
  10340.   subseg_set (bss_section, (subsegT) temp);
  10341.   demand_empty_rest_of_line ();
  10342. }
  10343.  
  10344. #endif
  10345.  
  10346. void
  10347. i386_validate_fix (fixS *fixp)
  10348. {
  10349.   if (fixp->fx_subsy)
  10350.     {
  10351.       if (fixp->fx_subsy == GOT_symbol)
  10352.         {
  10353.           if (fixp->fx_r_type == BFD_RELOC_32_PCREL)
  10354.             {
  10355.               if (!object_64bit)
  10356.                 abort ();
  10357. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  10358.               if (fixp->fx_tcbit2)
  10359.                 fixp->fx_r_type = (fixp->fx_tcbit
  10360.                                    ? BFD_RELOC_X86_64_REX_GOTPCRELX
  10361.                                    : BFD_RELOC_X86_64_GOTPCRELX);
  10362.               else
  10363. #endif
  10364.                 fixp->fx_r_type = BFD_RELOC_X86_64_GOTPCREL;
  10365.             }
  10366.           else
  10367.             {
  10368.               if (!object_64bit)
  10369.                 fixp->fx_r_type = BFD_RELOC_386_GOTOFF;
  10370.               else
  10371.                 fixp->fx_r_type = BFD_RELOC_X86_64_GOTOFF64;
  10372.             }
  10373.           fixp->fx_subsy = 0;
  10374.         }
  10375.     }
  10376. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  10377.   else if (!object_64bit)
  10378.     {
  10379.       if (fixp->fx_r_type == BFD_RELOC_386_GOT32
  10380.           && fixp->fx_tcbit2)
  10381.         fixp->fx_r_type = BFD_RELOC_386_GOT32X;
  10382.     }
  10383. #endif
  10384. }
  10385.  
  10386. arelent *
  10387. tc_gen_reloc (asection *section ATTRIBUTE_UNUSED, fixS *fixp)
  10388. {
  10389.   arelent *rel;
  10390.   bfd_reloc_code_real_type code;
  10391.  
  10392.   switch (fixp->fx_r_type)
  10393.     {
  10394. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  10395.     case BFD_RELOC_SIZE32:
  10396.     case BFD_RELOC_SIZE64:
  10397.       if (S_IS_DEFINED (fixp->fx_addsy)
  10398.           && !S_IS_EXTERNAL (fixp->fx_addsy))
  10399.         {
  10400.           /* Resolve size relocation against local symbol to size of
  10401.              the symbol plus addend.  */
  10402.           valueT value = S_GET_SIZE (fixp->fx_addsy) + fixp->fx_offset;
  10403.           if (fixp->fx_r_type == BFD_RELOC_SIZE32
  10404.               && !fits_in_unsigned_long (value))
  10405.             as_bad_where (fixp->fx_file, fixp->fx_line,
  10406.                           _("symbol size computation overflow"));
  10407.           fixp->fx_addsy = NULL;
  10408.           fixp->fx_subsy = NULL;
  10409.           md_apply_fix (fixp, (valueT *) &value, NULL);
  10410.           return NULL;
  10411.         }
  10412. #endif
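               /* Fall through.  */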
  10413.  
  10414.     case BFD_RELOC_X86_64_PLT32:
  10415.     case BFD_RELOC_X86_64_GOT32:
  10416.     case BFD_RELOC_X86_64_GOTPCREL:
  10417.     case BFD_RELOC_X86_64_GOTPCRELX:
  10418.     case BFD_RELOC_X86_64_REX_GOTPCRELX:
  10419.     case BFD_RELOC_386_PLT32:
  10420.     case BFD_RELOC_386_GOT32:
  10421.     case BFD_RELOC_386_GOT32X:
  10422.     case BFD_RELOC_386_GOTOFF:
  10423.     case BFD_RELOC_386_GOTPC:
  10424.     case BFD_RELOC_386_TLS_GD:
  10425.     case BFD_RELOC_386_TLS_LDM:
  10426.     case BFD_RELOC_386_TLS_LDO_32:
  10427.     case BFD_RELOC_386_TLS_IE_32:
  10428.     case BFD_RELOC_386_TLS_IE:
  10429.     case BFD_RELOC_386_TLS_GOTIE:
  10430.     case BFD_RELOC_386_TLS_LE_32:
  10431.     case BFD_RELOC_386_TLS_LE:
  10432.     case BFD_RELOC_386_TLS_GOTDESC:
  10433.     case BFD_RELOC_386_TLS_DESC_CALL:
  10434.     case BFD_RELOC_X86_64_TLSGD:
  10435.     case BFD_RELOC_X86_64_TLSLD:
  10436.     case BFD_RELOC_X86_64_DTPOFF32:
  10437.     case BFD_RELOC_X86_64_DTPOFF64:
  10438.     case BFD_RELOC_X86_64_GOTTPOFF:
  10439.     case BFD_RELOC_X86_64_TPOFF32:
  10440.     case BFD_RELOC_X86_64_TPOFF64:
  10441.     case BFD_RELOC_X86_64_GOTOFF64:
  10442.     case BFD_RELOC_X86_64_GOTPC32:
  10443.     case BFD_RELOC_X86_64_GOT64:
  10444.     case BFD_RELOC_X86_64_GOTPCREL64:
  10445.     case BFD_RELOC_X86_64_GOTPC64:
  10446.     case BFD_RELOC_X86_64_GOTPLT64:
  10447.     case BFD_RELOC_X86_64_PLTOFF64:
  10448.     case BFD_RELOC_X86_64_GOTPC32_TLSDESC:
  10449.     case BFD_RELOC_X86_64_TLSDESC_CALL:
  10450.     case BFD_RELOC_RVA:
  10451.     case BFD_RELOC_VTABLE_ENTRY:
  10452.     case BFD_RELOC_VTABLE_INHERIT:
  10453. #ifdef TE_PE
  10454.     case BFD_RELOC_32_SECREL:
  10455. #endif
  10456.       code = fixp->fx_r_type;
  10457.       break;
  10458.     case BFD_RELOC_X86_64_32S:
  10459.       if (!fixp->fx_pcrel)
  10460.         {
  10461.           /* Don't turn BFD_RELOC_X86_64_32S into BFD_RELOC_32.  */
  10462.           code = fixp->fx_r_type;
  10463.           break;
  10464.         }
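               /* Fall through.  */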
  10465.     default:
  10466.       if (fixp->fx_pcrel)
  10467.         {
  10468.           switch (fixp->fx_size)
  10469.             {
  10470.             default:
  10471.               as_bad_where (fixp->fx_file, fixp->fx_line,
  10472.                             _("cannot do %d byte pc-relative relocation"),
  10473.                             fixp->fx_size);
  10474.               code = BFD_RELOC_32_PCREL;
  10475.               break;
  10476.             case 1: code = BFD_RELOC_8_PCREL;  break;
  10477.             case 2: code = BFD_RELOC_16_PCREL; break;
  10478.             case 4: code = BFD_RELOC_32_PCREL; break;
  10479. #ifdef BFD64
  10480.             case 8: code = BFD_RELOC_64_PCREL; break;
  10481. #endif
  10482.             }
  10483.         }
  10484.       else
  10485.         {
  10486.           switch (fixp->fx_size)
  10487.             {
  10488.             default:
  10489.               as_bad_where (fixp->fx_file, fixp->fx_line,
  10490.                             _("cannot do %d byte relocation"),
  10491.                             fixp->fx_size);
  10492.               code = BFD_RELOC_32;
  10493.               break;
  10494.             case 1: code = BFD_RELOC_8;  break;
  10495.             case 2: code = BFD_RELOC_16; break;
  10496.             case 4: code = BFD_RELOC_32; break;
  10497. #ifdef BFD64
  10498.             case 8: code = BFD_RELOC_64; break;
  10499. #endif
  10500.             }
  10501.         }
  10502.       break;
  10503.     }
  10504.  
  10505.   if ((code == BFD_RELOC_32
  10506.        || code == BFD_RELOC_32_PCREL
  10507.        || code == BFD_RELOC_X86_64_32S)
  10508.       && GOT_symbol
  10509.       && fixp->fx_addsy == GOT_symbol)
  10510.     {
  10511.       if (!object_64bit)
  10512.         code = BFD_RELOC_386_GOTPC;
  10513.       else
  10514.         code = BFD_RELOC_X86_64_GOTPC32;
  10515.     }
  10516.   if ((code == BFD_RELOC_64 || code == BFD_RELOC_64_PCREL)
  10517.       && GOT_symbol
  10518.       && fixp->fx_addsy == GOT_symbol)
  10519.     {
  10520.       code = BFD_RELOC_X86_64_GOTPC64;
  10521.     }
  10522.  
  10523.   rel = (arelent *) xmalloc (sizeof (arelent));
  10524.   rel->sym_ptr_ptr = (asymbol **) xmalloc (sizeof (asymbol *));
  10525.   *rel->sym_ptr_ptr = symbol_get_bfdsym (fixp->fx_addsy);
  10526.  
  10527.   rel->address = fixp->fx_frag->fr_address + fixp->fx_where;
  10528.  
  10529.   if (!use_rela_relocations)
  10530.     {
  10531.       /* HACK: Since i386 ELF uses Rel instead of Rela, encode the
  10532.          vtable entry to be used in the relocation's section offset.  */
  10533.       if (fixp->fx_r_type == BFD_RELOC_VTABLE_ENTRY)
  10534.         rel->address = fixp->fx_offset;
  10535. #if defined (OBJ_COFF) && defined (TE_PE)
  10536.       else if (fixp->fx_addsy && S_IS_WEAK (fixp->fx_addsy))
  10537.         rel->addend = fixp->fx_addnumber - (S_GET_VALUE (fixp->fx_addsy) * 2);
  10538.       else
  10539. #endif
  10540.       rel->addend = 0;
  10541.     }
  10542.   /* Use the rela in 64bit mode.  */
  10543.   else
  10544.     {
  10545.       if (disallow_64bit_reloc)
  10546.         switch (code)
  10547.           {
  10548.           case BFD_RELOC_X86_64_DTPOFF64:
  10549.           case BFD_RELOC_X86_64_TPOFF64:
  10550.           case BFD_RELOC_64_PCREL:
  10551.           case BFD_RELOC_X86_64_GOTOFF64:
  10552.           case BFD_RELOC_X86_64_GOT64:
  10553.           case BFD_RELOC_X86_64_GOTPCREL64:
  10554.           case BFD_RELOC_X86_64_GOTPC64:
  10555.           case BFD_RELOC_X86_64_GOTPLT64:
  10556.           case BFD_RELOC_X86_64_PLTOFF64:
  10557.             as_bad_where (fixp->fx_file, fixp->fx_line,
  10558.                           _("cannot represent relocation type %s in x32 mode"),
  10559.                           bfd_get_reloc_code_name (code));
  10560.             break;
  10561.           default:
  10562.             break;
  10563.           }
  10564.  
  10565.       if (!fixp->fx_pcrel)
  10566.         rel->addend = fixp->fx_offset;
  10567.       else
  10568.         switch (code)
  10569.           {
  10570.           case BFD_RELOC_X86_64_PLT32:
  10571.           case BFD_RELOC_X86_64_GOT32:
  10572.           case BFD_RELOC_X86_64_GOTPCREL:
  10573.           case BFD_RELOC_X86_64_GOTPCRELX:
  10574.           case BFD_RELOC_X86_64_REX_GOTPCRELX:
  10575.           case BFD_RELOC_X86_64_TLSGD:
  10576.           case BFD_RELOC_X86_64_TLSLD:
  10577.           case BFD_RELOC_X86_64_GOTTPOFF:
  10578.           case BFD_RELOC_X86_64_GOTPC32_TLSDESC:
  10579.           case BFD_RELOC_X86_64_TLSDESC_CALL:
  10580.             rel->addend = fixp->fx_offset - fixp->fx_size;
  10581.             break;
  10582.           default:
  10583.             rel->addend = (section->vma
  10584.                            - fixp->fx_size
  10585.                            + fixp->fx_addnumber
  10586.                            + md_pcrel_from (fixp));
  10587.             break;
  10588.           }
  10589.     }
  10590.  
  10591.   rel->howto = bfd_reloc_type_lookup (stdoutput, code);
  10592.   if (rel->howto == NULL)
  10593.     {
  10594.       as_bad_where (fixp->fx_file, fixp->fx_line,
  10595.                     _("cannot represent relocation type %s"),
  10596.                     bfd_get_reloc_code_name (code));
  10597.       /* Set howto to a garbage value so that we can keep going.  */
  10598.       rel->howto = bfd_reloc_type_lookup (stdoutput, BFD_RELOC_32);
  10599.       gas_assert (rel->howto != NULL);
  10600.     }
  10601.  
  10602.   return rel;
  10603. }
  10604.  
  10605. #include "tc-i386-intel.c"
  10606.  
  10607. void
  10608. tc_x86_parse_to_dw2regnum (expressionS *exp)
  10609. {
  10610.   int saved_naked_reg;
  10611.   char saved_register_dot;
  10612.  
  10613.   saved_naked_reg = allow_naked_reg;
  10614.   allow_naked_reg = 1;
  10615.   saved_register_dot = register_chars['.'];
  10616.   register_chars['.'] = '.';
  10617.   allow_pseudo_reg = 1;
  10618.   expression_and_evaluate (exp);
  10619.   allow_pseudo_reg = 0;
  10620.   register_chars['.'] = saved_register_dot;
  10621.   allow_naked_reg = saved_naked_reg;
  10622.  
  10623.   if (exp->X_op == O_register && exp->X_add_number >= 0)
  10624.     {
  10625.       if ((addressT) exp->X_add_number < i386_regtab_size)
  10626.         {
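                   /* flag_code >> 1 is 1 for 64-bit code and 0 otherwise
                      (cf. tc_x86_frame_initial_instructions below); it
                      selects the matching DWARF register-number column.  */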
  10627.           exp->X_op = O_constant;
  10628.           exp->X_add_number = i386_regtab[exp->X_add_number]
  10629.                               .dw2_regnum[flag_code >> 1];
  10630.         }
  10631.       else
  10632.         exp->X_op = O_illegal;
  10633.     }
  10634. }
  10635.  
  10636. void
  10637. tc_x86_frame_initial_instructions (void)
  10638. {
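           /* Cache of the stack pointer's DWARF register number ("esp" or
              "rsp"), looked up lazily once per code size: index 0 for 32-bit
              code, index 1 for 64-bit code.  */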
  10639.   static unsigned int sp_regno[2];
  10640.  
  10641.   if (!sp_regno[flag_code >> 1])
  10642.     {
  10643.       char *saved_input = input_line_pointer;
  10644.       char sp[][4] = {"esp", "rsp"};
  10645.       expressionS exp;
  10646.  
  10647.       input_line_pointer = sp[flag_code >> 1];
  10648.       tc_x86_parse_to_dw2regnum (&exp);
  10649.       gas_assert (exp.X_op == O_constant);
  10650.       sp_regno[flag_code >> 1] = exp.X_add_number;
  10651.       input_line_pointer = saved_input;
  10652.     }
  10653.  
  10654.   cfi_add_CFA_def_cfa (sp_regno[flag_code >> 1], -x86_cie_data_alignment);
  10655.   cfi_add_CFA_offset (x86_dwarf2_return_column, x86_cie_data_alignment);
  10656. }
  10657.  
  10658. int
  10659. x86_dwarf2_addr_size (void)
  10660. {
  10661. #if defined (OBJ_MAYBE_ELF) || defined (OBJ_ELF)
  10662.   if (x86_elf_abi == X86_64_X32_ABI)
  10663.     return 4;
  10664. #endif
  10665.   return bfd_arch_bits_per_address (stdoutput) / 8;
  10666. }
  10667.  
  10668. int
  10669. i386_elf_section_type (const char *str, size_t len)
  10670. {
  10671.   if (flag_code == CODE_64BIT
  10672.       && len == sizeof ("unwind") - 1
  10673.       && strncmp (str, "unwind", 6) == 0)
  10674.     return SHT_X86_64_UNWIND;
  10675.  
  10676.   return -1;
  10677. }
  10678.  
  10679. #ifdef TE_SOLARIS
  10680. void
  10681. i386_solaris_fix_up_eh_frame (segT sec)
  10682. {
  10683.   if (flag_code == CODE_64BIT)
  10684.     elf_section_type (sec) = SHT_X86_64_UNWIND;
  10685. }
  10686. #endif
  10687.  
  10688. #ifdef TE_PE
  10689. void
  10690. tc_pe_dwarf2_emit_offset (symbolS *symbol, unsigned int size)
  10691. {
  10692.   expressionS exp;
  10693.  
  10694.   exp.X_op = O_secrel;
  10695.   exp.X_add_symbol = symbol;
  10696.   exp.X_add_number = 0;
  10697.   emit_expr (&exp, size);
  10698. }
  10699. #endif
  10700.  
  10701. #if defined (OBJ_ELF) || defined (OBJ_MAYBE_ELF)
  10702. /* For ELF on x86-64, add support for SHF_X86_64_LARGE.  */
  10703.  
  10704. bfd_vma
  10705. x86_64_section_letter (int letter, char **ptr_msg)
  10706. {
  10707.   if (flag_code == CODE_64BIT)
  10708.     {
  10709.       if (letter == 'l')
  10710.         return SHF_X86_64_LARGE;
  10711.  
  10712.       *ptr_msg = _("bad .section directive: want a,l,w,x,M,S,G,T in string");
  10713.     }
  10714.   else
  10715.     *ptr_msg = _("bad .section directive: want a,w,x,M,S,G,T in string");
  10716.   return -1;
  10717. }
  10718.  
  10719. bfd_vma
  10720. x86_64_section_word (char *str, size_t len)
  10721. {
  10722.   if (len == 5 && flag_code == CODE_64BIT && CONST_STRNEQ (str, "large"))
  10723.     return SHF_X86_64_LARGE;
  10724.  
  10725.   return -1;
  10726. }
  10727.  
  10728. static void
  10729. handle_large_common (int small ATTRIBUTE_UNUSED)
  10730. {
  10731.   if (flag_code != CODE_64BIT)
  10732.     {
  10733.       s_comm_internal (0, elf_common_parse);
  10734.       as_warn (_(".largecomm supported only in 64bit mode, producing .comm"));
  10735.     }
  10736.   else
  10737.     {
  10738.       static segT lbss_section;
  10739.       asection *saved_com_section_ptr = elf_com_section_ptr;
  10740.       asection *saved_bss_section = bss_section;
  10741.  
  10742.       if (lbss_section == NULL)
  10743.         {
  10744.           flagword applicable;
  10745.           segT seg = now_seg;
  10746.           subsegT subseg = now_subseg;
  10747.  
  10748.           /* The .lbss section is for local .largecomm symbols.  */
  10749.           lbss_section = subseg_new (".lbss", 0);
  10750.           applicable = bfd_applicable_section_flags (stdoutput);
  10751.           bfd_set_section_flags (stdoutput, lbss_section,
  10752.                                  applicable & SEC_ALLOC);
  10753.           seg_info (lbss_section)->bss = 1;
  10754.  
  10755.           subseg_set (seg, subseg);
  10756.         }
  10757.  
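               /* Temporarily redirect common symbols to the large common
                  section and to .lbss so that s_comm_internal places them
                  there, then restore the saved pointers.  */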
  10758.       elf_com_section_ptr = &_bfd_elf_large_com_section;
  10759.       bss_section = lbss_section;
  10760.  
  10761.       s_comm_internal (0, elf_common_parse);
  10762.  
  10763.       elf_com_section_ptr = saved_com_section_ptr;
  10764.       bss_section = saved_bss_section;
  10765.     }
  10766. }
  10767. #endif /* OBJ_ELF || OBJ_MAYBE_ELF */
  10768.