From ae531041c7c5956672342f89c486a011c84f027f Mon Sep 17 00:00:00 2001
From: "H.J. Lu" <hjl.tools@gmail.com>
Date: Wed, 11 Mar 2020 09:46:19 -0700
Subject: [PATCH 1/1] i386: Generate lfence with load/indirect branch/ret
 [CVE-2020-0551]

Add 3 command-line options to generate lfence for load, indirect near
branch and ret to help mitigate:

https://www.intel.com/content/www/us/en/security-center/advisory/intel-sa-00334.html
http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2020-0551

1. -mlfence-after-load=[no|yes]:
-mlfence-after-load=yes generates lfence after load instructions.
2. -mlfence-before-indirect-branch=[none|all|memory|register]:
a. -mlfence-before-indirect-branch=all generates lfence before indirect
near branches via register and issues a warning before indirect near
branches via memory.
b. -mlfence-before-indirect-branch=memory issues a warning before
indirect near branches via memory.
c. -mlfence-before-indirect-branch=register generates lfence before
indirect near branches via register.
Note that lfence won't be generated before indirect near branches via
register with -mlfence-after-load=yes, since lfence will already be
generated after loading the branch target register.
3. -mlfence-before-ret=[none|or|not]
a. -mlfence-before-ret=or generates or with lfence before ret.
b. -mlfence-before-ret=not generates not with lfence before ret.
The emitted sequences are sketched below.
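
Illustrative sketch (added for this backport's readers, not part of the
upstream commit message; the instructions chosen are arbitrary) of the
rewriting, in AT&T syntax for 64-bit code:

    # -mlfence-after-load=yes
    movq (%rdi), %rax        =>    movq (%rdi), %rax
                                   lfence

    # -mlfence-before-indirect-branch=register
    jmp *%rax                =>    lfence
                                   jmp *%rax

    # -mlfence-before-ret=not
    ret                      =>    notq (%rsp)
                                   notq (%rsp)
                                   lfence
                                   ret

The not/not pair leaves the return address unchanged while creating a
data dependency on it; -mlfence-before-ret=or does the same with a
single orq $0, (%rsp).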

A warning will be issued and lfence won't be generated before an
indirect near branch or ret if the previous item is a prefix or a
constant directive, which may be used to hardcode an instruction, since
there is then no clear instruction boundary; an example follows.
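
For example (an illustrative case, not from the upstream commit), with
-mlfence-before-ret=or the second ret below is only warned about, since
the preceding constant directive may hide an instruction:

    ret            # becomes: orq $0, (%rsp); lfence; ret
    .byte 0xc3     # hardcoded ret; no clear instruction boundary
    ret            # warning: `constant directive' skips
                   # -mlfence-before-ret; no lfence emitted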

        * config/tc-i386.c (lfence_after_load): New.
        (lfence_before_indirect_branch_kind): New.
        (lfence_before_indirect_branch): New.
        (lfence_before_ret_kind): New.
        (lfence_before_ret): New.
        (last_insn): New.
        (load_insn_p): New.
        (insert_lfence_after): New.
        (insert_lfence_before): New.
        (md_assemble): Call insert_lfence_before and insert_lfence_after.
        Set last_insn.
        (OPTION_MLFENCE_AFTER_LOAD): New.
        (OPTION_MLFENCE_BEFORE_INDIRECT_BRANCH): New.
        (OPTION_MLFENCE_BEFORE_RET): New.
        (md_longopts): Add -mlfence-after-load=,
        -mlfence-before-indirect-branch= and -mlfence-before-ret=.
        (md_parse_option): Handle -mlfence-after-load=,
        -mlfence-before-indirect-branch= and -mlfence-before-ret=.
        (md_show_usage): Display -mlfence-after-load=,
        -mlfence-before-indirect-branch= and -mlfence-before-ret=.
        (i386_cons_align): New.
        * config/tc-i386.h (i386_cons_align): New.
        (md_cons_align): New.
        * doc/c-i386.texi: Document -mlfence-after-load=,
        -mlfence-before-indirect-branch= and -mlfence-before-ret=.

Signed-off-by: Anuj Mittal <anuj.mittal@intel.com>
Upstream-Status: Backport [https://sourceware.org/git/?p=binutils-gdb.git;a=commit;h=ae531041c7c5956672342f89c486a011c84f027f]
CVE: CVE-2020-0551
---
diff --git a/gas/config/tc-i386.c b/gas/config/tc-i386.c
index b020f39c863..09063f784b7 100644
--- a/gas/config/tc-i386.c
+++ b/gas/config/tc-i386.c
@@ -629,7 +629,29 @@ static int omit_lock_prefix = 0;
    "lock addl $0, (%{re}sp)".  */
 static int avoid_fence = 0;
 
-/* Type of the previous instruction.  */
+/* 1 if lfence should be inserted after every load.  */
+static int lfence_after_load = 0;
+
+/* Non-zero if lfence should be inserted before indirect branch.  */
+static enum lfence_before_indirect_branch_kind
+  {
+    lfence_branch_none = 0,
+    lfence_branch_register,
+    lfence_branch_memory,
+    lfence_branch_all
+  }
+lfence_before_indirect_branch;
+
+/* Non-zero if lfence should be inserted before ret.  */
+static enum lfence_before_ret_kind
+  {
+    lfence_before_ret_none = 0,
+    lfence_before_ret_not,
+    lfence_before_ret_or
+  }
+lfence_before_ret;
+
+/* Type of the previous item: an insn, a constant directive or a prefix.  */
 static struct
   {
     segT seg;
@@ -4311,6 +4333,283 @@ optimize_encoding (void)
     }
 }
 
+/* Return non-zero for load instruction.  */
+
+static int
+load_insn_p (void)
+{
+  unsigned int dest;
+  int any_vex_p = is_any_vex_encoding (&i.tm);
+  unsigned int base_opcode = i.tm.base_opcode | 1;
+
+  if (!any_vex_p)
+    {
+      /* lea  */
+      if (i.tm.base_opcode == 0x8d)
+        return 0;
+
+      /* pop  */
+      if ((i.tm.base_opcode & ~7) == 0x58
+          || (i.tm.base_opcode == 0x8f && i.tm.extension_opcode == 0))
+        return 1;
+
+      /* movs, cmps, lods, scas.  */
+      if ((i.tm.base_opcode | 0xb) == 0xaf)
+        return 1;
+
+      /* outs */
+      if (base_opcode == 0x6f)
+        return 1;
+    }
+
+  /* No memory operand.  */
+  if (!i.mem_operands)
+    return 0;
+
+  if (any_vex_p)
+    {
+      /* vldmxcsr.  */
+      if (i.tm.base_opcode == 0xae
+          && i.tm.opcode_modifier.vex
+          && i.tm.opcode_modifier.vexopcode == VEX0F
+          && i.tm.extension_opcode == 2)
+        return 1;
+    }
+  else
+    {
+      /* test, not, neg, mul, imul, div, idiv.  */
+      if ((i.tm.base_opcode == 0xf6 || i.tm.base_opcode == 0xf7)
+          && i.tm.extension_opcode != 1)
+        return 1;
+
+      /* inc, dec.  */
+      if (base_opcode == 0xff && i.tm.extension_opcode <= 1)
+        return 1;
+
+      /* add, or, adc, sbb, and, sub, xor, cmp.  */
+      if (i.tm.base_opcode >= 0x80 && i.tm.base_opcode <= 0x83)
+        return 1;
+
+      /* bt, bts, btr, btc.  */
+      if (i.tm.base_opcode == 0xfba
+          && (i.tm.extension_opcode >= 4 && i.tm.extension_opcode <= 7))
+        return 1;
+
+      /* rol, ror, rcl, rcr, shl/sal, shr, sar.  */
+      if ((base_opcode == 0xc1
+           || (i.tm.base_opcode >= 0xd0 && i.tm.base_opcode <= 0xd3))
+          && i.tm.extension_opcode != 6)
+        return 1;
+
+      /* cmpxchg8b, cmpxchg16b, xrstors.  */
+      if (i.tm.base_opcode == 0xfc7
+          && (i.tm.extension_opcode == 1 || i.tm.extension_opcode == 3))
+        return 1;
+
+      /* fxrstor, ldmxcsr, xrstor.  */
+      if (i.tm.base_opcode == 0xfae
+          && (i.tm.extension_opcode == 1
+              || i.tm.extension_opcode == 2
+              || i.tm.extension_opcode == 5))
+        return 1;
+
+      /* lgdt, lidt, lmsw.  */
+      if (i.tm.base_opcode == 0xf01
+          && (i.tm.extension_opcode == 2
+              || i.tm.extension_opcode == 3
+              || i.tm.extension_opcode == 6))
+        return 1;
+
+      /* vmptrld */
+      if (i.tm.base_opcode == 0xfc7
+          && i.tm.extension_opcode == 6)
+        return 1;
+
+      /* Check for x87 instructions.  */
+      if (i.tm.base_opcode >= 0xd8 && i.tm.base_opcode <= 0xdf)
+        {
+          /* Skip fst, fstp, fstenv, fstcw.  */
+          if (i.tm.base_opcode == 0xd9
+              && (i.tm.extension_opcode == 2
+                  || i.tm.extension_opcode == 3
+                  || i.tm.extension_opcode == 6
+                  || i.tm.extension_opcode == 7))
+            return 0;
+
+          /* Skip fisttp, fist, fistp, fstp.  */
+          if (i.tm.base_opcode == 0xdb
+              && (i.tm.extension_opcode == 1
+                  || i.tm.extension_opcode == 2
+                  || i.tm.extension_opcode == 3
+                  || i.tm.extension_opcode == 7))
+            return 0;
+
+          /* Skip fisttp, fst, fstp, fsave, fstsw.  */
+          if (i.tm.base_opcode == 0xdd
+              && (i.tm.extension_opcode == 1
+                  || i.tm.extension_opcode == 2
+                  || i.tm.extension_opcode == 3
+                  || i.tm.extension_opcode == 6
+                  || i.tm.extension_opcode == 7))
+            return 0;
+
+          /* Skip fisttp, fist, fistp, fbstp, fistp.  */
+          if (i.tm.base_opcode == 0xdf
+              && (i.tm.extension_opcode == 1
+                  || i.tm.extension_opcode == 2
+                  || i.tm.extension_opcode == 3
+                  || i.tm.extension_opcode == 6
+                  || i.tm.extension_opcode == 7))
+            return 0;
+
+          return 1;
+        }
+    }
+
+  dest = i.operands - 1;
+
+  /* Check fake imm8 operand and 3 source operands.  */
+  if ((i.tm.opcode_modifier.immext
+       || i.tm.opcode_modifier.vexsources == VEX3SOURCES)
+      && i.types[dest].bitfield.imm8)
+    dest--;
+
+  /* add, or, adc, sbb, and, sub, xor, cmp, test, xchg, xadd  */
+  if (!any_vex_p
+      && (base_opcode == 0x1
+          || base_opcode == 0x9
+          || base_opcode == 0x11
+          || base_opcode == 0x19
+          || base_opcode == 0x21
+          || base_opcode == 0x29
+          || base_opcode == 0x31
+          || base_opcode == 0x39
+          || (i.tm.base_opcode >= 0x84 && i.tm.base_opcode <= 0x87)
+          || base_opcode == 0xfc1))
+    return 1;
+
+  /* Check for load instruction.  */
+  return (i.types[dest].bitfield.class != ClassNone
+          || i.types[dest].bitfield.instance == Accum);
+}
+
+/* Output lfence, 0xfaee8, after instruction.  */
+
+static void
+insert_lfence_after (void)
+{
+  if (lfence_after_load && load_insn_p ())
+    {
+      char *p = frag_more (3);
+      *p++ = 0xf;
+      *p++ = 0xae;
+      *p = 0xe8;
+    }
+}
+
+/* Output lfence, 0xfaee8, before instruction.  */
+
+static void
+insert_lfence_before (void)
+{
+  char *p;
+
+  if (is_any_vex_encoding (&i.tm))
+    return;
+
+  if (i.tm.base_opcode == 0xff
+      && (i.tm.extension_opcode == 2 || i.tm.extension_opcode == 4))
+    {
+      /* Insert lfence before indirect branch if needed.  */
+
+      if (lfence_before_indirect_branch == lfence_branch_none)
+        return;
+
+      if (i.operands != 1)
+        abort ();
+
+      if (i.reg_operands == 1)
+        {
+          /* Indirect branch via register.  Don't insert lfence with
+             -mlfence-after-load=yes.  */
+          if (lfence_after_load
+              || lfence_before_indirect_branch == lfence_branch_memory)
+            return;
+        }
+      else if (i.mem_operands == 1
+               && lfence_before_indirect_branch != lfence_branch_register)
+        {
+          as_warn (_("indirect `%s` with memory operand should be avoided"),
+                   i.tm.name);
+          return;
+        }
+      else
+        return;
+
+      if (last_insn.kind != last_insn_other
+          && last_insn.seg == now_seg)
+        {
+          as_warn_where (last_insn.file, last_insn.line,
+                         _("`%s` skips -mlfence-before-indirect-branch on `%s`"),
+                         last_insn.name, i.tm.name);
+          return;
+        }
+
+      p = frag_more (3);
+      *p++ = 0xf;
+      *p++ = 0xae;
+      *p = 0xe8;
+      return;
+    }
+
+  /* Output or/not and lfence before ret.  */
+  if (lfence_before_ret != lfence_before_ret_none
+      && (i.tm.base_opcode == 0xc2
+          || i.tm.base_opcode == 0xc3
+          || i.tm.base_opcode == 0xca
+          || i.tm.base_opcode == 0xcb))
+    {
+      if (last_insn.kind != last_insn_other
+          && last_insn.seg == now_seg)
+        {
+          as_warn_where (last_insn.file, last_insn.line,
+                         _("`%s` skips -mlfence-before-ret on `%s`"),
+                         last_insn.name, i.tm.name);
+          return;
+        }
+      if (lfence_before_ret == lfence_before_ret_or)
+        {
+          /* orl: 0x830c2400.  */
+          p = frag_more ((flag_code == CODE_64BIT ? 1 : 0) + 4 + 3);
+          if (flag_code == CODE_64BIT)
+            *p++ = 0x48;
+          *p++ = 0x83;
+          *p++ = 0xc;
+          *p++ = 0x24;
+          *p++ = 0x0;
+        }
+      else
+        {
+          p = frag_more ((flag_code == CODE_64BIT ? 2 : 0) + 6 + 3);
+          /* notl: 0xf71424.  */
+          if (flag_code == CODE_64BIT)
+            *p++ = 0x48;
+          *p++ = 0xf7;
+          *p++ = 0x14;
+          *p++ = 0x24;
+          /* notl: 0xf71424.  */
+          if (flag_code == CODE_64BIT)
+            *p++ = 0x48;
+          *p++ = 0xf7;
+          *p++ = 0x14;
+          *p++ = 0x24;
+        }
+      *p++ = 0xf;
+      *p++ = 0xae;
+      *p = 0xe8;
+    }
+}
+
 /* This is the guts of the machine-dependent assembler.  LINE points to a
    machine dependent instruction.  This function is supposed to emit
    the frags/bytes it assembles to.  */
@@ -4628,9 +4927,13 @@ md_assemble (char *line)
   if (i.rex != 0)
     add_prefix (REX_OPCODE | i.rex);
 
+  insert_lfence_before ();
+
   /* We are ready to output the insn.  */
   output_insn ();
 
+  insert_lfence_after ();
+
   last_insn.seg = now_seg;
 
   if (i.tm.opcode_modifier.isprefix)
@@ -12250,6 +12553,9 @@ const char *md_shortopts = "qnO::";
 #define OPTION_MALIGN_BRANCH_PREFIX_SIZE (OPTION_MD_BASE + 28)
 #define OPTION_MALIGN_BRANCH (OPTION_MD_BASE + 29)
 #define OPTION_MBRANCHES_WITH_32B_BOUNDARIES (OPTION_MD_BASE + 30)
+#define OPTION_MLFENCE_AFTER_LOAD (OPTION_MD_BASE + 31)
+#define OPTION_MLFENCE_BEFORE_INDIRECT_BRANCH (OPTION_MD_BASE + 32)
+#define OPTION_MLFENCE_BEFORE_RET (OPTION_MD_BASE + 33)
 
 struct option md_longopts[] =
 {
@@ -12289,6 +12595,10 @@ struct option md_longopts[] =
   {"malign-branch-prefix-size", required_argument, NULL, OPTION_MALIGN_BRANCH_PREFIX_SIZE},
   {"malign-branch", required_argument, NULL, OPTION_MALIGN_BRANCH},
   {"mbranches-within-32B-boundaries", no_argument, NULL, OPTION_MBRANCHES_WITH_32B_BOUNDARIES},
+  {"mlfence-after-load", required_argument, NULL, OPTION_MLFENCE_AFTER_LOAD},
+  {"mlfence-before-indirect-branch", required_argument, NULL,
+   OPTION_MLFENCE_BEFORE_INDIRECT_BRANCH},
+  {"mlfence-before-ret", required_argument, NULL, OPTION_MLFENCE_BEFORE_RET},
   {"mamd64", no_argument, NULL, OPTION_MAMD64},
   {"mintel64", no_argument, NULL, OPTION_MINTEL64},
   {NULL, no_argument, NULL, 0}
@@ -12668,6 +12978,41 @@ md_parse_option (int c, const char *arg)
         as_fatal (_("invalid -mfence-as-lock-add= option: `%s'"), arg);
       break;
 
+    case OPTION_MLFENCE_AFTER_LOAD:
+      if (strcasecmp (arg, "yes") == 0)
+        lfence_after_load = 1;
+      else if (strcasecmp (arg, "no") == 0)
+        lfence_after_load = 0;
+      else
+        as_fatal (_("invalid -mlfence-after-load= option: `%s'"), arg);
+      break;
+
+    case OPTION_MLFENCE_BEFORE_INDIRECT_BRANCH:
+      if (strcasecmp (arg, "all") == 0)
+        lfence_before_indirect_branch = lfence_branch_all;
+      else if (strcasecmp (arg, "memory") == 0)
+        lfence_before_indirect_branch = lfence_branch_memory;
+      else if (strcasecmp (arg, "register") == 0)
+        lfence_before_indirect_branch = lfence_branch_register;
+      else if (strcasecmp (arg, "none") == 0)
+        lfence_before_indirect_branch = lfence_branch_none;
+      else
+        as_fatal (_("invalid -mlfence-before-indirect-branch= option: `%s'"),
+                  arg);
+      break;
+
+    case OPTION_MLFENCE_BEFORE_RET:
+      if (strcasecmp (arg, "or") == 0)
+        lfence_before_ret = lfence_before_ret_or;
+      else if (strcasecmp (arg, "not") == 0)
+        lfence_before_ret = lfence_before_ret_not;
+      else if (strcasecmp (arg, "none") == 0)
+        lfence_before_ret = lfence_before_ret_none;
+      else
+        as_fatal (_("invalid -mlfence-before-ret= option: `%s'"),
+                  arg);
+      break;
+
     case OPTION_MRELAX_RELOCATIONS:
       if (strcasecmp (arg, "yes") == 0)
         generate_relax_relocations = 1;
@@ -13025,6 +13370,15 @@ md_show_usage (FILE *stream)
   -mbranches-within-32B-boundaries\n\
                           align branches within 32 byte boundary\n"));
   fprintf (stream, _("\
+  -mlfence-after-load=[no|yes] (default: no)\n\
+                          generate lfence after load\n"));
+  fprintf (stream, _("\
+  -mlfence-before-indirect-branch=[none|all|register|memory] (default: none)\n\
+                          generate lfence before indirect near branch\n"));
+  fprintf (stream, _("\
+  -mlfence-before-ret=[none|or|not] (default: none)\n\
+                          generate lfence before ret\n"));
+  fprintf (stream, _("\
   -mamd64                 accept only AMD64 ISA [default]\n"));
   fprintf (stream, _("\
   -mintel64               accept only Intel64 ISA\n"));
@@ -13254,6 +13608,16 @@ i386_cons_align (int ignore ATTRIBUTE_UNUSED)
       last_insn.kind = last_insn_directive;
       last_insn.name = "constant directive";
       last_insn.file = as_where (&last_insn.line);
+      if (lfence_before_ret != lfence_before_ret_none)
+        {
+          if (lfence_before_indirect_branch != lfence_branch_none)
+            as_warn (_("constant directive skips -mlfence-before-ret "
+                       "and -mlfence-before-indirect-branch"));
+          else
+            as_warn (_("constant directive skips -mlfence-before-ret"));
+        }
+      else if (lfence_before_indirect_branch != lfence_branch_none)
+        as_warn (_("constant directive skips -mlfence-before-indirect-branch"));
     }
 }
 
diff --git a/gas/doc/c-i386.texi b/gas/doc/c-i386.texi
index c536759cb38..1dd99f91bb0 100644
--- a/gas/doc/c-i386.texi
+++ b/gas/doc/c-i386.texi
@@ -464,6 +464,49 @@ on an instruction.  It is equivalent to
 @option{-malign-branch-prefix-size=5}.
 The default doesn't align branches.
 
+@cindex @samp{-mlfence-after-load=} option, i386
+@cindex @samp{-mlfence-after-load=} option, x86-64
+@item -mlfence-after-load=@var{no}
+@itemx -mlfence-after-load=@var{yes}
+These options control whether the assembler should generate lfence
+after load instructions.  @option{-mlfence-after-load=@var{yes}} will
+generate lfence.  @option{-mlfence-after-load=@var{no}} will not generate
+lfence, which is the default.
+
+@cindex @samp{-mlfence-before-indirect-branch=} option, i386
+@cindex @samp{-mlfence-before-indirect-branch=} option, x86-64
+@item -mlfence-before-indirect-branch=@var{none}
+@item -mlfence-before-indirect-branch=@var{all}
+@item -mlfence-before-indirect-branch=@var{register}
+@itemx -mlfence-before-indirect-branch=@var{memory}
+These options control whether the assembler should generate lfence
+before indirect near branch instructions.
+@option{-mlfence-before-indirect-branch=@var{all}} will generate lfence
+before indirect near branch via register and issue a warning before
+indirect near branch via memory.
+@option{-mlfence-before-indirect-branch=@var{register}} will generate
+lfence before indirect near branch via register.
+@option{-mlfence-before-indirect-branch=@var{memory}} will issue a
+warning before indirect near branch via memory.
+@option{-mlfence-before-indirect-branch=@var{none}} will not generate
+lfence nor issue a warning, which is the default.  Note that lfence won't
+be generated before indirect near branch via register with
+@option{-mlfence-after-load=@var{yes}} since lfence will be generated
+after loading the branch target register.
+
+@cindex @samp{-mlfence-before-ret=} option, i386
+@cindex @samp{-mlfence-before-ret=} option, x86-64
+@item -mlfence-before-ret=@var{none}
+@item -mlfence-before-ret=@var{or}
+@itemx -mlfence-before-ret=@var{not}
+These options control whether the assembler should generate lfence
+before ret.  @option{-mlfence-before-ret=@var{or}} will generate
+an or instruction with lfence.
+@option{-mlfence-before-ret=@var{not}} will generate a not instruction
+with lfence.
+@option{-mlfence-before-ret=@var{none}} will not generate lfence,
+which is the default.
+
 @cindex @samp{-mx86-used-note=} option, i386
 @cindex @samp{-mx86-used-note=} option, x86-64
 @item -mx86-used-note=@var{no}
-- 
2.18.2
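
A quick way to sanity-check the backport (an illustrative recipe, not
part of the patch; file names are made up) is to assemble a trivial
input with one of the new options and disassemble the result:

    # test.s -- assemble and inspect with:
    #   as -mlfence-after-load=yes -o test.o test.s
    #   objdump -d test.o    (expect lfence right after the load)
    movq    (%rsp), %rax
    ret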