Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

powerpc: Add vr save/restore functions

GCC 4.8 now generates out-of-line vr save/restore functions when
optimizing for size. They are needed for the raid6 altivec support.

Signed-off-by: Andreas Schwab <schwab@linux-m68k.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>

Authored by Andreas Schwab; committed by Benjamin Herrenschmidt.

Commit: 8fe9c93e (parent: dece8ada)

+192 -2
+186
arch/powerpc/lib/crtsavres.S
··· 231 231 mr 1,11 232 232 blr 233 233 234 + #ifdef CONFIG_ALTIVEC 235 + /* Called with r0 pointing just beyond the end of the vector save area. */ 236 + 237 + _GLOBAL(_savevr_20) 238 + li r11,-192 239 + stvx vr20,r11,r0 240 + _GLOBAL(_savevr_21) 241 + li r11,-176 242 + stvx vr21,r11,r0 243 + _GLOBAL(_savevr_22) 244 + li r11,-160 245 + stvx vr22,r11,r0 246 + _GLOBAL(_savevr_23) 247 + li r11,-144 248 + stvx vr23,r11,r0 249 + _GLOBAL(_savevr_24) 250 + li r11,-128 251 + stvx vr24,r11,r0 252 + _GLOBAL(_savevr_25) 253 + li r11,-112 254 + stvx vr25,r11,r0 255 + _GLOBAL(_savevr_26) 256 + li r11,-96 257 + stvx vr26,r11,r0 258 + _GLOBAL(_savevr_27) 259 + li r11,-80 260 + stvx vr27,r11,r0 261 + _GLOBAL(_savevr_28) 262 + li r11,-64 263 + stvx vr28,r11,r0 264 + _GLOBAL(_savevr_29) 265 + li r11,-48 266 + stvx vr29,r11,r0 267 + _GLOBAL(_savevr_30) 268 + li r11,-32 269 + stvx vr30,r11,r0 270 + _GLOBAL(_savevr_31) 271 + li r11,-16 272 + stvx vr31,r11,r0 273 + blr 274 + 275 + _GLOBAL(_restvr_20) 276 + li r11,-192 277 + lvx vr20,r11,r0 278 + _GLOBAL(_restvr_21) 279 + li r11,-176 280 + lvx vr21,r11,r0 281 + _GLOBAL(_restvr_22) 282 + li r11,-160 283 + lvx vr22,r11,r0 284 + _GLOBAL(_restvr_23) 285 + li r11,-144 286 + lvx vr23,r11,r0 287 + _GLOBAL(_restvr_24) 288 + li r11,-128 289 + lvx vr24,r11,r0 290 + _GLOBAL(_restvr_25) 291 + li r11,-112 292 + lvx vr25,r11,r0 293 + _GLOBAL(_restvr_26) 294 + li r11,-96 295 + lvx vr26,r11,r0 296 + _GLOBAL(_restvr_27) 297 + li r11,-80 298 + lvx vr27,r11,r0 299 + _GLOBAL(_restvr_28) 300 + li r11,-64 301 + lvx vr28,r11,r0 302 + _GLOBAL(_restvr_29) 303 + li r11,-48 304 + lvx vr29,r11,r0 305 + _GLOBAL(_restvr_30) 306 + li r11,-32 307 + lvx vr30,r11,r0 308 + _GLOBAL(_restvr_31) 309 + li r11,-16 310 + lvx vr31,r11,r0 311 + blr 312 + 313 + #endif /* CONFIG_ALTIVEC */ 314 + 234 315 #else /* CONFIG_PPC64 */ 235 316 236 317 .section ".text.save.restore","ax",@progbits ··· 436 355 ld r31,-8(r1) 437 356 mtlr r0 438 357 blr 358 + 359 + #ifdef CONFIG_ALTIVEC 360 + 
/* Called with r0 pointing just beyond the end of the vector save area. */ 361 + 362 + .globl _savevr_20 363 + _savevr_20: 364 + li r12,-192 365 + stvx vr20,r12,r0 366 + .globl _savevr_21 367 + _savevr_21: 368 + li r12,-176 369 + stvx vr21,r12,r0 370 + .globl _savevr_22 371 + _savevr_22: 372 + li r12,-160 373 + stvx vr22,r12,r0 374 + .globl _savevr_23 375 + _savevr_23: 376 + li r12,-144 377 + stvx vr23,r12,r0 378 + .globl _savevr_24 379 + _savevr_24: 380 + li r12,-128 381 + stvx vr24,r12,r0 382 + .globl _savevr_25 383 + _savevr_25: 384 + li r12,-112 385 + stvx vr25,r12,r0 386 + .globl _savevr_26 387 + _savevr_26: 388 + li r12,-96 389 + stvx vr26,r12,r0 390 + .globl _savevr_27 391 + _savevr_27: 392 + li r12,-80 393 + stvx vr27,r12,r0 394 + .globl _savevr_28 395 + _savevr_28: 396 + li r12,-64 397 + stvx vr28,r12,r0 398 + .globl _savevr_29 399 + _savevr_29: 400 + li r12,-48 401 + stvx vr29,r12,r0 402 + .globl _savevr_30 403 + _savevr_30: 404 + li r12,-32 405 + stvx vr30,r12,r0 406 + .globl _savevr_31 407 + _savevr_31: 408 + li r12,-16 409 + stvx vr31,r12,r0 410 + blr 411 + 412 + .globl _restvr_20 413 + _restvr_20: 414 + li r12,-192 415 + lvx vr20,r12,r0 416 + .globl _restvr_21 417 + _restvr_21: 418 + li r12,-176 419 + lvx vr21,r12,r0 420 + .globl _restvr_22 421 + _restvr_22: 422 + li r12,-160 423 + lvx vr22,r12,r0 424 + .globl _restvr_23 425 + _restvr_23: 426 + li r12,-144 427 + lvx vr23,r12,r0 428 + .globl _restvr_24 429 + _restvr_24: 430 + li r12,-128 431 + lvx vr24,r12,r0 432 + .globl _restvr_25 433 + _restvr_25: 434 + li r12,-112 435 + lvx vr25,r12,r0 436 + .globl _restvr_26 437 + _restvr_26: 438 + li r12,-96 439 + lvx vr26,r12,r0 440 + .globl _restvr_27 441 + _restvr_27: 442 + li r12,-80 443 + lvx vr27,r12,r0 444 + .globl _restvr_28 445 + _restvr_28: 446 + li r12,-64 447 + lvx vr28,r12,r0 448 + .globl _restvr_29 449 + _restvr_29: 450 + li r12,-48 451 + lvx vr29,r12,r0 452 + .globl _restvr_30 453 + _restvr_30: 454 + li r12,-32 455 + lvx vr30,r12,r0 456 + .globl 
_restvr_31 457 + _restvr_31: 458 + li r12,-16 459 + lvx vr31,r12,r0 460 + blr 461 + 462 + #endif /* CONFIG_ALTIVEC */ 439 463 440 464 #endif /* CONFIG_PPC64 */ 441 465
+6 -2
scripts/mod/modpost.c
··· 584 584 if (strncmp(symname, "_restgpr_", sizeof("_restgpr_") - 1) == 0 || 585 585 strncmp(symname, "_savegpr_", sizeof("_savegpr_") - 1) == 0 || 586 586 strncmp(symname, "_rest32gpr_", sizeof("_rest32gpr_") - 1) == 0 || 587 - strncmp(symname, "_save32gpr_", sizeof("_save32gpr_") - 1) == 0) 587 + strncmp(symname, "_save32gpr_", sizeof("_save32gpr_") - 1) == 0 || 588 + strncmp(symname, "_restvr_", sizeof("_restvr_") - 1) == 0 || 589 + strncmp(symname, "_savevr_", sizeof("_savevr_") - 1) == 0) 588 590 return 1; 589 591 if (info->hdr->e_machine == EM_PPC64) 590 592 /* Special register function linked on all modules during final link of .ko */ 591 593 if (strncmp(symname, "_restgpr0_", sizeof("_restgpr0_") - 1) == 0 || 592 - strncmp(symname, "_savegpr0_", sizeof("_savegpr0_") - 1) == 0) 594 + strncmp(symname, "_savegpr0_", sizeof("_savegpr0_") - 1) == 0 || 595 + strncmp(symname, "_restvr_", sizeof("_restvr_") - 1) == 0 || 596 + strncmp(symname, "_savevr_", sizeof("_savevr_") - 1) == 0) 593 597 return 1; 594 598 /* Do not ignore this symbol */ 595 599 return 0;