Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

locking/atomic: add arch_atomic_long*()

Now that all architectures provide arch_{atomic,atomic64}_*(), we can
build arch_atomic_long_*() atop these, which can be safely used in
noinstr code. The regular atomic_long_*() wrappers are built atop these,
as we do for {atomic,atomic64}_*() atop arch_{atomic,atomic64}_*().

We don't provide arch_* versions of the cond_read*() variants, as we
don't have arch_* versions of the underlying atomic/atomic64 functions
(nor the smp_cond_load*() helpers these are typically based on).

Note that the headers in this patch under include/linux/atomic/ are
generated by the scripts in scripts/atomic/.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lore.kernel.org/r/20210713105253.7615-5-mark.rutland@arm.com

Authored by Mark Rutland; committed by Peter Zijlstra (commit 67d1b0de, parent e3d18cee).

+916 -333
+1 -1
include/linux/atomic.h
··· 78 78 }) 79 79 80 80 #include <linux/atomic/atomic-arch-fallback.h> 81 - #include <linux/atomic/atomic-instrumented.h> 82 81 #include <linux/atomic/atomic-long.h> 82 + #include <linux/atomic/atomic-instrumented.h> 83 83 84 84 #endif /* _LINUX_ATOMIC_H */
+579 -1
include/linux/atomic/atomic-instrumented.h
··· 1177 1177 return arch_atomic64_dec_if_positive(v); 1178 1178 } 1179 1179 1180 + static __always_inline long 1181 + atomic_long_read(const atomic_long_t *v) 1182 + { 1183 + instrument_atomic_read(v, sizeof(*v)); 1184 + return arch_atomic_long_read(v); 1185 + } 1186 + 1187 + static __always_inline long 1188 + atomic_long_read_acquire(const atomic_long_t *v) 1189 + { 1190 + instrument_atomic_read(v, sizeof(*v)); 1191 + return arch_atomic_long_read_acquire(v); 1192 + } 1193 + 1194 + static __always_inline void 1195 + atomic_long_set(atomic_long_t *v, long i) 1196 + { 1197 + instrument_atomic_write(v, sizeof(*v)); 1198 + arch_atomic_long_set(v, i); 1199 + } 1200 + 1201 + static __always_inline void 1202 + atomic_long_set_release(atomic_long_t *v, long i) 1203 + { 1204 + instrument_atomic_write(v, sizeof(*v)); 1205 + arch_atomic_long_set_release(v, i); 1206 + } 1207 + 1208 + static __always_inline void 1209 + atomic_long_add(long i, atomic_long_t *v) 1210 + { 1211 + instrument_atomic_read_write(v, sizeof(*v)); 1212 + arch_atomic_long_add(i, v); 1213 + } 1214 + 1215 + static __always_inline long 1216 + atomic_long_add_return(long i, atomic_long_t *v) 1217 + { 1218 + instrument_atomic_read_write(v, sizeof(*v)); 1219 + return arch_atomic_long_add_return(i, v); 1220 + } 1221 + 1222 + static __always_inline long 1223 + atomic_long_add_return_acquire(long i, atomic_long_t *v) 1224 + { 1225 + instrument_atomic_read_write(v, sizeof(*v)); 1226 + return arch_atomic_long_add_return_acquire(i, v); 1227 + } 1228 + 1229 + static __always_inline long 1230 + atomic_long_add_return_release(long i, atomic_long_t *v) 1231 + { 1232 + instrument_atomic_read_write(v, sizeof(*v)); 1233 + return arch_atomic_long_add_return_release(i, v); 1234 + } 1235 + 1236 + static __always_inline long 1237 + atomic_long_add_return_relaxed(long i, atomic_long_t *v) 1238 + { 1239 + instrument_atomic_read_write(v, sizeof(*v)); 1240 + return arch_atomic_long_add_return_relaxed(i, v); 1241 + } 1242 + 1243 + 
static __always_inline long 1244 + atomic_long_fetch_add(long i, atomic_long_t *v) 1245 + { 1246 + instrument_atomic_read_write(v, sizeof(*v)); 1247 + return arch_atomic_long_fetch_add(i, v); 1248 + } 1249 + 1250 + static __always_inline long 1251 + atomic_long_fetch_add_acquire(long i, atomic_long_t *v) 1252 + { 1253 + instrument_atomic_read_write(v, sizeof(*v)); 1254 + return arch_atomic_long_fetch_add_acquire(i, v); 1255 + } 1256 + 1257 + static __always_inline long 1258 + atomic_long_fetch_add_release(long i, atomic_long_t *v) 1259 + { 1260 + instrument_atomic_read_write(v, sizeof(*v)); 1261 + return arch_atomic_long_fetch_add_release(i, v); 1262 + } 1263 + 1264 + static __always_inline long 1265 + atomic_long_fetch_add_relaxed(long i, atomic_long_t *v) 1266 + { 1267 + instrument_atomic_read_write(v, sizeof(*v)); 1268 + return arch_atomic_long_fetch_add_relaxed(i, v); 1269 + } 1270 + 1271 + static __always_inline void 1272 + atomic_long_sub(long i, atomic_long_t *v) 1273 + { 1274 + instrument_atomic_read_write(v, sizeof(*v)); 1275 + arch_atomic_long_sub(i, v); 1276 + } 1277 + 1278 + static __always_inline long 1279 + atomic_long_sub_return(long i, atomic_long_t *v) 1280 + { 1281 + instrument_atomic_read_write(v, sizeof(*v)); 1282 + return arch_atomic_long_sub_return(i, v); 1283 + } 1284 + 1285 + static __always_inline long 1286 + atomic_long_sub_return_acquire(long i, atomic_long_t *v) 1287 + { 1288 + instrument_atomic_read_write(v, sizeof(*v)); 1289 + return arch_atomic_long_sub_return_acquire(i, v); 1290 + } 1291 + 1292 + static __always_inline long 1293 + atomic_long_sub_return_release(long i, atomic_long_t *v) 1294 + { 1295 + instrument_atomic_read_write(v, sizeof(*v)); 1296 + return arch_atomic_long_sub_return_release(i, v); 1297 + } 1298 + 1299 + static __always_inline long 1300 + atomic_long_sub_return_relaxed(long i, atomic_long_t *v) 1301 + { 1302 + instrument_atomic_read_write(v, sizeof(*v)); 1303 + return arch_atomic_long_sub_return_relaxed(i, v); 
1304 + } 1305 + 1306 + static __always_inline long 1307 + atomic_long_fetch_sub(long i, atomic_long_t *v) 1308 + { 1309 + instrument_atomic_read_write(v, sizeof(*v)); 1310 + return arch_atomic_long_fetch_sub(i, v); 1311 + } 1312 + 1313 + static __always_inline long 1314 + atomic_long_fetch_sub_acquire(long i, atomic_long_t *v) 1315 + { 1316 + instrument_atomic_read_write(v, sizeof(*v)); 1317 + return arch_atomic_long_fetch_sub_acquire(i, v); 1318 + } 1319 + 1320 + static __always_inline long 1321 + atomic_long_fetch_sub_release(long i, atomic_long_t *v) 1322 + { 1323 + instrument_atomic_read_write(v, sizeof(*v)); 1324 + return arch_atomic_long_fetch_sub_release(i, v); 1325 + } 1326 + 1327 + static __always_inline long 1328 + atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v) 1329 + { 1330 + instrument_atomic_read_write(v, sizeof(*v)); 1331 + return arch_atomic_long_fetch_sub_relaxed(i, v); 1332 + } 1333 + 1334 + static __always_inline void 1335 + atomic_long_inc(atomic_long_t *v) 1336 + { 1337 + instrument_atomic_read_write(v, sizeof(*v)); 1338 + arch_atomic_long_inc(v); 1339 + } 1340 + 1341 + static __always_inline long 1342 + atomic_long_inc_return(atomic_long_t *v) 1343 + { 1344 + instrument_atomic_read_write(v, sizeof(*v)); 1345 + return arch_atomic_long_inc_return(v); 1346 + } 1347 + 1348 + static __always_inline long 1349 + atomic_long_inc_return_acquire(atomic_long_t *v) 1350 + { 1351 + instrument_atomic_read_write(v, sizeof(*v)); 1352 + return arch_atomic_long_inc_return_acquire(v); 1353 + } 1354 + 1355 + static __always_inline long 1356 + atomic_long_inc_return_release(atomic_long_t *v) 1357 + { 1358 + instrument_atomic_read_write(v, sizeof(*v)); 1359 + return arch_atomic_long_inc_return_release(v); 1360 + } 1361 + 1362 + static __always_inline long 1363 + atomic_long_inc_return_relaxed(atomic_long_t *v) 1364 + { 1365 + instrument_atomic_read_write(v, sizeof(*v)); 1366 + return arch_atomic_long_inc_return_relaxed(v); 1367 + } 1368 + 1369 + static 
__always_inline long 1370 + atomic_long_fetch_inc(atomic_long_t *v) 1371 + { 1372 + instrument_atomic_read_write(v, sizeof(*v)); 1373 + return arch_atomic_long_fetch_inc(v); 1374 + } 1375 + 1376 + static __always_inline long 1377 + atomic_long_fetch_inc_acquire(atomic_long_t *v) 1378 + { 1379 + instrument_atomic_read_write(v, sizeof(*v)); 1380 + return arch_atomic_long_fetch_inc_acquire(v); 1381 + } 1382 + 1383 + static __always_inline long 1384 + atomic_long_fetch_inc_release(atomic_long_t *v) 1385 + { 1386 + instrument_atomic_read_write(v, sizeof(*v)); 1387 + return arch_atomic_long_fetch_inc_release(v); 1388 + } 1389 + 1390 + static __always_inline long 1391 + atomic_long_fetch_inc_relaxed(atomic_long_t *v) 1392 + { 1393 + instrument_atomic_read_write(v, sizeof(*v)); 1394 + return arch_atomic_long_fetch_inc_relaxed(v); 1395 + } 1396 + 1397 + static __always_inline void 1398 + atomic_long_dec(atomic_long_t *v) 1399 + { 1400 + instrument_atomic_read_write(v, sizeof(*v)); 1401 + arch_atomic_long_dec(v); 1402 + } 1403 + 1404 + static __always_inline long 1405 + atomic_long_dec_return(atomic_long_t *v) 1406 + { 1407 + instrument_atomic_read_write(v, sizeof(*v)); 1408 + return arch_atomic_long_dec_return(v); 1409 + } 1410 + 1411 + static __always_inline long 1412 + atomic_long_dec_return_acquire(atomic_long_t *v) 1413 + { 1414 + instrument_atomic_read_write(v, sizeof(*v)); 1415 + return arch_atomic_long_dec_return_acquire(v); 1416 + } 1417 + 1418 + static __always_inline long 1419 + atomic_long_dec_return_release(atomic_long_t *v) 1420 + { 1421 + instrument_atomic_read_write(v, sizeof(*v)); 1422 + return arch_atomic_long_dec_return_release(v); 1423 + } 1424 + 1425 + static __always_inline long 1426 + atomic_long_dec_return_relaxed(atomic_long_t *v) 1427 + { 1428 + instrument_atomic_read_write(v, sizeof(*v)); 1429 + return arch_atomic_long_dec_return_relaxed(v); 1430 + } 1431 + 1432 + static __always_inline long 1433 + atomic_long_fetch_dec(atomic_long_t *v) 1434 + { 
1435 + instrument_atomic_read_write(v, sizeof(*v)); 1436 + return arch_atomic_long_fetch_dec(v); 1437 + } 1438 + 1439 + static __always_inline long 1440 + atomic_long_fetch_dec_acquire(atomic_long_t *v) 1441 + { 1442 + instrument_atomic_read_write(v, sizeof(*v)); 1443 + return arch_atomic_long_fetch_dec_acquire(v); 1444 + } 1445 + 1446 + static __always_inline long 1447 + atomic_long_fetch_dec_release(atomic_long_t *v) 1448 + { 1449 + instrument_atomic_read_write(v, sizeof(*v)); 1450 + return arch_atomic_long_fetch_dec_release(v); 1451 + } 1452 + 1453 + static __always_inline long 1454 + atomic_long_fetch_dec_relaxed(atomic_long_t *v) 1455 + { 1456 + instrument_atomic_read_write(v, sizeof(*v)); 1457 + return arch_atomic_long_fetch_dec_relaxed(v); 1458 + } 1459 + 1460 + static __always_inline void 1461 + atomic_long_and(long i, atomic_long_t *v) 1462 + { 1463 + instrument_atomic_read_write(v, sizeof(*v)); 1464 + arch_atomic_long_and(i, v); 1465 + } 1466 + 1467 + static __always_inline long 1468 + atomic_long_fetch_and(long i, atomic_long_t *v) 1469 + { 1470 + instrument_atomic_read_write(v, sizeof(*v)); 1471 + return arch_atomic_long_fetch_and(i, v); 1472 + } 1473 + 1474 + static __always_inline long 1475 + atomic_long_fetch_and_acquire(long i, atomic_long_t *v) 1476 + { 1477 + instrument_atomic_read_write(v, sizeof(*v)); 1478 + return arch_atomic_long_fetch_and_acquire(i, v); 1479 + } 1480 + 1481 + static __always_inline long 1482 + atomic_long_fetch_and_release(long i, atomic_long_t *v) 1483 + { 1484 + instrument_atomic_read_write(v, sizeof(*v)); 1485 + return arch_atomic_long_fetch_and_release(i, v); 1486 + } 1487 + 1488 + static __always_inline long 1489 + atomic_long_fetch_and_relaxed(long i, atomic_long_t *v) 1490 + { 1491 + instrument_atomic_read_write(v, sizeof(*v)); 1492 + return arch_atomic_long_fetch_and_relaxed(i, v); 1493 + } 1494 + 1495 + static __always_inline void 1496 + atomic_long_andnot(long i, atomic_long_t *v) 1497 + { 1498 + 
instrument_atomic_read_write(v, sizeof(*v)); 1499 + arch_atomic_long_andnot(i, v); 1500 + } 1501 + 1502 + static __always_inline long 1503 + atomic_long_fetch_andnot(long i, atomic_long_t *v) 1504 + { 1505 + instrument_atomic_read_write(v, sizeof(*v)); 1506 + return arch_atomic_long_fetch_andnot(i, v); 1507 + } 1508 + 1509 + static __always_inline long 1510 + atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v) 1511 + { 1512 + instrument_atomic_read_write(v, sizeof(*v)); 1513 + return arch_atomic_long_fetch_andnot_acquire(i, v); 1514 + } 1515 + 1516 + static __always_inline long 1517 + atomic_long_fetch_andnot_release(long i, atomic_long_t *v) 1518 + { 1519 + instrument_atomic_read_write(v, sizeof(*v)); 1520 + return arch_atomic_long_fetch_andnot_release(i, v); 1521 + } 1522 + 1523 + static __always_inline long 1524 + atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v) 1525 + { 1526 + instrument_atomic_read_write(v, sizeof(*v)); 1527 + return arch_atomic_long_fetch_andnot_relaxed(i, v); 1528 + } 1529 + 1530 + static __always_inline void 1531 + atomic_long_or(long i, atomic_long_t *v) 1532 + { 1533 + instrument_atomic_read_write(v, sizeof(*v)); 1534 + arch_atomic_long_or(i, v); 1535 + } 1536 + 1537 + static __always_inline long 1538 + atomic_long_fetch_or(long i, atomic_long_t *v) 1539 + { 1540 + instrument_atomic_read_write(v, sizeof(*v)); 1541 + return arch_atomic_long_fetch_or(i, v); 1542 + } 1543 + 1544 + static __always_inline long 1545 + atomic_long_fetch_or_acquire(long i, atomic_long_t *v) 1546 + { 1547 + instrument_atomic_read_write(v, sizeof(*v)); 1548 + return arch_atomic_long_fetch_or_acquire(i, v); 1549 + } 1550 + 1551 + static __always_inline long 1552 + atomic_long_fetch_or_release(long i, atomic_long_t *v) 1553 + { 1554 + instrument_atomic_read_write(v, sizeof(*v)); 1555 + return arch_atomic_long_fetch_or_release(i, v); 1556 + } 1557 + 1558 + static __always_inline long 1559 + atomic_long_fetch_or_relaxed(long i, atomic_long_t *v) 1560 + 
{ 1561 + instrument_atomic_read_write(v, sizeof(*v)); 1562 + return arch_atomic_long_fetch_or_relaxed(i, v); 1563 + } 1564 + 1565 + static __always_inline void 1566 + atomic_long_xor(long i, atomic_long_t *v) 1567 + { 1568 + instrument_atomic_read_write(v, sizeof(*v)); 1569 + arch_atomic_long_xor(i, v); 1570 + } 1571 + 1572 + static __always_inline long 1573 + atomic_long_fetch_xor(long i, atomic_long_t *v) 1574 + { 1575 + instrument_atomic_read_write(v, sizeof(*v)); 1576 + return arch_atomic_long_fetch_xor(i, v); 1577 + } 1578 + 1579 + static __always_inline long 1580 + atomic_long_fetch_xor_acquire(long i, atomic_long_t *v) 1581 + { 1582 + instrument_atomic_read_write(v, sizeof(*v)); 1583 + return arch_atomic_long_fetch_xor_acquire(i, v); 1584 + } 1585 + 1586 + static __always_inline long 1587 + atomic_long_fetch_xor_release(long i, atomic_long_t *v) 1588 + { 1589 + instrument_atomic_read_write(v, sizeof(*v)); 1590 + return arch_atomic_long_fetch_xor_release(i, v); 1591 + } 1592 + 1593 + static __always_inline long 1594 + atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v) 1595 + { 1596 + instrument_atomic_read_write(v, sizeof(*v)); 1597 + return arch_atomic_long_fetch_xor_relaxed(i, v); 1598 + } 1599 + 1600 + static __always_inline long 1601 + atomic_long_xchg(atomic_long_t *v, long i) 1602 + { 1603 + instrument_atomic_read_write(v, sizeof(*v)); 1604 + return arch_atomic_long_xchg(v, i); 1605 + } 1606 + 1607 + static __always_inline long 1608 + atomic_long_xchg_acquire(atomic_long_t *v, long i) 1609 + { 1610 + instrument_atomic_read_write(v, sizeof(*v)); 1611 + return arch_atomic_long_xchg_acquire(v, i); 1612 + } 1613 + 1614 + static __always_inline long 1615 + atomic_long_xchg_release(atomic_long_t *v, long i) 1616 + { 1617 + instrument_atomic_read_write(v, sizeof(*v)); 1618 + return arch_atomic_long_xchg_release(v, i); 1619 + } 1620 + 1621 + static __always_inline long 1622 + atomic_long_xchg_relaxed(atomic_long_t *v, long i) 1623 + { 1624 + 
instrument_atomic_read_write(v, sizeof(*v)); 1625 + return arch_atomic_long_xchg_relaxed(v, i); 1626 + } 1627 + 1628 + static __always_inline long 1629 + atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 1630 + { 1631 + instrument_atomic_read_write(v, sizeof(*v)); 1632 + return arch_atomic_long_cmpxchg(v, old, new); 1633 + } 1634 + 1635 + static __always_inline long 1636 + atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new) 1637 + { 1638 + instrument_atomic_read_write(v, sizeof(*v)); 1639 + return arch_atomic_long_cmpxchg_acquire(v, old, new); 1640 + } 1641 + 1642 + static __always_inline long 1643 + atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new) 1644 + { 1645 + instrument_atomic_read_write(v, sizeof(*v)); 1646 + return arch_atomic_long_cmpxchg_release(v, old, new); 1647 + } 1648 + 1649 + static __always_inline long 1650 + atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new) 1651 + { 1652 + instrument_atomic_read_write(v, sizeof(*v)); 1653 + return arch_atomic_long_cmpxchg_relaxed(v, old, new); 1654 + } 1655 + 1656 + static __always_inline bool 1657 + atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new) 1658 + { 1659 + instrument_atomic_read_write(v, sizeof(*v)); 1660 + instrument_atomic_read_write(old, sizeof(*old)); 1661 + return arch_atomic_long_try_cmpxchg(v, old, new); 1662 + } 1663 + 1664 + static __always_inline bool 1665 + atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new) 1666 + { 1667 + instrument_atomic_read_write(v, sizeof(*v)); 1668 + instrument_atomic_read_write(old, sizeof(*old)); 1669 + return arch_atomic_long_try_cmpxchg_acquire(v, old, new); 1670 + } 1671 + 1672 + static __always_inline bool 1673 + atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new) 1674 + { 1675 + instrument_atomic_read_write(v, sizeof(*v)); 1676 + instrument_atomic_read_write(old, sizeof(*old)); 1677 + return arch_atomic_long_try_cmpxchg_release(v, old, new); 1678 + } 1679 + 
1680 + static __always_inline bool 1681 + atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new) 1682 + { 1683 + instrument_atomic_read_write(v, sizeof(*v)); 1684 + instrument_atomic_read_write(old, sizeof(*old)); 1685 + return arch_atomic_long_try_cmpxchg_relaxed(v, old, new); 1686 + } 1687 + 1688 + static __always_inline bool 1689 + atomic_long_sub_and_test(long i, atomic_long_t *v) 1690 + { 1691 + instrument_atomic_read_write(v, sizeof(*v)); 1692 + return arch_atomic_long_sub_and_test(i, v); 1693 + } 1694 + 1695 + static __always_inline bool 1696 + atomic_long_dec_and_test(atomic_long_t *v) 1697 + { 1698 + instrument_atomic_read_write(v, sizeof(*v)); 1699 + return arch_atomic_long_dec_and_test(v); 1700 + } 1701 + 1702 + static __always_inline bool 1703 + atomic_long_inc_and_test(atomic_long_t *v) 1704 + { 1705 + instrument_atomic_read_write(v, sizeof(*v)); 1706 + return arch_atomic_long_inc_and_test(v); 1707 + } 1708 + 1709 + static __always_inline bool 1710 + atomic_long_add_negative(long i, atomic_long_t *v) 1711 + { 1712 + instrument_atomic_read_write(v, sizeof(*v)); 1713 + return arch_atomic_long_add_negative(i, v); 1714 + } 1715 + 1716 + static __always_inline long 1717 + atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 1718 + { 1719 + instrument_atomic_read_write(v, sizeof(*v)); 1720 + return arch_atomic_long_fetch_add_unless(v, a, u); 1721 + } 1722 + 1723 + static __always_inline bool 1724 + atomic_long_add_unless(atomic_long_t *v, long a, long u) 1725 + { 1726 + instrument_atomic_read_write(v, sizeof(*v)); 1727 + return arch_atomic_long_add_unless(v, a, u); 1728 + } 1729 + 1730 + static __always_inline bool 1731 + atomic_long_inc_not_zero(atomic_long_t *v) 1732 + { 1733 + instrument_atomic_read_write(v, sizeof(*v)); 1734 + return arch_atomic_long_inc_not_zero(v); 1735 + } 1736 + 1737 + static __always_inline bool 1738 + atomic_long_inc_unless_negative(atomic_long_t *v) 1739 + { 1740 + instrument_atomic_read_write(v, 
sizeof(*v)); 1741 + return arch_atomic_long_inc_unless_negative(v); 1742 + } 1743 + 1744 + static __always_inline bool 1745 + atomic_long_dec_unless_positive(atomic_long_t *v) 1746 + { 1747 + instrument_atomic_read_write(v, sizeof(*v)); 1748 + return arch_atomic_long_dec_unless_positive(v); 1749 + } 1750 + 1751 + static __always_inline long 1752 + atomic_long_dec_if_positive(atomic_long_t *v) 1753 + { 1754 + instrument_atomic_read_write(v, sizeof(*v)); 1755 + return arch_atomic_long_dec_if_positive(v); 1756 + } 1757 + 1180 1758 #define xchg(ptr, ...) \ 1181 1759 ({ \ 1182 1760 typeof(ptr) __ai_ptr = (ptr); \ ··· 1912 1334 }) 1913 1335 1914 1336 #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */ 1915 - // 5edd72f105b6f54b7e9492d794abee88e6912d29 1337 + // 2a9553f0a9d5619f19151092df5cabbbf16ce835
+329 -329
include/linux/atomic/atomic-long.h
··· 24 24 #ifdef CONFIG_64BIT 25 25 26 26 static __always_inline long 27 - atomic_long_read(const atomic_long_t *v) 27 + arch_atomic_long_read(const atomic_long_t *v) 28 28 { 29 - return atomic64_read(v); 29 + return arch_atomic64_read(v); 30 30 } 31 31 32 32 static __always_inline long 33 - atomic_long_read_acquire(const atomic_long_t *v) 33 + arch_atomic_long_read_acquire(const atomic_long_t *v) 34 34 { 35 - return atomic64_read_acquire(v); 35 + return arch_atomic64_read_acquire(v); 36 36 } 37 37 38 38 static __always_inline void 39 - atomic_long_set(atomic_long_t *v, long i) 39 + arch_atomic_long_set(atomic_long_t *v, long i) 40 40 { 41 - atomic64_set(v, i); 41 + arch_atomic64_set(v, i); 42 42 } 43 43 44 44 static __always_inline void 45 - atomic_long_set_release(atomic_long_t *v, long i) 45 + arch_atomic_long_set_release(atomic_long_t *v, long i) 46 46 { 47 - atomic64_set_release(v, i); 47 + arch_atomic64_set_release(v, i); 48 48 } 49 49 50 50 static __always_inline void 51 - atomic_long_add(long i, atomic_long_t *v) 51 + arch_atomic_long_add(long i, atomic_long_t *v) 52 52 { 53 - atomic64_add(i, v); 53 + arch_atomic64_add(i, v); 54 54 } 55 55 56 56 static __always_inline long 57 - atomic_long_add_return(long i, atomic_long_t *v) 57 + arch_atomic_long_add_return(long i, atomic_long_t *v) 58 58 { 59 - return atomic64_add_return(i, v); 59 + return arch_atomic64_add_return(i, v); 60 60 } 61 61 62 62 static __always_inline long 63 - atomic_long_add_return_acquire(long i, atomic_long_t *v) 63 + arch_atomic_long_add_return_acquire(long i, atomic_long_t *v) 64 64 { 65 - return atomic64_add_return_acquire(i, v); 65 + return arch_atomic64_add_return_acquire(i, v); 66 66 } 67 67 68 68 static __always_inline long 69 - atomic_long_add_return_release(long i, atomic_long_t *v) 69 + arch_atomic_long_add_return_release(long i, atomic_long_t *v) 70 70 { 71 - return atomic64_add_return_release(i, v); 71 + return arch_atomic64_add_return_release(i, v); 72 72 } 73 73 74 74 static 
__always_inline long 75 - atomic_long_add_return_relaxed(long i, atomic_long_t *v) 75 + arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v) 76 76 { 77 - return atomic64_add_return_relaxed(i, v); 77 + return arch_atomic64_add_return_relaxed(i, v); 78 78 } 79 79 80 80 static __always_inline long 81 - atomic_long_fetch_add(long i, atomic_long_t *v) 81 + arch_atomic_long_fetch_add(long i, atomic_long_t *v) 82 82 { 83 - return atomic64_fetch_add(i, v); 83 + return arch_atomic64_fetch_add(i, v); 84 84 } 85 85 86 86 static __always_inline long 87 - atomic_long_fetch_add_acquire(long i, atomic_long_t *v) 87 + arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v) 88 88 { 89 - return atomic64_fetch_add_acquire(i, v); 89 + return arch_atomic64_fetch_add_acquire(i, v); 90 90 } 91 91 92 92 static __always_inline long 93 - atomic_long_fetch_add_release(long i, atomic_long_t *v) 93 + arch_atomic_long_fetch_add_release(long i, atomic_long_t *v) 94 94 { 95 - return atomic64_fetch_add_release(i, v); 95 + return arch_atomic64_fetch_add_release(i, v); 96 96 } 97 97 98 98 static __always_inline long 99 - atomic_long_fetch_add_relaxed(long i, atomic_long_t *v) 99 + arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v) 100 100 { 101 - return atomic64_fetch_add_relaxed(i, v); 101 + return arch_atomic64_fetch_add_relaxed(i, v); 102 102 } 103 103 104 104 static __always_inline void 105 - atomic_long_sub(long i, atomic_long_t *v) 105 + arch_atomic_long_sub(long i, atomic_long_t *v) 106 106 { 107 - atomic64_sub(i, v); 107 + arch_atomic64_sub(i, v); 108 108 } 109 109 110 110 static __always_inline long 111 - atomic_long_sub_return(long i, atomic_long_t *v) 111 + arch_atomic_long_sub_return(long i, atomic_long_t *v) 112 112 { 113 - return atomic64_sub_return(i, v); 113 + return arch_atomic64_sub_return(i, v); 114 114 } 115 115 116 116 static __always_inline long 117 - atomic_long_sub_return_acquire(long i, atomic_long_t *v) 117 + arch_atomic_long_sub_return_acquire(long 
i, atomic_long_t *v) 118 118 { 119 - return atomic64_sub_return_acquire(i, v); 119 + return arch_atomic64_sub_return_acquire(i, v); 120 120 } 121 121 122 122 static __always_inline long 123 - atomic_long_sub_return_release(long i, atomic_long_t *v) 123 + arch_atomic_long_sub_return_release(long i, atomic_long_t *v) 124 124 { 125 - return atomic64_sub_return_release(i, v); 125 + return arch_atomic64_sub_return_release(i, v); 126 126 } 127 127 128 128 static __always_inline long 129 - atomic_long_sub_return_relaxed(long i, atomic_long_t *v) 129 + arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v) 130 130 { 131 - return atomic64_sub_return_relaxed(i, v); 131 + return arch_atomic64_sub_return_relaxed(i, v); 132 132 } 133 133 134 134 static __always_inline long 135 - atomic_long_fetch_sub(long i, atomic_long_t *v) 135 + arch_atomic_long_fetch_sub(long i, atomic_long_t *v) 136 136 { 137 - return atomic64_fetch_sub(i, v); 137 + return arch_atomic64_fetch_sub(i, v); 138 138 } 139 139 140 140 static __always_inline long 141 - atomic_long_fetch_sub_acquire(long i, atomic_long_t *v) 141 + arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v) 142 142 { 143 - return atomic64_fetch_sub_acquire(i, v); 143 + return arch_atomic64_fetch_sub_acquire(i, v); 144 144 } 145 145 146 146 static __always_inline long 147 - atomic_long_fetch_sub_release(long i, atomic_long_t *v) 147 + arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v) 148 148 { 149 - return atomic64_fetch_sub_release(i, v); 149 + return arch_atomic64_fetch_sub_release(i, v); 150 150 } 151 151 152 152 static __always_inline long 153 - atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v) 153 + arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v) 154 154 { 155 - return atomic64_fetch_sub_relaxed(i, v); 155 + return arch_atomic64_fetch_sub_relaxed(i, v); 156 156 } 157 157 158 158 static __always_inline void 159 - atomic_long_inc(atomic_long_t *v) 159 + arch_atomic_long_inc(atomic_long_t *v) 
160 160 { 161 - atomic64_inc(v); 161 + arch_atomic64_inc(v); 162 162 } 163 163 164 164 static __always_inline long 165 - atomic_long_inc_return(atomic_long_t *v) 165 + arch_atomic_long_inc_return(atomic_long_t *v) 166 166 { 167 - return atomic64_inc_return(v); 167 + return arch_atomic64_inc_return(v); 168 168 } 169 169 170 170 static __always_inline long 171 - atomic_long_inc_return_acquire(atomic_long_t *v) 171 + arch_atomic_long_inc_return_acquire(atomic_long_t *v) 172 172 { 173 - return atomic64_inc_return_acquire(v); 173 + return arch_atomic64_inc_return_acquire(v); 174 174 } 175 175 176 176 static __always_inline long 177 - atomic_long_inc_return_release(atomic_long_t *v) 177 + arch_atomic_long_inc_return_release(atomic_long_t *v) 178 178 { 179 - return atomic64_inc_return_release(v); 179 + return arch_atomic64_inc_return_release(v); 180 180 } 181 181 182 182 static __always_inline long 183 - atomic_long_inc_return_relaxed(atomic_long_t *v) 183 + arch_atomic_long_inc_return_relaxed(atomic_long_t *v) 184 184 { 185 - return atomic64_inc_return_relaxed(v); 185 + return arch_atomic64_inc_return_relaxed(v); 186 186 } 187 187 188 188 static __always_inline long 189 - atomic_long_fetch_inc(atomic_long_t *v) 189 + arch_atomic_long_fetch_inc(atomic_long_t *v) 190 190 { 191 - return atomic64_fetch_inc(v); 191 + return arch_atomic64_fetch_inc(v); 192 192 } 193 193 194 194 static __always_inline long 195 - atomic_long_fetch_inc_acquire(atomic_long_t *v) 195 + arch_atomic_long_fetch_inc_acquire(atomic_long_t *v) 196 196 { 197 - return atomic64_fetch_inc_acquire(v); 197 + return arch_atomic64_fetch_inc_acquire(v); 198 198 } 199 199 200 200 static __always_inline long 201 - atomic_long_fetch_inc_release(atomic_long_t *v) 201 + arch_atomic_long_fetch_inc_release(atomic_long_t *v) 202 202 { 203 - return atomic64_fetch_inc_release(v); 203 + return arch_atomic64_fetch_inc_release(v); 204 204 } 205 205 206 206 static __always_inline long 207 - 
atomic_long_fetch_inc_relaxed(atomic_long_t *v) 207 + arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v) 208 208 { 209 - return atomic64_fetch_inc_relaxed(v); 209 + return arch_atomic64_fetch_inc_relaxed(v); 210 210 } 211 211 212 212 static __always_inline void 213 - atomic_long_dec(atomic_long_t *v) 213 + arch_atomic_long_dec(atomic_long_t *v) 214 214 { 215 - atomic64_dec(v); 215 + arch_atomic64_dec(v); 216 216 } 217 217 218 218 static __always_inline long 219 - atomic_long_dec_return(atomic_long_t *v) 219 + arch_atomic_long_dec_return(atomic_long_t *v) 220 220 { 221 - return atomic64_dec_return(v); 221 + return arch_atomic64_dec_return(v); 222 222 } 223 223 224 224 static __always_inline long 225 - atomic_long_dec_return_acquire(atomic_long_t *v) 225 + arch_atomic_long_dec_return_acquire(atomic_long_t *v) 226 226 { 227 - return atomic64_dec_return_acquire(v); 227 + return arch_atomic64_dec_return_acquire(v); 228 228 } 229 229 230 230 static __always_inline long 231 - atomic_long_dec_return_release(atomic_long_t *v) 231 + arch_atomic_long_dec_return_release(atomic_long_t *v) 232 232 { 233 - return atomic64_dec_return_release(v); 233 + return arch_atomic64_dec_return_release(v); 234 234 } 235 235 236 236 static __always_inline long 237 - atomic_long_dec_return_relaxed(atomic_long_t *v) 237 + arch_atomic_long_dec_return_relaxed(atomic_long_t *v) 238 238 { 239 - return atomic64_dec_return_relaxed(v); 239 + return arch_atomic64_dec_return_relaxed(v); 240 240 } 241 241 242 242 static __always_inline long 243 - atomic_long_fetch_dec(atomic_long_t *v) 243 + arch_atomic_long_fetch_dec(atomic_long_t *v) 244 244 { 245 - return atomic64_fetch_dec(v); 245 + return arch_atomic64_fetch_dec(v); 246 246 } 247 247 248 248 static __always_inline long 249 - atomic_long_fetch_dec_acquire(atomic_long_t *v) 249 + arch_atomic_long_fetch_dec_acquire(atomic_long_t *v) 250 250 { 251 - return atomic64_fetch_dec_acquire(v); 251 + return arch_atomic64_fetch_dec_acquire(v); 252 252 } 253 253 
254 254 static __always_inline long 255 - atomic_long_fetch_dec_release(atomic_long_t *v) 255 + arch_atomic_long_fetch_dec_release(atomic_long_t *v) 256 256 { 257 - return atomic64_fetch_dec_release(v); 257 + return arch_atomic64_fetch_dec_release(v); 258 258 } 259 259 260 260 static __always_inline long 261 - atomic_long_fetch_dec_relaxed(atomic_long_t *v) 261 + arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v) 262 262 { 263 - return atomic64_fetch_dec_relaxed(v); 263 + return arch_atomic64_fetch_dec_relaxed(v); 264 264 } 265 265 266 266 static __always_inline void 267 - atomic_long_and(long i, atomic_long_t *v) 267 + arch_atomic_long_and(long i, atomic_long_t *v) 268 268 { 269 - atomic64_and(i, v); 269 + arch_atomic64_and(i, v); 270 270 } 271 271 272 272 static __always_inline long 273 - atomic_long_fetch_and(long i, atomic_long_t *v) 273 + arch_atomic_long_fetch_and(long i, atomic_long_t *v) 274 274 { 275 - return atomic64_fetch_and(i, v); 275 + return arch_atomic64_fetch_and(i, v); 276 276 } 277 277 278 278 static __always_inline long 279 - atomic_long_fetch_and_acquire(long i, atomic_long_t *v) 279 + arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v) 280 280 { 281 - return atomic64_fetch_and_acquire(i, v); 281 + return arch_atomic64_fetch_and_acquire(i, v); 282 282 } 283 283 284 284 static __always_inline long 285 - atomic_long_fetch_and_release(long i, atomic_long_t *v) 285 + arch_atomic_long_fetch_and_release(long i, atomic_long_t *v) 286 286 { 287 - return atomic64_fetch_and_release(i, v); 287 + return arch_atomic64_fetch_and_release(i, v); 288 288 } 289 289 290 290 static __always_inline long 291 - atomic_long_fetch_and_relaxed(long i, atomic_long_t *v) 291 + arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v) 292 292 { 293 - return atomic64_fetch_and_relaxed(i, v); 293 + return arch_atomic64_fetch_and_relaxed(i, v); 294 294 } 295 295 296 296 static __always_inline void 297 - atomic_long_andnot(long i, atomic_long_t *v) 297 + 
arch_atomic_long_andnot(long i, atomic_long_t *v) 298 298 { 299 - atomic64_andnot(i, v); 299 + arch_atomic64_andnot(i, v); 300 300 } 301 301 302 302 static __always_inline long 303 - atomic_long_fetch_andnot(long i, atomic_long_t *v) 303 + arch_atomic_long_fetch_andnot(long i, atomic_long_t *v) 304 304 { 305 - return atomic64_fetch_andnot(i, v); 305 + return arch_atomic64_fetch_andnot(i, v); 306 306 } 307 307 308 308 static __always_inline long 309 - atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v) 309 + arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v) 310 310 { 311 - return atomic64_fetch_andnot_acquire(i, v); 311 + return arch_atomic64_fetch_andnot_acquire(i, v); 312 312 } 313 313 314 314 static __always_inline long 315 - atomic_long_fetch_andnot_release(long i, atomic_long_t *v) 315 + arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v) 316 316 { 317 - return atomic64_fetch_andnot_release(i, v); 317 + return arch_atomic64_fetch_andnot_release(i, v); 318 318 } 319 319 320 320 static __always_inline long 321 - atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v) 321 + arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v) 322 322 { 323 - return atomic64_fetch_andnot_relaxed(i, v); 323 + return arch_atomic64_fetch_andnot_relaxed(i, v); 324 324 } 325 325 326 326 static __always_inline void 327 - atomic_long_or(long i, atomic_long_t *v) 327 + arch_atomic_long_or(long i, atomic_long_t *v) 328 328 { 329 - atomic64_or(i, v); 329 + arch_atomic64_or(i, v); 330 330 } 331 331 332 332 static __always_inline long 333 - atomic_long_fetch_or(long i, atomic_long_t *v) 333 + arch_atomic_long_fetch_or(long i, atomic_long_t *v) 334 334 { 335 - return atomic64_fetch_or(i, v); 335 + return arch_atomic64_fetch_or(i, v); 336 336 } 337 337 338 338 static __always_inline long 339 - atomic_long_fetch_or_acquire(long i, atomic_long_t *v) 339 + arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v) 340 340 { 341 - return 
atomic64_fetch_or_acquire(i, v); 341 + return arch_atomic64_fetch_or_acquire(i, v); 342 342 } 343 343 344 344 static __always_inline long 345 - atomic_long_fetch_or_release(long i, atomic_long_t *v) 345 + arch_atomic_long_fetch_or_release(long i, atomic_long_t *v) 346 346 { 347 - return atomic64_fetch_or_release(i, v); 347 + return arch_atomic64_fetch_or_release(i, v); 348 348 } 349 349 350 350 static __always_inline long 351 - atomic_long_fetch_or_relaxed(long i, atomic_long_t *v) 351 + arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v) 352 352 { 353 - return atomic64_fetch_or_relaxed(i, v); 353 + return arch_atomic64_fetch_or_relaxed(i, v); 354 354 } 355 355 356 356 static __always_inline void 357 - atomic_long_xor(long i, atomic_long_t *v) 357 + arch_atomic_long_xor(long i, atomic_long_t *v) 358 358 { 359 - atomic64_xor(i, v); 359 + arch_atomic64_xor(i, v); 360 360 } 361 361 362 362 static __always_inline long 363 - atomic_long_fetch_xor(long i, atomic_long_t *v) 363 + arch_atomic_long_fetch_xor(long i, atomic_long_t *v) 364 364 { 365 - return atomic64_fetch_xor(i, v); 365 + return arch_atomic64_fetch_xor(i, v); 366 366 } 367 367 368 368 static __always_inline long 369 - atomic_long_fetch_xor_acquire(long i, atomic_long_t *v) 369 + arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v) 370 370 { 371 - return atomic64_fetch_xor_acquire(i, v); 371 + return arch_atomic64_fetch_xor_acquire(i, v); 372 372 } 373 373 374 374 static __always_inline long 375 - atomic_long_fetch_xor_release(long i, atomic_long_t *v) 375 + arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v) 376 376 { 377 - return atomic64_fetch_xor_release(i, v); 377 + return arch_atomic64_fetch_xor_release(i, v); 378 378 } 379 379 380 380 static __always_inline long 381 - atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v) 381 + arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v) 382 382 { 383 - return atomic64_fetch_xor_relaxed(i, v); 383 + return 
arch_atomic64_fetch_xor_relaxed(i, v); 384 384 } 385 385 386 386 static __always_inline long 387 - atomic_long_xchg(atomic_long_t *v, long i) 387 + arch_atomic_long_xchg(atomic_long_t *v, long i) 388 388 { 389 - return atomic64_xchg(v, i); 389 + return arch_atomic64_xchg(v, i); 390 390 } 391 391 392 392 static __always_inline long 393 - atomic_long_xchg_acquire(atomic_long_t *v, long i) 393 + arch_atomic_long_xchg_acquire(atomic_long_t *v, long i) 394 394 { 395 - return atomic64_xchg_acquire(v, i); 395 + return arch_atomic64_xchg_acquire(v, i); 396 396 } 397 397 398 398 static __always_inline long 399 - atomic_long_xchg_release(atomic_long_t *v, long i) 399 + arch_atomic_long_xchg_release(atomic_long_t *v, long i) 400 400 { 401 - return atomic64_xchg_release(v, i); 401 + return arch_atomic64_xchg_release(v, i); 402 402 } 403 403 404 404 static __always_inline long 405 - atomic_long_xchg_relaxed(atomic_long_t *v, long i) 405 + arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i) 406 406 { 407 - return atomic64_xchg_relaxed(v, i); 407 + return arch_atomic64_xchg_relaxed(v, i); 408 408 } 409 409 410 410 static __always_inline long 411 - atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 411 + arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 412 412 { 413 - return atomic64_cmpxchg(v, old, new); 413 + return arch_atomic64_cmpxchg(v, old, new); 414 414 } 415 415 416 416 static __always_inline long 417 - atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new) 417 + arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new) 418 418 { 419 - return atomic64_cmpxchg_acquire(v, old, new); 419 + return arch_atomic64_cmpxchg_acquire(v, old, new); 420 420 } 421 421 422 422 static __always_inline long 423 - atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new) 423 + arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new) 424 424 { 425 - return atomic64_cmpxchg_release(v, old, new); 425 + return 
arch_atomic64_cmpxchg_release(v, old, new); 426 426 } 427 427 428 428 static __always_inline long 429 - atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new) 429 + arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new) 430 430 { 431 - return atomic64_cmpxchg_relaxed(v, old, new); 431 + return arch_atomic64_cmpxchg_relaxed(v, old, new); 432 432 } 433 433 434 434 static __always_inline bool 435 - atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new) 435 + arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new) 436 436 { 437 - return atomic64_try_cmpxchg(v, (s64 *)old, new); 437 + return arch_atomic64_try_cmpxchg(v, (s64 *)old, new); 438 438 } 439 439 440 440 static __always_inline bool 441 - atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new) 441 + arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new) 442 442 { 443 - return atomic64_try_cmpxchg_acquire(v, (s64 *)old, new); 443 + return arch_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new); 444 444 } 445 445 446 446 static __always_inline bool 447 - atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new) 447 + arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new) 448 448 { 449 - return atomic64_try_cmpxchg_release(v, (s64 *)old, new); 449 + return arch_atomic64_try_cmpxchg_release(v, (s64 *)old, new); 450 450 } 451 451 452 452 static __always_inline bool 453 - atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new) 453 + arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new) 454 454 { 455 - return atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new); 455 + return arch_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new); 456 456 } 457 457 458 458 static __always_inline bool 459 - atomic_long_sub_and_test(long i, atomic_long_t *v) 459 + arch_atomic_long_sub_and_test(long i, atomic_long_t *v) 460 460 { 461 - return atomic64_sub_and_test(i, v); 461 + return 
arch_atomic64_sub_and_test(i, v); 462 462 } 463 463 464 464 static __always_inline bool 465 - atomic_long_dec_and_test(atomic_long_t *v) 465 + arch_atomic_long_dec_and_test(atomic_long_t *v) 466 466 { 467 - return atomic64_dec_and_test(v); 467 + return arch_atomic64_dec_and_test(v); 468 468 } 469 469 470 470 static __always_inline bool 471 - atomic_long_inc_and_test(atomic_long_t *v) 471 + arch_atomic_long_inc_and_test(atomic_long_t *v) 472 472 { 473 - return atomic64_inc_and_test(v); 473 + return arch_atomic64_inc_and_test(v); 474 474 } 475 475 476 476 static __always_inline bool 477 - atomic_long_add_negative(long i, atomic_long_t *v) 477 + arch_atomic_long_add_negative(long i, atomic_long_t *v) 478 478 { 479 - return atomic64_add_negative(i, v); 479 + return arch_atomic64_add_negative(i, v); 480 480 } 481 481 482 482 static __always_inline long 483 - atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 483 + arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 484 484 { 485 - return atomic64_fetch_add_unless(v, a, u); 485 + return arch_atomic64_fetch_add_unless(v, a, u); 486 486 } 487 487 488 488 static __always_inline bool 489 - atomic_long_add_unless(atomic_long_t *v, long a, long u) 489 + arch_atomic_long_add_unless(atomic_long_t *v, long a, long u) 490 490 { 491 - return atomic64_add_unless(v, a, u); 491 + return arch_atomic64_add_unless(v, a, u); 492 492 } 493 493 494 494 static __always_inline bool 495 - atomic_long_inc_not_zero(atomic_long_t *v) 495 + arch_atomic_long_inc_not_zero(atomic_long_t *v) 496 496 { 497 - return atomic64_inc_not_zero(v); 497 + return arch_atomic64_inc_not_zero(v); 498 498 } 499 499 500 500 static __always_inline bool 501 - atomic_long_inc_unless_negative(atomic_long_t *v) 501 + arch_atomic_long_inc_unless_negative(atomic_long_t *v) 502 502 { 503 - return atomic64_inc_unless_negative(v); 503 + return arch_atomic64_inc_unless_negative(v); 504 504 } 505 505 506 506 static __always_inline bool 507 - 
atomic_long_dec_unless_positive(atomic_long_t *v) 507 + arch_atomic_long_dec_unless_positive(atomic_long_t *v) 508 508 { 509 - return atomic64_dec_unless_positive(v); 509 + return arch_atomic64_dec_unless_positive(v); 510 510 } 511 511 512 512 static __always_inline long 513 - atomic_long_dec_if_positive(atomic_long_t *v) 513 + arch_atomic_long_dec_if_positive(atomic_long_t *v) 514 514 { 515 - return atomic64_dec_if_positive(v); 515 + return arch_atomic64_dec_if_positive(v); 516 516 } 517 517 518 518 #else /* CONFIG_64BIT */ 519 519 520 520 static __always_inline long 521 - atomic_long_read(const atomic_long_t *v) 521 + arch_atomic_long_read(const atomic_long_t *v) 522 522 { 523 - return atomic_read(v); 523 + return arch_atomic_read(v); 524 524 } 525 525 526 526 static __always_inline long 527 - atomic_long_read_acquire(const atomic_long_t *v) 527 + arch_atomic_long_read_acquire(const atomic_long_t *v) 528 528 { 529 - return atomic_read_acquire(v); 529 + return arch_atomic_read_acquire(v); 530 530 } 531 531 532 532 static __always_inline void 533 - atomic_long_set(atomic_long_t *v, long i) 533 + arch_atomic_long_set(atomic_long_t *v, long i) 534 534 { 535 - atomic_set(v, i); 535 + arch_atomic_set(v, i); 536 536 } 537 537 538 538 static __always_inline void 539 - atomic_long_set_release(atomic_long_t *v, long i) 539 + arch_atomic_long_set_release(atomic_long_t *v, long i) 540 540 { 541 - atomic_set_release(v, i); 541 + arch_atomic_set_release(v, i); 542 542 } 543 543 544 544 static __always_inline void 545 - atomic_long_add(long i, atomic_long_t *v) 545 + arch_atomic_long_add(long i, atomic_long_t *v) 546 546 { 547 - atomic_add(i, v); 547 + arch_atomic_add(i, v); 548 548 } 549 549 550 550 static __always_inline long 551 - atomic_long_add_return(long i, atomic_long_t *v) 551 + arch_atomic_long_add_return(long i, atomic_long_t *v) 552 552 { 553 - return atomic_add_return(i, v); 553 + return arch_atomic_add_return(i, v); 554 554 } 555 555 556 556 static __always_inline 
long 557 - atomic_long_add_return_acquire(long i, atomic_long_t *v) 557 + arch_atomic_long_add_return_acquire(long i, atomic_long_t *v) 558 558 { 559 - return atomic_add_return_acquire(i, v); 559 + return arch_atomic_add_return_acquire(i, v); 560 560 } 561 561 562 562 static __always_inline long 563 - atomic_long_add_return_release(long i, atomic_long_t *v) 563 + arch_atomic_long_add_return_release(long i, atomic_long_t *v) 564 564 { 565 - return atomic_add_return_release(i, v); 565 + return arch_atomic_add_return_release(i, v); 566 566 } 567 567 568 568 static __always_inline long 569 - atomic_long_add_return_relaxed(long i, atomic_long_t *v) 569 + arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v) 570 570 { 571 - return atomic_add_return_relaxed(i, v); 571 + return arch_atomic_add_return_relaxed(i, v); 572 572 } 573 573 574 574 static __always_inline long 575 - atomic_long_fetch_add(long i, atomic_long_t *v) 575 + arch_atomic_long_fetch_add(long i, atomic_long_t *v) 576 576 { 577 - return atomic_fetch_add(i, v); 577 + return arch_atomic_fetch_add(i, v); 578 578 } 579 579 580 580 static __always_inline long 581 - atomic_long_fetch_add_acquire(long i, atomic_long_t *v) 581 + arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v) 582 582 { 583 - return atomic_fetch_add_acquire(i, v); 583 + return arch_atomic_fetch_add_acquire(i, v); 584 584 } 585 585 586 586 static __always_inline long 587 - atomic_long_fetch_add_release(long i, atomic_long_t *v) 587 + arch_atomic_long_fetch_add_release(long i, atomic_long_t *v) 588 588 { 589 - return atomic_fetch_add_release(i, v); 589 + return arch_atomic_fetch_add_release(i, v); 590 590 } 591 591 592 592 static __always_inline long 593 - atomic_long_fetch_add_relaxed(long i, atomic_long_t *v) 593 + arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v) 594 594 { 595 - return atomic_fetch_add_relaxed(i, v); 595 + return arch_atomic_fetch_add_relaxed(i, v); 596 596 } 597 597 598 598 static __always_inline 
void 599 - atomic_long_sub(long i, atomic_long_t *v) 599 + arch_atomic_long_sub(long i, atomic_long_t *v) 600 600 { 601 - atomic_sub(i, v); 601 + arch_atomic_sub(i, v); 602 602 } 603 603 604 604 static __always_inline long 605 - atomic_long_sub_return(long i, atomic_long_t *v) 605 + arch_atomic_long_sub_return(long i, atomic_long_t *v) 606 606 { 607 - return atomic_sub_return(i, v); 607 + return arch_atomic_sub_return(i, v); 608 608 } 609 609 610 610 static __always_inline long 611 - atomic_long_sub_return_acquire(long i, atomic_long_t *v) 611 + arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v) 612 612 { 613 - return atomic_sub_return_acquire(i, v); 613 + return arch_atomic_sub_return_acquire(i, v); 614 614 } 615 615 616 616 static __always_inline long 617 - atomic_long_sub_return_release(long i, atomic_long_t *v) 617 + arch_atomic_long_sub_return_release(long i, atomic_long_t *v) 618 618 { 619 - return atomic_sub_return_release(i, v); 619 + return arch_atomic_sub_return_release(i, v); 620 620 } 621 621 622 622 static __always_inline long 623 - atomic_long_sub_return_relaxed(long i, atomic_long_t *v) 623 + arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v) 624 624 { 625 - return atomic_sub_return_relaxed(i, v); 625 + return arch_atomic_sub_return_relaxed(i, v); 626 626 } 627 627 628 628 static __always_inline long 629 - atomic_long_fetch_sub(long i, atomic_long_t *v) 629 + arch_atomic_long_fetch_sub(long i, atomic_long_t *v) 630 630 { 631 - return atomic_fetch_sub(i, v); 631 + return arch_atomic_fetch_sub(i, v); 632 632 } 633 633 634 634 static __always_inline long 635 - atomic_long_fetch_sub_acquire(long i, atomic_long_t *v) 635 + arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v) 636 636 { 637 - return atomic_fetch_sub_acquire(i, v); 637 + return arch_atomic_fetch_sub_acquire(i, v); 638 638 } 639 639 640 640 static __always_inline long 641 - atomic_long_fetch_sub_release(long i, atomic_long_t *v) 641 + 
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v) 642 642 { 643 - return atomic_fetch_sub_release(i, v); 643 + return arch_atomic_fetch_sub_release(i, v); 644 644 } 645 645 646 646 static __always_inline long 647 - atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v) 647 + arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v) 648 648 { 649 - return atomic_fetch_sub_relaxed(i, v); 649 + return arch_atomic_fetch_sub_relaxed(i, v); 650 650 } 651 651 652 652 static __always_inline void 653 - atomic_long_inc(atomic_long_t *v) 653 + arch_atomic_long_inc(atomic_long_t *v) 654 654 { 655 - atomic_inc(v); 655 + arch_atomic_inc(v); 656 656 } 657 657 658 658 static __always_inline long 659 - atomic_long_inc_return(atomic_long_t *v) 659 + arch_atomic_long_inc_return(atomic_long_t *v) 660 660 { 661 - return atomic_inc_return(v); 661 + return arch_atomic_inc_return(v); 662 662 } 663 663 664 664 static __always_inline long 665 - atomic_long_inc_return_acquire(atomic_long_t *v) 665 + arch_atomic_long_inc_return_acquire(atomic_long_t *v) 666 666 { 667 - return atomic_inc_return_acquire(v); 667 + return arch_atomic_inc_return_acquire(v); 668 668 } 669 669 670 670 static __always_inline long 671 - atomic_long_inc_return_release(atomic_long_t *v) 671 + arch_atomic_long_inc_return_release(atomic_long_t *v) 672 672 { 673 - return atomic_inc_return_release(v); 673 + return arch_atomic_inc_return_release(v); 674 674 } 675 675 676 676 static __always_inline long 677 - atomic_long_inc_return_relaxed(atomic_long_t *v) 677 + arch_atomic_long_inc_return_relaxed(atomic_long_t *v) 678 678 { 679 - return atomic_inc_return_relaxed(v); 679 + return arch_atomic_inc_return_relaxed(v); 680 680 } 681 681 682 682 static __always_inline long 683 - atomic_long_fetch_inc(atomic_long_t *v) 683 + arch_atomic_long_fetch_inc(atomic_long_t *v) 684 684 { 685 - return atomic_fetch_inc(v); 685 + return arch_atomic_fetch_inc(v); 686 686 } 687 687 688 688 static __always_inline long 689 - 
atomic_long_fetch_inc_acquire(atomic_long_t *v) 689 + arch_atomic_long_fetch_inc_acquire(atomic_long_t *v) 690 690 { 691 - return atomic_fetch_inc_acquire(v); 691 + return arch_atomic_fetch_inc_acquire(v); 692 692 } 693 693 694 694 static __always_inline long 695 - atomic_long_fetch_inc_release(atomic_long_t *v) 695 + arch_atomic_long_fetch_inc_release(atomic_long_t *v) 696 696 { 697 - return atomic_fetch_inc_release(v); 697 + return arch_atomic_fetch_inc_release(v); 698 698 } 699 699 700 700 static __always_inline long 701 - atomic_long_fetch_inc_relaxed(atomic_long_t *v) 701 + arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v) 702 702 { 703 - return atomic_fetch_inc_relaxed(v); 703 + return arch_atomic_fetch_inc_relaxed(v); 704 704 } 705 705 706 706 static __always_inline void 707 - atomic_long_dec(atomic_long_t *v) 707 + arch_atomic_long_dec(atomic_long_t *v) 708 708 { 709 - atomic_dec(v); 709 + arch_atomic_dec(v); 710 710 } 711 711 712 712 static __always_inline long 713 - atomic_long_dec_return(atomic_long_t *v) 713 + arch_atomic_long_dec_return(atomic_long_t *v) 714 714 { 715 - return atomic_dec_return(v); 715 + return arch_atomic_dec_return(v); 716 716 } 717 717 718 718 static __always_inline long 719 - atomic_long_dec_return_acquire(atomic_long_t *v) 719 + arch_atomic_long_dec_return_acquire(atomic_long_t *v) 720 720 { 721 - return atomic_dec_return_acquire(v); 721 + return arch_atomic_dec_return_acquire(v); 722 722 } 723 723 724 724 static __always_inline long 725 - atomic_long_dec_return_release(atomic_long_t *v) 725 + arch_atomic_long_dec_return_release(atomic_long_t *v) 726 726 { 727 - return atomic_dec_return_release(v); 727 + return arch_atomic_dec_return_release(v); 728 728 } 729 729 730 730 static __always_inline long 731 - atomic_long_dec_return_relaxed(atomic_long_t *v) 731 + arch_atomic_long_dec_return_relaxed(atomic_long_t *v) 732 732 { 733 - return atomic_dec_return_relaxed(v); 733 + return arch_atomic_dec_return_relaxed(v); 734 734 } 735 735 
736 736 static __always_inline long 737 - atomic_long_fetch_dec(atomic_long_t *v) 737 + arch_atomic_long_fetch_dec(atomic_long_t *v) 738 738 { 739 - return atomic_fetch_dec(v); 739 + return arch_atomic_fetch_dec(v); 740 740 } 741 741 742 742 static __always_inline long 743 - atomic_long_fetch_dec_acquire(atomic_long_t *v) 743 + arch_atomic_long_fetch_dec_acquire(atomic_long_t *v) 744 744 { 745 - return atomic_fetch_dec_acquire(v); 745 + return arch_atomic_fetch_dec_acquire(v); 746 746 } 747 747 748 748 static __always_inline long 749 - atomic_long_fetch_dec_release(atomic_long_t *v) 749 + arch_atomic_long_fetch_dec_release(atomic_long_t *v) 750 750 { 751 - return atomic_fetch_dec_release(v); 751 + return arch_atomic_fetch_dec_release(v); 752 752 } 753 753 754 754 static __always_inline long 755 - atomic_long_fetch_dec_relaxed(atomic_long_t *v) 755 + arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v) 756 756 { 757 - return atomic_fetch_dec_relaxed(v); 757 + return arch_atomic_fetch_dec_relaxed(v); 758 758 } 759 759 760 760 static __always_inline void 761 - atomic_long_and(long i, atomic_long_t *v) 761 + arch_atomic_long_and(long i, atomic_long_t *v) 762 762 { 763 - atomic_and(i, v); 763 + arch_atomic_and(i, v); 764 764 } 765 765 766 766 static __always_inline long 767 - atomic_long_fetch_and(long i, atomic_long_t *v) 767 + arch_atomic_long_fetch_and(long i, atomic_long_t *v) 768 768 { 769 - return atomic_fetch_and(i, v); 769 + return arch_atomic_fetch_and(i, v); 770 770 } 771 771 772 772 static __always_inline long 773 - atomic_long_fetch_and_acquire(long i, atomic_long_t *v) 773 + arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v) 774 774 { 775 - return atomic_fetch_and_acquire(i, v); 775 + return arch_atomic_fetch_and_acquire(i, v); 776 776 } 777 777 778 778 static __always_inline long 779 - atomic_long_fetch_and_release(long i, atomic_long_t *v) 779 + arch_atomic_long_fetch_and_release(long i, atomic_long_t *v) 780 780 { 781 - return 
atomic_fetch_and_release(i, v); 781 + return arch_atomic_fetch_and_release(i, v); 782 782 } 783 783 784 784 static __always_inline long 785 - atomic_long_fetch_and_relaxed(long i, atomic_long_t *v) 785 + arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v) 786 786 { 787 - return atomic_fetch_and_relaxed(i, v); 787 + return arch_atomic_fetch_and_relaxed(i, v); 788 788 } 789 789 790 790 static __always_inline void 791 - atomic_long_andnot(long i, atomic_long_t *v) 791 + arch_atomic_long_andnot(long i, atomic_long_t *v) 792 792 { 793 - atomic_andnot(i, v); 793 + arch_atomic_andnot(i, v); 794 794 } 795 795 796 796 static __always_inline long 797 - atomic_long_fetch_andnot(long i, atomic_long_t *v) 797 + arch_atomic_long_fetch_andnot(long i, atomic_long_t *v) 798 798 { 799 - return atomic_fetch_andnot(i, v); 799 + return arch_atomic_fetch_andnot(i, v); 800 800 } 801 801 802 802 static __always_inline long 803 - atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v) 803 + arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v) 804 804 { 805 - return atomic_fetch_andnot_acquire(i, v); 805 + return arch_atomic_fetch_andnot_acquire(i, v); 806 806 } 807 807 808 808 static __always_inline long 809 - atomic_long_fetch_andnot_release(long i, atomic_long_t *v) 809 + arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v) 810 810 { 811 - return atomic_fetch_andnot_release(i, v); 811 + return arch_atomic_fetch_andnot_release(i, v); 812 812 } 813 813 814 814 static __always_inline long 815 - atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v) 815 + arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v) 816 816 { 817 - return atomic_fetch_andnot_relaxed(i, v); 817 + return arch_atomic_fetch_andnot_relaxed(i, v); 818 818 } 819 819 820 820 static __always_inline void 821 - atomic_long_or(long i, atomic_long_t *v) 821 + arch_atomic_long_or(long i, atomic_long_t *v) 822 822 { 823 - atomic_or(i, v); 823 + arch_atomic_or(i, v); 824 824 } 825 825 
826 826 static __always_inline long 827 - atomic_long_fetch_or(long i, atomic_long_t *v) 827 + arch_atomic_long_fetch_or(long i, atomic_long_t *v) 828 828 { 829 - return atomic_fetch_or(i, v); 829 + return arch_atomic_fetch_or(i, v); 830 830 } 831 831 832 832 static __always_inline long 833 - atomic_long_fetch_or_acquire(long i, atomic_long_t *v) 833 + arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v) 834 834 { 835 - return atomic_fetch_or_acquire(i, v); 835 + return arch_atomic_fetch_or_acquire(i, v); 836 836 } 837 837 838 838 static __always_inline long 839 - atomic_long_fetch_or_release(long i, atomic_long_t *v) 839 + arch_atomic_long_fetch_or_release(long i, atomic_long_t *v) 840 840 { 841 - return atomic_fetch_or_release(i, v); 841 + return arch_atomic_fetch_or_release(i, v); 842 842 } 843 843 844 844 static __always_inline long 845 - atomic_long_fetch_or_relaxed(long i, atomic_long_t *v) 845 + arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v) 846 846 { 847 - return atomic_fetch_or_relaxed(i, v); 847 + return arch_atomic_fetch_or_relaxed(i, v); 848 848 } 849 849 850 850 static __always_inline void 851 - atomic_long_xor(long i, atomic_long_t *v) 851 + arch_atomic_long_xor(long i, atomic_long_t *v) 852 852 { 853 - atomic_xor(i, v); 853 + arch_atomic_xor(i, v); 854 854 } 855 855 856 856 static __always_inline long 857 - atomic_long_fetch_xor(long i, atomic_long_t *v) 857 + arch_atomic_long_fetch_xor(long i, atomic_long_t *v) 858 858 { 859 - return atomic_fetch_xor(i, v); 859 + return arch_atomic_fetch_xor(i, v); 860 860 } 861 861 862 862 static __always_inline long 863 - atomic_long_fetch_xor_acquire(long i, atomic_long_t *v) 863 + arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v) 864 864 { 865 - return atomic_fetch_xor_acquire(i, v); 865 + return arch_atomic_fetch_xor_acquire(i, v); 866 866 } 867 867 868 868 static __always_inline long 869 - atomic_long_fetch_xor_release(long i, atomic_long_t *v) 869 + 
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v) 870 870 { 871 - return atomic_fetch_xor_release(i, v); 871 + return arch_atomic_fetch_xor_release(i, v); 872 872 } 873 873 874 874 static __always_inline long 875 - atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v) 875 + arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v) 876 876 { 877 - return atomic_fetch_xor_relaxed(i, v); 877 + return arch_atomic_fetch_xor_relaxed(i, v); 878 878 } 879 879 880 880 static __always_inline long 881 - atomic_long_xchg(atomic_long_t *v, long i) 881 + arch_atomic_long_xchg(atomic_long_t *v, long i) 882 882 { 883 - return atomic_xchg(v, i); 883 + return arch_atomic_xchg(v, i); 884 884 } 885 885 886 886 static __always_inline long 887 - atomic_long_xchg_acquire(atomic_long_t *v, long i) 887 + arch_atomic_long_xchg_acquire(atomic_long_t *v, long i) 888 888 { 889 - return atomic_xchg_acquire(v, i); 889 + return arch_atomic_xchg_acquire(v, i); 890 890 } 891 891 892 892 static __always_inline long 893 - atomic_long_xchg_release(atomic_long_t *v, long i) 893 + arch_atomic_long_xchg_release(atomic_long_t *v, long i) 894 894 { 895 - return atomic_xchg_release(v, i); 895 + return arch_atomic_xchg_release(v, i); 896 896 } 897 897 898 898 static __always_inline long 899 - atomic_long_xchg_relaxed(atomic_long_t *v, long i) 899 + arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i) 900 900 { 901 - return atomic_xchg_relaxed(v, i); 901 + return arch_atomic_xchg_relaxed(v, i); 902 902 } 903 903 904 904 static __always_inline long 905 - atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 905 + arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new) 906 906 { 907 - return atomic_cmpxchg(v, old, new); 907 + return arch_atomic_cmpxchg(v, old, new); 908 908 } 909 909 910 910 static __always_inline long 911 - atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new) 911 + arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new) 912 912 { 913 - 
return atomic_cmpxchg_acquire(v, old, new); 913 + return arch_atomic_cmpxchg_acquire(v, old, new); 914 914 } 915 915 916 916 static __always_inline long 917 - atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new) 917 + arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new) 918 918 { 919 - return atomic_cmpxchg_release(v, old, new); 919 + return arch_atomic_cmpxchg_release(v, old, new); 920 920 } 921 921 922 922 static __always_inline long 923 - atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new) 923 + arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new) 924 924 { 925 - return atomic_cmpxchg_relaxed(v, old, new); 925 + return arch_atomic_cmpxchg_relaxed(v, old, new); 926 926 } 927 927 928 928 static __always_inline bool 929 - atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new) 929 + arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new) 930 930 { 931 - return atomic_try_cmpxchg(v, (int *)old, new); 931 + return arch_atomic_try_cmpxchg(v, (int *)old, new); 932 932 } 933 933 934 934 static __always_inline bool 935 - atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new) 935 + arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new) 936 936 { 937 - return atomic_try_cmpxchg_acquire(v, (int *)old, new); 937 + return arch_atomic_try_cmpxchg_acquire(v, (int *)old, new); 938 938 } 939 939 940 940 static __always_inline bool 941 - atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new) 941 + arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new) 942 942 { 943 - return atomic_try_cmpxchg_release(v, (int *)old, new); 943 + return arch_atomic_try_cmpxchg_release(v, (int *)old, new); 944 944 } 945 945 946 946 static __always_inline bool 947 - atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new) 947 + arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new) 948 948 { 949 - return 
atomic_try_cmpxchg_relaxed(v, (int *)old, new); 949 + return arch_atomic_try_cmpxchg_relaxed(v, (int *)old, new); 950 950 } 951 951 952 952 static __always_inline bool 953 - atomic_long_sub_and_test(long i, atomic_long_t *v) 953 + arch_atomic_long_sub_and_test(long i, atomic_long_t *v) 954 954 { 955 - return atomic_sub_and_test(i, v); 955 + return arch_atomic_sub_and_test(i, v); 956 956 } 957 957 958 958 static __always_inline bool 959 - atomic_long_dec_and_test(atomic_long_t *v) 959 + arch_atomic_long_dec_and_test(atomic_long_t *v) 960 960 { 961 - return atomic_dec_and_test(v); 961 + return arch_atomic_dec_and_test(v); 962 962 } 963 963 964 964 static __always_inline bool 965 - atomic_long_inc_and_test(atomic_long_t *v) 965 + arch_atomic_long_inc_and_test(atomic_long_t *v) 966 966 { 967 - return atomic_inc_and_test(v); 967 + return arch_atomic_inc_and_test(v); 968 968 } 969 969 970 970 static __always_inline bool 971 - atomic_long_add_negative(long i, atomic_long_t *v) 971 + arch_atomic_long_add_negative(long i, atomic_long_t *v) 972 972 { 973 - return atomic_add_negative(i, v); 973 + return arch_atomic_add_negative(i, v); 974 974 } 975 975 976 976 static __always_inline long 977 - atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 977 + arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u) 978 978 { 979 - return atomic_fetch_add_unless(v, a, u); 979 + return arch_atomic_fetch_add_unless(v, a, u); 980 980 } 981 981 982 982 static __always_inline bool 983 - atomic_long_add_unless(atomic_long_t *v, long a, long u) 983 + arch_atomic_long_add_unless(atomic_long_t *v, long a, long u) 984 984 { 985 - return atomic_add_unless(v, a, u); 985 + return arch_atomic_add_unless(v, a, u); 986 986 } 987 987 988 988 static __always_inline bool 989 - atomic_long_inc_not_zero(atomic_long_t *v) 989 + arch_atomic_long_inc_not_zero(atomic_long_t *v) 990 990 { 991 - return atomic_inc_not_zero(v); 991 + return arch_atomic_inc_not_zero(v); 992 992 } 993 993 994 
994 static __always_inline bool 995 - atomic_long_inc_unless_negative(atomic_long_t *v) 995 + arch_atomic_long_inc_unless_negative(atomic_long_t *v) 996 996 { 997 - return atomic_inc_unless_negative(v); 997 + return arch_atomic_inc_unless_negative(v); 998 998 } 999 999 1000 1000 static __always_inline bool 1001 - atomic_long_dec_unless_positive(atomic_long_t *v) 1001 + arch_atomic_long_dec_unless_positive(atomic_long_t *v) 1002 1002 { 1003 - return atomic_dec_unless_positive(v); 1003 + return arch_atomic_dec_unless_positive(v); 1004 1004 } 1005 1005 1006 1006 static __always_inline long 1007 - atomic_long_dec_if_positive(atomic_long_t *v) 1007 + arch_atomic_long_dec_if_positive(atomic_long_t *v) 1008 1008 { 1009 - return atomic_dec_if_positive(v); 1009 + return arch_atomic_dec_if_positive(v); 1010 1010 } 1011 1011 1012 1012 #endif /* CONFIG_64BIT */ 1013 1013 #endif /* _LINUX_ATOMIC_LONG_H */ 1014 - // c5552b5d78a0c7584dfd03cba985e78a1a86bbed 1014 + // e8f0e08ff072b74d180eabe2ad001282b38c2c88
+5
scripts/atomic/gen-atomic-instrumented.sh
··· 138 138 gen_proto "${meta}" "${name}" "atomic64" "s64" ${args} 139 139 done 140 140 141 + grep '^[a-z]' "$1" | while read name meta args; do 142 + gen_proto "${meta}" "${name}" "atomic_long" "long" ${args} 143 + done 144 + 145 + 141 146 for xchg in "xchg" "cmpxchg" "cmpxchg64" "try_cmpxchg"; do 142 147 for order in "" "_acquire" "_release" "_relaxed"; do 143 148 gen_xchg "${xchg}${order}" ""
+2 -2
scripts/atomic/gen-atomic-long.sh
··· 47 47 48 48 cat <<EOF 49 49 static __always_inline ${ret} 50 - atomic_long_${name}(${params}) 50 + arch_atomic_long_${name}(${params}) 51 51 { 52 - ${retstmt}${atomic}_${name}(${argscast}); 52 + ${retstmt}arch_${atomic}_${name}(${argscast}); 53 53 } 54 54 55 55 EOF