[gcc-6] 183/401: * Revert fix for PR target/55947, causing PR libstdc++/72813. LP: #1610220.
Ximin Luo
infinity0 at debian.org
Wed Apr 5 15:49:02 UTC 2017
This is an automated email from the git hooks/post-receive script.
infinity0 pushed a commit to branch pu/reproducible_builds
in repository gcc-6.
commit 71705b03911e2a67123e687d3687930e6ccecb50
Author: doko <doko at 6ca36cf4-e1d1-0310-8c6f-e303bb2178ca>
Date: Fri Aug 5 12:55:42 2016 +0000
* Revert fix for PR target/55947, causing PR libstdc++/72813. LP: #1610220.
git-svn-id: svn://anonscm.debian.org/gcccvs/branches/sid/gcc-6@8944 6ca36cf4-e1d1-0310-8c6f-e303bb2178ca
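For context, the fix being reverted (PR target/55947) marked the fence functions and the atomic_flag/__atomic_base member functions in <bits/atomic_base.h> with _GLIBCXX_ALWAYS_INLINE so that they are inlined even at -O0. Below is a minimal, self-contained sketch of that pattern; the macro expansion shown matches the usual GCC definition, but the names ALWAYS_INLINE_SKETCH and load_relaxed are illustrative, not libstdc++ source:

// Sketch of the _GLIBCXX_ALWAYS_INLINE pattern the reverted change used.
// In GCC's <bits/atomic_base.h> the real macro expands to roughly
// `inline __attribute__((__always_inline__))`; a hypothetical name is
// used here to avoid redefining a reserved identifier.
#define ALWAYS_INLINE_SKETCH inline __attribute__((__always_inline__))

// With the attribute, the call below is inlined even at -O0; with plain
// `inline` (the state this commit restores) the compiler may instead emit
// an out-of-line definition and a call, the behavior the original fix
// was meant to avoid.
ALWAYS_INLINE_SKETCH int
load_relaxed(const int* p) noexcept
{ return __atomic_load_n(p, __ATOMIC_RELAXED); }

int main()
{
    int x = 42;
    return load_relaxed(&x) == 42 ? 0 : 1;
}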
---
debian/changelog | 1 +
debian/patches/PR55947-revert.diff | 348 +++++++++++++++++++++++++++++++++++++
debian/rules.patch | 1 +
3 files changed, 350 insertions(+)
diff --git a/debian/changelog b/debian/changelog
index 84114b4..50a5b04 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,6 +1,7 @@
gcc-6 (6.1.1-12) UNRELEASED; urgency=medium
* Fix running the libjava testsuite.
+ * Revert fix for PR target/55947, causing PR libstdc++/72813. LP: #1610220.
-- Matthias Klose <doko at debian.org> Fri, 05 Aug 2016 11:45:44 +0200
diff --git a/debian/patches/PR55947-revert.diff b/debian/patches/PR55947-revert.diff
new file mode 100644
index 0000000..a4861a7
--- /dev/null
+++ b/debian/patches/PR55947-revert.diff
@@ -0,0 +1,348 @@
+# DP: Revert fix for PR target/55947, causing PR libstdc++/72813
+
+libstdc++-v3/
+
+2013-05-08 Andi Kleen <ak at linux.intel.com>
+
+ PR target/55947
+ * libstdc++-v3/include/bits/atomic_base.h
+ (_GLIBCXX_ALWAYS_INLINE): Add new macro.
+ (atomic_thread_fence, atomic_signal_fence, test_and_set,
+ clear, store, load, exchange, compare_exchange_weak,
+ compare_exchange_strong, fetch_add, fetch_sub, fetch_and,
+ fetch_or, fetch_xor): Mark _GLIBCXX_ALWAYS_INLINE.
+
+--- a/src/libstdc++-v3/include/bits/atomic_base.h (revision 198733)
++++ b/src/libstdc++-v3/include/bits/atomic_base.h (revision 198731)
+@@ -97,11 +97,11 @@
+ | (__m & __memory_order_modifier_mask));
+ }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ inline void
+ atomic_thread_fence(memory_order __m) noexcept
+ { __atomic_thread_fence(__m); }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ inline void
+ atomic_signal_fence(memory_order __m) noexcept
+ { __atomic_signal_fence(__m); }
+
+@@ -170,19 +170,19 @@
+ : __atomic_flag_base{ _S_init(__i) }
+ { }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ test_and_set(memory_order __m = memory_order_seq_cst) noexcept
+ {
+ return __atomic_test_and_set (&_M_i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+ return __atomic_test_and_set (&_M_i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ void
+ clear(memory_order __m = memory_order_seq_cst) noexcept
+ {
+ memory_order __b = __m & __memory_order_mask;
+@@ -193,7 +193,7 @@
+ __atomic_clear (&_M_i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ void
+ clear(memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+ memory_order __b = __m & __memory_order_mask;
+@@ -363,7 +363,7 @@
+ reinterpret_cast<void *>(-__alignof(_M_i)));
+ }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ void
+ store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
+ {
+ memory_order __b = __m & __memory_order_mask;
+@@ -374,7 +374,7 @@
+ __atomic_store_n(&_M_i, __i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ void
+ store(__int_type __i,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+@@ -386,7 +386,7 @@
+ __atomic_store_n(&_M_i, __i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ load(memory_order __m = memory_order_seq_cst) const noexcept
+ {
+ memory_order __b = __m & __memory_order_mask;
+@@ -396,7 +396,7 @@
+ return __atomic_load_n(&_M_i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ load(memory_order __m = memory_order_seq_cst) const volatile noexcept
+ {
+ memory_order __b = __m & __memory_order_mask;
+@@ -406,7 +406,7 @@
+ return __atomic_load_n(&_M_i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ exchange(__int_type __i,
+ memory_order __m = memory_order_seq_cst) noexcept
+ {
+@@ -414,14 +414,14 @@
+ }
+
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ exchange(__int_type __i,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+ return __atomic_exchange_n(&_M_i, __i, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_weak(__int_type& __i1, __int_type __i2,
+ memory_order __m1, memory_order __m2) noexcept
+ {
+@@ -434,7 +434,7 @@
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_weak(__int_type& __i1, __int_type __i2,
+ memory_order __m1,
+ memory_order __m2) volatile noexcept
+@@ -448,7 +448,7 @@
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_weak(__int_type& __i1, __int_type __i2,
+ memory_order __m = memory_order_seq_cst) noexcept
+ {
+@@ -456,7 +456,7 @@
+ __cmpexch_failure_order(__m));
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_weak(__int_type& __i1, __int_type __i2,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+@@ -464,7 +464,7 @@
+ __cmpexch_failure_order(__m));
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_strong(__int_type& __i1, __int_type __i2,
+ memory_order __m1, memory_order __m2) noexcept
+ {
+@@ -477,7 +477,7 @@
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_strong(__int_type& __i1, __int_type __i2,
+ memory_order __m1,
+ memory_order __m2) volatile noexcept
+@@ -492,7 +492,7 @@
+ return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_strong(__int_type& __i1, __int_type __i2,
+ memory_order __m = memory_order_seq_cst) noexcept
+ {
+@@ -500,7 +500,7 @@
+ __cmpexch_failure_order(__m));
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_strong(__int_type& __i1, __int_type __i2,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+@@ -508,52 +508,52 @@
+ __cmpexch_failure_order(__m));
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_add(__int_type __i,
+ memory_order __m = memory_order_seq_cst) noexcept
+ { return __atomic_fetch_add(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_add(__int_type __i,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ { return __atomic_fetch_add(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_sub(__int_type __i,
+ memory_order __m = memory_order_seq_cst) noexcept
+ { return __atomic_fetch_sub(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_sub(__int_type __i,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ { return __atomic_fetch_sub(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_and(__int_type __i,
+ memory_order __m = memory_order_seq_cst) noexcept
+ { return __atomic_fetch_and(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_and(__int_type __i,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ { return __atomic_fetch_and(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_or(__int_type __i,
+ memory_order __m = memory_order_seq_cst) noexcept
+ { return __atomic_fetch_or(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_or(__int_type __i,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ { return __atomic_fetch_or(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_xor(__int_type __i,
+ memory_order __m = memory_order_seq_cst) noexcept
+ { return __atomic_fetch_xor(&_M_i, __i, __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __int_type
++ __int_type
+ fetch_xor(__int_type __i,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ { return __atomic_fetch_xor(&_M_i, __i, __m); }
+@@ -678,7 +678,7 @@
+ reinterpret_cast<void *>(-__alignof(_M_p)));
+ }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ void
+ store(__pointer_type __p,
+ memory_order __m = memory_order_seq_cst) noexcept
+ {
+@@ -691,7 +691,7 @@
+ __atomic_store_n(&_M_p, __p, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE void
++ void
+ store(__pointer_type __p,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+@@ -703,7 +703,7 @@
+ __atomic_store_n(&_M_p, __p, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ load(memory_order __m = memory_order_seq_cst) const noexcept
+ {
+ memory_order __b = __m & __memory_order_mask;
+@@ -713,7 +713,7 @@
+ return __atomic_load_n(&_M_p, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ load(memory_order __m = memory_order_seq_cst) const volatile noexcept
+ {
+ memory_order __b = __m & __memory_order_mask;
+@@ -723,7 +723,7 @@
+ return __atomic_load_n(&_M_p, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ exchange(__pointer_type __p,
+ memory_order __m = memory_order_seq_cst) noexcept
+ {
+@@ -731,14 +731,14 @@
+ }
+
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ exchange(__pointer_type __p,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ {
+ return __atomic_exchange_n(&_M_p, __p, __m);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
+ memory_order __m1,
+ memory_order __m2) noexcept
+@@ -752,7 +752,7 @@
+ return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE bool
++ bool
+ compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
+ memory_order __m1,
+ memory_order __m2) volatile noexcept
+@@ -767,22 +767,22 @@
+ return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
+ }
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ fetch_add(ptrdiff_t __d,
+ memory_order __m = memory_order_seq_cst) noexcept
+ { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ fetch_add(ptrdiff_t __d,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ { return __atomic_fetch_add(&_M_p, _M_type_size(__d), __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ fetch_sub(ptrdiff_t __d,
+ memory_order __m = memory_order_seq_cst) noexcept
+ { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
+
+- _GLIBCXX_ALWAYS_INLINE __pointer_type
++ __pointer_type
+ fetch_sub(ptrdiff_t __d,
+ memory_order __m = memory_order_seq_cst) volatile noexcept
+ { return __atomic_fetch_sub(&_M_p, _M_type_size(__d), __m); }
diff --git a/debian/rules.patch b/debian/rules.patch
index 3e59b8d..4876a70 100644
--- a/debian/rules.patch
+++ b/debian/rules.patch
@@ -89,6 +89,7 @@ debian_patches += \
vulcan-cpu$(if $(with_linaro_branch),-linaro) \
vulcan-costs \
libjava-mips64el \
+ PR55947-revert \
# this is still needed on powerpc, e.g. firefox and insighttoolkit4 will ftbfs.
ifneq (,$(filter $(DEB_TARGET_ARCH),powerpc))
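For reference, the one-line rules.patch change above is what activates the new diff: debian/rules.patch keeps a debian_patches list, and each entry names a file under debian/patches/ that is applied in list order during the build. A simplified Make sketch of that mechanism follows; the debian_patches variable name comes from the hunk above, but the patch-stamp recipe is illustrative, not the actual gcc-6 packaging code:

# Simplified sketch: every entry in debian_patches names a file
# debian/patches/<name>.diff, applied in list order; adding
# PR55947-revert to the list activates the new revert patch.
debian_patches = \
	libjava-mips64el \
	PR55947-revert

patch-stamp:
	set -e; for p in $(debian_patches); do \
	  patch -p1 < debian/patches/$$p.diff; \
	done
	touch $@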
--