//===-- tsan_update_shadow_word_inl.h ---------------------------*- C++ -*-===//
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// Body of the hottest inner loop.
// If we wrap this body into a function, compilers (both gcc and clang)
// produce slightly less efficient code.
//===----------------------------------------------------------------------===//
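// For context: this header is not a standalone function; the run-time
// textually includes it so the body is inlined into the hot path, once per
// shadow slot. A minimal sketch of that usage (the real caller lives in
// tsan_rtl.cc; the unrolled form below is illustrative, not verbatim):
//
//   int idx = 0;
//   #include "tsan_update_shadow_word_inl.h"
//   idx = 1;
//   #include "tsan_update_shadow_word_inl.h"
//   idx = 2;
//   #include "tsan_update_shadow_word_inl.h"
//   idx = 3;
//   #include "tsan_update_shadow_word_inl.h"
//
// The includer is expected to define thr, shadow_mem, idx, cur, old,
// store_word, kAccessSizeLog, kAccessIsWrite, kIsAtomic and a RACE: label.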
do {
  StatInc(thr, StatShadowProcessed);
  const unsigned kAccessSize = 1 << kAccessSizeLog;
  u64 *sp = &shadow_mem[idx];
  old = LoadShadow(sp);
  if (old.IsZero()) {
    StatInc(thr, StatShadowZero);
    if (store_word)
      StoreIfNotYetStored(sp, &store_word);
    // The above StoreIfNotYetStored could be done unconditionally
    // and it even shows 4% gain on synthetic benchmarks (r4307).
    break;
  }
  // Is the memory access equal to the previous one?
  if (Shadow::Addr0AndSizeAreEqual(cur, old)) {
    StatInc(thr, StatShadowSameSize);
    // Same thread?
    if (Shadow::TidsAreEqual(old, cur)) {
      StatInc(thr, StatShadowSameThread);
      // Overwrite the slot only if the new access is at least as strong
      // (write vs. read, non-atomic vs. atomic) as the stored one.
      if (old.IsRWWeakerOrEqual(kAccessIsWrite, kIsAtomic))
        StoreIfNotYetStored(sp, &store_word);
      break;
    }
    StatInc(thr, StatShadowAnotherThread);
    // The old access happens-before the current one, so there is no race.
    if (HappensBefore(old, thr)) {
      if (old.IsRWWeakerOrEqual(kAccessIsWrite, kIsAtomic))
        StoreIfNotYetStored(sp, &store_word);
      break;
    }
    // Two reads, or two atomic accesses, do not race.
    if (old.IsBothReadsOrAtomic(kAccessIsWrite, kIsAtomic))
      break;
    goto RACE;
  }
  // Do the memory accesses intersect?
  if (Shadow::TwoRangesIntersect(old, cur, kAccessSize)) {
    StatInc(thr, StatShadowIntersect);
    if (Shadow::TidsAreEqual(old, cur)) {
      StatInc(thr, StatShadowSameThread);
      break;
    }
    StatInc(thr, StatShadowAnotherThread);
    if (old.IsBothReadsOrAtomic(kAccessIsWrite, kIsAtomic))
      break;
    if (HappensBefore(old, thr))
      break;
    goto RACE;
  }
  // The accesses do not intersect.
  StatInc(thr, StatShadowNotIntersect);
  break;
} while (0);