about summary refs log tree commit diff stats
path: root/include
diff options
context:
space:
mode:
authorQi Wang <interwq@gwu.edu>2018-04-06 13:45:37 -0700
committerQi Wang <interwq@gmail.com>2018-04-09 16:35:14 -0700
commit2dccf4564016233bd4ef7772b43ec8423b8c44df (patch)
tree22831406db56bd87fe3a764ef39ecf7aeba20369 /include
parent6d02421730e2f2dc6985da699b8e10b3ed4061b6 (diff)
downloadplatform_external_jemalloc_new-2dccf4564016233bd4ef7772b43ec8423b8c44df.tar.gz
platform_external_jemalloc_new-2dccf4564016233bd4ef7772b43ec8423b8c44df.tar.bz2
platform_external_jemalloc_new-2dccf4564016233bd4ef7772b43ec8423b8c44df.zip
Control idump and gdump with prof_active.
Diffstat (limited to 'include')
-rw-r--r--  include/jemalloc/internal/arena_inlines_a.h  2
-rw-r--r--  include/jemalloc/internal/prof_inlines_a.h   11
-rw-r--r--  include/jemalloc/internal/prof_inlines_b.h   11
3 files changed, 12 insertions, 12 deletions
diff --git a/include/jemalloc/internal/arena_inlines_a.h b/include/jemalloc/internal/arena_inlines_a.h
index da587706..9abf7f6a 100644
--- a/include/jemalloc/internal/arena_inlines_a.h
+++ b/include/jemalloc/internal/arena_inlines_a.h
@@ -25,7 +25,7 @@ static inline bool
arena_prof_accum(tsdn_t *tsdn, arena_t *arena, uint64_t accumbytes) {
cassert(config_prof);
- if (likely(prof_interval == 0)) {
+ if (likely(prof_interval == 0 || !prof_active_get_unlocked())) {
return false;
}
diff --git a/include/jemalloc/internal/prof_inlines_a.h b/include/jemalloc/internal/prof_inlines_a.h
index eda6839a..a6efb485 100644
--- a/include/jemalloc/internal/prof_inlines_a.h
+++ b/include/jemalloc/internal/prof_inlines_a.h
@@ -69,4 +69,15 @@ prof_accum_cancel(tsdn_t *tsdn, prof_accum_t *prof_accum, size_t usize) {
#endif
}
+JEMALLOC_ALWAYS_INLINE bool
+prof_active_get_unlocked(void) {
+ /*
+ * Even if opt_prof is true, sampling can be temporarily disabled by
+ * setting prof_active to false. No locking is used when reading
+ * prof_active in the fast path, so there are no guarantees regarding
+ * how long it will take for all threads to notice state changes.
+ */
+ return prof_active;
+}
+
#endif /* JEMALLOC_INTERNAL_PROF_INLINES_A_H */
diff --git a/include/jemalloc/internal/prof_inlines_b.h b/include/jemalloc/internal/prof_inlines_b.h
index d670cb7b..6ff465ad 100644
--- a/include/jemalloc/internal/prof_inlines_b.h
+++ b/include/jemalloc/internal/prof_inlines_b.h
@@ -4,17 +4,6 @@
#include "jemalloc/internal/sz.h"
JEMALLOC_ALWAYS_INLINE bool
-prof_active_get_unlocked(void) {
- /*
- * Even if opt_prof is true, sampling can be temporarily disabled by
- * setting prof_active to false. No locking is used when reading
- * prof_active in the fast path, so there are no guarantees regarding
- * how long it will take for all threads to notice state changes.
- */
- return prof_active;
-}
-
-JEMALLOC_ALWAYS_INLINE bool
prof_gdump_get_unlocked(void) {
/*
* No locking is used when reading prof_gdump_val in the fast path, so