The snapshot buffer belongs to the trace array not the tracer that is
running. The trace array should be the data structure that keeps track
of whether or not the snapshot buffer is allocated, not the tracer
descriptor. Having the trace array keep track of it makes modifications
so much easier.
Signed-off-by: Steven Rostedt <rostedt@goodmis.org>
WARN_ON_ONCE(!irqs_disabled());
WARN_ON_ONCE(!irqs_disabled());
- if (!tr->current_trace->allocated_snapshot) {
+ if (!tr->allocated_snapshot) {
/* Only the nop tracer should hit this when disabling */
WARN_ON_ONCE(tr->current_trace != &nop_trace);
return;
/* Only the nop tracer should hit this when disabling */
WARN_ON_ONCE(tr->current_trace != &nop_trace);
return;
return;
WARN_ON_ONCE(!irqs_disabled());
return;
WARN_ON_ONCE(!irqs_disabled());
- if (WARN_ON_ONCE(!tr->current_trace->allocated_snapshot))
+ if (WARN_ON_ONCE(!tr->allocated_snapshot))
return;
arch_spin_lock(&ftrace_max_lock);
return;
arch_spin_lock(&ftrace_max_lock);
if (ring_buffer_expanded)
ring_buffer_resize(tr->max_buffer.buffer, trace_buf_size,
RING_BUFFER_ALL_CPUS);
if (ring_buffer_expanded)
ring_buffer_resize(tr->max_buffer.buffer, trace_buf_size,
RING_BUFFER_ALL_CPUS);
- type->allocated_snapshot = true;
+ tr->allocated_snapshot = true;
#ifdef CONFIG_TRACER_MAX_TRACE
if (type->use_max_tr) {
#ifdef CONFIG_TRACER_MAX_TRACE
if (type->use_max_tr) {
- type->allocated_snapshot = false;
+ tr->allocated_snapshot = false;
/* Shrink the max buffer again */
if (ring_buffer_expanded)
/* Shrink the max buffer again */
if (ring_buffer_expanded)
static void print_snapshot_help(struct seq_file *m, struct trace_iterator *iter)
{
static void print_snapshot_help(struct seq_file *m, struct trace_iterator *iter)
{
- if (iter->trace->allocated_snapshot)
+ if (iter->tr->allocated_snapshot)
seq_printf(m, "#\n# * Snapshot is allocated *\n#\n");
else
seq_printf(m, "#\n# * Snapshot is freed *\n#\n");
seq_printf(m, "#\n# * Snapshot is allocated *\n#\n");
else
seq_printf(m, "#\n# * Snapshot is freed *\n#\n");
if (tr->current_trace->reset)
tr->current_trace->reset(tr);
if (tr->current_trace->reset)
tr->current_trace->reset(tr);
-#ifdef CONFIG_TRACER_MAX_TRACE
- had_max_tr = tr->current_trace->allocated_snapshot;
-
/* Current trace needs to be nop_trace before synchronize_sched */
tr->current_trace = &nop_trace;
/* Current trace needs to be nop_trace before synchronize_sched */
tr->current_trace = &nop_trace;
+#ifdef CONFIG_TRACER_MAX_TRACE
+ had_max_tr = tr->allocated_snapshot;
+
if (had_max_tr && !t->use_max_tr) {
/*
* We need to make sure that the update_max_tr sees that
if (had_max_tr && !t->use_max_tr) {
/*
* We need to make sure that the update_max_tr sees that
ring_buffer_resize(tr->max_buffer.buffer, 1, RING_BUFFER_ALL_CPUS);
set_buffer_entries(&tr->max_buffer, 1);
tracing_reset_online_cpus(&tr->max_buffer);
ring_buffer_resize(tr->max_buffer.buffer, 1, RING_BUFFER_ALL_CPUS);
set_buffer_entries(&tr->max_buffer, 1);
tracing_reset_online_cpus(&tr->max_buffer);
- tr->current_trace->allocated_snapshot = false;
+ tr->allocated_snapshot = false;
-#else
- tr->current_trace = &nop_trace;
#endif
destroy_trace_option_files(topts);
#endif
destroy_trace_option_files(topts);
RING_BUFFER_ALL_CPUS);
if (ret < 0)
goto out;
RING_BUFFER_ALL_CPUS);
if (ret < 0)
goto out;
- t->allocated_snapshot = true;
+ tr->allocated_snapshot = true;
- if (tr->current_trace->allocated_snapshot) {
+ if (tr->allocated_snapshot) {
/* free spare buffer */
ring_buffer_resize(tr->max_buffer.buffer, 1,
RING_BUFFER_ALL_CPUS);
set_buffer_entries(&tr->max_buffer, 1);
tracing_reset_online_cpus(&tr->max_buffer);
/* free spare buffer */
ring_buffer_resize(tr->max_buffer.buffer, 1,
RING_BUFFER_ALL_CPUS);
set_buffer_entries(&tr->max_buffer, 1);
tracing_reset_online_cpus(&tr->max_buffer);
- tr->current_trace->allocated_snapshot = false;
+ tr->allocated_snapshot = false;
- if (!tr->current_trace->allocated_snapshot) {
+ if (!tr->allocated_snapshot) {
/* allocate spare buffer */
ret = resize_buffer_duplicate_size(&tr->max_buffer,
&tr->trace_buffer, RING_BUFFER_ALL_CPUS);
if (ret < 0)
break;
/* allocate spare buffer */
ret = resize_buffer_duplicate_size(&tr->max_buffer,
&tr->trace_buffer, RING_BUFFER_ALL_CPUS);
if (ret < 0)
break;
- tr->current_trace->allocated_snapshot = true;
+ tr->allocated_snapshot = true;
}
local_irq_disable();
/* Now, we're going to swap */
}
local_irq_disable();
/* Now, we're going to swap */
local_irq_enable();
break;
default:
local_irq_enable();
break;
default:
- if (tr->current_trace->allocated_snapshot) {
+ if (tr->allocated_snapshot) {
if (iter->cpu_file == RING_BUFFER_ALL_CPUS)
tracing_reset_online_cpus(&tr->max_buffer);
else
if (iter->cpu_file == RING_BUFFER_ALL_CPUS)
tracing_reset_online_cpus(&tr->max_buffer);
else
* the trace_buffer so the tracing can continue.
*/
struct trace_buffer max_buffer;
* the trace_buffer so the tracing can continue.
*/
struct trace_buffer max_buffer;
+ bool allocated_snapshot;
#endif
int buffer_disabled;
struct trace_cpu trace_cpu; /* place holder */
#endif
int buffer_disabled;
struct trace_cpu trace_cpu; /* place holder */
bool enabled;
#ifdef CONFIG_TRACER_MAX_TRACE
bool use_max_tr;
bool enabled;
#ifdef CONFIG_TRACER_MAX_TRACE
bool use_max_tr;
- bool allocated_snapshot;