/*
 * Test the trace buffer to see if all the elements
 * are still sane.
 */
static int __maybe_unused trace_test_buffer(struct trace_buffer *buf, unsigned long *count)
{
	unsigned long flags, cnt = 0;
	int cpu, ret = 0;

	/* Don't allow flipping of max traces now */
	local_irq_save(flags);
	arch_spin_lock(&buf->tr->max_lock);

	cnt = ring_buffer_entries(buf->buffer);

	/*
	 * The trace_test_buffer_cpu runs a while loop to consume all data.
	 * If the calling tracer is broken, and is constantly filling
	 * the buffer, this will run forever, and hard lock the box.
	 * We disable the ring buffer while we do this test to prevent
	 * a hard lock up.
	 */
	tracing_off();
	for_each_possible_cpu(cpu) {
		ret = trace_test_buffer_cpu(buf, cpu);
		if (ret)
			break;
	}
	tracing_on();
	arch_spin_unlock(&buf->tr->max_lock);
	local_irq_restore(flags);

	if (count)
		*count = cnt;

	return ret;
}
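To make the hard-lockup comment above concrete, here is a rough sketch of the shape of a per-CPU consumer like trace_test_buffer_cpu(). It is an illustration, not the selftest's exact code: trace_valid_entry() and the trace_buf_size loop guard are assumptions standing in for whatever per-entry sanity checks the real function performs, while ring_buffer_consume() and ring_buffer_event_data() are the usual kernel/trace APIs of this era.

/*
 * Illustrative sketch only: drain one CPU's ring buffer and check
 * each entry. trace_valid_entry() and the trace_buf_size guard are
 * assumptions, not the selftest's actual sanity checks.
 */
static int trace_test_buffer_cpu(struct trace_buffer *buf, int cpu)
{
	struct ring_buffer_event *event;
	struct trace_entry *entry;
	unsigned int loops = 0;

	/* ring_buffer_consume() returns NULL once this CPU's buffer is empty */
	while ((event = ring_buffer_consume(buf->buffer, cpu, NULL, NULL))) {
		entry = ring_buffer_event_data(event);

		/*
		 * If entries keep appearing past the buffer's capacity,
		 * something is refilling it behind us; bail out rather
		 * than spin forever.
		 */
		if (loops++ > trace_buf_size)
			return -1;

		if (!trace_valid_entry(entry))
			return -1;
	}
	return 0;
}

Because trace_test_buffer() calls tracing_off() before iterating the CPUs, the writer side is quiesced and this consume loop is bounded by whatever the tracer had already written.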
static int trace_test_buffer(struct trace_array *tr, unsigned long *count)
{
	unsigned long flags, cnt = 0;
	int cpu, ret = 0;

	/* Don't allow flipping of max traces now */
	local_irq_save(flags);
	__raw_spin_lock(&ftrace_max_lock);

	cnt = ring_buffer_entries(tr->buffer);

	/* Disable the buffer so the consume loop below cannot run forever */
	tracing_off();
	for_each_possible_cpu(cpu) {
		ret = trace_test_buffer_cpu(tr, cpu);
		if (ret)
			break;
	}
	tracing_on();
	__raw_spin_unlock(&ftrace_max_lock);
	local_irq_restore(flags);

	if (count)
		*count = cnt;

	return ret;
}