/* NB: reg/unreg are called while guarded with the tracepoints_mutex */
extern long hcall_tracepoint_refcount;
#if 0 /* work around buggy use of RCU from dyntick-idle mode */
/*
 * Since the tracing code might execute hcalls we need to guard against
 * recursion. One example of this are spinlocks calling H_YIELD on
 * shared processor partitions.
 *
 * NOTE(review): the whole recursion guard is currently compiled out by
 * the "#if 0" above; the stray "+" diff markers that made the directives
 * invalid have been removed.
 */
static DEFINE_PER_CPU(unsigned int, hcall_trace_depth);
#endif /* #if 0 work around buggy use of RCU from dyntick-idle mode */
void hcall_tracepoint_regfunc(void)
{
/*
 * hcall entry tracepoint hook.  The real implementation is disabled
 * with "#if 0" to work around buggy use of RCU from dyntick-idle mode,
 * so after preprocessing this is an empty function: it touches neither
 * @opcode nor @args.  The stray "+" diff markers that made the
 * "#if 0"/"#endif" directives invalid have been removed.
 */
void __trace_hcall_entry(unsigned long opcode, unsigned long *args)
{
#if 0 /* work around buggy use of RCU from dyntick-idle mode */
	unsigned long flags;
	unsigned int *depth;

	/*
	 * NOTE(review): the middle of this disabled body (irq save,
	 * recursion-depth check, the trace_hcall_entry() call) appears
	 * truncated in this copy — harmless while compiled out, but
	 * restore it before re-enabling.
	 */
out:
	local_irq_restore(flags);
#endif /* #if 0 work around buggy use of RCU from dyntick-idle mode */
}
/*
 * hcall exit tracepoint hook.  Like __trace_hcall_entry() above, the
 * real implementation is disabled with "#if 0" to work around buggy use
 * of RCU from dyntick-idle mode, so after preprocessing this is an
 * empty function: @opcode, @retval and @retbuf are not touched.  The
 * stray "+" diff markers that made the directives invalid have been
 * removed.
 */
void __trace_hcall_exit(long opcode, unsigned long retval,
			unsigned long *retbuf)
{
#if 0 /* work around buggy use of RCU from dyntick-idle mode */
	unsigned long flags;
	unsigned int *depth;

	/*
	 * NOTE(review): the middle of this disabled body (irq save,
	 * recursion-depth check, the trace_hcall_exit() call) appears
	 * truncated in this copy — harmless while compiled out, but
	 * restore it before re-enabling.
	 */
out:
	local_irq_restore(flags);
#endif /* #if 0 work around buggy use of RCU from dyntick-idle mode */
}
#endif