In [1]:
%matplotlib inline
import numpy, scipy, matplotlib.pyplot as plt, IPython.display as ipd
import librosa, librosa.display
import stanford_mir; stanford_mir.init()
from ipywidgets import interact

Beat Tracking

librosa.beat.beat_track

Load an audio file:

In [2]:
x, sr = librosa.load('audio/58bpm.wav')
ipd.Audio(x, rate=sr)
Out[2]:

Use librosa.beat.beat_track to estimate the beat locations and the global tempo:

In [3]:
tempo, beat_times = librosa.beat.beat_track(y=x, sr=sr, start_bpm=60, units='time')
print(tempo)
print(beat_times)
58.72691761363637
[1.06811791 2.11301587 3.11147392 4.17959184 5.20126984 6.2461678
 7.2678458 ]
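
The units='time' argument asks for beat positions in seconds. By default, librosa.beat.beat_track returns beat positions as frame indices, which can be converted to seconds with librosa.frames_to_time. A minimal sketch using the same signal and parameters as above:

tempo, beat_frames = librosa.beat.beat_track(y=x, sr=sr, start_bpm=60)  # beats as frame indices (default units)
beat_times_from_frames = librosa.frames_to_time(beat_frames, sr=sr)     # frame indices -> seconds
print(beat_times_from_frames)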

Plot the beat locations over the waveform:

In [4]:
plt.figure(figsize=(14, 5))
librosa.display.waveplot(x, alpha=0.6)
plt.vlines(beat_times, -1, 1, color='r')
plt.ylim(-1, 1)
Out[4]:
(-1, 1)
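
Under the hood, the beat tracker works on an onset strength envelope (a novelty curve), picking beats that line up with onsets while keeping a steady tempo. As a rough check, assuming the default hop length of 512 samples, we can plot the estimated beats over that envelope:

onset_env = librosa.onset.onset_strength(y=x, sr=sr)              # novelty curve used by the beat tracker
t = librosa.frames_to_time(numpy.arange(len(onset_env)), sr=sr)   # frame index -> seconds
plt.figure(figsize=(14, 5))
plt.plot(t, onset_env, label='onset strength')
plt.vlines(beat_times, 0, onset_env.max(), color='r', label='beats')
plt.xlabel('Time (seconds)')
plt.legend()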

Plot a histogram of the intervals between adjacent beats:

In [5]:
beat_times_diff = numpy.diff(beat_times)
plt.figure(figsize=(14, 5))
plt.hist(beat_times_diff, bins=50, range=(0,4))
plt.xlabel('Beat Length (seconds)')
plt.ylabel('Count')
Out[5]:
Text(0,0.5,'Count')
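
With a tempo near 58.7 BPM, the inter-beat intervals should cluster around 60/58.7, i.e. about 1.02 seconds. Conversely, the median interval gives a simple alternative tempo estimate, which should roughly agree with the value returned by beat_track:

print(60.0 / numpy.median(beat_times_diff))   # median inter-beat interval converted to BPM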

From the plot alone, it's difficult to tell how accurate the estimated beats are. Let's listen to the estimated beats as a click track mixed with the original signal:

In [6]:
clicks = librosa.clicks(times=beat_times, sr=sr, length=len(x))
ipd.Audio(x + clicks, rate=sr)
Out[6]:
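
If the clicks are hard to hear against the music, librosa.clicks also accepts click_freq and click_duration parameters; the values below are just examples to make the clicks stand out:

clicks = librosa.clicks(times=beat_times, sr=sr, length=len(x),
                        click_freq=1500.0, click_duration=0.15)   # higher-pitched, longer clicks (example values)
ipd.Audio(x + clicks, rate=sr)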