Posenet and Low Pass Filters

I’m working on a project that uses body motion (through Posenet) to perform sound processing (e.g. nose movement pans audio from a sound file). At the moment, I’m trying to use my wrist’s motion to filter the audio on the sound file in the same way that the documentation on the p5.js website shows clicking the mousepad to filter through a low-pass filter (just without the clicking) but running my code seems to break my project. Has anyone ever done this before and can help me figure out how to make it work for my program?

Welcome @karenmsn!

If you have access to the data itself (instead of through an audio channel) you may be able to use some of the code from another forum post: Fluctuating Signals - Filtering

In summary, you can use a digital filter to remove noise from your signal.
You could filter audio as well by using libraries (as you seem to have found).

If you have a work-in-progress that is not… well… working, posting it will bring more help. :slight_smile:

@noahbuddy The code I have so far looks like this! My website doesn’t look pretty yet but what it does so far is pan audio based on nose positioning and then filter a white noise sound when you click on the canvas based on the horizontal position of your wrist. What I want it to do is filter that sound based on your wrist position automatically with movement rather than when you click it (so when your right wrist is detected, the noise is triggered and automatically filtered and then as you move your wrist left and right, you can hear the filtering happening in real-time). Stumped on how to get that from the clickable filtering though so any help would be much appreciated!

// --- Global sketch state ---
let video;   // p5 webcam capture element (hidden; frames drawn onto the canvas)
let poseNet; // ml5 PoseNet model instance
let poses = [];     // NOTE(review): declared but the callback uses its own parameter instead
let skeletons = []; // NOTE(review): never read or written in this sketch
let nose, rightWrist, leftWrist, rightKnee, leftKnee; // latest keypoint positions ({x, y})
let nScore, rwScore, lwScore, rkScore, lkScore;       // confidence scores for those keypoints
let song;   // looping soundtrack, loaded in preload()
let button; // #start DOM button toggling the soundtrack
let fft, noise, fil; // FFT analyser, white-noise source, band-pass filter

// Load audio assets before setup(); p5 delays setup() until these finish.
function preload() {
  soundFormats('mp3', 'ogg'); // try .mp3 first, fall back to .ogg
  song = loadSound('dhol_longer.mp3');
}

// ml5.poseNet ready-callback: confirms the model finished loading.
function modelLoaded() {
  console.log("Model Loaded");
}

// One-time sketch initialisation: canvas, webcam, DOM button, PoseNet,
// and the white-noise → band-pass audio chain.
function setup() {
  // FIX: `cnv` was an implicit global (never declared); it is only used
  // inside setup, so declare it locally.
  const cnv = createCanvas(640, 480);
  video = createCapture(VIDEO);
  video.size(width, height);

  // DOM button toggles the looping soundtrack.
  button = document.getElementById("start");
  button.addEventListener("click", handleStartButtonClick);

  // PoseNet streams pose estimates; getPoses caches the keypoints we use.
  poseNet = ml5.poseNet(video, modelLoaded);
  poseNet.on('pose', getPoses);
  video.hide(); // draw() paints the mirrored frames onto the canvas instead

  // Clicking the canvas starts the noise source; it is detached from the
  // master output and routed through the band-pass filter so wrist
  // position can sweep its frequency.
  cnv.mousePressed(makeNoise);
  fil = new p5.BandPass();
  noise = new p5.Noise();
  noise.disconnect();
  noise.connect(fil);
  fft = new p5.FFT();
}

  function draw() {
    //mirrors image so you're viewing 
    translate(video.width, 0);
    scale(-1, 1);
    image(video, 0, 0, video.width, video.height);
  
    //draws dots on nose, wrists, and knees 
    drawPoints();
  
    //pans audio by following nose movement 
    panning();
  
    filterSound();
  }

// PoseNet 'pose' callback: cache the keypoints this sketch cares about.
// Indices follow the PoseNet layout: 0 = nose, 9/10 = left/right wrist,
// 13/14 = left/right knee.
function getPoses(poses) {
  if (poses.length === 0) {
    return;
  }

  const { keypoints } = poses[0].pose;

  ({ position: nose, score: nScore } = keypoints[0]);
  ({ position: leftWrist, score: lwScore } = keypoints[9]);
  ({ position: rightWrist, score: rwScore } = keypoints[10]);
  ({ position: leftKnee, score: lkScore } = keypoints[13]);
  ({ position: rightKnee, score: rkScore } = keypoints[14]);
}

// Draw a dot on each tracked keypoint PoseNet is confident about:
// red nose, blue wrists, green knees.
function drawPoints() {
  if (nose && nScore > 0.6) {
    stroke(255, 0, 0);
    strokeWeight(15);
    point(nose.x, nose.y);
  }

  if (rightWrist && rwScore > 0.2) {
    stroke(0, 0, 255);
    strokeWeight(15);
    point(rightWrist.x, rightWrist.y);
  }

  if (leftWrist && lwScore > 0.2) {
    stroke(0, 0, 255);
    strokeWeight(15);
    point(leftWrist.x, leftWrist.y);
  }

  // BUG FIX: the original tested `rkScore && lkScore > 0.6`, which only
  // checks that rkScore is truthy — both scores must pass the threshold.
  if (rightKnee && leftKnee && rkScore > 0.6 && lkScore > 0.6) {
    stroke(0, 255, 0);
    strokeWeight(15);
    point(rightKnee.x, rightKnee.y);
    point(leftKnee.x, leftKnee.y);
  }
}

// Toggle the soundtrack loop and update the button label to show the
// action the NEXT click will perform.
function handleStartButtonClick() {
  const playing = song.isLooping();
  button.innerText = playing ? "Start Sound" : "Stop Sound";
  if (playing) {
    song.stop();
  } else {
    song.loop();
  }
}

// Map the nose's horizontal position onto the stereo field while the
// soundtrack loops (left edge = 1.0, right edge = -1.0, which matches
// the mirrored video).
function panning() {
  if (!song.isLooping() || !nose) {
    return;
  }
  const panAmount = map(nose.x, 0, width, 1.0, -1.0);
  song.pan(panAmount);
}

// Canvas mousePressed handler: start the white-noise source and fade
// its amplitude up to 1.0 over 0.2 s.
function makeNoise() {
  noise.start();
  noise.amp(1.0, 0.2);
}

// p5 global mouseReleased handler: fade the noise to silence over 0.2 s
// (the source keeps running; only its amplitude drops to 0).
function mouseReleased() {
  noise.amp(0, 0.2);
}

// Sweep the band-pass filter's centre frequency with the right wrist's
// horizontal position (left edge ≈ 20 Hz, right edge ≈ 10 kHz).
function filterSound() {
  // Guard clause: nothing to do until PoseNet has seen a right wrist.
  // (The original's redundant `else if (rightWrist)` was collapsed.)
  if (!rightWrist) {
    return;
  }
  let freq = map(rightWrist.x, 0, width, 20, 10000);
  freq = constrain(freq, 0, 22050); // clamp within the audible/Nyquist range
  fil.freq(freq);
  fil.res(50); // narrow band so the sweep is clearly audible
}

Look for changes to the wrist state (active/inactive) in filterSound().

Then on a change of input, you can call makeNoise() or mouseReleased() directly to start or silence the sound.
Alternatively, start the sound right away and use both x and y properties to control frequency and volume.

That’s helpful thanks! I was able to get the wrist motion working with the white noise! Now I actually have another question about using filters with sound files. My audio starts with a button and then loops like you see in my code, but I want to apply the filter to the looping audio so you’ll see the frequency change as you move your wrist. However, when I try to disconnect my song and connect it to the filter, it keeps freezing my video. Any thoughts?

// --- Global sketch state ---
let video;   // p5 webcam capture element (hidden; frames drawn onto the canvas)
let poseNet; // ml5 PoseNet model instance
let poses = [];     // NOTE(review): declared but the callback uses its own parameter instead
let skeletons = []; // NOTE(review): never read or written in this sketch
let nose, rightWrist, leftWrist, rightKnee, leftKnee; // latest keypoint positions ({x, y})
let nScore, rwScore, lwScore, rkScore, lkScore;       // confidence scores for those keypoints
let song, song2; // soundtracks; NOTE(review): song2 is loaded but never played
let button;      // #start DOM button toggling the soundtrack
let fft, noise, fil; // FFT analyser, (disabled) noise source, band-pass filter

// Load audio assets before setup(); p5 delays setup() until these finish.
function preload() {
  soundFormats('mp3', 'ogg'); // try .mp3 first, fall back to .ogg
  song = loadSound('dhol_longer.mp3');
  song2 = loadSound('dhol_02.mp3'); // FIX: added the missing semicolon
  // NOTE(review): song2 is loaded but never used anywhere in this sketch.
}

// ml5.poseNet ready-callback: confirms the model finished loading.
function modelLoaded() {
  console.log("Model Loaded");
}

// One-time sketch initialisation: canvas, webcam, DOM button, PoseNet,
// and routing the soundtrack through the band-pass filter.
function setup() {
  // FIX: `cnv` was an implicit global (never declared); it is only used
  // inside setup, so declare it locally.
  const cnv = createCanvas(640, 480);
  video = createCapture(VIDEO);
  video.size(width, height);

  // DOM button toggles the looping soundtrack.
  button = document.getElementById("start");
  button.addEventListener("click", handleStartButtonClick);

  // PoseNet streams pose estimates; getPoses caches the keypoints we use.
  poseNet = ml5.poseNet(video, modelLoaded);
  poseNet.on('pose', getPoses);
  video.hide();

  cnv.mousePressed(makeNoise);
  fil = new p5.BandPass();
  //noise = new p5.Noise();
  // Detach the song from the master output and route it through the
  // band-pass filter so wrist position can sweep its frequency.
  // NOTE(review): panning() later calls song.pan() every frame, which
  // re-patches the source through a panner and may conflict with this
  // manual routing — a suspected contributor to the reported freeze;
  // confirm against the p5.sound connect/pan documentation.
  song.disconnect();
  song.connect(fil);
  fft = new p5.FFT();
}

  function draw() {
    //mirrors image so you're viewing 
    translate(video.width, 0);
    scale(-1, 1);
    image(video, 0, 0, video.width, video.height);
  
    //draws dots on nose, wrists, and knees 
    drawPoints();
  
    //pans audio by following nose movement 
    panning();
  
    if(rightWrist && song.isLooping()) {
      filterSound();
      makeNoise();
    } 
  }

// PoseNet 'pose' callback: cache the keypoints this sketch cares about.
// Indices follow the PoseNet layout: 0 = nose, 9/10 = left/right wrist,
// 13/14 = left/right knee.
function getPoses(poses) {
  if (poses.length > 0) {
    const kp = poses[0].pose.keypoints;

    nose = kp[0].position;
    nScore = kp[0].score;

    leftWrist = kp[9].position;
    lwScore = kp[9].score;

    rightWrist = kp[10].position;
    rwScore = kp[10].score;

    leftKnee = kp[13].position;
    lkScore = kp[13].score;

    rightKnee = kp[14].position;
    rkScore = kp[14].score;
  }
}

// Draw a dot on each tracked keypoint PoseNet is confident about:
// red nose, blue wrists, green knees.
function drawPoints() {
  if (nose && nScore > 0.6) {
    stroke(255, 0, 0);
    strokeWeight(15);
    point(nose.x, nose.y);
  }

  if (rightWrist && rwScore > 0.2) {
    stroke(0, 0, 255);
    strokeWeight(15);
    point(rightWrist.x, rightWrist.y);
  }

  if (leftWrist && lwScore > 0.2) {
    stroke(0, 0, 255);
    strokeWeight(15);
    point(leftWrist.x, leftWrist.y);
  }

  // BUG FIX: the original tested `rkScore && lkScore > 0.6`, which only
  // checks that rkScore is truthy — both scores must pass the threshold.
  if (rightKnee && leftKnee && rkScore > 0.6 && lkScore > 0.6) {
    stroke(0, 255, 0);
    strokeWeight(15);
    point(rightKnee.x, rightKnee.y);
    point(leftKnee.x, leftKnee.y);
  }
}

// Toggle the soundtrack loop and update the button label to show the
// action the NEXT click will perform.
function handleStartButtonClick() {
  if (song.isLooping()) {
    song.stop();
    button.innerText = "Start Sound";
  } else {
    song.loop();
    button.innerText = "Stop Sound";
  }
}

// Map the nose's horizontal position onto the stereo field while the
// soundtrack loops (left edge = 1.0, right edge = -1.0, which matches
// the mirrored video).
function panning() {
  if (!song.isLooping() || !nose) {
    return;
  }
  const panAmount = map(nose.x, 0, width, 1.0, -1.0);
  song.pan(panAmount);
  // (A commented-out experiment panning the noise source with the wrist
  // was removed; note it mapped nose.x rather than the wrist position.)
}

// Canvas mousePressed handler (also called from draw() each frame):
// start the song and fade its amplitude to 1.0 over 0.2 s.
// NOTE(review): song.start() on an already-playing song restarts
// playback — calling this every frame retriggers it constantly; verify
// against the p5.SoundFile documentation.
function makeNoise() {
  song.start();
  song.amp(1.0, 0.2);
}

// Sweep the band-pass filter's centre frequency with the right wrist's
// horizontal position (left edge ≈ 20 Hz, right edge ≈ 10 kHz).
function filterSound() {
  // BUG FIX: the original dereferenced rightWrist.x FIRST and only
  // checked `if (!rightWrist)` at the end — the guard must come first
  // to avoid a TypeError when no wrist has been detected yet.
  if (!rightWrist) {
    return;
  }
  let freq = map(rightWrist.x, 0, width, 20, 10000);
  freq = constrain(freq, 0, 22050); // original note: consider raising the floor to 80
  fil.freq(freq);
  fil.res(50);
}

Since song is created as a new instance, “song.disconnect();” in setup should not be necessary.
Probably safe unless the browser is keeping references.

If there is some underlying conflict, maybe load/configure the audio before the video.

Are there any errors in the console when the video stops?