Control Ableton Live from Processing with a Mac

Hi,

I saw a method to use Processing as MIDI controller to Ableton here
http://legacy.andrewflo.com/work/liveosc-processing-bridge/

I’m trying to use it to send MIDI values, but I can’t send data or get Ableton to recognize the program.

Thanks for helping.
The program is below

// LiveOSC_Processing.pde
// made by Andrew Flores
// 2012

import oscP5.*;
import netP5.*;
import controlP5.*;

// Declarations (libraries: oscP5 for OSC I/O, controlP5 for the GUI)
OscP5 oscP5;                 // sends/receives OSC messages
ControlP5 controlP5;         // draws and manages the on-screen controls
NetAddress myRemoteLocation; // LiveOSC's listening address (localhost:9000)

// Global variables
int columns;           // NOTE(review): used as a loop index by setupStops — global, not local
int rows;              // NOTE(review): used as a loop index by setupScenes — global, not local
int NUM_TRACKS = 8;    // Current max TOTAL clips: 89
int NUM_CLIPS = 6;     // Adjust IDs in SetupDraw for more total clips
String name;           // scratch name buffer (shadowed by locals in oscEvent)

// Spacing between controls and size (all values in pixels)
int spacingX = 62;
int spacingY = 52;
int clipSize = 38;

// Controller ID ranges — controlEvent() routes widget events by these IDs,
// so the setup* functions must assign IDs from the matching range.
int MIN_ID_CLIPS = 0;        // Clips 0-89  
int MAX_ID_CLIPS = 89;       //
int MIN_ID_SCENES = 90;      // Scenes 90-99
int MAX_ID_SCENES = 99;      //
int MIN_ID_STOPS = 100;      // Stops 100-108
int MAX_ID_STOPS = 108;      //
int ID_STOP_ALL = 109;       // Stop All 109
int MIN_ID_FADERS = 110;     // Faders 110-118
int MAX_ID_FADERS = 118;     //
int ID_MASTER_FADER = 119;   // Master Fader 119
int MIN_ID_MUTES = 120;      // Mutes 120-129
int MAX_ID_MUTES = 129;
int ID_PLAY = 200;           // Play 200
int ID_STOP = 201;           // Stop 201

// Widget handles, created by the setup* functions in setup().
Bang[][] myClips;    // [track][clip] launch grid
Bang[] myStops;      // per-track stop buttons
Bang[] myScenes;     // scene-launch buttons
Slider[] myFaders;   // per-track volume faders
Slider masterFader;  // master volume fader
Toggle[] myMutes;    // per-track mute toggles
Bang play;           // transport: play
Bang stop;           // transport: stop
Bang stopAll;        // stop all tracks

// Setup and draw clips


// Setup loops
// Runs once at sketch start: opens the OSC link to LiveOSC and builds the UI.
void setup()
{
  // Window properties
  size(800,700);
  frameRate(25);
  
  // Setup OSC: listen on 9001 (LiveOSC sends here) and target LiveOSC on 9000.
  oscP5 = new OscP5(this,9001);
  myRemoteLocation = new NetAddress("localhost",9000);
  
  // Setup ControlP5
  controlP5 = new ControlP5(this);
  
  // Call setup functions (each builds one group of widgets and assigns IDs)
  setupClips(NUM_TRACKS, NUM_CLIPS);
  setupScenes(NUM_CLIPS);
  setupFaders(NUM_TRACKS);
  setupStops(NUM_TRACKS);
  setupMutes(NUM_TRACKS);
  setupTransport();
  
  // OSC querying
}
// Handles incoming OSC messages from LiveOSC (we listen on port 9001).
// Updates clip colors, fader labels, and clip labels to mirror Live's state.
void oscEvent(OscMessage theOscMessage)
{
  String msg = theOscMessage.addrPattern();
  String args = theOscMessage.typetag();

  int track = 0;
  int clip = 0;
  int armed = 0;
  int clipStatus = 0;
  String name;
  float beats = 0;

  // /live/track/info reply: track, armed, then (clip, status, length) groups.
  if (msg.equals("/live/track/info"))
  {
    track = theOscMessage.get(0).intValue();
    armed = theOscMessage.get(1).intValue();
    clip = theOscMessage.get(2).intValue();
    clipStatus = theOscMessage.get(3).intValue();
    beats = theOscMessage.get(4).floatValue();
  }

  // /live/clip/info reply: track, clip, status.
  if (msg.equals("/live/clip/info"))
  {
    print(msg + "  " + args);

    track = theOscMessage.get(0).intValue();
    clip = theOscMessage.get(1).intValue();
    clipStatus = theOscMessage.get(2).intValue();

    // FIX: strict '<' bounds — myClips is [NUM_TRACKS][NUM_CLIPS], so the
    // original '<=' checks allowed an ArrayIndexOutOfBoundsException when
    // Live reported track == NUM_TRACKS or clip == NUM_CLIPS.
    if (track < NUM_TRACKS && clip < NUM_CLIPS && clipStatus > 0)
    {
      // Highlight a playing/filled clip in red.
      myClips[track][clip].getColor().setForeground(0xffff0000);
    }
  }

  // /live/name/track reply: track index and its display name.
  if (msg.equals("/live/name/track"))
  {
    track = theOscMessage.get(0).intValue();
    name = theOscMessage.get(1).stringValue();

    if (track < NUM_TRACKS)
    {
      myFaders[track].setLabel("" + name);
    }
  }

  // /live/name/clip reply: track, clip, clip name.
  if (msg.equals("/live/name/clip"))
  {
    track = theOscMessage.get(0).intValue();
    clip = theOscMessage.get(1).intValue();
    name = theOscMessage.get(2).stringValue();

    // FIX: the original 'else' branch indexed myClips with the very
    // out-of-range track/clip that had just failed the bounds check,
    // throwing ArrayIndexOutOfBoundsException. Out-of-range replies
    // are now simply ignored.
    if (track < NUM_TRACKS && clip < NUM_CLIPS)
    {
      myClips[track][clip].setLabel("" + name);
    }
  }
}

// Draw loop
void draw()
{
  // Clear frame to black each frame; ControlP5 renders all widgets on top.
  background(0);
}
// Builds the track-by-clip grid of launch buttons.
// IDs are assigned row-major (id = clip*numCols + track), which is exactly
// what controlEvent() decodes with id % NUM_TRACKS.
void setupClips(int numCols, int numRows)
{
  int clipNumber = 0;

  myClips = new Bang[numCols][numRows];
  for (int r = 0; r < numRows; r++)
  {
    for (int c = 0; c < numCols; c++)
    {
      myClips[c][r] = controlP5.addBang("C" + clipNumber, ((c+1)*spacingX)-clipSize, ((r+1)*spacingY)-clipSize, clipSize, clipSize);
      myClips[c][r].setId(clipNumber);
      myClips[c][r].setLabel("");
      myClips[c][r].align(ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER);
      // myClips[c][r].getColor().setForeground(0xffff0000);
      // myClips[c][r].setLabelVisible(false);
      // println(myClips[c][r].getColor());

      clipNumber++;
    }
  }
  
  // Query names — LiveOSC's replies arrive asynchronously in oscEvent().
  OscMessage myMessage = new OscMessage("/live/name/clip");
  oscP5.send(myMessage, myRemoteLocation);
  
  OscMessage myMessage2 = new OscMessage("/live/track/info");
  oscP5.send(myMessage2, myRemoteLocation);
  
  // NOTE(review): this disabled per-clip query loop originally incremented
  // t instead of c in the inner loop and would never have terminated;
  // corrected here in case it is ever re-enabled.
  /*for (int t = 0; t < NUM_TRACKS; t++)
  {
    for (int c = 0; c < NUM_CLIPS; c++)
    {
      OscMessage myMessage3 = new OscMessage("/live/clip/info");
      myMessage3.add(t);
      myMessage3.add(c);
      oscP5.send(myMessage3, myRemoteLocation);
    }
  }*/
}


// Setup and draw stops
// Creates one per-track "Stop" bang plus a single "Stop All" bang.
void setupStops(int numCols)
{
  myStops = new Bang[numCols];
  // FIX: use a local loop index instead of mutating the shared global
  // `columns`, which no other code reads — globals as loop counters are
  // an accident waiting to happen.
  for (int c = 0; c < numCols; c++)
  {
    myStops[c] = controlP5.addBang("Stop" + c, ((c+1)*spacingX)-clipSize, ((NUM_CLIPS+1)*spacingY)-clipSize, clipSize, clipSize);
    myStops[c].setLabel("Stop" + c);
    myStops[c].setId(MIN_ID_STOPS + c);
    myStops[c].align(ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER);
  }

  // Draw stop all (rightmost column)
  stopAll = controlP5.addBang("StopAll", 740, ((NUM_CLIPS+1)*spacingY)-clipSize, clipSize, clipSize);
  stopAll.setLabel("Stop All");
  stopAll.setId(ID_STOP_ALL);
  stopAll.align(ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER);
}


// Setup and draw scenes
// One scene-launch bang per row, in the rightmost column.
void setupScenes(int numRows)
{
  myScenes = new Bang[numRows];

  // FIX: use a local loop index instead of mutating the shared global
  // `rows`, which no other code reads.
  for (int r = 0; r < numRows; r++)
  {
    myScenes[r] = controlP5.addBang("Scene" + r, 740, ((r+1)*spacingY)-clipSize, clipSize, clipSize);
    myScenes[r].setLabel("Scene" + r);
    myScenes[r].setId(MIN_ID_SCENES + r);
    myScenes[r].align(ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER);
  }
}


// Setup and draw Faders
// Creates one volume slider per track plus the master fader, then asks
// LiveOSC for track names so oscEvent() can fill in the labels.
void setupFaders(int numFaders)
{
  myFaders = new Slider[numFaders];
  for (int i = 0; i < numFaders; i++)
  {
    myFaders[i] = controlP5.addSlider("Fader" + i, 0, 1, 100, spacingX*(1+i)-clipSize, 400, clipSize, 150);
    myFaders[i].setValue(0.84);
    // FIX: use the named constant instead of the magic number 110 so the
    // IDs stay in sync with the range checked in controlEvent().
    myFaders[i].setId(MIN_ID_FADERS + i);
  }

  // Master fader (rightmost column)
  masterFader = controlP5.addSlider("masterFader", 0, 1, 100, 740, 400, clipSize, 150);
  masterFader.setLabel("Master");
  masterFader.setValue(0.84);
  masterFader.setId(ID_MASTER_FADER);  // was the magic number 119

  // Query names — replies handled asynchronously in oscEvent().
  OscMessage myMessage = new OscMessage("/live/name/track");
  oscP5.send(myMessage, myRemoteLocation);
}


// Setup and draw Mutes
// One mute toggle per track; toggling one sends /live/mute via controlEvent().
void setupMutes(int numTracks)
{
  myMutes = new Toggle[numTracks];

  int trackIndex = 0;
  while (trackIndex < numTracks)
  {
    Toggle mute = controlP5.addToggle("Mute" + trackIndex, false, spacingX*(trackIndex+1)-clipSize, 575, clipSize, clipSize);
    mute.setId(MIN_ID_MUTES + trackIndex);
    mute.align(ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER);
    myMutes[trackIndex] = mute;
    trackIndex++;
  }
}

// Setup and draw Transport
// Two bangs, Play and Stop, routed by their IDs in controlEvent().
void setupTransport()
{
  int transportY = 575;

  play = controlP5.addBang("Play", 610, transportY, clipSize*2, clipSize);
  play.setId(ID_PLAY);
  play.align(ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER);

  stop = controlP5.addBang("Stop", 610 + clipSize + 55, transportY, clipSize*2, clipSize);
  stop.setId(ID_STOP);
  stop.align(ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER,ControlP5.CENTER);
}
// Dispatches every ControlP5 widget event to the matching LiveOSC address.
// Routing is purely by the numeric ID ranges assigned in the setup* functions;
// the ranges are disjoint, so at most one branch fires per event.
void controlEvent(ControlEvent theEvent)
{
  int id = theEvent.getController().getId();
  float value = theEvent.getController().getValue();

  if (id >= MIN_ID_CLIPS && id <= MAX_ID_CLIPS)
  {
    // Clip grid: IDs were assigned row-major, so decode track and clip back.
    int trackNum = id % NUM_TRACKS;
    int clipNum = (id - trackNum) / NUM_TRACKS;

    println("Track: " + trackNum + "  Clip: " + clipNum);

    OscMessage out = new OscMessage("/live/play/clipslot");
    out.add(trackNum);
    out.add(clipNum);
    oscP5.send(out, myRemoteLocation);
  }
  else if (id >= MIN_ID_SCENES && id <= MAX_ID_SCENES)
  {
    // Launch a whole scene.
    OscMessage out = new OscMessage("/live/play/scene");
    out.add(id - MIN_ID_SCENES);
    oscP5.send(out, myRemoteLocation);
  }
  else if (id >= MIN_ID_FADERS && id <= MAX_ID_FADERS)
  {
    // Per-track volume: track index plus the slider's current value.
    OscMessage out = new OscMessage("/live/volume");
    out.add(id - MIN_ID_FADERS);
    out.add(value);
    oscP5.send(out, myRemoteLocation);
  }
  else if (id == ID_MASTER_FADER)
  {
    // Master volume takes only the value.
    OscMessage out = new OscMessage("/live/master/volume");
    out.add(value);
    oscP5.send(out, myRemoteLocation);
  }
  else if (id >= MIN_ID_STOPS && id <= MAX_ID_STOPS)
  {
    // Stop a single track.
    OscMessage out = new OscMessage("/live/stop/track");
    out.add(id - MIN_ID_STOPS);
    oscP5.send(out, myRemoteLocation);
  }
  else if (id == ID_STOP_ALL)
  {
    // No single stop-all address — send one stop message per track.
    for (int t = 0; t < NUM_TRACKS; t++)
    {
      OscMessage out = new OscMessage("/live/stop/track");
      out.add(t);
      oscP5.send(out, myRemoteLocation);
    }
  }
  else if (id >= MIN_ID_MUTES && id <= MAX_ID_MUTES)
  {
    // Mute toggle: track index plus the toggle's value (0 or 1).
    OscMessage out = new OscMessage("/live/mute");
    out.add(id - MIN_ID_MUTES);
    out.add(value);
    oscP5.send(out, myRemoteLocation);
  }
  else if (id == ID_PLAY)
  {
    oscP5.send(new OscMessage("/live/play"), myRemoteLocation);
  }
  else if (id == ID_STOP)
  {
    oscP5.send(new OscMessage("/live/stop"), myRemoteLocation);
  }
}

Hi,
I think you need to have the :
MIDI remote script for communication with Ableton Live over OSC “inside” Ableton.


Just to be clear, you are not sending any MIDI data from processing… but OSC that ableton can’t understand without the proper script.

best
R_color

Hi,
I downloaded the last version of LiveOSC.
Ableton Live recognize the control surface LiveOsc2 but I don’t have any input or output appearing on the two column in Preferences menu of Ableton.
But if i use the software OSCulator to check midi value which come in the localhost 9001, I can see data from Ableton Live and Processing appear well in this software (Osculator).
Moreover, when I’m using this software, I can see a MIDI input and output port at the bottom of the Preferences menu of Ableton.
So, I hope it’s just a misunderstanding about the right way to make the three programs communicate with each other.
Maybe I have to open Ableton Live first, then Osculator and Processing or something else…???
I put a screenshot

Thanks for your advice…

hi,
did you follow these steps?

Installation

  1. If you are running OSX 10.5.8 skip this step . Otherwise you must install Python 2.5.1
  2. Unzip the latest release which should give you a folder called trunk containing a folder called LiveOSC. The LiveOSC folder should be moved to Ableton’s MIDI Remote Scripts folder.
  • On Windows this is located in \path\to\ableton\Live x.x.x\Resources\MIDI\Remote Scripts
  • On OSX this is located in /path/to/ableton/Live.app -> Right click -> show package contents. Navigate to contents/app-resources/MIDI Remote Scripts
  1. In ableton goto Preferences, Midi Settings. Select LiveOSC from the drop down list.
  2. LiveOSC accepts packets on port 9000 and sends data on port 9001
  3. LiveOSC comes with an Ableton Device Rack. If you place this as the first device on your master channel you can toggle the playing position and meter listeners on and off

do you have python installed and so on ?
https://livecontrol.q3f.org/ableton-liveapi/liveosc/#Installation

Why do you want to use osculator?

to be honest I never used Processing to send OSC to Ableton, but It seems possible.

Let me know if it works.

best

ps: a good alternative could be to use directly M4L

So.
It really doesn’t want to receive data. Maybe it’s a problem of ports. I don’t know and i’m a bit tired. It’s a pity. So I have to go to pass through Max for Live. But it’s quite expensive. I hope there will be good patch to control sample speed for example.
Is it easy to send data from Processing to Max for Live and control a sample?
Also, is it easy to output the sound of M4L on a track of Ableton Live?
Thanks.

Maybe it’s a problem of ports

I think it’s a problem related to the script.

I hope there will be good patch to control sample speed for example.

google can help :wink: for instance : http://maxforlive.com/

Is it easy to send data from Processing to Max for Live and control a sample?

mmm if you just want to do that, maybe you don’t need anything else than processing.

http://code.compartmental.net/minim/audioplayer_method_play.html

you can do it inside processing, using MINIM.

Then it’s it easy to output the sound of M4L on a track of Abelton Live?

yes, it is, but you need to be familiar with M4L

anyway, you made me curious, what are you trying to do ?
Maybe I can tell you the simplest way to do it.

best

Hi,

the script =the program at the top of the post?

My project is an interactive installation

I have made with an engineer a machine with an Arduino card that can control the speed and the direction of 5 motors with a 2-bladed propeller attached on each one.
When I vary the speed by touching a blade (blocking it a while), motor is setting itself at a lower speed and if I do it again the motor changes its way. Also I can accelerate the motor by pushing the blade and the motor is setting himself at an upper speed.

I would like to be able to use this machine as a controller of sample player, so that these different speed and ways of rotation control the rate of sample speed and the way of playing (backward/forward).
But I have set only 3 different speed in the two ways. So I will have to precise the speed of samples ratio with a cursor within Processing. ( OscP5 library ?)

I didn’t see an example in the Minim library that can set the rate of a sample player, nor the playback direction.

Finally I would like to control 5 tracks or sample a bit as a DJ !!!

I didn’t see example in MINIM library that can setting the rate of a sample player neither the way.

there is :slight_smile:

change the rate:

  // change the rate control value based on mouse position
  float rate = map(mouseX, 0, width, 0.0f, 3.f);
  
  rateControl.value.setLastValue(rate);
/**
  This is an example of how to use a TickRate UGen to control 
  the tick rate of another UGen, in this case a FilePlayer. <br/>
  A TickRate will tick the UGen patched to it at a rate relative 
  to the normal tick rate. So a TickRate with a value of 1 
  will simply pass the audio. A TickRate with a value of 2 
  will tick the incoming UGen twice as fast as normal. 
  If the value of TickRate is ever set to 0 or lower, 
  it will simply generate silence and not tick its incoming UGen at all. 
  This is because there isn't a way to tell a UGen to tick backwards.
  <p>
  Slide the mouse left and right in the window 
  to control the playback rate of the loop.
  <br/>
  Hold 'i' to make TickRate interpolate between actual sample values 
  (this will remove the "crunch" when at rates less than 1).
  <p>
  For more information about Minim and additional features, 
  visit http://code.compartmental.net/minim/
  <p>
  author: Damien Di Fede
*/

import ddf.minim.*;
import ddf.minim.spi.*; // for AudioRecordingStream
import ddf.minim.ugens.*;

// declare everything we need to play our file and control the playback rate
Minim minim;             // main Minim entry point; loads audio
TickRate rateControl;    // UGen that speeds up / slows down whatever is patched through it
FilePlayer filePlayer;   // streams the audio file as a UGen
AudioOutput out;         // the sound-card line out everything is patched into

// you can use your own file by putting it in the data directory of this sketch
// and changing the value assigned to fileName here.
String fileName = "again_loop.aif";

// Runs once: loads the file, builds the UGen chain file -> TickRate -> line out.
void setup()
{
  // setup the size of the app
  size(640, 200);
  
  // create our Minim object for loading audio
  minim = new Minim(this);
                               
  // this opens the file and puts it in the "play" state.                           
  filePlayer = new FilePlayer( minim.loadFileStream(fileName) );
  // and then we'll tell the recording to loop indefinitely
  filePlayer.loop();
  
  // this creates a TickRate UGen with the default playback speed of 1.
  // ie, it will sound as if the file is patched directly to the output
  rateControl = new TickRate(1.f);
  
  // get a line out from Minim. It's important that the file is the same audio format 
  // as our output (i.e. same sample rate, number of channels, etc).
  // NOTE(review): a format mismatch (e.g. a mono file patched into the default
  // stereo line out) is a likely cause of the reported
  // ArrayIndexOutOfBoundsException on the patch() line — confirm the file format.
  out = minim.getLineOut();
  
  // patch the file player through the TickRate to the output.
  filePlayer.patch(rateControl).patch(out);
                        
}

// keyPressed is called whenever a key on the keyboard is pressed.
// Holding 'i' turns sample interpolation on (smooth, record-player-like sound).
void keyPressed()
{
  if ( key != 'i' && key != 'I' )
  {
    return;
  }
  rateControl.setInterpolation( true );
}

// Releasing 'i' turns interpolation back off (sound becomes "crunchy" below rate 1).
void keyReleased()
{
  if ( key != 'i' && key != 'I' )
  {
    return;
  }
  rateControl.setInterpolation( false );
}

// draw is run many times per second: updates the playback rate from the
// mouse position, then renders both output channels as waveforms.
void draw()
{
  // Map the mouse's horizontal position onto a playback rate of 0..3.
  float playbackRate = map(mouseX, 0, width, 0.0f, 3.f);
  rateControl.value.setLastValue(playbackRate);

  // Black background, white waveform strokes.
  background( 0 );
  stroke( 255 );

  // Left channel drawn around y = 50, right channel around y = 150.
  int samples = out.bufferSize();
  for ( int i = 0; i + 1 < samples; i++ )
  {
    float xA = map( i, 0, samples, 0, width );
    float xB = map( i + 1, 0, samples, 0, width );
    line( xA, 50  - out.left.get(i)*50,  xB, 50  - out.left.get(i+1)*50);
    line( xA, 150 - out.right.get(i)*50, xB, 150 - out.right.get(i+1)*50);
  }
}

if you want to think in musical terms:


// Precompute 25 playback-rate multipliers covering -12..+12 semitones
// in equal temperament: rate = 2^(n/12), so index 12 is rate 1.0 (no change).
float[] transposition = new float[25];

for (int notes = -12; notes <=12; notes++) {

  transposition[notes+12] = pow(2.0, notes/12.0);
}

// Print the whole table to the console for inspection.
printArray(transposition);

you can use the values in the array to transpose a sound in musical intervals based on Equal_temperament:

This is a Major chord;
es;
transposition[12] = no changes in speed;
transposition[16] = major third
transposition[19] = perfect fifth
1 Like

Hi rand_colors,

I tried your program to change the rate but I have this in the console :confused:

ArrayIndexOutOfBoundsException: 1

for this line highlighted

 filePlayer.patch(rateControl).patch(out);

I don’t understand — it looks like the error you get when an array index goes one past the end of the array…

Thanks for theory about the transposition of the sample, too :wink:

Hi,
it’s not my example :slight_smile:
it’s just an example inside the minim library, did you try/study/play the examples to understand what this library is about?
http://code.compartmental.net/tools/minim/quickstart/
Those guys spent years of work to give us minim…

if you go in Processing-> file-> example → minim → Synthesis → tickRateExample
it’s there.

About the question where you asked me:

the script is in python …
On youtube I searched for ableton Processing

there are the github.

best

Thanks, I will check that later.