Hi,
I’m really new to Processing but starting to get into it. At the moment I’m stuck trying to export my sketch. I’m using three libraries: OpenCV for Processing (gab.opencv), the Video library (processing.video) and java.awt. The sketch is supposed to start the built-in webcam (macOS Mojave). The exported app crashes on startup and shows the following messages:
Cannot load GStreamer plugins from /Users/…/sketch_190213a/application.macosx64/sketch_190213a.app/Contents/Java//plugins
The capture plugin does not support device query!
java.lang.RuntimeException: There are no capture devices connected to this computer.
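If it helps, a minimal sketch like the one below should show whether the exported app can see any capture devices at all. This is only my guess at what's going wrong (that Capture.list() comes back empty in the exported app), not something I've confirmed:

import processing.video.*;

void setup() {
  // print every capture device the video library can find
  String[] cameras = Capture.list();
  if (cameras == null || cameras.length == 0) {
    println("No capture devices found");
  } else {
    for (String cam : cameras) {
      println(cam);
    }
  }
}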
Thanks in advance!
My full code (because I’m lost…):
import gab.opencv.*;
import processing.video.*;
import java.awt.*;

int number = 1;
int wasCaptured = 0;
String rawSlice = "rawslice/";
String newSlice = "newslice/";
String systemStatus;
int c1 = color(2, 71, 254);   // background blue
int c2 = color(255);          // white, used for text and contours
boolean keyDelay = false;     // simple delay so the spacebar isn't captured repeatedly
long lastTime = 0;
PImage slice, capturedImage, gray, dst, polySlice, drawNewSlice;
ArrayList<Contour> contours;
ArrayList<Contour> polygons;
Capture video;
OpenCV face;
OpenCV opencv;

void settings() {
  size(1080, 480);
}

void setup() {
  // size() already lives in settings(), so it is not repeated here
  video = new Capture(this, 640/2, height/2);
  face = new OpenCV(this, 640/2, height/2);
  face.loadCascade(OpenCV.CASCADE_FRONTALFACE);
  background(c1);
  lastTime = millis();
  video.start();
}

void draw() {
  scale(2);
  face.loadImage(video);
  image(video, 0, 0);
  noFill();
  stroke(0, 0, 255);
  strokeWeight(2);
  Rectangle[] faces = face.detect();
  for (int i = 0; i < faces.length; i++) {
    if (faces[i].height <= 115 && faces[i].height >= 75) {
      rectMode(CORNER);
      rect(faces[i].x, faces[i].y, faces[i].width, faces[i].height);
      // draw() is scaled by 2, so grab the face region at full resolution
      slice = get(faces[i].x * 2, faces[i].y * 2, faces[i].width * 2, faces[i].height * 2);
    }
    // feedback for face detection
    rectMode(CENTER);
    fill(c1);
    noStroke();
    rect(320/2, 5, 100, 10);
    if (faces[i].height >= 115) {
      systemStatus = "Too close to camera";
    } else if (faces[i].height <= 75) {
      systemStatus = "Too far from camera";
    } else {
      systemStatus = "Face detected";
    }
    textAlign(CENTER);
    fill(c2);
    textSize(6);
    text(systemStatus, 320/2, 6, 100, 10);
  }
  if (wasCaptured >= 1) {
    drawPoly();
  } else {
    println("No image selected");
  }
  if (millis() - lastTime > 1000 && keyDelay) {
    keyDelay = false;
  }
  println("Number = " + number + ", keyDelay = " + keyDelay);
}

void captureEvent(Capture c) {
  c.read();
}

void keyPressed() { // create a slice on keypress
  if (key == ' ' && !keyDelay && slice != null) { // only capture once a face slice exists
    captureAndSave();
    capturedImage = loadImage(rawSlice + "rawSlice" + nf(number - 1, 1) + ".png");
    wasCaptured++;
    keyDelay = true;
    lastTime = millis();
  } else if (key == ' ' && keyDelay) {
    println("Please wait...");
  }
  if (key == 'r') {
    wasCaptured = 0;
    background(c1);
  }
}

void captureAndSave() { // Save slice and iterate
  slice.save(rawSlice + "rawSlice" + nf(number, 1) + ".png");
  number++;
}

void captureNewSlice() {
  polySlice = get(640, 15, 440, 440);
  polySlice.save(newSlice + "newSlice" + nf(number - 1, 1) + ".png");
  delay(50);
}

void drawPoly() {
  opencv = new OpenCV(this, capturedImage);
  opencv.gray();
  opencv.threshold(90);
  gray = opencv.getSnapshot();
  dst = opencv.getOutput();
  //image(gray, 325, 15, 200, 200); // Actual slice
  contours = opencv.findContours();
  println("found " + contours.size() + " contours");
  //image(dst, 325, 15, 200, 200); // Opencv slice
  noFill();
  strokeWeight(2);
  scale(0.8);
  translate(425, 40);
  for (Contour contour : contours) {
    //contour.draw();
    beginShape();
    stroke(c2);
    for (PVector point : contour.getPolygonApproximation().getPoints()) {
      vertex(point.x, point.y);
    }
    endShape();
  }
  captureNewSlice();
}