Capture image from gstreamer Pipeline

Asked

Viewed 93 times

2

I have an IP camera that uses the RTSP protocol to transmit images. The code below uses GStreamer to connect, grab those images, and show them in Swing (it works just fine).

I want to get the camera frames straight from the GStreamer Pipeline (without using Swing), so that I can analyze the image frame by frame.

I know it is possible to save the image to a folder and then load it back, but that is not feasible for me. I would like, after starting the Pipeline, to run a loop that captures each image (it could be as a byte[]); the problem is that I can find nothing about this.

import java.awt.BorderLayout;
import java.awt.Dimension;
import javax.swing.JFrame;
import javax.swing.SwingUtilities;
import org.gstreamer.Element;
import org.gstreamer.Gst;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.swing.VideoComponent;

public class Main {

    /**
     * Connects to an RTSP IP camera through a GStreamer pipeline and renders
     * the decoded video stream in a Swing window.
     *
     * @param args command-line arguments; passed to {@code Gst.init} so
     *             GStreamer can consume its own options first
     * @throws InterruptedException if the main thread is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        args = Gst.init("PipelineLauncher", args);
        final String def = "rtspsrc location=rtsp://192.168.25.160/av0_0 latency=0 ! decodebin ! ffmpegcolorspace name=testp";
        final Pipeline pipe = Pipeline.launch(def);

        SwingUtilities.invokeLater(new Runnable() {

            @Override
            public void run() {
                // Create the video component and link it to the tail of the
                // launched pipeline (the element named "testp").
                VideoComponent videoComponent = new VideoComponent();
                Element videosink = videoComponent.getElement();
                pipe.add(videosink);
                pipe.getElementByName("testp").link(videosink);

                // Start the pipeline. play() already requests the PLAYING
                // state, so a separate setState(State.PLAYING) is redundant,
                // as was the earlier setState(State.PAUSED).
                pipe.play();

                // NOTE: GStreamer state changes are asynchronous, so
                // isPlaying() may still report false right after play().
                if (pipe.isPlaying()) {
                    System.out.println("Pipeline playing");
                } else {
                    System.out.println("Pipeline not playing");
                }

                // Now create a JFrame to display the video output.
                JFrame frame = new JFrame("Swing Video Test");
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.add(videoComponent, BorderLayout.CENTER);
                videoComponent.setPreferredSize(new Dimension(800, 600));
                frame.pack();
                frame.setLocationRelativeTo(null);
                frame.setVisible(true);
            }
        });

        // Blocks until Gst.quit() is called; then release pipeline resources.
        Gst.main();
        pipe.setState(State.NULL);
    }

}
  • You can use the 'appsink' element instead of a real video sink. With appsink you get the frames in a callback that you register. http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/gst-plugins-base-libs-appsink.html

  • Try this: http://www.processing.org See references and libraries.

1 answer

0


I checked what the VideoComponent did in the previous example and implemented what it uses:

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.io.File;
import java.io.IOException;
import java.nio.IntBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import javax.imageio.ImageIO;
import org.gstreamer.Gst;
import org.gstreamer.Pipeline;
import org.gstreamer.elements.RGBDataSink;

public class Main {

    public static void main(String[] args) throws InterruptedException {

        args = Gst.init("PipelineLauncher", args);
        //String def = "rtspsrc location=rtsp://192.168.25.160/av0_0 latency=0 ! decodebin ! ffmpegcolorspace name=testp";
        String def = "rtspsrc location=rtsp://192.168.25.160/av0_0 ! decodebin ! ffmpegcolorspace name=testp";
        Pipeline pipe = Pipeline.launch(def);

        RGBDataSink videosink = new RGBDataSink("GstVideoComponent", new RGBListener());
        videosink.setPassDirectBuffer(true);
        videosink.getSinkElement().setMaximumLateness(16, TimeUnit.MILLISECONDS);
        videosink.getSinkElement().setQOSEnabled(true);

        pipe.add(videosink);
        pipe.getElementByName("testp").link(videosink);

        pipe.play();

        if (pipe.isPlaying()) {
            System.out.println("Pipeline playing");
        } else {
            System.out.println("Pipeline not playing");
        }

        Thread.sleep(5000);

        pipe.stop();

        System.out.println("Done");
    }

    private static class RGBListener implements RGBDataSink.Listener {

        private volatile int i = 0;

        public void rgbFrame(boolean isPrerollFrame, int width, int height, IntBuffer rgb) {
            BufferedImage renderImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
            renderImage.setAccelerationPriority(0.0f);
            int[] pixels = ((DataBufferInt) renderImage.getRaster().getDataBuffer()).getData();
            rgb.get(pixels, 0, width * height);

            try {
                ImageIO.write(renderImage, "jpg", new File("/home/lala/Desktop/bbbb/" + (i++) + ".jpg"));
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }

}

Browse other questions tagged

You are not signed in. Login or sign up in order to post.