Capture GStreamer Pipeline Image


I have an IP camera that streams video over RTSP. The code below uses GStreamer to connect, grab the frames, and display them in a Swing window (it works correctly).

I want to get the camera frames straight from the GStreamer Pipeline (without using Swing), so that I can analyze the video frame by frame.

I know I could save each image to a folder and then load it back, but that is not feasible for me. What I would like is, after starting the Pipeline, to run a loop that captures each frame as a byte[] (or something similar). The problem is that I cannot find anything about how to do this.
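Roughly, this is the kind of loop I have in mind. It is only an untested sketch based on GStreamer's appsink element; the exact gstreamer-java class and method names may differ:

import org.gstreamer.Buffer;
import org.gstreamer.Gst;
import org.gstreamer.Pipeline;
import org.gstreamer.elements.AppSink;

public class FrameLoopSketch {

    public static void main(String[] args) {
        args = Gst.init("FrameLoopSketch", args);
        // appsink takes the place of the Swing video sink at the end of the pipeline
        Pipeline pipe = Pipeline.launch(
                "rtspsrc location=rtsp://192.168.25.160/av0_0 latency=0"
                + " ! decodebin ! ffmpegcolorspace ! appsink name=grabber");
        // the cast assumes the bindings map the appsink element to the AppSink class
        AppSink grabber = (AppSink) pipe.getElementByName("grabber");
        pipe.play();

        while (true) {
            Buffer buf = grabber.pullBuffer(); // blocks; returns null at end of stream
            if (buf == null) {
                break;
            }
            byte[] frame = new byte[buf.getByteBuffer().remaining()];
            buf.getByteBuffer().get(frame);    // raw frame data as byte[]
            buf.dispose();                     // release the native buffer
            // ... analyze frame here ...
        }
        pipe.stop();
    }
}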

import java.awt.BorderLayout;
import java.awt.Dimension;
import javax.swing.JFrame;
import javax.swing.SwingUtilities;
import org.gstreamer.Element;
import org.gstreamer.Gst;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.swing.VideoComponent;

public class Main {

    public static void main(String[] args) throws InterruptedException {
        args = Gst.init("PipelineLauncher", args);
        final String def = "rtspsrc location=rtsp://192.168.25.160/av0_0 latency=0 ! decodebin ! ffmpegcolorspace name=testp";
        final Pipeline pipe = Pipeline.launch(def);

        SwingUtilities.invokeLater(new Runnable() {

            @Override
            public void run() {
                // Create the video component and link it in                
                VideoComponent videoComponent = new VideoComponent();
                Element videosink = videoComponent.getElement();
                pipe.add(videosink);
                pipe.getElementByName("testp").link(videosink);
                // Start the pipeline processing; play() is shorthand for
                // setState(State.PLAYING), so only one of the two is needed
                pipe.play();

                // State changes happen asynchronously, so isPlaying() may
                // still report false right after play()
                if (pipe.isPlaying()) {
                    System.out.println("Pipeline playing");
                } else {
                    System.out.println("Pipeline not playing");
                }

                // Now create a JFrame to display the video output
                JFrame frame = new JFrame("Swing Video Test");
                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
                frame.add(videoComponent, BorderLayout.CENTER);
                videoComponent.setPreferredSize(new Dimension(800, 600));
                frame.pack();
                frame.setLocationRelativeTo(null);
                frame.setVisible(true);
            }
        });

        Gst.main();
        pipe.setState(State.NULL);
    }

}
asked by anonymous 04.12.2015 / 20:11

1 answer


I checked what VideoComponent does internally in the example above and implemented the same mechanism directly: an RGBDataSink whose listener receives each decoded frame.

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.io.File;
import java.io.IOException;
import java.nio.IntBuffer;
import java.util.concurrent.TimeUnit;
import javax.imageio.ImageIO;
import org.gstreamer.Gst;
import org.gstreamer.Pipeline;
import org.gstreamer.elements.RGBDataSink;

public class Main {

    public static void main(String[] args) throws InterruptedException {

        args = Gst.init("PipelineLauncher", args);
        //String def = "rtspsrc location=rtsp://192.168.25.160/av0_0 latency=0 ! decodebin ! ffmpegcolorspace name=testp";
        String def = "rtspsrc location=rtsp://192.168.25.160/av0_0 ! decodebin ! ffmpegcolorspace name=testp";
        Pipeline pipe = Pipeline.launch(def);

        // RGBDataSink converts each decoded frame to RGB and hands it to the listener
        RGBDataSink videosink = new RGBDataSink("GstVideoComponent", new RGBListener());
        videosink.setPassDirectBuffer(true);
        videosink.getSinkElement().setMaximumLateness(16, TimeUnit.MILLISECONDS);
        videosink.getSinkElement().setQOSEnabled(true);

        pipe.add(videosink);
        pipe.getElementByName("testp").link(videosink);

        pipe.play();

        if (pipe.isPlaying()) {
            System.out.println("Pipeline playing");
        } else {
            System.out.println("Pipeline not playing");
        }

        Thread.sleep(5000); // let the pipeline run and capture frames for 5 seconds

        pipe.stop();

        System.out.println("Done");
    }

    private static class RGBListener implements RGBDataSink.Listener {

        // Frame counter; rgbFrame is invoked from the sink's streaming thread
        private volatile int i = 0;

        @Override
        public void rgbFrame(boolean isPrerollFrame, int width, int height, IntBuffer rgb) {
            // Copy the decoded RGB pixels straight into the BufferedImage's backing array
            BufferedImage renderImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
            renderImage.setAccelerationPriority(0.0f);
            int[] pixels = ((DataBufferInt) renderImage.getRaster().getDataBuffer()).getData();
            rgb.get(pixels, 0, width * height);

            try {
                ImageIO.write(renderImage, "jpg", new File("/home/lala/Desktop/bbbb/" + (i++) + ".jpg"));
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }

}
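If you want to avoid touching the disk at all, as the question asks, the same listener can hand each frame to a BlockingQueue instead, and the analysis loop consumes the frames in memory. This is only a sketch built on the same RGBDataSink API; QueueingListener is a name I made up:

import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.nio.IntBuffer;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import org.gstreamer.elements.RGBDataSink;

public class QueueingListener implements RGBDataSink.Listener {

    // Bounded queue: if the consumer falls behind, new frames are dropped
    // instead of piling up in memory
    private final BlockingQueue<BufferedImage> frames = new ArrayBlockingQueue<BufferedImage>(8);

    public BlockingQueue<BufferedImage> getFrames() {
        return frames;
    }

    @Override
    public void rgbFrame(boolean isPrerollFrame, int width, int height, IntBuffer rgb) {
        BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
        int[] pixels = ((DataBufferInt) image.getRaster().getDataBuffer()).getData();
        rgb.get(pixels, 0, width * height);
        frames.offer(image); // non-blocking: drops the frame if the queue is full
    }
}

Then, instead of Thread.sleep(5000), the main thread loops over the queue (take() throws InterruptedException, which main already declares):

QueueingListener listener = new QueueingListener();
RGBDataSink videosink = new RGBDataSink("GstVideoComponent", listener);
// ... add, link and play the pipeline as above ...
while (true) {
    BufferedImage frame = listener.getFrames().take(); // blocks until a frame arrives
    // ... analyze the frame here ...
}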
    
answered 07.12.2015 / 20:16