package com.github.sarxos.webcam.ds.gstreamer;

import java.awt.Dimension;
import java.awt.image.BufferedImage;
import java.awt.image.DataBufferInt;
import java.io.File;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import org.bridj.Platform;
import org.gstreamer.Caps;
import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Pad;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.Structure;
import org.gstreamer.elements.RGBDataSink;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.github.sarxos.webcam.WebcamDevice;
import com.github.sarxos.webcam.WebcamResolution;


public class GStreamerDevice implements WebcamDevice, RGBDataSink.Listener, WebcamDevice.FPSSource {

	/**
	 * Logger.
	 */
	private static final Logger LOG = LoggerFactory.getLogger(GStreamerDevice.class);

	/**
	 * Limit the lateness of frames to no more than 20 ms (half a frame at 25 FPS).
	 */
	private static final long LATENESS = 20; // ms

	/**
	 * Formats listed earlier are better. For example, video/x-raw-rgb gives 30 FPS
	 * at HD720p, while video/x-raw-yuv gives only 10 FPS at the same resolution.
	 * The goal is to try these "better" formats first and fall back to the less
	 * efficient ones when they are not available.
	 */
	private static final String[] BEST_FORMATS = {
		"video/x-raw-rgb",
		"video/x-raw-yuv",
	};

	/**
	 * Video format to capture.
	 */
	private String format;

	/**
	 * All possible resolutions, populated during the initialization phase.
	 */
	private Dimension[] resolutions = null;

	/**
	 * Device name, immutable. Used only on the Windows platform.
	 */
	private final String name;

	/**
	 * Device file, immutable. Used only on the Linux platform.
	 */
	private final File vfile;

	/* gstreamer stuff */

	private Pipeline pipe = null;
	private Element source = null;
	private Element filter = null;
	private RGBDataSink sink = null;

	private Caps caps = null;

	/* logic */

	private AtomicBoolean open = new AtomicBoolean(false);
	private AtomicBoolean disposed = new AtomicBoolean(false);
	private AtomicBoolean starting = new AtomicBoolean(false);
	private AtomicBoolean initialized = new AtomicBoolean(false);
	private Dimension resolution = WebcamResolution.VGA.getSize();
	private BufferedImage image = null;

	/* used to calculate fps */

	private long t1 = -1;
	private long t2 = -1;

	private volatile double fps = 0;

	/**
	 * Create GStreamer webcam device.
	 *
	 * @param name the name of the webcam device
	 */
	protected GStreamerDevice(String name) {
		this.name = name;
		this.vfile = null;
	}

	/**
	 * Create GStreamer webcam device.
	 *
	 * @param vfile the video device file
	 */
	protected GStreamerDevice(File vfile) {
		this.name = null;
		this.vfile = vfile;
	}

	/**
	 * Initialize webcam device.
	 */
	private synchronized void init() {

		if (!initialized.compareAndSet(false, true)) {
			return;
		}

		LOG.debug("GStreamer webcam device initialization");

		pipe = new Pipeline(name);

		if (Platform.isWindows()) {
			source = ElementFactory.make("dshowvideosrc", "source");
			source.set("device-name", name);
		} else if (Platform.isLinux()) {
			source = ElementFactory.make("v4l2src", "source");
			source.set("device", vfile.getAbsolutePath());
		}

		sink = new RGBDataSink(name, this);
		sink.setPassDirectBuffer(true);
		sink.getSinkElement().setMaximumLateness(LATENESS, TimeUnit.MILLISECONDS);
		sink.getSinkElement().setQOSEnabled(true);

		filter = ElementFactory.make("capsfilter", "filter");

		// on Linux the pipeline must be brought to READY so that v4l2src opens
		// the device and exposes its caps for parsing
		if (Platform.isLinux()) {
			pipe.addMany(source, filter, sink);
			Element.linkMany(source, filter, sink);
			pipe.setState(State.READY);
		}

		resolutions = parseResolutions(source.getPads().get(0));

		if (Platform.isLinux()) {
			pipe.setState(State.NULL);
			Element.unlinkMany(source, filter, sink);
			pipe.removeMany(source, filter, sink);
		}
	}

	/**
	 * Use GStreamer to get all possible resolutions.
	 *
	 * @param pad the pad to get resolutions from
	 * @return Array of resolutions supported by the device connected to the given pad
	 */
	private Dimension[] parseResolutions(Pad pad) {

		Caps caps = pad.getCaps();

		format = findBestFormat(caps);

		LOG.debug("Best format is {}", format);

		Dimension r = null;
		Structure s = null;
		String mime = null;

		int n = caps.size();
		int i = 0;

		Map<String, Dimension> map = new HashMap<String, Dimension>();

		do {

			s = caps.getStructure(i++);

			LOG.debug("Found format structure {}", s);

			mime = s.getName();

			if (mime.equals(format)) {
				if ((r = capStructToResolution(s)) != null) {
					map.put(r.width + "x" + r.height, r);
				}
			}

		} while (i < n);

		Dimension[] resolutions = new ArrayList<Dimension>(map.values()).toArray(new Dimension[map.size()]);

		if (LOG.isDebugEnabled()) {
			for (Dimension d : resolutions) {
				LOG.debug("Resolution detected {}", d);
			}
		}

		return resolutions;
	}

	private static String findBestFormat(Caps caps) {
		for (String f : BEST_FORMATS) {
			for (int i = 0, n = caps.size(); i < n; i++) {
				if (f.equals(caps.getStructure(i).getName())) {
					return f;
				}
			}
		}
		return null;
	}

	private static Dimension capStructToResolution(Structure structure) {

		int w = -1;
		int h = -1;

		if (Platform.isWindows()) {
			w = structure.getRange("width").getMinInt();
			h = structure.getRange("height").getMinInt();
		} else if (Platform.isLinux()) {
			w = structure.getInteger("width");
			h = structure.getInteger("height");
		}

		if (w > 0 && h > 0) {
			return new Dimension(w, h);
		} else {
			return null;
		}
	}

	@Override
	public String getName() {
		if (Platform.isWindows()) {
			return name;
		} else if (Platform.isLinux()) {
			return vfile.getAbsolutePath();
		} else {
			throw new RuntimeException("Platform not supported by GStreamer capture driver");
		}
	}

	@Override
	public Dimension[] getResolutions() {
		init();
		return resolutions;
	}

	@Override
	public Dimension getResolution() {
		return resolution;
	}

	@Override
	public void setResolution(Dimension size) {
		this.resolution = size;
	}

	@Override
	public BufferedImage getImage() {
		return image;
	}

	@Override
	public void open() {

		if (!open.compareAndSet(false, true)) {
			return;
		}

		LOG.debug("Opening GStreamer device");

		init();

		starting.set(true);

		Dimension size = getResolution();

		image = new BufferedImage(size.width, size.height, BufferedImage.TYPE_INT_RGB);
		image.setAccelerationPriority(0);
		image.flush();

		if (caps != null) {
			caps.dispose();
		}

		caps = Caps.fromString(String.format("%s,width=%d,height=%d", format, size.width, size.height));

		filter.setCaps(caps);

		LOG.debug("Link elements");

		pipe.addMany(source, filter, sink);
		Element.linkMany(source, filter, sink);
		pipe.setState(State.PLAYING);

		// wait max 20 s for the first image to appear
		synchronized (this) {
			LOG.debug("Wait for device to be ready");
			try {
				this.wait(20000);
			} catch (InterruptedException e) {
				return;
			}
		}
	}

	@Override
	public void close() {

		if (!open.compareAndSet(true, false)) {
			return;
		}

		LOG.debug("Closing GStreamer device");

		image = null;

		LOG.debug("Unlink elements");

		pipe.setState(State.NULL);
		Element.unlinkMany(source, filter, sink);
		pipe.removeMany(source, filter, sink);
	}

	@Override
	public void dispose() {

		if (!disposed.compareAndSet(false, true)) {
			return;
		}

		LOG.debug("Disposing GStreamer device");

		close();

		// elements are created lazily in init() and caps in open(), so they may
		// still be null if the device has never been initialized or opened
		if (initialized.get()) {
			filter.dispose();
			source.dispose();
			sink.dispose();
			pipe.dispose();
		}
		if (caps != null) {
			caps.dispose();
		}
	}

	@Override
	public boolean isOpen() {
		return open.get();
	}

	@Override
	public void rgbFrame(boolean preroll, int width, int height, IntBuffer rgb) {

		LOG.trace("New RGB frame");

		if (t1 == -1 || t2 == -1) {
			t1 = System.currentTimeMillis();
			t2 = System.currentTimeMillis();
		}

		BufferedImage tmp = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
		tmp.setAccelerationPriority(0);
		rgb.get(((DataBufferInt) tmp.getRaster().getDataBuffer()).getData(), 0, width * height);
		tmp.flush();

		image = tmp;

		// wake up open(), which is waiting for the first frame to arrive
		if (starting.compareAndSet(true, false)) {

			synchronized (this) {
				this.notifyAll();
			}

			LOG.debug("GStreamer device ready");
		}

		t1 = t2;
		t2 = System.currentTimeMillis();

		// exponential moving average smoothing of the instantaneous frame rate
		fps = (4 * fps + 1000 / (t2 - t1 + 1)) / 5;
	}

	@Override
	public double getFPS() {
		return fps;
	}

	public Pipeline getPipe() {
		return pipe;
	}

	public Element getSource() {
		return source;
	}

	public Element getFilter() {
		return filter;
	}

	public RGBDataSink getSink() {
		return sink;
	}

	public Caps getCaps() {
		return caps;
	}
}