View Javadoc
1   /*
2    * Licensed to The Apereo Foundation under one or more contributor license
3    * agreements. See the NOTICE file distributed with this work for additional
4    * information regarding copyright ownership.
5    *
6    *
7    * The Apereo Foundation licenses this file to you under the Educational
8    * Community License, Version 2.0 (the "License"); you may not use this file
9    * except in compliance with the License. You may obtain a copy of the License
10   * at:
11   *
12   *   http://opensource.org/licenses/ecl2.txt
13   *
14   * Unless required by applicable law or agreed to in writing, software
15   * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
16   * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
17   * License for the specific language governing permissions and limitations under
18   * the License.
19   *
20   */
21  package org.opencastproject.workflow.handler.videogrid;
22  
23  import static java.lang.String.format;
24  
25  import org.opencastproject.composer.api.ComposerService;
26  import org.opencastproject.composer.api.EncoderException;
27  import org.opencastproject.composer.api.EncodingProfile;
28  import org.opencastproject.composer.layout.Dimension;
29  import org.opencastproject.inspection.api.MediaInspectionException;
30  import org.opencastproject.inspection.api.MediaInspectionService;
31  import org.opencastproject.job.api.Job;
32  import org.opencastproject.job.api.JobContext;
33  import org.opencastproject.mediapackage.MediaPackage;
34  import org.opencastproject.mediapackage.MediaPackageElementFlavor;
35  import org.opencastproject.mediapackage.MediaPackageElementParser;
36  import org.opencastproject.mediapackage.MediaPackageException;
37  import org.opencastproject.mediapackage.Track;
38  import org.opencastproject.mediapackage.TrackSupport;
39  import org.opencastproject.mediapackage.VideoStream;
40  import org.opencastproject.mediapackage.selector.TrackSelector;
41  import org.opencastproject.mediapackage.track.TrackImpl;
42  import org.opencastproject.serviceregistry.api.ServiceRegistry;
43  import org.opencastproject.smil.api.util.SmilUtil;
44  import org.opencastproject.util.NotFoundException;
45  import org.opencastproject.util.data.Tuple;
46  import org.opencastproject.videogrid.api.VideoGridService;
47  import org.opencastproject.videogrid.api.VideoGridServiceException;
48  import org.opencastproject.workflow.api.AbstractWorkflowOperationHandler;
49  import org.opencastproject.workflow.api.ConfiguredTagsAndFlavors;
50  import org.opencastproject.workflow.api.WorkflowInstance;
51  import org.opencastproject.workflow.api.WorkflowOperationException;
52  import org.opencastproject.workflow.api.WorkflowOperationHandler;
53  import org.opencastproject.workflow.api.WorkflowOperationInstance;
54  import org.opencastproject.workflow.api.WorkflowOperationResult;
55  import org.opencastproject.workspace.api.Workspace;
56  
57  import com.google.gson.Gson;
58  import com.google.gson.reflect.TypeToken;
59  
60  import org.apache.commons.lang3.StringUtils;
61  import org.apache.commons.lang3.tuple.ImmutablePair;
62  import org.osgi.service.component.annotations.Component;
63  import org.osgi.service.component.annotations.Reference;
64  import org.slf4j.Logger;
65  import org.slf4j.LoggerFactory;
66  import org.w3c.dom.Node;
67  import org.w3c.dom.NodeList;
68  import org.w3c.dom.smil.SMILDocument;
69  import org.w3c.dom.smil.SMILElement;
70  import org.w3c.dom.smil.SMILMediaElement;
71  import org.w3c.dom.smil.SMILParElement;
72  import org.xml.sax.SAXException;
73  
74  import java.io.File;
75  import java.io.IOException;
76  import java.net.URI;
77  import java.util.ArrayList;
78  import java.util.Arrays;
79  import java.util.Collections;
80  import java.util.HashMap;
81  import java.util.List;
82  import java.util.Locale;
83  import java.util.Map;
84  import java.util.regex.Pattern;
85  import java.util.stream.Collectors;
86  
87  /**
88   * The workflow definition for handling multiple videos that have overlapping playtime, e.g. webcam videos from
89   * a video conference call.
90   * Checks which videos are currently playing and dynamically scales them to fit in a single video.
91   *
92   * Relies on a smil with videoBegin and duration times, as is created by ingest through addPartialTrack.
93   * Will pad sections where no video is playing with a background color. This includes beginning and end.
94   *
95   * Returns the final video to the target flavor
96   */
97  @Component(
98      immediate = true,
99      service = WorkflowOperationHandler.class,
100     property = {
101         "service.description=Video Grid Workflow Operation Handler",
102         "workflow.operation=videogrid"
103     }
104 )
105 public class VideoGridWorkflowOperationHandler extends AbstractWorkflowOperationHandler {
106 
  /** Workflow configuration keys */
  // NOTE(review): SOURCE_FLAVORS is not referenced in the visible code (flavors are
  // resolved via getTagsAndFlavors) — confirm whether it is still needed.
  private static final String SOURCE_FLAVORS = "source-flavors";
  // Flavor of the SMIL catalog produced by partial-track ingest (read in start()).
  private static final String SOURCE_SMIL_FLAVOR = "source-smil-flavor";
  // Encoding profile used for the final concatenation of the section videos.
  private static final String CONCAT_ENCODING_PROFILE = "concat-encoding-profile";

  // Optional: resolution of the final video, e.g. "1280x720" (the default).
  private static final String OPT_RESOLUTION = "resolution";
  // Optional: background color as 0xRRGGBB, used to pad empty areas. Default 0xFFFFFF.
  private static final String OPT_BACKGROUND_COLOR = "background-color";

  /** The logging facility */
  private static final Logger logger = LoggerFactory.getLogger(VideoGridWorkflowOperationHandler.class);

  /** Constants */
  // SMIL node name identifying video media elements.
  private static final String NODE_TYPE_VIDEO = "video";

  // TODO: Make ffmpeg commands more "opencasty"
  // NOTE(review): FFMPEG and FFMPEG_WF_ARGS are not used in the visible portion of
  // this file — presumably consumed by helpers below the visible range; verify.
  private static final String[] FFMPEG = {"ffmpeg", "-y", "-v", "warning", "-nostats", "-max_error_rate", "1.0"};
  private static final String FFMPEG_WF_CODEC = "h264"; //"mpeg2video";
  // Output frame rate for section videos; also used by the fps filter in compositeSection().
  private static final int FFMPEG_WF_FRAMERATE = 24;
  private static final String[] FFMPEG_WF_ARGS = {
      "-an", "-codec", FFMPEG_WF_CODEC,
      "-q:v", "2",
      "-g", Integer.toString(FFMPEG_WF_FRAMERATE * 10),
      "-pix_fmt", "yuv420p",
      "-r", Integer.toString(FFMPEG_WF_FRAMERATE)
  };

  /** Services */
  // All services are injected via the OSGi @Reference setters below.
  private Workspace workspace = null;
  private VideoGridService videoGridService = null;
  private MediaInspectionService inspectionService = null;
  private ComposerService composerService = null;
138 
  /** Service Callbacks **/
  /** OSGi callback: sets the workspace used to read the SMIL catalog and clean up. */
  @Reference
  public void setWorkspace(Workspace workspace) {
    this.workspace = workspace;
  }
  /** OSGi callback: sets the service that renders the per-section grid videos. */
  @Reference
  public void setVideoGridService(VideoGridService videoGridService) {
    this.videoGridService = videoGridService;
  }
  /** OSGi callback: sets the inspection service used to enrich generated tracks with metadata. */
  @Reference
  protected void setMediaInspectionService(MediaInspectionService inspectionService) {
    this.inspectionService = inspectionService;
  }
  /** OSGi callback: sets the composer service used for the final concat job. */
  @Reference
  public void setComposerService(ComposerService composerService) {
    this.composerService = composerService;
  }
  /** OSGi callback: forwards the service registry to the base handler. */
  @Reference
  @Override
  public void setServiceRegistry(ServiceRegistry serviceRegistry) {
    super.setServiceRegistry(serviceRegistry);
  }
161 
162   /** Structs to store data and make code more readable **/
163   /**
164    * Holds basic information on the final video, which is for example used to appropriately place and scale
165    * individual videos.
166    */
167   class LayoutArea {
168     private int x = 0;
169     private int y = 0;
170     private int width = 1920;
171     private int height = 1080;
172     private String name = "webcam";
173     private String bgColor = "0xFFFFFF";
174 
175     public int getX() {
176       return x;
177     }
178     public void setX(int x) {
179       this.x = x;
180     }
181     public int getY() {
182       return y;
183     }
184     public void setY(int y) {
185       this.y = y;
186     }
187     public int getWidth() {
188       return width;
189     }
190     public void setWidth(int width) {
191       this.width = width;
192     }
193     public int getHeight() {
194       return height;
195     }
196     public void setHeight(int height) {
197       this.height = height;
198     }
199     public String getName() {
200       return name;
201     }
202     public void setName(String name) {
203       this.name = name;
204     }
205     public String getBgColor() {
206       return bgColor;
207     }
208     public void setBgColor(String bgColor) {
209       this.bgColor = bgColor;
210     }
211 
212     LayoutArea(int width, int height) {
213       this.width = width;
214       this.height = height;
215     }
216 
217     LayoutArea(String name, int x, int y, int width, int height, String bgColor) {
218       this(width, height);
219       this.name = name;
220       this.x = x;
221       this.y = y;
222       this.bgColor = bgColor;
223     }
224   }
225 
226   /**
227    * Holds information on a single video beyond what is usually stored in a Track
228    */
229   class VideoInfo {
230     private int aspectRatioWidth = 16;
231     private int aspectRatioHeight = 9;
232 
233     private long startTime = 0;
234     private long duration = 0;
235     private Track video;
236 
237     public int getAspectRatioWidth() {
238       return aspectRatioWidth;
239     }
240     public void setAspectRatioWidth(int aspectRatioWidth) {
241       this.aspectRatioWidth = aspectRatioWidth;
242     }
243     public int getAspectRatioHeight() {
244       return aspectRatioHeight;
245     }
246     public void setAspectRatioHeight(int aspectRatioHeight) {
247       this.aspectRatioHeight = aspectRatioHeight;
248     }
249     public long getStartTime() {
250       return startTime;
251     }
252     public void setStartTime(long startTime) {
253       this.startTime = startTime;
254     }
255     public long getDuration() {
256       return duration;
257     }
258     public void setDuration(long duration) {
259       this.duration = duration;
260     }
261     public Track getVideo() {
262       return video;
263     }
264     public void setVideo(Track video) {
265       this.video = video;
266     }
267 
268 
269     VideoInfo() {
270 
271     }
272 
273     VideoInfo(int height, int width) {
274       aspectRatioWidth = width;
275       aspectRatioHeight = height;
276     }
277 
278     VideoInfo(Track video, long timeStamp, int aspectRatioHeight, int aspectRatioWidth, long startTime) {
279       this(aspectRatioHeight, aspectRatioWidth);
280       this.video = video;
281       this.startTime = startTime;
282     }
283   }
284 
285   /**
286    * Pair class for readability
287    */
288   class Offset {
289     private int x = 16;
290     private int y = 9;
291 
292     public int getX() {
293       return x;
294     }
295     public void setX(int x) {
296       this.x = x;
297     }
298     public int getY() {
299       return y;
300     }
301     public void setY(int y) {
302       this.y = y;
303     }
304 
305     Offset(int x, int y) {
306       this.x = x;
307       this.y = y;
308     }
309   }
310 
311   /**
312    * A section of the complete edit decision list.
313    * A new section is defined whenever a video becomes active or inactive.
314    * Therefore it contains information on the timing as well as all currently active videos in the section.
315    */
316   class EditDecisionListSection {
317     private long timeStamp = 0;
318     private long nextTimeStamp = 0;
319     private List<VideoInfo> areas;
320 
321     public long getTimeStamp() {
322       return timeStamp;
323     }
324     public void setTimeStamp(long timeStamp) {
325       this.timeStamp = timeStamp;
326     }
327     public long getNextTimeStamp() {
328       return nextTimeStamp;
329     }
330     public void setNextTimeStamp(long nextTimeStamp) {
331       this.nextTimeStamp = nextTimeStamp;
332     }
333     public List<VideoInfo> getAreas() {
334       return areas;
335     }
336     public void setAreas(List<VideoInfo> areas) {
337       this.areas = areas;
338     }
339 
340     EditDecisionListSection() {
341       areas = new ArrayList<VideoInfo>();
342     }
343   }
344 
345   /**
346    * Stores relevant information from the source SMIL
347    */
348   class StartStopEvent implements Comparable<StartStopEvent> {
349     private boolean start;
350     private long timeStamp;
351     private Track video;
352     private VideoInfo videoInfo;
353 
354     public boolean isStart() {
355       return start;
356     }
357     public void setStart(boolean start) {
358       this.start = start;
359     }
360     public long getTimeStamp() {
361       return timeStamp;
362     }
363     public void setTimeStamp(long timeStamp) {
364       this.timeStamp = timeStamp;
365     }
366     public VideoInfo getVideoInfo() {
367       return videoInfo;
368     }
369     public void setVideoInfo(VideoInfo videoInfo) {
370       this.videoInfo = videoInfo;
371     }
372 
373     StartStopEvent(boolean start, Track video, long timeStamp, VideoInfo videoInfo) {
374       this.start = start;
375       this.timeStamp = timeStamp;
376       this.video = video;
377       this.videoInfo = videoInfo;
378     }
379 
380     @Override
381     public int compareTo(StartStopEvent o) {
382       return Long.compare(this.timeStamp, o.timeStamp);
383     }
384   }
385 
386   @Override
387   public WorkflowOperationResult start(final WorkflowInstance workflowInstance, JobContext context)
388           throws WorkflowOperationException {
389     logger.debug("Running videogrid workflow operation on workflow {}", workflowInstance.getId());
390 
391     final MediaPackage mediaPackage = (MediaPackage) workflowInstance.getMediaPackage().clone();
392     ConfiguredTagsAndFlavors tagsAndFlavors = getTagsAndFlavors(workflowInstance,
393         Configuration.none, Configuration.many, Configuration.many, Configuration.one);
394 
395     // Read config options
396     WorkflowOperationInstance operation = workflowInstance.getCurrentOperation();
397     final MediaPackageElementFlavor smilFlavor = MediaPackageElementFlavor.parseFlavor(
398             getConfig(operation, SOURCE_SMIL_FLAVOR));
399     final MediaPackageElementFlavor targetPresenterFlavor = tagsAndFlavors.getSingleTargetFlavor();
400     String concatEncodingProfile = StringUtils.trimToNull(operation.getConfiguration(CONCAT_ENCODING_PROFILE));
401 
402     // Get source flavors
403     final List<MediaPackageElementFlavor> sourceFlavors = tagsAndFlavors.getSrcFlavors();
404 
405     // Get tracks from flavor
406     final List<Track> sourceTracks = new ArrayList<>();
407     for (MediaPackageElementFlavor sourceFlavor: sourceFlavors) {
408       TrackSelector trackSelector = new TrackSelector();
409       trackSelector.addFlavor(sourceFlavor);
410       sourceTracks.addAll(trackSelector.select(mediaPackage, false));
411     }
412 
413     // No tracks? Skip
414     if (sourceTracks.isEmpty()) {
415       logger.warn("No tracks in source flavors, skipping ...");
416       return createResult(mediaPackage, WorkflowOperationResult.Action.SKIP);
417     }
418 
419     // No concat encoding profile? Fail
420     if (concatEncodingProfile == null) {
421       throw new WorkflowOperationException("Encoding profile must be set!");
422     }
423     EncodingProfile profile = composerService.getProfile(concatEncodingProfile);
424     if (profile == null) {
425       throw new WorkflowOperationException("Encoding profile '" + concatEncodingProfile + "' was not found");
426     }
427 
428 
429     // Define a general Layout for the final video
430     ImmutablePair<Integer, Integer> resolution;
431     try {
432       resolution = getResolution(getConfig(workflowInstance, OPT_RESOLUTION, "1280x720"));
433     } catch (IllegalArgumentException e) {
434       logger.warn("Given resolution was not well formatted!");
435       throw new WorkflowOperationException(e);
436     }
437     logger.info("The resolution of the final video: {}/{}", resolution.getLeft(), resolution.getRight());
438 
439     // Define a bg color for the final video
440     String bgColor = getConfig(workflowInstance, OPT_BACKGROUND_COLOR, "0xFFFFFF");
441     final Pattern pattern = Pattern.compile("0x[A-Fa-f0-9]{6}");
442     if (!pattern.matcher(bgColor).matches()) {
443       logger.warn("Given color {} was not well formatted!", bgColor);
444       throw new WorkflowOperationException("Given color was not well formatted!");
445     }
446     logger.info("The background color of the final video: {}", bgColor);
447 
448     // Target tags
449     ConfiguredTagsAndFlavors.TargetTags targetTags = tagsAndFlavors.getTargetTags();
450 
451     // Define general layout for the final video
452     LayoutArea layoutArea = new LayoutArea("webcam", 0, 0, resolution.getLeft(), resolution.getRight(),
453                                             bgColor);
454 
455     // Get SMIL catalog
456     final SMILDocument smilDocument;
457     try {
458       smilDocument = SmilUtil.getSmilDocumentFromMediaPackage(mediaPackage, smilFlavor, workspace);
459     } catch (SAXException e) {
460       throw new WorkflowOperationException("SMIL is not well formatted", e);
461     } catch (IOException | NotFoundException e) {
462       throw new WorkflowOperationException("SMIL could not be found", e);
463     }
464 
465     final SMILParElement parallel = (SMILParElement) smilDocument.getBody().getChildNodes().item(0);
466     final NodeList sequences = parallel.getTimeChildren();
467     final float trackDurationInSeconds = parallel.getDur();
468     final long trackDurationInMs = Math.round(trackDurationInSeconds * 1000f);
469 
470     // Get Start- and endtime of the final video from SMIL
471     long finalStartTime = 0;
472     long finalEndTime = trackDurationInMs;
473 
474     // Create a list of start and stop events, i.e. every time a new video begins or an old one ends
475     // Create list from SMIL from partial ingests
476     List<StartStopEvent> events = new ArrayList<>();
477     List<Track> videoSourceTracks = new ArrayList<>();
478 
479     for (int i = 0; i < sequences.getLength(); i++) {
480       final SMILElement item = (SMILElement) sequences.item(i);
481       NodeList children = item.getChildNodes();
482 
483       for (int j = 0; j < children.getLength(); j++) {
484         Node node = children.item(j);
485         SMILMediaElement e = (SMILMediaElement) node;
486 
487         // Avoid any element that is not a video or of the source type
488         if (NODE_TYPE_VIDEO.equals(e.getNodeName())) {
489           Track track;
490           try {
491             track = getTrackByID(e.getId(), sourceTracks);
492           } catch (IllegalStateException ex) {
493             logger.info("No track corresponding to SMIL ID found, skipping SMIL ID {}", e.getId());
494             continue;
495           }
496           videoSourceTracks.add(track);
497 
498           double beginInSeconds = e.getBegin().item(0).getResolvedOffset();
499           long beginInMs = Math.round(beginInSeconds * 1000d);
500           double durationInSeconds = e.getDur();
501           long durationInMs = Math.round(durationInSeconds * 1000d);
502 
503           // Gather video information
504           VideoInfo videoInfo = new VideoInfo();
505           // Aspect Ratio, e.g. 16:9
506           List<Track> tmpList = new ArrayList<Track>();
507           tmpList.add(track);
508           LayoutArea trackDimension = determineDimension(tmpList, true);
509           if (trackDimension == null) {
510             throw new WorkflowOperationException("One of the source video tracks did not contain "
511                 + "a valid video stream or dimension");
512           }
513           videoInfo.aspectRatioHeight = trackDimension.getHeight();
514           videoInfo.aspectRatioWidth = trackDimension.getWidth();
515           // "StartTime" is calculated later. It describes how far into the video the next section starts.
516           // (E.g. If webcam2 is started 10 seconds after webcam1, the startTime for webcam1 in the next section is 10)
517           videoInfo.startTime = 0;
518 
519           logger.info("Video information: Width: {}, Height {}, StartTime: {}", videoInfo.aspectRatioWidth,
520                   videoInfo.aspectRatioHeight, videoInfo.startTime);
521 
522           events.add(new StartStopEvent(true, track, beginInMs, videoInfo));
523           events.add(new StartStopEvent(false, track, beginInMs + durationInMs, videoInfo));
524 
525         }
526       }
527     }
528 
529     // No events? Skip
530     if (events.isEmpty()) {
531       logger.warn("Could not generate sections from given SMIL catalogue for tracks in given flavor, skipping ...");
532       return createResult(mediaPackage, WorkflowOperationResult.Action.SKIP);
533     }
534 
535     // Sort by timestamps ascending
536     Collections.sort(events);
537 
538     // Create an edit decision list
539     List<EditDecisionListSection> videoEdl = new ArrayList<EditDecisionListSection>();
540     HashMap<Track, StartStopEvent> activeVideos = new HashMap<>();   // Currently running videos
541 
542     // Define starting point
543     EditDecisionListSection start = new EditDecisionListSection();
544     start.timeStamp = finalStartTime;
545     videoEdl.add(start);
546 
547     // Define mid-points
548     for (StartStopEvent event : events) {
549       if (event.start) {
550         logger.info("Add start event at {}", event.timeStamp);
551         activeVideos.put(event.video, event);
552       } else {
553         logger.info("Add stop event at {}", event);
554         activeVideos.remove(event.video);
555       }
556       videoEdl.add(createEditDecisionList(event, activeVideos));
557     }
558 
559     // Define ending point
560     EditDecisionListSection endVideo = new EditDecisionListSection();
561     endVideo.timeStamp = finalEndTime;
562     endVideo.nextTimeStamp = finalEndTime;
563     videoEdl.add(endVideo);
564 
565     // Pre processing EDL
566     for (int i = 0; i < videoEdl.size() - 1; i++) {
567       // For calculating cut lengths
568       videoEdl.get(i).nextTimeStamp = videoEdl.get(i + 1).timeStamp;
569     }
570 
571     // Create ffmpeg command for each section
572     List<List<String>> commands = new ArrayList<>();          // FFmpeg command
573     List<List<Track>> tracksForCommands = new ArrayList<>();  // Tracks used in the FFmpeg command
574     for (EditDecisionListSection edl : videoEdl) {
575       // A too small duration will result in ffmpeg producing a faulty video, so avoid any section smaller than 50ms
576       if (edl.nextTimeStamp - edl.timeStamp < 50) {
577         logger.info("Skipping {}-length edl entry", edl.nextTimeStamp - edl.timeStamp);
578         continue;
579       }
580       // Create command for section
581       commands.add(compositeSection(layoutArea, edl));
582       tracksForCommands.add(edl.getAreas().stream().map(m -> m.getVideo()).collect(Collectors.toList()));
583     }
584 
585     // Create video tracks for each section
586     List<URI> uris = new ArrayList<>();
587     for (int i = 0; i < commands.size(); i++) {
588       logger.info("Sending command {} of {} to service. Command: {}", i + 1, commands.size(), commands.get(i));
589 
590       Job job;
591       try {
592         job = videoGridService.createPartialTrack(
593             commands.get(i),
594             tracksForCommands.get(i).toArray(new Track[tracksForCommands.get(i).size()])
595         );
596       } catch (VideoGridServiceException | org.apache.commons.codec.EncoderException | MediaPackageException e) {
597         throw new WorkflowOperationException(e);
598       }
599 
600       if (!waitForStatus(job).isSuccess()) {
601         throw new WorkflowOperationException(
602             String.format("VideoGrid job for media package '%s' failed", mediaPackage));
603       }
604 
605       Gson gson = new Gson();
606       uris.add(gson.fromJson(job.getPayload(), new TypeToken<URI>() { }.getType()));
607     }
608 
609     // Parse uris into tracks and enrich them with metadata
610     List<Track> tracks = new ArrayList<>();
611     for (URI uri : uris) {
612       TrackImpl track = new TrackImpl();
613       track.setFlavor(targetPresenterFlavor);
614       track.setURI(uri);
615 
616       Job inspection = null;
617       try {
618         inspection = inspectionService.enrich(track, true);
619       } catch (MediaInspectionException | MediaPackageException e) {
620         throw new WorkflowOperationException("Inspection service could not enrich track", e);
621       }
622       if (!waitForStatus(inspection).isSuccess()) {
623         throw new WorkflowOperationException(String.format("Failed to add metadata to track."));
624       }
625 
626       try {
627         tracks.add((TrackImpl) MediaPackageElementParser.getFromXml(inspection.getPayload()));
628       } catch (MediaPackageException e) {
629         throw new WorkflowOperationException("Could not parse track returned by inspection service", e);
630       }
631     }
632 
633     // Concatenate sections
634     Job concatJob = null;
635     try {
636       concatJob = composerService.concat(composerService.getProfile(concatEncodingProfile).getIdentifier(),
637               new Dimension(layoutArea.width,layoutArea.height) , true, tracks.toArray(new Track[tracks.size()]));
638     } catch (EncoderException | MediaPackageException e) {
639       throw new WorkflowOperationException("The concat job failed", e);
640     }
641     if (!waitForStatus(concatJob).isSuccess()) {
642       throw new WorkflowOperationException("The concat job did not complete successfully.");
643     }
644 
645     // Add to mediapackage
646     if (concatJob.getPayload().length() > 0) {
647       Track concatTrack;
648       try {
649         concatTrack = (Track) MediaPackageElementParser.getFromXml(concatJob.getPayload());
650       } catch (MediaPackageException e) {
651         throw new WorkflowOperationException("Could not parse track returned by concat service", e);
652       }
653       concatTrack.setFlavor(targetPresenterFlavor);
654       concatTrack.setURI(concatTrack.getURI());
655       applyTargetTagsToElement(targetTags, concatTrack);
656 
657       mediaPackage.add(concatTrack);
658     } else {
659       throw new WorkflowOperationException("Concat operation unsuccessful, no payload returned.");
660     }
661 
662     try {
663       workspace.cleanup(mediaPackage.getIdentifier());
664     } catch (IOException e) {
665       throw new WorkflowOperationException(e);
666     }
667 
668     final WorkflowOperationResult result = createResult(mediaPackage, WorkflowOperationResult.Action.CONTINUE);
669     logger.debug("Video Grid operation completed");
670     return result;
671   }
672 
673   /**
674    * Create a ffmpeg command that generates a video for the given section
675    *
676    * The videos passed as part of <code>videoEdl</code> are arranged in a grid layout.
677    * The grid layout is calculated in a way  that maximizes area usage (i.e. minimizes the areas where the background
678    * color has to be shown) by checking the area usage for each combination of vertical and horizontal rows, based
   * on the resolution of the layout area. The number of tiles per row/column is then used to generate a complex
680    * ffmpeg filter.
681    *
682    *
683    * @param layoutArea
684    *          General layout information for the video
685    * @param videoEdl
686    *          The edit decision list for the current cut
687    * @return A command line ready ffmpeg command
688    */
689   private List<String> compositeSection(LayoutArea layoutArea, EditDecisionListSection videoEdl) {
690     // Duration for this cut
691     long duration = videoEdl.nextTimeStamp - videoEdl.timeStamp;
692     logger.info("Cut timeStamp {}, duration {}", videoEdl.timeStamp, duration);
693 
694     // Declare ffmpeg command
695     String ffmpegFilter = String.format("color=c=%s:s=%dx%d:r=24", layoutArea.bgColor,
696         layoutArea.width, layoutArea.height);
697 
698     List<VideoInfo> videos = videoEdl.areas;
699     int videoCount = videoEdl.areas.size();
700 
701     logger.info("Laying out {} videos in {}", videoCount, layoutArea.name);
702 
703 
704     if (videoCount > 0) {
705       int tilesH = 0;
706       int tilesV = 0;
707       int tileWidth = 0;
708       int tileHeight = 0;
709       int totalArea = 0;
710 
      // Do an exhaustive search to maximize video areas
712       for (int tmpTilesV = 1; tmpTilesV < videoCount + 1; tmpTilesV++) {
713         int tmpTilesH = (int) Math.ceil((videoCount / (float)tmpTilesV));
714         int tmpTileWidth = (int) (2 * Math.floor((float)layoutArea.width / tmpTilesH / 2));
715         int tmpTileHeight = (int) (2 * Math.floor((float)layoutArea.height / tmpTilesV / 2));
716 
717         if (tmpTileWidth <= 0 || tmpTileHeight <= 0) {
718           continue;
719         }
720 
721         int tmpTotalArea = 0;
722         for (VideoInfo video: videos) {
723           int videoWidth = video.aspectRatioWidth;
724           int videoHeight = video.aspectRatioHeight;
725           VideoInfo videoScaled = aspectScale(videoWidth, videoHeight, tmpTileWidth, tmpTileHeight);
726           tmpTotalArea += videoScaled.aspectRatioWidth * videoScaled.aspectRatioHeight;
727         }
728 
729         if (tmpTotalArea > totalArea) {
730           tilesH = tmpTilesH;
731           tilesV = tmpTilesV;
732           tileWidth = tmpTileWidth;
733           tileHeight = tmpTileHeight;
734           totalArea = tmpTotalArea;
735         }
736       }
737 
738 
739       int tileX = 0;
740       int tileY = 0;
741 
742       logger.info("Tiling in a {}x{} grid", tilesH, tilesV);
743 
744       ffmpegFilter += String.format("[%s_in];", layoutArea.name);
745 
746       for (VideoInfo video : videos) {
747         //Get videoinfo
748         logger.info("tile location ({}, {})", tileX, tileY);
749         int videoWidth = video.aspectRatioWidth;
750         int videoHeight = video.aspectRatioHeight;
751         logger.info("original aspect: {}x{}", videoWidth, videoHeight);
752 
753         VideoInfo videoScaled = aspectScale(videoWidth, videoHeight, tileWidth, tileHeight);
754         logger.info("scaled size: {}x{}", videoScaled.aspectRatioWidth, videoScaled.aspectRatioHeight);
755 
756         Offset offset = padOffset(videoScaled.aspectRatioWidth, videoScaled.aspectRatioHeight, tileWidth, tileHeight);
757         logger.info("offset: left: {}, top: {}", offset.x, offset.y);
758 
759         // TODO: Get a proper value instead of the badly hardcoded 0
760         // Offset in case the pts is greater than 0
761         long seekOffset = 0;
762         logger.info("seek offset: {}", seekOffset);
763 
764         // Webcam videos are variable, low fps; it might be that there's
765         // no frame until some time after the seek point. Start decoding
766         // 10s before the desired point to avoid this issue.
767         long seek = video.startTime - 10000;
768         if (seek < 0) {
769           seek = 0;
770         }
771 
772         String padName = String.format("%s_x%d_y%d", layoutArea.name, tileX, tileY);
773 
774         // Apply the video start time offset to seek to the correct point.
775         // Only actually apply the offset if we're already seeking so we
776         // don't start seeking in a file where we've overridden the seek
777         // behaviour.
778         if (seek > 0) {
779           seek = seek + seekOffset;
780         }
781         // Instead of adding the filepath here, we put a placeholder.
782         // This is so that the videogrid service can later replace it, after it put the files in it's workspace
783         ffmpegFilter += String.format("movie=%s:sp=%s", "#{" + video.getVideo().getIdentifier() + "}", msToS(seek));
784         // Subtract away the offset from the timestamps, so the trimming
785         // in the fps filter is accurate
786         ffmpegFilter += String.format(",setpts=PTS-%s/TB", msToS(seekOffset));
787         // fps filter fills in frames up to the desired start point, and
788         // cuts the video there
789         ffmpegFilter += String.format(",fps=%d:start_time=%s", FFMPEG_WF_FRAMERATE, msToS(video.startTime));
790         // Reset the timestamps to start at 0 so that everything is synced
791         // for the video tiling, and scale to the desired size.
792         ffmpegFilter += String.format(",setpts=PTS-STARTPTS,scale=%d:%d,setsar=1",
793             videoScaled.aspectRatioWidth, videoScaled.aspectRatioHeight);
794         // And finally, pad the video to the desired aspect ratio
795         ffmpegFilter += String.format(",pad=w=%d:h=%d:x=%d:y=%d:color=%s", tileWidth, tileHeight,
796             offset.x, offset.y, layoutArea.bgColor);
797         ffmpegFilter += String.format("[%s_movie];", padName);
798 
799         // In case the video was shorter than expected, we might have to pad
800         // it to length. do that by concatenating a video generated by the
801         // color filter. (It would be nice to repeat the last frame instead,
802         // but there's no easy way to do that.)
803         ffmpegFilter += String.format("color=c=%s:s=%dx%d:r=%d", layoutArea.bgColor, tileWidth,
804             tileHeight, FFMPEG_WF_FRAMERATE);
805         ffmpegFilter += String.format("[%s_pad];", padName);
806         ffmpegFilter += String.format("[%s_movie][%s_pad]concat=n=2:v=1:a=0[%s];", padName, padName, padName);
807 
808         tileX += 1;
809         if (tileX >= tilesH) {
810           tileX = 0;
811           tileY += 1;
812         }
813       }
814 
815       // Create the video rows
816       int remaining = videoCount;
817       for (tileY = 0; tileY < tilesV; tileY++) {
818         int thisTilesH = Math.min(tilesH, remaining);
819         remaining -= thisTilesH;
820 
821         for (tileX = 0; tileX < thisTilesH; tileX++) {
822           ffmpegFilter += String.format("[%s_x%d_y%d]", layoutArea.name, tileX, tileY);
823         }
824         if (thisTilesH > 1) {
825           ffmpegFilter += String.format("hstack=inputs=%d,", thisTilesH);
826         }
827         ffmpegFilter += String.format("pad=w=%d:h=%d:color=%s", layoutArea.width, tileHeight, layoutArea.bgColor);
828         ffmpegFilter += String.format("[%s_y%d];", layoutArea.name, tileY);
829       }
830 
831       // Stack the video rows
832       for (tileY = 0; tileY < tilesV; tileY++) {
833         ffmpegFilter += String.format("[%s_y%d]", layoutArea.name, tileY);
834       }
835       if (tilesV > 1) {
836         ffmpegFilter += String.format("vstack=inputs=%d,", tilesV);
837       }
838       ffmpegFilter += String.format("pad=w=%d:h=%d:color=%s", layoutArea.width, layoutArea.height, layoutArea.bgColor);
839       ffmpegFilter += String.format("[%s];", layoutArea.name);
840       ffmpegFilter += String.format("[%s_in][%s]overlay=x=%d:y=%d", layoutArea.name,
841           layoutArea.name, layoutArea.x, layoutArea.y);
842 
843       // Here would be the end of the layoutArea Loop
844     }
845 
846     ffmpegFilter += String.format(",trim=end=%s", msToS(duration));
847 
848     List<String> ffmpegCmd = new ArrayList<String>(Arrays.asList(FFMPEG));
849     ffmpegCmd.add("-filter_complex");
850     ffmpegCmd.add(ffmpegFilter);
851     ffmpegCmd.addAll(Arrays.asList(FFMPEG_WF_ARGS));
852 
853     logger.info("Final command:");
854     logger.info(String.join(" ", ffmpegCmd));
855 
856     return ffmpegCmd;
857   }
858 
859   /**
860    * Scale the video resolution to fit the new resolution while maintaining aspect ratio
861    * @param oldWidth
862    *          Width of the video
863    * @param oldHeight
864    *          Height of the video
865    * @param newWidth
866    *          Intended new width of the video
867    * @param newHeight
868    *          Intended new height of the video
869    * @return
870    *          Actual new width and height of the video, guaranteed to be the same or smaller as the intended values
871    */
872   private VideoInfo aspectScale(int oldWidth, int oldHeight, int newWidth, int newHeight) {
873     if ((float)oldWidth / oldHeight > (float)newWidth / newHeight) {
874       newHeight = (int) (2 * Math.round((float)oldHeight * newWidth / oldWidth / 2));
875     } else {
876       newWidth = (int) (2 * Math.round((float)oldWidth * newHeight / oldHeight / 2));
877     }
878     return new VideoInfo(newHeight, newWidth);
879   }
880 
881   /**
882    * Calculate video offset from borders for ffmpeg pad operation
883    * @param videoWidth
884    *          Width of the video
885    * @param videoHeight
886    *          Height of the video
887    * @param areaWidth
888    *          Width of the area
889    * @param areaHeight
890    *          Width of the area
891    * @return
892    *          The position of the video within the padded area
893    */
894   private Offset padOffset(int videoWidth, int videoHeight, int areaWidth, int areaHeight) {
895     int padX = (int) (2 * Math.round((float)(areaWidth - videoWidth) / 4));
896     int padY = (int) (2 * Math.round((float)(areaHeight - videoHeight) / 4));
897     return new Offset(padX, padY);
898   }
899 
900   /**
901    * Converts milliseconds to seconds and to string
902    * @param timestamp
903    *          Time in milliseconds, e.g. 12567
904    * @return
905    *          Time in seconds, e.g. "12.567"
906    */
907   private String msToS(long timestamp) {
908     double s = (double)timestamp / 1000;
909     return String.format(Locale.US, "%.3f", s);   // Locale.US to get a . instead of a ,
910   }
911 
912   /**
913    * Finds and returns the first track matching the given id in a list of tracks
914    * @param trackId
915    *          The id of the track we're looking for
916    * @param tracks
917    *          The collection of tracks we're looking in
918    * @return
919    *          The first track with the given trackId
920    */
921   private Track getTrackByID(String trackId, List<Track> tracks) {
922     for (Track t : tracks) {
923       if (t.getIdentifier().contains(trackId)) {
924         logger.debug("Track-Id from smil found in Mediapackage ID: " + t.getIdentifier());
925         return t;
926       }
927     }
928     throw new IllegalStateException("No track matching smil Track-id: " + trackId);
929   }
930 
931   /**
932    * Determine the largest dimension of the given list of tracks
933    *
934    * @param tracks
935    *          the list of tracks
936    * @param forceDivisible
937    *          Whether to enforce the track's dimension to be divisible by two
938    * @return the largest dimension from the list of track
939    */
940   private LayoutArea determineDimension(List<Track> tracks, boolean forceDivisible) {
941     Tuple<Track, LayoutArea> trackDimension = getLargestTrack(tracks);
942     if (trackDimension == null) {
943       return null;
944     }
945 
946     if (forceDivisible && (trackDimension.getB().getHeight() % 2 != 0 || trackDimension.getB().getWidth() % 2 != 0)) {
947       LayoutArea scaledDimension = new LayoutArea((trackDimension.getB().getWidth() / 2) * 2, (trackDimension
948               .getB().getHeight() / 2) * 2);
949       logger.info("Determined output dimension {} scaled down from {} for track {}", scaledDimension,
950               trackDimension.getB(), trackDimension.getA());
951       return scaledDimension;
952     } else {
953       logger.info("Determined output dimension {} for track {}", trackDimension.getB(), trackDimension.getA());
954       return trackDimension.getB();
955     }
956   }
957 
958   /**
959    * Returns the track with the largest resolution from the list of tracks
960    *
961    * @param tracks
962    *          the list of tracks
963    * @return a {@link Tuple} with the largest track and it's dimension
964    */
965   private Tuple<Track, LayoutArea> getLargestTrack(List<Track> tracks) {
966     Track track = null;
967     LayoutArea dimension = null;
968     for (Track t : tracks) {
969       if (!t.hasVideo()) {
970         continue;
971       }
972 
973       VideoStream[] videoStreams = TrackSupport.byType(t.getStreams(), VideoStream.class);
974       int frameWidth = videoStreams[0].getFrameWidth();
975       int frameHeight = videoStreams[0].getFrameHeight();
976       if (dimension == null || (frameWidth * frameHeight) > (dimension.getWidth() * dimension.getHeight())) {
977         dimension = new LayoutArea(frameWidth, frameHeight);
978         track = t;
979       }
980     }
981     if (track == null || dimension == null) {
982       return null;
983     }
984 
985     return Tuple.tuple(track, dimension);
986   }
987 
988   /**
989    * Returns the absolute path of the track
990    *
991    * @param track
992    *          Track whose path you want
993    * @return {@String} containing the absolute path of the given track
994    * @throws WorkflowOperationException
995    */
996   private String getTrackPath(Track track) throws WorkflowOperationException {
997     File mediaFile;
998     try {
999       mediaFile = workspace.get(track.getURI());
1000     } catch (NotFoundException e) {
1001       throw new WorkflowOperationException(
1002               "Error finding the media file in the workspace", e);
1003     } catch (IOException e) {
1004       throw new WorkflowOperationException(
1005               "Error reading the media file in the workspace", e);
1006     }
1007     return mediaFile.getAbsolutePath();
1008   }
1009 
1010   /**
1011    * Collects the info for the next section of the final video into an object
1012    * @param event
1013    *          Event detailing the time a video has become active/inactive
1014    * @param activeVideos
1015    *          Currently active videos
1016    * @return
1017    */
1018   private EditDecisionListSection createEditDecisionList(
1019       StartStopEvent event,
1020       HashMap<Track, StartStopEvent> activeVideos
1021   ) {
1022     EditDecisionListSection nextEdl = new EditDecisionListSection();
1023     nextEdl.timeStamp = event.timeStamp;
1024 
1025     for (Map.Entry<Track, StartStopEvent> activeVideo : activeVideos.entrySet()) {
1026       nextEdl.areas.add(new VideoInfo(activeVideo.getKey(), event.timeStamp,
1027           activeVideo.getValue().videoInfo.aspectRatioHeight,
1028           activeVideo.getValue().videoInfo.aspectRatioWidth,
1029           event.timeStamp - activeVideo.getValue().timeStamp));
1030     }
1031 
1032     return nextEdl;
1033   }
1034 
1035   /**
1036    * Parses a string detailing a resolution into two integers
1037    * @param s
1038    *          String of the form "AxB"
1039    * @return
1040    *          The width and height
1041    * @throws IllegalArgumentException
1042    */
1043   private ImmutablePair<Integer, Integer> getResolution(String s) throws IllegalArgumentException {
1044     String[] parts = s.split("x");
1045     if (parts.length != 2) {
1046       throw new IllegalArgumentException(format("Unable to create resolution from \"%s\"", s));
1047     }
1048 
1049     return new ImmutablePair<Integer, Integer>(Integer.parseInt(parts[0]), Integer.parseInt(parts[1]));
1050   }
1051 }