Set default crop aspect ratio on html5smartimage - AEM

I have successfully changed the aspectRatioConfig for an html5smartimage component to replace the default "Free crop" with a custom aspect ratio; however, I'm struggling to find a way to make my custom aspect ratio the default selection when cropping images. Essentially, I want to default to my custom aspect ratio rather than have the user select it from the list of aspect ratios (in this case there is only one value). Is this possible? Can anyone assist?
Here is my code to replace the free crop with my custom config. I was thinking that setting the checked value of the available aspect ratios (in initComponent below) would trigger the selected event, but it doesn't appear to. How can I default the selected aspect ratio, or perhaps trigger the selected event?
(function($, undefined) {
/**
* Determine the scaling ratio the image component might have applied
*
* @param data contains `imgWidth` and `imgHeight` keys
* @returns {number} is the scaling ratio, 1 if no scaling was applied.
*/
function getScalingRatio(data) {
var scaleRatio = 1;
if (data.imgWidth > 1280 || data.imgHeight > 1280) {
if (data.imgWidth > 1280) {
scaleRatio = 1280 / data.imgWidth;
} else {
scaleRatio = 1280 / data.imgHeight;
}
}
return scaleRatio;
}
/**
* Wrapper for the aspect-ratio-aware HTML5 smart image
*
* @type {void|*}
*/
window.CQTG = $.extend(window.CQTG || {}, {
Html5SmartImageWrapper: CQ.Ext.extend(CQ.Ext.Panel, {
/**
* Initialize panel
*
* @param originalConfig
*/
constructor: function (originalConfig) {
// initialize the container so we get some useful information
CQTG.Html5SmartImageWrapper.superclass.constructor.call(this, $.extend({}, originalConfig, {
layout: 'fit',
bodyStyle: 'padding: 0;',
items: {
title: null,
header: false,
frameHeader: false,
border: false
}
}));
// find the data path for this container instance
originalConfig.imagePath = this.findParentByType('dialog').responseScope.path;
// get rid of panel and replace with smart image
this.removeAll();
// add the html5 smart image to the container
this.add(new CQTG.Html5SmartImage(originalConfig));
}
}),
Html5SmartImage: CQ.Ext.extend(CQ.html5.form.SmartImage, {
/**
* Initialize data-members
*
* @param config
*/
constructor: function (config) {
config = config || {};
var aInfo = this.getAspectRatioString(config.imagePath);
// setup some default values
config = CQ.Util.applyDefaults(config, {
"cropConfig": {
"aspectRatios": {
"newsholeCrop": {
"text": "Newshole crop",
"value": aInfo.aspectRatio
}
}
}
}
);
CQTG.Html5SmartImage.superclass.constructor.call(this, config);
// first tool is the crop (rest has been disabled)
this.imageToolDefs[0].cropMinWidth = aInfo.minWidth;
this.imageToolDefs[0].cropMinHeight = aInfo.minHeight;
},
/**
* Retrieve the bounding box and derive an aspect ratio based on it
*
* There is some additional magic here because the image component uses a rendition,
* not the original and bases all its pixel calculations on it without scaling it back
* up. That is why the scaleRatio is determined
*/
getAspectRatioString: function (cmpPath) {
var
boundingBoxLocation = cmpPath.substring(0, cmpPath.lastIndexOf('/')),
data = CQ.Util.eval(boundingBoxLocation + ".crop.json"),
roundX = Math.ceil(data.aspectX * 100),
roundY = Math.ceil(data.aspectY * 100),
scaleRatio = getScalingRatio(data)
;
return {
aspectRatio: (roundX + "," + roundY),
minWidth: Math.ceil(data.minCropWidth * scaleRatio),
minHeight: Math.ceil(data.minCropHeight * scaleRatio)
};
},
initComponent: function () {
CQTG.Html5SmartImage.superclass.initComponent.call(this);
var cropTool = null;
CQ.Ext.each(this.imageToolDefs, function(tool){
if(tool.toolId == "smartimageCrop"){
cropTool = tool;
}
});
var userInterface = cropTool.userInterface;
this.on("loadimage", function(){
var aRatios = userInterface.aspectRatioMenu.findByType("menucheckitem");
if(!aRatios[0].checked || !aRatios[0].initialConfig.checked){
aRatios[0].checked = true;
aRatios[0].initialConfig.checked = true;
}
});
}
})
});
CQ.Ext.reg("ffx.html5smartimage", CQTG.Html5SmartImageWrapper);
})($CQ);

Sorted this out myself... I actually wasn't too far off when I posted this initially; I just needed to find the correct event to trigger the aspectRatio update. It turns out I needed to override the smartImage.js toolClicked() method, reset the aspect ratio via setAspectRatioUI, and then re-trigger onRatioChanged.
(function($, undefined) {
/**
* Determine the scaling ratio the image component might have applied
*
* @param data contains `imgWidth` and `imgHeight` keys
* @returns {number} is the scaling ratio, 1 if no scaling was applied.
*/
function getScalingRatio(data) {
var scaleRatio = 1;
if (data.imgWidth > 1280 || data.imgHeight > 1280) {
if (data.imgWidth > 1280) {
scaleRatio = 1280 / data.imgWidth;
} else {
scaleRatio = 1280 / data.imgHeight;
}
}
return scaleRatio;
}
/**
* Wrapper for the aspect-ratio-aware HTML5 smart image
*
* @type {void|*}
*/
window.CQTG = $.extend(window.CQTG || {}, {
Html5SmartImageWrapper: CQ.Ext.extend(CQ.Ext.Panel, {
/**
* Initialize panel
*
* @param originalConfig
*/
constructor: function (originalConfig) {
// initialize the container so we get some useful information
CQTG.Html5SmartImageWrapper.superclass.constructor.call(this, $.extend({}, originalConfig, {
layout: 'fit',
bodyStyle: 'padding: 0;',
items: {
title: null,
header: false,
frameHeader: false,
border: false
}
}));
// find the data path for this container instance
originalConfig.imagePath = this.findParentByType('dialog').responseScope.path;
// get rid of panel and replace with smart image
this.removeAll();
// add the html5 smart image to the container
this.add(new CQTG.Html5SmartImage(originalConfig));
}
}),
Html5SmartImage: CQ.Ext.extend(CQ.html5.form.SmartImage, {
/**
* Initialize data-members
*
* @param config
*/
constructor: function (config) {
config = config || {};
var aInfo = this.getAspectRatioString(config.imagePath);
// setup some default values
config = CQ.Util.applyDefaults(config, {
"cropConfig": {
"aspectRatios": {
"newsholeCrop": {
"text": "Newshole crop",
"value": aInfo.aspectRatio,
"checked": true
}
}
}
}
);
CQTG.Html5SmartImage.superclass.constructor.call(this, config);
// first tool is the crop (rest has been disabled)
this.imageToolDefs[0].cropMinWidth = aInfo.minWidth;
this.imageToolDefs[0].cropMinHeight = aInfo.minHeight;
},
/**
* Retrieve the bounding box and derive an aspect ratio based on it
*
* There is some additional magic here because the image component uses a rendition,
* not the original and bases all its pixel calculations on it without scaling it back
* up. That is why the scaleRatio is determined
*/
getAspectRatioString: function (cmpPath) {
var
boundingBoxLocation = cmpPath.substring(0, cmpPath.lastIndexOf('/')),
data = CQ.Util.eval(boundingBoxLocation + ".crop.json"),
roundX = Math.ceil(data.aspectX * 100),
roundY = Math.ceil(data.aspectY * 100),
scaleRatio = getScalingRatio(data)
;
return {
aspectRatio: (roundX + "," + roundY),
minWidth: Math.ceil(data.minCropWidth * scaleRatio),
minHeight: Math.ceil(data.minCropHeight * scaleRatio)
};
},
/**
* override smartImage toolClicked and set default cropTool to "Newshole crop"
*/
toolClicked: function(cropTool) {
cropTool = null;
CQ.Ext.each(this.imageToolDefs, function(tool){
if(tool.toolId == "smartimageCrop"){
cropTool = tool;
}
});
CQTG.Html5SmartImage.superclass.toolClicked.call(this, cropTool);
var userInterface = cropTool.userInterface;
var aRatios = userInterface.aspectRatioMenu.findByType("menucheckitem");
if(!aRatios[0].checked){
aRatios[0].checked = true;
}
userInterface.setAspectRatioUI(aRatios[0].value);
userInterface.onRatioChanged(aRatios[0].value, aRatios[0].text);
}
})
});
CQ.Ext.reg("ffx.html5smartimage", CQTG.Html5SmartImageWrapper);
})($CQ);

Related

If scrollIntoView is not working well enough / scrollIntoView not working / scrollIntoView alternative

I have faced the problem that scrollIntoView sometimes works well and sometimes does not (in most cases not well). So I tried to find an alternative, and I found something like one in this article.
But in my program I also needed horizontal scrolling to the element, and vertical scrolling that shows the element at the top of the parent window.
So I modified the code from the article.
/* This is the main function, which takes two ref parameters: the parent element and the child element */
function scrollIntoView(parent, child) {
const parentBounding = parent.getBoundingClientRect(),
clientBounding = child.getBoundingClientRect();
const parentTop = parentBounding.top,
clientTop = clientBounding.top,
parentLeft = parentBounding.left,
clientLeft = clientBounding.left - 400; /* 400 is an offset so that the element is not pinned to the upper-left edge. You can delete it */
if (parentTop >= clientTop) {
scrollTo(parent, -(parentTop - clientTop), 300, 'vertical');
}
else {
scrollTo(parent, clientTop - parentTop, 300, 'vertical');
}
if (parentLeft >= clientLeft) {
scrollTo(parent, -(parentLeft - clientLeft), 300, 'horizontal');
}
else {
scrollTo(parent, clientLeft - parentLeft, 300, 'horizontal');
}
}
function scrollTo(element, to, duration, type) {
let start = (type == 'vertical') ? element.scrollTop : element.scrollLeft,
currentTime = 0,
increment = 20;
let animateScroll = function() {
currentTime += increment;
let val = easeInOutQuad(currentTime, start, to, duration);
if (type == 'vertical') {
element.scrollTop = val;
}
else {
element.scrollLeft = val;
}
if (currentTime < duration) {
setTimeout(animateScroll, increment);
}
}
animateScroll();
}
/* Function for smooth scroll animation with the time duration */
function easeInOutQuad(time, startPos, endPos, duration) {
time /= duration / 2;
if (time < 1) return (endPos / 2) * time * time + startPos;
time--;
return (-endPos / 2) * (time * (time - 2) - 1) + startPos;
}
This feature was experimental (at the time of my comment): both scrollIntoView() itself and its scrollIntoViewOptions argument.

Missing import scala.concurrent.ops

I'm trying to run some really old Scala code for my game, but I can't run it because some of the libraries it uses are obsolete and no longer work.
import collection.mutable
import scala.concurrent.ops._
/**
* Put numbers in a NxN board
* from 1 to N*N
* scala SayiYerlestirmece N
*/
object SayiYerlestirmece {
private var current : Int = 0;
def main(args: Array[String]) {
var size = 5;
//if board size is given use.
if (args.nonEmpty){
size = Integer.parseInt(args(0));
}
var i = 0;
for (x <- 0 until size ){
for(y <- 0 until size){
//run every initial states in parallel.
spawn{
val arr = new Array[Int](size * size);
arr(i) = 1;
//create initial states
val initialList :List[Model] = List(new Model(arr,size,x,y));
solve(initialList);
}
i+=1;
}
}
}
/**
* solve problem recursively
* @param modelList - next states
*/
def solve(modelList: List[Model]){
modelList.foreach(p => {
if (p.completed){
current+=1;
println(p);
println(current);
}
solve(p.nextStepList());
});
}
}
/**
*
* @param data - current state of board
* @param size - size of board 5x5 10x10 etc
* @param x - current x position on the board
* @param y - current y position on the board
*/
class Model(data:Array[Int], size:Int, x:Int, y:Int) {
/**
* convert multi dimensional x,y index to one dimensional index.
* @param size - size of board
* @param x - current x position
* @param y - current y position
* @return - corresponding array index
*/
def xy2Idx(size:Int, x:Int, y:Int): Int = {
if ( x < 0 || y < 0 || x >= size || y >= size)
-1
else
size * x + y;
}
/**
* convert one dimensional array index to multi dimensional x,y index
* @param size
* @param idx
* @return
*/
def idx2Xy(size:Int, idx:Int):(Int,Int) = {
return (idx/size,idx%size);
}
/**
* Checks whether game is completed or not
* @return true if the game is completed, else false
*/
def completed() :Boolean = { data(xy2Idx(size,x,y)) == size * size };
/**
* Position of next available empty cells.
* @return - list of indexes of empty cells.
*/
def nextStepIdxList():List[Int] = {
return mutable.MutableList(
xy2Idx(size,x+3,y),
xy2Idx(size,x-3,y),
xy2Idx(size,x,y+3),
xy2Idx(size,x,y-3),
xy2Idx(size,x+2,y+2),
xy2Idx(size,x-2,y+2),
xy2Idx(size,x+2,y-2),
xy2Idx(size,x-2,y-2)
).filter(p => p > -1 && data(p) == 0).toList; //filter out of bounds and non-empty cells
}
/**
* Next states of board. These are derived from indexes
* which are returned by nextStepIdxList() function.
* @return - Next possible states of the board
*/
def nextStepList():List[Model] = {
var modelList = mutable.MutableList[Model]()
nextStepIdxList().foreach( p => {
val newArr = data.clone();
newArr(p) = data(xy2Idx(size,x,y)) + 1;
modelList += new Model(newArr,size,idx2Xy(size,p)._1, idx2Xy(size,p)._2);
});
return modelList.sortWith(_.nextStepSize() < _.nextStepSize()).toList; // sorts board states by least next step size
}
/**
* Available cell count at next step.
* This value is used to determine next move.
* @return - Available empty cell count
*/
def nextStepSize():Int = {
return nextStepIdxList().size;
}
override def toString(): String = {
val sb = new StringBuilder();
data.indices.foreach(p =>{
if (p % size == 0)
sb.append("\n");
else
sb.append(",");
sb.append(data(p));
});
return sb.toString();
}
}
When I run it, it says the import scala.concurrent.ops._ does not work; when I delete it, it can't find spawn. When I added another version of spawn, it didn't work either. How can I run this code?
scala.concurrent.ops._ was deprecated in favour of Future by Futures, promises and execution contexts #200 in Scala 2.10.0 in 2012, and removed entirely in 2.11.0. Try replacing
spawn {
// expressions
}
with
import concurrent.ExecutionContext.Implicits.global
Future {
// expressions
}
As a side note, concurrency and mutable collections are usually an unsafe combination due to race conditions etc.

Cannot figure out how the bounds of a Region work

I'm currently writing a CAD-like program for logic circuits (it's my first "graphics intensive" program ever). When I place a component on the schematic, let's say an AND gate (which is a Region at its root), I want to be able to interact with it (select it, change its properties, etc.). So far, so good. I can click on it and everything goes well. However, if I click outside of it, the mouse click event still shows the component as its source(!).
Digging a bit further, I put some traces in the mouse click handler and found out that getBoundsInLocal() and getBoundsInParent() return bounds that are around 50% larger than they should be. getLayoutBounds(), getWidth() and getHeight() do return the correct values.
The pane onto which the components are laid out is a simple Pane object, but it uses setScaleX() and setScaleY() to implement zooming capabilities. I did try to disable them, with no luck.
public abstract class SchematicComponent
extends Region {
private Shape graphicShape = null;
public Shape getGraphicShape() {
if( isShapeDirty() ) {
if( graphicShape != null ) {
getChildren().remove( graphicShape );
}
graphicShape = createShape();
markShapeDirty( false );
if( graphicShape != null ) {
getChildren().add( graphicShape );
}
}
return graphicShape;
}
abstract protected Shape createShape();
}
abstract public class CircuitComponent
extends SchematicComponent {
}
abstract public class LogicGate
extends CircuitComponent {
@Override
protected void layoutChildren() {
super.layoutChildren();
Pin outPin;
final double inputLength = getInputPinsMaxLength();
// Layout the component around its center.
// NOTE: I did try to set the center offset to 0 with no luck.
Point2D centerOffset = getCenterPointOffset().multiply( -1 );
Shape gateShape = getGraphicShape();
if( gateShape != null ) {
gateShape.setLayoutX( centerOffset.getX() + inputLength );
gateShape.setLayoutY( centerOffset.getY() );
}
/* Layout the output pins. */
outPin = getOutputPin();
if( outPin != null ) {
outPin.layout();
outPin.setLayoutX( centerOffset.getX() + getWidth() );
outPin.setLayoutY( centerOffset.getY() + getHeight() / 2 );
}
/* Compute the first input pin location and the gap between each
pins */
double pinGap = 2;
double y;
if( getInputPins().size() == 2 ) {
y = centerOffset.getY() + getHeight() / 2 - 2;
pinGap = 4;
}
else {
y = centerOffset.getY() + ( getHeight() / 2 ) - getInputPins().size() + 1;
}
/* Layout the input pins */
for( Pin inPin : getInputPins() ) {
inPin.layout();
inPin.layoutXProperty().set( centerOffset.getX() );
inPin.layoutYProperty().set( y );
y += pinGap;
}
}
}
// The actual object placed on the schematic
public class AndGate
extends LogicGate {
@Override
protected double computePrefWidth( double height ) {
// NOTE: computeMin/MaxWidth methods call this one
double width = getSymbolWidth() + getInputPinsMaxLength();
double length = 0;
width += length;
if( getOutputPin().getLength() > 0 ) {
width += getOutputPin().getLength();
}
return width; // Always 16
}
@Override
protected double computePrefHeight( double width ) {
// NOTE: computeMin/MaxHeight methods call this one
return getSymbolHeight() + getExtraHeight(); // Always 10
}
@Override
protected Shape createShape() {
Path shape;
final double extraHeight = getExtraHeight();
final double inputLength = getInputPinsMaxLength();
final double outputLength = getOutputPin().getLength();
/* Width and Height of the symbol itself (i,e, excluding the
input/output pins */
final double width = getWidth() - inputLength - outputLength;
final double height = getHeight() - extraHeight;
/* Starting point */
double startX = 0;
double startY = extraHeight / 2;
ArrayList<PathElement> elements = new ArrayList<>();
elements.add( new MoveTo( startX, startY ) );
elements.add( new HLineTo( startX + ( width / 2 ) ) );
elements.add( new ArcTo( ( width / 2 ), // X radius
height / 2, // Y radius
180, // Angle 180°
startX + ( width / 2 ), // X position
startY + height, // Y position
false, // large arc
true ) ); // sweep
elements.add( new HLineTo( startX ) );
if( extraHeight > 0 ) {
/* The height of the input pins is larger than the height of
the shape so we need to add extra bar on top and bottom of
the shape.
*/
elements.add( new MoveTo( startX, 0 ) );
elements.add( new VLineTo( extraHeight + height ) );
}
else {
elements.add( new VLineTo( startY ) );
}
shape = new Path( elements );
shape.setStroke( getPenColor() );
shape.setStrokeWidth( getPenSize() );
shape.setStrokeLineJoin( StrokeLineJoin.ROUND );
shape.setStrokeLineCap( StrokeLineCap.ROUND );
shape.setFillRule( FillRule.NON_ZERO );
shape.setFill( getFillColor() );
return shape;
}
} // End: LogicGate
// SchematicView is the ScrollPane container that handles the events
public class SchematicView
extends ScrollPane {
/* Mouse handler inner class */
private class MouseEventHandler
implements EventHandler<MouseEvent> {
@Override
public void handle( MouseEvent event ) {
if( event.getEventType() == MouseEvent.MOUSE_CLICKED ) {
processMouseClicked( event );
}
else { /* ... more stuff ... */ }
}
private void processMouseClicked( MouseEvent event ) {
Object node = event.getSource();
SchematicSheet sheet = getSheet();
Bounds local = ( (Node) node ).getLayoutBounds();
Bounds local1 = ( (Node) node ).getBoundsInLocal();
Bounds parent = ( (Node) node ).getBoundsInParent();
// At this point, here is what I get:
// node.getHeight() = 10 -> Good
// local.getHeight() = 10 -> Good
// local1.getHeight() = 15.6499996... -> Not expected!
// parent.getHeight() = 15.6500015... -> Not expected!
/*... More stuff ... */
}
}
So at this point, I'm running out of clues about what is going on. Where do these getBoundsInXXX() values come from? They don't match the parent's scale values either. The same goes for getWidth(): I get 24.825000... instead of 16.
Looking at this, I understand why clicking outside the component works as if I clicked on it: its bounds are about 50% bigger than they should be.
I googled the damn thing and searched some docs for almost 2 days now and I'm still baffled. I think I understand that the getBoundsInXXX() methods do their own computation, but could it be off by that much? I don't think so. My best guess is that it is something inside the createShape() method, but I just can't figure out what it is.
Does anyone have a clue what is going on?
Many thanks for your help.
P.S.: This is my first post here, so hopefully I did it right ;)
I think I finally found the problem :)
Basically, the Pin custom shape was drawn in the negative part of the X axis (wrong calculations, my bad!). My best guess is that somehow Java notices that I drew outside the standard bounds and then adds the extra space used to the bounds, hence adding 50% to the width, which matches the length of the Pin. Drawing it in the positive region seems to have fixed the problem.
I'm not 100% sure that is the right answer, but it makes sense and it is now working as expected.
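For anyone else hitting this, the effect is easy to reproduce with a plain Region: any child geometry drawn at negative coordinates gets unioned into getBoundsInLocal()/getBoundsInParent(), while getLayoutBounds() and getWidth()/getHeight() only reflect the layout size. Here is a minimal sketch of that behaviour; the FakeGate class and its 16x10 size are made up for illustration and are not taken from the code above.
import javafx.application.Application;
import javafx.geometry.Bounds;
import javafx.scene.Scene;
import javafx.scene.layout.Pane;
import javafx.scene.layout.Region;
import javafx.scene.shape.Line;
import javafx.stage.Stage;

public class NegativeBoundsDemo extends Application {
    /** Stand-in for a gate: a 16x10 body plus a "pin" drawn at negative x. */
    static class FakeGate extends Region {
        FakeGate() {
            setPrefSize(16, 10);
            // The pin sticks out 8 units to the left of the local origin (0,0).
            getChildren().add(new Line(-8, 5, 0, 5));
        }
    }

    @Override
    public void start(Stage stage) {
        FakeGate gate = new FakeGate();
        Pane sheet = new Pane(gate);
        stage.setScene(new Scene(sheet, 200, 100));
        stage.show();
        sheet.applyCss();
        sheet.layout(); // make sure a layout pass has run before reading bounds
        Bounds layout = gate.getLayoutBounds();  // (0, 0, 16, 10): the layout size only
        Bounds local = gate.getBoundsInLocal();  // minX is negative, width roughly 24: the pin (plus stroke) is unioned in
        System.out.println("layoutBounds  = " + layout);
        System.out.println("boundsInLocal = " + local);
    }

    public static void main(String[] args) {
        launch(args);
    }
}
Moving the pin geometry so it starts at x = 0 (and shifting the layout instead) makes boundsInLocal match the layout size again, which is exactly what fixed the click detection here.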

What is the right size of the area in Facebook instant games to use it with Phaser

I am coding a game in Phaser 2 CE, so the actual code is below, following https://stackoverflow.com/a/50300726/2107253, but when I open the game on my mobile device or on the desktop, the image is not displayed in the center and it is partly hidden.
var game = new Phaser.Game(640, 480, Phaser.AUTO, 'game', { preload: preload, create: create, update: update });
var sprite;
function preload () {
// This is equivalent to <https://examples.phaser.io/assets/>.
this.load.image('dude', 'assets/sprites/phaser-dude.png');
}
function create() {
sprite = game.add.sprite(game.world.centerX, game.world.centerY, 'dude');
sprite.inputEnabled = true;
sprite.events.onInputDown.add(myHandler, this);
}
function myHandler() {
sprite.x += 10;
}
function update() {
}
When you do a search there are some related questions, e.g. https://stackoverflow.com/a/49034911/2107253
So you can try this code; it works fine, because you create the game based on the inner dimensions of the device window and you scale it with SHOW_ALL, which is fine according to the book named A Guide to the Phaser Scale Manager that you can find here: https://gumroad.com/photonstorm
//If you use PixelRatio, the sprite will be smaller according to the resolution of the mobile device
PixelW = window.innerWidth;// * window.devicePixelRatio
PixelH = window.innerHeight;// * window.devicePixelRatio
var game = new Phaser.Game(PixelW, PixelH, Phaser.AUTO, 'game', { preload: preload, create: create, update: update });
var sprite;
function preload () {
// This is equivalent to <https://examples.phaser.io/assets/>.
this.load.image('dude', 'assets/sprites/phaser-dude.png');
}
function create() {
game.stage.backgroundColor = 0x3b5998;
game.scale.scaleMode = Phaser.ScaleManager.SHOW_ALL;
sprite = game.add.sprite(game.world.centerX, game.world.centerY, 'dude');
sprite.inputEnabled = true;
sprite.events.onInputDown.add(myHandler, this);
}
function myHandler() {
sprite.anchor.setTo(0.5, 0.5);
sprite.x = Math.floor(Math.random() * PixelW);
sprite.y = Math.floor(Math.random() * PixelH);
}
function update() {
}

JavaFX scene recording

Yesterday, as I was working on my game, I decided I wanted to include some sort of video showing sneak peeks of the themes, tutorials and such. I then decided to check how to record my scene or nodes. After looking around I concluded there was no easy way to do it, so I decided to make my own utility, which I want to share with all of you.
Please know I am more or less a beginner and I haven't been coding for long. I know it is probably not properly done and that it can be done in much better ways. Anyway, here it goes.
/*
* JavaFX SceneCaptureUtility 2016/07/02
*
* The author of this software "Eudy Contreras" grants you ("Licensee")
* a non-exclusive, royalty free, license to use,modify and redistribute this
* software in source and binary code form.
*
* Please be aware that this software is simply part of a personal test
* and may in fact be unstable. The software in its current state is not
* considered a finished product and has plenty of room for improvement and
* changes due to the range of different approaches which can be used to
* achieve the desired result of the software.
*
* BEWARE that because of the nature of this software and because of the way
* this software functions the ability of this software to be able to operate
* without probable malfunction is strictly based on factors such as the amount
* of processing power the system running the software has, and the resolution of
* the screen being recorded. The amount of nodes on the scene will have an impact
* as well as the size and recording rate to which this software will be subjected
* to. IN CASE OF MEMORY RELATED PROBLEMS SUCH AS BUT NOT LIMITED TO LACK OF REMAINING
* HEAP SPACE PLEASE CONSIDER LOWERING THE RESOLUTION OF THE SCENE BEING RECORDED.
*
* BEWARE STABILITY ISSUES MAY ARISE!
* BEWARE SAVING AND LOADING THE RECORDED VIDEO MAY TAKE TIME DEPENDING ON YOUR SYSTEM
*
* PLEASE keep track of the console for useful information and feedback
*/
import java.awt.image.BufferedImage;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import javax.imageio.ImageIO;
import javafx.animation.Animation;
import javafx.animation.KeyFrame;
import javafx.animation.Timeline;
import javafx.application.Platform;
import javafx.collections.ObservableList;
import javafx.concurrent.Task;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.SnapshotParameters;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.image.WritableImage;
import javafx.scene.layout.Background;
import javafx.scene.layout.BackgroundFill;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.paint.Color;
import javafx.scene.shape.Circle;
import javafx.scene.text.Font;
import javafx.scene.text.FontWeight;
import javafx.scene.text.Text;
import javafx.stage.Screen;
import javafx.util.Duration;
/**
* @author Eudy Contreras
*
* This program records a JavaFX scene by taking continuous snapshots of the scene,
* which are then stored and saved to a predefined destination. The program allows the
* user to save and retrieve the frame-based videos made by this program, which can
* then be played on a specified layer or "screen".
*/
public class SceneCaptureUtility {
private int frame = 0; // The current frame which is being displayed
private int timer = 0; // recording timer.
private int record_time; // Amount of time the recorder will record
private int counter = 0;
private int width;
private int height;
private float frameRate = 60.0f;
private long capture_rate = (long) (1000f / frameRate); // Rate at which the recorder will record. Default rate: 60 FPS
private PlaybackSettings playSettings;
private Double frameCap = 1.0 / frameRate; // Framerate at which the recorded video will play
private Double video_size_scale = 1.0; // Scale of the video relative to its size: 0.5 = half, 2.0 = double the size
private Double bounds_scale_X = 0.5; // Scale factor for scaling relative to assigned or obtained resolution
private Double bounds_scale_Y = 0.5;
private Boolean saveFrames = false; //If true saves the individual frames of the video as images
private Boolean loadFrames = false; //If true allows retrieving previously saved frames for playback
private Boolean allowRecording = false;
private Boolean allowPlayback = false;
private Boolean showIndicators = false;
private Pane indicator_layer;
private Pane video_screen;
private Scene scene;
private Timeline videoPlayer;
private ArrayList<Image> recorded_frames; //Stores recorded frames
private ArrayList<ImageView> video_frames; //Stores frames for playback
private ArrayList<byte[]> temp_frames; //Stores frames for saving
private final SnapshotParameters parameters = new SnapshotParameters();
private final ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
private final Indicator recording = new Indicator(Color.RED, " Recording..");
private final Indicator playing = new Indicator(Color.GREEN, " Playing..");
private final Indicator idle = new Indicator(Color.YELLOW, "paused..");
private final String VIDEO_NAME = "recording4.FXVideo";
private final String FRAME_NAME = "image";
private final String DIRECTORY_NAME = "Snake Game Videos"+ File.separator;
private final String PATH_ROOT = System.getProperty("user.home") + "/Desktop" + File.separator +DIRECTORY_NAME;
private final String FILE_EXTENSION = "jpg";
private final String PATH_FRAME = PATH_ROOT + FRAME_NAME;
private final String PATH_VIDEO = PATH_ROOT + VIDEO_NAME;
/**
* Constructs a scene capture utility with a default scene, a pane which
* will be used to display the state indicators, the amount of time for which
* the recorder will be running and a condition determining whether or not the
* indicators will be shown.
* @param scene: scene which will be recorded.
* @param indicatorLayer: layer which will be used to show the state indicators.
* @param record_time: time in minutes for which the recorder will be recording
* @param showIndicators: condition which determines if the indicators will be shown.
*/
public SceneCaptureUtility(Scene scene, Pane indicatorLayer, int record_time, boolean showIndicators) {
this.scene = scene;
this.width = (int) scene.getWidth();
this.height = (int) scene.getHeight();
this.showIndicators = showIndicators;
this.record_time = record_time * 60;
this.initStorages(indicatorLayer);
this.loadRecording();
this.scaleResolution(0, 0, false);
}
/*
* Initializes the list used to store the captured frames.
*/
private void initStorages(Pane layer) {
if(showIndicators)
this.indicator_layer = layer;
video_frames = new ArrayList<ImageView>();
recorded_frames = new ArrayList<Image>();
temp_frames = new ArrayList<byte[]>();
}
/**
* loads recordings and or frames from a specified location
*/
private void loadRecording(){
if (loadFrames) {
loadFromFile();
} else {
retrieveRecording();
}
}
/*
* Resets the list
*/
private void resetStorage() {
if (video_frames != null)
video_frames.clear();
if (recorded_frames != null)
recorded_frames.clear();
if (video_screen != null)
video_screen.getChildren().clear();
}
/**
* Method which when called will start recording the given scene.
*/
public void startRecorder() {
if (!allowRecording) {
resetStorage();
if(showIndicators)
showIndicator(indicator_layer.getChildren(), recording);
videoRecorder();
allowRecording(true);
logState("Recording...");
}
}
/**
* Method which when called will stop the recording
*/
public void stopRecorder() {
if (allowRecording) {
if(showIndicators)
showIndicator(indicator_layer.getChildren(), idle);
allowRecording(false);
logState("Recording stopped");
logState("Amount of recorded frames: " + recorded_frames.size());
processVideo();
saveVideo();
}
}
/**
* Method which when called will start playback of the recorded video onto
* a given screen or layer.
* @param output_screen: layer used to display the video
* @param settings: video settings that determine the playback conditions.
*/
public void starPlayer(Pane output_screen, PlaybackSettings settings) {
video_screen = output_screen;
playSettings = settings;
if(showIndicators)
showIndicator(indicator_layer.getChildren(), playing);
if (video_frames.size() > 0) {
logState("Video playback..");
resetPlayback();
if (videoPlayer == null)
videoPlayer();
else {
videoPlayer.play();
}
allowPlayback(true);
}
else{
logState("Nothing to play!");
}
}
/**
* Method which when called will stop the playback of the video
*/
public void stopPlayer() {
if(showIndicators)
showIndicator(indicator_layer.getChildren(), idle);
if (videoPlayer != null)
videoPlayer.stop();
logState("Playback stopped");
allowPlayback(false);
}
/*
* Method which creates a task which records the video at
* a specified rate for a specified time
*/
private void videoRecorder() {
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
while (true) {
Platform.runLater(new Runnable() {
@Override
public void run() {
if (allowRecording && record_time > 0) {
recorded_frames.add(create_frame());
}
recordingTimer();
}
});
Thread.sleep(capture_rate);
}
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/*
* Method which creates a timeline which plays the video
* at a specified frameRate onto a given screen or layer.
*/
private void videoPlayer() {
videoPlayer = new Timeline();
videoPlayer.setCycleCount(Animation.INDEFINITE);
KeyFrame keyFrame = new KeyFrame(Duration.seconds(frameCap),
new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent e) {
if (allowPlayback) {
playbackVideo();
}
}
});
videoPlayer.getKeyFrames().add(keyFrame);
videoPlayer.play();
}
/**
* Calls to this method will decrease the time left on the
* recording every second until the recording time reaches
* zero. This will cause the recording to stop.
*/
private void recordingTimer() {
timer++;
if (allowRecording && timer >= frameRate) {
record_time -= 1;
timer = 0;
if (record_time <= 0) {
record_time = 0;
}
}
}
/**
* A call to this method will add the recorded frames to the video list
* making them ready for playback.
*/
private void processVideo() {
logState("Processing video...");
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
for (int i = 0; i < recorded_frames.size(); i++) {
video_frames.add(new ImageView(recorded_frames.get(i)));
}
logState("Video has been processed.");
return null;
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/**
* A call to this method will play the video on the given screen,
* adding and removing frames.
* @return: screen in which the frames are being rendered
*/
private final Pane playbackVideo() {
if (video_screen.getChildren().size() > 0)
video_screen.getChildren().remove(0);
video_screen.getChildren().add(video_frames.get(frame));
frame += 1;
if (frame > video_frames.size() - 1) {
if(playSettings == PlaybackSettings.CONTINUOUS_REPLAY){
frame = 0;
}
else if(playSettings == PlaybackSettings.PLAY_ONCE){
frame = video_frames.size() - 1;
allowPlayback = false;
}
}
return video_screen;
}
public void setVideoScale(double scale) {
this.video_size_scale = scale;
}
/**
* A call to this method will scale the video
* to a given scale.
* @param scale: new scale of the video. 1.0 is normal
* 0.5 is half and 2.0 is twice the size.
*/
public void scaleVideo(double scale) {
this.video_size_scale = scale;
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
if (video_frames.size() > 0) {
logState("Scaling video...");
for (int i = 0; i < video_frames.size(); i++) {
video_frames.get(i).setFitWidth(video_frames.get(i).getImage().getWidth() * video_size_scale);
video_frames.get(i).setFitHeight(video_frames.get(i).getImage().getHeight() * video_size_scale);
}
logState("Video has been scaled!");
}
return null;
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/**
* A call to this method will attempt to prepare the video and/or frames
* for saving
*/
private void saveVideo() {
File root = new File(PATH_ROOT);
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
root.mkdirs();
for (int i = 0; i < recorded_frames.size(); i++) {
saveToFile(recorded_frames.get(i));
}
saveRecording(temp_frames);
logState("Amount of compiled frames: " + temp_frames.size());
return null;
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/**
* A call to this method will add the frames stored in the array list
* to the video list.
* @param list: list containing the byte arrays of the frames
*/
private void loadFrames(ArrayList<byte[]> list) {
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
logState("loading frames...");
for (int i = 0; i < list.size(); i++) {
video_frames.add(byteToImage(list.get(i)));
}
logState("frames have been added!");
scaleVideo(video_size_scale);
return null;
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/**
* Method which when called will add and display an indicator.
* @param rootPane: list to which the indicator will be added
* @param indicator: indicator to be displayed
*/
private void showIndicator(ObservableList<Node> rootPane, Indicator indicator) {
rootPane.removeAll(playing, idle, recording);
indicator.setTranslateX(width - ScaleX(150));
indicator.setTranslateY(ScaleY(100));
rootPane.add(indicator);
}
/**
* Calls to this method will save each frame if conditions are met
* and will also store each frame into a list of byte arrays.
* @param image: image to be saved to file and/or converted and stored as
* a byte array.
*/
private void saveToFile(Image image) {
counter += 1;
BufferedImage BImage = SwingFXUtils.fromFXImage(image, null);
temp_frames.add(ImageToByte(BImage));
if (saveFrames) {
File video = new File(PATH_FRAME + counter + "." + FILE_EXTENSION);
try {
ImageIO.write(BImage, FILE_EXTENSION, video);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
/**
* Method which when called loads images from a predefined
* directory in order to play them as a video.
*/
private void loadFromFile() {
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
for (int i = 1; i < 513; i++) {
File video = new File(PATH_FRAME + i + "." + FILE_EXTENSION);
video_frames.add(new ImageView(new Image(video.toURI().toString())));
}
return null;
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/**
* Method which when called will attempt to save the video
* to a specified directory.
* @param list
*/
private void saveRecording(ArrayList<byte[]> list) {
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
File root = new File(PATH_ROOT);
File video = new File(PATH_VIDEO);
video.delete();
logState("Saving video...");
try {
root.mkdirs();
FileOutputStream fileOut = new FileOutputStream(PATH_VIDEO);
BufferedOutputStream bufferedStream = new BufferedOutputStream(fileOut);
ObjectOutputStream outputStream = new ObjectOutputStream(bufferedStream);
outputStream.writeObject(list);
outputStream.close();
fileOut.close();
logState("Video saved.");
} catch (IOException e) {
logState("Failed to save, I/O exception");
e.printStackTrace();
}
return null;
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/**
* Method which when called attempts to retrieve the video
* from a specified directory
*/
@SuppressWarnings("unchecked")
private void retrieveRecording() {
Task<Void> task = new Task<Void>() {
@Override
public Void call() throws Exception {
File root = new File(PATH_ROOT);
File video = new File(PATH_VIDEO);
if (root.exists() && video.exists()) {
try {
FileInputStream fileIn = new FileInputStream(PATH_VIDEO);
ObjectInputStream inputStream = new ObjectInputStream(fileIn);
temp_frames = (ArrayList<byte[]>) inputStream.readObject();
inputStream.close();
fileIn.close();
logState("\nLoading video");
loadFrames(temp_frames);
} catch (IOException | ClassNotFoundException e) {
logState("Failed to load! " + e.getLocalizedMessage());
}
} else {
logState("Nothing to load.");
}
return null;
}
};
Thread thread = new Thread(task);
thread.setDaemon(true);
thread.start();
}
/**
* Method which when called creates a frame or snapshot of the
* given scene to be recorded.
* @return: frame taken from the scene.
*/
private synchronized Image create_frame() {
WritableImage wi = new WritableImage(width, height);
if (scene != null)
scene.snapshot(wi);
try {
return wi;
} finally {
wi = null;
}
}
/**
* Method which when called creates a frame or snapshot of the
* given node.
* @param node: node to be recorded
* @return: image or frame of recorded node.
*/
@SuppressWarnings("unused")
private synchronized Image create_node_frame(Node node) {
parameters.setFill(Color.TRANSPARENT);
WritableImage wi = new WritableImage((int)node.getBoundsInLocal().getWidth(), (int) node.getBoundsInLocal().getHeight());
node.snapshot(parameters, wi);
return wi;
}
/**
* Method which when called will create a scale relative to a base
* and current resolution.
* @param scaleX: x scaling factor used for manual scaling
* @param scaleY: y scaling factor used for manual scaling
* @param manualScaling: determines if a manual scaling will be applied or not
*/
public void scaleResolution(double scaleX, double scaleY, boolean manualScaling) {
double resolutionX = Screen.getPrimary().getBounds().getWidth();
double resolutionY = Screen.getPrimary().getBounds().getHeight();
double baseResolutionX = 1920;
double baseResolutionY = 1080;
bounds_scale_X = baseResolutionX / resolutionX;
bounds_scale_Y = baseResolutionY / resolutionY;
if(manualScaling==true){
bounds_scale_X = bounds_scale_X*scaleX;
bounds_scale_Y = bounds_scale_Y*scaleY;
}
}
public void allowRecording(boolean state) {
allowRecording = state;
logState("allowed recording: " + state);
}
public void allowPlayback(boolean state) {
allowPlayback = state;
logState("allowed playback: " + state);
}
public void setLocation(double x, double y) {
video_screen.setTranslateX(x);
video_screen.setTranslateY(y);
}
public void setDimensions(double width, double height) {
video_screen.setPrefSize(width, height);
}
public void resetPlayback() {
this.frame = 0;
}
public double Scale(double value) {
double newSize = value * (bounds_scale_X + bounds_scale_Y)/2;
return newSize;
}
public double ScaleX(double value) {
double newSize = value * bounds_scale_X;
return newSize;
}
public double ScaleY(double value) {
double newSize = value * bounds_scale_Y;
return newSize;
}
public double getVideoWidth(){
if(!video_frames.isEmpty())
return video_frames.get(0).getImage().getWidth() * video_size_scale;
else{
return 0;
}
}
public double getVideoHeight(){
if(!video_frames.isEmpty())
return video_frames.get(0).getImage().getHeight() * video_size_scale;
else{
return 0;
}
}
@SuppressWarnings("unused")
private String loadResource(String image) {
String url = PATH_ROOT + image;
return url;
}
/**
* Method which converts a BufferedImage to a byte array
* @param image: image to be converted
* @return: byte array of the image
*/
public final byte[] ImageToByte(BufferedImage image) {
byte[] imageInByte = null;
try {
if (image != null) {
ImageIO.write(image, FILE_EXTENSION, byteOutput);
imageInByte = byteOutput.toByteArray();
byteOutput.flush();
}
} catch (IOException | IllegalArgumentException e) {
e.printStackTrace();
}
try {
return imageInByte;
} finally {
byteOutput.reset();
}
}
/**
* Method which converts a byte array to an ImageView
* @param data: byte array to be converted.
* @return: ImageView of the byte array
*/
public final ImageView byteToImage(byte[] data) {
BufferedImage newImage = null;
ImageView imageView = null;
Image image = null;
try {
InputStream inputStream = new ByteArrayInputStream(data);
newImage = ImageIO.read(inputStream);
inputStream.close();
image = SwingFXUtils.toFXImage(newImage, null);
imageView = new ImageView(image);
} catch (IOException e) {
e.printStackTrace();
}
return imageView;
}
private void logState(String state) {
System.out.println("JAVA_FX SCREEN RECORDER: " + state);
}
public enum PlaybackSettings {
CONTINUOUS_REPLAY, PLAY_ONCE,
}
/**
* Class which creates a simple indicator which can be
* used to display a recording or playing state
* @author Eudy Contreras
*
*/
private class Indicator extends HBox {
public Indicator(Color color, String message) {
Circle indicator = new Circle(Scale(15), color);
Text label = new Text(message);
label.setFont(Font.font("", FontWeight.EXTRA_BOLD, Scale(20)));
label.setFill(Color.WHITE);
setBackground(new Background(new BackgroundFill(Color.TRANSPARENT, null, null)));
getChildren().addAll(indicator, label);
}
}
}
If you have any suggestions for things that would make this program more efficient, please share them with me. Thanks.
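For anyone who wants to try it, here is a minimal, hypothetical usage sketch. Only SceneCaptureUtility, its constructor, startRecorder(), stopRecorder(), starPlayer() and PlaybackSettings come from the class above; the demo app, its panes and the two-minute recording time are assumptions made up for illustration.
import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Pane;
import javafx.scene.layout.StackPane;
import javafx.stage.Stage;

public class RecorderDemo extends Application {
    @Override
    public void start(Stage stage) {
        Pane overlay = new Pane();        // layer the utility uses for its state indicators
        Pane playbackScreen = new Pane(); // layer the recorded frames are played back on
        Button record = new Button("Record");
        Button play = new Button("Stop & Play");
        StackPane root = new StackPane(playbackScreen, new HBox(10, record, play), overlay);

        Scene scene = new Scene(root, 800, 600);
        // Record this scene for up to 2 minutes, showing indicators on the overlay.
        SceneCaptureUtility recorder = new SceneCaptureUtility(scene, overlay, 2, true);

        record.setOnAction(e -> recorder.startRecorder());
        play.setOnAction(e -> {
            recorder.stopRecorder(); // processing and saving happen on background threads
            recorder.starPlayer(playbackScreen, SceneCaptureUtility.PlaybackSettings.PLAY_ONCE);
        });

        stage.setScene(scene);
        stage.show();
    }

    public static void main(String[] args) {
        launch(args);
    }
}
Note that stopRecorder() processes the frames asynchronously, so starPlayer() will log "Nothing to play!" if it is called before processing has finished; in a real app you would wait for that to complete first.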