new filtering with webgl (#3915)

This commit is contained in:
Andrea Bogazzi 2017-05-29 18:28:24 +02:00 committed by GitHub
parent a8db7b3523
commit e96ccf9ea6
34 changed files with 3256 additions and 1585 deletions

View file

@ -67,6 +67,25 @@ fabric.canvasModule = 'canvas-prebuilt';
*/
fabric.charWidthsCache = { };
/**
* if webgl is enabled and available, textureSize will determine the size
* of the canvas backend
* @since 2.0.0
* @type Number
* @default
*/
fabric.textureSize = 2048;
/**
* Enable WebGL filtering if it is available
* A filtering backend will be initialized, this will both take memory and
* time since a default 2048x2048 canvas will be created for the gl context
* @since 2.0.0
* @type Boolean
* @default
*/
fabric.enableGLFiltering = true;
/**
* Device Pixel Ratio
* @see https://developer.apple.com/library/safari/documentation/AudioVideo/Conceptual/HTML-canvas-guide/SettingUptheCanvas/SettingUptheCanvas.html
@ -75,3 +94,14 @@ fabric.devicePixelRatio = fabric.window.devicePixelRatio ||
fabric.window.webkitDevicePixelRatio ||
fabric.window.mozDevicePixelRatio ||
1;
/**
 * Create the best filtering backend available: WebGL when enabled and
 * supported at the configured texture size, otherwise the Canvas 2D fallback.
 * May return undefined when no backend is available.
 * @returns {fabric.WebglFilterBackend|fabric.Canvas2dFilterBackend|undefined}
 */
fabric.initFilterBackend = function() {
  // Check the cheap opt-in flag first, before probing for WebGL support;
  // the stray debug console.log has been removed from the production path.
  if (fabric.enableGLFiltering && fabric.isWebglSupported && fabric.isWebglSupported(fabric.textureSize)) {
    return (new fabric.WebglFilterBackend({ tileSize: fabric.textureSize }));
  }
  else if (fabric.Canvas2dFilterBackend) {
    return (new fabric.Canvas2dFilterBackend());
  }
};

View file

@ -202,26 +202,25 @@ var filesToInclude = [
'src/shapes/image.class.js',
ifSpecifiedInclude('object_straightening', 'src/mixins/object_straightening.mixin.js'),
ifSpecifiedInclude('image_filters', 'src/filters/webgl_backend.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/2d_backend.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/base_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/colormatrix_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/brightness_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/convolute_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/gradienttransparency_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/grayscale_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/invert_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/mask_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/noise_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/pixelate_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/removewhite_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/sepia_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/sepia2_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/tint_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/multiply_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/blend_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/removecolor_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/filter_generator.js'),
ifSpecifiedInclude('image_filters', 'src/filters/blendcolor_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/resize_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/colormatrix_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/contrast_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/saturate_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/blur_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/gamma_filter.class.js'),
ifSpecifiedInclude('image_filters', 'src/filters/composed_filter.class.js'),
ifSpecifiedInclude('text', 'src/shapes/text.class.js'),

View file

@ -0,0 +1,64 @@
(function() {

  'use strict';

  var noop = function() {};

  fabric.Canvas2dFilterBackend = Canvas2dFilterBackend;

  /**
   * Canvas 2D filter backend. Pure-JS fallback used when WebGL filtering is
   * disabled or unavailable; filters operate directly on ImageData pixels.
   */
  function Canvas2dFilterBackend() {}

  Canvas2dFilterBackend.prototype = /** @lends fabric.Canvas2dFilterBackend.prototype */ {

    // The 2D backend holds no GPU resources, so cache management is a no-op.
    evictCachesForKey: noop,
    dispose: noop,
    clearWebGLCaches: noop,

    /**
     * Experimental. This object is a sort of repository of help layers used to avoid
     * recreating them during frequent filtering. If you are previewing a filter with
     * a slider you probably do not want to create help layers every filter step.
     * Some canvases will be appended to this object, created once, resized sometimes,
     * cleared never. Clearing is left to the developer.
     **/
    resources: {
    },

    /**
     * Apply a set of filters against a source image and draw the filtered output
     * to the provided destination canvas.
     *
     * @param {Array} filters The filters to apply, in order.
     * @param {HTMLImageElement|HTMLCanvasElement} sourceElement The source to be filtered.
     * @param {Number} sourceWidth The width of the source input.
     * @param {Number} sourceHeight The height of the source input.
     * @param {HTMLCanvasElement} targetCanvas The destination for filtered output to be drawn.
     * @returns {Object} The pipeline state after every filter has run.
     */
    applyFilters: function(filters, sourceElement, sourceWidth, sourceHeight, targetCanvas) {
      var ctx = targetCanvas.getContext('2d');
      ctx.drawImage(sourceElement, 0, 0, sourceWidth, sourceHeight);
      var imageData = ctx.getImageData(0, 0, sourceWidth, sourceHeight);
      // Keep an untouched copy so filters can blend against the original pixels.
      var originalImageData = ctx.getImageData(0, 0, sourceWidth, sourceHeight);
      var pipelineState = {
        sourceWidth: sourceWidth,
        sourceHeight: sourceHeight,
        imageData: imageData,
        originalEl: sourceElement,
        originalImageData: originalImageData,
        canvasEl: targetCanvas,
        ctx: ctx,
      };
      filters.forEach(function(filter) { filter.applyTo(pipelineState); });
      // A filter (e.g. a resize) may have swapped imageData for a buffer of a
      // different size; resize the target canvas to fit before drawing.
      if (pipelineState.imageData.width !== sourceWidth || pipelineState.imageData.height !== sourceHeight) {
        targetCanvas.width = pipelineState.imageData.width;
        targetCanvas.height = pipelineState.imageData.height;
      }
      ctx.putImageData(pipelineState.imageData, 0, 0);
      return pipelineState;
    },
  };
})();

View file

@ -20,6 +20,21 @@ fabric.Image.filters.BaseFilter = fabric.util.createClass(/** @lends fabric.Imag
*/
type: 'BaseFilter',
/**
 * Vertex shader shared by all filters: forwards the texture coordinate and
 * maps aPosition from the [0, 1] range into clip space [-1, 1].
 */
vertexSource: 'attribute vec2 aPosition;\n' +
'attribute vec2 aTexCoord;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vTexCoord = aTexCoord;\n' +
'gl_Position = vec4(aPosition * 2.0 - 1.0, 0.0, 1.0);\n' +
'}',
fragmentSource: 'precision highp float;\n' +
'varying vec2 vTexCoord;\n' +
'uniform sampler2d uTexture;\n' +
'void main() {\n' +
'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
'}',
/**
* Constructor
* @param {Object} [options] Options object
@ -40,12 +55,264 @@ fabric.Image.filters.BaseFilter = fabric.util.createClass(/** @lends fabric.Imag
}
},
/**
* Compile this filter's shader program.
*
* @param {WebGLRenderingContext} gl The GL canvas context to use for shader compilation.
* @param {String} fragmentSource fragmentShader source for compilation
* @param {String} vertexSource vertexShader source for compilation
*/
createProgram: function(gl, fragmentSource, vertexSource) {
if (!this.vertexSource || !this.fragmentSource) {
return;
}
var vertexShader = gl.createShader(gl.VERTEX_SHADER);
gl.shaderSource(vertexShader, vertexSource || this.vertexSource);
gl.compileShader(vertexShader);
if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
throw new Error(
// eslint-disable-next-line prefer-template
'Vertex shader compile error for "${this.type}": ' +
gl.getShaderInfoLog(vertexShader)
);
}
var fragmentShader = gl.createShader(gl.FRAGMENT_SHADER);
gl.shaderSource(fragmentShader, fragmentSource || this.fragmentSource);
gl.compileShader(fragmentShader);
if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
throw new Error(
// eslint-disable-next-line prefer-template
'Fragment shader compile error for "${this.type}": ' +
gl.getShaderInfoLog(fragmentShader)
);
}
var program = gl.createProgram();
gl.attachShader(program, vertexShader);
gl.attachShader(program, fragmentShader);
gl.linkProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
throw new Error(
// eslint-disable-next-line prefer-template
'Shader link error for "${this.type}" ' +
gl.getProgramInfoLog(program)
);
}
var attributeLocations = this.getAttributeLocations(gl, program);
var uniformLocations = this.getUniformLocations(gl, program) || { };
uniformLocations.uWidth = gl.getUniformLocation(program, 'uWidth');
uniformLocations.uHeight = gl.getUniformLocation(program, 'uHeight');
return {
program: program,
attributeLocations: attributeLocations,
uniformLocations: uniformLocations
};
},
/**
* Return a map of attribute names to WebGLAttributeLocation objects.
*
* @param {WebGLRenderingContext} gl The canvas context used to compile the shader program.
* @param {WebGLShaderProgram} program The shader program from which to take attribute locations.
* @returns {Object} A map of attribute names to attribute locations.
*/
getAttributeLocations: function(gl, program) {
return {
aPosition: gl.getAttribLocation(program, 'aPosition'),
aTexCoord: gl.getAttribLocation(program, 'aTexCoord'),
};
},
/**
 * Return a map of uniform names to WebGLUniformLocation objects.
 *
 * Intended to be overridden by subclasses; the base implementation declares
 * no custom uniforms and returns undefined (createProgram falls back to `{ }`).
 *
 * @param {WebGLRenderingContext} gl The canvas context used to compile the shader program.
 * @param {WebGLShaderProgram} program The shader program from which to take uniform locations.
 * @returns {Object|undefined} A map of uniform names to uniform locations.
 */
getUniformLocations: function (/* gl, program */) {
// Intentionally left blank, override me in subclasses.
},
/**
 * Send attribute data from this filter to its shader program on the GPU.
 *
 * @param {WebGLRenderingContext} gl The canvas context used to compile the shader program.
 * @param {Object} attributeLocations A map of shader attribute names to their locations.
 * @param {Float32Array} squareVertices Vertex data uploaded for both attributes.
 */
sendAttributeData: function(gl, attributeLocations, squareVertices) {
['aPosition', 'aTexCoord'].forEach(function(attribute) {
var attributeLocation = attributeLocations[attribute];
// NOTE(review): a fresh buffer is created on every call and never deleted
// here — confirm cleanup is handled by the backend's dispose path.
var buffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.enableVertexAttribArray(attributeLocation);
// vertexAttribPointer captures whichever buffer is bound to ARRAY_BUFFER,
// so the bind above must precede it; bufferData then fills that buffer.
gl.vertexAttribPointer(attributeLocation, 2, gl.FLOAT, false, 0, 0);
gl.bufferData(gl.ARRAY_BUFFER, squareVertices, gl.STATIC_DRAW);
});
},
/**
 * Bind the render target for the next pass: an intermediate texture while
 * more filters remain in the chain, or the default framebuffer (the canvas)
 * for the final pass.
 *
 * @param {Object} options
 * @param {WebGLRenderingContext} options.context The GL context used for rendering.
 * @param {Number} options.passes The number of filters remaining to be executed
 * @param {WebGLTexture} options.targetTexture The texture where filtered output should be drawn.
 */
_setupFrameBuffer: function(options) {
var gl = options.context;
if (options.passes > 1) {
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D,
options.targetTexture, 0);
}
else {
// draw last filter on canvas and not to framebuffer.
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
gl.finish();
}
},
_swapTextures: function(options) {
options.passes--;
options.pass++;
var temp = options.targetTexture;
options.targetTexture = options.sourceTexture;
options.sourceTexture = temp;
},
/**
* Intentionally left blank, to be overridden in custom filters
* @param {Object} options
**/
isNeutralState: function(/* options */) {
return false;
},
/**
* Apply this filter to the input image data provided.
*
* Determines whether to use WebGL or Canvas2D based on the options.webgl flag.
*
* @param {Object} options
* @param {Number} options.passes The number of filters remaining to be executed
* @param {Boolean} options.webgl Whether to use webgl to render the filter.
* @param {WebGLTexture} options.sourceTexture The texture setup as the source to be filtered.
* @param {WebGLTexture} options.targetTexture The texture where filtered output should be drawn.
* @param {WebGLRenderingContext} options.context The GL context used for rendering.
* @param {Object} options.programCache A map of compiled shader programs, keyed by filter type.
*/
applyTo: function(options) {
if (options.webgl) {
if (options.passes > 1 && this.isNeutralState(options)) {
// avoid doing something that we do not need
return;
}
this._setupFrameBuffer(options);
this.applyToWebGL(options);
this._swapTextures(options);
}
else {
this.applyTo2d(options);
}
},
/**
* Retrieves the cached shader.
* @param {Object} options
* @param {WebGLRenderingContext} options.context The GL context used for rendering.
* @param {Object} options.programCache A map of compiled shader programs, keyed by filter type.
*/
retrieveShader: function(options) {
if (!options.programCache.hasOwnProperty(this.type)) {
options.programCache[this.type] = this.createProgram(options.context);
}
return options.programCache[this.type];
},
/**
 * Apply this filter using webgl.
 *
 * @param {Object} options
 * @param {Number} options.passes The number of filters remaining to be executed
 * @param {Boolean} options.webgl Whether to use webgl to render the filter.
 * @param {WebGLTexture} options.originalTexture The texture of the original input image.
 * @param {WebGLTexture} options.sourceTexture The texture setup as the source to be filtered.
 * @param {WebGLTexture} options.targetTexture The texture where filtered output should be drawn.
 * @param {WebGLRenderingContext} options.context The GL context used for rendering.
 * @param {Object} options.programCache A map of compiled shader programs, keyed by filter type.
 */
applyToWebGL: function(options) {
var gl = options.context;
var shader = this.retrieveShader(options);
// The first pass reads the untouched original texture when one is provided;
// every later pass reads from the chain's current source texture.
if (options.pass === 0 && options.originalTexture) {
gl.bindTexture(gl.TEXTURE_2D, options.originalTexture);
}
else {
gl.bindTexture(gl.TEXTURE_2D, options.sourceTexture);
}
gl.useProgram(shader.program);
this.sendAttributeData(gl, shader.attributeLocations, options.squareVertices);
// 1 / size is the width of one texel step in texture coordinates.
gl.uniform1f(shader.uniformLocations.uStepW, 1 / options.sourceWidth);
gl.uniform1f(shader.uniformLocations.uStepH, 1 / options.sourceHeight);
this.sendUniformData(gl, shader.uniformLocations);
gl.viewport(0, 0, options.sourceWidth, options.sourceHeight);
gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
},
bindAdditionalTexture: function(gl, texture, textureUnit) {
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_2D, texture);
// reset active texture to 0 as usual
gl.activeTexture(gl.TEXTURE0);
},
unbindAdditionalTexture: function(gl, textureUnit) {
gl.activeTexture(textureUnit);
gl.bindTexture(gl.TEXTURE_2D, null);
gl.activeTexture(gl.TEXTURE0);
},
getMainParameter: function() {
return this[this.mainParameter];
},
setMainParameter: function(value) {
this[this.mainParameter] = value;
},
/**
 * Send uniform data from this filter to its shader program on the GPU.
 *
 * Intended to be overridden by subclasses; the base implementation has no
 * custom uniforms to upload, so it does nothing.
 *
 * @param {WebGLRenderingContext} gl The canvas context used to compile the shader program.
 * @param {Object} uniformLocations A map of shader uniform names to their locations.
 */
sendUniformData: function(/* gl, uniformLocations */) {
// Intentionally left blank. Override me in subclasses.
},
/**
* If needed by a 2d filter, this functions can create an helper canvas to be used
* remember that options.targetCanvas is available for use till end of chain.
*/
createHelpLayer: function(options) {
if (!options.helpLayer) {
var helpLayer = document.createElement('canvas');
helpLayer.width = options.sourceWidth;
helpLayer.height = options.sourceHeight;
options.helpLayer = helpLayer;
}
},
/**
* Returns object representation of an instance
* @return {Object} Object representation of an instance
*/
toObject: function() {
return { type: this.type };
var object = { type: this.type }, mainP = this.mainParameter;
if (mainP) {
object[mainP] = this[mainP];
}
return object;
},
/**

View file

@ -1,154 +0,0 @@
(function(global) {

  'use strict';

  var fabric = global.fabric,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Color Blend filter class
   * @class fabric.Image.filters.Blend
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @example
   * var filter = new fabric.Image.filters.Blend({
   *  color: '#000',
   *  mode: 'multiply'
   * });
   *
   * var filter = new fabric.Image.filters.Blend({
   *  image: fabricImageObject,
   *  mode: 'multiply',
   *  alpha: 0.5
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  filters.Blend = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Blend.prototype */ {
    type: 'Blend',

    /**
     * Constructor
     * @param {Object} [options] may hold color, image, mode and alpha
     */
    initialize: function(options) {
      options = options || {};
      this.color = options.color || '#000';
      this.image = options.image || false;
      this.mode = options.mode || 'multiply';
      this.alpha = options.alpha || 1;
    },

    /**
     * Blend either a flat color or another image into the canvas pixels.
     * @param {HTMLCanvasElement} canvasEl canvas read from and written back to
     */
    applyTo: function(canvasEl) {
      var context = canvasEl.getContext('2d'),
          imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
          data = imageData.data,
          tr, tg, tb,
          r, g, b,
          _r, _g, _b,
          source,
          isImage = false;

      if (this.image) {
        // Blend images: rasterize this.image on a temporary static canvas
        // and read its pixels back as the blend source.
        isImage = true;

        var _el = fabric.util.createCanvasElement();
        _el.width = this.image.width;
        _el.height = this.image.height;

        var tmpCanvas = new fabric.StaticCanvas(_el);
        tmpCanvas.add(this.image);
        var context2 = tmpCanvas.getContext('2d');
        source = context2.getImageData(0, 0, tmpCanvas.width, tmpCanvas.height).data;
      }
      else {
        // Blend color: precompute the alpha-scaled blend channels once.
        source = new fabric.Color(this.color).getSource();

        tr = source[0] * this.alpha;
        tg = source[1] * this.alpha;
        tb = source[2] * this.alpha;
      }

      for (var i = 0, len = data.length; i < len; i += 4) {

        r = data[i];
        g = data[i + 1];
        b = data[i + 2];

        if (isImage) {
          tr = source[i] * this.alpha;
          tg = source[i + 1] * this.alpha;
          tb = source[i + 2] * this.alpha;
        }

        switch (this.mode) {
          case 'multiply':
            data[i] = r * tr / 255;
            data[i + 1] = g * tg / 255;
            data[i + 2] = b * tb / 255;
            break;
          case 'screen':
            // Channels are 0-255 here, so the screen formula must be scaled
            // by 255; the previous normalized (0-1) form produced negative
            // values that clamped every screen-blended pixel toward black.
            data[i] = 255 - (255 - r) * (255 - tr) / 255;
            data[i + 1] = 255 - (255 - g) * (255 - tg) / 255;
            data[i + 2] = 255 - (255 - b) * (255 - tb) / 255;
            break;
          case 'add':
            data[i] = Math.min(255, r + tr);
            data[i + 1] = Math.min(255, g + tg);
            data[i + 2] = Math.min(255, b + tb);
            break;
          case 'diff':
          case 'difference':
            data[i] = Math.abs(r - tr);
            data[i + 1] = Math.abs(g - tg);
            data[i + 2] = Math.abs(b - tb);
            break;
          case 'subtract':
            _r = r - tr;
            _g = g - tg;
            _b = b - tb;
            data[i] = (_r < 0) ? 0 : _r;
            data[i + 1] = (_g < 0) ? 0 : _g;
            data[i + 2] = (_b < 0) ? 0 : _b;
            break;
          case 'darken':
            data[i] = Math.min(r, tr);
            data[i + 1] = Math.min(g, tg);
            data[i + 2] = Math.min(b, tb);
            break;
          case 'lighten':
            data[i] = Math.max(r, tr);
            data[i + 1] = Math.max(g, tg);
            data[i + 2] = Math.max(b, tb);
            break;
        }
      }

      context.putImageData(imageData, 0, 0);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      return {
        color: this.color,
        image: this.image,
        mode: this.mode,
        alpha: this.alpha
      };
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.Blend} Instance of fabric.Image.filters.Blend
   */
  fabric.Image.filters.Blend.fromObject = fabric.Image.filters.BaseFilter.fromObject;

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -0,0 +1,295 @@
(function(global) {

  'use strict';

  var fabric = global.fabric,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Color Blend filter class
   * @class fabric.Image.filters.BlendColor
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @example
   * var filter = new fabric.Image.filters.BlendColor({
   *  color: '#000',
   *  mode: 'multiply'
   * });
   *
   * var filter = new fabric.Image.filters.BlendColor({
   *  color: '#F95C63',
   *  mode: 'tint',
   *  alpha: 0.5
   * });
   * object.filters.push(filter);
   * object.applyFilters();
   * canvas.renderAll();
   */
  filters.BlendColor = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.BlendColor.prototype */ {
    type: 'BlendColor',

    /**
     * Color to make the blend operation with. default to a reddish color since black or white
     * gives always strong result.
     **/
    color: '#F95C63',

    /**
     * Blend mode for the filter: one of multiply, add, diff, screen, subtract,
     * darken, lighten, overlay, exclusion, tint.
     **/
    mode: 'multiply',

    /**
     * alpha value. represent the strength of the blend color operation.
     **/
    alpha: 1,

    /**
     * Fragment sources, one shader per blend mode.
     */
    fragmentSource: {
      multiply: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'vec4 color = texture2D(uTexture, vTexCoord);\n' +
        'color.rgb *= uColor.rgb;\n' +
        'gl_FragColor = color;\n' +
        '}',
      screen: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'vec4 color = texture2D(uTexture, vTexCoord);\n' +
        'color.rgb = 1.0 - (1.0 - color.rgb) * (1.0 - uColor.rgb);\n' +
        'gl_FragColor = color;\n' +
        '}',
      add: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'gl_FragColor.rgb += uColor.rgb;\n' +
        '}',
      diff: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'gl_FragColor.rgb = abs(gl_FragColor.rgb - uColor.rgb);\n' +
        '}',
      subtract: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'gl_FragColor.rgb -= uColor.rgb;\n' +
        '}',
      lighten: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'gl_FragColor.rgb = max(gl_FragColor.rgb, uColor.rgb);\n' +
        '}',
      darken: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'gl_FragColor.rgb = min(gl_FragColor.rgb, uColor.rgb);\n' +
        '}',
      exclusion: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'gl_FragColor.rgb += uColor.rgb - 2.0 * (uColor.rgb * gl_FragColor.rgb);\n' +
        '}',
      overlay: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'if (uColor.r < 0.5) {\n' +
        'gl_FragColor.r *= 2.0 * uColor.r;\n' +
        '} else {\n' +
        'gl_FragColor.r = 1.0 - 2.0 * (1.0 - gl_FragColor.r) * (1.0 - uColor.r);\n' +
        '}\n' +
        'if (uColor.g < 0.5) {\n' +
        'gl_FragColor.g *= 2.0 * uColor.g;\n' +
        '} else {\n' +
        'gl_FragColor.g = 1.0 - 2.0 * (1.0 - gl_FragColor.g) * (1.0 - uColor.g);\n' +
        '}\n' +
        'if (uColor.b < 0.5) {\n' +
        'gl_FragColor.b *= 2.0 * uColor.b;\n' +
        '} else {\n' +
        'gl_FragColor.b = 1.0 - 2.0 * (1.0 - gl_FragColor.b) * (1.0 - uColor.b);\n' +
        '}\n' +
        '}',
      tint: 'precision highp float;\n' +
        'uniform sampler2D uTexture;\n' +
        'uniform vec4 uColor;\n' +
        'varying vec2 vTexCoord;\n' +
        'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'gl_FragColor.rgb *= (1.0 - uColor.a);\n' +
        'gl_FragColor.rgb += uColor.rgb;\n' +
        '}'
    },

    /**
     * Retrieves the cached shader.
     * @param {Object} options
     * @param {WebGLRenderingContext} options.context The GL context used for rendering.
     * @param {Object} options.programCache A map of compiled shader programs, keyed by filter type and mode.
     */
    retrieveShader: function(options) {
      // The 2d path accepts 'difference' as an alias of 'diff'; normalize it
      // here so the webgl path resolves the same shader instead of handing an
      // undefined source to createProgram.
      var mode = this.mode === 'difference' ? 'diff' : this.mode;
      var cacheKey = this.type + '_' + mode;
      var shaderSource = this.fragmentSource[mode];
      if (!options.programCache.hasOwnProperty(cacheKey)) {
        options.programCache[cacheKey] = this.createProgram(options.context, shaderSource);
      }
      return options.programCache[cacheKey];
    },

    /**
     * Apply the Blend operation to a Uint8ClampedArray representing the pixels of an image.
     *
     * @param {Object} options
     * @param {ImageData} options.imageData The image data to be filtered, mutated in place.
     */
    applyTo2d: function(options) {
      var imageData = options.imageData,
          data = imageData.data, iLen = data.length,
          tr, tg, tb,
          r, g, b,
          source, alpha1 = 1 - this.alpha;

      source = new fabric.Color(this.color).getSource();
      // Precompute the alpha-scaled blend channels once for the whole image.
      tr = source[0] * this.alpha;
      tg = source[1] * this.alpha;
      tb = source[2] * this.alpha;

      for (var i = 0; i < iLen; i += 4) {

        r = data[i];
        g = data[i + 1];
        b = data[i + 2];

        switch (this.mode) {
          case 'multiply':
            data[i] = r * tr / 255;
            data[i + 1] = g * tg / 255;
            data[i + 2] = b * tb / 255;
            break;
          case 'screen':
            data[i] = 255 - (255 - r) * (255 - tr) / 255;
            data[i + 1] = 255 - (255 - g) * (255 - tg) / 255;
            data[i + 2] = 255 - (255 - b) * (255 - tb) / 255;
            break;
          case 'add':
            // Uint8ClampedArray clamps the result into 0-255 on assignment.
            data[i] = r + tr;
            data[i + 1] = g + tg;
            data[i + 2] = b + tb;
            break;
          case 'diff':
          case 'difference':
            data[i] = Math.abs(r - tr);
            data[i + 1] = Math.abs(g - tg);
            data[i + 2] = Math.abs(b - tb);
            break;
          case 'subtract':
            data[i] = r - tr;
            data[i + 1] = g - tg;
            data[i + 2] = b - tb;
            break;
          case 'darken':
            data[i] = Math.min(r, tr);
            data[i + 1] = Math.min(g, tg);
            data[i + 2] = Math.min(b, tb);
            break;
          case 'lighten':
            data[i] = Math.max(r, tr);
            data[i + 1] = Math.max(g, tg);
            data[i + 2] = Math.max(b, tb);
            break;
          case 'overlay':
            data[i] = tr < 128 ? (2 * r * tr / 255) : (255 - 2 * (255 - r) * (255 - tr) / 255);
            data[i + 1] = tg < 128 ? (2 * g * tg / 255) : (255 - 2 * (255 - g) * (255 - tg) / 255);
            data[i + 2] = tb < 128 ? (2 * b * tb / 255) : (255 - 2 * (255 - b) * (255 - tb) / 255);
            break;
          case 'exclusion':
            data[i] = tr + r - ((2 * tr * r) / 255);
            data[i + 1] = tg + g - ((2 * tg * g) / 255);
            data[i + 2] = tb + b - ((2 * tb * b) / 255);
            break;
          case 'tint':
            data[i] = tr + r * alpha1;
            data[i + 1] = tg + g * alpha1;
            data[i + 2] = tb + b * alpha1;
        }
      }
    },

    /**
     * Return WebGL uniform locations for this filter's shader.
     *
     * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
     * @param {WebGLShaderProgram} program This filter's compiled shader program.
     */
    getUniformLocations: function(gl, program) {
      return {
        uColor: gl.getUniformLocation(program, 'uColor'),
      };
    },

    /**
     * Send data from this filter to its shader program's uniforms.
     *
     * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
     * @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
     */
    sendUniformData: function(gl, uniformLocations) {
      var source = new fabric.Color(this.color).getSource();
      // Normalize channels to 0-1 for GLSL and premultiply by alpha.
      source[0] = this.alpha * source[0] / 255;
      source[1] = this.alpha * source[1] / 255;
      source[2] = this.alpha * source[2] / 255;
      source[3] = this.alpha;
      gl.uniform4fv(uniformLocations.uColor, source);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      return {
        color: this.color,
        mode: this.mode,
        alpha: this.alpha
      };
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.BlendColor} Instance of fabric.Image.filters.BlendColor
   */
  fabric.Image.filters.BlendColor.fromObject = fabric.Image.filters.BaseFilter.fromObject;

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -0,0 +1,215 @@
(function(global) {
'use strict';
var fabric = global.fabric || (global.fabric = { }),
filters = fabric.Image.filters,
createClass = fabric.util.createClass;
/**
 * Blur filter class
 * @class fabric.Image.filters.Blur
 * @memberOf fabric.Image.filters
 * @extends fabric.Image.filters.BaseFilter
 * @see {@link fabric.Image.filters.Blur#initialize} for constructor definition
 * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
 * @example
 * var filter = new fabric.Image.filters.Blur({
 *   blur: 0.5
 * });
 * object.filters.push(filter);
 * object.applyFilters();
 * canvas.renderAll();
 */
filters.Blur = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Blur.prototype */ {
type: 'Blur',
/*
Alternative fixed-weight Gaussian kernel, kept for reference:
'gl_FragColor = vec4(0.0);',
'gl_FragColor += texture2D(texture, vTexCoord + -7 * uDelta)*0.0044299121055113265;',
'gl_FragColor += texture2D(texture, vTexCoord + -6 * uDelta)*0.00895781211794;',
'gl_FragColor += texture2D(texture, vTexCoord + -5 * uDelta)*0.0215963866053;',
'gl_FragColor += texture2D(texture, vTexCoord + -4 * uDelta)*0.0443683338718;',
'gl_FragColor += texture2D(texture, vTexCoord + -3 * uDelta)*0.0776744219933;',
'gl_FragColor += texture2D(texture, vTexCoord + -2 * uDelta)*0.115876621105;',
'gl_FragColor += texture2D(texture, vTexCoord + -1 * uDelta)*0.147308056121;',
'gl_FragColor += texture2D(texture, vTexCoord )*0.159576912161;',
'gl_FragColor += texture2D(texture, vTexCoord + 1 * uDelta)*0.147308056121;',
'gl_FragColor += texture2D(texture, vTexCoord + 2 * uDelta)*0.115876621105;',
'gl_FragColor += texture2D(texture, vTexCoord + 3 * uDelta)*0.0776744219933;',
'gl_FragColor += texture2D(texture, vTexCoord + 4 * uDelta)*0.0443683338718;',
'gl_FragColor += texture2D(texture, vTexCoord + 5 * uDelta)*0.0215963866053;',
'gl_FragColor += texture2D(texture, vTexCoord + 6 * uDelta)*0.00895781211794;',
'gl_FragColor += texture2D(texture, vTexCoord + 7 * uDelta)*0.0044299121055113265;',
*/
/* eslint-disable max-len */
// One-dimensional blur along uDelta with randomized, triangle-weighted samples.
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform vec2 uDelta;\n' +
'varying vec2 vTexCoord;\n' +
'const float nSamples = 15.0;\n' +
'vec3 v3offset = vec3(12.9898, 78.233, 151.7182);\n' +
'float random(vec3 scale) {\n' +
/* use the fragment position for a different seed per-pixel */
'return fract(sin(dot(gl_FragCoord.xyz, scale)) * 43758.5453);\n' +
'}\n' +
'void main() {\n' +
'vec4 color = vec4(0.0);\n' +
'float total = 0.0;\n' +
'float offset = random(v3offset);\n' +
'for (float t = -nSamples; t <= nSamples; t++) {\n' +
'float percent = (t + offset - 0.5) / nSamples;\n' +
'float weight = 1.0 - abs(percent);\n' +
'color += texture2D(uTexture, vTexCoord + uDelta * percent) * weight;\n' +
'total += weight;\n' +
'}\n' +
'gl_FragColor = color / total;\n' +
'}',
/* eslint-enable max-len */
/**
 * blur value, in percentage of image dimensions.
 * specific to keep the image blur constant at different resolutions
 * range between 0 and 1.
 */
blur: 0,
mainParameter: 'blur',
// The webgl path is a separable blur: one horizontal pass plus one vertical
// pass, so an extra pass is added to the pipeline's bookkeeping up front.
applyTo: function(options) {
if (options.webgl) {
// this aspectRatio is used to give the same blur to vertical and horizontal
this.aspectRatio = options.sourceWidth / options.sourceHeight;
options.passes++;
this._setupFrameBuffer(options);
this.horizontal = true;
this.applyToWebGL(options);
this._swapTextures(options);
this._setupFrameBuffer(options);
this.horizontal = false;
this.applyToWebGL(options);
this._swapTextures(options);
}
else {
this.applyTo2d(options);
}
},
// 2d fallback: replace the pipeline's imageData with a blurred copy.
applyTo2d: function(options) {
options.imageData = this.simpleBlur(options);
},
/**
 * Approximate a blur on the CPU by stacking many low-alpha, randomly
 * jittered draws of the image onto two cached helper canvases (one loop per
 * axis). The helper canvases live in the backend's `resources` repository so
 * repeated filtering does not recreate them.
 * @param {Object} options pipeline state holding imageData and ctx
 * @returns {ImageData} the blurred pixels read back from the target context
 */
simpleBlur: function(options) {
var resources = fabric.filterBackend.resources, canvas1, canvas2,
width = options.imageData.width,
height = options.imageData.height;
if (!resources.blurLayer1) {
resources.blurLayer1 = document.createElement('canvas');
resources.blurLayer2 = document.createElement('canvas');
}
canvas1 = resources.blurLayer1;
canvas2 = resources.blurLayer2;
if (canvas1.width !== width || canvas1.height !== height) {
canvas2.width = canvas1.width = width;
canvas2.height = canvas1.height = height;
}
var ctx1 = canvas1.getContext('2d'),
ctx2 = canvas2.getContext('2d'),
nSamples = 15,
random, percent, j, i,
blur = this.blur * 0.06 * 0.5;
// load first canvas
ctx1.putImageData(options.imageData, 0, 0);
ctx2.clearRect(0, 0, width, height);
// horizontal pass: jittered horizontal offsets, weight falling off linearly.
for (i = -nSamples; i <= nSamples; i++) {
random = (Math.random() - 0.5) / 4;
percent = i / nSamples;
j = blur * percent * width + random;
ctx2.globalAlpha = 1 - Math.abs(percent);
ctx2.drawImage(canvas1, j, random);
ctx1.drawImage(canvas2, 0, 0);
ctx2.globalAlpha = 1;
ctx2.clearRect(0, 0, canvas2.width, canvas2.height);
}
// vertical pass: same scheme with vertical offsets.
for (i = -nSamples; i <= nSamples; i++) {
random = (Math.random() - 0.5) / 4;
percent = i / nSamples;
j = blur * percent * height + random;
ctx2.globalAlpha = 1 - Math.abs(percent);
ctx2.drawImage(canvas1, random, j);
ctx1.drawImage(canvas2, 0, 0);
ctx2.globalAlpha = 1;
ctx2.clearRect(0, 0, canvas2.width, canvas2.height);
}
options.ctx.drawImage(canvas1, 0, 0);
var newImageData = options.ctx.getImageData(0, 0, canvas1.width, canvas1.height);
ctx1.globalAlpha = 1;
ctx1.clearRect(0, 0, canvas1.width, canvas1.height);
return newImageData;
},
/**
 * Return WebGL uniform locations for this filter's shader.
 *
 * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
 * @param {WebGLShaderProgram} program This filter's compiled shader program.
 */
getUniformLocations: function(gl, program) {
return {
delta: gl.getUniformLocation(program, 'uDelta'),
};
},
/**
 * Send data from this filter to its shader program's uniforms.
 *
 * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
 * @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
 */
sendUniformData: function(gl, uniformLocations) {
var delta = this.chooseRightDelta();
gl.uniform2fv(uniformLocations.delta, delta);
},
/**
 * choose right value of image percentage to blur with,
 * compensating for the image aspect ratio so horizontal and vertical passes
 * produce the same visual radius.
 * @returns {Array} a numeric array with delta values
 */
chooseRightDelta: function() {
var blurScale = 1, delta = [0, 0], blur;
if (this.horizontal) {
if (this.aspectRatio > 1) {
// image is wide, i want to shrink radius horizontal
blurScale = 1 / this.aspectRatio;
}
}
else {
if (this.aspectRatio < 1) {
// image is tall, i want to shrink radius vertical
blurScale = this.aspectRatio;
}
}
blur = blurScale * this.blur * 0.12;
if (this.horizontal) {
delta[0] = blur;
}
else {
delta[1] = blur;
}
return delta;
},
});
/**
 * Deserialize a JSON definition of a BlurFilter into a concrete instance.
 */
filters.Blur.fromObject = fabric.Image.filters.BaseFilter.fromObject;
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -3,7 +3,6 @@
'use strict';
var fabric = global.fabric || (global.fabric = { }),
extend = fabric.util.object.extend,
filters = fabric.Image.filters,
createClass = fabric.util.createClass;
@ -19,7 +18,7 @@
* brightness: 200
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
*/
filters.Brightness = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Brightness.prototype */ {
@ -31,44 +30,75 @@
type: 'Brightness',
/**
* Constructor
* @memberOf fabric.Image.filters.Brightness.prototype
* @param {Object} [options] Options object
* @param {Number} [options.brightness=0] Value to brighten the image up (-255..255)
* Fragment source for the brightness program
*/
initialize: function(options) {
options = options || { };
this.brightness = options.brightness || 0;
},
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uBrightness;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = texture2D(uTexture, vTexCoord);\n' +
'color.rgb += uBrightness;\n' +
'gl_FragColor = color;\n' +
'}',
/**
* Applies filter to canvas element
* @param {Object} canvasEl Canvas element to apply filter to
* Brightness value, from -1 to 1.
* translated to -255 to 255 for 2d
* 0.0039215686 is the part of 1 that gets translated to 1 in 2d
* @param {Number} brightness
* @default
*/
applyTo: function(canvasEl) {
var context = canvasEl.getContext('2d'),
imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
data = imageData.data,
brightness = this.brightness;
brightness: 0,
for (var i = 0, len = data.length; i < len; i += 4) {
data[i] += brightness;
data[i + 1] += brightness;
data[i + 2] += brightness;
/**
* Describe the property that is the filter parameter
* @param {String} m
* @default
*/
mainParameter: 'brightness',
/**
* Apply the Brightness operation to a Uint8ClampedArray representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8ClampedArray to be filtered.
*/
applyTo2d: function(options) {
if (this.brightness === 0) {
return;
}
var imageData = options.imageData,
data = imageData.data, i, len = data.length,
brightness = Math.round(this.brightness * 255);
for (i = 0; i < len; i += 4) {
data[i] = data[i] + brightness;
data[i + 1] = data[i + 1] + brightness;
data[i + 2] = data[i + 2] + brightness;
}
context.putImageData(imageData, 0, 0);
},
/**
* Returns object representation of an instance
* @return {Object} Object representation of an instance
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
toObject: function() {
return extend(this.callSuper('toObject'), {
brightness: this.brightness
});
}
getUniformLocations: function(gl, program) {
return {
uBrightness: gl.getUniformLocation(program, 'uBrightness'),
};
},
/**
* Send data from this filter to its shader program's uniforms.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
*/
sendUniformData: function(gl, uniformLocations) {
gl.uniform1f(uniformLocations.uBrightness, this.brightness);
},
});
/**

View file

@ -3,7 +3,6 @@
'use strict';
var fabric = global.fabric || (global.fabric = { }),
extend = fabric.util.object.extend,
filters = fabric.Image.filters,
createClass = fabric.util.createClass;
@ -26,7 +25,7 @@
]
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
*/
filters.ColorMatrix = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.ColorMatrix.prototype */ {
@ -37,63 +36,114 @@
*/
type: 'ColorMatrix',
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'varying vec2 vTexCoord;\n' +
'uniform mat4 uColorMatrix;\n' +
'uniform vec4 uConstants;\n' +
'void main() {\n' +
'vec4 color = texture2D(uTexture, vTexCoord);\n' +
'color *= uColorMatrix;\n' +
'color += uConstants;\n' +
'gl_FragColor = color;\n' +
'}',
/**
* Colormatrix for pixels.
* array of 20 floats. Numbers in positions 4, 9, 14, 19 lose meaning
* outside the -1, 1 range.
* 0.0039215686 is the part of 1 that get translated to 1 in 2d
* @param {Array} matrix array of 20 numbers.
* @default
*/
matrix: [
1, 0, 0, 0, 0,
0, 1, 0, 0, 0,
0, 0, 1, 0, 0,
0, 0, 0, 1, 0
],
mainParameter: 'matrix',
/**
* Lock the colormatrix on the color part, skipping alpha, mainly for non webgl scenarios
* to save some calculation
*/
colorsOnly: true,
/**
* Constructor
* @memberOf fabric.Image.filters.ColorMatrix.prototype
* @param {Object} [options] Options object
* @param {Array} [options.matrix] Color Matrix to modify the image data with
*/
initialize: function( options ) {
options || ( options = {} );
this.matrix = options.matrix || [
1, 0, 0, 0, 0,
0, 1, 0, 0, 0,
0, 0, 1, 0, 0,
0, 0, 0, 1, 0
];
initialize: function(options) {
this.callSuper('initialize', options);
// create a new array instead mutating the prototype with push
this.matrix = this.matrix.slice(0);
},
/**
* Applies filter to canvas element
* @param {Object} canvasEl Canvas element to apply filter to
* Apply the ColorMatrix operation to a Uint8Array representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8Array to be filtered.
*/
applyTo: function( canvasEl ) {
var context = canvasEl.getContext( '2d' ),
imageData = context.getImageData( 0, 0, canvasEl.width, canvasEl.height ),
applyTo2d: function(options) {
var imageData = options.imageData,
data = imageData.data,
iLen = data.length,
i,
r,
g,
b,
a,
m = this.matrix;
m = this.matrix,
r, g, b, a, i, colorsOnly = this.colorsOnly;
for ( i = 0; i < iLen; i += 4 ) {
r = data[ i ];
g = data[ i + 1 ];
b = data[ i + 2 ];
a = data[ i + 3 ];
data[ i ] = r * m[ 0 ] + g * m[ 1 ] + b * m[ 2 ] + a * m[ 3 ] + m[ 4 ];
data[ i + 1 ] = r * m[ 5 ] + g * m[ 6 ] + b * m[ 7 ] + a * m[ 8 ] + m[ 9 ];
data[ i + 2 ] = r * m[ 10 ] + g * m[ 11 ] + b * m[ 12 ] + a * m[ 13 ] + m[ 14 ];
data[ i + 3 ] = r * m[ 15 ] + g * m[ 16 ] + b * m[ 17 ] + a * m[ 18 ] + m[ 19 ];
for (i = 0; i < iLen; i += 4) {
r = data[i];
g = data[i + 1];
b = data[i + 2];
if (colorsOnly) {
data[i] = r * m[0] + g * m[1] + b * m[2] + m[4] * 255;
data[i + 1] = r * m[5] + g * m[6] + b * m[7] + m[9] * 255;
data[i + 2] = r * m[10] + g * m[11] + b * m[12] + m[14] * 255;
}
else {
a = data[i + 3];
data[i] = r * m[0] + g * m[1] + b * m[2] + a * m[3] + m[4] * 255;
data[i + 1] = r * m[5] + g * m[6] + b * m[7] + a * m[8] + m[9] * 255;
data[i + 2] = r * m[10] + g * m[11] + b * m[12] + a * m[13] + m[14] * 255;
data[i + 3] = r * m[15] + g * m[16] + b * m[17] + a * m[18] + m[19] * 255;
}
}
context.putImageData( imageData, 0, 0 );
},
/**
* Returns object representation of an instance
* @return {Object} Object representation of an instance
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
toObject: function() {
return extend(this.callSuper('toObject'), {
type: this.type,
matrix: this.matrix
});
}
getUniformLocations: function(gl, program) {
return {
uColorMatrix: gl.getUniformLocation(program, 'uColorMatrix'),
uConstants: gl.getUniformLocation(program, 'uConstants'),
};
},
/**
 * Send data from this filter to its shader program's uniforms.
 *
 * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
 * @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
 */
sendUniformData: function(gl, uniformLocations) {
  // split the 4x5 color matrix into a 4x4 multiplicative part (columns
  // 0-3 of each row) and a vec4 of additive constants (column 4)
  var m = this.matrix,
      matrix = [
        m[0], m[1], m[2], m[3],
        m[5], m[6], m[7], m[8],
        m[10], m[11], m[12], m[13],
        m[15], m[16], m[17], m[18]
      ],
      constants = [m[4], m[9], m[14], m[19]];
  gl.uniformMatrix4fv(uniformLocations.uColorMatrix, false, matrix);
  gl.uniform4fv(uniformLocations.uConstants, constants);
},
});
/**

View file

@ -0,0 +1,68 @@
(function(global) {

  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * A container class that knows how to apply a sequence of filters to an input image.
   */
  filters.Composed = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Composed.prototype */ {

    type: 'Composed',

    /**
     * A non sparse array of filters to apply
     */
    subFilters: [],

    /**
     * Constructor
     * @param {Object} [options] Options object
     */
    initialize: function(options) {
      this.callSuper('initialize', options);
      // own copy so that pushes do not leak onto the shared prototype array
      this.subFilters = this.subFilters.slice(0);
    },

    /**
     * Run every contained filter, in order, against the pipeline state.
     *
     * @param {Object} options
     * @param {Number} options.passes The number of filters remaining to be applied.
     */
    applyTo: function(options) {
      // account for the extra rendering passes the children will consume
      options.passes += this.subFilters.length - 1;
      var list = this.subFilters, i;
      for (i = 0; i < list.length; i++) {
        list[i].applyTo(options);
      }
    },

    /**
     * Serialize this filter into JSON.
     *
     * @returns {Object} A JSON representation of this filter.
     */
    toObject: function() {
      var serialized = this.subFilters.map(function(filter) {
        return filter.toObject();
      });
      return fabric.util.object.extend(this.callSuper('toObject'), {
        subFilters: serialized,
      });
    },
  });

  /**
   * Deserialize a JSON definition of a ComposedFilter into a concrete instance.
   */
  fabric.Image.filters.Composed.fromObject = function(object, callback) {
    // rebuild each child filter from its own serialized form
    var children = (object.subFilters || []).map(function(filterObj) {
      return new fabric.Image.filters[filterObj.type](filterObj);
    });
    var instance = new fabric.Image.filters.Composed({ subFilters: children });
    if (callback) {
      callback(instance);
    }
    return instance;
  };
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -3,7 +3,6 @@
'use strict';
var fabric = global.fabric || (global.fabric = { }),
extend = fabric.util.object.extend,
filters = fabric.Image.filters,
createClass = fabric.util.createClass;
@ -19,7 +18,7 @@
* contrast: 40
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
*/
filters.Contrast = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Contrast.prototype */ {
@ -30,45 +29,71 @@
*/
type: 'Contrast',
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uContrast;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = texture2D(uTexture, vTexCoord);\n' +
'float contrastF = 1.015 * (uContrast + 1.0) / (1.0 * (1.015 - uContrast));\n' +
'color.rgb = contrastF * (color.rgb - 0.5) + 0.5;\n' +
'gl_FragColor = color;\n' +
'}',
contrast: 0,
mainParameter: 'contrast',
/**
* Constructor
* @memberOf fabric.Image.filters.Contrast.prototype
* @param {Object} [options] Options object
* @param {Number} [options.contrast=0] Value to contrast the image up (-255...255)
* @param {Number} [options.contrast=0] Value to contrast the image up (-1...1)
*/
initialize: function(options) {
options = options || { };
this.contrast = options.contrast || 0;
},
/**
* Applies filter to canvas element
* @param {Object} canvasEl Canvas element to apply filter to
*/
applyTo: function(canvasEl) {
var context = canvasEl.getContext('2d'),
imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
data = imageData.data,
contrastF = 259 * (this.contrast + 255) / (255 * (259 - this.contrast));
/**
* Apply the Contrast operation to a Uint8Array representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8Array to be filtered.
*/
applyTo2d: function(options) {
if (this.contrast === 0) {
return;
}
var imageData = options.imageData, i, len,
data = imageData.data, len = data.length,
contrast = Math.floor(this.contrast * 255),
contrastF = 259 * (contrast + 255) / (255 * (259 - contrast));
for (var i = 0, len = data.length; i < len; i += 4) {
for (i = 0; i < len; i += 4) {
data[i] = contrastF * (data[i] - 128) + 128;
data[i + 1] = contrastF * (data[i + 1] - 128) + 128;
data[i + 2] = contrastF * (data[i + 2] - 128) + 128;
}
context.putImageData(imageData, 0, 0);
},
/**
* Returns object representation of an instance
* @return {Object} Object representation of an instance
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
toObject: function() {
return extend(this.callSuper('toObject'), {
contrast: this.contrast
});
}
getUniformLocations: function(gl, program) {
return {
uContrast: gl.getUniformLocation(program, 'uContrast'),
};
},
/**
* Send data from this filter to its shader program's uniforms.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
*/
sendUniformData: function(gl, uniformLocations) {
gl.uniform1f(uniformLocations.uContrast, this.contrast);
},
});
/**

View file

@ -21,7 +21,8 @@
* 0, -1, 0 ]
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
* canvas.renderAll();
* @example <caption>Blur filter</caption>
* var filter = new fabric.Image.filters.Convolute({
* matrix: [ 1/9, 1/9, 1/9,
@ -29,7 +30,8 @@
* 1/9, 1/9, 1/9 ]
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
* canvas.renderAll();
* @example <caption>Emboss filter</caption>
* var filter = new fabric.Image.filters.Convolute({
* matrix: [ 1, 1, 1,
@ -37,7 +39,8 @@
* -1, -1, -1 ]
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
* canvas.renderAll();
* @example <caption>Emboss filter with opaqueness</caption>
* var filter = new fabric.Image.filters.Convolute({
* opaque: true,
@ -46,7 +49,8 @@
* -1, -1, -1 ]
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
* canvas.renderAll();
*/
filters.Convolute = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Convolute.prototype */ {
@ -57,6 +61,158 @@
*/
type: 'Convolute',
/*
* Opaque value (true/false)
*/
opaque: false,
/*
* matrix for the filter, max 9x9
*/
matrix: [0, 0, 0, 0, 1, 0, 0, 0, 0],
/**
* Fragment source for the brightness program
*/
fragmentSource: {
Convolute_3_1: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[9];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 0);\n' +
'for (float h = 0.0; h < 3.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 3.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 1), uStepH * (h - 1));\n' +
'color += texture2D(uTexture, vTexCoord + matrixPos) * uMatrix[int(h * 3.0 + w)];\n' +
'}\n' +
'}\n' +
'gl_FragColor = color;\n' +
'}',
Convolute_3_0: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[9];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 1);\n' +
'for (float h = 0.0; h < 3.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 3.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 1.0), uStepH * (h - 1.0));\n' +
'color.rgb += texture2D(uTexture, vTexCoord + matrixPos).rgb * uMatrix[int(h * 3.0 + w)];\n' +
'}\n' +
'}\n' +
'float alpha = texture2D(uTexture, vTexCoord).a;\n' +
'gl_FragColor = color;\n' +
'gl_FragColor.a = alpha;\n' +
'}',
Convolute_5_1: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[25];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 0);\n' +
'for (float h = 0.0; h < 5.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 5.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 2.0), uStepH * (h - 2.0));\n' +
'color += texture2D(uTexture, vTexCoord + matrixPos) * uMatrix[int(h * 5.0 + w)];\n' +
'}\n' +
'}\n' +
'gl_FragColor = color;\n' +
'}',
Convolute_5_0: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[25];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 1);\n' +
'for (float h = 0.0; h < 5.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 5.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 2.0), uStepH * (h - 2.0));\n' +
'color.rgb += texture2D(uTexture, vTexCoord + matrixPos).rgb * uMatrix[int(h * 5.0 + w)];\n' +
'}\n' +
'}\n' +
'float alpha = texture2D(uTexture, vTexCoord).a;\n' +
'gl_FragColor = color;\n' +
'gl_FragColor.a = alpha;\n' +
'}',
Convolute_7_1: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[49];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 0);\n' +
'for (float h = 0.0; h < 7.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 7.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 3.0), uStepH * (h - 3.0));\n' +
'color += texture2D(uTexture, vTexCoord + matrixPos) * uMatrix[int(h * 7.0 + w)];\n' +
'}\n' +
'}\n' +
'gl_FragColor = color;\n' +
'}',
Convolute_7_0: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[49];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 1);\n' +
'for (float h = 0.0; h < 7.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 7.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 3.0), uStepH * (h - 3.0));\n' +
'color.rgb += texture2D(uTexture, vTexCoord + matrixPos).rgb * uMatrix[int(h * 7.0 + w)];\n' +
'}\n' +
'}\n' +
'float alpha = texture2D(uTexture, vTexCoord).a;\n' +
'gl_FragColor = color;\n' +
'gl_FragColor.a = alpha;\n' +
'}',
Convolute_9_1: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[81];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 0);\n' +
'for (float h = 0.0; h < 9.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 9.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 4.0), uStepH * (h - 4.0));\n' +
'color += texture2D(uTexture, vTexCoord + matrixPos) * uMatrix[int(h * 9.0 + w)];\n' +
'}\n' +
'}\n' +
'gl_FragColor = color;\n' +
'}',
Convolute_9_0: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uMatrix[81];\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = vec4(0, 0, 0, 1);\n' +
'for (float h = 0.0; h < 9.0; h+=1.0) {\n' +
'for (float w = 0.0; w < 9.0; w+=1.0) {\n' +
'vec2 matrixPos = vec2(uStepW * (w - 4.0), uStepH * (h - 4.0));\n' +
'color.rgb += texture2D(uTexture, vTexCoord + matrixPos).rgb * uMatrix[int(h * 9.0 + w)];\n' +
'}\n' +
'}\n' +
'float alpha = texture2D(uTexture, vTexCoord).a;\n' +
'gl_FragColor = color;\n' +
'gl_FragColor.a = alpha;\n' +
'}',
},
/**
* Constructor
* @memberOf fabric.Image.filters.Convolute.prototype
@ -64,48 +220,55 @@
* @param {Boolean} [options.opaque=false] Opaque value (true/false)
* @param {Array} [options.matrix] Filter matrix
*/
initialize: function(options) {
options = options || { };
this.opaque = options.opaque;
this.matrix = options.matrix || [
0, 0, 0,
0, 1, 0,
0, 0, 0
];
/**
* Retrieves the cached shader.
* @param {Object} options
* @param {WebGLRenderingContext} options.context The GL context used for rendering.
* @param {Object} options.programCache A map of compiled shader programs, keyed by filter type.
*/
retrieveShader: function(options) {
var size = Math.sqrt(this.matrix.length);
var cacheKey = this.type + '_' + size + '_' + this.opaque ? 1 : 0;
var shaderSource = this.fragmentSource[cacheKey];
if (!options.programCache.hasOwnProperty(cacheKey)) {
options.programCache[cacheKey] = this.createProgram(options.context, shaderSource);
}
return options.programCache[cacheKey];
},
/**
* Applies filter to canvas element
* @param {Object} canvasEl Canvas element to apply filter to
* Apply the Brightness operation to a Uint8ClampedArray representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8ClampedArray to be filtered.
*/
applyTo: function(canvasEl) {
var weights = this.matrix,
context = canvasEl.getContext('2d'),
pixels = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
applyTo2d: function(options) {
var imageData = options.imageData,
data = imageData.data,
weights = this.matrix,
side = Math.round(Math.sqrt(weights.length)),
halfSide = Math.floor(side / 2),
src = pixels.data,
sw = pixels.width,
sh = pixels.height,
output = context.createImageData(sw, sh),
sw = imageData.width,
sh = imageData.height,
output = options.ctx.createImageData(sw, sh),
dst = output.data,
// go through the destination image pixels
alphaFac = this.opaque ? 1 : 0,
r, g, b, a, dstOff,
scx, scy, srcOff, wt;
scx, scy, srcOff, wt,
x, y, cx, cy;
for (var y = 0; y < sh; y++) {
for (var x = 0; x < sw; x++) {
for (y = 0; y < sh; y++) {
for (x = 0; x < sw; x++) {
dstOff = (y * sw + x) * 4;
// calculate the weighed sum of the source image pixels that
// fall under the convolution matrix
r = 0; g = 0; b = 0; a = 0;
for (var cy = 0; cy < side; cy++) {
for (var cx = 0; cx < side; cx++) {
for (cy = 0; cy < side; cy++) {
for (cx = 0; cx < side; cx++) {
scy = y + cy - halfSide;
scx = x + cx - halfSide;
@ -117,20 +280,52 @@
srcOff = (scy * sw + scx) * 4;
wt = weights[cy * side + cx];
r += src[srcOff] * wt;
g += src[srcOff + 1] * wt;
b += src[srcOff + 2] * wt;
a += src[srcOff + 3] * wt;
r += data[srcOff] * wt;
g += data[srcOff + 1] * wt;
b += data[srcOff + 2] * wt;
// eslint-disable-next-line max-depth
if (!alphaFac) {
a += data[srcOff + 3] * wt;
}
}
}
dst[dstOff] = r;
dst[dstOff + 1] = g;
dst[dstOff + 2] = b;
dst[dstOff + 3] = a + alphaFac * (255 - a);
if (!alphaFac) {
dst[dstOff + 3] = a;
}
else {
dst[dstOff + 3] = data[dstOff + 3];
}
}
}
options.imageData = output;
},
context.putImageData(output, 0, 0);
/**
 * Return WebGL uniform locations for this filter's shader.
 *
 * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
 * @param {WebGLShaderProgram} program This filter's compiled shader program.
 */
getUniformLocations: function(gl, program) {
  // NOTE(review): only uMatrix is declared in the fragment sources above;
  // uOpaque/uHalfSize/uSize resolve to null locations and are never sent by
  // sendUniformData — confirm whether they can be dropped.
  return {
    uMatrix: gl.getUniformLocation(program, 'uMatrix'),
    uOpaque: gl.getUniformLocation(program, 'uOpaque'),
    uHalfSize: gl.getUniformLocation(program, 'uHalfSize'),
    uSize: gl.getUniformLocation(program, 'uSize'),
  };
},
/**
 * Send data from this filter to its shader program's uniforms.
 *
 * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
 * @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
 */
sendUniformData: function(gl, uniformLocations) {
  // the size and opaque variants are baked into the shader source selected
  // by retrieveShader, so the convolution matrix is the only uniform sent
  gl.uniform1fv(uniformLocations.uMatrix, this.matrix);
},
/**

View file

@ -0,0 +1,85 @@
(function(global) {

  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  // 4x5 color matrices (rows map to output R, G, B, A; the 5th entry of each
  // row is an additive offset on the 0..1 scale) defining each preset effect.
  var matrices = {
    Brownie: [
      0.59970,0.34553,-0.27082,0,0.186,
      -0.03770,0.86095,0.15059,0,-0.1449,
      0.24113,-0.07441,0.44972,0,-0.02965,
      0,0,0,1,0
    ],
    Vintage: [
      0.62793,0.32021,-0.03965,0,0.03784,
      0.02578,0.64411,0.03259,0,0.02926,
      0.04660,-0.08512,0.52416,0,0.02023,
      0,0,0,1,0
    ],
    Kodachrome: [
      1.12855,-0.39673,-0.03992,0,0.24991,
      -0.16404,1.08352,-0.05498,0,0.09698,
      -0.16786,-0.56034,1.60148,0,0.13972,
      0,0,0,1,0
    ],
    Technicolor: [
      1.91252,-0.85453,-0.09155,0,0.04624,
      -0.30878,1.76589,-0.10601,0,-0.27589,
      -0.23110,-0.75018,1.84759,0,0.12137,
      0,0,0,1,0
    ],
    Polaroid: [
      1.438,-0.062,-0.062,0,0,
      -0.122,1.378,-0.122,0,0,
      -0.016,-0.016,1.483,0,0,
      0,0,0,1,0
    ],
    Sepia: [
      0.393, 0.769, 0.189, 0, 0,
      0.349, 0.686, 0.168, 0, 0,
      0.272, 0.534, 0.131, 0, 0,
      0, 0, 0, 1, 0
    ],
    BlackWhite: [
      1.5, 1.5, 1.5, 0, -1,
      1.5, 1.5, 1.5, 0, -1,
      1.5, 1.5, 1.5, 0, -1,
      0, 0, 0, 1, 0,
    ]
  };

  // Generate one ColorMatrix subclass per preset and register it, together
  // with its fromObject deserializer, on fabric.Image.filters.
  for (var key in matrices) {
    filters[key] = createClass(filters.ColorMatrix, /** @lends fabric.Image.filters.Sepia.prototype */ {

      /**
       * Filter type
       * @param {String} type
       * @default
       */
      type: key,

      /**
       * Colormatrix for the effect
       * array of 20 floats. Numbers in positions 4, 9, 14, 19 lose meaning
       * outside the -1, 1 range.
       * @param {Array} matrix array of 20 numbers.
       * @default
       */
      matrix: matrices[key],

      /**
       * Lock the matrix export for this kind of static, parameterless filters.
       */
      mainParameter: false,

      /**
       * Lock the colormatrix on the color part, skipping alpha
       */
      colorsOnly: true,
    });
    fabric.Image.filters[key].fromObject = fabric.Image.filters.BaseFilter.fromObject;
  }
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -0,0 +1,127 @@
(function(global) {

  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Gamma filter class
   * @class fabric.Image.filters.Gamma
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link fabric.Image.filters.Gamma#initialize} for constructor definition
   * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
   * @example
   * var filter = new fabric.Image.filters.Gamma({
   *   gamma: [1, 0.5, 2.1]
   * });
   * object.filters.push(filter);
   * object.applyFilters();
   */
  filters.Gamma = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Gamma.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'Gamma',

    fragmentSource: 'precision highp float;\n' +
      'uniform sampler2D uTexture;\n' +
      'uniform vec3 uGamma;\n' +
      'varying vec2 vTexCoord;\n' +
      'void main() {\n' +
        'vec4 color = texture2D(uTexture, vTexCoord);\n' +
        'vec3 correction = (1.0 / uGamma);\n' +
        'color.r = pow(color.r, correction.r);\n' +
        'color.g = pow(color.g, correction.g);\n' +
        'color.b = pow(color.b, correction.b);\n' +
        'gl_FragColor = color;\n' +
        'gl_FragColor.rgb *= color.a;\n' +
      '}',

    /**
     * Gamma array value, from 0.01 to 2.2.
     * @param {Array} gamma
     * @default
     */
    gamma: [1, 1, 1],

    /**
     * Describe the property that is the filter parameter
     * @param {String} m
     * @default
     */
    mainParameter: 'gamma',

    /**
     * Apply the Gamma operation to a Uint8Array representing the pixels of an image.
     *
     * @param {Object} options
     * @param {ImageData} options.imageData The Uint8Array to be filtered.
     */
    applyTo2d: function(options) {
      var imageData = options.imageData, data = imageData.data,
          gamma = this.gamma, len = data.length,
          rInv = 1 / gamma[0], gInv = 1 / gamma[1],
          bInv = 1 / gamma[2], i, cacheKey;

      if (!this.rVals) {
        // eslint-disable-next-line
        this.rVals = new Uint8Array(256);
        // eslint-disable-next-line
        this.gVals = new Uint8Array(256);
        // eslint-disable-next-line
        this.bVals = new Uint8Array(256);
      }

      // This is an optimization - pre-compute a look-up table for each color
      // channel instead of performing the pow calls for each pixel. The
      // tables are rebuilt only when the gamma values actually changed,
      // instead of on every application as before.
      cacheKey = rInv + '_' + gInv + '_' + bInv;
      if (this.__lutCacheKey !== cacheKey) {
        for (i = 0; i < 256; i++) {
          this.rVals[i] = Math.pow(i / 255, rInv) * 255;
          this.gVals[i] = Math.pow(i / 255, gInv) * 255;
          this.bVals[i] = Math.pow(i / 255, bInv) * 255;
        }
        this.__lutCacheKey = cacheKey;
      }

      // remap every pixel through the tables; alpha is left untouched
      for (i = 0; i < len; i += 4) {
        data[i] = this.rVals[data[i]];
        data[i + 1] = this.gVals[data[i + 1]];
        data[i + 2] = this.bVals[data[i + 2]];
      }
    },

    /**
     * Return WebGL uniform locations for this filter's shader.
     *
     * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
     * @param {WebGLShaderProgram} program This filter's compiled shader program.
     */
    getUniformLocations: function(gl, program) {
      return {
        uGamma: gl.getUniformLocation(program, 'uGamma'),
      };
    },

    /**
     * Send data from this filter to its shader program's uniforms.
     *
     * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
     * @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
     */
    sendUniformData: function(gl, uniformLocations) {
      gl.uniform3fv(uniformLocations.uGamma, this.gamma);
    },
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.Gamma} Instance of fabric.Image.filters.Gamma
   */
  fabric.Image.filters.Gamma.fromObject = fabric.Image.filters.BaseFilter.fromObject;
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -1,83 +0,0 @@
(function(global) {

  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      extend = fabric.util.object.extend,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * GradientTransparency filter class
   * Applies a linear alpha ramp over the pixel buffer: alpha starts near
   * threshold + 255 at the first pixel and falls toward threshold at the last.
   * @class fabric.Image.filters.GradientTransparency
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link fabric.Image.filters.GradientTransparency#initialize} for constructor definition
   * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
   * @example
   * var filter = new fabric.Image.filters.GradientTransparency({
   *   threshold: 200
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  // eslint-disable-next-line max-len
  filters.GradientTransparency = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.GradientTransparency.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'GradientTransparency',

    /**
     * Constructor
     * @memberOf fabric.Image.filters.GradientTransparency.prototype
     * @param {Object} [options] Options object
     * @param {Number} [options.threshold=100] Threshold value
     */
    initialize: function(options) {
      options = options || { };
      this.threshold = options.threshold || 100;
    },

    /**
     * Applies filter to canvas element
     * @param {Object} canvasEl Canvas element to apply filter to
     */
    applyTo: function(canvasEl) {
      var context = canvasEl.getContext('2d'),
          imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
          data = imageData.data,
          threshold = this.threshold,
          total = data.length;
      // alpha decreases linearly with the pixel's position in the buffer;
      // values above 255 are clamped by the Uint8ClampedArray backing store
      for (var i = 0, len = data.length; i < len; i += 4) {
        data[i + 3] = threshold + 255 * (total - i) / total;
      }
      context.putImageData(imageData, 0, 0);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      return extend(this.callSuper('toObject'), {
        threshold: this.threshold
      });
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.GradientTransparency} Instance of fabric.Image.filters.GradientTransparency
   */
  fabric.Image.filters.GradientTransparency.fromObject = fabric.Image.filters.BaseFilter.fromObject;
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -15,7 +15,7 @@
* @example
* var filter = new fabric.Image.filters.Grayscale();
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
*/
filters.Grayscale = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Grayscale.prototype */ {
@ -26,29 +26,111 @@
*/
type: 'Grayscale',
fragmentSource: {
average: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = texture2D(uTexture, vTexCoord);\n' +
'float average = (color.r + color.b + color.g) / 3.0;\n' +
'gl_FragColor = vec4(average, average, average, color.a);\n' +
'}',
lightness: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform int uMode;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 col = texture2D(uTexture, vTexCoord);\n' +
'float average = (max(max(col.r, col.g),col.b) + min(min(col.r, col.g),col.b)) / 2.0;\n' +
'gl_FragColor = vec4(average, average, average, col.a);\n' +
'}',
luminosity: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform int uMode;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 col = texture2D(uTexture, vTexCoord);\n' +
'float average = 0.21 * col.r + 0.72 * col.g + 0.07 * col.b;\n' +
'gl_FragColor = vec4(average, average, average, col.a);\n' +
'}',
},
/**
* Applies filter to canvas element
* @memberOf fabric.Image.filters.Grayscale.prototype
* @param {Object} canvasEl Canvas element to apply filter to
* Grayscale mode, between 'average', 'lightness', 'luminosity'
* @param {String} type
* @default
*/
applyTo: function(canvasEl) {
var context = canvasEl.getContext('2d'),
imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
data = imageData.data,
len = imageData.width * imageData.height * 4,
index = 0,
average;
mode: 'average',
while (index < len) {
average = (data[index] + data[index + 1] + data[index + 2]) / 3;
data[index] = average;
data[index + 1] = average;
data[index + 2] = average;
index += 4;
mainParameter: 'mode',
/**
* Apply the Grayscale operation to a Uint8Array representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8Array to be filtered.
*/
applyTo2d: function(options) {
var imageData = options.imageData,
data = imageData.data, i,
len = data.length, value,
mode = this.mode;
for (i = 0; i < len; i += 4) {
if (mode === 'average') {
value = (data[i] + data[i + 1] + data[i + 2]) / 3;
}
else if (mode === 'lightness') {
value = (Math.min(data[i], data[i + 1], data[i + 2]) +
Math.max(data[i], data[i + 1], data[i + 2])) / 2;
}
else if (mode === 'luminosity') {
value = 0.21 * data[i] + 0.72 * data[i + 1] + 0.07 * data[i + 2];
}
data[i] = value;
data[i + 1] = value;
data[i + 2] = value;
}
},
context.putImageData(imageData, 0, 0);
}
/**
* Retrieves the cached shader.
* @param {Object} options
* @param {WebGLRenderingContext} options.context The GL context used for rendering.
* @param {Object} options.programCache A map of compiled shader programs, keyed by filter type.
*/
retrieveShader: function(options) {
var cacheKey = this.type + '_' + this.mode;
var shaderSource = this.fragmentSource[this.mode];
if (!options.programCache.hasOwnProperty(cacheKey)) {
options.programCache[cacheKey] = this.createProgram(options.context, shaderSource);
}
return options.programCache[cacheKey];
},
/**
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
getUniformLocations: function(gl, program) {
return {
uMode: gl.getUniformLocation(program, 'uMode'),
};
},
/**
* Send data from this filter to its shader program's uniforms.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
*/
sendUniformData: function(gl, uniformLocations) {
// default average mode.
var mode = 1;
gl.uniform1i(uniformLocations.uMode, mode);
},
});
/**
@ -58,10 +140,6 @@
* @param {function} [callback] to be invoked after filter creation
* @return {fabric.Image.filters.Grayscale} Instance of fabric.Image.filters.Grayscale
*/
fabric.Image.filters.Grayscale.fromObject = function(object, callback) {
object = object || { };
object.type = 'Grayscale';
return fabric.Image.filters.BaseFilter.fromObject(object, callback);
};
fabric.Image.filters.Grayscale.fromObject = fabric.Image.filters.BaseFilter.fromObject;
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -26,25 +26,69 @@
*/
type: 'Invert',
/**
* Applies filter to canvas element
* @memberOf fabric.Image.filters.Invert.prototype
* @param {Object} canvasEl Canvas element to apply filter to
*/
applyTo: function(canvasEl) {
var context = canvasEl.getContext('2d'),
imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
data = imageData.data,
iLen = data.length, i;
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform int uInvert;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = texture2D(uTexture, vTexCoord);\n' +
'if (uInvert == 1) {\n' +
'gl_FragColor = vec4(1.0 - color.r,1.0 -color.g,1.0 -color.b,color.a);\n' +
'} else {\n' +
'gl_FragColor = color;\n' +
'}\n' +
'}',
for (i = 0; i < iLen; i += 4) {
/**
* Filter invert. if false, does nothing
* @param {Boolean} invert
* @default
*/
invert: true,
mainParameter: 'invert',
/**
* Apply the Invert operation to a Uint8Array representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8Array to be filtered.
*/
applyTo2d: function(options) {
if (!this.invert) {
return;
}
var imageData = options.imageData,
data = imageData.data, i,
len = data.length;
for (i = 0; i < len; i += 4) {
data[i] = 255 - data[i];
data[i + 1] = 255 - data[i + 1];
data[i + 2] = 255 - data[i + 2];
}
},
context.putImageData(imageData, 0, 0);
}
/**
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
getUniformLocations: function(gl, program) {
return {
uInvert: gl.getUniformLocation(program, 'uInvert'),
};
},
/**
* Send data from this filter to its shader program's uniforms.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
*/
sendUniformData: function(gl, uniformLocations) {
gl.uniform1i(uniformLocations.uInvert, this.invert);
},
});
/**
@ -54,10 +98,7 @@
* @param {function} [callback] to be invoked after filter creation
* @return {fabric.Image.filters.Invert} Instance of fabric.Image.filters.Invert
*/
fabric.Image.filters.Invert.fromObject = function(object, callback) {
object = object || { };
object.type = 'Invert';
return fabric.Image.filters.BaseFilter.fromObject(object, callback);
};
fabric.Image.filters.Invert.fromObject = fabric.Image.filters.BaseFilter.fromObject;
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -1,107 +0,0 @@
(function(global) {
  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      extend = fabric.util.object.extend,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Mask filter class
   * See http://resources.aleph-1.com/mask/
   * @class fabric.Image.filters.Mask
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link fabric.Image.filters.Mask#initialize} for constructor definition
   */
  filters.Mask = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Mask.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'Mask',

    /**
     * Constructor
     * @memberOf fabric.Image.filters.Mask.prototype
     * @param {Object} [options] Options object
     * @param {fabric.Image} [options.mask] Mask image object
     * @param {Number} [options.channel=0] Rgb channel (0, 1, 2 or 3)
     */
    initialize: function(options) {
      options = options || { };
      this.mask = options.mask;
      // accept only an explicit 0-3 channel index; anything else falls back to 0
      this.channel = [0, 1, 2, 3].indexOf(options.channel) > -1 ? options.channel : 0;
    },

    /**
     * Applies filter to canvas element: the selected channel of the mask
     * image, scaled to the canvas size, becomes the canvas alpha channel.
     * No-op when no mask image is set.
     * @param {Object} canvasEl Canvas element to apply filter to
     */
    applyTo: function(canvasEl) {
      if (!this.mask) {
        return;
      }

      var width = canvasEl.width,
          height = canvasEl.height,
          context = canvasEl.getContext('2d'),
          imageData = context.getImageData(0, 0, width, height),
          data = imageData.data,
          channel = this.channel,
          maskCanvasEl = fabric.util.createCanvasElement(),
          pos, total;

      // rasterize the mask image at the target canvas size
      maskCanvasEl.width = width;
      maskCanvasEl.height = height;
      maskCanvasEl.getContext('2d').drawImage(this.mask.getElement(), 0, 0, width, height);

      var maskData = maskCanvasEl.getContext('2d').getImageData(0, 0, width, height).data;

      total = imageData.width * imageData.height * 4;
      for (pos = 0; pos < total; pos += 4) {
        data[pos + 3] = maskData[pos + channel];
      }
      context.putImageData(imageData, 0, 0);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      var serialized = this.callSuper('toObject');
      return extend(serialized, {
        mask: this.mask.toObject(),
        channel: this.channel
      });
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {Function} [callback] Callback to invoke when a mask filter instance is created
   */
  fabric.Image.filters.Mask.fromObject = function(object, callback) {
    // the mask image has to be loaded asynchronously before the filter exists
    fabric.util.loadImage(object.mask.src, function(img) {
      object.mask = new fabric.Image(img, object.mask);
      return fabric.Image.filters.BaseFilter.fromObject(object, callback);
    });
  };

  /**
   * Indicates that instances of this type are async
   * @static
   * @type Boolean
   * @default
   */
  fabric.Image.filters.Mask.async = true;

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -1,92 +0,0 @@
(function(global) {
  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      extend = fabric.util.object.extend,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Multiply filter class
   * Adapted from <a href="http://www.laurenscorijn.com/articles/colormath-basics">http://www.laurenscorijn.com/articles/colormath-basics</a>
   * @class fabric.Image.filters.Multiply
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @example <caption>Multiply filter with hex color</caption>
   * var filter = new fabric.Image.filters.Multiply({
   *   color: '#F0F'
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   * @example <caption>Multiply filter with rgb color</caption>
   * var filter = new fabric.Image.filters.Multiply({
   *   color: 'rgb(53, 21, 176)'
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  filters.Multiply = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Multiply.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'Multiply',

    /**
     * Constructor
     * @memberOf fabric.Image.filters.Multiply.prototype
     * @param {Object} [options] Options object
     * @param {String} [options.color=#000000] Color to multiply the image pixels with
     */
    initialize: function(options) {
      options = options || { };
      this.color = options.color || '#000000';
    },

    /**
     * Applies filter to canvas element: every pixel channel is scaled by the
     * corresponding channel of the configured color (normalized to 0..1).
     * @param {Object} canvasEl Canvas element to apply filter to
     */
    applyTo: function(canvasEl) {
      var ctx = canvasEl.getContext('2d'),
          imageData = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height),
          data = imageData.data,
          rgb = new fabric.Color(this.color).getSource(),
          // hoist the per-channel scale factors out of the pixel loop
          rFactor = rgb[0] / 255,
          gFactor = rgb[1] / 255,
          bFactor = rgb[2] / 255,
          pos;

      for (pos = 0; pos < data.length; pos += 4) {
        data[pos] *= rFactor;
        data[pos + 1] *= gFactor;
        data[pos + 2] *= bFactor;
      }
      ctx.putImageData(imageData, 0, 0);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      var serialized = this.callSuper('toObject');
      return extend(serialized, {
        color: this.color
      });
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {Function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.Multiply} Instance of fabric.Image.filters.Multiply
   */
  fabric.Image.filters.Multiply.fromObject = fabric.Image.filters.BaseFilter.fromObject;

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -19,7 +19,8 @@
* noise: 700
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
* canvas.renderAll();
*/
filters.Noise = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Noise.prototype */ {
@ -31,24 +32,49 @@
type: 'Noise',
/**
* Constructor
* @memberOf fabric.Image.filters.Noise.prototype
* @param {Object} [options] Options object
* @param {Number} [options.noise=0] Noise value
* Fragment source for the noise program
*/
initialize: function(options) {
options = options || { };
this.noise = options.noise || 0;
},
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uHeight;\n' +
'uniform float uNoise;\n' +
'uniform float uSeed;\n' +
'varying vec2 vTexCoord;\n' +
'float rand(vec2 co, float seed, float vScale) {\n' +
'return fract(sin(dot(co.xy * vScale ,vec2(12.9898 , 78.233))) * 43758.5453 * (seed + 0.01) / 2.0);\n' +
'}\n' +
'void main() {\n' +
'vec4 color = texture2D(uTexture, vTexCoord);\n' +
'color.rgb += (0.5 - rand(vTexCoord, uSeed, uHeight / 10.0)) * uNoise;\n' +
'gl_FragColor = color;\n' +
'}',
/**
* Applies filter to canvas element
* @param {Object} canvasEl Canvas element to apply filter to
* Describe the property that is the filter parameter
* @param {String} m
* @default
*/
applyTo: function(canvasEl) {
var context = canvasEl.getContext('2d'),
imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
data = imageData.data,
mainParameter: 'noise',
/**
* Noise value, from
* @param {Number} noise
* @default
*/
noise: 0,
/**
* Apply the Noise operation to a Uint8ClampedArray representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8ClampedArray to be filtered.
*/
applyTo2d: function(options) {
if (this.noise === 0) {
return;
}
var imageData = options.imageData,
data = imageData.data, i, len = data.length,
noise = this.noise, rand;
for (var i = 0, len = data.length; i < len; i += 4) {
@ -59,8 +85,30 @@
data[i + 1] += rand;
data[i + 2] += rand;
}
},
context.putImageData(imageData, 0, 0);
/**
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
getUniformLocations: function(gl, program) {
return {
uNoise: gl.getUniformLocation(program, 'uNoise'),
uSeed: gl.getUniformLocation(program, 'uSeed'),
};
},
/**
* Send data from this filter to its shader program's uniforms.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
*/
sendUniformData: function(gl, uniformLocations) {
gl.uniform1f(uniformLocations.uNoise, this.noise / 255);
gl.uniform1f(uniformLocations.uSeed, Math.random());
},
/**

View file

@ -3,7 +3,6 @@
'use strict';
var fabric = global.fabric || (global.fabric = { }),
extend = fabric.util.object.extend,
filters = fabric.Image.filters,
createClass = fabric.util.createClass;
@ -19,7 +18,7 @@
* blocksize: 8
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
*/
filters.Pixelate = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Pixelate.prototype */ {
@ -30,28 +29,47 @@
*/
type: 'Pixelate',
/**
* Constructor
* @memberOf fabric.Image.filters.Pixelate.prototype
* @param {Object} [options] Options object
* @param {Number} [options.blocksize=4] Blocksize for pixelate
*/
initialize: function(options) {
options = options || { };
this.blocksize = options.blocksize || 4;
},
blocksize: 4,
mainParameter: 'blocksize',
/**
* Applies filter to canvas element
* @param {Object} canvasEl Canvas element to apply filter to
* Fragment source for the Pixelate program
*/
applyTo: function(canvasEl) {
var context = canvasEl.getContext('2d'),
imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uBlocksize;\n' +
'uniform float uStepW;\n' +
'uniform float uStepH;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'float blockW = uBlocksize * uStepW;\n' +
'float blockH = uBlocksize * uStepW;\n' +
'int posX = int(vTexCoord.x / blockW);\n' +
'int posY = int(vTexCoord.y / blockH);\n' +
'float fposX = float(posX);\n' +
'float fposY = float(posY);\n' +
'vec2 squareCoords = vec2(fposX * blockW, fposY * blockH);\n' +
'vec4 color = texture2D(uTexture, squareCoords);\n' +
'gl_FragColor = color;\n' +
'}',
/**
* Apply the Pixelate operation to a Uint8ClampedArray representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8ClampedArray to be filtered.
*/
applyTo2d: function(options) {
if (this.blocksize === 1) {
return;
}
var imageData = options.imageData,
data = imageData.data,
iLen = imageData.height,
jLen = imageData.width,
index, i, j, r, g, b, a;
index, i, j, r, g, b, a,
_i, _j, _iLen, _jLen;
for (i = 0; i < iLen; i += this.blocksize) {
for (j = 0; j < jLen; j += this.blocksize) {
@ -63,18 +81,10 @@
b = data[index + 2];
a = data[index + 3];
/*
blocksize: 4
[1,x,x,x,1]
[x,x,x,x,1]
[x,x,x,x,1]
[x,x,x,x,1]
[1,1,1,1,1]
*/
for (var _i = i, _ilen = i + this.blocksize; _i < _ilen; _i++) {
for (var _j = j, _jlen = j + this.blocksize; _j < _jlen; _j++) {
_iLen = Math.min(i + this.blocksize, iLen);
_jLen = Math.min(j + this.blocksize, jLen);
for (_i = i; _i < _iLen; _i++) {
for (_j = j; _j < _jLen; _j++) {
index = (_i * 4) * jLen + (_j * 4);
data[index] = r;
data[index + 1] = g;
@ -84,19 +94,31 @@
}
}
}
context.putImageData(imageData, 0, 0);
},
/**
* Returns object representation of an instance
* @return {Object} Object representation of an instance
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
toObject: function() {
return extend(this.callSuper('toObject'), {
blocksize: this.blocksize
});
}
getUniformLocations: function(gl, program) {
return {
uBlocksize: gl.getUniformLocation(program, 'uBlocksize'),
uWidth: gl.getUniformLocation(program, 'uWidth'),
uHeight: gl.getUniformLocation(program, 'uHeight'),
};
},
/**
* Send data from this filter to its shader program's uniforms.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
*/
sendUniformData: function(gl, uniformLocations) {
gl.uniform1f(uniformLocations.uBlocksize, this.blocksize);
},
});
/**

View file

@ -0,0 +1,168 @@
(function(global) {
  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      extend = fabric.util.object.extend,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * RemoveColor filter class: makes pixels transparent when they fall within
   * a distance of a reference color.
   * @class fabric.Image.filters.RemoveColor
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link fabric.Image.filters.RemoveColor#initialize} for constructor definition
   * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
   * @example
   * var filter = new fabric.Image.filters.RemoveColor({
   *   threshold: 40,
   *   distance: 140
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  filters.RemoveColor = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.RemoveColor.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'RemoveColor',

    /**
     * Color to remove, in any format understood by fabric.Color.
     * @param {String} color
     * @default
     */
    color: '#FFFFFF',

    /**
     * Fragment source for the removeColor program
     */
    fragmentSource: 'precision highp float;\n' +
      'uniform sampler2D uTexture;\n' +
      'uniform vec4 uLow;\n' +
      'uniform vec4 uHigh;\n' +
      'varying vec2 vTexCoord;\n' +
      'void main() {\n' +
        'gl_FragColor = texture2D(uTexture, vTexCoord);\n' +
        'if(all(greaterThan(gl_FragColor.rgb,uLow.rgb)) && all(greaterThan(uHigh.rgb,gl_FragColor.rgb))) {\n' +
          'gl_FragColor.a = 0.0;\n' +
        '}\n' +
      '}',

    /**
     * distance to actual color, as value up or down from each r,g,b
     * between 0 and 1
     **/
    distance: 0.02,

    /**
     * For color to remove inside distance, use alpha channel for a smoother deletion
     * NOT IMPLEMENTED YET
     **/
    useAlpha: false,

    /**
     * Zero the alpha of every pixel whose r, g and b all lie strictly inside
     * the [color - distance, color + distance] band (distance scaled to 0-255).
     *
     * @param {Object} options
     * @param {ImageData} options.imageData The Uint8Array to be filtered.
     */
    applyTo2d: function(options) {
      var data = options.imageData.data,
          tolerance = this.distance * 255,
          rgb = new fabric.Color(this.color).getSource(),
          rMin = rgb[0] - tolerance, rMax = rgb[0] + tolerance,
          gMin = rgb[1] - tolerance, gMax = rgb[1] + tolerance,
          bMin = rgb[2] - tolerance, bMax = rgb[2] + tolerance,
          pos;

      for (pos = 0; pos < data.length; pos += 4) {
        if (data[pos] > rMin && data[pos] < rMax &&
            data[pos + 1] > gMin && data[pos + 1] < gMax &&
            data[pos + 2] > bMin && data[pos + 2] < bMax) {
          data[pos + 3] = 0;
        }
      }
    },

    /**
     * Return WebGL uniform locations for this filter's shader.
     *
     * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
     * @param {WebGLShaderProgram} program This filter's compiled shader program.
     */
    getUniformLocations: function(gl, program) {
      return {
        uLow: gl.getUniformLocation(program, 'uLow'),
        uHigh: gl.getUniformLocation(program, 'uHigh'),
      };
    },

    /**
     * Send data from this filter to its shader program's uniforms.
     * The color band is normalized to 0..1 to match the shader's color space.
     *
     * @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
     * @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
     */
    sendUniformData: function(gl, uniformLocations) {
      var rgb = new fabric.Color(this.color).getSource(),
          tolerance = parseFloat(this.distance),
          lower = [
            rgb[0] / 255 - tolerance,
            rgb[1] / 255 - tolerance,
            rgb[2] / 255 - tolerance,
            1
          ],
          upper = [
            rgb[0] / 255 + tolerance,
            rgb[1] / 255 + tolerance,
            rgb[2] / 255 + tolerance,
            1
          ];
      gl.uniform4fv(uniformLocations.uLow, lower);
      gl.uniform4fv(uniformLocations.uHigh, upper);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      var serialized = this.callSuper('toObject');
      return extend(serialized, {
        color: this.color,
        distance: this.distance
      });
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {Function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.RemoveColor} Instance of fabric.Image.filters.RemoveColor
   */
  fabric.Image.filters.RemoveColor.fromObject = fabric.Image.filters.BaseFilter.fromObject;

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -1,101 +0,0 @@
(function(global) {
  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      extend = fabric.util.object.extend,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Remove white filter class
   * @class fabric.Image.filters.RemoveWhite
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link fabric.Image.filters.RemoveWhite#initialize} for constructor definition
   * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
   * @example
   * var filter = new fabric.Image.filters.RemoveWhite({
   *   threshold: 40,
   *   distance: 140
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  filters.RemoveWhite = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.RemoveWhite.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'RemoveWhite',

    /**
     * Constructor
     * @memberOf fabric.Image.filters.RemoveWhite.prototype
     * @param {Object} [options] Options object
     * @param {Number} [options.threshold=30] Threshold value
     * @param {Number} [options.distance=20] Distance value
     */
    initialize: function(options) {
      options = options || { };
      this.threshold = options.threshold || 30;
      this.distance = options.distance || 20;
    },

    /**
     * Applies filter to canvas element: a pixel becomes fully transparent
     * when all three channels exceed (255 - threshold) and their pairwise
     * differences stay below the configured distance (i.e. near-white,
     * near-gray pixels).
     * @param {Object} canvasEl Canvas element to apply filter to
     */
    applyTo: function(canvasEl) {
      var ctx = canvasEl.getContext('2d'),
          imageData = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height),
          data = imageData.data,
          distance = this.distance,
          limit = 255 - this.threshold,
          abs = Math.abs,
          total = data.length,
          pos = 0,
          r, g, b;

      while (pos < total) {
        r = data[pos];
        g = data[pos + 1];
        b = data[pos + 2];
        if (r > limit &&
            g > limit &&
            b > limit &&
            abs(r - g) < distance &&
            abs(r - b) < distance &&
            abs(g - b) < distance
        ) {
          data[pos + 3] = 0;
        }
        pos += 4;
      }
      ctx.putImageData(imageData, 0, 0);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      var serialized = this.callSuper('toObject');
      return extend(serialized, {
        threshold: this.threshold,
        distance: this.distance
      });
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {Function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.RemoveWhite} Instance of fabric.Image.filters.RemoveWhite
   */
  fabric.Image.filters.RemoveWhite.fromObject = fabric.Image.filters.BaseFilter.fromObject;

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -3,7 +3,7 @@
'use strict';
var fabric = global.fabric || (global.fabric = { }), pow = Math.pow, floor = Math.floor,
sqrt = Math.sqrt, abs = Math.abs, max = Math.max, round = Math.round, sin = Math.sin,
sqrt = Math.sqrt, abs = Math.abs, round = Math.round, sin = Math.sin,
ceil = Math.ceil,
filters = fabric.Image.filters,
createClass = fabric.util.createClass;
@ -56,6 +56,60 @@
*/
lanczosLobes: 3,
// vertexSource: 'attribute vec2 aPosition;\n' +
// 'attribute vec2 aTexCoord;\n' +
// 'uniform float uStepW;\n' +
// 'uniform float uStepH;\n' +
// 'varying vec2 centerTextureCoordinate;\n' +
// 'varying vec2 oneStepLeftTextureCoordinate;\n' +
// 'varying vec2 twoStepsLeftTextureCoordinate;\n' +
// 'varying vec2 threeStepsLeftTextureCoordinate;\n' +
// 'varying vec2 fourStepsLeftTextureCoordinate;\n' +
// 'varying vec2 oneStepRightTextureCoordinate;\n' +
// 'varying vec2 twoStepsRightTextureCoordinate;\n' +
// 'varying vec2 threeStepsRightTextureCoordinate;\n' +
// 'varying vec2 fourStepsRightTextureCoordinate;\n' +
// 'void main() {\n' +
// 'vec2 firstOffset = vec2(uStepW, uStepH);\n' +
// 'vec2 secondOffset = vec2(2.0 * uStepW, 2.0 * uStepH);\n' +
// 'vec2 thirdOffset = vec2(3.0 * uStepW, 3.0 * uStepH);\n' +
// 'vec2 fourthOffset = vec2(4.0 * uStepW, 4.0 * uStepH);\n' +
// 'centerTextureCoordinate = aTexCoord;\n' +
// 'oneStepLeftTextureCoordinate = aTexCoord - firstOffset;\n' +
// 'twoStepsLeftTextureCoordinate = aTexCoord - secondOffset;\n' +
// 'threeStepsLeftTextureCoordinate = aTexCoord - thirdOffset;\n' +
// 'fourStepsLeftTextureCoordinate = aTexCoord - fourthOffset;\n' +
// 'oneStepRightTextureCoordinate = aTexCoord + firstOffset;\n' +
// 'twoStepsRightTextureCoordinate = aTexCoord + secondOffset;\n' +
// 'threeStepsRightTextureCoordinate = aTexCoord + thirdOffset;\n' +
// 'fourStepsRightTextureCoordinate = aTexCoord + fourthOffset;\n' +
// 'gl_Position = vec4(aPosition * 2.0 - 1.0, 0.0, 1.0);\n' +
// '}',
//
// fragmentSource: 'precision highp float;\n' +
// 'varying vec2 centerTextureCoordinate;\n' +
// 'varying vec2 oneStepLeftTextureCoordinate;\n' +
// 'varying vec2 twoStepsLeftTextureCoordinate;\n' +
// 'varying vec2 threeStepsLeftTextureCoordinate;\n' +
// 'varying vec2 fourStepsLeftTextureCoordinate;\n' +
// 'varying vec2 oneStepRightTextureCoordinate;\n' +
// 'varying vec2 twoStepsRightTextureCoordinate;\n' +
// 'varying vec2 threeStepsRightTextureCoordinate;\n' +
// 'varying vec2 fourStepsRightTextureCoordinate;\n' +
// 'uniform sampler2d uTexture;\n' +
// 'void main() {\n' +
// 'vec4 color = texture2D(uTexture, centerTextureCoordinate) * 0.38026;\n' +
// 'color += texture2D(uTexture, oneStepLeftTextureCoordinate) * 0.27667;\n' +
// 'color += texture2D(uTexture, oneStepRightTextureCoordinate) * 0.27667;\n' +
// 'color += texture2D(uTexture, twoStepsLeftTextureCoordinate) * 0.08074;\n' +
// 'color += texture2D(uTexture, twoStepsRightTextureCoordinate) * 0.08074;\n' +
// 'color += texture2D(uTexture, threeStepsLeftTextureCoordinate) * -0.02612;\n' +
// 'color += texture2D(uTexture, threeStepsRightTextureCoordinate) * -0.02612;\n' +
// 'color += texture2D(uTexture, fourStepsLeftTextureCoordinate) * -0.02143;\n' +
// 'color += texture2D(uTexture, fourStepsRightTextureCoordinate) * -0.02143;\n' +
// 'gl_FragColor = color;\n' +
// '}',
/**
* Applies filter to canvas element
* @memberOf fabric.Image.filters.Resize.prototype
@ -63,7 +117,10 @@
* @param {Number} scaleX
* @param {Number} scaleY
*/
applyTo: function(canvasEl, scaleX, scaleY) {
applyTo2d: function(options) {
var imageData = options.imageData,
scaleX = options.scaleX || this.scaleX,
scaleY = options.scaleY || this.scaleY;
if (scaleX === 1 && scaleY === 1) {
return;
}
@ -71,25 +128,23 @@
this.rcpScaleX = 1 / scaleX;
this.rcpScaleY = 1 / scaleY;
var oW = canvasEl.width, oH = canvasEl.height,
var oW = imageData.width, oH = imageData.height,
dW = round(oW * scaleX), dH = round(oH * scaleY),
imageData;
newData;
if (this.resizeType === 'sliceHack') {
imageData = this.sliceByTwo(canvasEl, oW, oH, dW, dH);
newData = this.sliceByTwo(options, oW, oH, dW, dH);
}
if (this.resizeType === 'hermite') {
imageData = this.hermiteFastResize(canvasEl, oW, oH, dW, dH);
else if (this.resizeType === 'hermite') {
newData = this.hermiteFastResize(options, oW, oH, dW, dH);
}
if (this.resizeType === 'bilinear') {
imageData = this.bilinearFiltering(canvasEl, oW, oH, dW, dH);
else if (this.resizeType === 'bilinear') {
newData = this.bilinearFiltering(options, oW, oH, dW, dH);
}
if (this.resizeType === 'lanczos') {
imageData = this.lanczosResize(canvasEl, oW, oH, dW, dH);
else if (this.resizeType === 'lanczos') {
newData = this.lanczosResize(options, oW, oH, dW, dH);
}
canvasEl.width = dW;
canvasEl.height = dH;
canvasEl.getContext('2d').putImageData(imageData, 0, 0);
options.imageData = newData;
},
/**
@ -101,53 +156,49 @@
* @param {Number} dH Destination Height
* @returns {ImageData}
*/
sliceByTwo: function(canvasEl, oW, oH, dW, dH) {
var context = canvasEl.getContext('2d'), imageData,
multW = 0.5, multH = 0.5, signW = 1, signH = 1,
doneW = false, doneH = false, stepW = oW, stepH = oH,
tmpCanvas = fabric.util.createCanvasElement(),
tmpCtx = tmpCanvas.getContext('2d');
sliceByTwo: function(options, oW, oH, dW, dH) {
var imageData = options.imageData,
mult = 0.5, doneW = false, doneH = false, stepW = oW * mult,
stepH = oH * mult, resources = fabric.filterBackend.resources,
tmpCanvas, ctx, sX = 0, sY = 0, dX = oW, dY = 0;
if (!resources.sliceByTwo) {
resources.sliceByTwo = document.createElement('canvas');
}
tmpCanvas = resources.sliceByTwo;
if (tmpCanvas.width < oW * 1.5 || tmpCanvas.height < oH) {
tmpCanvas.width = oW * 1.5;
tmpCanvas.height = oH;
}
ctx = tmpCanvas.getContext('2d');
ctx.clearRect(0, 0, oW * 1.5, oH);
ctx.putImageData(imageData, 0, 0);
dW = floor(dW);
dH = floor(dH);
tmpCanvas.width = max(dW, oW);
tmpCanvas.height = max(dH, oH);
if (dW > oW) {
multW = 2;
signW = -1;
}
if (dH > oH) {
multH = 2;
signH = -1;
}
imageData = context.getImageData(0, 0, oW, oH);
canvasEl.width = max(dW, oW);
canvasEl.height = max(dH, oH);
context.putImageData(imageData, 0, 0);
while (!doneW || !doneH) {
oW = stepW;
oH = stepH;
if (dW * signW < floor(stepW * multW * signW)) {
stepW = floor(stepW * multW);
if (dW < floor(stepW * mult)) {
stepW = floor(stepW * mult);
}
else {
stepW = dW;
doneW = true;
}
if (dH * signH < floor(stepH * multH * signH)) {
stepH = floor(stepH * multH);
if (dH < floor(stepH * mult)) {
stepH = floor(stepH * mult);
}
else {
stepH = dH;
doneH = true;
}
imageData = context.getImageData(0, 0, oW, oH);
tmpCtx.putImageData(imageData, 0, 0);
context.clearRect(0, 0, stepW, stepH);
context.drawImage(tmpCanvas, 0, 0, oW, oH, 0, 0, stepW, stepH);
ctx.drawImage(tmpCanvas, sX, sY, oW, oH, dX, dY, stepW, stepH);
sX = dX;
sY = dY;
dY += stepH;
}
return context.getImageData(0, 0, dW, dH);
return ctx.getImageData(sX, sY, dW, dH);
},
/**
@ -159,7 +210,7 @@
* @param {Number} dH Destination Height
* @returns {ImageData}
*/
lanczosResize: function(canvasEl, oW, oH, dW, dH) {
lanczosResize: function(options, oW, oH, dW, dH) {
function lanczosCreate(lobes) {
return function(x) {
@ -226,10 +277,9 @@
}
}
var context = canvasEl.getContext('2d'),
srcImg = context.getImageData(0, 0, oW, oH),
destImg = context.getImageData(0, 0, dW, dH),
srcData = srcImg.data, destData = destImg.data,
var srcData = options.imageData.data,
destImg = options.ctx.createImageData(dW, dH),
destData = destImg.data,
lanczos = lanczosCreate(this.lanczosLobes),
ratioX = this.rcpScaleX, ratioY = this.rcpScaleY,
rcpRatioX = 2 / this.rcpScaleX, rcpRatioY = 2 / this.rcpScaleY,
@ -249,12 +299,12 @@
* @param {Number} dH Destination Height
* @returns {ImageData}
*/
bilinearFiltering: function(canvasEl, oW, oH, dW, dH) {
bilinearFiltering: function(options, oW, oH, dW, dH) {
var a, b, c, d, x, y, i, j, xDiff, yDiff, chnl,
color, offset = 0, origPix, ratioX = this.rcpScaleX,
ratioY = this.rcpScaleY, context = canvasEl.getContext('2d'),
w4 = 4 * (oW - 1), img = context.getImageData(0, 0, oW, oH),
pixels = img.data, destImage = context.getImageData(0, 0, dW, dH),
ratioY = this.rcpScaleY,
w4 = 4 * (oW - 1), img = options.imageData,
pixels = img.data, destImage = options.ctx.createImageData(dW, dH),
destPixels = destImage.data;
for (i = 0; i < dH; i++) {
for (j = 0; j < dW; j++) {
@ -287,13 +337,12 @@
* @param {Number} dH Destination Height
* @returns {ImageData}
*/
hermiteFastResize: function(canvasEl, oW, oH, dW, dH) {
hermiteFastResize: function(options, oW, oH, dW, dH) {
var ratioW = this.rcpScaleX, ratioH = this.rcpScaleY,
ratioWHalf = ceil(ratioW / 2),
ratioHHalf = ceil(ratioH / 2),
context = canvasEl.getContext('2d'),
img = context.getImageData(0, 0, oW, oH), data = img.data,
img2 = context.getImageData(0, 0, dW, dH), data2 = img2.data;
img = options.imageData, data = img.data,
img2 = options.ctx.createImageData(dW, dH), data2 = img2.data;
for (var j = 0; j < dH; j++) {
for (var i = 0; i < dW; i++) {
var x2 = (i + j * dW) * 4, weight = 0, weights = 0, weightsAlpha = 0,

View file

@ -3,73 +3,100 @@
'use strict';
var fabric = global.fabric || (global.fabric = { }),
extend = fabric.util.object.extend,
filters = fabric.Image.filters,
createClass = fabric.util.createClass;
/**
* Saturate filter class
* @class fabric.Image.filters.Saturate
* @class fabric.Image.filters.Saturation
* @memberOf fabric.Image.filters
* @extends fabric.Image.filters.BaseFilter
* @see {@link fabric.Image.filters.Saturate#initialize} for constructor definition
* @see {@link fabric.Image.filters.Saturation#initialize} for constructor definition
* @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
* @example
* var filter = new fabric.Image.filters.Saturate({
* saturate: 100
* var filter = new fabric.Image.filters.Saturation({
* saturation: 100
* });
* object.filters.push(filter);
* object.applyFilters(canvas.renderAll.bind(canvas));
* object.applyFilters();
*/
filters.Saturate = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Saturate.prototype */ {
filters.Saturation = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Saturation.prototype */ {
/**
* Filter type
* @param {String} type
* @default
*/
type: 'Saturate',
type: 'Saturation',
fragmentSource: 'precision highp float;\n' +
'uniform sampler2D uTexture;\n' +
'uniform float uSaturation;\n' +
'varying vec2 vTexCoord;\n' +
'void main() {\n' +
'vec4 color = texture2D(uTexture, vTexCoord);\n' +
'float rgMax = max(color.r, color.g);\n' +
'float rgbMax = max(rgMax, color.b);\n' +
'color.r += rgbMax != color.r ? (rgbMax - color.r) * uSaturation : 0.00;\n' +
'color.g += rgbMax != color.g ? (rgbMax - color.g) * uSaturation : 0.00;\n' +
'color.b += rgbMax != color.b ? (rgbMax - color.b) * uSaturation : 0.00;\n' +
'gl_FragColor = color;\n' +
'}',
saturation: 0,
mainParameter: 'saturation',
/**
* Constructor
* @memberOf fabric.Image.filters.Saturate.prototype
* @param {Object} [options] Options object
* @param {Number} [options.saturate=0] Value to saturate the image (-100...100)
* @param {Number} [options.saturate=0] Value to saturate the image (-1...1)
*/
initialize: function(options) {
options = options || { };
this.saturate = options.saturate || 0;
},
/**
* Applies filter to canvas element
* @param {Object} canvasEl Canvas element to apply filter to
* Apply the Saturation operation to a Uint8ClampedArray representing the pixels of an image.
*
* @param {Object} options
* @param {ImageData} options.imageData The Uint8ClampedArray to be filtered.
*/
applyTo: function(canvasEl) {
var context = canvasEl.getContext('2d'),
imageData = context.getImageData(0, 0, canvasEl.width, canvasEl.height),
data = imageData.data,
max, adjust = -this.saturate * 0.01;
applyTo2d: function(options) {
if (this.saturation === 0) {
return;
}
var imageData = options.imageData,
data = imageData.data, len = data.length,
adjust = -this.saturation, i, max;
for (var i = 0, len = data.length; i < len; i += 4) {
for (i = 0; i < len; i += 4) {
max = Math.max(data[i], data[i + 1], data[i + 2]);
data[i] += max !== data[i] ? (max - data[i]) * adjust : 0;
data[i + 1] += max !== data[i + 1] ? (max - data[i + 1]) * adjust : 0;
data[i + 2] += max !== data[i + 2] ? (max - data[i + 2]) * adjust : 0;
}
context.putImageData(imageData, 0, 0);
},
/**
* Returns object representation of an instance
* @return {Object} Object representation of an instance
* Return WebGL uniform locations for this filter's shader.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {WebGLShaderProgram} program This filter's compiled shader program.
*/
toObject: function() {
return extend(this.callSuper('toObject'), {
saturate: this.saturate
});
}
getUniformLocations: function(gl, program) {
return {
uSaturation: gl.getUniformLocation(program, 'uSaturation'),
};
},
/**
* Send data from this filter to its shader program's uniforms.
*
* @param {WebGLRenderingContext} gl The GL canvas context used to compile this filter's shader.
* @param {Object} uniformLocations A map of string uniform names to WebGLUniformLocation objects
*/
sendUniformData: function(gl, uniformLocations) {
gl.uniform1f(uniformLocations.uSaturation, -this.saturation);
},
});
/**
@ -77,8 +104,8 @@
* @static
* @param {Object} object Object to create an instance from
* @param {Function} [callback] to be invoked after filter creation
* @return {fabric.Image.filters.Saturate} Instance of fabric.Image.filters.Saturate
* @return {fabric.Image.filters.Saturation} Instance of fabric.Image.filters.Saturate
*/
fabric.Image.filters.Saturate.fromObject = fabric.Image.filters.BaseFilter.fromObject;
fabric.Image.filters.Saturation.fromObject = fabric.Image.filters.BaseFilter.fromObject;
})(typeof exports !== 'undefined' ? exports : this);

View file

@ -1,67 +0,0 @@
(function(global) {
  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Sepia2 filter class
   * Applies an alternative sepia tone: each pixel's RGB channels are mixed
   * with a fixed weight matrix and each result is normalized by its weight sum.
   * @class fabric.Image.filters.Sepia2
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
   * @example
   * var filter = new fabric.Image.filters.Sepia2();
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  filters.Sepia2 = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Sepia2.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'Sepia2',

    /**
     * Applies filter to canvas element
     * @memberOf fabric.Image.filters.Sepia2.prototype
     * @param {Object} canvasEl Canvas element to apply filter to
     */
    applyTo: function(canvasEl) {
      var ctx = canvasEl.getContext('2d'),
          pixelMap = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height),
          pixels = pixelMap.data,
          total = pixels.length,
          pos, red, green, blue;

      for (pos = 0; pos < total; pos += 4) {
        red = pixels[pos];
        green = pixels[pos + 1];
        blue = pixels[pos + 2];

        // weighted channel mix; divisor is the sum of the row's weights
        pixels[pos] = (red * 0.393 + green * 0.769 + blue * 0.189 ) / 1.351;
        pixels[pos + 1] = (red * 0.349 + green * 0.686 + blue * 0.168 ) / 1.203;
        pixels[pos + 2] = (red * 0.272 + green * 0.534 + blue * 0.131 ) / 2.140;
      }

      ctx.putImageData(pixelMap, 0, 0);
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {Function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.Sepia2} Instance of fabric.Image.filters.Sepia2
   */
  fabric.Image.filters.Sepia2.fromObject = function(object, callback) {
    object = object || { };
    object.type = 'Sepia2';
    return new fabric.Image.filters.BaseFilter.fromObject(object, callback);
  };

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -1,64 +0,0 @@
(function(global) {
  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Sepia filter class
   * Converts each pixel to a luminance value and offsets the channels to
   * obtain a sepia tint.
   * @class fabric.Image.filters.Sepia
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
   * @example
   * var filter = new fabric.Image.filters.Sepia();
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  filters.Sepia = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Sepia.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'Sepia',

    /**
     * Applies filter to canvas element
     * @memberOf fabric.Image.filters.Sepia.prototype
     * @param {Object} canvasEl Canvas element to apply filter to
     */
    applyTo: function(canvasEl) {
      var ctx = canvasEl.getContext('2d'),
          pixelMap = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height),
          pixels = pixelMap.data,
          total = pixels.length,
          pos, luma;

      for (pos = 0; pos < total; pos += 4) {
        // ITU-R 601 luma weights
        luma = 0.3 * pixels[pos] + 0.59 * pixels[pos + 1] + 0.11 * pixels[pos + 2];
        // channel offsets give the sepia cast (values are clamped by the
        // Uint8ClampedArray on assignment)
        pixels[pos] = luma + 100;
        pixels[pos + 1] = luma + 50;
        pixels[pos + 2] = luma + 255;
      }

      ctx.putImageData(pixelMap, 0, 0);
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {Function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.Sepia} Instance of fabric.Image.filters.Sepia
   */
  fabric.Image.filters.Sepia.fromObject = function(object, callback) {
    object = object || { };
    object.type = 'Sepia';
    return new fabric.Image.filters.BaseFilter.fromObject(object, callback);
  };

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -1,113 +0,0 @@
(function(global) {
  'use strict';

  var fabric = global.fabric || (global.fabric = { }),
      extend = fabric.util.object.extend,
      filters = fabric.Image.filters,
      createClass = fabric.util.createClass;

  /**
   * Tint filter class
   * Adapted from <a href="https://github.com/mezzoblue/PaintbrushJS">https://github.com/mezzoblue/PaintbrushJS</a>
   * Alpha-composites a flat color over every pixel, leaving the source alpha
   * channel untouched.
   * @class fabric.Image.filters.Tint
   * @memberOf fabric.Image.filters
   * @extends fabric.Image.filters.BaseFilter
   * @see {@link fabric.Image.filters.Tint#initialize} for constructor definition
   * @see {@link http://fabricjs.com/image-filters|ImageFilters demo}
   * @example <caption>Tint filter with hex color and opacity</caption>
   * var filter = new fabric.Image.filters.Tint({
   *   color: '#3513B0',
   *   opacity: 0.5
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   * @example <caption>Tint filter with rgba color</caption>
   * var filter = new fabric.Image.filters.Tint({
   *   color: 'rgba(53, 21, 176, 0.5)'
   * });
   * object.filters.push(filter);
   * object.applyFilters(canvas.renderAll.bind(canvas));
   */
  filters.Tint = createClass(filters.BaseFilter, /** @lends fabric.Image.filters.Tint.prototype */ {

    /**
     * Filter type
     * @param {String} type
     * @default
     */
    type: 'Tint',

    /**
     * Constructor
     * @memberOf fabric.Image.filters.Tint.prototype
     * @param {Object} [options] Options object
     * @param {String} [options.color=#000000] Color to tint the image with
     * @param {Number} [options.opacity] Opacity value that controls the tint effect's transparency (0..1)
     */
    initialize: function(options) {
      options = options || { };
      this.color = options.color || '#000000';
      // when opacity is not given explicitly, reuse the alpha channel of the color
      this.opacity = typeof options.opacity !== 'undefined'
        ? options.opacity
        : new fabric.Color(this.color).getAlpha();
    },

    /**
     * Applies filter to canvas element
     * @param {Object} canvasEl Canvas element to apply filter to
     */
    applyTo: function(canvasEl) {
      var ctx = canvasEl.getContext('2d'),
          pixelMap = ctx.getImageData(0, 0, canvasEl.width, canvasEl.height),
          pixels = pixelMap.data,
          total = pixels.length,
          rgb = new fabric.Color(this.color).getSource(),
          // premultiply the tint by its opacity once, outside the loop
          tintRed = rgb[0] * this.opacity,
          tintGreen = rgb[1] * this.opacity,
          tintBlue = rgb[2] * this.opacity,
          keep = 1 - this.opacity,
          pos;

      for (pos = 0; pos < total; pos += 4) {
        // alpha compositing: tint over original pixel
        pixels[pos] = tintRed + pixels[pos] * keep;
        pixels[pos + 1] = tintGreen + pixels[pos + 1] * keep;
        pixels[pos + 2] = tintBlue + pixels[pos + 2] * keep;
      }

      ctx.putImageData(pixelMap, 0, 0);
    },

    /**
     * Returns object representation of an instance
     * @return {Object} Object representation of an instance
     */
    toObject: function() {
      return extend(this.callSuper('toObject'), {
        color: this.color,
        opacity: this.opacity
      });
    }
  });

  /**
   * Returns filter instance from an object representation
   * @static
   * @param {Object} object Object to create an instance from
   * @param {Function} [callback] to be invoked after filter creation
   * @return {fabric.Image.filters.Tint} Instance of fabric.Image.filters.Tint
   */
  fabric.Image.filters.Tint.fromObject = fabric.Image.filters.BaseFilter.fromObject;

})(typeof exports !== 'undefined' ? exports : this);

View file

@ -0,0 +1,305 @@
(function() {

  'use strict';

  /**
   * Indicate whether this filtering backend is supported by the user's browser.
   * As a side effect, when a GL context is obtainable this records the GPU's
   * maximum texture dimension on `fabric.maxTextureSize`.
   * @param {Number} tileSize check if the tileSize is supported
   * @returns {Boolean} Whether the user's browser supports WebGL.
   */
  fabric.isWebglSupported = function(tileSize) {
    // node-canvas provides no WebGL context; bail out early under Node.
    if (fabric.isLikelyNode) {
      return false;
    }
    tileSize = tileSize || fabric.WebglFilterBackend.prototype.tileSize;
    var canvas = document.createElement('canvas');
    var gl = canvas.getContext('webgl') || canvas.getContext('experimental-webgl');
    var isSupported = false;
    // eslint-disable-next-line
    if (gl) {
      // WebGL is only usable for filtering if the GPU can hold a texture
      // at least as large as the requested tile size.
      fabric.maxTextureSize = gl.getParameter(gl.MAX_TEXTURE_SIZE);
      isSupported = fabric.maxTextureSize >= tileSize;
    }
    // called as fabric.isWebglSupported(...), so `this` is the fabric namespace
    this.isSupported = isSupported;
    return isSupported;
  };

  fabric.WebglFilterBackend = WebglFilterBackend;

  /**
   * WebGL filter backend.
   * Creates a square backing canvas of `tileSize` pixels with a WebGL context
   * and probes the GPU info once at construction time.
   * @param {Object} [options]
   * @param {Number} [options.tileSize] Size in pixels of the square GL canvas.
   */
  function WebglFilterBackend(options) {
    if (options && options.tileSize) {
      this.tileSize = options.tileSize;
    }
    this.setupGLContext(this.tileSize, this.tileSize);
    this.captureGPUInfo();
  };

  WebglFilterBackend.prototype = /** @lends fabric.WebglFilterBackend.prototype */ {

    // Width/height in pixels of the backing GL canvas; also the largest
    // source dimension this backend can filter.
    tileSize: 2048,

    /**
     * Experimental. This object is a sort of repository of help layers used to avoid
     * of recreating them during frequent filtering. If you are previewing a filter with
     * a slider you probably do not want to create help layers every filter step.
     * in this object there will be appended some canvases, created once, resized sometimes
     * cleared never. Clearing is left to the developer.
     **/
    resources: {

    },

    /**
     * Setup a WebGL context suitable for filtering, and bind any needed event handlers.
     * @param {Number} width Width of the backing canvas, in pixels.
     * @param {Number} height Height of the backing canvas, in pixels.
     */
    setupGLContext: function(width, height) {
      this.dispose();
      this.createWebGLCanvas(width, height);
      // unit square used by filter shaders as the draw geometry
      // eslint-disable-next-line
      this.squareVertices = new Float32Array([0, 0, 0, 1, 1, 0, 1, 1]);
    },

    /**
     * Create a canvas element and associated WebGL context and attaches them as
     * class properties to the GLFilterBackend class.
     * @param {Number} width Width of the canvas, in pixels.
     * @param {Number} height Height of the canvas, in pixels.
     */
    createWebGLCanvas: function(width, height) {
      var canvas = fabric.util.createCanvasElement();
      canvas.width = width;
      canvas.height = height;
      // premultipliedAlpha: false keeps filter math in straight alpha
      var glOptions = { premultipliedAlpha: false },
          gl = canvas.getContext('webgl', glOptions);
      if (!gl) {
        gl = canvas.getContext('experimental-webgl', glOptions);
      }
      if (!gl) {
        // no WebGL available: leave this.canvas / this.gl unset
        return;
      }
      gl.clearColor(0, 0, 0, 0);
      // this canvas can fire webglcontextlost and webglcontextrestored
      this.canvas = canvas;
      this.gl = gl;
    },

    /**
     * Attempts to apply the requested filters to the source provided, drawing the filtered output
     * to the provided target canvas.
     *
     * @param {Array} filters The filters to apply.
     * @param {HTMLImageElement|HTMLCanvasElement} source The source to be filtered.
     * @param {Number} width The width of the source input.
     * @param {Number} height The height of the source input.
     * @param {HTMLCanvasElement} targetCanvas The destination for filtered output to be drawn.
     * @param {String|undefined} cacheKey A key used to cache resources related to the source. If
     * omitted, caching will be skipped.
     * @returns {Object} The pipeline state object that was threaded through the filter chain.
     */
    applyFilters: function(filters, source, width, height, targetCanvas, cacheKey) {
      var gl = this.gl;
      var cachedTexture;
      if (cacheKey) {
        cachedTexture = this.getCachedTexture(cacheKey, source);
      }
      // shared mutable state each filter reads and updates in turn
      var pipelineState = {
        originalWidth: source.width || source.originalWidth,
        originalHeight: source.height || source.originalHeight,
        sourceWidth: width,
        sourceHeight: height,
        context: gl,
        // when a cached texture exists, skip uploading the source pixels again
        sourceTexture: this.createTexture(gl, width, height, !cachedTexture && source),
        targetTexture: this.createTexture(gl, width, height),
        originalTexture: cachedTexture ||
          this.createTexture(gl, width, height, !cachedTexture && source),
        passes: filters.length,
        webgl: true,
        squareVertices: this.squareVertices,
        programCache: this.programCache,
        pass: 0,
      };
      var tempFbo = gl.createFramebuffer();
      gl.bindFramebuffer(gl.FRAMEBUFFER, tempFbo);
      filters.forEach(function(filter) { filter && filter.applyTo(pipelineState); });
      this.copyGLTo2D(gl.canvas, targetCanvas);
      // release per-invocation GL resources; cached textures are kept
      gl.bindTexture(gl.TEXTURE_2D, null);
      gl.deleteTexture(pipelineState.sourceTexture);
      gl.deleteTexture(pipelineState.targetTexture);
      gl.deleteFramebuffer(tempFbo);
      // copyGLTo2D flipped the 2d context vertically; restore the identity transform
      targetCanvas.getContext('2d').setTransform(1, 0, 0, 1, 0, 0);
      return pipelineState;
    },

    /**
     * The same as the applyFilter method but with additional logging of WebGL
     * errors.
     */
    applyFiltersDebug: function(filters, source, width, height, targetCanvas, cacheKey) {
      // The following code is useful when debugging a specific issue but adds ~10x slowdown.
      var gl = this.gl;
      var ret = this.applyFilters(filters, source, width, height, targetCanvas, cacheKey);
      var glError = gl.getError();
      if (glError !== gl.NO_ERROR) {
        var errorString = this.glErrorToString(gl, glError);
        var error = new Error('WebGL Error ' + errorString);
        error.glErrorCode = glError;
        throw error;
      }
      return ret;
    },

    /**
     * Translate a numeric WebGL error code into its constant name.
     * @param {WebGLRenderingContext} context The GL context the error came from.
     * @param {Number} errorCode Value returned by gl.getError().
     * @returns {String} Human-readable error name.
     */
    glErrorToString: function(context, errorCode) {
      if (!context) {
        return 'Context undefined for error code: ' + errorCode;
      }
      else if (typeof errorCode !== 'number') {
        return 'Error code is not a number';
      }
      switch (errorCode) {
        case context.NO_ERROR:
          return 'NO_ERROR';
        case context.INVALID_ENUM:
          return 'INVALID_ENUM';
        case context.INVALID_VALUE:
          return 'INVALID_VALUE';
        case context.INVALID_OPERATION:
          return 'INVALID_OPERATION';
        case context.INVALID_FRAMEBUFFER_OPERATION:
          return 'INVALID_FRAMEBUFFER_OPERATION';
        case context.OUT_OF_MEMORY:
          return 'OUT_OF_MEMORY';
        case context.CONTEXT_LOST_WEBGL:
          return 'CONTEXT_LOST_WEBGL';
        default:
          return 'UNKNOWN_ERROR';
      }
    },

    /**
     * Detach event listeners, remove references, and clean up caches.
     * NOTE(review): this only drops the JS references to the canvas/context;
     * the GL textures tracked in textureCache are not explicitly deleted here
     * (clearWebGLCaches just replaces the cache objects) — confirm intended.
     */
    dispose: function() {
      if (this.canvas) {
        this.canvas = null;
        this.gl = null;
      }
      this.clearWebGLCaches();
    },

    /**
     * Wipe out WebGL-related caches.
     */
    clearWebGLCaches: function() {
      this.programCache = {};
      this.textureCache = {};
    },

    /**
     * Create a WebGL texture object.
     *
     * Accepts specific dimensions to initialize the texture to or a source image.
     *
     * @param {WebGLRenderingContext} gl The GL context to use for creating the texture.
     * @param {Number} width The width to initialize the texture at.
     * @param {Number} height The height to initialize the texture.
     * @param {HTMLImageElement|HTMLCanvasElement} textureImageSource A source for the texture data.
     * @returns {WebGLTexture}
     */
    createTexture: function(gl, width, height, textureImageSource) {
      var texture = gl.createTexture();
      gl.bindTexture(gl.TEXTURE_2D, texture);
      // NEAREST + CLAMP_TO_EDGE: no interpolation/wrapping, works for
      // non-power-of-two sizes
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
      gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
      if (textureImageSource) {
        // upload the pixels of the given image/canvas
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, textureImageSource);
      }
      else {
        // allocate an empty texture of the requested size
        gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
      }
      return texture;
    },

    /**
     * Can be optionally used to get a texture from the cache array
     *
     * If an existing texture is not found, a new texture is created and cached.
     *
     * @param {String} uniqueId A cache key to use to find an existing texture.
     * @param {HTMLImageElement|HTMLCanvasElement} textureImageSource A source to use to create the
     * texture cache entry if one does not already exist.
     */
    getCachedTexture: function(uniqueId, textureImageSource) {
      if (this.textureCache[uniqueId]) {
        return this.textureCache[uniqueId];
      }
      else {
        var texture = this.createTexture(
          this.gl, textureImageSource.width, textureImageSource.height, textureImageSource);
        this.textureCache[uniqueId] = texture;
        return texture;
      }
    },

    /**
     * Copy an input WebGL canvas on to an output 2D canvas.
     *
     * The WebGL canvas is assumed to be upside down, with the top-left pixel of the
     * desired output image appearing in the bottom-left corner of the WebGL canvas.
     *
     * @param {HTMLCanvasElement} sourceCanvas The WebGL source canvas to copy from.
     * @param {HTMLCanvasElement} targetCanvas The 2D target canvas to copy on to.
     */
    copyGLTo2D: function(sourceCanvas, targetCanvas) {
      var ctx = targetCanvas.getContext('2d');
      ctx.translate(0, targetCanvas.height); // move it down again
      ctx.scale(1, -1); // vertical flip
      // where is my image on the big glcanvas?
      var sourceY = sourceCanvas.height - targetCanvas.height;
      ctx.drawImage(sourceCanvas, 0, sourceY, targetCanvas.width, targetCanvas.height, 0, 0,
        targetCanvas.width, targetCanvas.height);
    },

    /**
     * Clear out cached resources related to a source image that has been
     * filtered previously.
     *
     * @param {String} cacheKey The cache key provided when the source image was filtered.
     */
    evictCachesForKey: function(cacheKey) {
      if (this.textureCache[cacheKey]) {
        this.gl.deleteTexture(this.textureCache[cacheKey]);
        delete this.textureCache[cacheKey];
      }
    },

    /**
     * Attempt to extract GPU information strings from a WebGL context.
     *
     * Useful information when debugging or blacklisting specific GPUs.
     *
     * @returns {Object} A GPU info object with renderer and vendor strings.
     */
    captureGPUInfo: function() {
      if (this.gpuInfo) {
        return this.gpuInfo;
      }
      var gl = this.gl;
      // the debug_renderer_info extension may be unavailable or blocked
      var ext = gl.getExtension('WEBGL_debug_renderer_info');
      var gpuInfo = { renderer: '', vendor: '' };
      if (ext) {
        var renderer = gl.getParameter(ext.UNMASKED_RENDERER_WEBGL);
        var vendor = gl.getParameter(ext.UNMASKED_VENDOR_WEBGL);
        if (renderer) {
          gpuInfo.renderer = renderer.toLowerCase();
        }
        if (vendor) {
          gpuInfo.vendor = vendor.toLowerCase();
        }
      }
      this.gpuInfo = gpuInfo;
      return gpuInfo;
    },
  };
})();

View file

@ -95,11 +95,24 @@
*/
_lastScaleY: 1,
/**
* private
* contains last value of scaling applied by the apply filter chain
* @type Number
*/
_filterScalingX: 1,
/**
* private
* contains last value of scaling applied by the apply filter chain
* @type Number
*/
_filterScalingY: 1,
/**
* minimum scale factor under which any resizeFilter is triggered to resize the image
* 0 will disable the automatic resize. 1 will trigger automatically always.
* number bigger than 1 can be used in case we want to scale with some filter above
* the natural image dimensions
* number bigger than 1 are not implemented yet.
* @type Number
*/
minimumScaleTrigger: 0.5,
@ -121,6 +134,14 @@
*/
objectCaching: false,
/**
* key used to retrieve the texture representing this image
* since 2.0.0
* @type String
* @default
*/
cacheKey: '',
/**
* Constructor
* @param {HTMLImageElement | String} element Image element
@ -128,12 +149,12 @@
* @param {function} [callback] callback function to call after eventual filters applied.
* @return {fabric.Image} thisArg
*/
initialize: function(element, options, callback) {
initialize: function(element, options) {
options || (options = { });
this.filters = [];
this.resizeFilters = [];
this.callSuper('initialize', options);
this._initElement(element, options, callback);
this._initElement(element, options);
this.cacheKey = 'texture' + fabric.Object.__uid++;
},
/**
@ -149,36 +170,20 @@
* If filters defined they are applied to new image.
* You might need to call `canvas.renderAll` and `object.setCoords` after replacing, to render new image and update controls area.
* @param {HTMLImageElement} element
* @param {Function} [callback] Callback is invoked when all filters have been applied and new image is generated
* @param {Object} [options] Options object
* @return {fabric.Image} thisArg
* @chainable
*/
setElement: function(element, callback, options) {
var _callback, _this;
setElement: function(element, options) {
this._element = element;
this._originalElement = element;
this._initConfig(options);
if (this.resizeFilters.length === 0) {
_callback = callback;
if (this.resizeFilter) {
this.applyResizeFilters();
}
else {
_this = this;
_callback = function() {
_this.applyFilters(callback, _this.resizeFilters, _this._filteredEl || _this._originalElement, true);
};
}
if (this.filters.length !== 0) {
this.applyFilters(_callback);
this.applyFilters();
}
else if (_callback) {
_callback(this);
}
return this;
},
@ -252,22 +257,13 @@
* @return {Object} Object representation of an instance
*/
toObject: function(propertiesToInclude) {
var filters = [], resizeFilters = [],
scaleX = 1, scaleY = 1;
var filters = [];
this.filters.forEach(function(filterObj) {
if (filterObj) {
if (filterObj.type === 'Resize') {
scaleX *= filterObj.scaleX;
scaleY *= filterObj.scaleY;
}
filters.push(filterObj.toObject());
}
});
this.resizeFilters.forEach(function(filterObj) {
filterObj && resizeFilters.push(filterObj.toObject());
});
var object = extend(
this.callSuper(
'toObject',
@ -275,11 +271,12 @@
), {
src: this.getSrc(),
filters: filters,
resizeFilters: resizeFilters,
});
object.width /= scaleX;
object.height /= scaleY;
if (this.resizeFilter) {
object.resizeFilter = this.resizeFilter.toObject();
}
object.width /= this._filterScalingX;
object.height /= this._filterScalingY;
return object;
},
@ -358,8 +355,10 @@
*/
setSrc: function(src, callback, options) {
fabric.util.loadImage(src, function(img) {
return this.setElement(img, callback, options);
this.setElement(img, options);
callback(this);
}, this, options && options.crossOrigin);
return this;
},
/**
@ -370,77 +369,99 @@
return '#<fabric.Image: { src: "' + this.getSrc() + '" }>';
},
/**
* Applies filters assigned to this image (from "filters" array)
* @method applyFilters
* @param {Function} callback Callback is invoked when all filters have been applied and new image is generated
* @param {Array} filters to be applied
* @param {fabric.Image} imgElement image to filter ( default to this._element )
* @param {Boolean} forResizing
* @return {CanvasElement} canvasEl to be drawn immediately
* @chainable
*/
applyFilters: function(callback, filters, imgElement, forResizing) {
filters = filters || this.filters;
imgElement = imgElement || this._originalElement;
if (!imgElement) {
applyResizeFilters: function() {
var filter = this.resizeFilter,
retinaScaling = this.canvas ? this.canvas.getRetinaScaling() : 1,
minimumScale = this.minimumScaleTrigger,
scaleX = this.scaleX < minimumScale ? this.scaleX : 1,
scaleY = this.scaleY < minimumScale ? this.scaleY : 1;
if (scaleX * retinaScaling < 1) {
scaleX *= retinaScaling;
}
if (scaleY * retinaScaling < 1) {
scaleY *= retinaScaling;
}
if (!filter || (scaleX >= 1 && scaleY >= 1)) {
this._element = this._filteredEl;
return;
}
if (!fabric.filterBackend) {
fabric.filterBackend = fabric.initFilterBackend();
}
var elementToFilter = this._filteredEl || this._originalElement, imageData;
if (this._element === this._originalElement) {
// if the element is the same we need to create a new element
var canvasEl = fabric.util.createCanvasElement();
canvasEl.width = elementToFilter.width;
canvasEl.height = elementToFilter.height;
this._element = canvasEl;
}
var ctx = this._element.getContext('2d');
if (elementToFilter.getContext) {
imageData =
elementToFilter.getContext('2d').getImageData(0, 0, elementToFilter.width, elementToFilter.height);
}
else {
ctx.drawImage(elementToFilter, 0, 0);
imageData = ctx.getImageData(0, 0, elementToFilter.width, elementToFilter.height);
}
var options = {
imageData: imageData,
scaleX: scaleX,
scaleY: scaleY,
};
filter.applyTo2d(options);
this.width = this._element.width = options.imageData.width;
this.height = this._element.height = options.imageData.height;
ctx.putImageData(options.imageData, 0, 0);
},
var replacement = fabric.util.createImage(),
retinaScaling = this.canvas ? this.canvas.getRetinaScaling() : fabric.devicePixelRatio,
minimumScale = this.minimumScaleTrigger / retinaScaling,
_this = this, scaleX, scaleY;
/**
* Applies filters assigned to this image (from "filters" array) or from filter param
* @method applyFilters
* @param {Array} filters to be applied
* @param {Boolean} forResizing specify if the filter operation is a resize operation
* @return {thisArg} return the fabric.Image object
* @chainable
*/
applyFilters: function(filters) {
filters = filters || this.filters || [];
filters = filters.filter(function(filter) { return filter; });
if (filters.length === 0) {
this._element = imgElement;
callback && callback(this);
return imgElement;
this._element = this._originalElement;
this._filterScalingX = 1;
this._filterScalingY = 1;
return this;
}
var canvasEl = fabric.util.createCanvasElement();
canvasEl.width = imgElement.width;
canvasEl.height = imgElement.height;
canvasEl.getContext('2d').drawImage(imgElement, 0, 0, imgElement.width, imgElement.height);
var imgElement = this._originalElement,
sourceWidth = imgElement.naturalWidth || imgElement.width,
sourceHeight = imgElement.naturalHeight || imgElement.height;
filters.forEach(function(filter) {
if (!filter) {
return;
}
if (forResizing) {
scaleX = _this.scaleX < minimumScale ? _this.scaleX : 1;
scaleY = _this.scaleY < minimumScale ? _this.scaleY : 1;
if (scaleX * retinaScaling < 1) {
scaleX *= retinaScaling;
}
if (scaleY * retinaScaling < 1) {
scaleY *= retinaScaling;
}
}
else {
scaleX = filter.scaleX;
scaleY = filter.scaleY;
}
filter.applyTo(canvasEl, scaleX, scaleY);
if (!forResizing && filter.type === 'Resize') {
_this.width *= filter.scaleX;
_this.height *= filter.scaleY;
}
});
/** @ignore */
replacement.width = canvasEl.width;
replacement.height = canvasEl.height;
replacement.onload = function() {
_this._element = replacement;
!forResizing && (_this._filteredEl = replacement);
callback && callback(_this);
replacement.onload = canvasEl = null;
};
replacement.src = canvasEl.toDataURL('image/png');
return canvasEl;
if (this._element === this._originalElement) {
// if the element is the same we need to create a new element
var canvasEl = fabric.util.createCanvasElement();
canvasEl.width = imgElement.width;
canvasEl.height = imgElement.height;
this._element = canvasEl;
}
else {
// clear the existing element to get new filter data
this._element.getContext('2d').clearRect(0, 0, sourceWidth, sourceHeight);
}
if (!fabric.filterBackend) {
fabric.filterBackend = fabric.initFilterBackend();
}
fabric.filterBackend.applyFilters(
filters, this._originalElement, sourceWidth, sourceHeight, this._element, this.cacheKey);
if (this.width !== this._element.width || this.height !== this._element.height) {
this._filterScalingX = this._element.width / this.width;
this._filterScalingY = this._element.height / this.height;
this.width = this._element.width;
this.height = this._element.height;
}
return this;
},
/**
@ -463,11 +484,9 @@
if (this.isMoving === false && this.resizeFilters.length && this._needsResize()) {
this._lastScaleX = this.scaleX;
this._lastScaleY = this.scaleY;
elementToDraw = this.applyFilters(null, this.resizeFilters, this._filteredEl || this._originalElement, true);
}
else {
elementToDraw = this._element;
this.applyResizeFilters();
}
elementToDraw = this._element;
elementToDraw && ctx.drawImage(elementToDraw,
x + imageMargins.marginX,
y + imageMargins.marginY,
@ -537,8 +556,8 @@
* @param {HTMLImageElement|String} element The element representing the image
* @param {Object} [options] Options object
*/
_initElement: function(element, options, callback) {
this.setElement(fabric.util.getById(element), callback, options);
_initElement: function(element, options) {
this.setElement(fabric.util.getById(element), options);
fabric.util.addClass(this.getElement(), fabric.Image.CSS_CANVAS);
},
@ -618,9 +637,10 @@
}
fabric.Image.prototype._initFilters.call(object, object.filters, function(filters) {
object.filters = filters || [];
fabric.Image.prototype._initFilters.call(object, object.resizeFilters, function(resizeFilters) {
object.resizeFilters = resizeFilters || [];
return new fabric.Image(img, object, callback);
fabric.Image.prototype._initFilters.call(object, [object.resizeFilter], function(resizeFilters) {
object.resizeFilter = resizeFilters[0];
var image = new fabric.Image(img, object);
callback(image);
});
});
}, null, object.crossOrigin);

View file

@ -167,6 +167,8 @@
/**
* When true, canvas is scaled by devicePixelRatio for better rendering on retina screens
* @type Boolean
* @default
*/
enableRetinaScaling: true,
@ -188,6 +190,8 @@
* If One of the corner of the bounding box of the object is on the canvas
* the objects get rendered.
* @memberOf fabric.StaticCanvas.prototype
* @type Boolean
* @default
*/
skipOffscreen: true,

View file

@ -96,7 +96,6 @@
'backgroundColor': '',
'clipTo': null,
'filters': [],
'resizeFilters': [],
'fillRule': 'nonzero',
'globalCompositeOperation': 'source-over',
'transformMatrix': null,

View file

@ -41,7 +41,6 @@
'backgroundColor': '',
'clipTo': null,
'filters': [],
'resizeFilters': [],
'fillRule': 'nonzero',
'globalCompositeOperation': 'source-over',
'skewX': 0,
@ -72,10 +71,10 @@
elImage.width = width;
elImage.height = height;
}
return new fabric.Image(elImage, options, callback);
callback(new fabric.Image(elImage, options));
}
else {
return new fabric.Image(elImage, options, callback);
callback(new fabric.Image(elImage, options));
}
});
}
@ -156,14 +155,14 @@
createImageObject(function(image) {
ok(typeof image.toObject == 'function');
var filter = new fabric.Image.filters.Resize({resizeType: 'bilinear', scaleX: 0.3, scaleY: 0.3});
image.resizeFilters.push(filter);
ok(image.resizeFilters[0] instanceof fabric.Image.filters.Resize, 'should inherit from fabric.Image.filters.Resize');
image.resizeFilter = filter;
ok(image.resizeFilter instanceof fabric.Image.filters.Resize, 'should inherit from fabric.Image.filters.Resize');
var toObject = image.toObject();
deepEqual(toObject.resizeFilters[0], filter.toObject());
deepEqual(toObject.resizeFilter, filter.toObject(), 'the filter is in object form now');
fabric.Image.fromObject(toObject, function(imageFromObject) {
var filterFromObj = imageFromObject.resizeFilters[0];
deepEqual(filterFromObj, filter);
var filterFromObj = imageFromObject.resizeFilter;
ok(filterFromObj instanceof fabric.Image.filters.Resize, 'should inherit from fabric.Image.filters.Resize');
deepEqual(filterFromObj, filter, 'the filter has been restored');
equal(filterFromObj.scaleX, 0.3);
equal(filterFromObj.scaleY, 0.3);
equal(filterFromObj.resizeType, 'bilinear');
@ -174,27 +173,26 @@
asyncTest('toObject with applied resize filter', function() {
createImageObject(function(image) {
ok(typeof image.toObject == 'function');
ok(typeof image.toObject === 'function');
var filter = new fabric.Image.filters.Resize({resizeType: 'bilinear', scaleX: 0.2, scaleY: 0.2});
image.filters.push(filter);
var width = image.width, height = image.height;
ok(image.filters[0] instanceof fabric.Image.filters.Resize, 'should inherit from fabric.Image.filters.Resize');
image.applyFilters(function() {
equal(image.width, width / 5, 'width should be a fifth');
equal(image.height, height / 5, 'height should a fifth');
var toObject = image.toObject();
deepEqual(toObject.filters[0], filter.toObject());
equal(toObject.width, width, 'width is stored as before filters');
equal(toObject.height, height, 'height is stored as before filters');
fabric.Image.fromObject(toObject, function(_imageFromObject) {
var filterFromObj = _imageFromObject.filters[0];
ok(filterFromObj instanceof fabric.Image.filters.Resize, 'should inherit from fabric.Image.filters.Resize');
equal(filterFromObj.scaleY, 0.2);
equal(filterFromObj.scaleX, 0.2);
equal(_imageFromObject.width, width / 5, 'on image reload width is halved again');
equal(_imageFromObject.height, height / 5, 'on image reload width is halved again');
start();
});
image.applyFilters();
equal(image.width, Math.floor(width / 5), 'width should be a fifth');
equal(image.height, Math.floor(height / 5), 'height should a fifth');
var toObject = image.toObject();
deepEqual(toObject.filters[0], filter.toObject());
equal(toObject.width, width, 'width is stored as before filters');
equal(toObject.height, height, 'height is stored as before filters');
fabric.Image.fromObject(toObject, function(_imageFromObject) {
var filterFromObj = _imageFromObject.filters[0];
ok(filterFromObj instanceof fabric.Image.filters.Resize, 'should inherit from fabric.Image.filters.Resize');
equal(filterFromObj.scaleY, 0.2);
equal(filterFromObj.scaleX, 0.2);
equal(_imageFromObject.width, Math.floor(width / 5), 'on image reload width is halved again');
equal(_imageFromObject.height, Math.floor(height / 5), 'on image reload width is halved again');
start();
});
});
});

File diff suppressed because it is too large Load diff