Expose attention center when doing attention-based cropping. (#3164)

* Added optional output parameters to smartcrop to track the center of
attention.

* Added unit test.

* Added docs & updated changelog.

* Updated argument labels & types in documentation.
Emanuel Jöbstl 2022-11-28 15:10:16 +01:00 committed by GitHub
parent 697311bc9f
commit 25444cd927
3 changed files with 57 additions and 8 deletions
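Not part of the commit itself, but for orientation: a minimal C sketch of how a caller could read the new optional outputs through the vips_smartcrop() convenience wrapper once this change is in place; the input filename comes from argv and error handling is collapsed into vips_error_exit().

#include <stdio.h>
#include <vips/vips.h>

int
main( int argc, char **argv )
{
	VipsImage *in;
	VipsImage *out;
	int attention_x;
	int attention_y;

	if( VIPS_INIT( argv[0] ) )
		vips_error_exit( NULL );

	/* Pass pointers for the optional "attention_x" / "attention_y"
	 * output arguments to have the attention centre written back.
	 */
	if( !(in = vips_image_new_from_file( argv[1], NULL )) ||
		vips_smartcrop( in, &out, 100, 100,
			"interesting", VIPS_INTERESTING_ATTENTION,
			"attention_x", &attention_x,
			"attention_y", &attention_y,
			NULL ) )
		vips_error_exit( NULL );

	printf( "attention centre: %d, %d\n", attention_x, attention_y );

	g_object_unref( out );
	g_object_unref( in );

	return( 0 );
}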

ChangeLog

@@ -1,5 +1,6 @@
master
- expose location of interest when using attention based cropping [ejoebstl]
- version bump to 8.14
- remove autotools
- remove various obsolete scripts

libvips/conversion/smartcrop.c

@@ -14,6 +14,8 @@
* - add low and high
* 19/3/20 jcupitt
* - add all
* 26/11/22 ejoebstl
* - expose location of interest when using attention based cropping
*/
/*
@@ -71,6 +73,9 @@ typedef struct _VipsSmartcrop {
int height;
VipsInteresting interesting;
int attention_x;
int attention_y;
} VipsSmartcrop;
typedef VipsConversionClass VipsSmartcropClass;
@@ -195,7 +200,7 @@ pythagoras( VipsSmartcrop *smartcrop, VipsImage *in, VipsImage **out )
static int
vips_smartcrop_attention( VipsSmartcrop *smartcrop,
VipsImage *in, int *left, int *top )
VipsImage *in, int *left, int *top, int *attention_x, int *attention_y)
{
/* From smartcrop.js.
*/
@@ -290,15 +295,20 @@ vips_smartcrop_attention( VipsSmartcrop *smartcrop,
if( vips_sum( &t[14], &t[18], 3, NULL ) ||
vips_gaussblur( t[18], &t[19], sigma, NULL ) ||
vips_max( t[19], &max, "x", &x_pos, "y", &y_pos, NULL ) )
return( -1 );
return( -1 );
/* Transform back into image coordinates.
*/
*attention_x = x_pos / hscale;
*attention_y = y_pos / vscale;
/* Centre the crop over the max.
*/
*left = VIPS_CLIP( 0,
x_pos / hscale - smartcrop->width / 2,
*attention_x - smartcrop->width / 2,
in->Xsize - smartcrop->width );
*top = VIPS_CLIP( 0,
y_pos / vscale - smartcrop->height / 2,
*attention_y - smartcrop->height / 2,
in->Ysize - smartcrop->height );
return( 0 );
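For illustration only, a tiny sketch with made-up numbers (not taken from the commit) of what the hunk above computes: the saliency maximum found in the shrunken analysis image is scaled back into input coordinates, and the crop origin is then clamped so the crop window stays inside the input.

#include <stdio.h>
#include <vips/vips.h>

int
main( void )
{
	/* Hypothetical values: a 1000 x 800 input analysed at
	 * hscale = vscale = 0.032, saliency maximum at (6, 7) in the
	 * shrunken image, and a 100 x 100 crop requested.
	 */
	double hscale = 0.032;
	double vscale = 0.032;
	int attention_x = 6 / hscale;	/* 187 in input coordinates */
	int attention_y = 7 / vscale;	/* 218 in input coordinates */
	int left = VIPS_CLIP( 0, attention_x - 100 / 2, 1000 - 100 );
	int top = VIPS_CLIP( 0, attention_y - 100 / 2, 800 - 100 );

	printf( "centre %d, %d -> crop origin %d, %d\n",
		attention_x, attention_y, left, top );	/* 137, 168 */

	return( 0 );
}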
@@ -316,6 +326,9 @@ vips_smartcrop_build( VipsObject *object )
int left;
int top;
int attention_x = 0;
int attention_y = 0;
if( VIPS_OBJECT_CLASS( vips_smartcrop_parent_class )->
build( object ) )
return( -1 );
@@ -357,7 +370,7 @@ vips_smartcrop_build( VipsObject *object )
break;
case VIPS_INTERESTING_ATTENTION:
if( vips_smartcrop_attention( smartcrop, in, &left, &top ) )
if( vips_smartcrop_attention( smartcrop, in, &left, &top, &attention_x, &attention_y ) )
return( -1 );
break;
@@ -383,6 +396,11 @@ vips_smartcrop_build( VipsObject *object )
break;
}
g_object_set(smartcrop,
"attention_x", attention_x,
"attention_y", attention_y,
NULL);
if( vips_extract_area( smartcrop->in, &t[1],
left, top,
smartcrop->width, smartcrop->height, NULL ) ||
@@ -420,7 +438,7 @@ vips_smartcrop_class_init( VipsSmartcropClass *class )
G_STRUCT_OFFSET( VipsSmartcrop, width ),
1, VIPS_MAX_COORD, 1 );
VIPS_ARG_INT( class, "height", 5,
VIPS_ARG_INT( class, "height", 5,
_( "Height" ),
_( "Height of extract area" ),
VIPS_ARGUMENT_REQUIRED_INPUT,
@@ -434,6 +452,21 @@ vips_smartcrop_class_init( VipsSmartcropClass *class )
G_STRUCT_OFFSET( VipsSmartcrop, interesting ),
VIPS_TYPE_INTERESTING, VIPS_INTERESTING_ATTENTION );
VIPS_ARG_INT( class, "attention_x", 2,
_( "Attention x" ),
_( "Horizontal position of attention centre" ),
VIPS_ARGUMENT_OPTIONAL_OUTPUT,
G_STRUCT_OFFSET( VipsSmartcrop, attention_x ),
0, VIPS_MAX_COORD, 0 );
VIPS_ARG_INT( class, "attention_y", 3,
_( "Attention y" ),
_( "Vertical position of attention centre" ),
VIPS_ARGUMENT_OPTIONAL_OUTPUT,
G_STRUCT_OFFSET( VipsSmartcrop, attention_y ),
0, VIPS_MAX_COORD, 0 );
}
static void
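A sketch, not part of the commit, of what registering these arguments as VIPS_ARGUMENT_OPTIONAL_OUTPUT buys: they become ordinary GObject properties on the operation, the build function above fills them in with g_object_set(), and a caller (or the vips_smartcrop() varargs wrapper) can read them back afterwards with g_object_get(). Here op stands for a hypothetical pointer to an already built smartcrop operation.

/* Sketch only: read the optional outputs back as GObject properties
 * from a built operation; op is hypothetical.
 */
int attention_x;
int attention_y;

g_object_get( op,
	"attention_x", &attention_x,
	"attention_y", &attention_y,
	NULL );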
@@ -451,9 +484,11 @@ vips_smartcrop_init( VipsSmartcrop *smartcrop )
* @...: %NULL-terminated list of optional named arguments
*
* Optional arguments:
*
*
* * @interesting: #VipsInteresting to use to find interesting areas (default: #VIPS_INTERESTING_ATTENTION)
*
* * @attention_x: %gint, horizontal position of attention centre when using attention based cropping
* * @attention_y: %gint, vertical position of attention centre when using attention based cropping
*
* Crop an image down to a specified width and height by removing boring parts.
*
* Use @interesting to pick the method vips uses to decide which bits of the

test/test-suite/test_conversion.py

@@ -334,6 +334,19 @@ class TestConversion:
assert test.width == 100
assert test.height == 100
@pytest.mark.skipif(pyvips.type_find("VipsOperation", "smartcrop") == 0,
reason="no smartcrop, skipping test")
def test_smartcrop_attention(self):
test, opts = self.image.smartcrop(
100, 100,
interesting=pyvips.enums.Interesting.ATTENTION,
attention_x=True, attention_y=True)
assert test.width == 100
assert test.height == 100
assert opts["attention_x"] == 199
assert opts["attention_y"] == 234
def test_falsecolour(self):
for fmt in all_formats:
test = self.colour.cast(fmt)
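A note on the pyvips test above: pyvips requests optional output arguments by passing the argument name as a keyword with a truthy value (attention_x=True, attention_y=True here), and the call then returns the cropped image together with a dict of the requested outputs, which is why the test unpacks the result into test and opts.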