@@ -69,6 +69,10 @@ enum AvifEncodingSurface {
   RGB, RGBA, AUTO
 };

+enum AvifChromaSubsampling {
+  AVIF_CHROMA_AUTO, AVIF_CHROMA_YUV_420, AVIF_CHROMA_YUV_422, AVIF_CHROMA_YUV_444, AVIF_CHROMA_YUV_400
+};
+
 struct heif_error writeHeifData(struct heif_context *ctx,
                                 const void *data,
                                 size_t size,
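
Note: the four explicit modes trade chroma resolution for size. 4:2:0 halves the chroma planes in both dimensions, 4:2:2 halves them horizontally only, 4:4:4 keeps full chroma resolution, and 4:0:0 drops chroma entirely (grayscale). A minimal sketch of the resulting chroma plane sizes, using a hypothetical helper that is not part of this patch:

    // Illustration only; chromaPlaneSize() does not exist in the project.
    #include <cstdint>
    #include <utility>

    static std::pair<uint32_t, uint32_t> chromaPlaneSize(AvifChromaSubsampling mode,
                                                         uint32_t width, uint32_t height) {
      switch (mode) {
        case AVIF_CHROMA_YUV_444: return {width, height};                     // full chroma
        case AVIF_CHROMA_YUV_422: return {(width + 1) / 2, height};           // halved horizontally
        case AVIF_CHROMA_YUV_400: return {0, 0};                              // grayscale, no chroma planes
        default:                  return {(width + 1) / 2, (height + 1) / 2}; // 4:2:0, also AUTO's default
      }
    }
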
@@ -369,7 +373,9 @@ jbyteArray encodeBitmapAvif(JNIEnv *env,
                             const int quality,
                             const int dataSpace,
                             const AvifQualityMode qualityMode,
-                            const AvifEncodingSurface surface) {
+                            const AvifEncodingSurface surface,
+                            const int speed,
+                            const AvifChromaSubsampling preferredChromaSubsampling) {
   avif::EncoderPtr encoder(avifEncoderCreate());
   if (encoder == nullptr) {
     std::string str = "Can't create encoder";
@@ -385,6 +391,8 @@ jbyteArray encodeBitmapAvif(JNIEnv *env,
     encoder->qualityAlpha = 100;
   }

+  encoder->speed = std::clamp(speed, AVIF_SPEED_SLOWEST, AVIF_SPEED_FASTEST);
+
   AndroidBitmapInfo info;
   if (AndroidBitmap_getInfo(env, bitmap, &info) < 0) {
     throwPixelsException(env);
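
Note: the clamp pins the caller-supplied speed to libavif's valid range (AVIF_SPEED_SLOWEST is 0, AVIF_SPEED_FASTEST is 10); lower values trade encode time for compression efficiency. std::clamp requires <algorithm> (C++17), which is assumed to be included elsewhere in this translation unit. A standalone sketch of the same guard:

    #include <algorithm>  // std::clamp (C++17)

    // Same guard as in the patch, with libavif's bounds written out:
    // 0 == slowest/best compression, 10 == fastest encode.
    int sanitizeSpeed(int requestedSpeed) {
      return std::clamp(requestedSpeed, 0, 10);
    }
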
@@ -417,6 +425,19 @@ jbyteArray encodeBitmapAvif(JNIEnv *env,
   AndroidBitmap_unlockPixels(env, bitmap);

   avifPixelFormat pixelFormat = avifPixelFormat::AVIF_PIXEL_FORMAT_YUV420;
+  if (preferredChromaSubsampling == AvifChromaSubsampling::AVIF_CHROMA_AUTO) {
+    if (qualityMode == AVIF_LOSELESS_MODE || quality > 93) {
+      pixelFormat = avifPixelFormat::AVIF_PIXEL_FORMAT_YUV444;
+    } else if (quality > 65) {
+      pixelFormat = avifPixelFormat::AVIF_PIXEL_FORMAT_YUV422;
+    }
+  } else if (preferredChromaSubsampling == AvifChromaSubsampling::AVIF_CHROMA_YUV_422) {
+    pixelFormat = avifPixelFormat::AVIF_PIXEL_FORMAT_YUV422;
+  } else if (preferredChromaSubsampling == AvifChromaSubsampling::AVIF_CHROMA_YUV_444) {
+    pixelFormat = avifPixelFormat::AVIF_PIXEL_FORMAT_YUV444;
+  } else if (preferredChromaSubsampling == AvifChromaSubsampling::AVIF_CHROMA_YUV_400) {
+    pixelFormat = avifPixelFormat::AVIF_PIXEL_FORMAT_YUV400;
+  }
   avif::ImagePtr image(avifImageCreate(info.width, info.height, 8, pixelFormat));

   if (image.get() == nullptr) {
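
Note: in AVIF_CHROMA_AUTO mode the subsampling is derived from the requested quality: lossless encodes and qualities above 93 keep full chroma (4:4:4), qualities above 65 use 4:2:2, and everything else stays on the 4:2:0 default; an explicit request always wins. The same decision table pulled into a hypothetical helper, purely for illustration (the patch keeps this inline):

    // Illustrative refactor only; the patch performs this selection inline.
    static avifPixelFormat resolvePixelFormat(AvifChromaSubsampling preferred,
                                              AvifQualityMode qualityMode,
                                              int quality) {
      switch (preferred) {
        case AVIF_CHROMA_YUV_420: return AVIF_PIXEL_FORMAT_YUV420;
        case AVIF_CHROMA_YUV_422: return AVIF_PIXEL_FORMAT_YUV422;
        case AVIF_CHROMA_YUV_444: return AVIF_PIXEL_FORMAT_YUV444;
        case AVIF_CHROMA_YUV_400: return AVIF_PIXEL_FORMAT_YUV400;
        default: break;  // AVIF_CHROMA_AUTO falls through to the quality heuristic
      }
      if (qualityMode == AVIF_LOSELESS_MODE || quality > 93) return AVIF_PIXEL_FORMAT_YUV444;
      if (quality > 65) return AVIF_PIXEL_FORMAT_YUV422;
      return AVIF_PIXEL_FORMAT_YUV420;
    }
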
@@ -499,20 +520,28 @@ jbyteArray encodeBitmapAvif(JNIEnv *env,
     return static_cast<jbyteArray>(nullptr);
   }

-  uint32_t uStride = image->yuvRowBytes[1];
-  uint8_t *uPlane = image->yuvPlanes[1];
-  if (uPlane == nullptr) {
-    std::string str = "Can't add U plane to an image";
-    throwException(env, str);
-    return static_cast<jbyteArray>(nullptr);
-  }
+  uint32_t uStride = 0;
+  uint8_t *uPlane = nullptr;

-  uint32_t vStride = image->yuvRowBytes[2];
-  uint8_t *vPlane = image->yuvPlanes[2];
-  if (vPlane == nullptr) {
-    std::string str = "Can't add V plane to an image";
-    throwException(env, str);
-    return static_cast<jbyteArray>(nullptr);
+  uint32_t vStride = 0;
+  uint8_t *vPlane = nullptr;
+
+  if (pixelFormat != AVIF_PIXEL_FORMAT_YUV400) {
+    uStride = image->yuvRowBytes[1];
+    uPlane = image->yuvPlanes[1];
+    if (uPlane == nullptr) {
+      std::string str = "Can't add U plane to an image";
+      throwException(env, str);
+      return static_cast<jbyteArray>(nullptr);
+    }
+
+    vStride = image->yuvRowBytes[2];
+    vPlane = image->yuvPlanes[2];
+    if (vPlane == nullptr) {
+      std::string str = "Can't add V plane to an image";
+      throwException(env, str);
+      return static_cast<jbyteArray>(nullptr);
+    }
   }

   std::vector<uint8_t> iccProfile(0);
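
Note: for 4:0:0 the allocated avifImage carries no chroma planes, so uPlane/vPlane stay null and both strides stay zero; only the other formats go through the null checks above. A small sketch of the invariant this relies on, using only public avifImage fields:

    // Sketch of the assumed invariant: a 4:0:0 image has no U/V planes,
    // every other subsampling has both once its planes are allocated.
    static bool hasChromaPlanes(const avifImage *img) {
      if (img->yuvFormat == AVIF_PIXEL_FORMAT_YUV400) {
        return false;
      }
      return img->yuvPlanes[AVIF_CHAN_U] != nullptr && img->yuvPlanes[AVIF_CHAN_V] != nullptr;
    }
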
@@ -532,18 +561,55 @@ jbyteArray encodeBitmapAvif(JNIEnv *env,
                 iccProfile,
                 matrix);

-  RgbaToYuv420(imageStore.data(),
-               stride,
-               yPlane,
-               yStride,
-               uPlane,
-               uStride,
-               vPlane,
-               vStride,
-               info.width,
-               info.height,
-               yuvRange == AVIF_RANGE_FULL ? YuvRange::Full : YuvRange::Tv,
-               matrix);
+  if (pixelFormat == AVIF_PIXEL_FORMAT_YUV420) {
+    RgbaToYuv420(imageStore.data(),
+                 stride,
+                 yPlane,
+                 yStride,
+                 uPlane,
+                 uStride,
+                 vPlane,
+                 vStride,
+                 info.width,
+                 info.height,
+                 yuvRange == AVIF_RANGE_FULL ? YuvRange::Full : YuvRange::Tv,
+                 matrix);
+  } else if (pixelFormat == AVIF_PIXEL_FORMAT_YUV422) {
+    RgbaToYuv422(imageStore.data(),
+                 stride,
+                 yPlane,
+                 yStride,
+                 uPlane,
+                 uStride,
+                 vPlane,
+                 vStride,
+                 info.width,
+                 info.height,
+                 yuvRange == AVIF_RANGE_FULL ? YuvRange::Full : YuvRange::Tv,
+                 matrix);
+  } else if (pixelFormat == AVIF_PIXEL_FORMAT_YUV444) {
+    RgbaToYuv444(imageStore.data(),
+                 stride,
+                 yPlane,
+                 yStride,
+                 uPlane,
+                 uStride,
+                 vPlane,
+                 vStride,
+                 info.width,
+                 info.height,
+                 yuvRange == AVIF_RANGE_FULL ? YuvRange::Full : YuvRange::Tv,
+                 matrix);
+  } else if (pixelFormat == AVIF_PIXEL_FORMAT_YUV400) {
+    RgbaToYuv400(imageStore.data(),
+                 stride,
+                 yPlane,
+                 yStride,
+                 info.width,
+                 info.height,
+                 yuvRange == AVIF_RANGE_FULL ? YuvRange::Full : YuvRange::Tv,
+                 matrix);
+  }

   if (nclxResult) {
     if (iccProfile.empty()) {
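
Note: the RgbaToYuv420/422/444/400 routines are the project's own RGBA-to-YUV converters and are not shown in this diff; they share the same parameter shape except the 4:0:0 path, which writes only the Y plane. As a rough idea of what a full-range BT.601 grayscale pass could look like (an assumption about the converter, not its actual implementation):

    // Assumed shape of a full-range BT.601 RGBA -> Y-only conversion; the real
    // RgbaToYuv400 may differ (range handling, matrix choice, SIMD).
    #include <cstdint>

    static void rgbaToLumaBt601(const uint8_t *rgba, uint32_t rgbaStride,
                                uint8_t *yPlane, uint32_t yStride,
                                uint32_t width, uint32_t height) {
      for (uint32_t y = 0; y < height; ++y) {
        const uint8_t *src = rgba + y * rgbaStride;
        uint8_t *dst = yPlane + y * yStride;
        for (uint32_t x = 0; x < width; ++x) {
          const uint32_t r = src[x * 4], g = src[x * 4 + 1], b = src[x * 4 + 2];
          // Y = 0.299 R + 0.587 G + 0.114 B, fixed point with rounding
          dst[x] = static_cast<uint8_t>((77 * r + 150 * g + 29 * b + 128) >> 8);
        }
      }
    }
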
@@ -570,7 +636,7 @@ jbyteArray encodeBitmapAvif(JNIEnv *env,
     return static_cast<jbyteArray>(nullptr);
   }

-  avifRWData data;
+  avifRWData data = AVIF_DATA_EMPTY;
   result = avifEncoderFinish(encoder.get(), &data);
   if (result != AVIF_RESULT_OK) {
     [[maybe_unused]] auto erelease = encoder.release();
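
Note: AVIF_DATA_EMPTY zero-initializes the avifRWData (null data pointer, zero size), so avifEncoderFinish starts from a clean buffer and error paths can call avifRWDataFree without touching an indeterminate pointer. A minimal sketch of the lifecycle, using only standard libavif calls:

    #include "avif/avif.h"

    // Finish the encode into `out`; `enc` stands in for the encoder built above.
    static avifResult finishEncode(avifEncoder *enc, avifRWData *out) {
      *out = AVIF_DATA_EMPTY;                // { NULL, 0 }: always safe to pass to avifRWDataFree
      const avifResult res = avifEncoderFinish(enc, out);
      if (res != AVIF_RESULT_OK) {
        avifRWDataFree(out);                 // harmless on an empty buffer
      }
      return res;
    }
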
@@ -598,21 +664,34 @@ Java_com_radzivon_bartoshyk_avif_coder_HeifCoder_encodeAvifImpl(JNIEnv *env,
                                                                 jint quality,
                                                                 jint dataSpace,
                                                                 jint qualityMode,
-                                                                jint surfaceMode) {
+                                                                jint surfaceMode,
+                                                                jint speed,
+                                                                jint chromaSubsampling) {
   try {
     AvifEncodingSurface surface = AvifEncodingSurface::AUTO;
     if (surfaceMode == 1) {
       surface = AvifEncodingSurface::RGB;
     } else if (surfaceMode == 2) {
       surface = AvifEncodingSurface::RGBA;
     }
+    AvifChromaSubsampling mChromaSubsampling = AvifChromaSubsampling::AVIF_CHROMA_AUTO;
+    if (chromaSubsampling == 1) {
+      mChromaSubsampling = AvifChromaSubsampling::AVIF_CHROMA_YUV_420;
+    } else if (chromaSubsampling == 2) {
+      mChromaSubsampling = AvifChromaSubsampling::AVIF_CHROMA_YUV_422;
+    } else if (chromaSubsampling == 3) {
+      mChromaSubsampling = AvifChromaSubsampling::AVIF_CHROMA_YUV_444;
+    } else if (chromaSubsampling == 4) {
+      mChromaSubsampling = AvifChromaSubsampling::AVIF_CHROMA_YUV_400;
+    }
     return encodeBitmapAvif(env,
                             thiz,
                             bitmap,
                             quality,
                             dataSpace,
                             static_cast<AvifQualityMode>(qualityMode),
-                            surface);
+                            surface, speed,
+                            mChromaSubsampling);
   } catch (std::bad_alloc &err) {
     std::string exception = "Not enough memory to encode this image";
     throwException(env, exception);
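
Note: the JNI entry point maps plain jint ordinals coming from the managed layer onto the native enums, so the numeric values must stay in sync with whatever the Kotlin/Java-side enum declares (its definition is not part of this diff). The same mapping written as a switch, purely as an illustrative alternative to the if/else chain:

    #include <jni.h>

    // Illustrative alternative only; the patch uses an if/else chain with the same values.
    static AvifChromaSubsampling mapChromaSubsampling(jint ordinal) {
      switch (ordinal) {
        case 1:  return AvifChromaSubsampling::AVIF_CHROMA_YUV_420;
        case 2:  return AvifChromaSubsampling::AVIF_CHROMA_YUV_422;
        case 3:  return AvifChromaSubsampling::AVIF_CHROMA_YUV_444;
        case 4:  return AvifChromaSubsampling::AVIF_CHROMA_YUV_400;
        default: return AvifChromaSubsampling::AVIF_CHROMA_AUTO;  // 0 or anything unexpected
      }
    }
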