-
Notifications
You must be signed in to change notification settings - Fork 14
Output grayscale images as possible #11
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Output grayscale images as possible #11
Conversation
There are tons of diffs and it is difficult to display just the diff itself. So, I will paste the result of diff --git a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m
index 2dad9db..f181166 100644
--- a/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m
+++ b/SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m
@@ -13,6 +13,44 @@
#import "avif/avif.h"
#endif
+static void FreeImageData(void *info, const void *data, size_t size) {
+ free((void *)data);
+}
+
+static CGImageRef CreateImageFromBuffer(avifImage * avif, vImage_Buffer* result) {
+ BOOL monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL;
+ BOOL hasAlpha = avif->alphaPlane != NULL;
+ BOOL usesU16 = avifImageUsesU16(avif);
+ size_t components = (monochrome ? 1 : 3) + (hasAlpha ? 1 : 0);
+
+ CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, result->data, result->rowBytes * result->height, FreeImageData);
+ CGBitmapInfo bitmapInfo = usesU16 ? kCGBitmapByteOrder16Host : kCGBitmapByteOrderDefault;
+ bitmapInfo |= hasAlpha ? kCGImageAlphaFirst : kCGImageAlphaNone;
+ // FIXME: (ledyba-z): Set appropriate color space.
+ // Currently, there is no way to get MatrixCoefficients, TransferCharacteristics and ColourPrimaries values
+ // in Sequence Header OBU.
+ // https://github.com/AOMediaCodec/libavif/blob/7d36984b2994210b/include/avif/avif.h#L149-L236
+ CGColorSpaceRef colorSpace = NULL;
+ if(monochrome){
+ colorSpace = CGColorSpaceCreateDeviceGray();
+ }else{
+ colorSpace = CGColorSpaceCreateDeviceRGB();
+ }
+ CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
+
+ size_t bitsPerComponent = usesU16 ? 16 : 8;
+ size_t bitsPerPixel = components * bitsPerComponent;
+ size_t rowBytes = result->width * components * (usesU16 ? sizeof(uint16_t) : sizeof(uint8_t));
+
+ CGImageRef imageRef = CGImageCreate(result->width, result->height, bitsPerComponent, bitsPerPixel, rowBytes, colorSpace, bitmapInfo, provider, NULL, NO, renderingIntent);
+
+ // clean up
+ CGColorSpaceRelease(colorSpace);
+ CGDataProviderRelease(provider);
+
+ return imageRef;
+}
+
static void SetupConversionInfo(avifImage * avif,
avifReformatState* state,
vImage_YpCbCrToARGBMatrix* matrix,
@@ -101,11 +139,13 @@ static void SetupConversionInfo(avifImage * avif,
}
-// Convert 8bit AVIF image into RGB888/ARGB8888 using vImage Acceralation Framework.
-static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels) {
+// Convert 8bit AVIF image into RGB888/ARGB8888/Mono/MonoA using vImage Acceralation Framework.
+static CGImageRef CreateImage8(avifImage * avif) {
vImage_Error err = kvImageNoError;
- BOOL hasAlpha = avif->alphaPlane != NULL;
- size_t components = hasAlpha ? 4 : 3;
+ BOOL const monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL;
+ BOOL const hasAlpha = avif->alphaPlane != NULL;
+ size_t const components = (monochrome ? 1 : 3) + (hasAlpha ? 1 : 0);
+ size_t const rowBytes = components * sizeof(uint8_t) * avif->width;
// setup conversion info
avifReformatState state = {0};
@@ -115,19 +155,26 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
vImage_YpCbCrToARGB convInfo = {0};
+ uint8_t* outPixels = calloc(components * rowBytes * avif->height, sizeof(uint8_t));
+ if(outPixels == NULL) {
+ return NULL;
+ }
uint8_t* argbPixels = NULL;
uint8_t* dummyCb = NULL;
uint8_t* dummyCr = NULL;
+
+ BOOL const useTempBuffer = monochrome || !hasAlpha;
- if(!hasAlpha) {
+ if(useTempBuffer) {
argbPixels = calloc(avif->width * avif->height * 4, sizeof(uint8_t));
if(!argbPixels) {
- return;
+ free(outPixels);
+ return NULL;
}
}
vImage_Buffer argbBuffer = {
- .data = hasAlpha ? outPixels : argbPixels,
+ .data = useTempBuffer ? argbPixels : outPixels,
.width = avif->width,
.height = avif->height,
.rowBytes = avif->width * 4,
@@ -150,8 +197,9 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
if(!origCb.data) { // allocate dummy data to convert monochrome images.
dummyCb = calloc(origCb.width, sizeof(uint8_t));
if(!dummyCb) {
+ free(outPixels);
free(argbPixels);
- return;
+ return NULL;
}
origCb.data = dummyCb;
origCb.rowBytes = 0;
@@ -167,9 +215,10 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
if(!origCr.data) { // allocate dummy data to convert monochrome images.
dummyCr = calloc(origCr.width, sizeof(uint8_t));
if(!dummyCr) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
- return;
+ return NULL;
}
origCr.data = dummyCr;
origCr.rowBytes = 0;
@@ -179,11 +228,12 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
uint8_t const permuteMap[4] = {0, 1, 2, 3};
switch(avif->yuvFormat) {
case AVIF_PIXEL_FORMAT_NONE:
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
NSLog(@"Invalid pixel format.");
- return;
+ return NULL;
case AVIF_PIXEL_FORMAT_YUV420:
case AVIF_PIXEL_FORMAT_YV12:
{
@@ -195,11 +245,12 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
kvImageARGB8888,
kvImageNoFlags);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
NSLog(@"Failed to setup conversion: %ld", err);
- return;
+ return NULL;
}
err = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(&origY,
@@ -211,9 +262,10 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
255,
kvImageNoFlags);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
NSLog(@"Failed to convert to ARGB8888: %ld", err);
- return;
+ return NULL;
}
break;
}
@@ -227,11 +279,12 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
kvImageARGB8888,
kvImageNoFlags);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
NSLog(@"Failed to setup conversion: %ld", err);
- return;
+ return NULL;
}
vImage_Buffer tmpBuffer = {
@@ -241,17 +294,19 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
.rowBytes = avif->width * 3,
};
if(!tmpBuffer.data) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
- return;
+ return NULL;
}
err = vImageConvert_Planar8toRGB888(&origCr, &origY, &origCb, &tmpBuffer, kvImageNoFlags);
if(err != kvImageNoError) {
NSLog(@"Failed to composite kvImage444CrYpCb8: %ld", err);
+ free(outPixels);
free(argbPixels);
free(tmpBuffer.data);
- return;
+ return NULL;
}
vImageConvert_444CrYpCb8ToARGB8888(&tmpBuffer,
&argbBuffer,
@@ -261,9 +316,10 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
kvImageNoFlags);
free(tmpBuffer.data);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
NSLog(@"Failed to convert to ARGB8888: %ld", err);
- return;
+ return NULL;
}
break;
}
@@ -277,11 +333,12 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
kvImageARGB8888,
kvImageNoFlags);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
NSLog(@"Failed to setup conversion: %ld", err);
- return;
+ return NULL;
}
const vImagePixelCount alignedWidth = (origY.width+1) & (~1);
@@ -292,10 +349,11 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
.rowBytes = alignedWidth/2 * sizeof(uint8_t),
};
if(!tmpY1.data) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
- return;
+ return NULL;
}
err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data},
(const vImage_Buffer*[]){&tmpY1},
@@ -304,11 +362,12 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
origY.rowBytes, kvImageNoFlags);
if(err != kvImageNoError) {
NSLog(@"Failed to separate first Y channel: %ld", err);
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(tmpY1.data);
- return;
+ return NULL;
}
vImage_Buffer tmpY2 = {
.data = calloc(alignedWidth/2 * origY.height, sizeof(uint8_t)),
@@ -317,11 +376,12 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
.rowBytes = alignedWidth/2 * sizeof(uint8_t),
};
if(!tmpY2.data) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(tmpY1.data);
- return;
+ return NULL;
}
tmpY2.width = origY.width/2;
err = vImageConvert_ChunkyToPlanar8((const void*[]){origY.data + 1},
@@ -332,12 +392,13 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
tmpY2.width = alignedWidth/2;
if(err != kvImageNoError) {
NSLog(@"Failed to separate second Y channel: %ld", err);
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(tmpY1.data);
free(tmpY2.data);
- return;
+ return NULL;
}
vImage_Buffer tmpBuffer = {
.data = calloc(alignedWidth * avif->height * 2, sizeof(uint8_t)),
@@ -346,12 +407,13 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
.rowBytes = alignedWidth / 2 * 4 * sizeof(uint8_t),
};
if(!tmpBuffer.data) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(tmpY1.data);
free(tmpY2.data);
- return;
+ return NULL;
}
err = vImageConvert_Planar8toARGB8888(&tmpY1, &origCb, &tmpY2, &origCr,
@@ -360,9 +422,10 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
free(tmpY2.data);
if(err != kvImageNoError) {
NSLog(@"Failed to composite kvImage422YpCbYpCr8: %ld", err);
+ free(outPixels);
free(argbPixels);
free(tmpBuffer.data);
- return;
+ return NULL;
}
tmpBuffer.width *= 2;
@@ -374,9 +437,10 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
kvImageNoFlags);
free(tmpBuffer.data);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
NSLog(@"Failed to convert to ARGB8888: %ld", err);
- return;
+ return NULL;
}
break;
}
@@ -385,38 +449,125 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
free(dummyCr);
if(hasAlpha) {
- vImage_Buffer alpha = {
+ vImage_Buffer alphaBuffer = {
.data = avif->alphaPlane,
.width = avif->width,
.height = avif->height,
.rowBytes = avif->alphaRowBytes,
};
- err = vImageOverwriteChannels_ARGB8888(&alpha, &argbBuffer, &argbBuffer, 0x8, kvImageNoFlags);
- if(err != kvImageNoError) {
- NSLog(@"Failed to overwrite alpha: %ld", err);
- return;
+ if(monochrome) {
+ vImage_Buffer outBuffer = {
+ .data = outPixels,
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * components,
+ };
+ vImage_Buffer tmpBuffer = {
+ .data = calloc(avif->width, sizeof(uint8_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = 0,
+ };
+ if(!tmpBuffer.data) {
+ free(outPixels);
+ free(argbPixels);
+ return NULL;
+ }
+ vImage_Buffer monoBuffer = {
+ .data = calloc(avif->width * avif->height, sizeof(uint8_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width,
+ };
+ if(!monoBuffer.data) {
+ free(outPixels);
+ free(argbPixels);
+ free(tmpBuffer.data);
+ return NULL;
+ }
+ err = vImageConvert_ARGB8888toPlanar8(&argbBuffer, &tmpBuffer, &tmpBuffer, &monoBuffer, &tmpBuffer, kvImageNoFlags);
+ free(argbPixels);
+ free(tmpBuffer.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ free(monoBuffer.data);
+ NSLog(@"Failed to convert ARGB to RGB: %ld", err);
+ return NULL;
+ }
+ err = vImageConvert_PlanarToChunky8((const vImage_Buffer*[]){&alphaBuffer, &monoBuffer},
+ (void*[]){outBuffer.data, outBuffer.data + 1},
+ 2 /* channelCount */, 2 /* destStrideBytes */,
+ outBuffer.width, outBuffer.height,
+ outBuffer.rowBytes, kvImageNoFlags);
+ free(monoBuffer.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ NSLog(@"Failed to combine mono and alpha: %ld", err);
+ return NULL;
+ }
+ return CreateImageFromBuffer(avif, &outBuffer);
+ } else {
+ err = vImageOverwriteChannels_ARGB8888(&alphaBuffer, &argbBuffer, &argbBuffer, 0x8, kvImageNoFlags);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ NSLog(@"Failed to overwrite alpha: %ld", err);
+ return NULL;
+ }
+ return CreateImageFromBuffer(avif, &argbBuffer);
}
} else {
- vImage_Buffer outBuffer = {
- .data = outPixels,
- .width = avif->width,
- .height = avif->height,
- .rowBytes = avif->width * components,
- };
- err = vImageConvert_ARGB8888toRGB888(&argbBuffer, &outBuffer, kvImageNoFlags);
- free(argbPixels);
- if(err != kvImageNoError) {
- NSLog(@"Failed to convert ARGB to RGB: %ld", err);
- return;
+ if(monochrome) {
+ vImage_Buffer outBuffer = {
+ .data = outPixels,
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * components,
+ };
+ vImage_Buffer tmpBuffer = {
+ .data = calloc(avif->width, sizeof(uint8_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = 0,
+ };
+ if(!tmpBuffer.data) {
+ free(outPixels);
+ free(argbPixels);
+ return NULL;
+ }
+ err = vImageConvert_ARGB8888toPlanar8(&argbBuffer, &tmpBuffer, &tmpBuffer, &outBuffer, &tmpBuffer, kvImageNoFlags);
+ free(argbPixels);
+ free(tmpBuffer.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ NSLog(@"Failed to convert ARGB to RGB: %ld", err);
+ return NULL;
+ }
+ return CreateImageFromBuffer(avif, &outBuffer);
+ } else {
+ vImage_Buffer outBuffer = {
+ .data = outPixels,
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * components,
+ };
+ err = vImageConvert_ARGB8888toRGB888(&argbBuffer, &outBuffer, kvImageNoFlags);
+ free(argbPixels);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ NSLog(@"Failed to convert ARGB to RGB: %ld", err);
+ return NULL;
+ }
+ return CreateImageFromBuffer(avif, &outBuffer);
}
}
}
-// Convert 10/12bit AVIF image into RGB16U/ARGB16U
-static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixels) {
+// Convert 10/12bit AVIF image into RGB16U/ARGB16U/Mono16U/MonoA16U
+static CGImageRef CreateImage16U(avifImage * avif) {
vImage_Error err = kvImageNoError;
- BOOL hasAlpha = avif->alphaPlane != NULL;
- size_t components = hasAlpha ? 4 : 3;
+ BOOL const monochrome = avif->yuvPlanes[1] == NULL || avif->yuvPlanes[2] == NULL;
+ BOOL const hasAlpha = avif->alphaPlane != NULL;
+ size_t const components = (monochrome ? 1 : 3) + (hasAlpha ? 1 : 0);
// setup conversion info
avifReformatState state = {0};
@@ -426,20 +577,27 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
vImage_YpCbCrToARGB convInfo = {0};
+ uint16_t* outPixels = calloc(components * avif->width * avif->height, sizeof(uint16_t));
+ if(outPixels == NULL) {
+ return NULL;
+ }
uint16_t* argbPixels = NULL;
uint16_t* dummyCb = NULL;
uint16_t* dummyCr = NULL;
uint16_t* dummyAlpha = NULL;
- if(!hasAlpha) {
+ BOOL const useTempBuffer = monochrome || !hasAlpha;
+
+ if(useTempBuffer) {
argbPixels = calloc(avif->width * avif->height * 4, sizeof(uint16_t));
if(!argbPixels) {
- return;
+ free(outPixels);
+ return NULL;
}
}
vImage_Buffer argbBuffer = {
- .data = hasAlpha ? outPixels : (uint8_t*)argbPixels,
+ .data = useTempBuffer ? argbPixels : outPixels,
.width = avif->width,
.height = avif->height,
.rowBytes = avif->width * 4 * sizeof(uint16_t),
@@ -464,17 +622,20 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
origCb.rowBytes = origCb.width * sizeof(uint16_t);
dummyCb = calloc(origCb.width, sizeof(uint16_t));
if(!dummyCb) {
+ free(outPixels);
free(argbPixels);
- return;
+ return NULL;
}
origCb.data = dummyCb;
origCb.height = 1;
// fill zero values.
err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCb, kvImageNoFlags);
if (err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
+ free(dummyCb);
NSLog(@"Failed to fill dummy Cr buffer: %ld", err);
- return;
+ return NULL;
}
origCb.rowBytes = 0;
origCb.height = origHeight;
@@ -492,19 +653,22 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
origCr.rowBytes = origCr.width * sizeof(uint16_t);
dummyCr = calloc(origCr.width, sizeof(uint16_t));
if(!dummyCr) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
- return;
+ return NULL;
}
origCr.data = dummyCr;
origCr.height = 1;
// fill zero values.
err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCr, kvImageNoFlags);
if (err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
+ free(dummyCr);
NSLog(@"Failed to fill dummy Cr buffer: %ld", err);
- return;
+ return NULL;
}
origCr.rowBytes = 0;
origCr.height = origHeight;
@@ -521,21 +685,24 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
origAlpha.rowBytes = avif->width * sizeof(uint16_t);
dummyAlpha = calloc(avif->width, sizeof(uint16_t));
if(!dummyAlpha) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
- return;
+ return NULL;
}
origAlpha.data = dummyAlpha;
origAlpha.width = avif->width;
origAlpha.height = 1;
err = vImageOverwriteChannelsWithScalar_Planar16U(0xffff, &origAlpha, kvImageNoFlags);
if (err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
+ free(dummyAlpha);
NSLog(@"Failed to fill dummy alpha buffer: %ld", err);
- return;
+ return NULL;
}
origAlpha.rowBytes = 0;
origAlpha.height = avif->height;
@@ -548,23 +715,25 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
.rowBytes = avif->width * 4 * sizeof(uint16_t),
};
if (!aYpCbCrBuffer.data) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(dummyAlpha);
- return;
+ return NULL;
}
uint8_t const permuteMap[4] = {0, 1, 2, 3};
switch(avif->yuvFormat) {
case AVIF_PIXEL_FORMAT_NONE:
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(dummyAlpha);
free(aYpCbCrBuffer.data);
NSLog(@"Invalid pixel format.");
- return;
+ return NULL;
case AVIF_PIXEL_FORMAT_YUV420:
case AVIF_PIXEL_FORMAT_YUV422:
case AVIF_PIXEL_FORMAT_YV12:
@@ -576,12 +745,13 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
.rowBytes = avif->width * 4 * sizeof(uint16_t),
};
if(!scaledCb.data) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(dummyAlpha);
free(aYpCbCrBuffer.data);
- return;
+ return NULL;
}
vImage_Buffer scaledCr = {
.data = calloc(avif->width * avif->height * 4, sizeof(uint16_t)),
@@ -590,17 +760,19 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
.rowBytes = avif->width * 4 * sizeof(uint16_t),
};
if(!scaledCr.data) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(dummyAlpha);
free(aYpCbCrBuffer.data);
free(scaledCb.data);
- return;
+ return NULL;
}
vImage_Error scaleTempBuffSize = vImageScale_Planar16U(&origCb, &scaledCb, NULL, kvImageGetTempBufferSize);
if(scaleTempBuffSize < 0) {
NSLog(@"Failed to get temp buffer size: %ld", scaleTempBuffSize);
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
@@ -608,10 +780,11 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
free(aYpCbCrBuffer.data);
free(scaledCb.data);
free(scaledCr.data);
- return;
+ return NULL;
}
void* scaleTempBuff = malloc(scaleTempBuffSize);
if(!scaleTempBuff) {
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
@@ -619,12 +792,13 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
free(aYpCbCrBuffer.data);
free(scaledCb.data);
free(scaledCr.data);
- return;
+ return NULL;
}
// upscale Cb
err = vImageScale_Planar16U(&origCb, &scaledCb, scaleTempBuff, kvImageNoFlags);
if(err != kvImageNoError) {
NSLog(@"Failed to scale Cb: %ld", err);
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
@@ -633,12 +807,13 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
free(scaledCb.data);
free(scaledCr.data);
free(scaleTempBuff);
- return;
+ return NULL;
}
// upscale Cr
err = vImageScale_Planar16U(&origCr, &scaledCr, scaleTempBuff, kvImageNoFlags);
if(err != kvImageNoError) {
NSLog(@"Failed to scale Cb: %ld", err);
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
@@ -647,7 +822,7 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
free(scaledCb.data);
free(scaledCr.data);
free(scaleTempBuff);
- return;
+ return NULL;
}
free(scaleTempBuff);
@@ -656,12 +831,13 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
free(scaledCr.data);
if(err != kvImageNoError) {
NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err);
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(dummyAlpha);
free(aYpCbCrBuffer.data);
- return;
+ return NULL;
}
break;
}
@@ -670,12 +846,13 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
err = vImageConvert_Planar16UtoARGB16U(&origAlpha, &origY, &origCb, &origCr, &aYpCbCrBuffer, kvImageNoFlags);
if(err != kvImageNoError) {
NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err);
+ free(outPixels);
free(argbPixels);
free(dummyCb);
free(dummyCr);
free(dummyAlpha);
free(aYpCbCrBuffer.data);
- return;
+ return NULL;
}
break;
}
@@ -691,10 +868,11 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
kvImageARGB16U,
kvImageNoFlags);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
free(aYpCbCrBuffer.data);
NSLog(@"Failed to setup conversion: %ld", err);
- return;
+ return NULL;
}
err = vImageConvert_444AYpCbCr16ToARGB16U(&aYpCbCrBuffer,
&argbBuffer,
@@ -703,26 +881,202 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
kvImageNoFlags);
free(aYpCbCrBuffer.data);
if(err != kvImageNoError) {
+ free(outPixels);
free(argbPixels);
NSLog(@"Failed to convert to ARGB16U: %ld", err);
- return;
+ return NULL;
}
- if(!hasAlpha) {
+ if(hasAlpha) {
+ if(monochrome){
+ vImage_Buffer tmpBuffer = {
+ .data = calloc(avif->width, sizeof(uint16_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = 0,
+ };
+ if(!tmpBuffer.data) {
+ free(outPixels);
+ free(argbPixels);
+ return NULL;
+ }
+ vImage_Buffer alphaBuffer = {
+ .data = calloc(avif->width * avif->height, sizeof(uint16_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * sizeof(uint16_t),
+ };
+ if(!alphaBuffer.data) {
+ free(outPixels);
+ free(argbPixels);
+ free(tmpBuffer.data);
+ return NULL;
+ }
+ vImage_Buffer monoBuffer = {
+ .data = calloc(avif->width * avif->height, sizeof(uint16_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * sizeof(uint16_t),
+ };
+ if(!monoBuffer.data) {
+ free(outPixels);
+ free(argbPixels);
+ free(tmpBuffer.data);
+ free(alphaBuffer.data);
+ return NULL;
+ }
+ err = vImageConvert_ARGB16UtoPlanar16U(&argbBuffer, &alphaBuffer, &tmpBuffer, &monoBuffer, &tmpBuffer, kvImageNoFlags);
+ free(argbPixels);
+ free(tmpBuffer.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ free(monoBuffer.data);
+ free(alphaBuffer.data);
+ NSLog(@"Failed to convert ARGB to Mono: %ld", err);
+ return NULL;
+ }
+ vImage_Buffer monoBuffer1 = {
+ .data = calloc(avif->width * avif->height, sizeof(uint8_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * sizeof(uint8_t),
+ };
+ if(!monoBuffer1.data) {
+ free(outPixels);
+ free(alphaBuffer.data);
+ free(monoBuffer.data);
+ return NULL;
+ }
+ vImage_Buffer monoBuffer2 = {
+ .data = calloc(avif->width * avif->height, sizeof(uint8_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * sizeof(uint8_t),
+ };
+ if(!monoBuffer2.data) {
+ free(outPixels);
+ free(alphaBuffer.data);
+ free(monoBuffer.data);
+ free(monoBuffer1.data);
+ return NULL;
+ }
+ err = vImageConvert_ChunkyToPlanar8((const void*[]){monoBuffer.data, monoBuffer.data + 1},
+ (const vImage_Buffer*[]){&monoBuffer1, &monoBuffer2},
+ 2 /* channelCount */, 2 /* src srcStrideBytes */,
+ monoBuffer.width, monoBuffer.height,
+ monoBuffer.rowBytes, kvImageNoFlags);
+ free(monoBuffer.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ free(alphaBuffer.data);
+ free(monoBuffer1.data);
+ free(monoBuffer2.data);
+ NSLog(@"Failed to split Mono16: %ld", err);
+ return NULL;
+ }
+
+ vImage_Buffer alphaBuffer1 = {
+ .data = calloc(avif->width * avif->height, sizeof(uint8_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * sizeof(uint8_t),
+ };
+ if(!alphaBuffer1.data) {
+ free(outPixels);
+ free(alphaBuffer.data);
+ free(monoBuffer1.data);
+ free(monoBuffer2.data);
+ return NULL;
+ }
+ vImage_Buffer alphaBuffer2 = {
+ .data = calloc(avif->width * avif->height, sizeof(uint8_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * sizeof(uint8_t),
+ };
+ if(!alphaBuffer2.data) {
+ free(outPixels);
+ free(alphaBuffer.data);
+ free(monoBuffer1.data);
+ free(monoBuffer2.data);
+ free(alphaBuffer1.data);
+ return NULL;
+ }
+ err = vImageConvert_ChunkyToPlanar8((const void*[]){alphaBuffer.data, alphaBuffer.data + 1},
+ (const vImage_Buffer*[]){&alphaBuffer1, &alphaBuffer2},
+ 2 /* channelCount */, 2 /* src srcStrideBytes */,
+ alphaBuffer.width, alphaBuffer.height,
+ alphaBuffer.rowBytes, kvImageNoFlags);
+ free(alphaBuffer.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ free(monoBuffer1.data);
+ free(monoBuffer2.data);
+ free(alphaBuffer1.data);
+ free(alphaBuffer2.data);
+ NSLog(@"Failed to split Mono16: %ld", err);
+ return NULL;
+ }
+
+ vImage_Buffer outBuffer = {
+ .data = outPixels,
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = avif->width * components * sizeof(uint16_t),
+ };
+ err = vImageConvert_Planar8toARGB8888(&alphaBuffer1, &alphaBuffer2, &monoBuffer1, &monoBuffer2, &outBuffer, kvImageNoFlags);
+ free(monoBuffer1.data);
+ free(monoBuffer2.data);
+ free(alphaBuffer1.data);
+ free(alphaBuffer2.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ NSLog(@"Failed to convert ARGB to MonoA: %ld", err);
+ return NULL;
+ }
+ return CreateImageFromBuffer(avif, &outBuffer);
+ }else{
+ return CreateImageFromBuffer(avif, &argbBuffer);
+ }
+ } else {
vImage_Buffer outBuffer = {
.data = outPixels,
.width = avif->width,
.height = avif->height,
.rowBytes = avif->width * components * sizeof(uint16_t),
};
- err = vImageConvert_ARGB16UtoRGB16U(&argbBuffer, &outBuffer, kvImageNoFlags);
- free(argbPixels);
- if(err != kvImageNoError) {
- NSLog(@"Failed to convert ARGB to RGB: %ld", err);
- return;
+ if(monochrome) {
+ vImage_Buffer tmpBuffer = {
+ .data = calloc(avif->width, sizeof(uint16_t)),
+ .width = avif->width,
+ .height = avif->height,
+ .rowBytes = 0,
+ };
+ if(!tmpBuffer.data) {
+ free(outPixels);
+ free(argbPixels);
+ return NULL;
+ }
+ err = vImageConvert_ARGB16UtoPlanar16U(&argbBuffer, &tmpBuffer, &tmpBuffer, &outBuffer, &tmpBuffer, kvImageNoFlags);
+ free(argbPixels);
+ free(tmpBuffer.data);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ NSLog(@"Failed to convert ARGB to Mono: %ld", err);
+ return NULL;
+ }
+ return CreateImageFromBuffer(avif, &outBuffer);
+ } else {
+ err = vImageConvert_ARGB16UtoRGB16U(&argbBuffer, &outBuffer, kvImageNoFlags);
+ free(argbPixels);
+ if(err != kvImageNoError) {
+ free(outPixels);
+ NSLog(@"Failed to convert ARGB to RGB: %ld", err);
+ return NULL;
+ }
+ return CreateImageFromBuffer(avif, &outBuffer);
}
}
-
}
static void FillRGBABufferWithAVIFImage(vImage_Buffer *red, vImage_Buffer *green, vImage_Buffer *blue, vImage_Buffer *alpha, avifImage *img) {
@@ -749,10 +1103,6 @@ static void FillRGBABufferWithAVIFImage(vImage_Buffer *red, vImage_Buffer *green
}
}
-static void FreeImageData(void *info, const void *data, size_t size) {
- free((void *)data);
-}
-
@implementation SDImageAVIFCoder
+ (instancetype)sharedCoder {
@@ -817,43 +1167,15 @@ static void FreeImageData(void *info, const void *data, size_t size) {
}
avifImage * avif = decoder->image;
- int width = avif->width;
- int height = avif->height;
- BOOL hasAlpha = avif->alphaPlane != NULL;
- BOOL usesU16 = avifImageUsesU16(avif);
- size_t components = hasAlpha ? 4 : 3;
- size_t bitsPerComponent = usesU16 ? 16 : 8;
- size_t bitsPerPixel = components * bitsPerComponent;
- size_t rowBytes = width * components * (usesU16 ? sizeof(uint16_t) : sizeof(uint8_t));
-
- uint8_t * dest = calloc(width * components * height, usesU16 ? sizeof(uint16_t) : sizeof(uint8_t));
- if (!dest) {
- avifDecoderDestroy(decoder);
- return nil;
- }
+ CGImageRef image = NULL;
// convert planar to ARGB/RGB
- if(usesU16) { // 10bit or 12bit
- ConvertAvifImagePlanar16ToRGB16U(avif, dest);
+ if(avifImageUsesU16(avif)) { // 10bit or 12bit
+ image = CreateImage16U(avif);
} else { //8bit
- ConvertAvifImagePlanar8ToRGB8(avif, dest);
+ image = CreateImage8(avif);
}
-
- CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, dest, rowBytes * height, FreeImageData);
- CGBitmapInfo bitmapInfo = usesU16 ? kCGBitmapByteOrder16Host : kCGBitmapByteOrderDefault;
- bitmapInfo |= hasAlpha ? kCGImageAlphaPremultipliedFirst : kCGImageAlphaNone;
- // FIXME: (ledyba-z): Set appropriate color space.
- // Currently, there is no way to get MatrixCoefficients, TransferCharacteristics and ColourPrimaries values
- // in Sequence Header OBU.
- // https://github.com/AOMediaCodec/libavif/blob/7d36984b2994210b/include/avif/avif.h#L149-L236
- CGColorSpaceRef colorSpaceRef = [SDImageCoderHelper colorSpaceGetDeviceRGB];
- CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault;
- CGImageRef imageRef = CGImageCreate(width, height, bitsPerComponent, bitsPerPixel, rowBytes, colorSpaceRef, bitmapInfo, provider, NULL, NO, renderingIntent);
-
- // clean up
- CGDataProviderRelease(provider);
avifDecoderDestroy(decoder);
-
- return imageRef;
+ return image;
}
// The AVIF encoding seems slow at the current time, but at least works |
After libavif is upgraded, github action will be green. |
Codecov Report
@@ Coverage Diff @@
## master #11 +/- ##
===========================================
+ Coverage 70.63% 86.19% +15.56%
===========================================
Files 4 4
Lines 824 971 +147
===========================================
+ Hits 582 837 +255
+ Misses 242 134 -108
Continue to review full report at Codecov.
|
Upgraded the CocoaPods version and released the git tag (used for Carthage/SwiftPM). I previously found that libavif 0.5.7 has a new codec (libgav1?), but it seems to be in an early stage and has no release tags. So the CocoaPods version will not provide that support.
The diff seems too huge; let me take some time to review and check. 😅 And is this about monochrome images? One behavior I found in UIKit (actually, the application-level UIImage class): it allows a monochrome-backed CGImage, but when actually rendering on a UIImageView or CALayer, it will be converted into the RGB format... I don't know whether this is the standard behavior. This can benefit performance in theory — for example, when you load AVIF images on mobile/Mac and just use them to do image processing with a framework like Core Image. That will save RAM usage, since it does not have to use the RGB format. |
c911416
to
ca7a8b1
Compare
@dreampiggy Thanks for the upgrade! (However, the GitHub action is still red... I will debug it, so please wait a moment....) libgav1 looks like a simplified decoder. Also, there is a report that says libgav1 is not so fast yet. I think it may be too early to support libgav1 today.
Yes, this is about monochrome images:
Oh, really? I didn't know that... I will look into it. How about the image cache in SDWebImage? I'm hoping that SDWebImage will be able to cache more images thanks to the reduced memory usage...
I see, thanks.
Of course! Thank you very much! |
This PR is too large, so I will separate to several PRs, to make easy to review. |
The large part is not the sample images, it's the code... I found that the 4 cases share some common code structure (like buffer checks and allocation), which looks... a little massive. How about doing some source-code-level cleanup, like using goto for error handling? (I know — goto — but it's better than the current if/else return code.) void *buffer1 = NULL;
void *buffer2 = NULL;
buffer1 = alloc(100, sizeof(uint8_t));
buffer2 = alloc(100, sizeof(uint8_t));
if (decodeFailed1(buffer1, buffer2)) {
goto fail;
}
if (decodeFailed2(buffer1, buffer2)) {
goto fail;
}
return YES;
fail:
if (buffer1) free(buffer1);
if (buffer2) free(buffer2);
return NO; |
@dreampiggy OK, I will try to rewrite codes like that. |
@dreampiggy I refactored the code with goto statements. Could you take another look? |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
LGTM...I just look through the coding style and clang static analyzer report. The calculation and vImage API usage I didn't check them one by one. But this can be checked by your complete test image cases.
Thanks to review!
Yeah, that is why I prepared so many test images (it is too complex to check by human eyes alone). |
CGImage
for monochrome AVIF images (Related to Performance - Using UIGraphicsImageRenderer on iOS 10+, save memory when image bitmap is RGB(-25%) or Grayscale(-75%) SDWebImage#2907).