Skip to content

Commit 7988a16

Browse files
committed
Avoid using alloca, because iOS non-main threads have only a 512 KB stack by default.
Thread Management https://developer.apple.com/library/archive/documentation/Cocoa/Conceptual/Multithreading/CreatingThreads/CreatingThreads.html
1 parent aba3acf commit 7988a16

1 file changed

Lines changed: 96 additions & 14 deletions

File tree

SDWebImageAVIFCoder/Classes/SDImageAVIFCoder.m

Lines changed: 96 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
// Created by lizhuoli on 2018/5/8.
66
//
77

8-
#include <alloca.h>
98
#import "SDImageAVIFCoder.h"
109
#import <Accelerate/Accelerate.h>
1110
#if __has_include(<libavif/avif.h>)
@@ -115,6 +114,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
115114
vImage_YpCbCrToARGB convInfo = {0};
116115

117116
uint8_t* argbPixels = NULL;
117+
uint8_t* dummyCb = NULL;
118+
uint8_t* dummyCr = NULL;
118119

119120
if(!hasAlpha) {
120121
argbPixels = calloc(avif->width * avif->height * 4, sizeof(uint8_t));
@@ -145,9 +146,14 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
145146
};
146147

147148
if(!origCb.data) { // allocate dummy data to convert monochrome images.
148-
origCb.data = alloca(origCb.width * sizeof(uint8_t));
149+
dummyCb = calloc(origCb.width, sizeof(uint8_t));
150+
if(!dummyCb) {
151+
free(argbPixels);
152+
return;
153+
}
154+
origCb.data = dummyCb;
149155
origCb.rowBytes = 0;
150-
memset(origCb.data, 128, origCb.width);
156+
memset(origCb.data, pixelRange.CbCr_bias, origCb.width);
151157
}
152158

153159
vImage_Buffer origCr = {
@@ -157,15 +163,23 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
157163
.height = avif->height >> state.formatInfo.chromaShiftY,
158164
};
159165
if(!origCr.data) { // allocate dummy data to convert monochrome images.
160-
origCr.data = alloca(origCr.width * sizeof(uint8_t));
166+
dummyCr = calloc(origCr.width, sizeof(uint8_t));
167+
if(!dummyCr) {
168+
free(argbPixels);
169+
free(dummyCb);
170+
return;
171+
}
172+
origCr.data = dummyCr;
161173
origCr.rowBytes = 0;
162-
memset(origCr.data, 128, origCr.width);
174+
memset(origCr.data, pixelRange.CbCr_bias, origCr.width);
163175
}
164176

165177
uint8_t const permuteMap[4] = {0, 1, 2, 3};
166178
switch(avif->yuvFormat) {
167179
case AVIF_PIXEL_FORMAT_NONE:
168180
free(argbPixels);
181+
free(dummyCb);
182+
free(dummyCr);
169183
NSLog(@"Invalid pixel format.");
170184
return;
171185
case AVIF_PIXEL_FORMAT_YUV420:
@@ -180,6 +194,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
180194
kvImageNoFlags);
181195
if(err != kvImageNoError) {
182196
free(argbPixels);
197+
free(dummyCb);
198+
free(dummyCr);
183199
NSLog(@"Failed to setup conversion: %ld", err);
184200
return;
185201
}
@@ -210,6 +226,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
210226
kvImageNoFlags);
211227
if(err != kvImageNoError) {
212228
free(argbPixels);
229+
free(dummyCb);
230+
free(dummyCr);
213231
NSLog(@"Failed to setup conversion: %ld", err);
214232
return;
215233
}
@@ -222,6 +240,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
222240
};
223241
if(!tmpBuffer.data) {
224242
free(argbPixels);
243+
free(dummyCb);
244+
free(dummyCr);
225245
return;
226246
}
227247
err = vImageConvert_Planar8toRGB888(&origCr, &origY, &origCb, &tmpBuffer, kvImageNoFlags);
@@ -267,6 +287,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
267287
};
268288
if(!tmpY1.data) {
269289
free(argbPixels);
290+
free(dummyCb);
291+
free(dummyCr);
270292
return;
271293
}
272294
vImage_Buffer tmpY2 = {
@@ -277,6 +299,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
277299
};
278300
if(!tmpY2.data) {
279301
free(argbPixels);
302+
free(dummyCb);
303+
free(dummyCr);
280304
free(tmpY1.data);
281305
return;
282306
}
@@ -288,6 +312,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
288312
if(err != kvImageNoError) {
289313
NSLog(@"Failed to separate Y channel: %ld", err);
290314
free(argbPixels);
315+
free(dummyCb);
316+
free(dummyCr);
291317
free(tmpY1.data);
292318
free(tmpY2.data);
293319
return;
@@ -300,18 +326,20 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
300326
};
301327
if(!tmpBuffer.data) {
302328
free(argbPixels);
329+
free(dummyCb);
330+
free(dummyCr);
303331
free(tmpY1.data);
304332
free(tmpY2.data);
305333
return;
306334
}
307335

308336
err = vImageConvert_Planar8toARGB8888(&tmpY1, &origCb, &tmpY2, &origCr,
309337
&tmpBuffer, kvImageNoFlags);
338+
free(tmpY1.data);
339+
free(tmpY2.data);
310340
if(err != kvImageNoError) {
311341
NSLog(@"Failed to composite kvImage422YpCbYpCr8: %ld", err);
312342
free(argbPixels);
313-
free(tmpY1.data);
314-
free(tmpY2.data);
315343
free(tmpBuffer.data);
316344
return;
317345
}
@@ -323,8 +351,6 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
323351
permuteMap,
324352
255,
325353
kvImageNoFlags);
326-
free(tmpY1.data);
327-
free(tmpY2.data);
328354
free(tmpBuffer.data);
329355
if(err != kvImageNoError) {
330356
free(argbPixels);
@@ -334,6 +360,8 @@ static void ConvertAvifImagePlanar8ToRGB8(avifImage * avif, uint8_t * outPixels)
334360
break;
335361
}
336362
}
363+
free(dummyCb);
364+
free(dummyCr);
337365

338366
if(hasAlpha) {
339367
vImage_Buffer alpha = {
@@ -378,6 +406,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
378406
vImage_YpCbCrToARGB convInfo = {0};
379407

380408
uint8_t* argbPixels = NULL;
409+
uint8_t* dummyCb = NULL;
410+
uint8_t* dummyCr = NULL;
411+
uint8_t* dummyAlpha = NULL;
381412

382413
if(!hasAlpha) {
383414
argbPixels = calloc(avif->width * avif->height * 4, sizeof(uint16_t));
@@ -400,7 +431,6 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
400431
.height = avif->height,
401432
};
402433

403-
404434
vImage_Buffer origCb = {
405435
.data = avif->yuvPlanes[AVIF_CHAN_U],
406436
.rowBytes = avif->yuvRowBytes[AVIF_CHAN_U],
@@ -411,7 +441,12 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
411441
if(!origCb.data) { // allocate dummy data to convert monochrome images.
412442
vImagePixelCount origHeight = origCb.height;
413443
origCb.rowBytes = origCb.width * sizeof(uint16_t);
414-
origCb.data = alloca(origCb.rowBytes);
444+
dummyCb = calloc(origCb.width, sizeof(uint16_t));
445+
if(!dummyCb) {
446+
free(argbPixels);
447+
return;
448+
}
449+
origCb.data = dummyCb;
415450
origCb.height = 1;
416451
// fill zero values.
417452
err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCb, kvImageNoFlags);
@@ -434,7 +469,13 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
434469
if(!origCr.data) { // allocate dummy data to convert monochrome images.
435470
vImagePixelCount origHeight = origCr.height;
436471
origCr.rowBytes = origCr.width * sizeof(uint16_t);
437-
origCr.data = alloca(origCr.rowBytes);
472+
dummyCr = calloc(origCr.width, sizeof(uint16_t));
473+
if(!dummyCr) {
474+
free(argbPixels);
475+
free(dummyCb);
476+
return;
477+
}
478+
origCr.data = dummyCr;
438479
origCr.height = 1;
439480
// fill zero values.
440481
err = vImageOverwriteChannelsWithScalar_Planar16U(pixelRange.CbCr_bias, &origCr, kvImageNoFlags);
@@ -456,12 +497,21 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
456497
} else {
457498
// allocate dummy data to convert monochrome images.
458499
origAlpha.rowBytes = avif->width * sizeof(uint16_t);
459-
origAlpha.data = alloca(origAlpha.rowBytes);
500+
dummyAlpha = calloc(avif->width, sizeof(uint16_t));
501+
if(!dummyAlpha) {
502+
free(argbPixels);
503+
free(dummyCb);
504+
free(dummyCr);
505+
return;
506+
}
507+
origAlpha.data = dummyAlpha;
460508
origAlpha.width = avif->width;
461509
origAlpha.height = 1;
462510
err = vImageOverwriteChannelsWithScalar_Planar16U(0xffff, &origAlpha, kvImageNoFlags);
463511
if (err != kvImageNoError) {
464512
free(argbPixels);
513+
free(dummyCb);
514+
free(dummyCr);
465515
NSLog(@"Failed to fill dummy alpha buffer: %ld", err);
466516
return;
467517
}
@@ -477,13 +527,19 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
477527
};
478528
if (!aYpCbCrBuffer.data) {
479529
free(argbPixels);
530+
free(dummyCb);
531+
free(dummyCr);
532+
free(dummyAlpha);
480533
return;
481534
}
482535

483536
uint8_t const permuteMap[4] = {0, 1, 2, 3};
484537
switch(avif->yuvFormat) {
485538
case AVIF_PIXEL_FORMAT_NONE:
486539
free(argbPixels);
540+
free(dummyCb);
541+
free(dummyCr);
542+
free(dummyAlpha);
487543
NSLog(@"Invalid pixel format.");
488544
return;
489545
case AVIF_PIXEL_FORMAT_YUV420:
@@ -498,6 +554,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
498554
};
499555
if(!scaledCb.data) {
500556
free(argbPixels);
557+
free(dummyCb);
558+
free(dummyCr);
559+
free(dummyAlpha);
501560
free(aYpCbCrBuffer.data);
502561
return;
503562
}
@@ -509,6 +568,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
509568
};
510569
if(!scaledCr.data) {
511570
free(argbPixels);
571+
free(dummyCb);
572+
free(dummyCr);
573+
free(dummyAlpha);
512574
free(aYpCbCrBuffer.data);
513575
free(scaledCb.data);
514576
return;
@@ -517,6 +579,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
517579
if(scaleTempBuffSize < 0) {
518580
NSLog(@"Failed to get temp buffer size: %ld", scaleTempBuffSize);
519581
free(argbPixels);
582+
free(dummyCb);
583+
free(dummyCr);
584+
free(dummyAlpha);
520585
free(aYpCbCrBuffer.data);
521586
free(scaledCb.data);
522587
free(scaledCr.data);
@@ -525,6 +590,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
525590
void* scaleTempBuff = malloc(scaleTempBuffSize);
526591
if(!scaleTempBuff) {
527592
free(argbPixels);
593+
free(dummyCb);
594+
free(dummyCr);
595+
free(dummyAlpha);
528596
free(aYpCbCrBuffer.data);
529597
free(scaledCb.data);
530598
free(scaledCr.data);
@@ -535,6 +603,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
535603
if(err != kvImageNoError) {
536604
NSLog(@"Failed to scale Cb: %ld", err);
537605
free(argbPixels);
606+
free(dummyCb);
607+
free(dummyCr);
608+
free(dummyAlpha);
538609
free(aYpCbCrBuffer.data);
539610
free(scaledCb.data);
540611
free(scaledCr.data);
@@ -546,6 +617,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
546617
if(err != kvImageNoError) {
547618
NSLog(@"Failed to scale Cb: %ld", err);
548619
free(argbPixels);
620+
free(dummyCb);
621+
free(dummyCr);
622+
free(dummyAlpha);
549623
free(aYpCbCrBuffer.data);
550624
free(scaledCb.data);
551625
free(scaledCr.data);
@@ -560,6 +634,9 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
560634
if(err != kvImageNoError) {
561635
NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err);
562636
free(argbPixels);
637+
free(dummyCb);
638+
free(dummyCr);
639+
free(dummyAlpha);
563640
free(aYpCbCrBuffer.data);
564641
return;
565642
}
@@ -571,14 +648,19 @@ static void ConvertAvifImagePlanar16ToRGB16U(avifImage * avif, uint8_t * outPixe
571648
if(err != kvImageNoError) {
572649
NSLog(@"Failed to composite kvImage444AYpCbCr16: %ld", err);
573650
free(argbPixels);
651+
free(dummyCb);
652+
free(dummyCr);
653+
free(dummyAlpha);
574654
free(aYpCbCrBuffer.data);
575655
return;
576656
}
577657
break;
578658
}
579659
}
660+
free(dummyCb);
661+
free(dummyCr);
662+
free(dummyAlpha);
580663

581-
582664
err = vImageConvert_YpCbCrToARGB_GenerateConversion(&matrix,
583665
&pixelRange,
584666
&convInfo,

0 commit comments

Comments
 (0)