I take a white image with alpha and color it. This worked flawlessly with the code below in iOS 6. However, in iOS 7, after using this code, a pixel-wide white border remains around the edge of the non-zero alpha'd image. What has changed between the two OS's that would keep a pixel-wide white border?

-(void)changeImageColorToColor: (UIColor *)color CGFloat redC = 0.0, greenC = 0.0, blueC = 0.0; const CGFloat *components = CGColorGetComponents(color.CGColor); redC = components[0]; greenC = components[1]; blueC = components[2]; CGImageRef sourceImage = self.addedImageView.whiteColorVersion; CFDataRef theData; theData = CGDataProviderCopyData(CGImageGetDataProvider(sour ceImage)); CFMutableDataRef mutableData = CFDataCreateMutableCopy(0, 0, theData); UInt8 *pixelData = (UInt8 *) CFDataGetBytePtr(mutableData); int dataLength = CFDataGetLength(mutableData); for (int index = 0; index < dataLength; index += 4) { pixelData[index + 0] = pixelData[index + 0] * redC; pixelData[index + 1] = pixelData[index + 1] * greenC; pixelData[index + 2] = pixelData[index + 2] * blueC; } CGContextRef context; context = CGBitmapContextCreate(pixelData, CGImageGetWidth(sourceImage), CGImageGetHeight(sourceImage), 8, CGImageGetBytesPerRow(sourceImage), CGImageGetColorSpace(sourceImage), kCGImageAlphaPremultipliedLast); CGImageRef newCGImage = CGBitmapContextCreateImage(context); UIImage *newImage = [UIImage imageWithCGImage:newCGImage]; CGContextRelease(context); CFRelease(theData); CFRelease(mutableData); CGImageRelease(newCGImage); self.addedImageView.image = newImage; }

View solution