-(void)createImages
{
// Load the alpha image, which is just the same Ship.png image used in the clipping demo
NSString *imagePath = [[NSBundle mainBundle] pathForResource:@"Ship.png" ofType:nil];
UIImage *img = [UIImage imageWithContentsOfFile:imagePath];
_alphaImage = CGImageRetain(img.CGImage);
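// CGImageRef is a Core Foundation type, so it is not managed by ARC; the explicit
// retain keeps the bitmap alive after img is released and must be balanced with
// CGImageRelease when this object is done with it.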
// To show the difference with an image mask, we take the above image and process it to extract
// the alpha channel as a mask.
// Allocate a buffer with one byte of alpha per pixel for the 90 x 90 image
NSMutableData *data = [NSMutableData dataWithLength:90 * 90 * 1];
// Create an 8-bit, alpha-only bitmap context backed by that buffer (no color space is needed for alpha-only data)
CGContextRef context = CGBitmapContextCreate([data mutableBytes], 90, 90, 8, 90, NULL, (CGBitmapInfo)kCGImageAlphaOnly);
// Set the blend mode to copy to avoid any alteration of the source data
CGContextSetBlendMode(context, kCGBlendModeCopy);
// Draw the image to extract the alpha channel
CGContextDrawImage(context, CGRectMake(0.0, 0.0, 90.0, 90.0), _alphaImage);
// Now the alpha channel has been copied into our NSData object above, so discard the context and let's make an image mask.
CGContextRelease(context);
// Create a data provider for our data object (NSMutableData is toll-free bridged to CFMutableDataRef, which can be passed wherever a CFDataRef is expected)
CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData((__bridge CFMutableDataRef)data);
// Create our new mask image with the same size as the original image
_maskingImage = CGImageMaskCreate(90, 90, 8, 8, 90, dataProvider, NULL, YES);
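// Image mask samples act as an inverse alpha: 0 paints the current fill color at
// full coverage, 255 paints nothing. Because the buffer holds the ship's alpha
// values directly, drawing this mask fills exactly the areas where the original
// image was transparent, which is what makes the difference visible in the demo.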
// And release the provider.
CGDataProviderRelease(dataProvider);
}
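For reference, here is a minimal sketch of how the two images could be drawn to see the difference. The drawRect: override, the fill color, and the drawing rectangles are illustrative assumptions, not part of the original method; only _alphaImage and _maskingImage come from the code above.

- (void)drawRect:(CGRect)rect
{
CGContextRef context = UIGraphicsGetCurrentContext();
// Images drawn with CGContextDrawImage appear vertically flipped inside a UIView's
// coordinate system; flip the CTM first if the orientation matters.
CGContextSetRGBFillColor(context, 1.0, 0.0, 0.0, 1.0);
// The alpha image draws as a normal image, using its alpha channel for transparency.
CGContextDrawImage(context, CGRectMake(10.0, 10.0, 90.0, 90.0), _alphaImage);
// The image mask paints the current fill color through its (inverse alpha) samples,
// so the red appears where the original image was transparent.
CGContextDrawImage(context, CGRectMake(110.0, 10.0, 90.0, 90.0), _maskingImage);
}

The same mask could also be passed to CGContextClipToMask or CGImageCreateWithMask to use the extracted shape as a clip or to mask other images, rather than drawing it directly.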