In this post I'll build an iPhone app that uses OpenCV to convert an image to black and white. The original image is displayed in the center of the screen, and three buttons switch it between the original, a grayscale version, and an HSV-converted version.
Demo
Running it from Xcode on the iOS 6 iPhone Simulator looks like this.
Key points
OpenCV is distributed as opencv2.framework, which you can download and drop straight into Xcode, so that's what I used. Since the source is C++, I changed the extension of ViewController.m to ViewController.mm. The image-conversion code comes from the "OpenCV iOS – Image Processing" sample in the OpenCV documentation. One extra setup step from the same tutorials is sketched below.
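A setup detail worth noting: the OpenCV iOS tutorials also import the library from the project's precompiled prefix header, guarded for C++, so that the OpenCV headers are included before any Apple headers (the two define conflicting macros such as MIN and MAX). A minimal sketch of what the .pch might look like, assuming the default Xcode template file:

// MyApp-Prefix.pch -- OpenCV must come before UIKit/Foundation
#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#endif

#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#endif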
Sample code
#import <opencv2/opencv.hpp> // import OpenCV before Apple headers to avoid macro conflicts
#import "ViewController.h"
#import <QuartzCore/QuartzCore.h>

@interface ViewController () {
    UIView *photoFrame;
    UIImageView *photo;
}
@end
@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor colorWithRed:127.0/255.0 green:199.0/255.0 blue:175.0/255.0 alpha:1];
    [self createPhotoFrame];
    [self createPhoto];
    [self createButtons];
}
- (void)createPhotoFrame
{
    // (568 - 150) / 2 centers the 150pt-wide frame on a 568pt-wide layout
    // (568pt is the long side of a 4-inch screen, so this assumes landscape).
    float x = (568.0 - 150) / 2.0;
    photoFrame = [[UIView alloc] initWithFrame:CGRectMake(x, 100, 150, 170)];
    photoFrame.backgroundColor = [UIColor colorWithRed:1 green:179.0/255.0 blue:139.0/255.0 alpha:1];
    [self.view addSubview:photoFrame];
}
- (void)createPhoto
{
    UIImage *image = [UIImage imageNamed:@"fruits.png"];
    photo = [[UIImageView alloc] initWithImage:image];
    photo.frame = CGRectMake(10, 10, 130, 130);
    photo.backgroundColor = [UIColor whiteColor];
    [photoFrame addSubview:photo];
}
- (void)createButtons
{
    // Place the three labels at equal angles on a circle around the photo frame,
    // starting from the top (the -M_PI/2 offset).
    float dAngle = 2.0 * M_PI / 3.0;
    NSArray *words = @[@"RGB", @"HSV", @"Gray"];
    for (int i = 0; i < 3; i++) {
        float r = 150.0;
        float x = r * cos(dAngle * i - M_PI/2.0) + photoFrame.center.x;
        float y = r * sin(dAngle * i - M_PI/2.0) + photoFrame.center.y + 10;
        UILabel *btn = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 80, 80/1.618)]; // 80pt wide, golden-ratio height
        btn.center = CGPointMake(x, y);
        btn.text = [words objectAtIndex:i];
        btn.textAlignment = NSTextAlignmentCenter;
        btn.font = [UIFont fontWithName:@"Chalkduster" size:25];
        btn.layer.cornerRadius = 10;
        btn.backgroundColor = [UIColor colorWithRed:218.0/255.0 green:216.0/255.0 blue:167.0/255.0 alpha:1.0];
        [self.view addSubview:btn];
        // UILabel ignores touches by default, so enable them for the tap recognizer.
        btn.userInteractionEnabled = YES;
        UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(change:)];
        [btn addGestureRecognizer:tap];
    }
}
- (void)change:(UITapGestureRecognizer*)gr
{
    UILabel *l = (UILabel*)gr.view;
    if ([l.text isEqualToString:@"RGB"]) {
        // original image
        UIImage *image = [UIImage imageNamed:@"fruits.png"];
        photo.image = image;
    }
    else if ([l.text isEqualToString:@"Gray"]) {
        // grayscale conversion; cvtColor accepts the 4-channel RGBA Mat and ignores the alpha byte
        UIImage *image = [UIImage imageNamed:@"fruits.png"];
        cv::Mat inputMat = [self cvMatFromUIImage:image];
        cv::Mat greyMat;
        cv::cvtColor(inputMat, greyMat, CV_RGB2GRAY);
        UIImage *grayImage = [self UIImageFromCVMat:greyMat];
        photo.image = grayImage;
    }
    else if ([l.text isEqualToString:@"HSV"]) {
        // HSV conversion; the 3-channel HSV result is then displayed through an
        // RGB color space, so it shows up as a false-color image
        UIImage *image = [UIImage imageNamed:@"fruits.png"];
        cv::Mat inputMat = [self cvMatFromUIImage:image];
        cv::Mat hsvMat;
        cv::cvtColor(inputMat, hsvMat, CV_RGB2HSV);
        UIImage *hsvImage = [self UIImageFromCVMat:hsvMat];
        photo.image = hsvImage;
    }
}
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}
//
// From
// OpenCV 2.4.6.0 documentation
// OpenCV iOS – Image Processing
//
- (cv::Mat)cvMatFromUIImage:(UIImage*)image
{
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels
    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, cols, rows, 8, cvMat.step[0], colorSpace, kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    CGContextRelease(contextRef);
    // Note: colorSpace comes from a Get function, so it is not owned here and must not be released.
    return cvMat;
}
- (cv::Mat)cvMatGrayFromUIImage:(UIImage *)image
{
    // Unused in this app, but kept from the tutorial. A single-channel bitmap
    // context needs a grayscale color space, not the source image's RGB one.
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    CGFloat cols = image.size.width;
    CGFloat rows = image.size.height;
    cv::Mat cvMat(rows, cols, CV_8UC1); // 8 bits per component, 1 channel
    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, cols, rows, 8, cvMat.step[0], colorSpace, kCGImageAlphaNone | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    return cvMat;
}
- (UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
    CGColorSpaceRef colorSpace;
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray(); // single-channel (grayscale) Mat
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();  // 3- or 4-channel Mat
    }
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    CGImageRef imageRef = CGImageCreate(cvMat.cols, cvMat.rows, 8, 8 * cvMat.elemSize(), cvMat.step[0], colorSpace, kCGImageAlphaNone|kCGBitmapByteOrderDefault, provider, NULL, false, kCGRenderingIntentDefault);
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return finalImage;
}
@end