(Note: the video has no audio.)
An attempt at hand-making a Kamen Rider Wizard Driver for my kid.
The images are hand-drawn.
The sound effects are faked up in GarageBand or the like.
The hand symbol on the belt flips over with a click.
It flashes a color and plays a sound to match each ring.
Sample code
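(The listing assumes these assets are in the app bundle: images named hand, finger, driver, frame, water, land, and hurricane, plus mp3 files named kati, driverOn, frame, water, hurricane, and land.)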
#import "ViewController.h"
#import <QuartzCore/QuartzCore.h>
#import <AVFoundation/AVFoundation.h>
// Which way the driver's lever was last flipped
typedef enum {
    DirectRight = 1,
    DirectLeft,
} DriverDirection;

// Tags for the five rings; also used as indices into the color table
typedef enum {
    DriverOn = 1,
    Frame,
    Water,
    Hurricane,
    Land
} RingType;
@interface ViewController () <CAAnimationDelegate> {
    UIImageView *hand;        // hand symbol shown on the belt
    UIImageView *finger;      // finger symbol hidden behind it
    AVAudioPlayer *player;    // plays the ring voices
    AVAudioPlayer *sePlayer;  // plays the lever click
    DriverDirection direct;   // current lever direction
    UIView *ringContainer;    // tray holding the draggable rings
    UIView *effect;           // colored flash shown on the belt
}
@end
@implementation ViewController
- (void)viewDidLoad
{
    [super viewDidLoad];
    self.view.backgroundColor = [UIColor blackColor];
    [self createDriver];
    [self createRings];
}
- (void)createDriver
{
    // Belt base
    UIView *belt = [[UIView alloc] initWithFrame:CGRectMake(0, 100, 500, 220)];
    belt.backgroundColor = [UIColor lightGrayColor];
    [self.view addSubview:belt];

    // Center circle of the driver
    UIView *circle = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 220, 220)];
    circle.center = belt.center;
    circle.backgroundColor = [UIColor grayColor];
    circle.layer.cornerRadius = 110;
    [self.view addSubview:circle];

    // Lever on the right; panning it flips the driver
    UIView *swRight = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 50, 150)];
    swRight.backgroundColor = [UIColor grayColor];
    swRight.center = CGPointMake(belt.center.x + 200, belt.center.y);
    [self.view addSubview:swRight];
    UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(driverSwitch:)];
    [swRight addGestureRecognizer:pan];

    // Hand symbol on top, finger symbol behind it (revealed when the hand swings away)
    UIImage *handImg = [UIImage imageNamed:@"hand"];
    UIImage *fingerImg = [UIImage imageNamed:@"finger"];
    hand = [[UIImageView alloc] initWithImage:handImg];
    finger = [[UIImageView alloc] initWithImage:fingerImg];
    hand.frame = CGRectMake(50, 100, hand.bounds.size.width * 0.3, hand.bounds.size.height * 0.3);
    finger.frame = CGRectMake(50, 100, finger.bounds.size.width * 0.3, finger.bounds.size.height * 0.3);
    [self.view addSubview:finger];
    [self.view addSubview:hand];
}
- (void)driverSwitch:(UIGestureRecognizer*)gr
{
    static BOOL moving;
    if (!moving) {
        // Flip the lever direction
        if (direct != DirectLeft) {
            direct = DirectLeft;
        } else {
            direct = DirectRight;
        }
        moving = YES;
        // Swing the hand symbol 90 degrees around its pivot, or back upright
        [UIView animateWithDuration:0.2 animations:^{
            if (CGAffineTransformEqualToTransform(hand.transform, CGAffineTransformIdentity)) {
                hand.transform = [self rotationAtPoint:CGPointMake(-5, -18) angle:M_PI*0.5];
            } else {
                hand.transform = CGAffineTransformIdentity;
            }
        } completion:^(BOOL finished) {
            moving = NO;
        }];
        [self performSelector:@selector(playSESound:) withObject:@"kati"]; // the lever click sound
        [self performSelector:@selector(showRings) withObject:nil afterDelay:0.5];
    }
}
// Rotation by angle `a` around point `p` (in the view's transform coordinate
// space): the composition T(p) * R(a) * T(-p) written out as a single matrix.
- (CGAffineTransform)rotationAtPoint:(CGPoint)p angle:(float)a
{
    float x = p.x;
    float y = p.y;
    return CGAffineTransformMake(cos(a), sin(a), -sin(a), cos(a), x-x*cos(a)+y*sin(a), y-x*sin(a)-y*cos(a));
}
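// The same matrix can be composed with the Core Graphics helpers instead of
// being written out by hand. A sketch (the method name is illustrative, not
// part of the original; both versions should yield an identical transform):
- (CGAffineTransform)rotationUsingHelpersAtPoint:(CGPoint)p angle:(float)a
{
    CGAffineTransform t = CGAffineTransformMakeTranslation(p.x, p.y); // move back last
    t = CGAffineTransformRotate(t, a);                                // rotate second
    return CGAffineTransformTranslate(t, -p.x, -p.y);                 // move pivot to origin first
}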
- (void)createRings
{
    ringContainer = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 1000, 100)];
    [self.view addSubview:ringContainer];

    // Driver On
    UIImage *driverImg = [UIImage imageNamed:@"driver"];
    UIImageView *driver = [[UIImageView alloc] initWithImage:driverImg];
    driver.frame = CGRectMake(100, 20, driver.bounds.size.width * 0.5, driver.bounds.size.height * 0.5);
    driver.tag = DriverOn;
    driver.backgroundColor = [UIColor clearColor];
    [ringContainer addSubview:driver];
    driver.userInteractionEnabled = YES;

    // Frame
    UIImage *frameImg = [UIImage imageNamed:@"frame"];
    UIImageView *frame = [[UIImageView alloc] initWithImage:frameImg];
    frame.frame = CGRectMake(550, 20, frame.bounds.size.width * 0.5, frame.bounds.size.height * 0.5);
    frame.tag = Frame;
    frame.backgroundColor = [UIColor clearColor];
    [ringContainer addSubview:frame];
    frame.userInteractionEnabled = YES;

    // Water
    UIImage *waterImg = [UIImage imageNamed:@"water"];
    UIImageView *water = [[UIImageView alloc] initWithImage:waterImg];
    water.frame = CGRectMake(650, 20, water.bounds.size.width * 0.5, water.bounds.size.height * 0.5);
    water.tag = Water;
    water.backgroundColor = [UIColor clearColor];
    [ringContainer addSubview:water];
    water.userInteractionEnabled = YES;

    // Land
    UIImage *landImg = [UIImage imageNamed:@"land"];
    UIImageView *land = [[UIImageView alloc] initWithImage:landImg];
    land.frame = CGRectMake(750, 20, land.bounds.size.width * 0.5, land.bounds.size.height * 0.5);
    land.tag = Land;
    land.backgroundColor = [UIColor clearColor];
    [ringContainer addSubview:land];
    land.userInteractionEnabled = YES;

    // Hurricane
    UIImage *hurricaneImg = [UIImage imageNamed:@"hurricane"];
    UIImageView *hurricane = [[UIImageView alloc] initWithImage:hurricaneImg];
    hurricane.frame = CGRectMake(850, 20, hurricane.bounds.size.width * 0.5, hurricane.bounds.size.height * 0.5);
    hurricane.tag = Hurricane;
    hurricane.backgroundColor = [UIColor clearColor];
    [ringContainer addSubview:hurricane];
    hurricane.userInteractionEnabled = YES;

    // Every ring gets a pan recognizer so it can be dragged onto the hand
    NSArray *rings = [NSArray arrayWithObjects:driver, frame, water, hurricane, land, nil];
    for (UIView *v in rings) {
        UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(touchRing:)];
        [v addGestureRecognizer:pan];
    }
}
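// The five ring blocks above differ only in image name, tag, and x position,
// so they could be collapsed into a helper. A sketch (addRingNamed:tag:x: is a
// hypothetical name, not part of the original code):
- (UIImageView *)addRingNamed:(NSString *)name tag:(RingType)tag x:(CGFloat)x
{
    UIImageView *ring = [[UIImageView alloc] initWithImage:[UIImage imageNamed:name]];
    ring.frame = CGRectMake(x, 20, ring.bounds.size.width * 0.5, ring.bounds.size.height * 0.5);
    ring.tag = tag;
    ring.backgroundColor = [UIColor clearColor];
    ring.userInteractionEnabled = YES;
    [ringContainer addSubview:ring];
    return ring;
}
// Usage: UIImageView *water = [self addRingNamed:@"water" tag:Water x:650];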
- (void)showRings
{
    if (direct == DirectRight) {
        // Lever right: slide the ring tray back to its original position
        [UIView animateWithDuration:0.5 animations:^{
            ringContainer.transform = CGAffineTransformIdentity;
        }];
    } else {
        // Lever left: slide the tray 500pt left so the element rings come into view
        [UIView animateWithDuration:0.5 animations:^{
            ringContainer.transform = CGAffineTransformMakeTranslation(-500, 0);
        }];
    }
}
- (void)touchRing:(UIGestureRecognizer*)gr
{
    static int sound;       // guards against retriggering while held over the hand
    static CGPoint origin;  // where to snap the ring back to
    if (gr.state == UIGestureRecognizerStateBegan) {
        sound = 0;
        origin = gr.view.center;
    } else if (gr.state == UIGestureRecognizerStateChanged) {
        // Reparent to the root view so the ring can be dragged outside the tray
        [self.view addSubview:gr.view];
        gr.view.center = [gr locationInView:self.view];
        if (CGRectContainsPoint(hand.frame, gr.view.center)) {
            // Ring is over the hand: play its voice and flash its color once
            if (sound == 0) {
                sound = 1;
                [self playSound:[self getSoundName:gr.view]];
                [self lightEffect:gr.view];
            }
        } else {
            sound = 0;
        }
    } else if (gr.state == UIGestureRecognizerStateEnded) {
        // Snap the ring back into the tray
        [ringContainer addSubview:gr.view];
        gr.view.center = origin;
        sound = 0;
    }
}
- (NSString*)getSoundName:(UIView*)v
{
    switch (v.tag) {
        case DriverOn:
            return @"driverOn";
        case Frame:
            return @"frame";
        case Water:
            return @"water";
        case Hurricane:
            return @"hurricane";
        case Land:
            return @"land";
        default:
            break;
    }
    return nil;
}
- (void)lightEffect:(UIView*)v
{
    // Index 0 is unused padding so the ring tags (which start at 1) index directly
    NSArray *colors = [NSArray arrayWithObjects:[UIColor whiteColor], [UIColor whiteColor], [UIColor redColor], [UIColor blueColor], [UIColor greenColor], [UIColor yellowColor], nil];
    effect = [[UIView alloc] initWithFrame:CGRectMake(0, 0, 100, 100)];
    effect.center = CGPointMake(250, 200);
    effect.layer.cornerRadius = 50;
    effect.backgroundColor = [colors objectAtIndex:v.tag];
    [self.view addSubview:effect];

    // Blink the flash ten times by animating the layer's opacity
    CALayer *opacityOutLayer = effect.layer;
    CABasicAnimation *animation = [CABasicAnimation animationWithKeyPath:@"opacity"];
    animation.fromValue = [NSNumber numberWithFloat:0.7];
    animation.toValue = [NSNumber numberWithFloat:0.0];
    animation.duration = 0.255;
    animation.repeatCount = 10;
    animation.delegate = self;
    [opacityOutLayer addAnimation:animation forKey:@"opacity"];
}
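// A similar blink could be done with UIView block animation instead of a
// CAAnimation delegate, a sketch (note autoreverse fades back in rather than
// snapping, so the look differs slightly; setAnimationRepeatCount must be
// called inside the animations block):
//     effect.alpha = 0.7;
//     [UIView animateWithDuration:0.255 delay:0
//                         options:UIViewAnimationOptionRepeat | UIViewAnimationOptionAutoreverse
//                      animations:^{
//                          [UIView setAnimationRepeatCount:10];
//                          effect.alpha = 0.0;
//                      } completion:^(BOOL finished) {
//                          [effect removeFromSuperview];
//                      }];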
// Remove the flash once the blink animation finishes
- (void)animationDidStop:(CAAnimation *)anim finished:(BOOL)flag
{
    [effect removeFromSuperview];
}
- (void)playSound:(NSString*)name
{
    // Ring voices: <name>.mp3 in the app bundle
    NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:@"mp3"];
    player = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:path] error:nil];
    [player play];
}
- (void)playSESound:(NSString*)name
{
    // Lever click: a separate player so it can overlap the ring voices
    NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:@"mp3"];
    sePlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:path] error:nil];
    [sePlayer play];
}
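// Creating a fresh AVAudioPlayer on every trigger can lag the first play; the
// players could instead be created once up front and primed. A sketch
// (loadPlayerNamed: is a hypothetical helper, not part of the original):
- (AVAudioPlayer *)loadPlayerNamed:(NSString *)name
{
    NSString *path = [[NSBundle mainBundle] pathForResource:name ofType:@"mp3"];
    AVAudioPlayer *p = [[AVAudioPlayer alloc] initWithContentsOfURL:[NSURL fileURLWithPath:path] error:nil];
    [p prepareToPlay]; // preload the buffers so -play starts immediately
    return p;
}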
- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
}
@end