2014年4月24日木曜日

10fpsで動画を作成

// Encodes an array of UIImages into a 10 fps H.264 QuickTime movie at `path`,
// then saves the finished movie to the photo album via `library`
// (an ALAssetsLibrary declared elsewhere in this class — TODO confirm).
- (void)writeImagesAsMovie:(NSArray *)images
                    toPath:(NSString *)path
{
    NSParameterAssert(images);
    NSParameterAssert(path);
    NSAssert((images.count > 0), @"Set least one image.");
    
    NSFileManager *fileManager = [NSFileManager defaultManager];
    
    // Remove any previous output file; AVAssetWriter fails if it exists.
    if ([fileManager fileExistsAtPath:path]) {
        [fileManager removeItemAtPath:path error:nil];
    }
    
    // The movie dimensions are taken from the first image.
    CGSize size = ((UIImage *)images[0]).size;
    
    NSError *error = nil;
    
    self.videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                 fileType:AVFileTypeQuickTimeMovie
                                                    error:&error];
    
    // Check the result, not the error pointer (the error may be set even on
    // success; the nil return is the authoritative failure signal).
    if (!self.videoWriter) {
        NSLog(@"%@", [error localizedDescription]);
        return;
    }
    
    NSDictionary *outputSettings =
    @{
      AVVideoCodecKey  : AVVideoCodecH264,
      AVVideoWidthKey  : @(size.width),
      AVVideoHeightKey : @(size.height),
      };
    
    AVAssetWriterInput *writerInput = [AVAssetWriterInput
                                       assetWriterInputWithMediaType:AVMediaTypeVideo
                                       outputSettings:outputSettings];
    
    // Offline (non-realtime) encoding: let the writer throttle us instead of
    // dropping frames.  The original set YES which, combined with skipping
    // frames when readyForMoreMediaData was NO, silently lost images.
    writerInput.expectsMediaDataInRealTime = NO;
    
    [self.videoWriter addInput:writerInput];
    
    NSDictionary *sourcePixelBufferAttributes =
    @{
      (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32ARGB),
      (NSString *)kCVPixelBufferWidthKey           : @(size.width),
      (NSString *)kCVPixelBufferHeightKey          : @(size.height),
      };
    
    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:sourcePixelBufferAttributes];
    
    // Start writing.
    if (![self.videoWriter startWriting]) {
        NSLog(@"Failed to start writing.");
        return;
    }
    
    [self.videoWriter startSessionAtSourceTime:kCMTimeZero];
    
    const int32_t fps = 10;  // one image per 1/10 s frame
    int frameCount = 0;
    
    for (UIImage *image in images) {
        // Wait until the input can accept data instead of dropping the frame.
        while (!adaptor.assetWriterInput.readyForMoreMediaData) {
            [NSThread sleepForTimeInterval:0.01];
        }
        
        // Frame N is presented at N/10 s, so playback starts at time zero
        // (the original presented the first frame at 0.1 s).
        CMTime presentTime = CMTimeMake(frameCount, fps);
        
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage];
        
        if (![adaptor appendPixelBuffer:buffer withPresentationTime:presentTime]) {
            NSLog(@"Failed to append buffer. [image : %@]", image);
        }
        
        if (buffer) {
            CVBufferRelease(buffer);
        }
        
        frameCount++;
    }
    
    // Finish writing, then copy the movie into the photo album.
    [writerInput markAsFinished];
    [self.videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"Finish writing!");
        // `path` is a plain filesystem path, so a file URL is required
        // (the original +URLWithString: produces an invalid URL here).
        NSURL *url = [NSURL fileURLWithPath:path];
        
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:url])
        {
            [library writeVideoAtPathToSavedPhotosAlbum:url
                                        completionBlock:^(NSURL *assetURL, NSError *assetError)
             {
                 // Don't swallow the error silently.
                 if (assetError) {
                     NSLog(@"%@", assetError);
                 }
             }];
        }
    }];
    // NOTE: the original called CVPixelBufferPoolRelease(adaptor.pixelBufferPool)
    // here — that pool is owned by the adaptor, so releasing it was an
    // over-release; it has been removed.
}

// Renders a CGImage into a newly created 32ARGB CVPixelBuffer.
// The caller owns the returned buffer and must CVBufferRelease() it.
// Returns NULL if the buffer cannot be allocated.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = @{
                              (NSString *)kCVPixelBufferCGImageCompatibilityKey : @(YES),
                              (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @(YES),
                              };
    
    size_t width  = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    
    CVPixelBufferRef pxbuffer = NULL;
    
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          width,
                                          height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    
    // Bail out instead of locking/drawing into a NULL buffer — the original
    // ignored the result and would crash on allocation failure.
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }
    
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's real stride: CVPixelBuffer rows may be padded, so the
    // original hard-coded 4*width skewed images whose width isn't a multiple
    // of the alignment.
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 width,
                                                 height,
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    
    // The two flips below amount to a 180-degree rotation.
    // NOTE(review): presumably this compensates for the orientation of the
    // OpenGL capture fed into this method — confirm for other image sources.
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, height);
    CGContextConcatCTM(context, flipVertical);
    
    CGAffineTransform flipHorizontal = CGAffineTransformMake(-1.0, 0.0, 0.0, 1.0, width, 0.0);
    CGContextConcatCTM(context, flipHorizontal);
    
    // (The original also concatenated CGAffineTransformMakeRotation(0),
    // a no-op, which has been removed.)
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    
    return pxbuffer;
}

5フレームごとにUIImageを取り出す

// Capture loop fragment: while saveMode is on, grab one frame every 3rd call
// (saveTime cycles 0,1,2) until 50 images have been collected, then stop and
// write them to Documents/movie.mov via writeImagesAsMovie:toPath:.
// NOTE(review): the surrounding article says "every 5 frames", but the
// saveTime == 3 reset actually samples every 3rd frame — confirm the intent.
if(saveMode){
        if([images count] < 50){
            if(saveTime == 0){
                [images addObject:[self createImage]];
            }
            saveTime++;
            if (saveTime == 3)saveTime = 0;
            NSLog(@"%d",[images count]);
        }else{
            saveMode = false;
            NSString *documentPath = (NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES)[0]);
            NSString *moviePath = [documentPath stringByAppendingPathComponent:@"movie.mov"];
            
            [self writeImagesAsMovie:(NSArray *)images
                              toPath:moviePath];
        }

    }

OpenGLの描画内容を画像にする

// Frees the malloc'd pixel data once the CGDataProvider is finished with it.
static void releasePixelData(void *info, const void *data, size_t size)
{
    free((void *)data);
}

// Captures the current OpenGL framebuffer (sized to self.view.bounds) and
// returns it as a UIImage.  glReadPixels returns rows bottom-up, so the rows
// are reversed into a second buffer before building the CGImage.
-(UIImage *)createImage{
    int backingWidth = self.view.bounds.size.width;    // OpenGL buffer width
    int backingHeight = self.view.bounds.size.height;  // OpenGL buffer height
    
    NSInteger myDataLength = backingWidth * backingHeight * 4;
    GLubyte *buffer = (GLubyte *) malloc(myDataLength);
    glReadPixels(0, 0, backingWidth, backingHeight, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
    
    // Flip vertically: OpenGL's origin is bottom-left, CGImage's is top-left.
    // FIXME: this extra copy is wasteful; a flipped CGContext would avoid it.
    GLubyte *buffer2 = (GLubyte *) malloc(myDataLength);
    for(int y = 0; y < backingHeight; y++)
    {
        memcpy(&buffer2[((backingHeight-1) - y) * backingWidth * 4],
               &buffer[y * 4 * backingWidth],
               sizeof(GLubyte) * backingWidth * 4);
    }
    free(buffer);
    
    // Bug fix: the original passed the already-freed `buffer` to the provider
    // (use-after-free) and discarded the flipped `buffer2`, which also leaked.
    // Hand buffer2 over with a release callback so it is freed exactly once,
    // when CoreGraphics is done with it.
    CGDataProviderRef provider = CGDataProviderCreateWithData(NULL, buffer2, myDataLength, releasePixelData);
    CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB();
    CGImageRef imageRef = CGImageCreate(backingWidth, backingHeight, 8, 32, 4 * backingWidth,
                                        colorSpaceRef, kCGBitmapByteOrderDefault, provider,
                                        NULL, NO, kCGRenderingIntentDefault);
    UIImage *image = [UIImage imageWithCGImage:imageRef];
    
    // Balance every Create call (the original leaked all three objects).
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpaceRef);
    
    return image;
}

カメラロールに保存

// Saves `image` to the camera roll via ALAssetsLibrary, then shows an alert:
// an error alert when photo access is denied, a success alert otherwise.
// NOTE(review): the "saved" alert is shown based on authorization status
// only — it does not check whether `error` reported an actual save failure.
[library writeImageToSavedPhotosAlbum:image.CGImage
                              orientation:(ALAssetOrientation)image.imageOrientation
                          completionBlock:
     ^(NSURL *assetURL, NSError *error){
         NSLog(@"URL:%@", assetURL);
         NSLog(@"error:%@", error);
         
         ALAuthorizationStatus status = [ALAssetsLibrary authorizationStatus];
         
         if (status == ALAuthorizationStatusDenied) {
             // Photo access is restricted; tell the user how to enable it.
             UIAlertView *alertView = [[UIAlertView alloc]
                                       initWithTitle:@"エラー"
                                       message:@"写真へのアクセスが許可されていません。\n設定 > 一般 > 機能制限で許可してください。"
                                       delegate:nil
                                       cancelButtonTitle:@"OK"
                                       otherButtonTitles:nil];
             [alertView show];
         } else {
             // Saved to the photo album.
             UIAlertView *alertView = [[UIAlertView alloc]
                                       initWithTitle:@""
                                       message:@"フォトアルバムへ保存しました。"
                                       delegate:nil
                                       cancelButtonTitle:@"OK"
                                       otherButtonTitles:nil];
             [alertView show];
         }

     }];

2014年3月12日水曜日

似非マウス回転

// Set up the projection matrix from the eye's field of view and aspect ratio.
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
gluPerspective(eye.fovy, (float)eye.width/(float)eye.height, 0.1, 100000);

// Set up the model-view matrix from the eye position, target and up vector.
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
gluLookAt(eye.eye[0], eye.eye[1], eye.eye[2],
eye.center[0], eye.center[1], eye.center[2],
eye.up[0], eye.up[1], eye.up[2]);

// Mouse-drag rotation: project the previous and current mouse positions
// onto the view plane, then rotate the camera by the arc between them.
eye.calcuratePosition(beforePoint,beforeMouse[0],beforeMouse[1]);
eye.calcuratePosition(nowPoint,x,y);
eye.rotateEye(beforePoint,nowPoint);

// Projects a screen-space mouse position (x, y) onto the view plane located
// 50 units in front of the eye; the world-space point is written to `result`.
// (The [sic] spelling "calcurate" is kept because callers use it.)
void Eye::calcuratePosition(double result[3],int x,int y)
{
    // Re-center the mouse coordinates on the middle of the viewport.
    x -= this->width/2;
    y -= this->height/2;

    // World units per pixel on a plane 50 units away, derived from the
    // vertical field of view: plane height = 2 * 50 * tan(fovy / 2).
    double rate = (2.0 * 50.0 * tan(PI/180*this->fovy/2))/height;

    double rateX = x * rate;
    double rateY = y * rate;

    // Walk 50 units along the view direction, then offset within the plane
    // spanned by the camera's left and up vectors.
    // (Debug printfs from the original have been removed.)
    for (int i = 0;i < 3; i++){
        result[i] = 50.0 * this->center[i] + rateX * this->left[i] + rateY * this->up[i];
    }
}

void Eye::rotateEye(double start[3],double end[3])
{
double axis[3];
double radius;
vectorNormarise(start);
vectorNormarise(end);

exteriorProduct(start ,end, axis);
vectorNormarise(axis);
radius = acos(innerrProduct(start ,end));

printf("%lf,%lf,%lf\n",start[0],start[1],start[2]);
printf("%lf,%lf,%lf\n",end[0],end[1],end[2]);
printf("%lf,%lf,%lf,%lf\n",axis[0],axis[1],axis[2],radius);

rotateVector(axis,radius,center);
rotateVector(axis,radius,up);
rotateVector(axis,radius,left);
}
// Rotates the whole camera frame (center/up/left) by `radius` radians around
// the given axis (assumed unit length).
void Eye::rotateBy(double axis[3],double radius)
{
    rotateVector(axis, radius, center);
    rotateVector(axis, radius, up);
    rotateVector(axis, radius, left);
    // (Debug printfs removed; the original printed `up` twice and never
    // printed `left`, which looked like a copy-paste slip.)
}

/* Computes the cross product a x b and stores it in out. */
void exteriorProduct(double a[3], double b[3], double out[3])
{
    out[0] = a[1] * b[2] - a[2] * b[1];
    out[1] = a[2] * b[0] - a[0] * b[2];
    out[2] = a[0] * b[1] - a[1] * b[0];
}
/* Returns the Euclidean length of v. */
double vectorSize(double v[3])
{
    double sumSquares = v[0] * v[0];
    sumSquares += v[1] * v[1];
    sumSquares += v[2] * v[2];
    return sqrt(sumSquares);
}
// Normalizes vec1 to unit length in place.  A zero-length vector is left
// unchanged (the original divided by zero, producing NaN/inf components).
void vectorNormarise(double vec1[3])
{
    double size = vectorSize(vec1);
    if (size == 0.0){
        return;
    }
    vec1[0] /= size;
    vec1[1] /= size;
    vec1[2] /= size;
}

/* Returns the dot product of a and b. */
double innerrProduct(double a[3], double b[3])
{
    double sum = 0.0;
    for (int i = 0; i < 3; ++i) {
        sum += a[i] * b[i];
    }
    return sum;
}

// Rotates `vec` in place by `radius` radians around the unit vector `axis`,
// using the Rodrigues rotation matrix.
void rotateVector(double axis[3],double radius, double vec[3])
{
    // Hoist the trig calls: the original recomputed cos/sin up to nine times
    // while filling the matrix.
    double c = cos(radius);
    double s = sin(radius);
    double t = 1 - c;

    double matrix[3][3];
    matrix[0][0] = axis[0] * axis[0] * t + c;
    matrix[0][1] = axis[0] * axis[1] * t - axis[2] * s;
    matrix[0][2] = axis[2] * axis[0] * t + axis[1] * s;

    matrix[1][0] = axis[0] * axis[1] * t + axis[2] * s;
    matrix[1][1] = axis[1] * axis[1] * t + c;
    matrix[1][2] = axis[1] * axis[2] * t - axis[0] * s;

    matrix[2][0] = axis[2] * axis[0] * t - axis[1] * s;
    matrix[2][1] = axis[1] * axis[2] * t + axis[0] * s;
    matrix[2][2] = axis[2] * axis[2] * t + c;

    // vec = matrix * vec; copy first because vec is overwritten in place.
    double tmp[3] = { vec[0], vec[1], vec[2] };
    for (int i = 0;i < 3;i++){
        vec[i] = tmp[0] * matrix[i][0] + tmp[1] * matrix[i][1] + tmp[2] * matrix[i][2];
    }
}

三角形分割

   for(int i=0;i<this->target.size();i++){
  this->points[i]->x = this->target[i]->x;
  this->points[i]->y = this->target[i]->y;
   }
   count = this->target.size();

   for(int i=0;i<this->target.size();i++){
  this->target[i]->calcuRad(&startPoint,
  this->target[this->target[i]->prevNum],
  this->target[this->target[i]->nextNum]);
  if(this->target[i]->radian == 0.0){
  this->target[i]->life =false;
  this->target[this->target[i]->prevNum]->nextNum = this->target[i]->nextNum;
  this->target[this->target[i]->nextNum]->prevNum = this->target[i]->prevNum;
  count--;
  }
  //fprintf(hoge,"%d,%d\n",(int)this->target[i]->x,(int)this->target[i]->y);
   }
   //fclose(hoge);
   int peke;
 
   while(count>2){
  for(int i=0;i<this->target.size();i++){
  if(this->target[i]->radian > 0 && this->target[i]->life){
  Teto *teto = this->createFrom(i);
  if (this->tetoOK(teto) && !this->otherPointsInTeto(teto))
  {
  this->tetos.push_back(teto);
  this->drawTeto(teto,tetosImg);
  this->target[this->target[i]->nextNum]->prevNum = this->target[i]->prevNum;
  this->target[this->target[i]->nextNum]->calcuRad(&startPoint,
this->target[this->target[this->target[i]->nextNum]->prevNum],
this->target[this->target[this->target[i]->nextNum]->nextNum]);
  this->target[this->target[i]->prevNum]->nextNum = this->target[i]->nextNum;
  this->target[this->target[i]->prevNum]->calcuRad(&startPoint,
this->target[this->target[this->target[i]->prevNum]->prevNum],
this->target[this->target[this->target[i]->prevNum]->nextNum]);
  this->target[i]->life = false;
  count--;
  char c = (char)cvWaitKey ( 1 );
  cvShowImage("tetosImg",tetosImg);
  i++;
  }
  }
  }
   }

STLファイルの読み込み

/*fp = fopen("cube-binary.stl", "r");

Solid *solid = new Solid();
this->solids.push_back(solid);
fread(solid->name, sizeof(char), 80, fp);
printf("%s\n",solid->name);

unsigned int faceNum ;
fread(&faceNum, sizeof(unsigned int), 1, fp);
printf("%d\n",faceNum);

for (int i=0;i<faceNum;i++){
float tmp;
Face *face = new Face();
solid->faces.push_back(face);
for (int j=0;j<3;j++){
fread(&tmp, sizeof(float), 1, fp);
printf("%lf\n",tmp);
face->normal[j] = tmp;
}
for (int k=0;k<3;k++){
for (int j=0;j<3;j++){
fread(&tmp, sizeof(float), 1, fp);
printf("%lf\n",tmp);
face->vertex[j].push_back(tmp);
}
}
char tmpChar[2];
fread(tmpChar, sizeof(char), 2, fp);

}*/
//fp = fopen("cube-ascii.stl", "r");
fp = fopen("kyata001.stl", "r");

while (fscanf(fp,"%s", &s) != -1) {
printf("%s,", s);
if (strcmp(s,"solid") == 0){
Solid *solid = new Solid();
this->solids.push_back(solid);
if (fscanf(fp,"%s", &solid->name) != -1){
//printf("%s,", solid->name);
while (fscanf(fp,"%s", &s) != -1 && fscanf(fp,"%s", &s2) != -1) {
//printf("%s,%s,", s,s2);
if (strcmp(s,"facet") == 0 && strcmp(s2,"normal") == 0){
Face *face = new Face();
for (int i = 0;i<3;i++){
fscanf(fp,"%lf", &face->normal[i]);
}
if (fscanf(fp,"%s", &s) != -1 && fscanf(fp,"%s", &s2) != -1) {
//printf("%s,%s,", s,s2);
if (strcmp(s,"outer") == 0 && strcmp(s2,"loop") == 0){
bool flag = true;
while (fscanf(fp,"%s", &s) != -1) {
//printf("%s,", s);
if (strcmp(s,"vertex") == 0 ){
for (int i = 0;i<3;i++){
double vertex;
fscanf(fp,"%lf", &vertex);
face->vertex[i].push_back(vertex);
}
}else if (strcmp(s,"endloop") == 0){
//printf("OKloop,");
solid->faces.push_back(face);
break;
}
}
}
}
if (fscanf(fp,"%s", &s) != -1) {
if (strcmp(s,"endfacet") == 0 ){
//printf("OKface");
}
}
}
}
}

}
}

2014年3月4日火曜日

たき火

14,3;13,3;14,3;16,3;14,3;13,3;14,3;16,3;18,3;17,3;16,3;16,3;17,12;16,3;14,3;14,3;14,3;13,3;11,3;11,3;11,3;14,3;13,3;16,3;17,3;18,12;17,9;16,3;15,3;16,3;17,6;16,3;14,3;14,3;16,3;14,12;11,3;11,3;11,3;13,3;14,3;14,3;11,3;11,3;16,3;16,3;17,3;17,3;18,12;

http://ototama.com/music/folksong/score.php?id=152

メダカの学校

18,6;17,6;16,9;16,3;17,6;17,3;18,3;17,12;16,6;14,6;13,9;13,3;14,18;-1,6;13,9;13,3;13,3;11,3;13,3;14,3;13,3;14,3;16,3;16,3;16,6;-1,6;13,9;13,3;13,3;11,3;13,3;14,3;13,3;14,3;16,3;16,3;16,6;-1,6;17,6;17,3;18,3;17,3;14,3;16,3;17,3;18,3;18,3;18,3;18,3;18,12

http://ototama.com/music/folksong/score.php?id=166

チューリップ

18,4;17,4;16,8;18,4;17,4;16,8;14,4;16,4;17,4;18,4;17,4;16,4;17,8;18,4;17,4;16,8;18,4;17,4;16,8;14,4;16,4;17,4;18,4;17,4;16,4;18,8;14,4;14,4;16,4;14,4;13,4;13,4;14,8;16,4;16,4;17,4;17,4;18,12;-1,4;

ぞうさん

16,9;17,3;18,6;16,9;17,3;18,6;15,9;14,3;13,3;11,3;13,3;13,3;14,3;15,3;14,6;11,12;13,6;10,3;11,3;13,6;15,6;14,9;13,3;17,3;18,3;15,18;;

きよしこの夜

14,9;13,3;14,6;16,18;14,9;13,3;14,6;16,12;-1,6;10,12;10,6;12,18;11,12;11,6;14,12;-1,6;13,12;13,6;11,9;12,3;13,6;14,9;13,3;14,6;16,12;-1,6;13,12;13,6;11,9;12,3;13,6;14,9;13,3;14,6;16,12;-1,6;10,12;10,6;8,9;10,3;12,6;11,18;9,12;-1,6;11,6;14,6;16,6;14,9;15,3;17,6;18,24;-1,6;

http://ototama.com/music/folksong/score.php?id=231

2014年3月3日月曜日

IplImageの画素値へのアクセス

for (int y = 0; y < dstImage->height; y++) {
        for (int x = 0; x < dstImage->width; x++) {
            dstImage->imageData[dstImage->widthStep * y + x * 4 + 3] = grayscaleImage->imageData[grayscaleImage->widthStep * y + x];

        }

    }

IplImage *base =  cvLoadImage ("IMG_2128.JPG", CV_LOAD_IMAGE_ANYCOLOR);
IplImage *src_img = cvLoadImage ("IMG_2129.JPG", CV_LOAD_IMAGE_ANYCOLOR);
uchar p,p2;
for (int y = 0; y < base->height; y++) {
        for (int x = 0; x < base->width; x++) {
for (int i=0;i<3;i++){
p = base->imageData[base->widthStep * y + x * base->nChannels + i];
p2 = src_img->imageData[src_img->widthStep * y + x * src_img->nChannels + i];
if((255-p)*0.8>0.0){
if ((255-p)*0.8 > 255-p2){
src_img->imageData[src_img->widthStep * y + x * src_img->nChannels + i] = 255;
}else{
src_img->imageData[src_img->widthStep * y + x * src_img->nChannels + i] +=
(255-p)*0.8;
}
}
}

        }

    }

お正月

15,12;14,6;15,3;14,3;13,6;11,6;13,12;14,6;14,6;15,6;14,6;13,13;-1,6;15,3;15,3;17,3;17,3;18,3;18,3;18,6;15,3;15,3;14,3;14,3;13,12;13,3;13,3;14,6;15,3;15,3;15,3;15,3;14,3;14,3;13,3;13,3;11,12;15,12;14,6;15,3;14,3;13,6;11,6;13,6;13,6;14,6;14,6;13,6;14,6;15,18;-1,4

http://ototama.com/music/folksong/score.php?id=139

方角センサ

systemVersion = [[[UIDevice currentDevice] systemVersion] floatValue];
    
    // 周波数(Hz)
    int frequency = 10;
    // インスタンスの生成
    manager = [[CMMotionManager alloc] init];

    // CMDeviceMotionの開始

    [self startCMDeviceMotion:frequency];


// Starts Core Motion device-motion updates at `frequency` Hz and derives the
// camera's `center` (view direction) and `up` vectors from the device
// attitude rotation matrix.  Does nothing if the sensor is unavailable.
- (void)startCMDeviceMotion:(int)frequency
{
    // Bail out silently when the sensor is unavailable.
    if (!manager.deviceMotionAvailable) {
        return;
    }

    // Bug fix: the original `1 / frequency` was integer division and
    // truncated to 0 for any frequency > 1 ("update as fast as possible").
    manager.deviceMotionUpdateInterval = 1.0 / frequency;

    CMDeviceMotionHandler handler = ^(CMDeviceMotion *motion, NSError *error) {

        /* magnetometer */
        if (5.0 < systemVersion && manager.magnetometerAvailable) {
            // Angle from magnetic/true north; (x, y) makes the Y axis the
            // reference direction.
            // NOTE(review): `radian` is computed but never used — confirm
            // whether a heading output is still wanted here.
            double radian = atan2(motion.magneticField.field.x, motion.magneticField.field.y);
            (void)radian;
        }

        /* CMAttitude */
        roll = motion.attitude.roll;
        pitch = motion.attitude.pitch;
        yaw = motion.attitude.yaw;
        rotationMatrix = motion.attitude.rotationMatrix;

        // Transform the reference "forward" vector (0, 0, -1) through the
        // attitude matrix (transposed multiply) to get the view direction.
        double tmp[3] = { 0.0, 0.0, -1.0 };
        center[0] = rotationMatrix.m11 * tmp[0] + rotationMatrix.m21 * tmp[1] + rotationMatrix.m31 * tmp[2];
        center[1] = rotationMatrix.m12 * tmp[0] + rotationMatrix.m22 * tmp[1] + rotationMatrix.m32 * tmp[2];
        center[2] = rotationMatrix.m13 * tmp[0] + rotationMatrix.m23 * tmp[1] + rotationMatrix.m33 * tmp[2];
        for (int i = 0; i < 3; i++) {
            // Round to 2 decimals to damp sensor jitter.
            center[i] = round(center[i] * 100) / 100.0;
        }
        NSLog(@"center=%lf,%lf,%lf", center[0], center[1], center[2]);

        // Same transform for the reference "up" vector (0, 1, 0).
        tmp[0] = 0.0;
        tmp[1] = 1.0;
        tmp[2] = 0.0;
        up[0] = rotationMatrix.m11 * tmp[0] + rotationMatrix.m21 * tmp[1] + rotationMatrix.m31 * tmp[2];
        up[1] = rotationMatrix.m12 * tmp[0] + rotationMatrix.m22 * tmp[1] + rotationMatrix.m32 * tmp[2];
        up[2] = rotationMatrix.m13 * tmp[0] + rotationMatrix.m23 * tmp[1] + rotationMatrix.m33 * tmp[2];
        for (int i = 0; i < 3; i++) {
            up[i] = round(up[i] * 100) / 100.0;
        }
        NSLog(@"up=%lf,%lf,%lf", up[0], up[1], up[2]);
        // (Removed the original's unused local `PI`, which was also the
        // wrong value: 3.14159283.)
    };

    // iOS 5+ supports a true-north reference frame.
    if (5.0 < systemVersion) {
        [manager startDeviceMotionUpdatesUsingReferenceFrame:CMAttitudeReferenceFrameXTrueNorthZVertical
                                                     toQueue:[NSOperationQueue currentQueue]
                                                 withHandler:handler];
    } else {
        [manager startDeviceMotionUpdatesToQueue:[NSOperationQueue currentQueue]
                                     withHandler:handler];
    }
}

ぞうさん

15,9;17,3;18,6;15,9;17,3;18,6;15,9;14,3;13,3;11,3;13,3;13,3;14,3;15,3;14,6;11,9;13,6;10,3;11,3;13,6;15,6;14,9;13,3;17,3;18,3;15,18

http://ototama.com/music/folksong/score.php?id=151

チューリップ

18,4;17,4;16,8;18,4;17,4;16,8;14,4;16,4;17,4;18,4;17,4;16,4;17,8;18,4;17,4;16,8;18,4;17,4;16,8;14,4;16,4;17,4;18,4;17,4;16,4;18,8;14,4;14,4;16,4;14,4;13,4;13,4;14,8;16,4;16,4;17,4;17,4;18,12;-1,4;-1,1

http://ototama.com/music/folksong/score.php?id=155

うれしいひなまつり

14,4;14,4;14,4;15,4;14,4;14,4;11,4;13,4;14,4;14,4;13,4;13,4;14,12;-1,4;16,4;16,4;16,4;17,4;16,4;16,4;14,4;16,4;17,4;17,4;16,4;17,4;18,12;-1,4;11,8;11,4;10,4;11,4;13,2;14,2;16,4;16,4;14,4;14,4;11,4;13,4;14,12;-1,4;16,8;17,4;18,4;17,4;16,4;14,4;11,4;13,4;14,4;16,4;17,4;18,12;-1,4

http://ototama.com/music/folksong/score.php?id=169

赤とんぼ

18,3;15,3;15,9;14,3;13,3;11,3;8,3;10,3;11,6;10,3;15,3;15,6;14,6;13,12;-1,6;13,3;10,3;11,9;10,3;8,3;10,3;11,3;10,3;11,3;13,3;11,3;13,3;15,3;13,3;14,3;15,3;15,12;-1,6

http://ototama.com/music/folksong/score.php?id=130

タッチ位置と画像の位置を合わせる

 UITapGestureRecognizer* pan = (UITapGestureRecognizer*) sender;
    CGPoint location = [pan locationInView:self.view];
    NSLog(@"pan x=%f, y=%f", location.x, location.y);
    started =true;
    areaValue = [[AreaValue alloc] init];
    areaValue->x = location.x/self.view.frame.size.width*480;

    areaValue->y = location.y/self.view.frame.size.height*640;

2014年2月27日木曜日

iosアプリのアイコン必要画像一覧

・アプリアイコン
iTunesArtwork@2x.png  1024×1024 px
iTunesArtwork.png    512×512
appicon-xxxhdpi.png    196×196
appicon-xxhdpi.png     176×176
appicon-76@2x.png     152×152
appicon-72@2x.png     144×144
appicon@2x.png       120×120
appicon-114.png       114×114
appicon-76.png        76×76
appicon-72.png        72×72
appicon.png         57×57
 
・スプラッシュスクリーン
Default-Landscape@2x.png   2048(横)x1536(縦)
Default-Portrait@2x.png    1536×2048
Default-Landscape.png    1024×768
Default-Portrait.png      768×1024
Default-568h@2x.png     640×1136
Default@2x.png       640×960 
Default.png         320×480
 
・ナビゲーションバー背景画像
navbar@2x.png      640×88
navbar.png        320×44
navbar-ipad@2x.png    1536×88
navbar-ipad.png      768×44

スレッドの作り方

unsigned thID;
HANDLE hTh;
hTh = (HANDLE)_beginthreadex(NULL, 0, scanPaperThread, NULL, 0, &thID);

if (hTh != NULL) {
  CloseHandle(hTh);
  printf("ハンドルクローズしました\n");
}

unsigned __stdcall scanPaperThread(void *lpx)
{

かえるの歌

18,17,16,15,16,17,18,16,15,14,13,14,15,16,18,18,18,18,18,17,16,15,16,17,18

きらきら星

18,18,14,14,13,13,14,14,15,15,16,16,17,17,18,18,14,14,15,15,16,16,17,17,14,14,15,15,16,16,17,17,18,18,14,14,13,13,14,14,15,15,16,16,17,17,18,18

Excelマクロ

' Maps a solfege syllable to its key/tone number (do=18 down to si=12).
' Unrecognized input falls through, leaving the default return value of 0.
Function test(hoge As String)
    Select Case hoge
        Case "ど"
            test = 18
        Case "れ"
            test = 17
        Case "み"
            test = 16
        Case "ふぁ"
            test = 15
        Case "そ"
            test = 14
        Case "ら"
            test = 13
        Case "し"
            test = 12
    End Select
End Function


sin波

AudioOutputUnitStop(aU);
// Configures a RemoteIO audio unit that renders a sine wave through the
// `renderer` callback, then starts playback.
-(void)playSound{
    
    // Sampling rate
    _sampleRate = 44100.0f;
    
    // Bit rate.  Bug fix: the original line read `bitRate = 8// 8bit` — the
    // comment swallowed the semicolon, so the statement did not compile.
    bitRate = 8; // 8bit
    
    // Describe the default output (RemoteIO) audio unit.
    AudioComponentDescription aCD;
    aCD.componentType = kAudioUnitType_Output;
    aCD.componentSubType = kAudioUnitSubType_RemoteIO;
    aCD.componentManufacturer = kAudioUnitManufacturer_Apple;
    aCD.componentFlags = 0;
    aCD.componentFlagsMask = 0;
    
    // Instantiate and initialize the audio unit.
    AudioComponent aC = AudioComponentFindNext(NULL, &aCD);
    AudioComponentInstanceNew(aC, &aU);
    AudioUnitInitialize(aU);
    
    // Install the render callback that produces the samples; `self` is
    // passed through unretained as the refCon.
    AURenderCallbackStruct callbackStruct;
    callbackStruct.inputProc = renderer;
    callbackStruct.inputProcRefCon = (__bridge void*)self;
    AudioUnitSetProperty(aU,
                         kAudioUnitProperty_SetRenderCallback,
                         kAudioUnitScope_Input,
                         0,
                         &callbackStruct,
                         sizeof(AURenderCallbackStruct));
    
    // Stereo linear-PCM stream description in the canonical sample format.
    AudioStreamBasicDescription aSBD;
    aSBD.mSampleRate = _sampleRate;
    aSBD.mFormatID = kAudioFormatLinearPCM;
    aSBD.mFormatFlags = kAudioFormatFlagsAudioUnitCanonical;
    aSBD.mChannelsPerFrame = 2;
    aSBD.mBytesPerPacket = sizeof(AudioUnitSampleType);
    aSBD.mBytesPerFrame = sizeof(AudioUnitSampleType);
    aSBD.mFramesPerPacket = 1;
    aSBD.mBitsPerChannel = bitRate * sizeof(AudioUnitSampleType);
    aSBD.mReserved = 0;
    
    // Apply the stream format to the unit's input scope.
    AudioUnitSetProperty(aU,
                         kAudioUnitProperty_StreamFormat,
                         kAudioUnitScope_Input,
                         0,
                         &aSBD,
                         sizeof(aSBD));
    
    // Start playback.
    AudioOutputUnitStart(aU);
}

// RemoteIO render callback: fills both stereo channel buffers with a sine
// wave at the view controller's current frequency.
static OSStatus renderer(void *inRef,
                         AudioUnitRenderActionFlags *ioActionFlags,
                         const AudioTimeStamp* inTimeStamp,
                         UInt32 inBusNumber,
                         UInt32 inNumberFrames,
                         AudioBufferList *ioData) {
    
    // Recover the owning view controller passed via inputProcRefCon.
    ViewController* def = (__bridge ViewController*)inRef;
    
    // Phase increment per sample for the requested frequency.
    float freq = def.frequency*2.0*M_PI/def.sampleRate;
    
    // Output pointers for the left and right channel buffers.
    AudioUnitSampleType *outL = ioData->mBuffers[0].mData;
    AudioUnitSampleType *outR = ioData->mBuffers[1].mData;
    
    for (int i = 0; i < inNumberFrames; i++) {
        float wave = sin(def.phase);
        // Scale [-1, 1] into the fixed-point canonical sample format.
        AudioUnitSampleType sample = wave * (1 << kAudioUnitSampleFractionBits);
        *outL++ = sample;
        *outR++ = sample;
        def.phase += freq;
    }
    
    // Bug fix: wrap the phase so it does not grow without bound — a large
    // float argument to sin() loses precision and eventually distorts.
    while (def.phase > 2.0 * M_PI) {
        def.phase -= 2.0 * M_PI;
    }
    
    return noErr;
}

周波数指定

 // Maps a keyboard button tag to its tone frequency in Hz (equal temperament,
 // A4 = 440 Hz).  Tags 19..0 cover the white keys from B3 up to G6; tags
 // 20..40 cover the sharps/flats.  Commented values after some assignments
 // are the corresponding note one octave lower from an earlier revision.
 switch (button.tag) {
        case 19: // B3 (si)
            _frequency = 246.94;
            break;
        case 18: // C4 (do)
            _frequency = 261.62;
            break;
        case 17: // D4 (re)
            _frequency = 293.66;
            break;
        case 16: // E4 (mi)
            _frequency = 329.62;
            break;
        case 15: // F4 (fa)
            _frequency = 349.22;
            break;
        case 14: // G4 (so)
            _frequency = 391.99;
            break;
        case 13: // A4 (la)
            _frequency = 440.00;
            break;
        case 12: // B4 (si)
            _frequency = 493.88;
            break;
        case 11: // C5 (do)
            _frequency = 523.25;
            break;
        case 10: // D5 (re)
            _frequency = 587.32;//293.66;
            break;
        case 9: // E5 (mi)
            _frequency = 659.25;//329.62;
            break;
        case 8: // F5 (fa)
            _frequency = 698.45;//349.22;
            break;
        case 7: // G5 (so)
            _frequency = 783.99;//391.99;
            break;
        case 6: // A5 (la)
            _frequency = 880.00;//440.00;
            break;
        case 5: // B5 (si)
            _frequency = 987.76;//493.88;
            break;
        case 4: // C6 (do)
            _frequency = 1046.50;//523.25;
            break;
        case 3: // D6 (re)
            _frequency = 1174.65;//293.66;
            break;
        case 2: // E6 (mi)
            _frequency = 1318.51;//329.62;
            break;
        case 1: // F6 (fa)
            _frequency = 1396.91;//349.22;
            break;
        case 0: // G6 (so)
            _frequency = 1567.98;//391.99;
            break;
        case 20: // G#6/Ab6
            _frequency = 1661.21;
            break;
        case 21: // F#6/Gb6
            _frequency = 1479.97;//391.99;
            break;
        case 23: // D#6/Eb6
            _frequency =1244.50;//391.99;
            break;
        case 24: // C#6/Db6
            _frequency = 1108.73;//391.99;
            break;
        case 26: // A#5/Bb5
            _frequency = 932.32;//391.99;
            break;
        case 27: // G#5/Ab5
            _frequency = 830.60;//391.99;
            break;
        case 28: // F#5/Gb5
            _frequency = 739.98;//391.99;
            break;
        case 30: // D#5/Eb5
            _frequency = 622.25;//391.99;
            break;
        case 31: // C#5/Db5
            _frequency = 554.36;//391.99;
            break;
        case 33: // A#4/Bb4
            _frequency = 466.16;//391.99;
            break;
        case 34: // G#4/Ab4
            _frequency = 415.30;//391.99;
            break;
        case 35: // F#4/Gb4
            _frequency = 369.99;//391.99;
            break;
        case 37: // D#4/Eb4
            _frequency = 311.12;//391.99;
            break;
        case 38: // C#4/Db4
            _frequency = 277.18;//391.99;
            break;
        case 40: // A#3/Bb3
            _frequency = 233.08;//391.99;
            break;


            
        default:
            break;

    }

回転禁止

// Disables autorotation entirely (iOS 6+ rotation API).
- (BOOL)shouldAutorotate
{
    return NO;
}
// Legacy (pre-iOS 6) rotation API: allow portrait orientations only.
// NOTE(review): this disagrees with -shouldAutorotate above, which blocks
// rotation entirely on iOS 6+ — confirm which behavior is intended.
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation {
return UIInterfaceOrientationIsPortrait(toInterfaceOrientation);
}

2014年2月16日日曜日

テンプレートマッチング

TemplateMatching.h
--------------------------

#pragma once
#include "cvbase.h"
// Demo that locates a template image inside a search image with OpenCV
// template matching (implementation in TemplateMatching.cpp).
class TemplateMatching :
public CVBase
{
public:
TemplateMatching(void);
~TemplateMatching(void);
// Runs the demo: loads the images, matches, draws and displays the result.
void mainloop(void);
};



TemplateMatching.cpp
--------------------------
#include "TemplateMatching.h"


// Default constructor — no state to initialize.
TemplateMatching::TemplateMatching(void)
{
}


// Destructor — nothing owned directly by this class.
TemplateMatching::~TemplateMatching(void)
{
}

void TemplateMatching::mainloop(void)
{
cv::Mat search_img = cv::imread("room7.png", 1);
  if(search_img.empty()) return;
  // テンプレート画像
  cv::Mat tmp_img = cv::imread("room7_tmp_2.png", 1);
  if(tmp_img.empty()) return;

  cv::Mat result_img;
  // テンプレートマッチング
  cv::matchTemplate(search_img, tmp_img, result_img, CV_TM_CCOEFF_NORMED);

  // 最大のスコアの場所を探す
  cv::Rect roi_rect(0, 0, tmp_img.cols, tmp_img.rows);
  cv::Point max_pt;
  double maxVal;
  cv::minMaxLoc(result_img, NULL, &maxVal, NULL, &max_pt);
  roi_rect.x = max_pt.x;
  roi_rect.y = max_pt.y;
  std::cout << "(" << max_pt.x << ", " << max_pt.y << "), score=" << maxVal << std::endl;
  // 探索結果の場所に矩形を描画
  cv::rectangle(search_img, roi_rect, cv::Scalar(0,0,255), 3);

  cv::namedWindow("search image", CV_WINDOW_AUTOSIZE|CV_WINDOW_FREERATIO);
  cv::namedWindow("result image", CV_WINDOW_AUTOSIZE|CV_WINDOW_FREERATIO);
  cv::imshow("search image", search_img);
  cv::imshow("result image", result_img);
  cv::waitKey(0);
}

指定エリア内のカラー平均グラフの生成

ManyAreaValue.h
--------------------------------

#pragma once
#include "cvbase.h"
#include <vector>

using namespace std;

// One user-selected rectangular sampling region of the video frame, with the
// current and previous per-channel averages used for graph drawing.
class Area
{
public:
Area(void);
CvPoint start;   // top-left corner (set on mouse press)
CvPoint end;     // bottom-right corner (set on mouse release)
int sum[3];      // current per-channel average; -1 = no sample yet
int before[3];   // previous frame's averages (for drawing line segments)
};
// Tool that tracks per-channel color averages over several user-drawn areas
// of a video and plots them as a scrolling graph (one 255-pixel band per
// area/channel in `graph`).
class ManyAreaValue :
public CVBase
{
public:
ManyAreaValue(void);
~ManyAreaValue(void);
vector<Area *> areas;    // sampling regions registered via mouseEvent
CvPoint pStart;          // drag start corner while defining a new area

int speed;               // frames advanced per graph column (seek multiplier)

IplImage *graph;         // 1200x(255*4) plot canvas, allocated in the ctor

FILE *fp;                // CSV output opened elsewhere
// NOTE(review): the .cpp uses `this->capture` (presumably from CVBase)
// while this member appears unused — confirm which capture is live.
cv::VideoCapture cap;
bool mouse;

int count;               // current graph column (x position)
int position;            // current seek position, in graph "pages"
// Mouse callback for the video window (defines areas by dragging).
void mouseEvent (int event, int x, int y, int flags, void *param);
// Mouse callback for the graph window (click to seek the capture).
void mouseEvent2 (int event, int x, int y, int flags, void *param);
void init(void);
void mainloop(void);

// Maps channel index 0/1/2 to red/green/blue (white otherwise).
CvScalar createColor(int colorNum);

//void term(void);

// Recomputes each area's per-channel averages for the given frame.
void calucurateAreaRGB(IplImage *img);
// Appends the averages to the CSV and draws the next graph column.
void printAreaRGB(void);


};



ManyAreaValue.cpp
-----------------------------

#include "ManyAreaValue.h"

void on_mouse (int event, int x, int y, int flags, void *param = NULL);
void on_mouse2 (int event, int x, int y, int flags, void *param = NULL);

// Initializes an area with sentinel values: -1 marks "no sample yet" so
// printAreaRGB() skips the first graph segment.
Area::Area(void)
{
    for (int j = 0; j < 3; j++){
        this->sum[j] = -1;
        // Bug fix: `before` was left uninitialized even though printAreaRGB()
        // compares it against -1 before calcurateAreaRGB() has ever copied
        // sum into it.
        this->before[j] = -1;
    }
}

// Constructs the tool: resets the graph column counter and allocates the
// 1200 x (255*4) plot canvas, ruling each 255-pixel band with a black
// boundary line and gray guide lines at +100 and +200.
ManyAreaValue::ManyAreaValue(void)
{
    count = 0;
    graph = cvCreateImage(cvSize(1200, 255 * 4), IPL_DEPTH_8U, 3);
    for (int band = 0; band < 4; band++){
        int top = 255 * band;
        cvLine(graph, cvPoint(0, top), cvPoint(1200, top), CV_RGB(0, 0, 0), 1, CV_AA, 0);
        cvLine(graph, cvPoint(0, top + 100), cvPoint(1200, top + 100), CV_RGB(100, 100, 100), 1, CV_AA, 0);
        cvLine(graph, cvPoint(0, top + 200), cvPoint(1200, top + 200), CV_RGB(100, 100, 100), 1, CV_AA, 0);
    }
}

// Destructor.  NOTE(review): `graph` (cvCreateImage in the constructor) and
// the `Area`s in `areas` are never released here — leaks if instances are
// created repeatedly.
ManyAreaValue::~ManyAreaValue(void)
{
}

// Mouse callback for the "Capture" window: a left-button drag defines a
// new sampling rectangle (press = top-left, release = bottom-right).
// Fixes: removed the unused local `CvPoint pEnd` and braced the case so
// the initialized local is properly scoped inside the switch.
void ManyAreaValue::mouseEvent (int event, int x, int y, int flags, void *param)
{
switch (event) {
case CV_EVENT_MOUSEMOVE:
break;
case CV_EVENT_LBUTTONDOWN:
// Remember the drag origin; the rectangle is finalized on button-up.
pStart.x = x;
pStart.y = y;
break;
case CV_EVENT_LBUTTONUP: {
Area *area = new Area();   // owned by `areas`
area->start = pStart;
area->end.x = x;
area->end.y = y;
areas.push_back(area);
break;
}
}
}

// Mouse callback for the graph window: clicking column x seeks the video
// to the frame that column represents and resumes plotting from there.
// Fix: braced the case so the initialized local is properly scoped.
void ManyAreaValue::mouseEvent2 (int event, int x, int y, int flags, void *param)
{
switch (event) {
case CV_EVENT_MOUSEMOVE:
break;
case CV_EVENT_LBUTTONDOWN:
break;
case CV_EVENT_LBUTTONUP: {
// NOTE(review): `position / 1200` yields a page index, not a column
// base — `(position / 1200) * 1200 + x` may have been intended.
// Behavior kept as-is pending confirmation.
int setPos = (int)position/1200;
setPos += x;
count = x;            // resume drawing at the clicked column
position = setPos;
// Frames were consumed `speed` at a time, hence the multiplication.
cvSetCaptureProperty( this->capture, CV_CAP_PROP_POS_FRAMES, setPos*speed );
break;
}
}
}

// Recompute the mean R,G,B of every sampling region over `img`.
// For each area the previous mean is saved into before[] (for graph
// drawing) and sum[] is recomputed from the current frame.
// Fixes: guards empty/inverted rectangles (the original divided by an
// area of zero or negative size) and drops the unused 3-byte scratch
// array (only p[0] was ever used).
void ManyAreaValue::calucurateAreaRGB(IplImage *img)
{
for (size_t i = 0; i < areas.size(); i++) {
Area *area = areas[i];
for (int j = 0; j < 3; j++) {
area->before[j] = area->sum[j];
area->sum[j] = 0;
}
int areaSize = (area->end.y - area->start.y) * (area->end.x - area->start.x);
if (areaSize <= 0) {
continue;   // empty or inverted rectangle: nothing to average
}
for (int y = area->start.y; y < area->end.y; y++) {
for (int x = area->start.x; x < area->end.x; x++) {
for (int j = 0; j < 3; j++) {
// Pixels are stored B,G,R; index 2-j reads them out in R,G,B order.
uchar channel = img->imageData[img->widthStep * y + x * 3 + 2 - j];
area->sum[j] += channel;
}
}
}
for (int j = 0; j < 3; j++) {
area->sum[j] /= areaSize;
}
}
}

// Map a channel index to its plot color: 0 -> red, 1 -> green, 2 -> blue.
// Any other index falls back to white.
CvScalar ManyAreaValue::createColor(int colorNum)
{
switch (colorNum) {
case 0:
return CV_RGB (  255,    0,    0);
case 1:
return CV_RGB (    0,  255,    0);
case 2:
return CV_RGB (    0,    0,  255);
default:
return CV_RGB (  255,  255,  255);
}
}

// Append the current per-area channel means to the log file and advance
// the scrolling graph by one column.
// NOTE(review): the black "erase" line below is drawn once per channel and
// runs diagonally across the whole 255px band ((count-1, band top) ->
// (count, band bottom)); a vertical line at x = count may have been
// intended — confirm before relying on the rendered output.
void ManyAreaValue::printAreaRGB(void)
{
// Timestamp (ms) of the current frame; first field of the log row.
double msec = cvGetCaptureProperty(this->capture,CV_CAP_PROP_POS_MSEC);
fprintf(this->fp,"%lf,",msec);
for (int i=0;i<areas.size();i++){
for (int j=0;j<3;j++){
// before[j] == -1 means no previous sample yet (see Area's constructor),
// so there is no segment to draw and nothing is logged for this channel.
if (areas[i]->before[j] !=-1){

// Clear the column before plotting (see NOTE above about its slope).
cvLine(graph,
cvPoint(count-1,255*i),
cvPoint(count,255*i+255),
CV_RGB (    0,  0,  0),
2, CV_AA, 0);
fprintf(this->fp,"%d,",areas[i]->sum[j]);
// Segment from the previous mean to the current one, offset into
// band i; createColor(j) picks red/green/blue per channel.
cvLine (graph,
cvPoint(count-1,255*i+areas[i]->before[j]),
cvPoint(count,255*i+areas[i]->sum[j]),
createColor(j),
2, CV_AA, 0);
}
}
}
fprintf(this->fp,"\n");
cvShowImage ("Graph",this->graph);
}

// Read ini/ManyAreaValue.txt and prepare the capture source, the windows
// and the output log.
// Config format (mode 1): mode, video path, speed, area count, then
// "start.x start.y end.x end.y" per area. Any other mode opens the camera.
// Fixes: fscanf("%s", &fileName) passed a char(*)[256] instead of char*
// (undefined behavior); the config FILE* was never closed; fopen was not
// NULL-checked (crash when the file is missing); the "Capture" mouse
// callback was registered twice; the unused `fps` local is removed.
void ManyAreaValue::init(void)
{
FILE *initfp = fopen("ini/ManyAreaValue.txt", "r");
if (initfp == NULL) {
// No config file: fall back to the camera, same as a non-1 mode.
this->capture = this->fromCamera();
} else {
int mode = 0;
fscanf(initfp, "%d", &mode);
if (mode == 1) {
char fileName[256];
fscanf(initfp, "%255s", fileName);   // width limit prevents buffer overflow
fscanf(initfp, "%d", &speed);
this->capture = cvCaptureFromFile(fileName);
int areaNum = 0;
fscanf(initfp, "%d", &areaNum);
for (int i = 0; i < areaNum; i++) {
Area *area = new Area();
fscanf(initfp, "%d", &area->start.x);
fscanf(initfp, "%d", &area->start.y);
fscanf(initfp, "%d", &area->end.x);
fscanf(initfp, "%d", &area->end.y);
this->areas.push_back(area);
}
} else {
this->capture = this->fromCamera();
}
fclose(initfp);   // was leaked
}

cvNamedWindow( "Capture", 1 );
cvSetMouseCallback ("Capture", on_mouse);
cvNamedWindow( "Graph", 1 );
cvMoveWindow("Graph",0,0);

this->fp = fopen("nyan.txt","w");
}

// Main processing loop: consume `speed` frames per iteration, recompute
// and plot the per-area averages, outline the areas on the preview, and
// show both windows. Rewinds the video when it runs out; ESC exits.
// NOTE(review): `ch`, `frame`, `capture` and `drawLine` are not declared
// in this file — presumably CVBase members; confirm in cvbase.h.
void ManyAreaValue:: mainloop(void)
{
while (1) {
count++;
position++;
if(count>1200)count = 0;   // wrap the graph back to column 0
// Skip ahead: only every `speed`-th frame is processed/plotted.
for (int i=0;i<speed;i++){
this->frame = cvQueryFrame (this->capture);
}
if ( this->frame == NULL ){
// End of stream: rewind to the first frame and keep looping.
cvSetCaptureProperty( this->capture, CV_CAP_PROP_POS_FRAMES, 0.0 );
this->frame = cvQueryFrame(this->capture);
continue;
}
if (this->areas.size() > 0){
this->calucurateAreaRGB(this->frame);
this->printAreaRGB();
// Outline every sampling region in red on the preview frame.
for(int i=0;i<this->areas.size();i++)
this->drawLine(this->frame,this->areas[i]->start,this->areas[i]->end,CV_RGB (255, 0, 0),2);
}
cvShowImage ("Capture",this->frame);
ch = cvWaitKey ( 1 ); // delay in milliseconds (0 would wait forever)
if (ch == '\x1b') {
// ESC key
break;
    }
  }
}