
Create a video from camera roll images - iOS SDK




I solved the zoomed-image issue in question using this code:

-(UIImage *)scaleImage:(UIImage *)image toSize:(CGSize)newSize
{
    UIGraphicsBeginImageContext(newSize);
    [image drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}

Resize the UIImage before converting it to a CGImage, and make sure you resize the image width to a multiple of 16.

CGSize your_size = CGSizeMake(1600, 800);
UIImage *tempImg = [self scaleImage:img toSize:your_size];
buffer = [self pixelBufferFromCGImage:[tempImg CGImage]];
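If a target size is not already aligned, a small helper can round the width up before calling scaleImage:toSize:. This is only a sketch; alignedSize is a hypothetical name, not part of the code above:

// Hypothetical helper (not in the original code): rounds a target size up
// so its width is the next multiple of 16 before scaling and buffer creation.
static CGSize alignedSize(CGSize size)
{
    CGFloat alignedWidth = ceil(size.width / 16.0) * 16.0;
    return CGSizeMake(alignedWidth, size.height);
}

For example, CGSizeMake(1600, 800) is already aligned, while CGSizeMake(1000, 800) would become CGSizeMake(1008, 800).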

I have used the following code to create a video from images.

This code works fine when I select an image from the camera roll that was downloaded from the web or is a screenshot, but an image taken with the camera appears zoomed in in the resulting movie.

I don't know what is wrong with the camera images.

Can anyone help me solve this problem?

-(IBAction)createV:(id)sender
{
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSString *videoOutputPath = [documentsDirectory stringByAppendingPathComponent:@"test_output.mp4"];

    CGSize imageSize = [DatabaseAccess getusersetsize];
    double nospf = [[[NSUserDefaults standardUserDefaults] valueForKey:@"duration"] intValue];
    NSUInteger fps = 10;

    NSMutableArray *imageArray; // = [DatabaseAccess getimagelist:@"select imgname from tbl_userimage"];
    NSArray *imagePaths = [DatabaseAccess getimagelist:@"select imgname,strftime('%d-%m-%Y', tdate) as tdate from tbl_userimage"];
    imageArray = [[NSMutableArray alloc] initWithCapacity:imagePaths.count];

    int i = 0;
    for (NSString *path in [imagePaths valueForKey:@"image"]) {
        if ([[NSUserDefaults standardUserDefaults] boolForKey:@"disdate"]) {
            CGSize imgsize = [DatabaseAccess getusersetsize];
            //[imageArray addObject:[[DatabaseAccess drawText:[[imagePaths valueForKey:@"date"] objectAtIndex:i] inImage:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]] atPoint:CGPointMake(imgsize.width-250,imgsize.height-60)] fixOrientation]];
            [imageArray addObject:[DatabaseAccess drawText:[[imagePaths valueForKey:@"date"] objectAtIndex:i]
                                                   inImage:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]]
                                                   atPoint:CGPointMake(imgsize.width - 250, imgsize.height - 60)]];
        } else {
            [imageArray addObject:[UIImage imageWithContentsOfFile:[DatabaseAccess documentsPathForFileName:path]]];
            NSLog(@"%@", path);
            // [imageArray addObject:[UIImage imageNamed:path]];
        }
        i++;
    }

    [self exportImages:imageArray
         asVideoToPath:videoOutputPath
         withFrameSize:imageSize
       framesPerSecond:fps
numberOfSecondsPerFrame:nospf];
}

- (void)exportImages:(NSMutableArray *)imageArray
       asVideoToPath:(NSString *)videoOutputPath
       withFrameSize:(CGSize)imageSize
     framesPerSecond:(NSUInteger)fps
numberOfSecondsPerFrame:(double)numberOfSecondsPerFrame
{
    NSError *error = nil;
    NSString *documentsDirectory = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];
    NSFileManager *fileMgr = [NSFileManager defaultManager];

    // Remove any previous output file before writing.
    if ([fileMgr removeItemAtPath:videoOutputPath error:&error] != YES)
        NSLog(@"Unable to delete file: %@", [error localizedDescription]);

    // end setup
    NSLog(@"Start building video from defined frames.");

    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];

    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                         sourcePixelBufferAttributes:nil];

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = YES;
    [videoWriter addInput:videoWriterInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL; // convert UIImage to CGImage below
    int frameCount = 0;
    //double numberOfSecondsPerFrame = 6;
    double frameDuration = fps * numberOfSecondsPerFrame;

    NSLog(@"**************************************************");
    for (UIImage *img in imageArray) {
        buffer = [self pixelBufferFromCGImage:[img CGImage]];

        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j < 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                // Print out status:
                NSLog(@"Processing video frame (%d,%lu)", frameCount, (unsigned long)[imageArray count]);

                // CMTimeMake(value, timescale) represents value/timescale seconds,
                // so frame i is stamped at i / numberOfSecondsPerFrame seconds here.
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)numberOfSecondsPerFrame);
                //CMTime frameTime = CMTimeMake(frameCount*frameDuration, (int32_t)fps);

                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (!append_ok) {
                    NSError *error = videoWriter.error;
                    if (error != nil) {
                        NSLog(@"Unresolved error %@,%@.", error, [error userInfo]);
                    }
                }
            } else {
                printf("adaptor not ready %d, %d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (!append_ok) {
            printf("error appending image %d times %d, with error.\n", frameCount, j);
        }
        frameCount++;
    }
    NSLog(@"**************************************************");

    // Finish the session:
    [videoWriterInput markAsFinished];
    [videoWriter finishWriting]; // deprecated since iOS 6; finishWritingWithCompletionHandler: is preferred
    NSLog(@"Write Ended");

    [self playMovie:videoOutputPath];
}

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    // CGSize size = CGSizeMake(400, 200);
    CGSize size = [DatabaseAccess getusersetsize];

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Assumes 4 bytes per pixel with no row padding; CVPixelBufferCreate may
    // pad each row, which is why the answer above keeps the width a multiple of 16.
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 size.width,
                                                 size.height,
                                                 8,
                                                 4 * size.width,
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst); //kCGImageAlphaNoneSkipFirst
    NSParameterAssert(context);

    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0));
    // Note: the draw rect uses the image's native pixel size. If the source
    // image is larger than `size` (typical for camera photos), only part of it
    // lands in the buffer, which shows up as a "zoomed" frame.
    CGContextDrawImage(context,
                       CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)),
                       image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}
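Two details in pixelBufferFromCGImage are consistent with the zoom you describe: the bitmap context assumes 4 * size.width bytes per row while the pixel buffer may pad its rows, and the draw rect uses the image's native dimensions, so a camera photo larger than the target size is effectively cropped. Below is a sketch of a variant that uses the buffer's actual stride and draws into the buffer's own rect; it reuses [DatabaseAccess getusersetsize] from the code above and has not been tested against the rest of this project:

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    CGSize size = [DatabaseAccess getusersetsize];
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual row stride instead of assuming 4 * width;
    // CVPixelBufferCreate may pad each row for alignment.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    // Draw into the buffer's rect, not the image's native rect, so large
    // camera photos are scaled down instead of cropped.
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}

Separately, note that CMTimeMake(frameCount, numberOfSecondsPerFrame) stamps frame i at i / numberOfSecondsPerFrame seconds; the commented-out frameCount*frameDuration variant is the one that actually shows each frame for numberOfSecondsPerFrame seconds.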