iphone - How can I add a watermark to a video captured on iOS?
This question already has an answer here:
- iPhone watermark on recorded video. 5 answers
I was wondering if anyone could tell me how to achieve this. I have been thinking about a couple of solutions:
- Create individual images from the captured video, combine them image by image, and then build a new AVAsset... Sounds a bit complicated, don't you think?
- Combine two videos: one transparent (the one carrying the watermark) and the other the asset captured with the camera.
This code adds a text string and an image ON the video; once saved, the resulting video plays in any player. The main advantage of this code is that it keeps the video's sound, and everything (both the text and the image) is handled in a single piece of code.
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>
#import <UIKit/UIKit.h>

- (void)MixVideoWithText
{
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:url options:nil];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // Video track of the captured asset.
    AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

    // Audio track, so the exported video keeps its sound.
    AVMutableCompositionTrack *compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *clipAudioTrack = [[videoAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];

    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipVideoTrack atTime:kCMTimeZero error:nil];
    [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) ofTrack:clipAudioTrack atTime:kCMTimeZero error:nil];
    [compositionVideoTrack setPreferredTransform:clipVideoTrack.preferredTransform];

    CGSize sizeOfVideo = clipVideoTrack.naturalSize; // Use the track's naturalSize; -[AVAsset naturalSize] is deprecated.
    // The text layer defines the watermark text drawn over the video.
    CATextLayer *textOfvideo = [[CATextLayer alloc] init];
    textOfvideo.string = [NSString stringWithFormat:@"%@", text]; // 'text' is the string you want to add to the video.
    [textOfvideo setFont:(__bridge CFTypeRef)([NSString stringWithFormat:@"%@", fontUsed])]; // 'fontUsed' is the font name; CATextLayer accepts a font-name string here.
    [textOfvideo setFontSize:13];
    [textOfvideo setFrame:CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height / 6)];
    [textOfvideo setAlignmentMode:kCAAlignmentCenter];
    [textOfvideo setForegroundColor:[selectedColour CGColor]];
    // The image layer holds the watermark image.
    UIImage *myImage = [UIImage imageNamed:@"one.png"];
    CALayer *layerCa = [CALayer layer];
    layerCa.contents = (id)myImage.CGImage;
    layerCa.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    layerCa.opacity = 1.0;

    // Container layer for the text, clipped to the video size.
    CALayer *optionalLayer = [CALayer layer];
    [optionalLayer addSublayer:textOfvideo];
    optionalLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [optionalLayer setMasksToBounds:YES];
    // The video is rendered into videoLayer; the text and image layers are composited on top of it.
    CALayer *parentLayer = [CALayer layer];
    CALayer *videoLayer = [CALayer layer];
    parentLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    videoLayer.frame = CGRectMake(0, 0, sizeOfVideo.width, sizeOfVideo.height);
    [parentLayer addSublayer:videoLayer];
    [parentLayer addSublayer:optionalLayer];
    [parentLayer addSublayer:layerCa];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    videoComposition.renderSize = sizeOfVideo;
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]);
    AVAssetTrack *videoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
    videoComposition.instructions = [NSArray arrayWithObject:instruction];
    // Export to a uniquely named file in the app's Documents directory.
    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy-MM-dd_HH-mm-ss"];
    NSString *destinationPath = [documentsDirectory stringByAppendingFormat:@"/output_%@.mov", [dateFormatter stringFromDate:[NSDate date]]];

    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    exportSession.videoComposition = videoComposition;
    exportSession.outputURL = [NSURL fileURLWithPath:destinationPath];
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exportSession.status)
        {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export OK");
                if (UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(destinationPath)) {
                    UISaveVideoAtPathToSavedPhotosAlbum(destinationPath, self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
                }
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exportSession.error);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                break;
            default:
                break;
        }
    }];
}
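For reference, here is a minimal sketch of how MixVideoWithText might be invoked. The names url, text, fontUsed and selectedColour are not defined in the answer above, so this assumes they are instance variables you set beforehand; adjust to however you actually hold them.

// Hypothetical setup: url, text, fontUsed and selectedColour are assumed to be instance variables.
url = [[NSBundle mainBundle] URLForResource:@"captured" withExtension:@"mov"]; // assumed source clip
text = @"My watermark";
fontUsed = @"Helvetica";
selectedColour = [UIColor whiteColor];
[self MixVideoWithText];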
This callback reports any error that comes back after saving the video:
- (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo
{
    if (error) {
        NSLog(@"Finished saving video with error: %@", error);
    }
}
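Note that UISaveVideoAtPathToSavedPhotosAlbum is the older UIKit route; on recent iOS versions Apple steers you toward the Photos framework for saving media. A rough sketch of that alternative, assuming the app has permission to add to the photo library and destinationPath points at the exported file, might look like this:

#import <Photos/Photos.h>

// Sketch only: save the exported file with the Photos framework instead of the UIKit helper above.
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:[NSURL fileURLWithPath:destinationPath]];
} completionHandler:^(BOOL success, NSError *error) {
    if (!success) {
        NSLog(@"Saving to the photo library failed: %@", error);
    }
}];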