Wednesday, March 30, 2011
Friday, March 18, 2011
Thursday, March 17, 2011
Thursday, March 10, 2011
Hosts file entries that point Adobe's activation and registration servers at localhost, blocking them (append to /etc/hosts, or to C:\Windows\System32\drivers\etc\hosts on Windows):
127.0.0.1 activate.adobe.com
127.0.0.1 practivate.adobe.com
127.0.0.1 ereg.adobe.com
127.0.0.1 activate.wip3.adobe.com
127.0.0.1 wip3.adobe.com
127.0.0.1 3dns-3.adobe.com
127.0.0.1 3dns-2.adobe.com
127.0.0.1 adobe-dns.adobe.com
127.0.0.1 adobe-dns-2.adobe.com
127.0.0.1 adobe-dns-3.adobe.com
127.0.0.1 ereg.wip3.adobe.com
127.0.0.1 activate-sea.adobe.com
127.0.0.1 wwis-dubc1-vip60.adobe.com
127.0.0.1 activate-sjc0.adobe.com
iPhone SDK 4: getting camera frames (example)
// Requires the AVFoundation, CoreMedia, and CoreVideo frameworks.
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>

// Video capture session init
- (BOOL)initVideoCapture
{
    NSError *error = nil;
    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (captureInput == nil) {
        NSLog(@"Failed to create video input: %@", error);
        return NO;
    }

    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    // The delegate queue must not be NULL. Using the main queue keeps this
    // example simple and lets the callback update UIKit views directly.
    [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];

    // Request BGRA frames so they can be wrapped in a CGBitmapContext below.
    NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
    NSNumber *val = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:val forKey:key];
    [captureOutput setVideoSettings:videoSettings];

    AVCaptureSession *captureSession = [[AVCaptureSession alloc] init];
    [captureSession addInput:captureInput];
    [captureSession addOutput:captureOutput];
    [captureSession beginConfiguration];
    [captureSession setSessionPreset:AVCaptureSessionPresetHigh];
    [captureSession commitConfiguration];
    [captureSession startRunning];
    // In real code, store captureSession in an ivar: the session must stay
    // alive for frames to keep arriving.
    return YES;
}

// Video frame capture callback (runs on the delegate queue set above)
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the image buffer before reading its memory
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // BGRA buffers are interleaved, not planar, so use the plain base address
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the BGRA bytes in a bitmap context and snapshot it as a CGImage
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);

    // Copy the image data if needed (note: memcpy must read the data's byte
    // pointer via CFDataGetBytePtr, not the CFDataRef itself)
    /*
    CFDataRef dataref = CGDataProviderCopyData(CGImageGetDataProvider(newImage));
    memcpy(mImageData, CFDataGetBytePtr(dataref), width * height * 4);
    CFRelease(dataref);
    */

    UIImage *image = [UIImage imageWithCGImage:newImage];
    [imageView setImage:image];
    [imageView setFrame:CGRectMake(0, 0, width, height)];

    // Release the Core Graphics objects created above
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CGImageRelease(newImage);
    // Unlock the image buffer; it is owned by the sample buffer, so do not release it
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
ref: http://forum.unity3d.com/viewtopic.php?p=300819
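If only the raw BGRA bytes are needed (as the commented-out memcpy above hints), the CGImage round-trip can be skipped entirely. A minimal sketch, assuming the same kCVPixelFormatType_32BGRA output settings as above; the processPixelBuffer: name is hypothetical:

// Hypothetical helper: walk the locked pixel buffer's raw BGRA bytes directly.
- (void)processPixelBuffer:(CVImageBufferRef)imageBuffer
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width  = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // Example per-pixel work: accumulate a rough brightness sum.
    unsigned long long sum = 0;
    for (size_t y = 0; y < height; y++) {
        // Rows may be padded, so step by bytesPerRow, not width * 4.
        uint8_t *row = base + y * bytesPerRow;
        for (size_t x = 0; x < width; x++) {
            uint8_t b = row[x * 4 + 0];  // kCVPixelFormatType_32BGRA byte order: B, G, R, A
            uint8_t g = row[x * 4 + 1];
            uint8_t r = row[x * 4 + 2];
            sum += r + g + b;
        }
    }
    if (width > 0 && height > 0)
        NSLog(@"average channel value: %llu", sum / (width * height * 3));
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}

It could be called from the delegate callback above, passing the imageBuffer obtained from CMSampleBufferGetImageBuffer.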
iOS 4.0: replacing the UIGetScreenImage function
Method One.
- (UIImage *)captureView:(UIView *)view
{
    CGRect screenRect = [[UIScreen mainScreen] bounds];
    UIGraphicsBeginImageContext(screenRect.size);
    CGContextRef ctx = UIGraphicsGetCurrentContext();
    // Fill with black first so any area the view does not cover is opaque
    [[UIColor blackColor] set];
    CGContextFillRect(ctx, screenRect);
    // renderInContext: is a CALayer method; link QuartzCore
    [view.layer renderInContext:ctx];
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}
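A quick call-site sketch (hypothetical; assumes the method lives in a view controller):

// Snapshot the controller's root view into a UIImage.
UIImage *snapshot = [self captureView:self.view];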
Method Two.
UIGraphicsBeginImageContext(self.view.window.bounds.size);
[self.view.window.layer renderInContext:UIGraphicsGetCurrentContext()];
UIImage *image2 = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();
// Save the captured image to the user's photo album
UIImageWriteToSavedPhotosAlbum(image2, nil, nil, nil);
ref: http://cafe.naver.com/mcbugi/71418
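Neither method above accounts for the Retina scale factor. On iOS 4.0 and later, UIGraphicsBeginImageContextWithOptions can render at the screen's native scale; a minimal sketch (the captureViewRetina: name is just for illustration):

- (UIImage *)captureViewRetina:(UIView *)view
{
    // A scale of 0.0 means "use the device's main screen scale" (2.0 on Retina).
    UIGraphicsBeginImageContextWithOptions(view.bounds.size, NO, 0.0);
    [view.layer renderInContext:UIGraphicsGetCurrentContext()];
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return image;
}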