Scaling iOS CIDetector (face detection) coordinates from an aspectRatioThumbnail onto the fullScreenImage. (Please ignore the large block of code below — it is included only as a reference/resource in case someone else wants to experiment with it.)
CoreImage's face-detection routine naturally runs faster on smaller images, so I investigated generating the face data from the small thumbnail and then scaling it up for drawing over the fullScreenImage representation. The reason for doing this is that I have 20–30 images to process and want to cut down the total processing time.
This may be simple math, but I am getting inaccurate results when trying to map a point from one image onto the other.
90 x 120 image — rightEyePosition at CGPoint (64, 50)
480 x 640 image — rightEyePosition at CGPoint (331, 303)
Scaling math: (480 / 90) * 64 = 341.333 — but shouldn't that come out to 331? Am I doing something wrong?
Update — test results added below. The face-data results differ slightly because the image resolutions differ: the detector runs independently on each bitmap, so there is no exactly scalable relationship between the results. That makes sense. Still, I wonder: is my scaling math above wrong?
Using CIDetectorAccuracyHigh
useImageOptions: 0
------------ aspectRatioThumbnail 90.000000 120.000000 orientation: 0
2013-01-18 12:33:30.378 SeqMeTestBed[9705:907] aspectRatioThumbnail: features {
bounds = "{{23, 16}, {56, 56}}";
hasLeftEyePosition = 1;
hasMouthPosition = 1;
hasRightEyePosition = 1;
leftEyePosition = "{43, 59}";
mouthPosition = "{51, 31}";
rightEyePosition = "{64, 50}";
}
------------ fullScreenImage 480.000000 640.000000 orientation: 0
2013-01-18 12:33:33.029 SeqMeTestBed[9705:907] fullScreenImage: features {
bounds = "{{135, 81}, {298, 298}}";
hasLeftEyePosition = 1;
hasMouthPosition = 1;
hasRightEyePosition = 1;
leftEyePosition = "{228, 321}";
mouthPosition = "{290, 156}";
rightEyePosition = "{331, 303}";
}
------------ fullResolutionImage 640.000000 480.000000 orientation: 0
2013-01-18 12:33:35.745 SeqMeTestBed[9705:907] fullResolutionImage: features {
bounds = "{{195, 105}, {366, 366}}";
hasLeftEyePosition = 1;
hasMouthPosition = 1;
hasRightEyePosition = 1;
leftEyePosition = "{356, 411}";
mouthPosition = "{350, 201}";
rightEyePosition = "{455, 400}";
// Code //
// Runs CoreImage face detection on the three ALAsset representations
// (aspectRatioThumbnail, fullScreenImage, fullResolutionImage) and logs
// the detected features for side-by-side comparison.
//
// NOTE(review): CIDetector results are not guaranteed to scale linearly
// between resolutions of the same image — the detector runs independently
// on each bitmap, so feature positions may differ slightly from a simple
// (targetWidth / sourceWidth) scaling of the smaller image's result.
- (void)detectFacialFeatures
{
    NSDictionary *detectorOptions = @{CIDetectorAccuracy : CIDetectorAccuracyHigh};
    CIDetector *faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                                  context:nil
                                                  options:detectorOptions];

    // BUGFIX: the two branches below (in the helper) were previously inverted:
    // useImageOptions == YES called -featuresInImage: WITHOUT options, and
    // useImageOptions == NO passed the options dictionary. The flag now means
    // what it says. It defaults to YES so the effective behavior (orientation
    // and accuracy options ARE passed) matches the original runs logged above,
    // and the printed flag value is no longer misleading.
    BOOL useImageOptions = YES;
    printf("Using CIDetectorAccuracyHigh \n");
    printf("useImageOptions: %d\n", useImageOptions);

    [self logFeaturesWithDetector:faceDetector
                            image:[[UIImage alloc] initWithCGImage:self.asset.aspectRatioThumbnail]
                            label:"aspectRatioThumbnail"
                  useImageOptions:useImageOptions];

    [self logFeaturesWithDetector:faceDetector
                            image:[[UIImage alloc] initWithCGImage:self.asset.defaultRepresentation.fullScreenImage]
                            label:"fullScreenImage"
                  useImageOptions:useImageOptions];

    [self logFeaturesWithDetector:faceDetector
                            image:[[UIImage alloc] initWithCGImage:self.asset.defaultRepresentation.fullResolutionImage]
                            label:"fullResolutionImage"
                  useImageOptions:useImageOptions];
}

// Runs the face detector on a single image and logs the resulting feature
// dictionary. `label` is a C string used only in the log output.
// When useImageOptions is YES, the image orientation and high-accuracy
// options are passed to -featuresInImage:options:.
- (void)logFeaturesWithDetector:(CIDetector *)faceDetector
                          image:(UIImage *)image
                          label:(const char *)label
                useImageOptions:(BOOL)useImageOptions
{
    NSNumber *orientation = @(image.imageOrientation);
    // BUGFIX: -integerValue returns NSInteger; %d is wrong on 64-bit — use %ld
    // with an explicit (long) cast.
    printf("------------ %s %f %f orientation: %ld\n", label,
           image.size.width, image.size.height, (long)[orientation integerValue]);

    CIImage *ciImage = [CIImage imageWithCGImage:image.CGImage];
    if (ciImage == nil) printf("----------!!!%s: ciImage is nil \n", label);

    NSArray *features;
    if (useImageOptions) {
        NSDictionary *imageOptions = @{CIDetectorImageOrientation : orientation,
                                       CIDetectorAccuracy : CIDetectorAccuracyHigh};
        features = [faceDetector featuresInImage:ciImage options:imageOptions];
    } else {
        features = [faceDetector featuresInImage:ciImage];
    }

    NSLog(@"%s: features %@", label, [self convertFeaturesToDictionary:features]);
}
// Converts the first detected face (if any) in foundFaces into a dictionary
// of loggable values. Only the first CIFaceFeature is used; additional faces
// are ignored. Returns an empty (never nil) mutable dictionary when no face
// was found.
- (NSMutableDictionary *)convertFeaturesToDictionary:(NSArray *)foundFaces
{
    NSMutableDictionary *faceFeatures = [[NSMutableDictionary alloc] init];
    if (foundFaces.count) {
        CIFaceFeature *face = foundFaces.firstObject;

        faceFeatures[@"hasMouthPosition"] = @(face.hasMouthPosition);
        faceFeatures[@"hasLeftEyePosition"] = @(face.hasLeftEyePosition);
        faceFeatures[@"hasRightEyePosition"] = @(face.hasRightEyePosition);
        faceFeatures[@"bounds"] = NSStringFromCGRect(face.bounds);

        // BUGFIX: the original guards tested the NSNumber wrapper objects,
        // which are always non-nil and therefore always truthy — the positions
        // were recorded even when the detector reported them as absent.
        // Test the underlying BOOLs instead.
        if (face.hasMouthPosition) {
            faceFeatures[@"mouthPosition"] = NSStringFromCGPoint(face.mouthPosition);
        }
        if (face.hasLeftEyePosition) {
            faceFeatures[@"leftEyePosition"] = NSStringFromCGPoint(face.leftEyePosition);
        }
        if (face.hasRightEyePosition) {
            faceFeatures[@"rightEyePosition"] = NSStringFromCGPoint(face.rightEyePosition);
        }
    }
    return faceFeatures;
}