iOS's Core Image framework has a built-in face detection API. Detection accuracy is mediocre; profile faces in particular are basically never detected. Still, compared with other products of its kind, it holds its own. It is simple to use:
CIImage* image = [CIImage imageWithCGImage:aImage.CGImage];
NSDictionary *opts = [NSDictionary dictionaryWithObject:CIDetectorAccuracyHigh
                                                 forKey:CIDetectorAccuracy];
CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                          context:nil
                                          options:opts];
// Get the face data
NSArray* features = [detector featuresInImage:image];
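One detail the snippet above glosses over: photos taken by the camera usually carry an EXIF orientation, and the detector can miss faces in them unless it is told how the image is rotated. A minimal sketch, assuming the same aImage and detector as above, using the standard UIImageOrientation-to-EXIF mapping:

// Map UIImageOrientation to the EXIF value that
// CIDetectorImageOrientation expects (1 = up, 3 = down, 6 = right, 8 = left).
int exifOrientation;
switch (aImage.imageOrientation) {
    case UIImageOrientationUp:    exifOrientation = 1; break;
    case UIImageOrientationDown:  exifOrientation = 3; break;
    case UIImageOrientationLeft:  exifOrientation = 8; break;
    case UIImageOrientationRight: exifOrientation = 6; break;
    default:                      exifOrientation = 1; break;
}
NSArray* features = [detector featuresInImage:image
                                      options:@{CIDetectorImageOrientation: @(exifOrientation)}];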
The resulting features array holds all of the detected face data; the positions can be read out like this:
for (CIFaceFeature *f in features) {
    CGRect aRect = f.bounds;
    NSLog(@"%f, %f, %f, %f", aRect.origin.x, aRect.origin.y,
          aRect.size.width, aRect.size.height);
    // Positions of the eyes and mouth
    if (f.hasLeftEyePosition)
        NSLog(@"Left eye %g %g\n", f.leftEyePosition.x, f.leftEyePosition.y);
    if (f.hasRightEyePosition)
        NSLog(@"Right eye %g %g\n", f.rightEyePosition.x, f.rightEyePosition.y);
    if (f.hasMouthPosition)
        NSLog(@"Mouth %g %g\n", f.mouthPosition.x, f.mouthPosition.y);
}
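One gotcha worth noting: Core Image reports these rectangles and points in its own coordinate space, whose origin is at the bottom-left of the image, whereas UIKit puts the origin at the top-left. Drawing f.bounds directly over a view therefore shows the faces vertically mirrored. A minimal sketch of the flip, to be used on each CIFaceFeature *f inside the loop above, assuming the image is displayed unscaled (the imageHeight variable is just the source image's height in points):

// Flip a Core Image rect (bottom-left origin) into UIKit's
// top-left-origin coordinate space before drawing it over a view.
CGFloat imageHeight = aImage.size.height; // assumes the image is shown at 1:1 scale
CGAffineTransform flip = CGAffineTransformMakeScale(1, -1);
flip = CGAffineTransformTranslate(flip, 0, -imageHeight);
CGRect faceRectInUIKit = CGRectApplyAffineTransform(f.bounds, flip);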