How to make a photo detector in Objective-C?

macos

#1

Hello! I’ve made an app that analyses the front camera stream, and I want to change it to analyse photos loaded from a path. How do I add a photo so the detector can use it?

//
// ViewController.m
// AffectiveCamera
//
// Created by Agnieszka Lisowska on 11/13/17.
// Copyright © 2017 Agnieszka Lisowska. All rights reserved.
//

#import "ViewController.h"

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    // detector
    //self.detector = [[AFDXDetector alloc] initWithDelegate:self usingCamera:AFDX_CAMERA_FRONT maximumFaces:2];

    // Try to run the detector against a photo on disk instead of the camera.
    NSString *path = [[NSString alloc] initWithFormat:@"/Users/agnieszka/Desktop/KA.DI3.44copy.png"];
    self.detector = [[AFDXDetector alloc] initWithDelegate:self usingFile:path maximumFaces:1];

    [self.detector setDetectAllEmotions:YES];
    self.detector.maxProcessRate = 5;
    [self.detector start];
}

- (void)setRepresentedObject:(id)representedObject {
    [super setRepresentedObject:representedObject];
}

- (void)detector:(AFDXDetector *)detector hasResults:(NSMutableDictionary *)faces forImage:(NSImage *)image atTime:(NSTimeInterval)time
{
    if (faces == nil) {
        // No results for this frame: just show the image.
        self.imageView.image = image;
    }
    else {
        NSArray *a = [faces allValues];
        if ([a count] > 0) {
            if ([a count] < 2) {
                // Exactly one face detected.
                AFDXFace *face = [a objectAtIndex:0];
                CGFloat joy = face.emotions.joy;
                CGFloat fear = face.emotions.fear;
                CGFloat sadness = face.emotions.sadness;
                CGFloat disgust = face.emotions.disgust;
                CGFloat anger = face.emotions.anger;
                CGFloat surprise = face.emotions.surprise;
                CGFloat contempt = face.emotions.contempt;
                NSString *result = [[NSString alloc] initWithFormat:@" Joy: %.1f\n Fear: %.1f\n Sadness: %.1f\n Disgust: %.1f\n Anger: %.1f\n Surprise: %.1f\n Contempt: %.1f", joy, fear, sadness, disgust, anger, surprise, contempt];
                self.textField.stringValue = result;
                self.textField2.stringValue = @"X";
            }
            else {
                // Two faces detected: report each one in its own text field.
                AFDXFace *face = [a objectAtIndex:0];
                CGFloat joy = face.emotions.joy;
                CGFloat fear = face.emotions.fear;
                CGFloat sadness = face.emotions.sadness;
                CGFloat disgust = face.emotions.disgust;
                CGFloat anger = face.emotions.anger;
                CGFloat surprise = face.emotions.surprise;
                CGFloat contempt = face.emotions.contempt;
                NSString *result = [[NSString alloc] initWithFormat:@" #1\n Joy: %.1f\n Fear: %.1f\n Sadness: %.1f\n Disgust: %.1f\n Anger: %.1f\n Surprise: %.1f\n Contempt: %.1f", joy, fear, sadness, disgust, anger, surprise, contempt];
                self.textField.stringValue = result;

                AFDXFace *face2 = [a objectAtIndex:1];
                CGFloat joy2 = face2.emotions.joy;
                CGFloat fear2 = face2.emotions.fear;
                CGFloat sadness2 = face2.emotions.sadness;
                CGFloat disgust2 = face2.emotions.disgust;
                CGFloat anger2 = face2.emotions.anger;
                CGFloat surprise2 = face2.emotions.surprise;
                CGFloat contempt2 = face2.emotions.contempt;
                NSString *result2 = [[NSString alloc] initWithFormat:@" #2\n Joy: %.1f\n Fear: %.1f\n Sadness: %.1f\n Disgust: %.1f\n Anger: %.1f\n Surprise: %.1f\n Contempt: %.1f", joy2, fear2, sadness2, disgust2, anger2, surprise2, contempt2];
                self.textField2.stringValue = result2;
            }
        }
    }
}

@end


#2

Hi. Please see our online documentation for how to analyse a photo from Objective-C:


#3

It doesn’t say how I should add a reference to an image.


#4

Hi, you should call ProcessImage. See the reference documentation here:

http://affectiva.github.io/developerportal/pages/platforms/v4_0_0/ios/classdocs/interface_a_f_d_x_detector.html#abbd541daa935c94a3b2af90cdd16b8a6
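

For completeness, here is a minimal sketch of what #4 suggests. It assumes a discreteImages: initializer and a processImage: selector on AFDXDetector, as listed in the linked reference; the helper name analyzePhotoAtPath: is made up for illustration, so verify the exact signatures against the documentation above. Results still arrive through the same detector:hasResults:forImage:atTime: callback shown in #1.

// Sketch only: the discreteImages: initializer and the processImage: selector
// are assumptions taken from the linked AFDXDetector reference; check the
// exact signatures there. analyzePhotoAtPath: is a hypothetical helper name.
- (void)analyzePhotoAtPath:(NSString *)path {
    // Configure the detector for discrete images instead of a camera stream.
    self.detector = [[AFDXDetector alloc] initWithDelegate:self
                                            discreteImages:YES
                                              maximumFaces:1];
    [self.detector setDetectAllEmotions:YES];
    [self.detector start];

    // Load the photo from disk and hand it to the detector.
    NSImage *photo = [[NSImage alloc] initWithContentsOfFile:path];
    if (photo != nil) {
        [self.detector processImage:photo];
    }

    // The results come back through the same
    // detector:hasResults:forImage:atTime: delegate method as before.
}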