Software Archive
Read-only legacy content

Face Tracking using Processing Language

Alberto_P_
Beginner

Is there any face tracking example or demo using Processing language? (there's only a hand tracking sample)

Xusheng_L_Intel
Employee

We will provide such sample code in the future. In the meantime, you can use our Java face tracking sample as a reference. You can find it at C:\Program Files (x86)\Intel\RSSDK\framework\Java\face_tracking_java. Thanks!
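
That sample follows the higher-level PXCMSenseManager pipeline rather than a manually driven CaptureManager. The outline below is only a sketch of that flow: the class name FaceTrackingOutline is made up, and the method names/signatures are assumed to mirror the C#/C++ SenseManager API (CreateInstance, EnableFace, QueryFace, Init, AcquireFrame, ReleaseFrame), so please check the installed sample source for the exact calls.

import intel.rssdk.*;

public class FaceTrackingOutline {
  public static void main(String[] args) {
    // Create the pipeline and enable the face module (names assumed from the C#/C++ API).
    PXCMSenseManager senseManager = PXCMSenseManager.CreateInstance();
    senseManager.EnableFace(null);

    // Configure color-based face detection before initializing the pipeline.
    PXCMFaceModule faceModule = senseManager.QueryFace();
    PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
    faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR);
    faceConfig.detection.isEnabled = true;
    faceConfig.ApplyChanges();

    if (senseManager.Init().compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR) < 0) {
      System.out.println("Failed to initialize the pipeline");
      return;
    }

    for (int nframes = 0; nframes < 300; nframes++) {
      // Block until a synchronized frame is ready.
      if (senseManager.AcquireFrame(true).compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR) < 0)
        break;

      // Pull the face results for this frame and print each bounding box.
      PXCMFaceData faceData = faceModule.CreateOutput();
      faceData.Update();
      for (int fidx = 0; ; fidx++) {
        PXCMFaceData.Face face = faceData.QueryFaceByIndex(fidx);
        if (face == null) break;
        PXCMFaceData.DetectionData detection = face.QueryDetection();
        if (detection == null) continue;
        PXCMRectI32 rect = new PXCMRectI32();
        if (detection.QueryBoundingRect(rect))
          System.out.println("Face at (" + rect.x + "," + rect.y + "), " + rect.w + "x" + rect.h);
      }
      faceData.close();
      senseManager.ReleaseFrame();
    }
    senseManager.Close();
  }
}

The SenseManager pipeline selects the stream profiles for you, so the manual CaptureManager/QueryCaptureProfile enumeration used elsewhere is only needed when you want to choose them yourself.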

Alberto_P_
Beginner

The Java face tracking demo doesn't work in Processing.

It seems PXCMVideoModule.DataDesc.streams.color cannot be accessed from Java/Processing (see the note after the code below):

import intel.rssdk.*;
import java.util.*;

void setup()
{
  // Create session
  PXCMSession session = PXCMSession.CreateInstance();
  if (session == null) {
    println("Failed to create a session instance");
    return;
  }
  pxcmStatus sts = pxcmStatus.PXCM_STATUS_NO_ERROR;
  // Initialize Face Analyzer
  PXCMSession.ImplDesc desc = new PXCMSession.ImplDesc();
  desc.subgroup = EnumSet.of(PXCMSession.ImplSubgroup.IMPL_SUBGROUP_FACE_ANALYSIS); 
  desc.friendlyName = "Face 2D/3D Analysis (Intel)";
  PXCMFaceModule faceModule = new PXCMFaceModule(0, true);
  sts = session.CreateImpl(desc, faceModule);
  if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0) {
    println("Failed to create a Face3D instance");
    return;
  }
        
  // Create CaptureManager instance
  PXCMCaptureManager captureMgr = session.CreateCaptureManager();
  captureMgr.FilterByDeviceInfo("RealSense", null, 0);
  
  // Retrieve the input requirements
  sts = pxcmStatus.PXCM_STATUS_DATA_UNAVAILABLE; 
  PXCMFaceConfiguration faceConfig = faceModule.CreateActiveConfiguration();
  faceConfig.SetTrackingMode(PXCMFaceConfiguration.TrackingModeType.FACE_MODE_COLOR);
  faceConfig.detection.isEnabled = true; 
  faceConfig.landmarks.isEnabled = true; 
  faceConfig.pose.isEnabled = true; 
  faceConfig.ApplyChanges();
  faceConfig.Update();
  PXCMVideoModule videoModule = (PXCMVideoModule) faceModule.QueryInstance(PXCMVideoModule.CUID);  
  
  // Enumerate capture profiles until one can be requested and locked
  for ( int i=0; ; i++ )
  {
     PXCMVideoModule.DataDesc ddesc = new PXCMVideoModule.DataDesc();
     sts = videoModule.QueryCaptureProfile(i, ddesc);
     if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0) {
       println("Failed to QueryProfile of the Face3D module.");
       return;
     }
     ddesc.streams.color.sizeMax.height = 480; // ERROR (unexpected token)
     ddesc.streams.color.sizeMax.width = 640; 
     sts = captureMgr.RequestStreams(1, ddesc);  
     if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0)
         continue;      
     sts = captureMgr.LocateStreams();
     if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0)
         continue;
     sts = videoModule.SetCaptureProfile(ddesc); 
     if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0)
         continue;
     else 
         break;
  }        
   
  faceConfig.detection.isEnabled = true; 
  faceConfig.landmarks.isEnabled = true; 
  faceConfig.pose.isEnabled = true; 
  faceConfig.ApplyChanges();
  faceConfig.Update();

  PXCMCapture.Sample sample = new PXCMCapture.Sample(); 
  PXCMFaceData faceData = null; 
  
  // Main acquisition loop: read color frames and run face analysis
  for ( int nframes=0; nframes<30000; nframes++ )
  {
      sample.color = new PXCMImage();
      PXCMSyncPoint sp0 = new PXCMSyncPoint(0, true);
      PXCMSyncPoint sp1 = new PXCMSyncPoint(0, true);
      
      sts = captureMgr.ReadModuleStreamsAsync(1, sample, sp0);   
      if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0) {
          println("Failed to read images");
          return;
      }  
     
      sts = sp0.Synchronize();
      if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0) {
          println("Failed to Synchronize");
          return;
      }            
      
      sts = videoModule.ProcessImageAsync(sample, sp1);
      if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0) {
          println ("Failed to invoke ProcessImageAsync");
          return;
      }
     
      sts = sp1.Synchronize();
      if (sts.compareTo(pxcmStatus.PXCM_STATUS_NO_ERROR)<0) {
          println("Failed to Synchronize");
          return;
      }
      
      // Retrieve the face analysis results for this frame
      faceData = faceModule.CreateOutput();
      faceData.Update();
      
      // Read and print data 
      for ( int fidx=0; ; fidx++ )
      {
          PXCMFaceData.Face face = faceData.QueryFaceByIndex(fidx);
          if (face==null) break;
          
          PXCMFaceData.DetectionData detectData = face.QueryDetection(); 
        
          if (detectData != null)
          {
              PXCMRectI32 rect = new PXCMRectI32();
              boolean ret = detectData.QueryBoundingRect(rect);
              if (ret) {
                  println("Detection Rectangle at frame #" + nframes); 
                  println("Top Left corner: (" + rect.x + "," + rect.y + ")" ); 
                  println("Height: " + rect.h + " Width: " + rect.w); 
              }
          } else 
              break;
          
          PXCMFaceData.PoseData poseData = face.QueryPose();
          if (poseData != null)
          {
              PXCMFaceData.PoseEulerAngles pea = new PXCMFaceData.PoseEulerAngles();
              poseData.QueryPoseAngles(pea);
              println("Pose Data at frame #" + nframes); 
              println("(Roll, Yaw, Pitch) = (" + pea.roll + "," + pea.yaw + "," + pea.pitch + ")"); 
          }  
      }  
      faceData.close();
      sample.ReleaseImages();
      sp0.close();
      sp1.close();
  }
}
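
My guess is that the "unexpected token" is a clash with Processing's color keyword rather than a missing field: the .pde preprocessor treats color as a type name, so it refuses the ddesc.streams.color field access (and presumably the sample.color assignment further down) even though the field exists in the Java bindings. One possible workaround (untested, and the helper class name below is made up) would be to move those accesses into a plain .java tab, which Processing compiles as ordinary Java without preprocessing:

// StreamSetup.java — a separate .java tab in the sketch folder (hypothetical name).
// Pure-Java tabs skip the Processing preprocessor, so a field named 'color'
// can be referenced here without the "unexpected token" error.
import intel.rssdk.*;

public class StreamSetup {
  // Set the maximum color stream size on a DataDesc in place.
  public static void setColorSize(PXCMVideoModule.DataDesc ddesc, int width, int height) {
    ddesc.streams.color.sizeMax.width  = width;
    ddesc.streams.color.sizeMax.height = height;
  }

  // Attach a fresh image holder to the sample's color slot before reading streams.
  public static void prepareColor(PXCMCapture.Sample sample) {
    sample.color = new PXCMImage();
  }
}

In the sketch, the two problem lines would then become StreamSetup.setColorSize(ddesc, 640, 480); and StreamSetup.prepareColor(sample);.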

 
