Web SDK

Camera Mode

Camera Mode augments jewellery on a live camera feed.

Step 1: Installation

To begin, install the AR Engine and Camera modules from our module collection using npm:

npm install @mirrar-sdm/ar-engine @mirrar-sdm/camera
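Once installed, the two packages are imported as ES modules in your application code; the imports used in the steps below are:

import { CameraService } from '@mirrar-sdm/camera'   // used in Step 2
import { AR_Engine } from "@mirrar-sdm/ar-engine"    // used in Step 3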

Step 2: Set up the camera

<div id="ar-container">
    <video id="camera-video" width="640" height="480"></video>
</div>

import { CameraService } from '@mirrar-sdm/camera'
 
// get required html elements
const container = document.getElementById('ar-container')
const cameraVideo = document.getElementById('camera-video')
 
// initialize the video width and height to the camera resolution
const videoWidth = 640
const videoHeight = 480
 
// callback invoked on each camera frame
const onCameraFrame = () => {
    // here you will receive each camera frame to process (implemented fully in Step 5)
}
 
// use this method to start the camera via the CameraService
CameraService.startCamera(container, cameraVideo, videoWidth, videoHeight).then(response => {
    console.log("Camera started")
    CameraService.onFrame(onCameraFrame)
    const cameraCanvas = CameraService.getCanvas()
 
    // set up the AR Engine with the dimensions of the camera feed
    setupAREngine(cameraCanvas)
}).catch(error => {
    console.error("Unable to start camera", error)
})
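Before calling CameraService.startCamera, you may want to check that the browser can provide a camera stream at all. The check below is a minimal sketch that relies only on standard browser APIs (navigator.mediaDevices); it is not part of the SDK.

// Optional pre-check using standard browser APIs only (not part of the SDK)
async function canUseCamera() {
    if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
        return false
    }
    try {
        // requesting a short-lived stream surfaces the permission prompt early
        const stream = await navigator.mediaDevices.getUserMedia({ video: true })
        stream.getTracks().forEach(track => track.stop())
        return true
    } catch (error) {
        console.error("Camera unavailable or permission denied", error)
        return false
    }
}

If canUseCamera() resolves to false, you can show your own fallback UI instead of starting Camera Mode.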

Step 3: Set up and initialize the AR Engine

import { AR_Engine } from "@mirrar-sdm/ar-engine"
import { TrackingTypeEnum } from "@mirrar-sdm/ar-engine/lib/TrackingManagers/MachineLearningTrackingManager"

// keep a reference to the AR Engine so the later steps can use it
let arEngine

function setupAREngine(cameraCanvas) {
    const container = document.getElementById('ar-container')

    // You can set any dimensions here, but it is advised to
    // match the AR Engine dimensions to the camera feed
    const options = {
        container: container,
        dimensions: {
            width: cameraCanvas.width,
            height: cameraCanvas.height
        }
    }
    arEngine = new AR_Engine(options)
}

Step 4: Set up Face or Hand Tracking, depending on the type of jewellery you want to augment.

 
// choose tracking type as face or hand
const trackingType = TrackingTypeEnum.Face // TrackingTypeEnum.Hand
 
// callback that reports the download progress of the AI model
const progressFunction = (values) => {
    console.log("Download Progress", values)
}
 
// callback that will be called after completion of Face/Hand Tracking Setup
const onComplete = () => {
    console.log("Tracking setup successfull")
}
 
arEngine.setupTracking(trackingType, progressFunction, onComplete)
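If your catalogue mixes face-worn and hand-worn jewellery, the tracking type can be derived from the product category before calling setupTracking. The helper below is an illustrative sketch: the category names are assumptions, and only TrackingTypeEnum, arEngine.setupTracking, progressFunction and onComplete come from the code shown above.

// Illustrative helper: pick the tracking type from a product category.
// The category strings are assumptions for this example.
function trackingTypeForCategory(category) {
    const faceWorn = ['earrings', 'necklace', 'nosepin']
    return faceWorn.includes(category) ? TrackingTypeEnum.Face : TrackingTypeEnum.Hand
}

arEngine.setupTracking(trackingTypeForCategory('earrings'), progressFunction, onComplete)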

Step 5: Process each camera frame in the AR Engine

// callback invoked on each camera frame (full version of the stub from Step 2)
const onCameraFrame = () => {
    // each camera frame is available here as a canvas to process
    const canvas = CameraService.getCanvas()

    const onProcessingComplete = (results) => {
        console.log(results)
    }

    if (arEngine) {
        arEngine.processFrame(canvas, onProcessingComplete)
    }
}
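On slower devices, processing a frame may take longer than the interval between camera frames. A simple way to avoid queueing work is to skip incoming frames while the previous one is still being processed. The variant below is a sketch built only on the calls shown above; isProcessing is a plain flag introduced here for illustration.

// Variant of the frame callback that skips frames while one is still in flight.
// isProcessing is an illustrative flag, not part of the SDK.
let isProcessing = false

const onCameraFrame = () => {
    if (!arEngine || isProcessing) {
        return
    }
    isProcessing = true
    const canvas = CameraService.getCanvas()
    arEngine.processFrame(canvas, (results) => {
        isProcessing = false
        console.log(results)
    })
}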