Web SDK
Getting Started

Getting Started with the Web SDK

Welcome to the Getting Started guide for our Web SDK! This guide will help you set up and start using the Web SDK in your web application. Two modes are supported by the AR Engine:

  1. Camera Mode - Live try-on using the live camera feed
  2. Model/Gallery Mode - Static try-on based on a model image

Follow the steps below to get started.

Getting Started in Live Mode

Step 1: Installation

To begin, install the AR Engine and Camera modules from our module collection. You can install them using npm.

npm install @mirrar-sdm/ar-engine @mirrar-sdm/camera

Step 2: Setup the camera

<!-- Container that hosts the camera feed and the AR Engine's render surface -->
<div id="ar-container">
    <!-- Video element the camera stream attaches to; width/height should match the camera resolution -->
    <video id="camera-video" width="640" height="480"></video>
</div>
import { CameraService } from '@mirrar-sdm/camera'

// Grab the DOM elements declared in the HTML snippet above.
const container = document.getElementById('ar-container')
const cameraVideo = document.getElementById('camera-video')

// Initialize the video width and height to match the camera resolution.
const videoWidth = 640
const videoHeight = 480

// Called once per camera frame; the actual processing is wired up in Step 5.
const onCameraFrame = () => {
    // here you will get each of the camera frame to process
}

// Start the camera. The returned Promise resolves once the stream is live.
CameraService.startCamera(container, cameraVideo, videoWidth, videoHeight)
    .then(response => {
        console.log("Camera started")
        CameraService.onFrame(onCameraFrame)
        const cameraCanvas = CameraService.getCanvas()

        // setup AR Engine with the dimensions of the camera feed
        setupAREngine(cameraCanvas)
    })
    .catch(error => {
        // Surface camera failures (e.g. permission denied, no device)
        // instead of leaving the rejection unhandled.
        console.error("Failed to start camera", error)
    })

Step 3: Setup & Initialize the AR Engine

import { AR_Engine } from "@mirrar-sdm/ar-engine"
import { TrackingTypeEnum } from "@mirrar-sdm/ar-engine/lib/TrackingManagers/MachineLearningTrackingManager"

/**
 * Creates an AR_Engine instance sized to the camera feed.
 *
 * @param {HTMLCanvasElement} cameraCanvas - Canvas carrying the camera frames.
 * @returns {AR_Engine} The engine instance. Keep a reference to it — the
 *     later steps (tracking setup, frame processing, products) call methods
 *     on `arEngine`, which was unreachable when it stayed local to this function.
 */
function setupAREngine(cameraCanvas) {
    const container = document.getElementById('ar-container')

    // You can set any dimensions here, but it is advised to use
    // the same dimensions as the camera feed.
    const options = {
        container: container,
        dimensions: {
            width: cameraCanvas.width,
            height: cameraCanvas.height
        }
    }

    return new AR_Engine(options)
}

Step 4: Set up Face or Hand tracking depending on the type of jewellery that you want to augment.

 
// Choose the tracking type: face (e.g. earrings) or hand (e.g. rings).
const trackingType = TrackingTypeEnum.Face // TrackingTypeEnum.Hand

// Reports the download progress of the AI model.
// Note: arrow functions use `=>`; the original `(values) -> { ... }`
// is not valid JavaScript and would throw a SyntaxError.
const progressFunction = (values) => {
    console.log("Download Progress", values)
}

// Callback that will be called after completion of Face/Hand tracking setup.
const onComplete = () => {
    console.log("Tracking setup successful")
}

arEngine.setupTracking(trackingType, progressFunction, (data) => {
    onComplete(data)
})

Step 5: Process each camera frame in AR Engine

// Invoked for every camera frame; forwards the frame to the AR Engine.
const onCameraFrame = () => {
    // Grab the canvas that holds the current camera frame.
    const frameCanvas = CameraService.getCanvas()

    // Receives the tracking/augmentation results for this frame.
    const handleResults = (results) => {
        console.log(results)
    }

    // Nothing to process until the engine has been constructed (Step 3).
    if (arEngine) {
        arEngine.processFrame(frameCanvas, handleResults)
    }
}

Step 6: Add jewellery

// Placement data controlling where and how large the jewellery renders.
const visualizationData = {
    height: 15,  // height of the earring
    xOffset: 0,  // offset on x axis
    yOffset: -4  // offset on y axis
}

// Product descriptor for the jewellery to augment.
const product = {
    id: '1',                          // SKU or any unique identifier
    bodyPartType: BodyPartTypes.ear,  // type of body part
    renderType: '2D',                 // rendering type
    asset: {
        url_2D: "image_url"
    },
    visualizationData
}

arEngine.addProduct(product)

Step 7: Remove jewellery

// Placement data controlling where and how large the jewellery renders.
const visualizationData = {
    height: 15,  // height of the earring
    xOffset: 0,  // offset on x axis
    yOffset: -4  // offset on y axis
}

// Product descriptor matching the one that was previously added.
const product = {
    id: '1',                          // SKU or any unique identifier
    bodyPartType: BodyPartTypes.ear,  // type of body part
    renderType: '2D',                 // rendering type
    asset: {
        url_2D: "image_url"
    },
    visualizationData
}

arEngine.removeProduct(product)

Getting Started in Model Mode

Step 1: Installation

To begin, you need to install the AR Engine and Gallery/Model Mode module from the collection of our modules. You can install them using npm.

npm install @mirrar-sdm/ar-engine @mirrar-sdm/gallery-mode

Step 2: Setup the Model/Gallery mode

<!-- Container that hosts the model image and the AR Engine's render surface -->
<div id="ar-container">
    <!-- Canvas the static model image is drawn onto -->
    <canvas id="modelCanvas" width="640" height="480"></canvas>
</div>
import { GalleryMode } from '@mirrar-sdm/gallery-mode'

// Canvas the model image will be rendered onto, and the image to load.
const modelCanvas = document.getElementById('modelCanvas')
const modelImageURL = 'model image url'

// Point the gallery at the model image, then initialize it on the canvas.
GalleryMode.setModelURL(modelImageURL)
GalleryMode.init(modelCanvas)

// Set up the AR Engine against the same canvas (see Step 3).
setupAREngine(modelCanvas)

Step 3: Setup & Initialize the AR Engine

import { AR_Engine } from "@mirrar-sdm/ar-engine"
import { TrackingTypeEnum } from "@mirrar-sdm/ar-engine/lib/TrackingManagers/MachineLearningTrackingManager"

/**
 * Creates an AR_Engine instance sized to the model canvas.
 *
 * @param {HTMLCanvasElement} canvas - Canvas carrying the model image.
 * @returns {AR_Engine} The engine instance. Keep a reference to it — the
 *     later steps (tracking setup, frame processing, products) call methods
 *     on `arEngine`, which was unreachable when it stayed local to this function.
 */
function setupAREngine(canvas) {
    const container = document.getElementById('ar-container')

    // You can set any dimensions here, but it is advised to use
    // the same dimensions as the model canvas.
    const options = {
        container: container,
        dimensions: {
            width: canvas.width,
            height: canvas.height
        }
    }

    return new AR_Engine(options)
}

Step 4: Set up Face or Hand tracking depending on the type of jewellery that you want to augment.

 
// Choose the tracking type: face (e.g. earrings) or hand (e.g. rings).
const trackingType = TrackingTypeEnum.Face // TrackingTypeEnum.Hand

// Reports the download progress of the AI model.
// Note: arrow functions use `=>`; the original `(values) -> { ... }`
// is not valid JavaScript and would throw a SyntaxError.
const progressFunction = (values) => {
    console.log("Download Progress", values)
}

// Callback that will be called after completion of Face/Hand tracking setup.
const onComplete = () => {
    console.log("Tracking setup successful")
}

arEngine.setupTracking(trackingType, progressFunction, (data) => {
    onComplete(data)
})

Step 5: Process gallery/model frame in AR Engine

// Process the static model frame once the engine exists;
// results are delivered to onProcessingComplete (see Step 5 of Live Mode).
if(arEngine) {
    arEngine.processFrame(canvas, onProcessingComplete)
}

Step 6: Add jewellery

// Placement data controlling where and how large the jewellery renders.
const visualizationData = {
    height: 15,  // height of the earring
    xOffset: 0,  // offset on x axis
    yOffset: -4  // offset on y axis
}

// Product descriptor for the jewellery to augment.
const product = {
    id: '1',                          // SKU or any unique identifier
    bodyPartType: BodyPartTypes.ear,  // type of body part
    renderType: '2D',                 // rendering type
    asset: {
        url_2D: "image_url"
    },
    visualizationData
}

arEngine.addProduct(product)

Step 7: Remove jewellery

// Placement data controlling where and how large the jewellery renders.
const visualizationData = {
    height: 15,  // height of the earring
    xOffset: 0,  // offset on x axis
    yOffset: -4  // offset on y axis
}

// Product descriptor matching the one that was previously added.
const product = {
    id: '1',                          // SKU or any unique identifier
    bodyPartType: BodyPartTypes.ear,  // type of body part
    renderType: '2D',                 // rendering type
    asset: {
        url_2D: "image_url"
    },
    visualizationData
}

arEngine.removeProduct(product)