// Complete working example for edge inference
const tf = require('@tensorflow/tfjs-node');
const fs = require('fs');
/**
 * Loads a pre-trained MNIST-style model from disk and runs a single
 * inference pass on a random 784-element input, logging the top class.
 *
 * Tries the WebGPU backend first, but falls back to the default backend:
 * under @tensorflow/tfjs-node only the native 'tensorflow' backend is
 * registered, so `tf.setBackend('webgpu')` would otherwise throw and the
 * example would never run.
 *
 * @returns resolves when inference has completed and all tensors are freed
 * @throws if the model file at ./tfjs-model/model.json cannot be loaded
 */
async function edgeInference(): Promise<void> {
  // Prefer WebGPU when available; tolerate its absence instead of crashing.
  try {
    await tf.setBackend('webgpu');
  } catch {
    console.warn('WebGPU backend unavailable; falling back to default backend');
  }
  await tf.ready();
  console.log('Active backend:', tf.getBackend());

  // Load pre-trained model (MNIST example). file:// prefix is required by
  // tfjs-node's IO handler for filesystem paths.
  const modelPath = './tfjs-model/model.json';
  const model = await tf.loadLayersModel(`file://${modelPath}`);

  // Sample input: 28x28 grayscale image flattened to a 784-vector.
  const inputData = Array.from({ length: 784 }, () => Math.random());
  const inputTensor = tf.tensor2d([inputData], [1, 784]);

  // predict() may return Tensor | Tensor[]; this model has a single output.
  const predictions = model.predict(inputTensor) as tf.Tensor;
  try {
    const results = await predictions.data();

    // Single pass argmax — avoids double Math.max(...) scans and the
    // spread-over-TypedArray call-stack cost of indexOf(Math.max(...)).
    let topPrediction = 0;
    for (let i = 1; i < results.length; i++) {
      if (results[i] > results[topPrediction]) topPrediction = i;
    }
    console.log('Top prediction:', topPrediction, 'confidence:', results[topPrediction]);
  } finally {
    // Free GPU/native memory even if data() or logging throws.
    inputTensor.dispose();
    predictions.dispose();
    model.dispose();
  }
}
// Fire-and-forget entry point; log any rejection rather than leaving it unhandled.
void edgeInference().catch((err) => console.error(err));