// server.js (90 lines, ~2.38 KB) — Express + @tensorflow/tfjs-node inference
// server for a MIRNet image-enhancement SavedModel.
const tf = require("@tensorflow/tfjs-node");
const express = require("express");
const Busboy = require('busboy');
const app = express();
let mirNetModel;
let modelInfo;
// Cached load promise. Caching the *promise* (not just the loaded model)
// prevents a race where two concurrent requests both see `mirNetModel`
// unset and load the SavedModel twice.
let modelLoadPromise = null;

/**
 * Loads the TensorFlow SavedModel from ./model exactly once and populates
 * the module-level `modelInfo` and `mirNetModel`. Safe to call repeatedly
 * and concurrently; all callers share the same in-flight load.
 *
 * @returns {Promise<void>} resolves when the model is ready.
 */
async function loadModel() {
  if (!modelLoadPromise) {
    modelLoadPromise = (async () => {
      modelInfo = await tf.node.getMetaGraphsFromSavedModel('./model');
      // Load the TensorFlow SavedModel through the tfjs-node API:
      // https://js.tensorflow.org/api_node/1.3.1/#node.loadSavedModel
      mirNetModel = await tf.node.loadSavedModel('./model');
    })();
    // On failure, clear the cache so a later request can retry the load
    // instead of being stuck with a permanently rejected promise.
    modelLoadPromise.catch(() => { modelLoadPromise = null; });
  }
  return modelLoadPromise;
}
// Make all the files in 'public' available.
// https://expressjs.com/en/starter/static-files.html
app.use(express.static("public"));

// Landing page. Also kicks off model loading in the background so the
// first /predict call doesn't pay the full load cost.
// https://expressjs.com/en/starter/basic-routing.html
app.get("/", (request, response) => {
  response.sendFile(__dirname + "/views/index.html");
  // Fire-and-forget warm-up: handle rejection explicitly so a failed
  // load logs instead of raising an unhandled promise rejection.
  loadModel().catch((err) => console.error('Model warm-up failed:', err));
});
/**
 * POST /predict — receive a multipart-encoded image, run it through the
 * MIRNet model, and respond with the model output plus inference time.
 * Responds 400 when no file is uploaded and 500 on inference failure.
 */
app.post('/predict', async (req, res) => {
  const busboy = new Busboy({headers: req.headers});
  // Buffer.alloc(0) instead of the deprecated, unsafe `new Buffer('')`.
  let fileBuffer = Buffer.alloc(0);
  // `req.body` is undefined under plain Express (no body parser mounted);
  // initialize it before the 'field' handler writes into it.
  req.body = req.body || {};
  req.files = {file: []};

  busboy.on('field', (fieldname, value) => {
    req.body[fieldname] = value;
  });

  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    // Reset per file so a request with multiple uploads doesn't
    // concatenate all of them into one buffer.
    fileBuffer = Buffer.alloc(0);
    file.on('data', (data) => {
      fileBuffer = Buffer.concat([fileBuffer, data]);
    });
    file.on('end', () => {
      req.files.file.push({
        fieldname,
        originalname: filename,
        encoding,
        mimetype,
        buffer: fileBuffer,
      });
    });
  });

  busboy.on('finish', async () => {
    try {
      if (req.files.file.length === 0) {
        res.status(400).send({error: 'No image file uploaded.'});
        return;
      }
      const uint8array = new Uint8Array(req.files.file[0].buffer);
      // Must be awaited: without it, `mirNetModel` can still be undefined
      // when predict() runs on the first request.
      await loadModel();
      // Decode the image into a tensor and add a batch dimension.
      const imageTensor = await tf.node.decodeImage(uint8array);
      const input = imageTensor.expandDims(0);
      const startTime = tf.util.now();
      const outputTensor = mirNetModel.predict({'x': input});
      const endTime = tf.util.now();
      // Materialize the tensor data: serializing the tensor object itself
      // would send internal bookkeeping fields, not the pixel values.
      const inference = await outputTensor.array();
      // tfjs tensors hold native memory and are not garbage collected;
      // dispose them explicitly to avoid leaking per request.
      tf.dispose([imageTensor, input, outputTensor]);
      res.send({
        inference,
        inferenceTime: endTime - startTime
      });
    } catch (err) {
      console.error('Prediction failed:', err);
      res.status(500).send({error: 'Prediction failed.'});
    }
  });

  // Stream the request body through busboy. Do NOT also call
  // busboy.end(req.rawBody): req.rawBody is undefined under plain
  // Express, and ending the parser before piping corrupts the stream.
  req.pipe(busboy);
});
// Listen for requests :) — honor the PORT environment variable; the
// original hard-coded 3001 in listen(), making the 'port' setting dead.
app.set('port', process.env.PORT || 3001);
app.listen(app.get('port'), () => {
  console.log(`Listening on port ${app.get('port')}`);
});