app.js
const express = require('express');
require('@tensorflow/tfjs');
const toxicity = require('@tensorflow-models/toxicity');
const bodyParser = require('body-parser');

const app = express();
const port = 8088;

// The minimum prediction confidence.
const threshold = 0.9;

function loadToxicity(req, res, body) {
  toxicity.load(threshold).then(model => {
    model.classify([body.sentence]).then(predictions => {
      // `predictions` is an array of objects, one for each prediction head,
      // that contains the raw probabilities for each input along with the
      // final prediction in `match` (either `true` or `false`).
      // If neither probability exceeds the threshold, `match` is `null`.
      let message = 'Your sentence contains no toxic words';
      const toxicSentences = [];
      for (let i = 0; i < predictions.length; i++) {
        if (predictions[i].results[0].match === true) {
          message = 'Your sentence contains toxic words';
          toxicSentences.push({
            label: predictions[i].label.toUpperCase(),
            percentage: (predictions[i].results[0].probabilities[1] * 100).toFixed(2) + '%'
          });
        }
      }
      res.status(200).json({
        error: false,
        message: message,
        data: toxicSentences
      });
    });
  });
}

app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());

app.get('/', (req, res) => {
  res.send('<h1>Sentence Classify Service</h1>');
});

app.post('/sentence', (req, res) => {
  if (!req.body.sentence) {
    return res.status(400).json({
      error: true,
      message: 'The field [sentence] cannot be empty',
      data: []
    });
  }
  loadToxicity(req, res, req.body);
});

app.listen(port, () => {
  console.log(`App listening at http://localhost:${port}`);
});
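
// Example usage (a sketch, not part of the original file; the sample sentence is
// illustrative). With the server running locally on port 8088, a sentence can be
// classified by POSTing JSON to the /sentence endpoint, e.g.:
//
//   curl -X POST http://localhost:8088/sentence \
//        -H 'Content-Type: application/json' \
//        -d '{"sentence": "You are an idiot"}'
//
// The response lists each toxicity label whose `match` was true at the 0.9
// threshold, along with its probability expressed as a percentage.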