-
Notifications
You must be signed in to change notification settings - Fork 0
/
app.js
116 lines (93 loc) · 3.33 KB
/
app.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
'use strict';
var express = require('express'),
app = express(),
request = require('request'),
path = require('path'),
bluemix = require('./config/bluemix'),
validator = require('validator'),
watson = require('watson-developer-cloud'),
extend = require('util')._extend,
fs = require('fs'),
multer = require('multer');
// Multer disk storage: uploads land in ./uploads/, each file prefixed with a
// timestamp so concurrent uploads of the same filename never collide.
var diskStorage = multer.diskStorage({
  destination: function (req, file, done) {
    done(null, './uploads/');
  },
  filename: function (req, file, done) {
    var uniqueName = Date.now() + '-' + file.originalname;
    done(null, uniqueName);
  }
});
var upload = multer({ storage: diskStorage });
// Bootstrap application settings
require('./config/express')(app);

// If Bluemix credentials exist (VCAP_SERVICES), they override the local values.
// SECURITY: the fallback literals below are credentials committed to source
// control — rotate them and supply real values via environment variables.
var visualRecognitionCredentials = extend({
  version: 'v1',
  url: "https://gateway-a.watsonplatform.net/visual-recognition/api",
  api_key: process.env.VISUAL_RECOGNITION_API_KEY || "bba89ab8e38a92175bf737667fa13cd7d330fe0c"
}, bluemix.getServiceCreds('watson_vision_combined')); // VCAP_SERVICES

// Log only the non-secret part of the configuration — never the api_key.
console.log("visualRecognition url: " + visualRecognitionCredentials.url);

// Create the service wrapper
var visualRecognition = watson.watson_vision_combined(visualRecognitionCredentials);

// For local development set TEXT_TO_SPEECH_USERNAME / TEXT_TO_SPEECH_PASSWORD
// in the environment instead of editing the fallbacks here.
var textToSpeech = watson.text_to_speech({
  version: 'v1',
  url: 'https://stream.watsonplatform.net/text-to-speech/api',
  username: process.env.TEXT_TO_SPEECH_USERNAME || 'a45c75fe-638e-4c34-b583-d1629a78aa49',
  password: process.env.TEXT_TO_SPEECH_PASSWORD || 'nU3NhjGtWLIH'
});
// GET /api/synthesize — stream synthesized speech back to the client.
// The query string (voice, text, accept, download, ...) is forwarded
// unchanged to the Text to Speech service; the resulting audio stream is
// piped straight into the HTTP response.
app.get('/api/synthesize', function(req, res, next) {
  // e.g. {"voice":"en-US_MichaelVoice","text":"...","download":"true","accept":"audio/flac"}
  var audioStream = textToSpeech.synthesize(req.query);

  // Handlers are attached BEFORE piping so no event is missed.
  // If the caller passed any `download` value, serve the audio as a file
  // attachment rather than playing it inline.
  audioStream.on('response', function(serviceResponse) {
    if (req.query.download) {
      serviceResponse.headers['content-disposition'] =
        'attachment; filename=transcript.flac';
    }
  });

  // Hand stream failures to the Express error handler.
  audioStream.on('error', function(error) {
    console.log("error " + error);
    next(error);
  });

  audioStream.pipe(res);
});
// POST /api/visual-recognition — classify an image supplied either as a
// multipart upload ("image" field), a remote URL, or a bundled local image
// path ("images/...") and return the service's JSON result.
app.post('/api/visual-recognition', upload.single('image'), function(req, res, next) {
  // Classifiers are 0 = all or a json = {label_groups:['<classifier-name>']}
  var classifier = req.body.classifier || '0'; // All
  if (classifier !== '0') {
    classifier = JSON.stringify({label_groups:[classifier]});
  }

  // Resolve the image source, in priority order.
  var imgFile;
  if (req.file) {
    // file image — written to disk by multer
    imgFile = fs.createReadStream(req.file.path);
  } else if(req.body.url && validator.isURL(req.body.url)) {
    // web image — strip any query string before fetching
    imgFile = request(req.body.url.split('?')[0]);
  } else if (req.body.url && req.body.url.indexOf('images') === 0) {
    // local image shipped with the app under public/
    imgFile = fs.createReadStream(path.join('public', req.body.url));
  } else {
    // malformed url
    return next({ error: 'Malformed URL', code: 400 });
  }

  var formData = {
    labels_to_check: classifier,
    image_file: imgFile
  };

  visualRecognition.recognize(formData, function(err, result) {
    // Delete the uploaded temp file once recognition finishes.
    // BUG FIX: fs.unlink() requires a callback (calling it without one
    // throws a TypeError on modern Node and would crash the process here).
    // A failed delete is logged but must not fail the request.
    if (req.file) {
      fs.unlink(req.file.path, function(unlinkErr) {
        if (unlinkErr)
          console.log('could not delete ' + req.file.path + ': ' + unlinkErr);
      });
    }
    if (err)
      return next(err);
    return res.json(result);
  });
});
// error-handler settings
require('./config/error-handler')(app);

// Bind to the Bluemix-assigned port when present, otherwise 3000 locally.
var serverPort = process.env.VCAP_APP_PORT || 3000;
app.listen(serverPort);
console.log('listening at:', serverPort);