This repository has been archived by the owner on Apr 9, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
hearing.js
executable file
·162 lines (150 loc) · 5.02 KB
/
hearing.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
// Module state: hearingactive = hotword/recognition loop running;
// hearingprocess = recognized text is executed as a command (true) or
// written to the prompt verbatim without a trailing newline (false).
var hearingactive = false;
var hearingprocess = true;
// Google Cloud credentials and hotword settings, populated by loadconfig() below.
var api_id;
var api_file;
var sr_file;
var sr_hotword;
var sr_sensitivity;
// Read config.json now (the function declaration is hoisted, so calling it here is safe).
loadconfig();
// Colored console logging helper.
const response = require('./response.js');
// Sonus: hotword detection + streaming speech recognition.
const Sonus = require('sonus')
// Google Cloud Speech-to-Text client (speech recognition, not text-to-speech).
const speech = require('@google-cloud/speech')
const client = new speech.SpeechClient({
projectId: api_id, //'sara-245106',
keyFilename: api_file //'resources/apikeys/googlecloud.json'
})
// Hotword model file, trigger word and sensitivity (a string, e.g. '0.6') from config.
const hotwords = [{ file: sr_file, hotword: sr_hotword, sensitivity: sr_sensitivity }]
const language = 'en-US';
// Create the Sonus instance; audio is captured with ALSA's `arecord`.
const sonus = Sonus.init({ hotwords, language, recordProgram: 'arecord' }, client)
module.exports = {
recognize: function() {
const sfx = require('./sfx.js');
sonus.on('hotword', (index, keyword) => {
sfx.output('hotword');
response.conlog('hearing', 'hotword <'+keyword+'> detected', 'data');
})
sonus.on('partial-result', result => {
response.conlog('hearing', 'Partial ('+result+')', 'data');
})
sonus.on('error', error => {
response.conlog('hearing', error, 'error');
})
sonus.on('final-result', result => {
if (result) {
response.conlog('hearing', 'recognized: '+result, 'info');
result = speechparse(result);
sfx.output('command');
const prompt = require('./prompt.js');
if (hearingprocess) {
prompt.write(result+'\n');
} else {
prompt.write(result);
}
}
})
},
pause: function() {
Sonus.pause(sonus);
},
resume: async function () {
Sonus.resume(sonus);
},
cmdexecute: async function() {
if (hearingprocess == false) {
response.conlog('hearing', 'voice command execution activated', 'info');
hearingprocess = true;
var result = 'I am now able to execute voice commands';
return result;
} else {
response.conlog('hearing', 'voice command execution was already activated', 'info');
var result = 'I am already able to execute voice commands';
return result;
}
},
cmdtoprompt: async function() {
if (hearingprocess == true) {
response.conlog('hearing', 'voice command execution deactivated', 'info');
hearingprocess = false;
var result = 'I am no longer able to execute voice commands';
return result;
} else {
response.conlog('hearing', 'voice command execution was already deactivated', 'info');
var result = 'I am already unable to execute voice commands';
return result;
}
},
listen: async function() {
if (hearingactive == false) {
response.conlog('hearing', 'voice recognition activated', 'info');
hearingactive = true;
await Sonus.start(sonus)
module.exports.recognize();
var result = 'I am now able to hear you';
return result;
} else {
response.conlog('hearing', 'voice recognition was already activated', 'info');
var result = 'I am already able to hear you';
return result;
}
},
stop: async function () {
if (hearingactive == true) {
hearingactive = false;
response.conlog('hearing', 'voice recognition deactivated', 'info');
await Sonus.stop();
var result = 'I am no longer able to hear you';
return result;
} else {
response.conlog('hearing', 'voice recognition was already deactivated', 'info');
var result = 'I am already unable to hear you';
return result;
}
},
speechparse: function(text) {
text = text.replace(/stick.nl/gi, 'ZTiK.nl');
text = text.replace(/sticknl/gi, 'ZTiKnl');
text = text.replace(/stick/gi, 'ZTiK');
return text;
},
status: function () {
return hearingactive;
}
}
// Convert spoken sub-command markers into parentheses so commands can be
// nested: "sub command open"/"sub command start" -> "(" and
// "sub command close"/"sub command end" -> ")". Case-insensitive; the
// spaces inside the phrase are optional ("subcommandopen" also matches).
//
// Fix: the original patterns escaped the group paren (`\(?:open|start)`),
// which made the alternation split at top level — they matched the literal
// text "subcommand(:open" OR any bare "start"/"end" anywhere in the input,
// and never matched the intended phrase.
function speechparse(text) {
  text = text.replace(/sub\s?command\s?(?:open|start)/gi, '(');
  text = text.replace(/sub\s?command\s?(?:close|end)/gi, ')');
  return text;
}
// Populate the module-level api_* and sr_* settings from ./config.json,
// falling back to built-in defaults when the file, a section, or a key is
// missing or unreadable.
//
// Fixes vs. the original:
//  - defaults are now assigned even when config.json does not exist
//    (previously every setting stayed undefined in that case);
//  - a config without a 'hotword' section no longer throws a TypeError
//    into a silent catch (which also left sr_* undefined);
//  - load errors are logged instead of swallowed. console.error is used
//    deliberately: the `response` helper is declared with const AFTER this
//    function runs, so it is not yet available here (temporal dead zone).
function loadconfig() {
  const fs = require('fs');
  const path = './config.json';
  let configfile = null;
  try {
    if (fs.existsSync(path)) {
      configfile = require('./config.json');
    }
  } catch (err) {
    // Best-effort: a malformed config.json must not prevent startup.
    console.error('hearing: failed to load config.json:', err.message);
  }
  const gc = configfile != null ? configfile['google cloud'] : null;
  if (gc != null) {
    api_id = gc['projectid'];
    api_file = gc['file'];
  } else {
    api_id = 'sara-245106';
    api_file = './resources/apikeys/googlecloud.json';
  }
  const hw = configfile != null ? configfile['hotword'] : null;
  if (hw != null && hw['word'] != null && hw['file'] != null && hw['sensitivity'] != null) {
    sr_hotword = hw['word'];
    sr_file = hw['file'];
    sr_sensitivity = hw['sensitivity'];
  } else {
    sr_hotword = 'Sara';
    sr_file = 'resources/speechrecognition/Sarah.pmdl';
    sr_sensitivity = '0.6';
  }
}