node.js stream output of command on remote windows server - javascript

I am using node.js (v10.15.3) to execute command on remote windows 8 server and want to get output of command in console. I am using below code,
var Promise = require('promise');
var exec = require('child_process').exec;
// Wrap an already-started ChildProcess in a promise.
// Resolves with the first "exit" event's argument (the exit code) and
// rejects with the "error" event's argument (e.g. spawn failure).
function promiseFromChildProcess(command) {
  const settleOnTermination = function (resolve, reject) {
    command.addListener("error", reject);
    command.addListener("exit", resolve);
  };
  return new Promise(settleOnTermination);
}
// Launch PsExec64 against the remote host and mirror the child's output and
// lifecycle on the local console.
var command = exec('PsExec64 \\\\XXX.XXX.XX.XX -u user -p PASSWORD cmd /c typeperf "\\Processor(_Total)\\% Processor Time" -sc 10');
// Log when the child terminates (resolves with the exit code) or errors.
promiseFromChildProcess(command).then(function (result) {
  console.log('promise complete: ' + result);
}, function (err) {
  console.log('promise rejected: ' + err);
});
// NOTE(review): PsExec is known to write the remote program's output to its
// own console handle rather than the redirected stdout pipe in some modes,
// which would explain why nothing appears below — verify against the
// Sysinternals PsExec documentation.
command.stdout.on('data', function (data) {
  console.log('stdout: ' + data);
});
command.stderr.on('data', function (data) {
  console.log('stderr: ' + data);
});
command.on('close', function (code) {
  console.log('closing code: ' + code);
});
When I execute this, it runs successfully and exits with no error, but nothing is displayed in stdout. Please help me resolve this, as I have been struggling with this issue for days.
Many thanks in advance.

Related

Why does javascript ssh2 get stuck in infinite loop when using shell

I am trying to get my application to execute a sudo command on another system by utilizing the ssh2 npm package. I have the end result working, but for some reason the code is getting stuck in an infinite loop. I don't understand how this is possible as I thought that closing the connection with conn.end would prevent this from happening. Can anyone help shed some light on why I am getting stuck in an infinite loop here?
Primary Code being executed:
// Runs `command` with sudo on a remote host over an ssh2 shell session.
// NOTE(review): this is the buggy version described in the question above —
// the 'data' handler re-sends input on every output chunk, producing the
// endless loop shown in the transcript below.
function sshRunSudoCommand(user, pass, lanIP, command) {
  return new Promise((resolve, reject) => {
    var Connection = require('ssh2');
    var pwSent = false;  // NOTE(review): never read or updated below
    var sudosu = false;  // NOTE(review): never read or updated below
    var password = pass;
    var conn = new Connection();
    conn.on('ready', function() {
      console.log('Sudo Connection :: ready');
      conn.shell( function(err, stream) {
        if (err) throw err;
        stream
        .on('close', function() {
          console.log('Sudo Stream :: close');
          conn.end();
          resolve();
        })
        .on('data', function(data) {
          //handle sudo password prompt
          // This fires for EVERY chunk of shell output; each write below
          // produces more output, which fires this handler again — that
          // feedback loop is the repeating behavior reported above.
          stream.write(password + '\n');
          stream.write(command + '\n');
          console.log('Sudo STDOUT: ' + data);
          // resolve() is called once per chunk; only the first call has any
          // effect — later calls are ignored by the Promise.
          resolve(data);
        })
        .on('exit', function(code, signal) {
          let exitCode = 'Stream :: exit :: code: ' + code + ', signal: ' + signal;
          conn.end();
          resolve(exitCode);
        })
        .on('end', function() {
          conn.end();
          resolve('end');
        })
        .stderr.on('data', function(data) {
          console.log('STDERR: ' + data);
          conn.end();
          resolve(data);
        });
      });
    })
    .on('keyboard-interactive',
      function(name, instructions, instructionsLang, prompts, finish) {
        // Pass answers to `prompts` to `finish()`. Typically `prompts.length === 1`
        // with `prompts[0] === "Password: "`
        console.log('pass before finish is ', pass);
        finish([pass]);
      }).on('ready', function() {
    }).connect({
      host: lanIP,
      port: 22,
      username: user,
      password: pass,
      readyTimeout: 99999,
      tryKeyboard: true // this attempts keyboard-interactive auth
    });
  // NOTE(review): the next line appears to be missing a ")" — it should
  // probably read "});" to close the `new Promise(` call.
  };
}
Result: (Repeated over and over forever)
Sudo STDOUT: activedash:~ activeadmin$ s
Sudo STDOUT: udo
Sudo STDOUT: profiles -I -
Sudo STDOUT: F /Users/S
Sudo STDOUT: hared/Sym
Sudo STDOUT: ply/activ
Sudo STDOUT: e-uuid.
onfigpr
Sudo STDOUT: ofile
Sudo STDOUT: active
sudo profiles -I -F /Users/Shared/Symply/active-uuid.configprofile
active
sudo profiles -I -F /Users/Shared/Symply/active-uuid.configprofile
active
sudo profiles -I -F /Users/Shared/Symply/active-uuid.configprofile
active
sudo profiles -I -F /Users/Shared/Symply/active-uuid.configprofile
active
sudo profiles
So what I found was there was 2 things I needed to do to prevent the repeating action.
I needed to add an 'exit' command to the stream.write calls to tell the shell to end the session I had created with conn.shell.
Then I could move my resolve() callback down to the .on('exit') section of the code so that it only comes back after the exit command has run successfully.
The final code looks like this:
// Runs `command` with sudo on `lanIP` over SSH using the ssh2 package.
// Sends the password, the command, and then `exit`, so the remote shell
// session terminates and the promise settles (via 'exit'/'close'/'end').
// Resolves with a status string or stream data; rejects only via ssh2 errors.
function sshRunSudoCommand(user, pass, lanIP, command) {
  return new Promise((resolve, reject) => {
    var Connection = require('ssh2');
    var password = pass;
    var conn = new Connection();
    conn.on('ready', function() {
      console.log('Sudo Connection :: ready');
      conn.shell(function(err, stream) {
        if (err) throw err;
        stream
        .on('close', function() {
          console.log('Sudo Stream :: close');
          conn.end();
          resolve();
        })
        .on('data', function(data) {
          //handle sudo password prompt
          // Writing `exit` ends the shell session, so the stream closes
          // instead of looping on new output forever.
          stream.write(password + '\n');
          stream.write(command + '\n');
          stream.write('exit\n');
        })
        .on('exit', function(code, signal) {
          let exitCode = 'Stream :: exit :: code: ' + code + ', signal: ' + signal;
          conn.end();
          resolve(exitCode);
        })
        .on('end', function() {
          conn.end();
          resolve('end');
        })
        .stderr.on('data', function(data) {
          console.log('STDERR: ' + data);
          conn.end();
          resolve(data);
        });
      });
    })
    .on('keyboard-interactive',
      function(name, instructions, instructionsLang, prompts, finish) {
        // Pass answers to `prompts` to `finish()`. Typically `prompts.length === 1`
        // with `prompts[0] === "Password: "`
        console.log('pass before finish is ', pass);
        finish([pass]);
      }).on('ready', function() {
      // (empty duplicate 'ready' handler kept from the original)
    }).connect({
      host: lanIP,
      port: 22,
      username: user,
      password: pass,
      readyTimeout: 99999,
      tryKeyboard: true // this attempts keyboard-interactive auth
    });
  }); // BUG FIX: was `};` — the `)` closing `new Promise(` was missing.
}

How to get specific start & end time in ffmpeg by Node JS?

I want to cut a video at a specific start & end time & save it. I can't figure out how to cut a specific time range from that video using Node.js.
Video copy code :
return new Promise(function(resolve ,reject){
var ffmpeg = require('fluent-ffmpeg');
ffmpeg(fs.createReadStream(path.join(__dirname,'..','..','/video.mp4')))
.seekInput('01:00')
.duration('02:00')
.outputOptions('-strict experimental')
.on('start', function(commandLine) {
console.log('Spawned Ffmpeg with command: ' + commandLine);
}).on('end', function(err) {
if (!err) {
console.log('conversion Done');
//res.send("conversion Done");
resolve();
}
}).on('error', function(err) {
console.log('error: ', +err);
reject(err);
}).save(path.join(__dirname,'..','..','/test.mp4'));
});
Please check following steps and sample code,
Install ffmpeg on your system (follow the installation steps for your platform).
Install module fluent-ffmpeg in your project
Please check following sample code
// Clips `duration` seconds starting at `start` from `input_file` into
// `output_file` via fluent-ffmpeg; resolves with the output path.
// Assumes `ffmpeg = require('fluent-ffmpeg')` is in scope and the ffmpeg
// binary is installed on the system.
function clipVideo() {
  var params = {
    input: 'input_file',
    start: 0,        // seconds offset into the input
    duration: 10,    // seconds to keep
    output: 'output_file'
  };
  return new Promise(function (resolve, reject) {
    ffmpeg(params.input)
      .setStartTime(params.start)
      .setDuration(params.duration)
      .on('start', function (commandLine) {
        console.log('start : ' + commandLine);
      })
      .on('progress', function (progress) {
        console.log('In Progress !!' + Date());
      })
      .on('end', function () {
        // BUG FIX: original resolved `params.clippedFile`, which is never
        // defined (always undefined); resolve the output path instead.
        // Also fixed the "downlaod" typo in the log message.
        console.log("download resolved");
        return resolve(params.output);
      })
      .on('error', function (err) {
        console.log("reject");
        return reject(err);
      })
      // Handlers are attached before starting, so no early event is missed.
      .save(params.output);
  });
}
Hope this will help you !!

i want to send a CTRL + C command to client how can i acheive this in NodeJS?

My Code Looks Like this
// Connects over SSH, runs `xtail`, and appends everything it prints to a
// local capture file.
var Client = require('ssh2').Client;
var fs=require('fs');
var conn = new Client();
conn.on('ready', function() {
  console.log('Client :: ready');
  // NOTE(review): conn.shell's first argument here ('xtail') is presumably
  // intended to start the command, but the command is actually sent by the
  // stream.write below — confirm against the ssh2 documentation.
  conn.shell('xtail', function(err, stream) {
    stream.write('xtail\n\r');
    if (err) throw err; // NOTE(review): err is only checked after the write above
    stream.on('close', function(code, signal) {
      console.log('Stream :: close :: code: ' + code + ', signal: ' + signal);
      conn.end();
    }).on('data', function(data) {
      // NOTE(review): to send Ctrl+C to the remote process (the question's
      // goal), writing the ETX control character — stream.write('\x03') —
      // is the usual approach with ssh2 shell streams; verify with the ssh2
      // documentation.
      // stream.stdin.write('?');
      console.log('hey');
      console.log('STDOUT: ' + data);
      // Append each received chunk to the local file.
      fs.appendFile('D:\\Breme\\As1.txt',data,function(err){
        if(err)
        {
          console.log(err);
        }
        //console.log('file saving..')
      });
      // setTimeout(function(){
      // process.exit(1);
      // }, 20000);
    }).stderr.on('data', function(data) {
      console.log('STDERR: ' + data);
      // NOTE(review): tears down the whole connection on any stderr output.
      conn.destroy();
    });
  });
}).connect({
  host: '10.214.14.15',
  port: 22,
  username: 'bwadn',
  password: 'bwain'
});
after connecting to server I am downloading the file but I want to send 'Ctrl+C' command from client, so that it stops receiving data
Can anyone please help me solve this? As I am new to Node.js, I would appreciate your support. Thanks.

Returning a bluebird promise to electrons renderer process

I'm using Electron's remote function to call a node module which returns a bluebird promise. However, I can't figure out how to get the promise returned back to the renderer process that's calling the module.
Using this code the value of return_data1 is always empty.
exports.call_python = function () {
data1 = []
var Promise = require('bluebird');
var exec = require('child_process').execFile;
function promiseFromChildProcess(child) {
return new Promise(function (resolve, reject) {
child.addListener("error", reject);
child.addListener("exit", resolve);
});
}
var child = exec('ls');
promiseFromChildProcess(child).then(function (result) {
console.log('promise complete: ' + result);
}, function (err) {
console.log('promise rejected: ' + err);
});
child.stdout.on('data', function (data) {
console.log('stdout: ' + data);
data.push={data: data}
});
child.stderr.on('data', function (data) {
console.log('stderr: ' + data);
});
child.on('close', function (code) {
console.log('closing code: ' + code);
});
return data1
};
Once you jump into Promises it's pretty much promises all the way down. You can unwrap a Promise to get back to callback semantics but unless you are writing a service for pre-Promise API why would you?
In this case don't return data1; return the resolved promise. e.g.:
// Return the promise itself so the caller can await the child's completion.
// NOTE(review): fragment — assumes `promiseFromChildProcess` and `child`
// from the surrounding example are in scope.
return promiseFromChildProcess(child).then(function (result) {
  console.log('promise complete: ' + result);
  return result;
}, function (err) {
  console.log('promise rejected: ' + err);
  // Swallow the error and yield '' so callers always receive a value.
  return '';
});
or if the client should deal with the error just:
return promiseFromChildProcess(child);
It sounds like maybe you are unsure of how to use child_process. The really simple way is to just grab stdout after the child process exits:
// Minimal example: run `dir`, resolve with its stdout, and log the result.
var b = require('bluebird');
var cp = require('child_process');
new b(function (resolve, reject) {
  cp.exec('dir', function (err, stdout, stderr) {
    // BUG FIX: the original ignored `err`, leaving `reject` unused and
    // silently swallowing failures; reject so the chain can observe them.
    if (err) {
      reject(err);
      return;
    }
    resolve(stdout);
  });
}).then(function (data) {
  console.log("dir is " + data);
}).catch(function (err) {
  // Surface rejections instead of leaving an unhandled rejection.
  console.error(err);
});
so do:
exports.call_python = function () {
var b = require('bluebird');
var cp = require('child_process');
return new b(function(resolve, reject){
cp.exec('ls', function(err, stdout, stderr){
resolve(stdout);
});
});
};

How to promisify Node's child_process.exec and child_process.execFile functions with Bluebird?

I'm using the Bluebird promise library under Node.js, it's great! But I have a question:
If you take a look at the documentation of Node's child_process.exec and child_process.execFile you can see that both of these functions are returning a ChildProcess object.
So what's the recommended way to promisify such functions?
Note that the following works (I get a Promise object):
var Promise = require('bluebird');
// Promise.promisify wraps the callback API: the returned functions yield a
// Promise, so the original ChildProcess return value (streams, events) is no
// longer accessible to the caller — which is exactly the problem below.
var execAsync = Promise.promisify(require('child_process').exec);
var execFileAsync = Promise.promisify(require('child_process').execFile);
But how can one get access to the original return value of the original Node.js functions? (In these cases I would need to be able to access the originally returned ChildProcess objects.)
Any suggestion would be appreciated!
EDIT:
Here is an example code which is using the return value of the child_process.exec function:
var exec = require('child_process').exec;

// Launch the server script and mirror its output and lifecycle on the console.
var serverProcess = exec('node ./commands/server.js');

// Shared logging helper for all three event hooks below.
var report = function (prefix, payload) {
  console.log(prefix + payload);
};

serverProcess.stdout.on('data', function (data) {
  report('stdout: ', data);
});
serverProcess.stderr.on('data', function (data) {
  report('stderr: ', data);
});
serverProcess.on('close', function (code) {
  report('closing code: ', code);
});
But if I would use the promisified version of the exec function ( execAsync from above ) then the return value will be a promise, not a ChildProcess object. This is the real problem I am talking about.
It sounds like you'd like to return two things from the call:
the ChildProcess
a promise that resolves when the ChildProcess completes
So "the recommended way to promisify such functions"? Don't.
You're outside the convention. Promise returning functions are expected to return a promise, and that's it. You could return an object with two members (the ChildProcess & the promise), but that'll just confuse people.
I'd suggest calling the unpromisified function, and creating a promise based off the returned childProcess. (Maybe wrap that into a helper function)
This way, it's quite explicit for the next person who reads the code.
Something like:
var Promise = require('bluebird');
var exec = require('child_process').execFile;
// Adapt a running ChildProcess to a promise: settles when the process ends.
// Resolves with the "exit" event's first argument (exit code); rejects with
// the "error" event's argument.
function promiseFromChildProcess(child) {
  const settleOnTermination = function (resolve, reject) {
    child.addListener("error", reject);
    child.addListener("exit", resolve);
  };
  return new Promise(settleOnTermination);
}
// Start the child, then observe both the promise and the raw streams.
var child = exec('ls');

// One helper for the console output of every hook below.
var report = function (label, value) {
  console.log(label + value);
};

promiseFromChildProcess(child).then(
  function (result) {
    report('promise complete: ', result);
  },
  function (err) {
    report('promise rejected: ', err);
  }
);

child.stdout.on('data', function (data) {
  report('stdout: ', data);
});
child.stderr.on('data', function (data) {
  report('stderr: ', data);
});
child.on('close', function (code) {
  report('closing code: ', code);
});
Here's another way:
// Run `command` via exec and resolve with its trimmed stdout.
// Rejects with the exec error (non-zero exit, spawn failure, etc.).
function execPromise(command) {
  return new Promise((resolve, reject) => {
    const onFinished = (error, stdout, stderr) => {
      if (error) {
        reject(error);
        return;
      }
      resolve(stdout.trim());
    };
    exec(command, onFinished);
  });
}
// Log the command's output, or its error message on failure.
execPromise(command).then(function (result) {
  console.log(result);
}).catch(function (error) {
  // BUG FIX: original logged `e.message`, but the catch parameter is named
  // `error` — referencing `e` would throw a ReferenceError inside the handler.
  console.error(error.message);
});
Or with async/await:
// Same flow with async/await. NOTE(review): must run inside an async
// function, or at the top level of an ES module (top-level await).
try {
  var result = await execPromise(command);
} catch (e) {
  console.error(e.message);
}
There's probably not a way to do this nicely that covers all use cases. But for limited cases, you can do something like this:
/**
 * Promisified child_process.exec.
 *
 * @param cmd Shell command line to execute.
 * @param opts See the child_process.exec node docs.
 * @param {stream.Writable} opts.stdout If defined, child process stdout will be piped to it.
 * @param {stream.Writable} opts.stderr If defined, child process stderr will be piped to it.
 *
 * @returns {Promise<{ stdout: string, stderr: string }>} Captured output;
 *   rejects with the exec error on failure.
 */
function execp(cmd, opts) {
  if (!opts) {
    opts = {};
  }
  return new Promise(function (resolve, reject) {
    // Completion callback: reject on error, otherwise hand back both streams.
    const finished = function (err, stdout, stderr) {
      if (err) {
        reject(err);
      } else {
        resolve({
          stdout: stdout,
          stderr: stderr
        });
      }
    };
    const child = exec(cmd, opts, finished);
    // Optionally mirror the child's stdio into caller-supplied writables.
    if (opts.stdout) {
      child.stdout.pipe(opts.stdout);
    }
    if (opts.stderr) {
      child.stderr.pipe(opts.stderr);
    }
  });
}
This accepts opts.stdout and opts.stderr arguments, so that stdio can be captured from the child process.
For example:
// Example: capture the child's stdio through custom writable streams.
// NOTE(review): fragment — assumes `execp` above and the `stream` module
// (require('stream')) are in scope.
execp('ls ./', {
  stdout: new stream.Writable({
    write: (chunk, enc, next) => {
      console.log(chunk.toString(enc));
      next();
    }
  }),
  stderr: new stream.Writable({
    write: (chunk, enc, next) => {
      console.error(chunk.toString(enc));
      next();
    }
  })
}).then(() => console.log('done!'));
Or simply:
// Or pipe the child's stdio straight through to this process's own stdio.
execp('ls ./', {
  stdout: process.stdout,
  stderr: process.stderr
}).then(() => console.log('done!'));
Just want to mention that there's a nice tool that will solve your problem completely:
https://www.npmjs.com/package/core-worker
This package makes it a lot easier to handle processes.
// NOTE(review): the package is published as "core-worker"; the capitalized
// "CoreWorker" specifier likely needs to match the real module name.
import { process } from "CoreWorker";
import fs from "fs";
// NOTE(review): `await` at the top level requires an ES module context, and
// `const result` is declared twice below — the second declaration would be a
// SyntaxError; rename one of them before running.
const result = await process("node Server.js", "Server is ready.").ready(1000);
const result = await process("cp path/to/file /newLocation/newFile").death();
or combine these functions:
import { process } from "core-worker";
// Start the chat process; resolves the handle once "Chat ready" is seen.
const simpleChat = process("node chat.js", "Chat ready");
// NOTE(review): the original comment said "wait an hour", but 360000 ms is
// 6 minutes — an hour would be 3600000.
setTimeout(() => simpleChat.kill(), 360000);
simpleChat.ready(500)
.then(console.log.bind(console, "You are now able to send messages."))
// NOTE(review): `::` (the function-bind operator) is a non-standard proposal
// and needs a Babel transform; also, an arrow body consisting only of a
// comment — `() => /* handle err */` — is a SyntaxError in plain JS.
.then(::simpleChat.death)
.then(console.log.bind(console, "Chat closed"))
.catch(() => /* handle err */);

Resources