-
Notifications
You must be signed in to change notification settings - Fork 0
/
index.js
188 lines (169 loc) · 5.19 KB
/
index.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
// require modules
const fs = require('fs');
const { Client } = require('ssh2');
const archiver = require('archiver');
const { readFileSync } = require('fs');
const path = require('path');
// Directory the node script is executed from; expected to be the root of the target project.
const cwdPath = process.cwd();
// Read this tool's own default configuration, shipped next to this file.
const configPlainText = readFileSync(path.resolve(__dirname, '.config.json'));
const CONFIG = JSON.parse(configPlainText);
/**
 * Load the project's deploy configuration file from the current working
 * directory. The file name comes from CONFIG.CONFIG_FILE_NAME.
 *
 * @return {*} the configuration object exported by that file
 */
function readConfigFile() {
  const configFilePath = path.resolve(cwdPath, CONFIG.CONFIG_FILE_NAME);
  return require(configFilePath);
}
/**
 * Upload a single local file to the remote host over SFTP.
 *
 * @param {*} conn       an ssh2 Client that is already connected ('ready')
 * @param {*} localPath  path of the local file to send
 * @param {*} remotePath destination path on the remote machine
 * @param {*} callback   invoked with fastPut's result once the transfer finishes
 */
function uploadFile(conn, localPath, remotePath, callback) {
  conn.sftp(function (sftpErr, sftp) {
    if (sftpErr) throw sftpErr;
    sftp.fastPut(localPath, remotePath, (putErr, result) => {
      if (putErr) throw putErr;
      callback(result);
    });
  });
}
/**
 * Compress a file or folder into a sibling zip archive.
 *
 * Given /foo/bar, writes /foo/bar.zip and resolves with that path only
 * after the archive has been completely flushed to disk.
 *
 * @param {string} localPath path of the file or folder to compress
 * @return {Promise<string>} absolute path of the finished .zip file
 */
async function compress(localPath) {
  const folderName = path.basename(localPath);
  // create a file to stream archive data to.
  const outDir = path.resolve(localPath, '../', `${folderName}.zip`);
  const output = fs.createWriteStream(outDir);
  const archive = archiver('zip', {
    zlib: { level: 9 } // Sets the compression level.
  });
  // BUG FIX: previously this function returned right after finalize(),
  // but the zip may still be flushing; callers could upload a truncated
  // file. Wait for the output file descriptor to close before returning.
  const closed = new Promise((resolve, reject) => {
    // 'close' event is fired only when a file descriptor is involved
    output.on('close', function () {
      console.log(archive.pointer() + ' total bytes');
      console.log('archiver has been finalized and the output file descriptor has closed.');
      resolve();
    });
    output.on('error', reject);
  });
  // This event is fired when the data source is drained no matter what was the data source.
  // It is not part of this library but rather from the NodeJS Stream API.
  // @see: https://nodejs.org/api/stream.html#stream_event_end
  output.on('end', function () {
    console.log('Data has been drained');
  });
  // good practice to catch warnings (ie stat failures and other non-blocking errors);
  // log ENOENT warnings instead of silently dropping them.
  archive.on('warning', function (err) {
    if (err.code === 'ENOENT') {
      console.warn(err.message);
    } else {
      // throw error
      throw err;
    }
  });
  // good practice to catch this error explicitly
  archive.on('error', function (err) {
    throw err;
  });
  // pipe archive data to the file
  archive.pipe(output);
  await compressDir(archive, localPath);
  await archive.finalize();
  await closed; // zip is now fully on disk
  return outDir;
}
/**
 * Add one file or folder to an open archiver instance.
 *
 * Folders are archived recursively under their own base name; a single
 * file is appended as a read stream.
 *
 * @param {*} archive an archiver instance already piped to its output
 * @param {*} dir     path of the file or folder to add
 * @return {Promise<boolean>} resolves true once the entry is queued
 */
async function compressDir(archive, dir) {
  let stats;
  try {
    stats = await fs.promises.stat(dir);
  } catch (err) {
    console.log(err);
    throw err;
  }
  const entryName = path.basename(dir);
  if (stats.isDirectory()) {
    // Folder: archive the whole tree under its base name.
    archive.directory(dir, entryName);
  } else {
    // Single file: stream its contents into the archive.
    archive.append(fs.createReadStream(dir), { name: entryName });
  }
  return true;
}
/**
 * Extract "--key=value" style arguments from the node command line.
 *
 * @param {...string} target flag names to look for, e.g. "--username"
 * @return {Object} map from flag name (leading "--" stripped) to its value;
 *                  flags given without "=value" map to undefined, and flags
 *                  not present on the command line are omitted
 */
function resolveArgs(...target) {
  const args = process.argv.slice(2);
  const res = {};
  target.forEach((field) => {
    // BUG FIX: the old code spliced `args` while forEach-ing over it,
    // which skips the element following every match. Find the first
    // matching argument instead, then remove it.
    const index = args.findIndex((arg) => arg.split('=')[0] === field);
    if (index !== -1) {
      const arg = args[index];
      const eq = arg.indexOf('=');
      // Split on the first '=' only, so values may themselves contain '='.
      res[field.split('--')[1]] = eq === -1 ? undefined : arg.slice(eq + 1);
      args.splice(index, 1);
    }
  });
  return res;
}
/**
 * Deploy entry point: compress the configured local folder, upload the
 * resulting zip to the remote host over SFTP, then open a remote shell
 * to unpack it and run any user-supplied shell scripts.
 *
 * Connection options are the project config spread into ssh2's connect(),
 * with --username/--password CLI args taking precedence over the config.
 */
function deploy() {
  console.log("🚀🚀🚀 start deploying...");
  // Read the node command-line args for the remote Linux server's username and password.
  const args = resolveArgs("--username", "--password");
  // Read the user's project-level deploy config file, .deploy.config.json.
  const config = readConfigFile();
  const conn = new Client();
  conn.on('ready', () => {
    console.log('Client :: ready');
    const localPath = path.resolve(cwdPath, config.localPath);
    const folderName = path.basename(localPath);
    compress(localPath).then((outDir) => {
      const zipFileName = path.basename(outDir);
      let p = config.remotePath;
      // Strip trailing slashes so the remote path joins cleanly below.
      p = p.replace(/\/+$/, '');
      const remotePath = p + '/' + zipFileName;
      console.log(outDir, remotePath);
      uploadFile(conn, outDir, remotePath, (res) => {
        console.log('上传成功!', res === undefined ? '' : res);
        console.log('开始解压文件...');
        // Run shell commands on the remote host over an interactive shell.
        conn.shell((err, stream) => {
          if (err) throw err;
          stream.on('close', () => {
            console.log('Stream :: close');
            console.log('文件解压完毕!\n部署完成!🐢🐢🐢');
            conn.end();
          });
          stream.pipe(process.stdout)
          // TODO: mv ${folderName} ${folderName + '-' + Date.now()}
          // cd into the target folder, unzip the archive in place (jar xvf),
          // delete the zip, then run any custom scripts from the config.
          const scripsts = config.shellScripts === undefined ? '' : config.shellScripts;
          stream.end(`cd ${p}\nrm -rf ${folderName}\njar xvf ${zipFileName}&&rm -rf ${zipFileName}\n${scripsts}\nexit\n`);
        });
      });
    })
  }).connect({
    readyTimeout: 20000,
    ...config,
    ...args // CLI args override config-file credentials
  });
}
module.exports = deploy;