index.js
const core = require('@actions/core')
const path = require('path')
const os = require('os')
const fs = require('fs')
const archiver = require('archiver')
const { S3Client } = require('@aws-sdk/client-s3')
const { Upload } = require('@aws-sdk/lib-storage')
require('dotenv').config()
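// dotenv lets the same script run locally by loading variables from a .env file; in GitHub Actions the variables come from the workflow environment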
async function main () {
// Load data from environment variables
const cleanupFiles = []
try {
let {
SOURCE_PATH = null,
DEST_FILE = null,
BUCKET_NAME = null,
AWS_SECRET_ID = null,
AWS_SECRET_KEY = null,
AWS_REGION = 'eu-central-1',
S3_ENDPOINT = null,
STORAGE_CLASS = 'STANDARD',
ZIP_PATH = path.join(os.tmpdir(), 'tmp.zip'),
SOURCE_MODE = 'ZIP', // ZIP, FILE
METADATA_KEY = null,
METADATA_VALUE = null,
CONTENT_TYPE = null
} = process.env
// Validate required variables
let missingVars = '';
if (!SOURCE_PATH) missingVars += 'SOURCE_PATH '
if (!DEST_FILE) missingVars += 'DEST_FILE '
if (!BUCKET_NAME) missingVars += 'BUCKET_NAME '
if (!AWS_SECRET_ID) missingVars += 'AWS_SECRET_ID '
if (!AWS_SECRET_KEY) missingVars += 'AWS_SECRET_KEY '
if (!AWS_REGION) missingVars += 'AWS_REGION '
if (!ZIP_PATH) missingVars += 'ZIP_PATH '
if (!SOURCE_MODE) missingVars += 'SOURCE_MODE '
if (missingVars.length > 0) {
throw new Error(`The following variables are missing: ${missingVars}`)
}
// Validate source mode
const modes = {
ZIP: 'ZIP',
FILE: 'FILE'
}
if (!Object.values(modes).includes(SOURCE_MODE)) {
throw Error(`SOURCE_MODE "${SOURCE_MODE}" is not valid. See the documentation, or remove the environment variable to use the default.`)
}
// Validate source mode and source path
const absSourceDir = path.resolve(SOURCE_PATH)
if (!fs.existsSync(absSourceDir)) {
throw Error(`SOURCE_PATH "${absSourceDir}" does not exist`)
}
const sourceStats = fs.lstatSync(SOURCE_PATH)
if (SOURCE_MODE === modes.ZIP && !sourceStats.isDirectory()) {
throw Error(`SOURCE_MODE is set to ZIP but SOURCE_PATH "${absSourceDir}" is not a directory`)
} else if (SOURCE_MODE === modes.FILE && !sourceStats.isFile()) {
throw Error(`SOURCE_MODE is set to FILE but SOURCE_PATH "${absSourceDir}" is not a file`)
}
// Compress directory if needed
if (SOURCE_MODE === modes.ZIP) {
console.info(`Creating zip file of directory ${path.resolve(SOURCE_PATH)} at ${path.resolve(ZIP_PATH)}`)
try {
cleanupFiles.push(ZIP_PATH)
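// zlib level 9 gives maximum compression at the cost of CPU time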
const archive = archiver('zip', { zlib: { level: 9 } })
const stream = fs.createWriteStream(ZIP_PATH)
await new Promise((resolve, reject) => {
archive
.directory(SOURCE_PATH, false)
.on('error', err => {
console.error('Error inside archive:', err)
reject(err)
})
.on('warning', warning => {
console.warn('Warning:', warning)
})
.on('entry', entry => {
console.info('Archiving:', entry.name)
})
.pipe(stream)
stream.on('close', () => resolve())
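// finalize() signals that no more entries will be appended; the output stream emits 'close' once the archive has been fully written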
archive.finalize()
})
// Override content type
CONTENT_TYPE = 'application/zip'
} catch (err) {
console.error('An error occurred while creating the zip file')
throw err
}
}
// Init S3
console.info(`Initializing S3 upload to bucket "${BUCKET_NAME}"`);
const s3Config = {
apiVersion: '2006-03-01',
credentials: {
accessKeyId: AWS_SECRET_ID,
secretAccessKey: AWS_SECRET_KEY
},
region: AWS_REGION
}
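// An explicit endpoint allows targeting S3-compatible services instead of the default AWS endpoint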
if (S3_ENDPOINT) {
s3Config.endpoint = S3_ENDPOINT
}
const s3 = new S3Client(s3Config);
// Upload file
const fileToUpload = SOURCE_MODE === modes.ZIP ? ZIP_PATH : SOURCE_PATH;
let readStream
try {
readStream = fs.createReadStream(fileToUpload);
} catch (err) {
console.error(`Failed to read file "${fileToUpload}"`);
throw err
}
const req = {
Body: readStream,
Bucket: BUCKET_NAME,
Key: DEST_FILE,
StorageClass: STORAGE_CLASS
}
if (METADATA_KEY && METADATA_VALUE) {
// Computed property so the metadata key is the value of METADATA_KEY, not the literal string "METADATA_KEY"
req.Metadata = { [METADATA_KEY]: METADATA_VALUE }
}
if (CONTENT_TYPE) {
req.ContentType = CONTENT_TYPE
}
console.info(`Uploading "${fileToUpload}" to bucket "${BUCKET_NAME}" as "${DEST_FILE}"`);
// Use the managed upload feature of the SDK to upload the stream
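// Upload from @aws-sdk/lib-storage can split streamed bodies into multipart uploads, so large files do not have to be buffered in memory at once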
const upload = new Upload({
client: s3,
params: req
})
try {
await upload.done();
console.info(`Successful upload to ${BUCKET_NAME}`);
} catch (err) {
console.error(`Failed upload to ${BUCKET_NAME}`);
throw Error(`S3 Upload error: ${err}`);
}
} catch (error) {
core.setFailed(error.message)
} finally {
try {
// Clean up temporary files
cleanupFiles.forEach(file => {
if (fs.existsSync(file)) {
fs.unlinkSync(file)
}
})
} catch (err) {
console.error('An error occurred while cleaning up')
console.error(err)
}
}
}
// Run it!
main()
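// Example invocation (hypothetical values) -- the same variables can be set in a workflow env: block,
// exported in the shell, or placed in a local .env file:
//   SOURCE_PATH=./dist DEST_FILE=backup.zip BUCKET_NAME=my-bucket \
//   AWS_SECRET_ID=... AWS_SECRET_KEY=... node index.js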