-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlib.coffee
219 lines (195 loc) · 9.07 KB
/
lib.coffee
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
fs = require 'fs'
path = require 'path'
crypto = require 'crypto'
mustache = require 'mustache'
child_process = require 'child_process'
database = require './database'
# Recursively list every file under dir_path, returning paths relative to
# dir_path. An empty directory is kept as one entry of its own so that it
# still appears in the packed archive.
readDir = (dir_path) ->
  files = fs.readdirSync dir_path
  result = []
  for file_name in files
    # Skip the special dot entries. (The original compared against ',,' —
    # a typo for '..'; readdirSync never returns them, but guard anyway.)
    continue if file_name == '.' or file_name == '..'
    full_path = path.join dir_path, file_name
    if fs.lstatSync(full_path).isDirectory()
      child_results = readDir full_path
      if child_results.length == 0
        result.push file_name # Empty dir
      else
        result = result.concat child_results.map (child_file) -> path.join file_name, child_file
    else
      result.push file_name # File
  result
# Compute the hex SHA-256 digest of a file by streaming its contents.
# Directories resolve to "" (they have no content to hash).
# Returns a Promise<string>; rejects on stream errors.
calculateSHA256 = (file) ->
  new Promise (resolve, reject) ->
    # Must return here: the original fell through after resolving and still
    # opened a read stream on the directory, firing an error after resolve.
    return resolve "" if fs.lstatSync(file).isDirectory()
    hash = crypto.createHash 'sha256'
    # createReadStream — calling the fs.ReadStream constructor is deprecated.
    input = fs.createReadStream file
    input.on 'data', (d) -> hash.update d
    input.on 'error', (e) -> reject e
    input.on 'end', -> resolve hash.digest 'hex'
# Create a gzipped tar of `from` (one path, or an array of paths, relative to
# from_dir) at to_dir/to, then rename it to to_dir/<sha256>.tar.gz.
# Returns the hex SHA-256 of the archive.
# NOTE(review): paths are interpolated into a shell command — only safe for
# trusted, shell-safe names; and the shared temp_file_operation.file_list
# means two concurrent pack() calls would clobber each other.
pack = (from, to, from_dir, to_dir) ->
  full_to_path = path.join to_dir, to
  if Array.isArray from
    # Many files: hand tar a list file (-T) to avoid command-line length limits.
    console.log "packager is writing #{from.length} files to temp_file_operation.file_list."
    fs.writeFileSync "temp_file_operation.file_list", from.join "\n"
    # from_dir is now quoted like the other branch; it was previously bare.
    command = "tar -zcf \"#{full_to_path}\" -C \"#{from_dir}\" -T temp_file_operation.file_list"
  else
    command = "tar -zcf \"#{full_to_path}\" -C \"#{from_dir}\" \"#{from}\""
  await new Promise (resolve, reject) ->
    child_process.exec command, (err, stdout) ->
      if err then reject err else resolve ""
  sha = await calculateSHA256 full_to_path
  # Content-address the archive; propagate rename failures instead of
  # resolving unconditionally as the original callback did.
  await new Promise (resolve, reject) ->
    fs.rename full_to_path, path.join(to_dir, sha + '.tar.gz'), (err) ->
      if err then reject err else resolve()
  console.log "packing " + from_dir + " [" + from + "]" + " to " + to_dir + " [" + to + "]" + sha if process.env.NODE_ENV == "DEBUG"
  sha
# Pack the whole release tree into the single 'full' archive and return its
# archive-index record ({files, type, checksum}); size is filled in later by
# the caller.
generateFullArchive = (source_path, target_path) ->
  checksum = await pack '.', 'full.tar.gz', source_path, target_path
  files: [source_path]
  type: 'full'
  checksum: checksum
# Build one per-file ('sand') archive index record for each entry of `files`
# (records of shape {name, checksum} produced by the checksum pass).
# To avoid re-packing unchanged files, the latest release of branch b_name is
# loaded from the database and used as a cache.
generateSeparateArchive = (b_name, source_path, files, target_path) ->
  # Load cache material: the latest release's files and its 'sand' archives.
  # If no release exists yet, the two *_hash variables stay undefined and the
  # cache condition below short-circuits to the pack branch.
  releases = await database.loadRelease b_name
  if releases and releases[0]
    release_name = releases[0].name
    latest_archives = await database.loadArchives release_name
    latest_archive_hash = {}
    for archive from latest_archives.values()
      # A 'sand' archive wraps exactly one file; key it by that file.
      latest_archive_hash[archive.files[0]] = archive if archive.type == 'sand'
    latest_files = await database.loadFiles release_name
    latest_file_hash = {}
    # NOTE(review): database file records are keyed by .path here, while the
    # fresh checksum records below carry .name — confirm both hold the same
    # relative-path strings.
    latest_files.forEach (file) -> latest_file_hash[file.path] = file
  answers = []
  for file_checksum in files
    file = file_checksum.name
    # Cache condition: the file existed in the latest release with the same
    # FILE checksum AND a per-file archive for it is on record.
    # On a hit, the cached ARCHIVE checksum/size is reused (no packing).
    if latest_file_hash and latest_file_hash[file] and latest_file_hash[file].checksum == file_checksum.checksum and latest_archive_hash[file]
      answers.push
        files: [file]
        type: 'sand'
        checksum: latest_archive_hash[file].checksum
        size: latest_archive_hash[file].size
      console.log "loaded cached #{file} from release #{release_name}"
    else
      # Cache miss: pack the single file. Slashes are flattened to '_' only
      # for the temporary archive name (pack renames it to its checksum).
      answer =
        files: [file]
        type: 'sand'
      answer.checksum = await pack file, file.replace(/\//g, '_'), source_path, target_path
      answers.push answer
  console.log "Finish generate separate archives step."
  return answers
# For the new release `release_name` of branch b_name, build one 'strategy'
# (delta) archive against each of the five most recent releases on record.
# Returns the list of archive-index records.
generateStrategyArchive = (b_name, release_name, new_release_files, source_path, target_path) ->
  releases = await database.loadRelease b_name
  recent_names = releases.slice(0, 5).map (release) -> release.name
  # Fetch all candidate old releases' file lists in parallel.
  # (The callback parameter no longer shadows the outer release_name.)
  old_file_lists = await Promise.all recent_names.map (name) -> database.loadFiles name
  strategy_archives = []
  # Packing stays sequential: pack() writes a shared temp file list, so the
  # delta archives must not be generated concurrently.
  for old_files in old_file_lists
    old_name = if old_files[0] then old_files[0].release else 'emptyRelease'
    strategy_archives.push await generateStrategyArchiveBetweenReleases("#{release_name}And#{old_name}", old_files, new_release_files, source_path, target_path)
  return strategy_archives
# Diff old_release against new_release and pack only the changed files into a
# single 'strategy' archive named pack_name. Returns its index record.
generateStrategyArchiveBetweenReleases = (pack_name, old_release, new_release, source_path, target_path) ->
  changed = compareRelease old_release, new_release
  changed_names = changed.map (file) -> file.name
  checksum = await pack changed_names, pack_name, source_path, target_path
  { files: changed_names, type: 'strategy', checksum: checksum }
# Return the entries of new_release whose checksum differs from — or is
# missing in — old_release. Old entries are keyed by .path, new entries are
# looked up by .name (database records vs. fresh checksum records).
compareRelease = (old_release, new_release) ->
  known = generateReleaseHash old_release
  new_release.filter (file) -> known.get(file.name) != file.checksum
# Build a Map of file path -> file checksum for a release's file list.
generateReleaseHash = (release) ->
  mapping = new Map
  for entry in release
    mapping.set entry.path, entry.checksum
  mapping
# Write a sha256sum-style manifest ("<checksum> <name>" per line, newline
# separated) for the given checksum records to target_path.
writeSHA256file = (file_checksums, target_path) ->
  lines = ("#{entry.checksum} #{entry.name}" for entry in file_checksums)
  fs.writeFileSync target_path, lines.join("\n")
# Render the metalink (.meta4) description of the full archive from the
# bundled mustache template and write it to target_path.
writeMetalinkFile = (full_archive, target_path) ->
  template = fs.readFileSync(path.join(__dirname, 'template.meta4')).toString()
  rendered = mustache.render template,
    packages: [{checksum: full_archive.checksum, size: full_archive.size}]
  fs.writeFileSync target_path, rendered
# For each RELEASE, generation proceeds as follows:
# 0. If the release already exists, nothing happens.
# 1. ARCHIVES:
#      - the full archive,
#      - one separate archive per file,
#      - strategy (delta) archives against recent releases (by VERSION ID).
# 2. The ARCHIVE index for every FILE.
# 3. Save the RELEASE itself to the DATABASE.
# Pack one release of branch b_name from release_source_path into
# release_target_path and record it in the database.
# Steps: per-file checksums -> full/separate/strategy archives -> archive
# sizes -> archive index -> release record -> metalink.
# Returns 1 when the release already exists (nothing done), 0 on success.
# running_data is optional; when given, coarse progress is written to its
# .child_progress field.
execute = (b_name, release_name, release_source_path, release_target_path, running_data) ->
  exist = await database.checkRelease release_name
  if exist
    console.log "#{release_name} #{b_name} already exists, pass the pack."
    return 1
  console.log "Executing " + b_name + "/" + release_name + " from " + release_source_path + " to " + release_target_path
  release_archive_path = path.join release_target_path, 'archives'
  release_checksum_path = path.join release_target_path, 'checksums'
  release_metalink_path = path.join release_target_path, 'metalinks'
  # Best-effort mkdir: the directories may already exist.
  try fs.mkdirSync release_target_path
  try fs.mkdirSync release_archive_path
  try fs.mkdirSync release_checksum_path
  try fs.mkdirSync release_metalink_path
  files = readDir release_source_path
  # No.3 FILE checksums.
  console.log "Checking " + files.length + " Files"
  file_checksum = []
  for file in files
    checksum = { name: file }
    checksum.checksum = await calculateSHA256 path.join release_source_path, file
    file_checksum.push checksum
  console.log "Saving Files to database."
  running_data.child_progress = 1 if running_data
  writeSHA256file file_checksum, path.join(release_checksum_path, "ygopro-" + b_name)
  await database.saveFiles release_name, file_checksum
  console.log "Files inventory Step finished."
  # No.1 ARCHIVES
  archive_indices = []
  console.log "Generating full archive."
  running_data.child_progress = 11 if running_data
  full_archive = await generateFullArchive release_source_path, release_archive_path
  archive_indices.push full_archive
  console.log "Generating separate archives."
  running_data.child_progress = 12 if running_data
  archive_indices = archive_indices.concat await generateSeparateArchive b_name, release_source_path, file_checksum, release_archive_path
  console.log "Generating strategy archives."
  running_data.child_progress = 13 if running_data
  result = await generateStrategyArchive(b_name, release_name, file_checksum, release_source_path, release_archive_path)
  archive_indices = archive_indices.concat result
  # Fill in the on-disk size of every archive that does not carry one yet
  # (cached 'sand' archives arrive with a size already).
  console.log "Calculating file size."
  running_data.child_progress = 14 if running_data
  for archive_index in archive_indices
    unless archive_index.size
      try
        state = fs.lstatSync path.join release_archive_path, archive_index.checksum + '.tar.gz'
        if state.isDirectory()
          archive_index.size = 0
        else
          archive_index.size = state.size
      catch ex
        # Message now names the actual path probed (including the extension).
        console.log "No such file: #{release_archive_path}/#{archive_index.checksum}.tar.gz, #{ex}"
  console.log "Generate archive Step finished."
  # No.2 ARCHIVE Index
  console.log "Saving Archive files."
  running_data.child_progress = 20 if running_data
  await database.saveArchives release_name, archive_indices
  # No.0 RELEASE itself.
  console.log "Saving Release."
  # Guarded like every other progress write; the original assigned
  # unconditionally here and below, crashing when running_data was omitted.
  running_data.child_progress = 21 if running_data
  await database.saveRelease b_name, release_name
  # No.4 Full ARCHIVE meta4
  console.log "Saving Metalink."
  running_data.child_progress = 22 if running_data
  writeMetalinkFile full_archive, path.join(release_metalink_path, "ygopro-" + b_name + ".meta4")
  console.log "Finish executing " + b_name + "/" + release_name
  return 0
# Public API: execute(b_name, release_name, source_path, target_path, running_data)
module.exports.execute = execute