52190989765f722acaf0a6e3983458c0c533a356805af7322efe5f55d93efa9babbf919a0fac2a3dc41dd90cab8389456330c56b756552530039393649359f 7.7 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231
"use strict";
// CommonJS interop helper emitted by the TypeScript compiler: wraps a
// non-ES-module export so it is always reachable under `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.replace = void 0;
// tar -r
const fs_minipass_1 = require("@isaacs/fs-minipass");
const node_fs_1 = __importDefault(require("node:fs"));
const node_path_1 = __importDefault(require("node:path"));
const header_js_1 = require("./header.js");
const list_js_1 = require("./list.js");
const make_command_js_1 = require("./make-command.js");
const options_js_1 = require("./options.js");
const pack_js_1 = require("./pack.js");
// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is, jump forward by the specified size (round up to 512)
// and try again.
// Write the new Pack stream starting there.
const replaceSync = (opt, files) => {
    const p = new pack_js_1.PackSync(opt);
    let threw = true;
    let fd;
    let position;
    try {
        try {
            // open read+write; fall back to creating the file if it is absent
            fd = node_fs_1.default.openSync(opt.file, 'r+');
        }
        catch (er) {
            if (er?.code === 'ENOENT') {
                fd = node_fs_1.default.openSync(opt.file, 'w+');
            }
            else {
                throw er;
            }
        }
        const st = node_fs_1.default.fstatSync(fd);
        const headBuf = Buffer.alloc(512);
        // walk the archive one 512-byte header block at a time
        POSITION: for (position = 0; position < st.size; position += 512) {
            // a single readSync may return short; keep reading until the
            // full header block is in headBuf (or EOF)
            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
                bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
                // gzip magic bytes (0x1f 0x8b) at offset 0 mean this is a
                // compressed archive, which cannot be appended to in place
                if (position === 0 &&
                    headBuf[0] === 0x1f &&
                    headBuf[1] === 0x8b) {
                    throw new Error('cannot append to compressed archives');
                }
                // zero bytes read: EOF, stop the outer scan entirely
                if (!bytes) {
                    break POSITION;
                }
            }
            const h = new header_js_1.Header(headBuf);
            // an invalid checksum marks the end of the valid entries; the
            // new entries get written starting at this position
            if (!h.cksumValid) {
                break;
            }
            // entry body length rounded up to whole 512-byte blocks
            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
            // entry claims more data than the file holds: treat as the end
            if (position + entryBlockSize + 512 > st.size) {
                break;
            }
            // the 512 for the header we just parsed will be added as well
            // also jump ahead all the blocks for the body
            position += entryBlockSize;
            if (opt.mtimeCache && h.mtime) {
                opt.mtimeCache.set(String(h.path), h.mtime);
            }
        }
        threw = false;
        streamSync(opt, p, position, fd, files);
    }
    finally {
        // if anything above threw, make sure the fd does not leak; on the
        // success path streamSync's WriteStreamSync owns the fd
        if (threw) {
            try {
                node_fs_1.default.closeSync(fd);
            }
            catch (er) { }
        }
    }
};
  79. const streamSync = (opt, p, position, fd, files) => {
  80. const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
  81. fd: fd,
  82. start: position,
  83. });
  84. p.pipe(stream);
  85. addFilesSync(p, files);
  86. };
// Async variant: scan the archive with fs.read callbacks to locate the
// first invalid/truncated header, then stream the new entries from there.
const replaceAsync = (opt, files) => {
    files = Array.from(files);
    const p = new pack_js_1.Pack(opt);
    // Find the write offset. Calls cb_(er) on failure (closing fd first)
    // or cb_(null, pos) with the offset to start writing at.
    const getPos = (fd, size, cb_) => {
        const cb = (er, pos) => {
            if (er) {
                // don't leak the fd when bailing out with an error
                node_fs_1.default.close(fd, _ => cb_(er));
            }
            else {
                cb_(null, pos);
            }
        };
        let position = 0;
        if (size === 0) {
            // empty/new file: write from the very start
            return cb(null, 0);
        }
        let bufPos = 0;
        const headBuf = Buffer.alloc(512);
        // callback-recursive read loop; shares position/bufPos state above
        const onread = (er, bytes) => {
            if (er || typeof bytes === 'undefined') {
                return cb(er);
            }
            bufPos += bytes;
            // short read: keep going until a full 512-byte block is buffered
            if (bufPos < 512 && bytes) {
                return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
            }
            // gzip magic at the head of the file: cannot append in place
            if (position === 0 &&
                headBuf[0] === 0x1f &&
                headBuf[1] === 0x8b) {
                return cb(new Error('cannot append to compressed archives'));
            }
            // truncated header
            if (bufPos < 512) {
                return cb(null, position);
            }
            const h = new header_js_1.Header(headBuf);
            // invalid checksum marks the end of the valid entries
            if (!h.cksumValid) {
                return cb(null, position);
            }
            /* c8 ignore next */
            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
            // entry would run past EOF: treat this position as the end
            if (position + entryBlockSize + 512 > size) {
                return cb(null, position);
            }
            // skip the header block we just parsed plus the entry body
            position += entryBlockSize + 512;
            if (position >= size) {
                return cb(null, position);
            }
            if (opt.mtimeCache && h.mtime) {
                opt.mtimeCache.set(String(h.path), h.mtime);
            }
            bufPos = 0;
            node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
        };
        node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
    };
    const promise = new Promise((resolve, reject) => {
        p.on('error', reject);
        let flag = 'r+';
        const onopen = (er, fd) => {
            // retry exactly once with 'w+' to create a missing archive
            if (er && er.code === 'ENOENT' && flag === 'r+') {
                flag = 'w+';
                return node_fs_1.default.open(opt.file, flag, onopen);
            }
            if (er || !fd) {
                return reject(er);
            }
            node_fs_1.default.fstat(fd, (er, st) => {
                if (er) {
                    return node_fs_1.default.close(fd, () => reject(er));
                }
                getPos(fd, st.size, (er, position) => {
                    if (er) {
                        return reject(er);
                    }
                    // overwrite the archive tail with the packed entries
                    const stream = new fs_minipass_1.WriteStream(opt.file, {
                        fd: fd,
                        start: position,
                    });
                    p.pipe(stream);
                    stream.on('error', reject);
                    stream.on('close', resolve);
                    addFilesAsync(p, files);
                });
            });
        };
        node_fs_1.default.open(opt.file, flag, onopen);
    });
    return promise;
};
  177. const addFilesSync = (p, files) => {
  178. files.forEach(file => {
  179. if (file.charAt(0) === '@') {
  180. (0, list_js_1.list)({
  181. file: node_path_1.default.resolve(p.cwd, file.slice(1)),
  182. sync: true,
  183. noResume: true,
  184. onReadEntry: entry => p.add(entry),
  185. });
  186. }
  187. else {
  188. p.add(file);
  189. }
  190. });
  191. p.end();
  192. };
  193. const addFilesAsync = async (p, files) => {
  194. for (let i = 0; i < files.length; i++) {
  195. const file = String(files[i]);
  196. if (file.charAt(0) === '@') {
  197. await (0, list_js_1.list)({
  198. file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
  199. noResume: true,
  200. onReadEntry: entry => p.add(entry),
  201. });
  202. }
  203. else {
  204. p.add(file);
  205. }
  206. }
  207. p.end();
  208. };
  209. exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync,
  210. /* c8 ignore start */
  211. () => {
  212. throw new TypeError('file is required');
  213. }, () => {
  214. throw new TypeError('file is required');
  215. },
  216. /* c8 ignore stop */
  217. (opt, entries) => {
  218. if (!(0, options_js_1.isFile)(opt)) {
  219. throw new TypeError('file is required');
  220. }
  221. if (opt.gzip ||
  222. opt.brotli ||
  223. opt.file.endsWith('.br') ||
  224. opt.file.endsWith('.tbr')) {
  225. throw new TypeError('cannot append to compressed archives');
  226. }
  227. if (!entries?.length) {
  228. throw new TypeError('no paths specified to add/replace');
  229. }
  230. });
  231. //# sourceMappingURL=replace.js.map