
// tar -r
import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
import fs from 'node:fs';
import path from 'node:path';
import { Header } from './header.js';
import { list } from './list.js';
import { makeCommand } from './make-command.js';
import { isFile } from './options.js';
import { Pack, PackSync } from './pack.js';
// Starting at the head of the file, read a Header.
// If the checksum is invalid, that's our position to start writing.
// If it is valid, jump forward by the specified size (rounded up to 512)
// and try again.
// Write the new Pack stream starting there.
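// A tar archive is a sequence of 512-byte blocks: each entry is one
// header block followed by its body, zero-padded to a 512-byte
// boundary, with blocks of zeroes marking the end of the archive.
// The scans below rely on that layout.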
const replaceSync = (opt, files) => {
  const p = new PackSync(opt);
  let threw = true;
  let fd;
  let position;
  try {
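    // Open the archive for reading and writing; if it doesn't exist
    // yet, fall back to creating it.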
    try {
      fd = fs.openSync(opt.file, 'r+');
    } catch (er) {
      if (er?.code === 'ENOENT') {
        fd = fs.openSync(opt.file, 'w+');
      } else {
        throw er;
      }
    }
    const st = fs.fstatSync(fd);
    const headBuf = Buffer.alloc(512);
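    // Walk the archive one header block at a time to find the offset
    // where the existing entries end.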
    POSITION: for (position = 0; position < st.size; position += 512) {
      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
        bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
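        // 0x1f 0x8b is the gzip magic number; appending to compressed
        // data would corrupt the archive, so refuse outright.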
        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
          throw new Error('cannot append to compressed archives');
        }
        if (!bytes) {
          break POSITION;
        }
      }
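      // An invalid checksum marks the end-of-archive padding (or
      // trailing garbage): new entries get written from here.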
      const h = new Header(headBuf);
      if (!h.cksumValid) {
        break;
      }
      const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
      if (position + entryBlockSize + 512 > st.size) {
        break;
      }
      // the 512 for the header we just parsed will be added as well
      // also jump ahead all the blocks for the body
      position += entryBlockSize;
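      // Remember each existing entry's mtime so that filters (e.g.
      // the update command's newer-than check) can consult them.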
      if (opt.mtimeCache && h.mtime) {
        opt.mtimeCache.set(String(h.path), h.mtime);
      }
    }
    threw = false;
    streamSync(opt, p, position, fd, files);
  } finally {
    if (threw) {
      try {
        fs.closeSync(fd);
      } catch (er) {}
    }
  }
};
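// Pipe the new Pack stream into the file starting at the computed
// offset; everything from `position` on (the old end-of-archive
// blocks) is overwritten, and Pack emits fresh padding at the end.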
const streamSync = (opt, p, position, fd, files) => {
  const stream = new WriteStreamSync(opt.file, {
    fd: fd,
    start: position,
  });
  p.pipe(stream);
  addFilesSync(p, files);
};
const replaceAsync = (opt, files) => {
  files = Array.from(files);
  const p = new Pack(opt);
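  // Async analogue of the header scan in replaceSync: calls back with
  // the offset where new entries should start, closing fd on error.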
  const getPos = (fd, size, cb_) => {
    const cb = (er, pos) => {
      if (er) {
        fs.close(fd, _ => cb_(er));
      } else {
        cb_(null, pos);
      }
    };
    let position = 0;
    if (size === 0) {
      return cb(null, 0);
    }
    let bufPos = 0;
    const headBuf = Buffer.alloc(512);
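    // Accumulate partial reads until a full 512-byte header block is
    // buffered (or the file ends short).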
    const onread = (er, bytes) => {
      if (er || typeof bytes === 'undefined') {
        return cb(er);
      }
      bufPos += bytes;
      if (bufPos < 512 && bytes) {
        return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
      }
      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
        return cb(new Error('cannot append to compressed archives'));
      }
      // truncated header
      if (bufPos < 512) {
        return cb(null, position);
      }
      const h = new Header(headBuf);
      if (!h.cksumValid) {
        return cb(null, position);
      }
      /* c8 ignore next */
      const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
      if (position + entryBlockSize + 512 > size) {
        return cb(null, position);
      }
      position += entryBlockSize + 512;
      if (position >= size) {
        return cb(null, position);
      }
      if (opt.mtimeCache && h.mtime) {
        opt.mtimeCache.set(String(h.path), h.mtime);
      }
      bufPos = 0;
      fs.read(fd, headBuf, 0, 512, position, onread);
    };
    fs.read(fd, headBuf, 0, 512, position, onread);
  };
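  // Mirror the sync path's open logic: try 'r+' first, then fall back
  // to 'w+' on ENOENT so a missing archive is created.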
  const promise = new Promise((resolve, reject) => {
    p.on('error', reject);
    let flag = 'r+';
    const onopen = (er, fd) => {
      if (er && er.code === 'ENOENT' && flag === 'r+') {
        flag = 'w+';
        return fs.open(opt.file, flag, onopen);
      }
      if (er || !fd) {
        return reject(er);
      }
      fs.fstat(fd, (er, st) => {
        if (er) {
          return fs.close(fd, () => reject(er));
        }
        getPos(fd, st.size, (er, position) => {
          if (er) {
            return reject(er);
          }
          const stream = new WriteStream(opt.file, {
            fd: fd,
            start: position,
          });
          p.pipe(stream);
          stream.on('error', reject);
          stream.on('close', resolve);
          addFilesAsync(p, files);
        });
      });
    };
    fs.open(opt.file, flag, onopen);
  });
  return promise;
};
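// A file argument beginning with '@' names an existing tar archive:
// its entries are read back via list() and re-added, rather than
// treating the string as a filesystem path.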
const addFilesSync = (p, files) => {
  files.forEach(file => {
    if (file.charAt(0) === '@') {
      list({
        file: path.resolve(p.cwd, file.slice(1)),
        sync: true,
        noResume: true,
        onReadEntry: entry => p.add(entry),
      });
    } else {
      p.add(file);
    }
  });
  p.end();
};
const addFilesAsync = async (p, files) => {
  for (let i = 0; i < files.length; i++) {
    const file = String(files[i]);
    if (file.charAt(0) === '@') {
      await list({
        file: path.resolve(String(p.cwd), file.slice(1)),
        noResume: true,
        onReadEntry: entry => p.add(entry),
      });
    } else {
      p.add(file);
    }
  }
  p.end();
};
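// Usage sketch, assuming the package's public entry point re-exports
// this command (node-tar exposes it as `replace`, aliased as `r`):
//
//   import { replace } from 'tar'
//   // async: resolves once the archive has been rewritten
//   await replace({ file: 'archive.tar' }, ['added-file.txt'])
//   // sync: runs replaceSync under the hood
//   replace({ file: 'archive.tar', sync: true }, ['added-file.txt'])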
export const replace = makeCommand(
  replaceSync,
  replaceAsync,
  /* c8 ignore start */
  () => {
    throw new TypeError('file is required');
  },
  () => {
    throw new TypeError('file is required');
  },
  /* c8 ignore stop */
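  // Validation shared by both paths: replace requires an on-disk,
  // uncompressed tar file and at least one entry to add. Brotli has
  // no magic-number signature, so it is caught here by option or
  // extension, while gzip is also sniffed at read time.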
  (opt, entries) => {
    if (!isFile(opt)) {
      throw new TypeError('file is required');
    }
    if (
      opt.gzip ||
      opt.brotli ||
      opt.file.endsWith('.br') ||
      opt.file.endsWith('.tbr')
    ) {
      throw new TypeError('cannot append to compressed archives');
    }
    if (!entries?.length) {
      throw new TypeError('no paths specified to add/replace');
    }
  },
);
//# sourceMappingURL=replace.js.map