'use strict'

const util = require('util')

const crypto = require('crypto')
const figgyPudding = require('figgy-pudding')
const fs = require('graceful-fs')
const Minipass = require('minipass')
const path = require('path')
const ssri = require('ssri')

const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const hashToSegments = require('./util/hash-to-segments')

const indexV = require('../package.json')['cache-version'].index

const appendFile = util.promisify(fs.appendFile)
const readFile = util.promisify(fs.readFile)
const readdir = util.promisify(fs.readdir)

module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    super(`No cache entry for ${key} found in ${cache}`)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}

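// Note (added for clarity): nothing in this module throws NotFoundError
// itself; find() resolves to null on a miss. The class is exported,
// presumably so callers can raise a consistent error type when an entry
// is missing.
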
const IndexOpts = figgyPudding({
  metadata: {},
  size: {}
})

module.exports.insert = insert
function insert (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  return fixOwner
    .mkdirfix(cache, path.dirname(bucket))
    .then(() => {
      const stringified = JSON.stringify(entry)
      // NOTE - Cleverness ahoy!
      //
      // This works because it's tremendously unlikely for an entry to corrupt
      // another in a way that still matches the checksum of the JSON in
      // question. So, we just slap a hash of the entry in front of it and
      // verify it on read.
      //
      // Thanks to @isaacs for the whiteboarding session that ended up with
      // this.
      return appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
    })
    .then(() => fixOwner.chownr(cache, bucket))
    .catch((err) => {
      if (err.code === 'ENOENT') {
        return undefined
      }
      throw err
      // There's a class of race conditions that happen when things get
      // deleted during fixOwner, or between the two mkdirfix/chownr calls.
      //
      // It's perfectly fine to just not bother in those cases and lie
      // that the index entry was written. Because it's a cache.
    })
    .then(() => {
      return formatEntry(cache, entry)
    })
}

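// Illustration (not part of the original source): each bucket file is an
// append-only, newline-delimited log in which every line has the shape
//
//   <sha1 hex of the JSON>\t<JSON entry>
//
// e.g. (hash shortened, values invented):
//
//   d2a84f4b8b65...\t{"key":"my-key","integrity":"sha512-...","time":1571887921154,"size":1024,"metadata":{}}
//
// bucketEntries() below re-hashes the JSON portion of each line and silently
// drops any line whose hash prefix does not match.
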
module.exports.insert.sync = insertSync
function insertSync (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  const stringified = JSON.stringify(entry)
  fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}

module.exports.find = find
function find (cache, key) {
  const bucket = bucketPath(cache, key)
  return bucketEntries(bucket)
    .then((entries) => {
      return entries.reduce((latest, next) => {
        if (next && next.key === key) {
          return formatEntry(cache, next)
        } else {
          return latest
        }
      }, null)
    })
    .catch((err) => {
      if (err.code === 'ENOENT') {
        return null
      } else {
        throw err
      }
    })
}

module.exports.find.sync = findSync
function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    return bucketEntriesSync(bucket).reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  }
}

module.exports.delete = del
function del (cache, key, opts) {
  return insert(cache, key, null, opts)
}

module.exports.delete.sync = delSync
function delSync (cache, key, opts) {
  return insertSync(cache, key, null, opts)
}

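// Illustration (not in the original source): deletion is just an insert with
// a null integrity. formatEntry() returns null for such tombstone entries, so
// a later find() for the same key resolves to null:
//
//   insert(cache, 'my-key', 'sha512-...')
//     .then(() => del(cache, 'my-key'))
//     .then(() => find(cache, 'my-key'))  // -> null
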
module.exports.lsStream = lsStream
function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = new Minipass({ objectMode: true })

  readdirOrEmpty(indexDir).then(buckets => Promise.all(
    buckets.map(bucket => {
      const bucketPath = path.join(indexDir, bucket)
      return readdirOrEmpty(bucketPath).then(subbuckets => Promise.all(
        subbuckets.map(subbucket => {
          const subbucketPath = path.join(bucketPath, subbucket)
          // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
          return readdirOrEmpty(subbucketPath).then(entries => Promise.all(
            entries.map(entry => {
              const entryPath = path.join(subbucketPath, entry)
              return bucketEntries(entryPath).then(entries =>
                // using a Map here prevents duplicate keys from
                // showing up twice, I guess?
                entries.reduce((acc, entry) => {
                  acc.set(entry.key, entry)
                  return acc
                }, new Map())
              ).then(reduced => {
                // reduced is a map of key => entry
                for (const entry of reduced.values()) {
                  const formatted = formatEntry(cache, entry)
                  if (formatted) {
                    stream.write(formatted)
                  }
                }
              }).catch(err => {
                if (err.code === 'ENOENT') { return undefined }
                throw err
              })
            })
          ))
        })
      ))
    })
  ))
    .then(
      () => stream.end(),
      err => stream.emit('error', err)
    )

  return stream
}

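// Illustration (assumed layout, based on bucketPath() and hashToSegments):
// the tree lsStream() walks looks roughly like
//
//   <cache>/index-v<N>/
//     <first 2 hex chars of sha256(key)>/
//       <next 2 hex chars>/
//         <remaining hex chars>    <- the bucket file read by bucketEntries()
//
// The exact split is whatever ./util/hash-to-segments produces.
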
module.exports.ls = ls
function ls (cache) {
  return lsStream(cache).collect().then(entries =>
    entries.reduce((acc, xs) => {
      acc[xs.key] = xs
      return acc
    }, {})
  )
}

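// Illustration (values invented): ls() resolves to an object keyed by cache
// key, each value being a formatted entry:
//
//   {
//     'my-key': {
//       key: 'my-key',
//       integrity: 'sha512-...',
//       path: '<result of contentPath(cache, integrity)>',
//       size: 1024,
//       time: 1571887921154,
//       metadata: { /* whatever was passed to insert() */ }
//     },
//     ...
//   }
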
function bucketEntries (bucket, filter) {
  return readFile(bucket, 'utf8').then((data) => _bucketEntries(data, filter))
}

function bucketEntriesSync (bucket, filter) {
  const data = fs.readFileSync(bucket, 'utf8')
  return _bucketEntries(data, filter)
}

function _bucketEntries (data, filter) {
  const entries = []
  data.split('\n').forEach((entry) => {
    if (!entry) {
      return
    }
    const pieces = entry.split('\t')
    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
      // Hash is no good! Corruption or malice? Doesn't matter!
      // EJECT EJECT
      return
    }
    let obj
    try {
      obj = JSON.parse(pieces[1])
    } catch (e) {
      // Entry is corrupted!
      return
    }
    if (obj) {
      entries.push(obj)
    }
  })
  return entries
}

module.exports.bucketDir = bucketDir
function bucketDir (cache) {
  return path.join(cache, `index-v${indexV}`)
}

module.exports.bucketPath = bucketPath
function bucketPath (cache, key) {
  const hashed = hashKey(key)
  return path.join.apply(
    path,
    [bucketDir(cache)].concat(hashToSegments(hashed))
  )
}

module.exports.hashKey = hashKey
function hashKey (key) {
  return hash(key, 'sha256')
}

module.exports.hashEntry = hashEntry
function hashEntry (str) {
  return hash(str, 'sha1')
}

function hash (str, digest) {
  return crypto
    .createHash(digest)
    .update(str)
    .digest('hex')
}

function formatEntry (cache, entry) {
  // Treat null digests as deletions. They'll shadow any previous entries.
  if (!entry.integrity) {
    return null
  }
  return {
    key: entry.key,
    integrity: entry.integrity,
    path: contentPath(cache, entry.integrity),
    size: entry.size,
    time: entry.time,
    metadata: entry.metadata
  }
}

function readdirOrEmpty (dir) {
  return readdir(dir).catch((err) => {
    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
      return []
    }
    throw err
  })
}