@@ -5,7 +5,6 @@ const contentPath = require('./content/path')
 const crypto = require('crypto')
 const fixOwner = require('./util/fix-owner')
 const fs = require('graceful-fs')
-const lockfile = require('lockfile')
 const path = require('path')
 const pipe = require('mississippi').pipe
 const Promise = require('bluebird')
@@ -14,64 +13,30 @@ const through = require('mississippi').through
 
 const indexV = require('../package.json')['cache-version'].index
 
+const appendFileAsync = Promise.promisify(fs.appendFile)
+
 module.exports.insert = insert
 function insert (cache, key, digest, opts) {
   opts = opts || {}
   const bucket = bucketPath(cache, key)
-  const lock = bucket + '.lock'
   return fixOwner.mkdirfix(
     path.dirname(bucket), opts.uid, opts.gid
-  ).then(() => (
-    Promise.fromNode(_cb => {
-      const cb = (err, entry) => {
-        lockfile.unlock(lock, er => {
-          _cb(err || er, entry)
-        })
-      }
-      lockfile.lock(lock, {
-        stale: 60000,
-        retries: 10,
-        wait: 10000
-      }, function (err) {
-        if (err) { return _cb(err) }
-        fs.stat(bucket, function (err, existing) {
-          if (err && err.code !== 'ENOENT' && err.code !== 'EPERM') {
-            return cb(err)
-          }
-          const entry = {
-            key: key,
-            digest: digest,
-            hashAlgorithm: opts.hashAlgorithm,
-            time: +(new Date()),
-            metadata: opts.metadata
-          }
-          // Because of the way these entries work,
-          // the index is safe from fs.appendFile stopping
-          // mid-write so long as newlines are *prepended*
-          //
-          // That is, if a write fails, it will be ignored
-          // by `find`, and the next successful one will be
-          // used.
-          //
-          // This should be -very rare-, since `fs.appendFile`
-          // will often be atomic on most platforms unless
-          // very large metadata has been included, but caches
-          // like this one tend to last a long time. :)
-          // Most corrupted reads are likely to be from attempting
-          // to read the index while it's being written to --
-          // which is safe, but not guaranteed to be atomic.
-          const e = (existing ? '\n' : '') + JSON.stringify(entry)
-          fs.appendFile(bucket, e, function (err) {
-            cb(err, entry)
-          })
-        })
-      })
-    })
-  )).then(entry => {
-    return fixOwner.chownr(bucket, opts.uid, opts.gid).then(() => {
-      return formatEntry(cache, entry)
-    })
-  })
+  ).then(() => {
+    const entry = {
+      key: key,
+      digest: digest,
+      hashAlgorithm: opts.hashAlgorithm,
+      time: +(new Date()),
+      metadata: opts.metadata
+    }
+    return appendFileAsync(
+      bucket, '\n' + JSON.stringify(entry)
+    ).then(() => entry)
+  }).then(entry => (
+    fixOwner.chownr(bucket, opts.uid, opts.gid).then(() => (
+      formatEntry(cache, entry)
+    ))
+  ))
 }
 
 module.exports.find = find
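
Note on the new approach: the lockfile is gone entirely. Each entry is written with a single `fs.appendFile` call, with the newline *prepended* rather than appended, so an interrupted write can only corrupt the tail of the file, and the next successful append still starts on a fresh line. A hedged usage sketch follows; the cache path, key, and digest are made up, the module location is assumed, and the exact shape of the resolved value comes from `formatEntry`, which isn't shown in this hunk:

```js
const index = require('./lib/entry-index') // assumed module location

index.insert('/tmp/my-cache', 'registry:foo', 'deadbeef', {
  hashAlgorithm: 'sha1',
  metadata: { source: 'https://example.com/foo.tgz' }
}).then(entry => {
  // The bucket file now ends with '\n' + JSON.stringify({ key, digest, ... })
  console.log('indexed', entry)
})
```
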
@@ -126,7 +91,7 @@ function lsStream (cache) {
       fs.readFile(path.join(indexDir, bucket, f), 'utf8', function (err, data) {
         if (err) { return cb(err) }
         const entries = {}
-        data.split('\n').forEach(function (entry) {
+        data.split('\n').slice(1).forEach(function (entry) {
           let parsed
           try {
             parsed = JSON.parse(entry)
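
Why `slice(1)` is now safe (and necessary): because `insert` always prepends `'\n'`, every bucket file begins with a newline, so the first element of `split('\n')` is an empty string (or the remnant of an interrupted write) and can be discarded unconditionally. A quick illustration:

```js
const data = '\n{"key":"a","digest":"d1"}\n{"key":"b","digest":"d2"}'
data.split('\n')          // [ '', '{"key":"a",...}', '{"key":"b",...}' ]
data.split('\n').slice(1) // [ '{"key":"a",...}', '{"key":"b",...}' ]
```
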
@@ -186,30 +151,15 @@ function bucketDir (cache) {
 module.exports._bucketPath = bucketPath
 function bucketPath (cache, key) {
   const hashed = hashKey(key)
-  return path.join(bucketDir(cache), hashed.slice(0, 2), hashed)
+  return path.join(bucketDir(cache), hashed.slice(0, 2), hashed.slice(2))
 }
 
 module.exports._hashKey = hashKey
 function hashKey (key) {
-  // NOTE (SECURITY)
-  //
-  // `sha1` conflicts can be generated, but it doesn't matter in this case,
-  // since we intend for there to be regular conflicts anyway. You can have
-  // the entire cache in a single bucket and all that'll do is just make a big
-  // file with a lot of contention, if you can even pull it off in the `key`
-  // string. So whatever. `sha1` is faster and it doesn't trigger the warnings
-  // `md5` tends to (yet?...).
-  //
-  // Not to mention, that in the case of pacote/npm, the amount of control
-  // anyone would have over this key is so minimal that it's incredibly
-  // unlikely that they could intentionally generate a large number of
-  // conflicts just with a package key such that they'd do anything resembling
-  // a hash flood DOS.
   return crypto
-    .createHash('sha1')
-    .update(key.toLowerCase()) // lump case-variant keys into same bucket.
+    .createHash('sha256')
+    .update(key)
     .digest('hex')
-    .slice(0, 7)
 }
 
 function formatEntry (cache, entry) {
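
Taken together, the last two hunks also change the on-disk index layout: keys are hashed with a full `sha256` digest (no more lowercasing or truncation to 7 characters), and the filename no longer repeats its two-character prefix directory. A rough sketch of the resulting path; `index-v1` here is a stand-in, since `bucketDir` (not shown) derives the real directory name from the package's `cache-version`:

```js
const crypto = require('crypto')
const path = require('path')

const hashed = crypto.createHash('sha256').update('my-key').digest('hex')
// Before: path.join(dir, hashed.slice(0, 2), hashed) over a 7-char sha1 of
// key.toLowerCase(), so the 2-char prefix was repeated in the filename.
// After: the prefix directory plus the remaining 62 hex characters.
console.log(path.join('/tmp/my-cache', 'index-v1', hashed.slice(0, 2), hashed.slice(2)))
```
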