'use strict'
const BB = require('bluebird')
const CacheIndex = require('./util/cache-index')
const contentPath = require('../lib/content/path')
const fs = require('fs')
const path = require('path')
const Tacks = require('tacks')
const test = require('tap').test
const testDir = require('./util/test-dir')(__filename)
BB.promisifyAll(fs)
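
// Shared fixtures: a fresh per-file test directory, one cache key, and a
// fake integrity string and size used by every insertion below.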
const CACHE = path.join(testDir, 'cache')
const index = require('../lib/entry-index')
const KEY = 'foo'
const BUCKET = index._bucketPath(CACHE, KEY)
const INTEGRITY = 'sha512-deadbeef'
const SIZE = 999
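
// Every insert in these tests passes a size; opts() merges it with any
// per-test extras (e.g. metadata).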
function opts (extra) {
return Object.assign({
size: SIZE
}, extra)
}
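
// Inserting a new key should return a formatted entry and append a
// hash-prefixed, tab-separated line to the bucket file.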
test('basic insertion', function (t) {
return index.insert(CACHE, KEY, INTEGRITY, opts({
metadata: 'foo'
})).then(entry => {
t.deepEqual(entry, {
key: KEY,
integrity: INTEGRITY,
path: contentPath(CACHE, INTEGRITY),
time: entry.time,
metadata: 'foo',
size: SIZE
}, 'formatted entry returned')
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
t.equal(data[0], '\n', 'first entry starts with a \\n')
const split = data.split('\t')
t.equal(split[0].slice(1), index._hashEntry(split[1]), 'consistency header correct')
const entry = JSON.parse(split[1])
t.ok(entry.time, 'entry has a timestamp')
t.deepEqual(entry, {
key: KEY,
integrity: INTEGRITY,
time: entry.time,
metadata: 'foo',
size: SIZE
}, 'entry matches what was inserted')
})
})
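
// A second insert for the same key should append a new entry rather than
// overwrite the existing one.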
test('inserts additional entries into existing key', function (t) {
return index.insert(CACHE, KEY, INTEGRITY, opts({
metadata: 1
})).then(() => (
index.insert(CACHE, KEY, INTEGRITY, opts({ metadata: 2 }))
)).then(() => {
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
const entries = data.split('\n').slice(1).map(line => {
return JSON.parse(line.split('\t')[1])
})
entries.forEach(function (e) { delete e.time })
t.deepEqual(entries, [{
key: KEY,
integrity: INTEGRITY,
metadata: 1,
size: SIZE
}, {
key: KEY,
integrity: INTEGRITY,
metadata: 2,
size: SIZE
}], 'all entries present')
})
})
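
// A bucket containing a mangled line should not prevent new entries from
// being appended and parsed back out.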
test('separates entries even if one is corrupted', function (t) {
// TODO - check that middle-of-string corrupted writes won't hurt.
const fixture = new Tacks(CacheIndex({
'foo': '\n' + JSON.stringify({
key: KEY,
integrity: 'meh',
time: 54321,
size: SIZE
}) + '\n{"key": "' + KEY + '"\noway'
}))
fixture.create(CACHE)
return index.insert(
CACHE, KEY, INTEGRITY, opts()
).then(() => {
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
const entry = JSON.parse(data.split('\n')[4].split('\t')[1])
delete entry.time
t.deepEqual(entry, {
key: KEY,
integrity: INTEGRITY,
size: SIZE
}, 'new entry unaffected by corruption')
})
})
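
// Arbitrary metadata passed to insert() should round-trip through the
// bucket file unchanged.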
test('optional arbitrary metadata', function (t) {
const metadata = { foo: 'bar' }
return index.insert(
CACHE, KEY, INTEGRITY, opts({ metadata: metadata })
).then(() => {
return fs.readFileAsync(BUCKET, 'utf8')
}).then(data => {
const entry = JSON.parse(data.split('\t')[1])
delete entry.time
t.deepEqual(entry, {
key: KEY,
integrity: INTEGRITY,
metadata: metadata,
size: SIZE
}, 'entry includes inserted metadata')
})
})
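
// Keys that differ only in case should map to distinct entries.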
test('key case-sensitivity', function (t) {
return BB.join(
index.insert(CACHE, KEY, INTEGRITY, opts()),
index.insert(CACHE, KEY.toUpperCase(), INTEGRITY + 'upper', opts())
).then(() => {
return BB.join(
index.find(CACHE, KEY),
index.find(CACHE, KEY.toUpperCase()),
(entry, upperEntry) => {
delete entry.time
delete upperEntry.time
t.deepEqual({
key: entry.key,
integrity: entry.integrity,
size: entry.size
}, {
key: KEY,
integrity: INTEGRITY,
size: SIZE
}, 'regular entry exists')
t.deepEqual({
key: upperEntry.key,
integrity: upperEntry.integrity,
size: upperEntry.size
}, {
key: KEY.toUpperCase(),
integrity: INTEGRITY + 'upper',
size: SIZE
}, 'case-variant entry intact')
}
)
})
})
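
// Keys containing path separators, newlines, and other unsafe characters
// should still produce a readable bucket entry.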
test('path-breaking characters', function (t) {
const newKey = ';;!registry\nhttps://registry.npmjs.org/back \\ slash@Cool™?'
return index.insert(
CACHE, newKey, INTEGRITY, opts()
).then(() => {
const bucket = index._bucketPath(CACHE, newKey)
return fs.readFileAsync(bucket, 'utf8')
}).then(data => {
const entry = JSON.parse(data.split('\t')[1])
delete entry.time
t.deepEqual(entry, {
key: newKey,
integrity: INTEGRITY,
size: SIZE
}, 'entry exists and matches original key with invalid chars')
})
})
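
// Very long keys should not break bucket path generation or lookup.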
test('extremely long keys', function (t) {
let newKey = ''
for (let i = 0; i < 10000; i++) {
newKey += i
}
return index.insert(
CACHE, newKey, INTEGRITY, opts()
).then(() => {
const bucket = index._bucketPath(CACHE, newKey)
return fs.readFileAsync(bucket, 'utf8')
}).then(data => {
const entry = JSON.parse(data.split('\t')[1])
delete entry.time
t.deepEqual(entry, {
key: newKey,
integrity: INTEGRITY,
size: SIZE
}, 'entry exists in spite of INCREDIBLY LONG key')
})
})
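
// Declared without a body so tap reports them as todo.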
test('concurrent writes')
test('correct ownership')