
Commit

Merge pull request #536 from fergiemcdowall/v2-issue-535
fix issue #535
fergiemcdowall authored Apr 10, 2021
2 parents 7d14235 + d522d17 commit 559f03f
Showing 8 changed files with 107 additions and 34 deletions.
4 changes: 2 additions & 2 deletions dist/search-index-2.2.0.js → dist/search-index-2.3.0.js

Large diffs are not rendered by default.

File renamed without changes.
2 changes: 1 addition & 1 deletion dist/search-index.js
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "search-index",
"version": "2.2.1",
"version": "2.3.0",
"description": "A network resilient, persistent full-text search library for the browser and Node.js",
"engines": {
"node": ">=12"
10 changes: 5 additions & 5 deletions src/main.js
@@ -4,14 +4,14 @@ const Cache = require('./cache.js')
const reader = require('./read.js')
const writer = require('./write.js')

const makeASearchIndex = ops => {
const makeASearchIndex = ops => new Promise((resolve) => {
// ".flush" clears the cache ".cache" creates/promotes a cache entry
const c = new Cache(ops.cacheLength)

const w = writer(ops.fii, ops) // TODO: should be just ops?
const r = reader(ops.fii)

return {
return w._INCREMENT_DOC_COUNT(0).then(() => resolve({
// internal functions inherited from fergies-inverted-index
_AND: ops.fii.AND,
_BUCKET: ops.fii.BUCKET,
@@ -38,7 +38,7 @@
EXPORT: ops.fii.EXPORT,
FACETS: r.FACETS,
FIELDS: ops.fii.FIELDS,
FLUSH: () => ops.fii.STORE.clear(),
FLUSH: () => c.flush().then(w.FLUSH),
IMPORT: idx => c.flush().then(() => ops.fii.IMPORT(idx)),
INDEX: ops.fii,
LAST_UPDATED: ops.fii.LAST_UPDATED,
@@ -47,8 +47,8 @@
PUT: (docs, pops) => c.flush().then(() => w.PUT(docs, pops)),
PUT_RAW: docs => c.flush().then(() => w.PUT_RAW(docs)),
QUERY: (q, qops) => c.cache({ QUERY: [q, qops] }, r.QUERY(q, qops))
}
}
}))
})

const initIndex = (ops = {}) => new Promise((resolve, reject) => {
ops = Object.assign({
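The key change in src/main.js: `makeASearchIndex` now returns a Promise and only resolves the public API after `w._INCREMENT_DOC_COUNT(0)` has run, so the document-count key exists before the index is handed out. A simplified sketch of that pattern (names taken from the diff above; the rest of the API object is elided):

```js
// Simplified sketch, not the full list of API methods shown in the diff.
const makeASearchIndex = ops => new Promise(resolve => {
  const c = new Cache(ops.cacheLength) // query cache: ".flush" clears it, ".cache" promotes entries
  const w = writer(ops.fii, ops)       // write side (PUT, DELETE, FLUSH, ...)
  const r = reader(ops.fii)            // read side (QUERY, FACETS, ...)

  // Incrementing the document count by zero is a cheap way of making sure the
  // counter key is present (and initialised) before any API call can touch it.
  w._INCREMENT_DOC_COUNT(0).then(() => resolve({
    FLUSH: () => c.flush().then(w.FLUSH), // empty the cache, then reset the index (see src/write.js below)
    PUT: (docs, pops) => c.flush().then(() => w.PUT(docs, pops)),
    QUERY: (q, qops) => c.cache({ QUERY: [q, qops] }, r.QUERY(q, qops))
    // ...remaining methods exactly as in the diff
  }))
})
```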
23 changes: 18 additions & 5 deletions src/write.js
@@ -69,16 +69,16 @@ module.exports = (fii, ops) => {
? ({ body: doc })
: doc

let counter = 0;
let counter = 0
const generateId = (doc, i) => (typeof doc._id === 'undefined')
? Object.assign(doc, {
// counter is needed because if this function is called in quick
// succession, Date.now() is not guaranteed to be unique. This could
// still conflict if the DB is closed, clock is reset to the past, then
// DB reopened. That's a bit of a corner case though.
_id: `${Date.now()}-${i}-${counter++}`,
})
: doc;
_id: `${Date.now()}-${i}-${counter++}`
})
: doc

const indexingPipeline = docs => new Promise(
resolve => resolve(
@@ -126,13 +126,26 @@
]).then(() => result)
})

const _FLUSH = () => ops.fii.STORE.clear()
.then(() => {
const timestamp = Date.now()
return ops.fii.STORE.batch([
{ type: 'put', key: '○○CREATED', value: timestamp },
{ type: 'put', key: '○○LAST_UPDATED', value: timestamp },
{ type: 'put', key: '○DOCUMENT_COUNT○', value: 0 }
])
})
.then(() => true)

return {
// TODO: DELETE should be able to handle errors (_id not found etc.)
DELETE: docIds => _DELETE(docIds), // for external use
FLUSH: _FLUSH,
IMPORT: fii.IMPORT,
PUT: _PUT,
PUT_RAW: _PUT_RAW,
_DELETE: _DELETE, // for internal use
_DELETE: _DELETE,
_INCREMENT_DOC_COUNT: incrementDocCount,
_PUT: _PUT,
_PUT_RAW: _PUT_RAW
}
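This is the heart of the fix for issue #535: the old `FLUSH` simply called `ops.fii.STORE.clear()`, which also wiped the index metadata (creation timestamp, last-updated timestamp and document count) and appears to be what broke re-indexing after a flush (the new issue-535 test below exercises exactly that PUT → FLUSH → PUT sequence). The new `_FLUSH` clears the store and then re-seeds those keys in one levelup-style batch. A minimal annotated sketch of the same function, assuming `ops.fii.STORE` is the levelup instance used elsewhere in the diff:

```js
// Minimal sketch of the _FLUSH added above; error handling is omitted.
const _FLUSH = () =>
  ops.fii.STORE.clear()              // drop every key/value pair in the underlying store
    .then(() => {
      const timestamp = Date.now()
      return ops.fii.STORE.batch([   // atomically re-create the index metadata
        { type: 'put', key: '○○CREATED', value: timestamp },
        { type: 'put', key: '○○LAST_UPDATED', value: timestamp },
        { type: 'put', key: '○DOCUMENT_COUNT○', value: 0 } // PUT relies on this counter existing
      ])
    })
    .then(() => true)                // resolve truthy so callers can do `t.ok(await FLUSH())`
```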
45 changes: 25 additions & 20 deletions test/src/FLUSH-test.js
@@ -36,18 +36,18 @@ test('can add some data', t => {

test('verify index structure', t => {
const expectedIndexStructure = [
{ key: 'body.metadata:coolness#1.00', value: [ 'a' ] },
{ key: 'body.metadata:documentness#1.00', value: [ 'a' ] },
{ key: 'body.text:cool#1.00', value: [ 'a' ] },
{ key: 'body.text:document#0.33', value: [ 'a' ] },
{ key: 'body.text:is#0.33', value: [ 'a' ] },
{ key: 'body.text:really#0.33', value: [ 'a' ] },
{ key: 'body.text:this#0.33', value: [ 'a' ] },
{ key: 'importantnumber:5000#1.00', value: [ 'a' ] },
{ key: 'title:a#1.00', value: [ 'a' ] },
{ key: 'title:cool#1.00', value: [ 'a' ] },
{ key: 'title:document#1.00', value: [ 'a' ] },
{ key: 'title:quite#1.00', value: [ 'a' ] },
{ key: 'body.metadata:coolness#1.00', value: ['a'] },
{ key: 'body.metadata:documentness#1.00', value: ['a'] },
{ key: 'body.text:cool#1.00', value: ['a'] },
{ key: 'body.text:document#0.33', value: ['a'] },
{ key: 'body.text:is#0.33', value: ['a'] },
{ key: 'body.text:really#0.33', value: ['a'] },
{ key: 'body.text:this#0.33', value: ['a'] },
{ key: 'importantnumber:5000#1.00', value: ['a'] },
{ key: 'title:a#1.00', value: ['a'] },
{ key: 'title:cool#1.00', value: ['a'] },
{ key: 'title:document#1.00', value: ['a'] },
{ key: 'title:quite#1.00', value: ['a'] },
{ key: '○DOCUMENT_COUNT○', value: 1 },
{
key: '○DOC_RAW○a○',
@@ -65,14 +65,14 @@
key: '○DOC○a○',
value: {
_id: 'a',
title: [ 'a#1.00', 'cool#1.00', 'document#1.00', 'quite#1.00' ],
title: ['a#1.00', 'cool#1.00', 'document#1.00', 'quite#1.00'],
body: {
text: [
'cool#1.00', 'document#0.33', 'is#0.33', 'really#0.33', 'this#0.33'
],
metadata: [ 'coolness#1.00', 'documentness#1.00' ]
metadata: ['coolness#1.00', 'documentness#1.00']
},
importantNumber: [ '5000#1.00' ]
importantNumber: ['5000#1.00']
}
},
{ key: '○FIELD○body.metadata○', value: 'body.metadata' },
@@ -83,14 +83,19 @@
t.plan(expectedIndexStructure.length)
global[indexName].INDEX.STORE.createReadStream({ lt: '○○' })
.on('data', d => t.deepEquals(
d, expectedIndexStructure.shift())
)
d, expectedIndexStructure.shift()
))
})

test('FLUSH index and verify', t => {
t.plan(1)
t.plan(2)
const expectedIndexStructure = [
{ key: '○DOCUMENT_COUNT○', value: 0 }
]
global[indexName].FLUSH().then(
() => global[indexName].INDEX.STORE.createReadStream({ lt: '○○' })
.on('data', d => t.fail('there shouldnt be anything here'))
).then(() => t.pass('index appears empty'))
.on('data', d => t.deepEquals(
d, expectedIndexStructure.shift()
))
).then(() => t.pass('index appears empty'))
})
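The FLUSH test now plans two assertions: after `FLUSH()` the store below the `○○` metadata prefix is no longer expected to be completely empty, it should contain exactly one entry, the zeroed document count. A small standalone sketch of the same check, assuming an initialised index in a variable named `index` (hypothetical name) with the `INDEX.STORE.createReadStream` interface used above:

```js
// Hypothetical helper: collect every store entry below the '○○' metadata prefix.
const dumpStore = index =>
  new Promise(resolve => {
    const entries = []
    index.INDEX.STORE.createReadStream({ lt: '○○' }) // excludes ○○CREATED / ○○LAST_UPDATED
      .on('data', d => entries.push(d))
      .on('end', () => resolve(entries))
  })

// After FLUSH, the only entry left below '○○' should be the reset counter:
// [ { key: '○DOCUMENT_COUNT○', value: 0 } ]
index.FLUSH()
  .then(() => dumpStore(index))
  .then(entries => console.log(entries))
```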
55 changes: 55 additions & 0 deletions test/src/issue-535-test.js
@@ -0,0 +1,55 @@
const si = require('../../')
const test = require('tape')

const sandbox = 'test/sandbox/'

const docs = [
{ _id: 'qwertyu', idx: 'q' },
{ _id: 'asdfgh', idx: 'a' }
]

test('set up as per issue #535', async function (t) {
t.plan(7)

const { PUT, QUERY, FLUSH } = await si({
name: sandbox + '535'
})
t.ok(PUT)

t.deepEquals(await PUT(docs), [
{ _id: 'qwertyu', status: 'CREATED', operation: 'PUT' },
{ _id: 'asdfgh', status: 'CREATED', operation: 'PUT' }
])

t.deepEquals(await QUERY({
SEARCH: ['q']
}), {
RESULT: [
{ _id: 'qwertyu', _match: ['idx:q#1.00'], _score: 1.1 }
],
RESULT_LENGTH: 1
})

t.ok(await FLUSH())

t.deepEquals(await QUERY({
SEARCH: ['q']
}), {
RESULT: [],
RESULT_LENGTH: 0
})

t.deepEquals(await PUT(docs), [
{ _id: 'qwertyu', status: 'CREATED', operation: 'PUT' },
{ _id: 'asdfgh', status: 'CREATED', operation: 'PUT' }
])

t.deepEquals(await QUERY({
SEARCH: ['q']
}), {
RESULT: [
{ _id: 'qwertyu', _match: ['idx:q#1.00'], _score: 1.1 }
],
RESULT_LENGTH: 1
})
})
