commit 8c24feee9a4cf2bf1bbf0cf44811187060e7f1f2
Author: Timothy Farrell
Date:   Wed Jul 4 08:22:23 2018 -0500

    PouchORM becomes PouchType

    API changed and full test coverage.

diff --git a/packages/pouchtype/README.md b/packages/pouchtype/README.md
new file mode 100644
index 0000000..d9bc899
--- /dev/null
+++ b/packages/pouchtype/README.md
@@ -0,0 +1,137 @@
+# PouchType
+
+A type-based abstraction layer over PouchDB, inspired by [Hood.ie](https://hood.ie/) and
+[Django](https://djangoproject.com).
+
+## Extending the TypeHandler class
+
+PouchType works by extending the _TypeHandler_ class with methods to define how a document type
+should be handled. The resulting class is instantiated with a PouchDB instance and used to interact
+with documents of that type.
+
+All subclasses of _TypeHandler_ must override the `getUniqueID` method. This method should return a
+unique string for documents of this type. (NOTE: The document id will also be prefixed with the
+handler's type string to avoid collisions across types.)
+
+```js
+import PouchDB from 'pouchdb';
+import find from 'pouchdb-find';
+import { TypeHandler } from 'pouchtype';
+
+const PDB = PouchDB.plugin(find); // PouchType requires the find plugin.
+
+class ContactHandler extends TypeHandler {
+    getUniqueID(doc) {
+        return doc.email;
+    }
+
+    validate(doc) {
+        super.validate(doc);
+
+        if (typeof doc.email != 'string') {
+            throw new Error('email property is required');
+        } else if (doc.email.length <= 2) {
+            throw new Error('email must be longer than 2 characters');
+        }
+    }
+}
+```
+
+## TypeHandler management methods
+
+### getUniqueID(doc)
+
+_This method must be overridden in subclasses._
+
+Return a unique string that will be used to populate `doc._id` if it isn't already populated.
+
+### hash(doc)
+
+Return a hash string of the current document for comparison. By default this is
+"`doc._id`:`doc._rev`", which works as long as the hash is only taken after any changes are saved.
+You may wish to override this to provide content-specific hashing.
+
+### index(name, fields)
+
+Create an index to be used in a `filter` selector and sort options. Specify the `name` of the index
+and the `fields` as an array of strings naming the document properties to include in the index.
+
+### isType(doc)
+
+Check whether the passed `doc` belongs to this handler.
+
+### validate(doc)
+
+Check that the passed `doc` has valid data before it is written. Invalidation happens by raising an
+exception. For more fine-grained validation, refer to the
+[pouchdb-validation](https://github.com/pouchdb/pouchdb-validation) plugin or the
+[validate_doc_update](http://guide.couchdb.org/draft/validation.html) function.
+
+## TypeHandler query methods
+
+### get(id)
+
+Return the document referenced by `id`, or `null` if the document does not exist.
+
+### getOrCreate(doc, defaults={})
+
+If the passed document doesn't have an `_id` property, populate it. Try to look up a document with
+that `_id`. If it exists, update it with the properties in `doc`. If it does not exist, add the
+properties in `defaults` to `doc` and save the new document.
+
+### filter(selector, options={})
+
+Return an array of documents that match the criteria in `selector`. `options.index` can contain the
+`name` passed to `index()` if needed. All other option properties are passed through to
+[PouchDB.find()](https://pouchdb.com/api.html#query_index).
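+
+For example, a short sketch assuming the `ContactHandler` above is instantiated as `Contact`
+(see _Using Types_ below) and that `Contact.index('lastIndex', ['last'])` has been created:
+
+```js
+// Selector-only query.
+const smiths = await Contact.filter({ last: { $regex: /^Smith.*/ } });
+
+// Sorted query using the named index.
+const sorted = await Contact.filter({ last: { $gte: '' } }, { sort: ['last'], index: 'lastIndex' });
+```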
+
+### watch(selector, options={})
+
+The parameters for `watch()` are identical to `filter()`, but `watch()` returns a
+[computed](../frptools/README.md#computed) instance that will call subscribers whenever any data
+matching the selector changes.
+
+## TypeHandler change methods
+
+### remove(docOrId)
+
+`remove` accepts a document or an id string.
+
+Flag this document as `doc._deleted == true`. This causes it to no longer show up in `get`,
+`filter` or `watch` results. (_Note_: documents are left in the database in this state to allow the
+deletion to be synced to other nodes. To truly remove documents,
+[compact your database](https://pouchdb.com/api.html#compaction).)
+
+### save(doc)
+
+If the document does not have the `_id` or `type` properties, populate them appropriately. Run
+`TypeHandler.validate` on the document. If validate doesn't throw any errors, save the document to
+the database. Its `_rev` property will be updated to reflect the revision in the database.
+
+### update(doc, props)
+
+Deeply assign `props` to the passed `doc` object and `save()` it.
+
+### addAttachment(doc, key, blob)
+
+Attach the passed `blob` to `doc`, stored under the `key` string.
+
+### removeAttachment(doc, key)
+
+Remove a previously attached blob at `key`.
+
+### getAttachment(doc, key)
+
+Return a previously attached blob at `key`, or `null` if none exists.
+
+## Using Types
+
+```js
+const db = new PDB('type_example');
+
+export const Contact = new ContactHandler(db, 'contact');
+
+const doc = await Contact.getOrCreate({
+    name: "John Doe",
+    email: "jd@example.com"
+});
+```
diff --git a/packages/pouchtype/package.json b/packages/pouchtype/package.json
new file mode 100644
index 0000000..05ace4b
--- /dev/null
+++ b/packages/pouchtype/package.json
@@ -0,0 +1,16 @@
+{
+  "name": "pouchtype",
+  "version": "1.0.0",
+  "description": "Document Management Layer for PouchDB",
+  "main": "src/index.js",
+  "files": ["dist", "lib", "src"],
+  "scripts": {
+    "test": "node ../../bin/runTests.js ./"
+  },
+  "author": "Timothy Farrell (https://github.com/explorigin)",
+  "license": "Apache-2.0",
+  "dependencies": {
+    "frptools": "~3.2.1",
+    "pouchdb": "~7.0.0"
+  }
+}
diff --git a/packages/pouchtype/spec/livearray.spec.js b/packages/pouchtype/spec/livearray.spec.js
new file mode 100644
index 0000000..139cdd4
--- /dev/null
+++ b/packages/pouchtype/spec/livearray.spec.js
@@ -0,0 +1,79 @@
+import { LiveArray } from '../src/livearray.js';
+import { pouchDocArrayHash } from '../src/utils.js';
+
+describe('A LiveArray', () => {
+    let fakePouch;
+    const selector = {};
+    const opts = {};
+
+    beforeEach(() => {
+        fakePouch = {
+            find: () => {},
+            changes: () => fakePouch,
+            on: () => fakePouch
+        };
+    });
+
+    it('returns a computed (subscribable).', async () => {
+        const la = await LiveArray(fakePouch, selector, opts);
+
+        expect(typeof la).toEqual('function');
+        expect(typeof la.subscribe).toEqual('function');
+
+        expect(JSON.stringify(la())).toEqual('[]');
+    });
+
+    it('fires when data changes.', async () => {
+        let state = 0;
+        const changes = {
+            234: { id: 234, deleted: false, doc: { _id: 234 } },
+            34: { id: 34, deleted: true, doc: { _id: 34 } },
+            4564565: { id: 4564565, deleted: false, doc: { _id: 4564565 } }
+        };
+        const changeKeys = Object.keys(changes);
+        let sub = null;
+
+        const db = {
+            changes: options => {
+                expect(state).toEqual(1);
+                expect(options.live).toEqual(true);
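+                // The feed starts at 'now' (asserted below); LiveArray seeds its initial results with a separate find() call.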
expect(options.since).toEqual('now'); + expect(options.selector).toBe(selector); + return db; + }, + on: (eventName, callback) => { + if (eventName == 'change') { + sub = callback; + } + expect(['change', 'error'].indexOf(eventName) !== -1).toBeTruthy(); + return db; + }, + cancel: () => (sub = null), + find: async selector => { + const doc = changes[parseInt(changeKeys[state - 1])]; + state += 1; + if (doc === undefined || doc.deleted) { + return Promise.resolve({ docs: [] }); + } + return Promise.resolve({ docs: [doc.doc] }); + } + }; + + const la = await LiveArray(db, selector, opts); + state = 1; + + let innerState = 0; + + la.subscribe(data => { + if (data.length) { + expect(data[0]._id).toEqual(parseInt(changeKeys[innerState])); + } + innerState += 1; + }); + + changeKeys.forEach(id => { + const doc = changes[parseInt(id)]; + sub(doc); + }); + }); +}); diff --git a/packages/pouchtype/spec/typehandler.spec.js b/packages/pouchtype/spec/typehandler.spec.js new file mode 100644 index 0000000..b7324c2 --- /dev/null +++ b/packages/pouchtype/spec/typehandler.spec.js @@ -0,0 +1,360 @@ +import { TypeHandler } from '../src/index.js'; + +const PDB = PouchDB.plugin(find); + +class ContactHandler extends TypeHandler { + getUniqueID(doc) { + return doc.email; + } + + validate(doc) { + if (typeof doc.email != 'string') { + throw new Error('email property is required'); + } else if (doc.email.length <= 2) { + throw new Error('email must be more than 2 characters'); + } + } +} + +class LocationHandler extends TypeHandler { + getUniqueID(doc) { + return doc.latlon; + } +} + +const notDesignDocs = d => !d.id.startsWith('_design'); + +function compareDataToDoc(dataObj, doc) { + Object.keys(dataObj).forEach(k => { + expect(doc[k]).toEqual(dataObj[k]); + }); +} + +describe('PouchType Handler', () => { + const db = new PDB('pouchtype-test', { adapter: 'memory' }); + const Contact = new ContactHandler(db, 'contact'); + const Location = new LocationHandler(db, 'location'); + + Contact.index('lastIndex', ['last']); + + const dataset = [ + { + first: 'Jane', + last: 'Doe', + email: 'jd@example.com', + type: 'contact', + _id: 'contact_jd@example.com' + }, + { + first: 'Bob', + last: 'Smith', + email: 'bs@example.com', + type: 'contact', + _id: 'contact_bs@example.com' + }, + { + description: 'Home', + latlon: '12345', + type: 'location', + _id: 'location_12345' + }, + { + first: 'Joe', + last: 'Smithers', + email: 'js@example.com', + type: 'contact', + _id: 'contact_js@example.com' + } + ]; + + async function flushDb() { + const res = await db.allDocs(); + await Promise.all(res.rows.filter(notDesignDocs).map(d => db.remove(d.id, d.value.rev))).catch( + () => {} + ); + await db.compact(); + } + + afterEach(flushDb); + + it('.getOrCreate() gets existing records.', async () => { + const doc = { + first: 'Jane', + last: 'Doe', + email: 'jd@example.com' + }; + await db.bulkDocs(dataset); + const existing = await db.get(dataset[0]._id); + const instance = await Contact.getOrCreate(doc); + expect(instance._id).toEqual(existing._id); + expect(instance._rev).toEqual(existing._rev); + }); + + it('.getOrCreate() saves non-existing records and populates _id and _rev.', async () => { + const doc = { + first: 'Jill', + last: 'Doener', + email: 'jd2@example.com' + }; + const expectedId = `${Contact.type}_${doc.email}`; + try { + await db.get(expectedId); + fail('db.get() should throw when passed an id of a removed document.'); + return; + } catch (e) { + expect(e.status).toBe(404); + } + + const instance = await 
Contact.getOrCreate(doc); + expect(instance._id).toEqual(expectedId); + expect(instance._rev).toBeTruthy(); + compareDataToDoc(doc, instance); + const savedDoc = await db.get(expectedId); + expect(savedDoc._id).toEqual(expectedId); + expect(savedDoc._rev).toBeTruthy(); + compareDataToDoc(doc, savedDoc); + }); + + it('.get() returns a document when it exists.', async () => { + await db.bulkDocs(dataset); + const instance = await Contact.get(dataset[1]._id); + compareDataToDoc(dataset[1], instance); + }); + + it(".get() returns null when it doesn't exist or has been removed.", async () => { + await db.bulkDocs(dataset); + const empty = await Contact.get('does_not_exist'); + expect(empty).toBeNull(); + const doc = await db.get(dataset[1]._id); + doc._deleted = true; + await db.put(doc); + const empty2 = await Contact.get('does_not_exist'); + expect(empty).toBeNull(); + }); + + it('.get() only returns records for its type.', async () => { + await db.bulkDocs(dataset); + const instance = await Contact.get(dataset.filter(d => d.type !== Contact.type)[0]._id); + expect(instance).toBeNull(); + }); + + it('.filter() returns an array of matching instances.', async () => { + await db.bulkDocs(dataset); + const res = await Contact.filter({ last: { $regex: /^Smith.*/ } }); + expect(res.length).toBe(2); + }); + + it('.filter() only returns records for its type.', async () => { + await db.bulkDocs(dataset); + const res = await Contact.filter({}); + expect(res.length).toBe(3); + res.forEach(c => expect(c.type).toBe(Contact.type)); + }); + + it('.isType(instance) identifies instances of the type.', async () => { + const instance = await Contact.getOrCreate({ + first: 'Bob', + last: 'Smith', + email: 'bs@example.com' + }); + + expect(Contact.isType(instance)).toBe(true); + }); + + it('.isType(doc) detects documents of the type.', async () => { + const data = await Contact.getOrCreate({ + first: 'Bob', + last: 'Smith', + email: 'bs@example.com' + }); + const doc = await db.get(data._id); + + expect(Contact.isType(doc)).toBe(true); + }); + + it('.remove() sets the ._deleted property and documents are no longer gettable.', async () => { + const id = dataset[1]._id; + await db.bulkDocs(dataset); + const doc = await Contact.get(id); + await Contact.remove(doc); + expect(doc._deleted).toBe(true); + try { + await db.get(id); + fail('db.get() should throw when passed an id of a removed document.'); + return; + } catch (e) { + expect(e.status).toBe(404); + } + const doc2 = await Contact.get(id); + expect(doc2).toBeNull(); + }); + + it('.save() populates ._id and .type properties and writes to the db.', async () => { + const doc = { + first: 'Bob', + last: 'Smith', + email: 'bs@example.com' + }; + await Contact.save(doc); + const d = await db.get(doc._id); + expect(Contact.isType(d)).toBe(true); + expect(d._id).toBeTruthy(); + expect(d.type).toEqual(Contact.type); + compareDataToDoc(doc, d); + }); + + it('.validate() can interrupt save() by throwing an exception.', async () => { + const doc = { + first: 'Bob', + last: 'Smith', + email: 'bs' + }; + try { + await Contact.save(doc); + } catch (e) { + expect(e.message).toBe('email must be more than 2 characters'); + return; + } + fail('TypeHandler.save() should call validate on save()'); + }); + + it('.update() will deeply apply object properties to a document.', async () => { + const data = { + first: 'Bob', + last: 'Smitherines', + email: 'bsmitherines@example.com', + addresses: { + home: '123 Privet Drive' + } + }; + const doc = await Contact.save(data); + await 
Contact.update(doc, { addresses: { home: '221B Baker Street' } }); + const doc2 = await Contact.get(doc._id); + expect(doc2.addresses.home).toEqual(doc.addresses.home); + }); + + it('.update() only updates the database when the underlying document changes and save is not precluded.', async () => { + const data = { + first: 'Bob', + last: 'Smitherines', + email: 'bsmitherines@example.com' + }; + const doc = await Contact.save(data); + spyOn(Contact, 'save'); + await Contact.update(doc, { last: 'Shell', email: 'bshell@example.com' }); + expect(Contact.save).toHaveBeenCalledTimes(1); + await Contact.update(doc, { last: 'Shell' }); + expect(Contact.save).toHaveBeenCalledTimes(1); + await Contact.update(doc, { last: 'Tootsie' }, false); + expect(Contact.save).toHaveBeenCalledTimes(1); + + // TODO: Potentially undesirable/non-intuitive behavior here. Update calls save even though the data hasn't truly changed from the last saved state. + await Contact.update(doc, { last: 'Shell' }); + expect(Contact.save).toHaveBeenCalledTimes(2); + }); + + it('.watch() returns a subscribable LiveArray of matching instances.', done => { + let expectedLength = 0; + let checkCount = 0; + let sub; + let livedata; + + function poll(fn, val) { + return new Promise(resolve => { + const wrap = () => { + if (fn()) { + resolve(val); + } else { + setTimeout(wrap, 5); + } + }; + wrap(); + }); + } + + return Contact.watch({ watchTest: { $eq: true } }) + .then(_ld => { + livedata = _ld; + sub = livedata.subscribe(data => { + checkCount += 1; + expect(data.length).toBe(expectedLength); + }); + return db.bulkDocs(dataset); + }) + .then(_ => + poll(() => { + return livedata().length === 0; + }) + ) + .then(_ => { + expectedLength = 1; + return db.put({ + first: 'Bill', + last: 'Smitherts', + email: 'bsmithers@example.com', + _id: 'contact_bsmithers@example.com', + type: 'contact', + watchTest: true + }); + }) + .then(_ => + poll(() => { + return livedata().length === 1; + }) + ) + .then(_ => { + return db.put({ + first: 'Bart', + last: 'Smitty', + email: 'bsmitty@example.com', + type: 'contact', + _id: 'contact_bsmitty@example.com' + }); + }) + .then(res => { + return poll(() => { + return livedata().length === 1; + }, res); + }) + .then(res => { + expectedLength = 2; + db.put({ + first: 'Bart', + last: 'Smitty', + email: 'bsmitty@example.com', + type: 'contact', + _id: 'contact_bsmitty@example.com', + _rev: res.rev, + watchTest: true + }); + return poll(() => { + return livedata().length === 2; + }); + }) + .then(_ => { + expect(livedata().length).toBe(expectedLength); + }) + .then(done); + }); + + it('.index() creates an index for non-id sorting.', async () => { + await db.bulkDocs(dataset); + try { + const res = await Contact.filter( + { + last: { $gte: '' } + }, + { + sort: ['last'], + index: 'lastIndex' + } + ); + expect(res.length).toEqual(3); + expect(res[0].last).toEqual('Doe'); + expect(res[1].last).toEqual('Smith'); + expect(res[2].last).toEqual('Smithers'); + } catch (e) { + fail(e); + } + }); +}); diff --git a/packages/pouchtype/spec/watcher.spec.js b/packages/pouchtype/spec/watcher.spec.js new file mode 100644 index 0000000..fc38e03 --- /dev/null +++ b/packages/pouchtype/spec/watcher.spec.js @@ -0,0 +1,148 @@ +import { Watcher } from '../src/watcher.js'; + +describe('A watcher', () => { + const selector = {}; + const opts = {}; + + it('initially does nothing.', () => { + const db = { + changes: () => fail('Watcher should not call changes until the first subscription.') + }; + + const w = Watcher(db, selector, 
opts); + expect().nothing(); + }); + + it('calls PouchDB.change API on the first subscription.', () => { + let state = 0; + const db = { + changes: options => { + expect(state).toEqual(1); + expect(options.live).toEqual(true); + expect(options.since).toEqual('now'); + expect(options.selector).toBe(selector); + return db; + }, + on: (eventName, callback) => { + expect(['change', 'error'].indexOf(eventName) !== -1).toBeTruthy(); + return db; + }, + cancel: () => {} + }; + + const w = Watcher(db, selector, opts); + state = 1; + w(() => fail('Subscription callback should not be called until data changes.')); + }); + + it('cancels change subscription when the last subscriber unsubscribes', () => { + let state = 0; + const db = { + changes: options => { + expect(state).toEqual(1); + expect(options.live).toEqual(true); + expect(options.since).toEqual('now'); + expect(options.selector).toBe(selector); + return db; + }, + on: (eventName, callback) => { + expect(['change', 'error'].indexOf(eventName) !== -1).toBeTruthy(); + return db; + }, + cancel: () => { + expect(state).toEqual(2); + state = 3; + } + }; + + const w = Watcher(db, selector, opts); + state = 1; + const unsub = w(() => fail('Subscription callback should not be called until data changes.')); + state = 2; + unsub(); + expect(state).toEqual(3); + }); + + it('passes change events to subscribers.', () => { + let state = 0; + const changes = { + 234: { id: 234, deleted: false, doc: { _id: 234 } }, + 34: { id: 34, deleted: true, doc: { _id: 34 } }, + 4564565: { id: 4564565, deleted: false, doc: { _id: 4564565 } } + }; + let sub = null; + const db = { + changes: options => { + expect(state).toEqual(1); + expect(options.live).toEqual(true); + expect(options.since).toEqual('now'); + expect(options.selector).toBe(selector); + return db; + }, + on: (eventName, callback) => { + if (eventName == 'change') { + sub = callback; + } + expect(['change', 'error'].indexOf(eventName) !== -1).toBeTruthy(); + return db; + }, + cancel: () => (sub = null) + }; + + const w = Watcher(db, selector, opts); + state = 1; + w((id, deleted, doc) => { + expect(changes.hasOwnProperty(id)).toBeTruthy(); + expect(changes[id].doc).toBe(doc); + expect(changes[id].deleted).toEqual(deleted); + }); + + Object.values(changes).forEach(sub); + }); + + it('dumps subscribers when an error event happens.', () => { + let state = 0; + const changes = { + 234: { id: 234, deleted: false, doc: { _id: 234 } }, + 34: { id: 34, deleted: true, doc: { _id: 34 } }, + 4564565: { id: 4564565, deleted: false, doc: { _id: 4564565 } } + }; + let sub = null; + let errorSub = null; + const db = { + changes: options => { + expect(state).toEqual(1); + expect(options.live).toEqual(true); + expect(options.since).toEqual('now'); + expect(options.selector).toBe(selector); + return db; + }, + on: (eventName, callback) => { + if (eventName == 'change') { + sub = callback; + } + if (eventName == 'error') { + errorSub = callback; + } + expect(['change', 'error'].indexOf(eventName) !== -1).toBeTruthy(); + return db; + }, + cancel: () => { + sub = null; + errorSub = null; + } + }; + + const w = Watcher(db, selector, opts); + state = 1; + w(() => fail('Subscription callback should not be called until data changes.')); + + const error = new Error('TestError'); + try { + errorSub(error); + } catch (e) { + expect(e).toBe(error); + } + Object.values(changes).forEach(sub); + }); +}); diff --git a/packages/pouchtype/src/index.js b/packages/pouchtype/src/index.js new file mode 100644 index 0000000..7eae1e1 --- 
/dev/null +++ b/packages/pouchtype/src/index.js @@ -0,0 +1,2 @@ +import { isObject, deepAssign, pouchDocHash, pouchDocArrayHash } from './utils.js'; +export { TypeHandler } from './type.js'; diff --git a/packages/pouchtype/src/livearray.js b/packages/pouchtype/src/livearray.js new file mode 100644 index 0000000..fe4a5f7 --- /dev/null +++ b/packages/pouchtype/src/livearray.js @@ -0,0 +1,45 @@ +import { prop, computed, id } from '../node_modules/frptools/src/index.js'; + +import { Watcher } from './watcher.js'; +import { pouchDocArrayHash } from './utils.js'; + +// LiveArray is a subscribable property function that always returns the db results that match +// the provided selector and calls subscribers when the results change. +export async function LiveArray(db, selector, opts = {}) { + opts.include_docs = true; + const _watcher = Watcher(db, selector, opts); + let changeSub = null; + + const paginator = opts.paginator; + const data = prop({ docs: [] }); + const docs = computed(r => r.docs, [data], pouchDocArrayHash); + + const cleanup = () => { + docs.unsubscribeAll(); + if (changeSub) { + changeSub(); + changeSub = null; + } + data({ docs: [] }); + }; + + const refresh = async function refresh(...args) { + const queryOpts = { selector }; + if (paginator) { + Object.assign(queryOpts, paginator.queryOptions()); + } + data(await db.find(queryOpts)); + }; + + docs.cleanup = cleanup; + docs.selector = selector; + docs.db = db; + + await refresh(); + changeSub = _watcher(refresh); + if (paginator) { + paginator.queryOptions.subscribe(refresh); + } + + return docs; +} diff --git a/packages/pouchtype/src/type.js b/packages/pouchtype/src/type.js new file mode 100644 index 0000000..472d024 --- /dev/null +++ b/packages/pouchtype/src/type.js @@ -0,0 +1,132 @@ +import { pouchDocHash, deepAssign } from './utils.js'; +import { LiveArray } from './livearray.js'; + +export class TypeHandler { + constructor(db, typeKey) { + this.db = db; + this.type = typeKey; + } + + getUniqueID(doc) { + return undefined; // To be overridden if you want deterministic IDs. + } + + _makeID(doc) { + return `${this.type}_${this.getUniqueID(doc)}`; + } + + isType(doc) { + return doc && doc.type === this.type; + } + + hash(doc) { + return pouchDocHash(doc); + } + + async validate(doc) { + if (!this.isType(doc)) { + throw TypeError(`Document "${doc.type}:${doc._id}" does not match type "${this.type}"`); + } + } + + async remove(docOrId) { + const doc = typeof docOrId === 'string' ? 
await this.get(docOrId) : docOrId; + return await this.update(doc, { _deleted: true }); + } + + async save(doc) { + if (!doc.type) { + doc.type = this.type; + } + this.validate(doc); + if (!doc._id) { + doc._id = this._makeID(doc); + } + if (!doc.$$links) { + doc.$$links = {}; + } + + const { rev } = await this.db.put(doc); + doc._rev = rev; + return doc; + } + + async addAttachment(doc, attName, dataBlob) { + const { rev } = await this.db.putAttachment(doc._id, attName, doc._rev, dataBlob, dataBlob.type); + + doc._rev = rev; + return doc; + } + + async getAttachment(doc, attName) { + return await this.db.getAttachment(doc._id, attName); + } + + async removeAttachment(doc, attName) { + return await this.db.removeAttachment(doc._id, attName, doc._rev); + } + + async update(doc, props, save = true) { + if (deepAssign(doc, props) && save) { + await this.save(doc); + } + return doc; + } + + _filterOpts(selector, opts) { + selector.type = { $eq: this.type }; + opts.selector = selector; + if (opts.index) { + opts.use_index = `${this.type}_${opts.index}`; + delete opts.index; + } + return opts; + } + + async filter(selector, opts = {}) { + const res = await this.db.find(this._filterOpts(selector, opts)); + return res.docs; + } + + async watch(selector, opts = {}) { + opts.live = true; + this._filterOpts(selector, opts); + return await LiveArray(this.db, opts.selector, opts); + } + + async get(id) { + try { + const doc = await this.db.get(id); + return this.isType(doc) ? doc : null; + } catch (e) { + if (e.status === 404) { + return null; + } + throw e; + } + } + + async getOrCreate(props, defaults = {}) { + const doc = Object.assign({}, defaults, props); + if (!doc._id) { + doc._id = this._makeID(doc); + } + const existing_doc = doc._id && (await this.get(doc._id)); + + if (existing_doc) { + return this.update(existing_doc, props); + } + + return await this.save(doc); + } + + async index(name, fields) { + return this.db.createIndex({ + index: { + ddoc: `${this.type}_${name}`, + fields: fields, + name + } + }); + } +} diff --git a/packages/pouchtype/src/utils.js b/packages/pouchtype/src/utils.js new file mode 100644 index 0000000..a141840 --- /dev/null +++ b/packages/pouchtype/src/utils.js @@ -0,0 +1,30 @@ +export function deepAssign(to, ...rest) { + let updated = false; + for (let src of rest) { + for (let prop in src) { + const value = src[prop]; + const oldValue = to[prop]; + if (typeof value === 'object' && !Array.isArray(value)) { + if (typeof oldValue !== 'object') { + to[prop] = {}; + updated = true; + } + updated = deepAssign(to[prop], value) || updated; + } else if (value === undefined && to[prop] !== undefined) { + delete to[prop]; + } else if (value !== oldValue) { + updated = true; + to[prop] = value; + } + } + } + return updated; +} + +export const pouchDocHash = d => (isObject(d) ? `${d._id}:${d._rev}` : d); +export const pouchDocArrayHash = arr => + Array.isArray(arr) ? 
arr.map(pouchDocHash).join('?') : arr; + +export function isObject(obj) { + return typeof obj === 'object' && !Array.isArray(obj) && obj !== null; +} diff --git a/packages/pouchtype/src/watcher.js b/packages/pouchtype/src/watcher.js new file mode 100644 index 0000000..52463c4 --- /dev/null +++ b/packages/pouchtype/src/watcher.js @@ -0,0 +1,37 @@ +export function Watcher(db, selector, opts) { + const subscribers = new Set(); + let changes = null; + + return function subscribe(fn) { + subscribers.add(fn); + + if (subscribers.size === 1 && !changes) { + changes = db + .changes( + Object.assign( + { + since: 'now', + live: true, + selector + }, + opts + ) + ) + .on('change', change => { + const { id, deleted, doc } = change; + subscribers.forEach(s => s(id, !!deleted, doc)); + }) + .on('error', err => { + subscribers.clear(); + throw err; + }); + } + return () => { + subscribers.delete(fn); + if (subscribers.size === 0 && changes) { + changes.cancel(); + changes = null; + } + }; + }; +} diff --git a/packages/pouchtype/test.json b/packages/pouchtype/test.json new file mode 100644 index 0000000..2ed9e5a --- /dev/null +++ b/packages/pouchtype/test.json @@ -0,0 +1,9 @@ +{ + "spec_dir": "spec", + "spec_files": ["**/*[sS]pec.js"], + "lib_files": [ + "node_modules/pouchdb/dist/pouchdb.js", + "node_modules/pouchdb/dist/pouchdb.memory.js", + "node_modules/pouchdb/dist/pouchdb.find.js" + ] +}
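
A minimal end-to-end sketch of how the pieces above fit together. Assumptions: Node with the real `pouchdb-find` and `pouchdb-adapter-memory` plugins installed alongside the package and its `pouchdb`/`frptools` dependencies; the handler, database name and documents are illustrative only.

```js
import PouchDB from 'pouchdb';
import find from 'pouchdb-find';
import memory from 'pouchdb-adapter-memory';
import { TypeHandler } from 'pouchtype';

PouchDB.plugin(find);
PouchDB.plugin(memory);

class ContactHandler extends TypeHandler {
    getUniqueID(doc) {
        return doc.email;
    }
}

const db = new PouchDB('sketch', { adapter: 'memory' });
const Contact = new ContactHandler(db, 'contact');

(async () => {
    // Index for non-_id sorting; creates the 'contact_lastIndex' design document.
    await Contact.index('lastIndex', ['last']);

    // First call creates 'contact_ada@example.com'; later calls return and update the stored doc.
    const ada = await Contact.getOrCreate({ first: 'Ada', last: 'Lovelace', email: 'ada@example.com' });

    // LiveArray: current matches now, refreshed whenever a matching document changes.
    const lovelaces = await Contact.watch({ last: { $eq: 'Lovelace' } });
    lovelaces.subscribe(docs => console.log('matching contacts:', docs.length));

    // Deep-assigns props and saves; the watch() subscriber above sees the new revision.
    await Contact.update(ada, { addresses: { home: '12 St James Square' } });

    // Sorted query through the index created earlier.
    const byLast = await Contact.filter({ last: { $gte: '' } }, { sort: ['last'], index: 'lastIndex' });
    console.log(byLast.map(d => d.last));

    // Flag as deleted; it no longer appears in get/filter/watch results.
    await Contact.remove(ada);
    lovelaces.cleanup();
})();
```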