Progress on PouchORM (this breaks things)

Timothy Farrell 2017-11-13 22:01:55 -06:00
parent d984e96af0
commit b901bc7a26
4 changed files with 261 additions and 104 deletions

View File

@@ -0,0 +1,51 @@
import { PouchDB, TYPES as t } from '../services/db.js';
import { log } from '../services/console.js';
import { sha256 } from '../utils/crypto.js';
import { blobToArrayBuffer } from '../utils/conversion.js';
export const FileType = PouchDB.registerType({
name: 'File',
getUniqueID: doc => doc.digest.substr(0, 16),
getSequence: doc =>
new Date(doc.modifiedDate ? doc.modifiedDate : new Date().toISOString()).getTime(),
// schema: {
// name: t.REQUIRED_STRING,
// mimetype: t.REQUIRED_STRING,
// digest: t.REQUIRED_STRING,
// size: t.INTEGER,
// modifiedDate: t.DATE,
// addDate: t.DATE,
// hasData: t.REQUIRED_BOOLEAN,
// tags: {
// type: "object",
// additionalProperties: t.BOOLEAN
// }
// },
methods: {
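// Hash each picked file, then save one File doc per file with the raw blob
// stored under the 'data' attachment.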
upload: async function(fileListOrEvent) {
const files = Array.from(
fileListOrEvent instanceof Event ? fileListOrEvent.currentTarget.files : fileListOrEvent
);
return Promise.all(files.map(async f => {
const digest = await sha256(await blobToArrayBuffer(f));
const file = FileType.new({
name: f.name,
mimetype: f.type,
size: f.size,
modifiedDate: new Date(f.lastModified),
addDate: new Date(),
digest,
tags: {},
_attachments: {
data: {
content_type: f.type,
data: f
}
}
});
await file.save();
return file;
}));
}
}
});
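A minimal usage sketch of the new FileType API (importPicked is a hypothetical caller; it assumes FileType imported from the file module above and the registerType helpers added to the db service below):
// Hypothetical wiring: save whatever the user picked, then query the type's database.
async function importPicked(fileInput) {
// upload() hashes each file and stores it as a File doc with a 'data' attachment
const saved = await FileType.upload(fileInput.files);
// find() with an object runs a Mango query and resolves to a { docs: [...] } result set
const jpegs = await FileType.find({ mimetype: 'image/jpeg' });
return { saved, jpegs: jpegs.docs };
}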

View File

@@ -4,6 +4,7 @@ import { sha256 } from '../utils/crypto.js';
import { blobToArrayBuffer, deepAssign } from '../utils/conversion.js';
import { Event, backgroundTask } from '../utils/event.js';
import { Watcher } from '../utils/watcher.js';
import { FileType } from './file.js';
const db = getDatabase();
const PROCESS_PREFIX = 'importing';
@@ -49,26 +50,6 @@ export async function getAttachment(id, attName) {
return await db.getAttachment(id, attName);
}
export async function add(imageFileList) {
const docs = Array.prototype.map.call(imageFileList, f => ({
_id: `${PROCESS_PREFIX}_${f.name}`,
name: f.name,
mimetype: f.type,
size: f.size,
modifiedDate: new Date(f.lastModified).toISOString(),
uploadedDate: new Date().toISOString(),
tags: {},
_attachments: {
image: {
content_type: f.type,
data: f
}
}
}));
const results = await db.bulkDocs(docs);
return docs.filter((d, i) => results[i].ok);
}
export async function remove(ids) {
const docs = await find(Array.isArray(ids) ? ids : [ids]);
const foundDocs = docs.rows.filter(r => !r.error);
@@ -92,34 +73,23 @@ export async function addAttachment(doc, key, blob) {
}
// Internal Functions
importWatcher(
backgroundTask(async function _processImportables(changeId, deleted) {
if (deleted) {
return;
}
const selector = changeId ? { _id: changeId } : IMPORT_SELECTOR;
const result = await db.find({
selector,
limit: 1
});
if (!result.docs.length) {
const processImportables = backgroundTask(async function _processImportables(importables) {
if (!importables.length) {
return;
}
const doc = result.docs[0];
const { _id, _rev } = doc;
const imageData = await db.getAttachment(_id, 'image');
const file = importables[0];
const { _id, _rev } = file;
const imageData = await file.getAttachment('data');
const ExifParser = await import('exif-parser');
const buffer = await blobToArrayBuffer(imageData);
const digest = await sha256(buffer);
// Check if this image already exists
// TODO - Create an image.digest index
const digestQuery = await db.find({
selector: { digest },
selector: { digest: file.digest },
fields: ['_id'],
limit: 1
});
@@ -130,25 +100,23 @@ importWatcher(
const exifData = ExifParser.create(buffer).parse();
const { tags, imageSize } = exifData;
const originalDate = new Date(
tags.DateTimeOriginal ? new Date(tags.DateTimeOriginal * 1000).toISOString() : doc.modifiedDate
tags.DateTimeOriginal ? new Date(tags.DateTimeOriginal * 1000).toISOString() : file.modifiedDate
);
const id = `${PREFIX}_${originalDate.getTime().toString(36)}_${digest.substr(0, 6)}`;
const id = `${PREFIX}_${originalDate.getTime().toString(36)}_${file.digest.substr(0, 6)}`;
const newDoc = Object.assign(
{},
doc,
{
_id: id,
originalDate: originalDate.toISOString(),
originalDate: originalDate,
orientation: tags.Orientation,
digest,
digest: file.digest,
make: tags.Make,
model: tags.Model,
flash: !!tags.Flash,
ISO: tags.ISO,
attachmentUrls: {
image: generateAttachmentUrl(db.name, id, 'image')
},
fileId: file._id,
url: generateAttachmentUrl('file', file._id, 'data'),
gps: {
latitude: tags.GPSLatitude,
longitude: tags.GPSLongitude,
@@ -162,12 +130,21 @@
try {
await db.put(newDoc);
file.update({ tags: { galleryImage: false } });
imported.fire(id, _id, true);
} catch (e) {
error(`Error processing Image ${id}`, e);
}
}
}, false);
await db.remove({ _id, _rev });
})
);
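// Live query: whenever a JPEG File doc that has not been tagged galleryImage: false
// is added or changed, re-run the importer over the current matches.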
FileType.find(
{
$and: [{ mimetype: { $in: ['image/jpeg'] } }, { $not: { ['tags.galleryImage']: false } }]
},
true
).then(fw => {
fw.subscribe((...props) => {
processImportables(...props);
});
});

View File

@@ -1,6 +1,7 @@
import { defineView } from 'domvm';
import * as image from '../data/image.js';
import * as index from '../data/indexType.js';
import { FileType } from '../data/file.js';
import * as imageTag from '../context/manageImageTags.js';
import { ThumbnailView } from './thumbnail.js';
import { AlbumView } from './album.js';
@@ -9,10 +10,6 @@ import { styled, el } from '../services/style.js';
import { LiveArray } from '../utils/livearray.js';
import { Watcher } from '../utils/watcher.js';
function uploadImages(evt) {
image.add(evt.currentTarget.files);
}
export function GalleryView(vm, model) {
const { db } = model;
const NAV_OPTIONS = {
@@ -47,7 +44,7 @@
type: 'file',
multiple: true,
accept: 'image/jpeg',
onchange: uploadImages
onchange: FileType.upload
})
]),
...(!data || !data.ready()

View File

@@ -4,11 +4,17 @@ import http from 'pouchdb-adapter-http';
import replication from 'pouchdb-replication';
import find from 'pouchdb-find';
const PouchDB = core
import { log } from './console.js';
import { isObject } from '../utils/comparators.js';
import { LiveArray } from '../utils/livearray.js';
import { deepAssign } from '../utils/conversion.js';
export const PouchDB = core
.plugin(idb)
.plugin(http)
.plugin(replication)
.plugin(find);
.plugin(find)
.plugin(PouchORM);
export function generateAttachmentUrl(dbName, docId, attachmentKey) {
return `/_doc_attachments/${dbName}/${docId}/${attachmentKey}`;
@@ -35,3 +41,129 @@ export async function getOrCreate(doc) {
throw e;
}
}
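// PouchDB plugin: PouchDB.plugin(PouchORM) above invokes this with the PouchDB
// constructor, letting it attach the registerType() factory used by the File type.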
export function PouchORM(PouchDB) {
async function update(props, save = true) {
deepAssign(this, props);
if (save) {
await this.save();
} else {
this.validate();
}
return this;
}
PouchDB.registerType = opts => {
const { getUniqueID, getSequence, schema, name } = opts;
const prefix = name.toLowerCase();
const db = opts.db || new PouchDB(prefix);
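// Generated ids look like "<type>_<base36 sequence>_<unique id>", so docs of a type
// sort roughly by sequence within the type's key range.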
function populateId(doc) {
if (!doc._id) {
doc._id = `${prefix}_${getSequence(doc).toString(36)}_${getUniqueID(doc)}`;
}
return doc;
}
function validate() {
// FIXME
return this;
}
async function save() {
const { rev } = await db.put(this.validate());
this._rev = rev;
return this;
}
async function addAttachment(attName, dataBlob) {
const { rev } = await db.putAttachment(this._id, attName, this._rev, dataBlob, dataBlob.type);
this._rev = rev;
return this;
}
async function getAttachment(attName) {
return await db.getAttachment(this._id, attName);
}
async function removeAttachment(attName) {
return await db.removeAttachment(this._id, attName, this._rev);
}
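// The ORM helpers are attached as non-enumerable properties, so they are skipped
// when the doc is serialized back to the database.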
function instantiate(docOrResultSet) {
Object.defineProperties(docOrResultSet, {
update: { value: update.bind(docOrResultSet) },
save: { value: save.bind(docOrResultSet) },
delete: { value: _delete.bind(docOrResultSet) },
addAttachment: { value: addAttachment.bind(docOrResultSet) },
getAttachment: { value: getAttachment.bind(docOrResultSet) },
removeAttachment: { value: removeAttachment.bind(docOrResultSet) },
validate: { value: validate.bind(docOrResultSet) }
});
return docOrResultSet;
}
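// find(): no argument -> every doc of this type (by id prefix); string -> lookup by _id;
// object -> Mango selector, optionally returned as a LiveArray when live is true.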
async function find(idOrQuery, live = false, raw = false) {
let results = [];
if (typeof idOrQuery === 'undefined') {
results = await db.find({
selector: {
_id: { $gt: `${prefix}_0`, $lt: `${prefix}_\ufff0` }
}
});
} else if (typeof idOrQuery === 'string') {
results = await db.find({
selector: { _id: idOrQuery }
});
} else if (isObject(idOrQuery)) {
if (live) {
return LiveArray(db, idOrQuery, instantiate);
}
results = await db.find({
selector: idOrQuery
});
}
return raw ? results : instantiate(results);
}
async function _delete() {
try {
const { ok } = await db.remove(this);
this.update({ _id: undefined, _rev: undefined }, false);
return ok;
} catch (e) {
return false;
}
}
function _new(props, save = false) {
const doc = instantiate(populateId(props));
if (save) {
doc.save();
}
return doc;
}
return Object.assign(
{
new: _new,
find
},
opts.methods || {}
);
};
}
export const TYPES = {
STRING: { type: 'string' },
INTEGER: { type: 'integer' },
BOOLEAN: { type: 'boolean' },
DATE: { type: 'date' }
};
// Add REQUIRED_ variants of each base type
Object.keys(TYPES).forEach(k => {
TYPES['REQUIRED_' + k] = Object.assign({ required: true }, TYPES[k]);
});
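// e.g. TYPES.REQUIRED_STRING -> { required: true, type: 'string' }
A rough sketch of how these TYPES constants are meant to feed a registerType schema once validate() is implemented (NoteType and its fields are hypothetical):
const NoteType = PouchDB.registerType({
name: 'Note',
getUniqueID: doc => doc.slug,
getSequence: doc => new Date(doc.createdDate).getTime(),
schema: {
slug: TYPES.REQUIRED_STRING,
title: TYPES.REQUIRED_STRING,
body: TYPES.STRING,
pinned: TYPES.BOOLEAN,
createdDate: TYPES.DATE
}
});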