Add Gallery - core image model working

(add, imported event, thumbnail generation, removal)
This commit is contained in:
Timothy Farrell 2017-03-21 21:12:36 -05:00
commit 386f8e0012
12 changed files with 472 additions and 0 deletions

View File

@ -0,0 +1 @@
# Gallery

View File

@ -0,0 +1,28 @@
{
"name": "Gallery",
"version": "0.0.1",
"description": "Personal photo gallery",
"main": "lib/index.js",
"jsnext:main": "src/index.js",
"keywords": ["javascript"],
"author": "Timothy Farrell <tim@thecookiejar.me> (https://github.com/explorigin)",
"license": "Apache-2.0",
"scripts": {
"start": "webpack --config webpack.config.js",
"dev": "webpack-dev-server"
},
"dependencies": {
"exif-parser": "~0.1.9",
"pica": "~2.0.8",
"pouchdb-adapter-http": "~6.1.2",
"pouchdb-adapter-idb": "~6.1.2",
"pouchdb-adapter-websql": "~6.1.2",
"pouchdb-binary-utils": "~6.1.2",
"pouchdb-core": "~6.1.2",
"pouchdb-replication": "~6.1.2",
"webpack": "~2.3.0"
},
"devDependencies": {
"webpack-dev-server": "~2.4.2"
}
}

View File

@ -0,0 +1,31 @@
import { add, imported, db, remove } from './data/image.js';
import * as thumbnailContext from './context/generateThumbnails.js';
// Wire the file input so selected images are handed to the image model.
const fileInput = document.querySelector('#fInput');
fileInput.onchange = async evt => {
  add(evt.currentTarget.files);
};

// Development hooks: expose the db and remove() on window for console use.
window.__DEV__ = true;
window.db = db;
window.remove = remove;

imported.subscribe(refresh);

// To test the output: reload the page shortly after an import completes.
function refresh() {
  setTimeout(() => history.go(0), 100);
}

// Render every stored attachment as an <img>; clicking one deletes its doc
// and refreshes the page.
db.allDocs({ include_docs: true, attachments: true }).then(results => {
  results.rows.forEach(row => {
    for (const name in row.doc._attachments) {
      const attachment = row.doc._attachments[name];
      const img = document.createElement('img');
      document.body.appendChild(img);
      img.title = `${row.doc._id} ${name}`;
      img.src = `data:${attachment.content_type};base64,${attachment.data}`;
      img.dataset.id = row.doc._id;
      img.onclick = evt => remove(evt.currentTarget.dataset.id).then(refresh);
    }
  });
});

View File

@ -0,0 +1,72 @@
import pica from 'pica/dist/pica';
import { generateAttachmentUrl } from '../services/db.js';
import { imported, find, update, addAttachment, DB_NAME } from '../data/image.js';
/**
 * Scale a width/height pair so that its longest side equals `max`,
 * preserving the original aspect ratio.
 * @param {number} width - source width in pixels
 * @param {number} height - source height in pixels
 * @param {number} max - desired length of the longest side
 * @returns {{width: number, height: number}} the scaled dimensions
 */
export function maxLinearSize(width, height, max) {
  const aspect = width / height;
  if (width > height) {
    return { width: max, height: max / aspect };
  }
  return { width: max * aspect, height: max };
}
/**
 * Load `src` into an Image element, resolving once it has decoded.
 * @param {string} src - URL (typically an object URL) to load
 * @returns {Promise<HTMLImageElement>} the loaded image element
 */
async function getLoadedImage(src) {
  return new Promise((resolve, reject) => {
    // BUG FIX: `new Image('image')` passed a bogus string where the
    // constructor takes optional numeric width/height; create it bare.
    const img = new Image();
    img.onload = () => resolve(img);
    // Reject instead of hanging forever on an undecodable/unreachable source.
    img.onerror = () => reject(new Error(`Failed to load image: ${src}`));
    img.src = src;
  });
}
/**
 * Resize an image Blob to the given dimensions using pica.
 * @param {Blob} imageBlob - source image data
 * @param {string} mimetype - MIME type for the output blob
 * @param {number} width - target width in pixels
 * @param {number} height - target height in pixels
 * @returns {Promise<Blob>} the resized image as a Blob
 */
async function resizeImage(imageBlob, mimetype, width, height) {
  const url = URL.createObjectURL(imageBlob);
  let $img;
  try {
    $img = await getLoadedImage(url);
  } finally {
    // BUG FIX: the object URL was never revoked, pinning every imported
    // blob in memory; release it as soon as the element has decoded it.
    URL.revokeObjectURL(url);
  }
  const $destinationCanvas = document.createElement('canvas');
  $destinationCanvas.width = width;
  $destinationCanvas.height = height;
  // pica reports completion via an error-first callback; adapt it to the
  // surrounding Promise and emit the canvas contents as a Blob on success.
  const afterResize = (resolve, reject) => err => {
    if (err) {
      return reject(err);
    }
    $destinationCanvas.toBlob(resolve, mimetype);
  };
  return new Promise((resolve, reject) => {
    pica.resizeCanvas($img, $destinationCanvas, {}, afterResize(resolve, reject));
  });
}
/**
 * Create (if missing) a thumbnail for the image doc `id`, bounded to 320px
 * on its longest side. Stores the resized blob as the `thumbnail`
 * attachment and records its fetch URL under `attachmentUrls.thumbnail`.
 * Returns the resized Blob, or undefined when the thumbnail already exists.
 */
export async function generateThumbnailForImage(id) {
  const results = await find([id], { attachments: true, binary: true });
  const doc = results.rows[0].doc;
  // Skip work when both the URL record and the stored attachment exist.
  // NOTE(review): reads doc.attachmentUrls without a guard — assumes every
  // image doc carries that object (set during import); confirm.
  if (doc.attachmentUrls.thumbnail && doc._attachments.thumbnail) {
    return;
  }
  const attachment = doc._attachments.image;
  const mimetype = attachment.content_type;
  // Bound the longest side to 320px while keeping the aspect ratio.
  const { width, height } = maxLinearSize(doc.width, doc.height, 320);
  const resizedBlob = await resizeImage(attachment.data, mimetype, width, height);
  const url = generateAttachmentUrl(DB_NAME, id, 'thumbnail');
  await addAttachment(doc, 'thumbnail', resizedBlob);
  // update() re-fetches the doc before putting, so the revision bumped by
  // addAttachment above is picked up rather than conflicting.
  await update(doc._id, {
    attachmentUrls: {
      thumbnail: url
    }
  });
  return resizedBlob;
}

// Generate a thumbnail for every newly imported image.
imported.subscribe(generateThumbnailForImage);

View File

@ -0,0 +1,155 @@
import ExifParser from 'exif-parser';
import { PouchDB, generateAttachmentUrl } from '../services/db.js';
import { log, error } from '../services/console.js';
import { sha256 } from '../utils/crypto.js';
import { blobToArrayBuffer, deepAssign } from '../utils/conversion.js';
import { Event, backgroundTask } from '../utils/event.js';
// Database identity for the image collection; exported so other modules can
// build attachment URLs against the same database.
export const DB_NAME = 'gallery-images';
export const db = new PouchDB(DB_NAME); // FIXME - don't export
// NOTE(review): `subscribers` is never referenced anywhere visible in this
// file — presumably leftover scaffolding; confirm before deleting.
const subscribers = [];
// Key prefix for placeholder docs created by add() before import processing.
const IMPORT_PREFIX = 'importing';
// Events
// Fired once per processed import; see processImportables for the arguments
// passed to fire() (new doc id, placeholder id, success flag).
export const imported = new Event('Image.imported');
// Methods
/**
 * Fetch full documents for the given ids.
 * Caller-supplied options are honored, except `keys`, which always wins.
 * @param {string[]} keys - document ids to fetch
 * @param {Object} [options] - extra allDocs options (attachments, binary, …)
 * @returns {Promise<Object>} the PouchDB allDocs result
 */
export async function find(keys, options = {}) {
  const query = Object.assign({ include_docs: true }, options, { keys });
  return await db.allDocs(query);
}
/**
 * Stage a FileList (or array-like of File objects) for import.
 * Each file becomes a placeholder doc keyed under the import prefix, with
 * the raw file stored as its `image` attachment; background processing then
 * promotes it to a permanent image record.
 * @returns {Promise<Object[]>} the placeholder docs that were written ok
 */
export async function add(imageFileList) {
  const placeholders = Array.from(imageFileList, file => ({
    _id: `${IMPORT_PREFIX}_${file.name}`,
    name: file.name,
    mimetype: file.type,
    size: file.size,
    modifiedDate: new Date(file.lastModified).toISOString(),
    uploadedDate: new Date().toISOString(),
    _attachments: {
      image: {
        content_type: file.type,
        data: file
      }
    }
  }));
  const outcome = await db.bulkDocs(placeholders);
  processImportables();
  return placeholders.filter((doc, idx) => outcome[idx].ok);
}
/**
 * Remove one document (string id, optional known rev) or a batch (array of
 * ids; current revisions are looked up).
 * @param {string|string[]} ids - a single id or an array of ids
 * @param {string} [rev] - revision for the single-id path (skips the lookup)
 * @returns {Promise<boolean|boolean[]>} success flag(s)
 */
export async function remove(ids, rev) {
  if (!Array.isArray(ids)) {
    try {
      // Use the supplied rev when available; otherwise fetch the current one.
      const doc = rev ? { _id: ids, _rev: rev } : await db.get(ids);
      await db.remove(doc);
      return true;
    } catch (e) {
      if (e.status !== 404) {
        // BUG FIX: previously interpolated `_id`, which is not in scope here
        // and threw a ReferenceError that masked the original failure.
        error(`Error removing Image import placeholder ${ids}`, e);
      }
      return false;
    }
  }
  // Batch path: fetch current revisions, then tombstone every doc.
  // NOTE(review): `rev` is ignored here, and a row whose doc was not found
  // would make Object.assign throw — assumed callers only pass existing ids;
  // confirm before hardening.
  const docs = await find(ids);
  const result = await db.bulkDocs(docs.rows.map(r => Object.assign(r.doc, { _deleted: true })));
  return result.map(r => r.ok);
}
/**
 * Deep-merge `properties` into the stored document for `id` and persist it.
 * @param {string} id - document id
 * @param {Object} properties - nested properties to merge in
 * @returns {Promise<Object>} the merged document as written
 */
export async function update(id, properties) {
  const { rows } = await find([id]);
  const doc = rows[0].doc;
  deepAssign(doc, properties);
  await db.put(doc);
  return doc;
}
/**
 * Persist `blob` as the attachment named `key` on `doc`, using the blob's
 * own MIME type.
 * @returns {Promise<Object>} the putAttachment result
 */
export async function addAttachment(doc, key, blob) {
  const { _id, _rev } = doc;
  return db.putAttachment(_id, key, _rev, blob, blob.type);
}
// Internal Functions

// Drain the queue of placeholder docs created by add(), one per idle slice.
// Each invocation promotes at most one `importing_*` doc to a permanent
// `image_*` doc, then re-schedules itself until none remain.
const processImportables = backgroundTask(async function _processImportables() {
  // Fetch a single pending placeholder, with its raw image attachment.
  // NOTE(review): the `_0`..`_z` key range assumes file names start with a
  // character inside that ASCII span — confirm for unusual names.
  const result = await db.allDocs({
    startkey: `${IMPORT_PREFIX}_0`,
    endkey: `${IMPORT_PREFIX}_z`,
    include_docs: true,
    attachments: true,
    binary: true,
    limit: 1
  });
  if (!result.rows.length) {
    // Nothing left to import.
    return;
  }
  const doc = result.rows[0].doc;
  const buffer = await blobToArrayBuffer(doc._attachments.image.data);
  const digest = await sha256(buffer);
  // NOTE(review): ExifParser.parse() may throw on non-JPEG input, which
  // would stop the import loop — presumably safe because the file input
  // only accepts image/jpeg; confirm.
  const exifData = ExifParser.create(buffer).parse();
  const { tags, imageSize } = exifData;
  // Prefer the EXIF capture time (seconds since epoch); fall back to the
  // file modification date recorded by add().
  const originalDate = new Date(
    tags.DateTimeOriginal ? new Date(tags.DateTimeOriginal * 1000).toISOString() : doc.modifiedDate
  );
  const { _id, _rev } = doc;
  // Deterministic id: capture time (base36) plus a digest prefix, so the
  // same photo always maps to the same id.
  const id = `image_${originalDate.getTime().toString(36)}_${digest.substr(0, 6)}`;
  let continueProcessing = true;
  try {
    const existingRecord = await find([id]);
    if (existingRecord.rows[0].doc.digest === digest) {
      // Duplicate of an already-imported photo: skip the write.
      continueProcessing = false;
    }
  } catch (e) {
    // Basically this means there are no existing records
  }
  if (continueProcessing) {
    // Build the permanent record: placeholder fields + EXIF metadata +
    // the attachment URL + image dimensions.
    const newDoc = Object.assign(
      {},
      doc,
      {
        _id: id,
        originalDate: originalDate.toISOString(),
        orientation: tags.Orientation,
        digest,
        make: tags.Make,
        model: tags.Model,
        flash: !!tags.Flash,
        ISO: tags.ISO,
        attachmentUrls: {
          image: generateAttachmentUrl(DB_NAME, id, 'image')
        },
        gps: {
          latitude: tags.GPSLatitude,
          longitude: tags.GPSLongitude,
          altitude: tags.GPSAltitude,
          heading: tags.GPSImgDirection
        }
      },
      imageSize // width & height
    );
    delete newDoc._rev; // assigned from doc but not desired.
    try {
      await db.put(newDoc);
      imported.fire(id, _id, true);
    } catch (e) {
      error(`Error processing Image ${id}`, e);
    }
  } else {
    imported.fire(id, _id, false);
  }
  // Remove the placeholder and continue with the next queued import.
  // NOTE(review): neither call is awaited, so removal races the next
  // iteration — presumably harmless since they touch different docs; confirm.
  remove(_id, _rev);
  processImportables();
});

// Check if we have any unimported images.
processImportables();

View File

@ -0,0 +1,8 @@
<body>
<div>
Images
<input id="fInput" type="file" multiple accept="image/jpeg"/>
</div>
</body>
<script src="/assets/app.bundle.js"></script>

View File

@ -0,0 +1,23 @@
// Development-only stand-in for console.log: forwards its arguments only
// when the global __DEV__ flag is truthy.
export function log(...args) {
  if (!__DEV__) {
    return;
  }
  console.log(...args);
}
// Development-only stand-in for console.error: forwards its arguments only
// when the global __DEV__ flag is truthy.
export function error(...args) {
  if (!__DEV__) {
    return;
  }
  console.error(...args);
}
// Development-only stand-in for console.group: forwards its arguments only
// when the global __DEV__ flag is truthy.
export function group(...args) {
  if (!__DEV__) {
    return;
  }
  console.group(...args);
}
// Development-only stand-in for console.groupEnd: forwards its arguments
// only when the global __DEV__ flag is truthy.
export function groupEnd(...args) {
  if (!__DEV__) {
    return;
  }
  console.groupEnd(...args);
}

View File

@ -0,0 +1,9 @@
// PouchDB core with the WebSQL, IndexedDB, and HTTP adapters plus
// replication support registered via the plugin API.
export const PouchDB = require('pouchdb-core')
  .plugin(require('pouchdb-adapter-websql'))
  .plugin(require('pouchdb-adapter-idb'))
  .plugin(require('pouchdb-adapter-http'))
  .plugin(require('pouchdb-replication'));
/**
 * Build the server route used to fetch a single document attachment.
 * @param {string} dbName - database name segment
 * @param {string} docId - document id segment
 * @param {string} attachmentKey - attachment name segment
 * @returns {string} a path of the form /_doc_attachments/<db>/<doc>/<key>
 */
export function generateAttachmentUrl(dbName, docId, attachmentKey) {
  const segments = ['_doc_attachments', dbName, docId, attachmentKey];
  return `/${segments.join('/')}`;
}

View File

@ -0,0 +1,35 @@
import { readAsArrayBuffer } from 'pouchdb-binary-utils';
/**
 * Convert an ArrayBuffer to its lowercase hex representation.
 *
 * BUG FIX / generalization: the original read the buffer through
 * DataView.getUint32 and threw a RangeError whenever byteLength was not a
 * multiple of 4. Reading byte-by-byte yields the identical string for
 * 4-byte-aligned buffers (getUint32 reads big-endian, i.e. in byte order)
 * while also handling arbitrary lengths.
 *
 * @param {ArrayBuffer} buffer - raw bytes to encode
 * @returns {string} hex string, two characters per byte
 */
export function bufferToHexString(buffer) {
  const bytes = new Uint8Array(buffer);
  const hexCodes = [];
  for (let i = 0; i < bytes.length; i += 1) {
    // padStart keeps single-digit values two characters wide.
    hexCodes.push(bytes[i].toString(16).padStart(2, '0'));
  }
  return hexCodes.join('');
}
// Promise wrapper around PouchDB's callback-style readAsArrayBuffer.
// NOTE: the callback's single argument becomes the resolution value; read
// failures are not surfaced through this wrapper.
export function blobToArrayBuffer(blob) {
  return new Promise(resolve => {
    readAsArrayBuffer(blob, resolve);
  });
}
/**
 * Recursively merge each source object's enumerable properties into `to`.
 * Plain-object values are merged key-by-key; arrays and all primitives
 * (including null) are assigned directly — arrays are NOT merged
 * element-wise.
 *
 * BUG FIX: `typeof null === 'object'`, so the original sent null values
 * down the merge path, where they were silently dropped (the previous value
 * survived) or became `{}`. null now copies through like any primitive.
 * Also, merging into a pre-existing primitive value no longer attempts to
 * set properties on it (a strict-mode TypeError); the primitive is replaced
 * by a fresh object instead.
 *
 * NOTE(review): non-plain objects (Date, Map, class instances) as source
 * values still take the merge path and get flattened into plain objects —
 * confirm no caller relies on passing them.
 *
 * @param {Object} to - target object, mutated in place
 * @param {...Object} rest - source objects; later sources win
 * @returns {Object} the mutated `to`
 */
export function deepAssign(to, ...rest) {
  for (const src of rest) {
    for (const prop in src) {
      const value = src[prop];
      if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
        const existing = to[prop];
        const mergeable =
          existing !== null && typeof existing === 'object' && !Array.isArray(existing);
        to[prop] = deepAssign(mergeable ? existing : {}, value);
      } else {
        to[prop] = value;
      }
    }
  }
  return to;
}

View File

@ -0,0 +1,5 @@
import { bufferToHexString } from './conversion.js';
// Compute the SHA-256 digest of an ArrayBuffer with WebCrypto and return it
// as a lowercase hex string.
export async function sha256(buffer) {
  const digest = await crypto.subtle.digest('sha-256', buffer);
  return bufferToHexString(digest);
}

View File

@ -0,0 +1,87 @@
import { log, error, group, groupEnd } from '../services/console.js';
export class Event {
constructor(name) {
this.name = name;
this.stages = [];
}
async fire(...args) {
const groupName = `Feeding pipeline "${this.name}"`;
group(groupName);
log('params:', ...args);
let i = this.stages.length;
const _next = async res => {
if (!i) {
groupEnd(groupName);
return res;
}
i -= 1;
const stage = this.stages[i];
try {
const result = stage(...args);
if (result && result.then) {
return result.then(_next);
}
return Promise.resolve(result).then(_next);
} catch (e) {
const stageName = stage.name || '<anonymous function>';
error(`${stageName} threw error:`, e);
}
};
return await _next();
}
subscribe(callback, position = 0) {
this.stages.splice(position, 0, callback);
}
unsubscribe(callback) {
this.stages.splice(this.stages.indexOf(callback), 1);
}
}
// requestIdleCallback sortof-polyfill
// Environments without requestIdleCallback get a setTimeout-based stand-in
// that treats each "idle period" as lasting 10ms from the time the callback
// was scheduled. The deadline object only implements timeRemaining().
if (!global.requestIdleCallback) {
  const IDLE_TIMEOUT = 10;
  global.requestIdleCallback = cb => {
    const start = Date.now();
    const deadline = {
      timeRemaining: () => Math.max(0, IDLE_TIMEOUT - (Date.now() - start))
    };
    return setTimeout(() => cb(deadline), 1);
  };
}
/**
 * Wrap `fn` so that invoking the returned trigger schedules `fn` during
 * idle time (via requestIdleCallback) instead of running it immediately.
 * Triggers arriving while a run is already scheduled are coalesced into a
 * re-run counter rather than queued individually.
 */
export function backgroundTask(fn) {
  let id = null;       // pending requestIdleCallback handle; null when idle
  let reRunCount = 0;  // triggers received while a run was already pending

  function runTask({ didTimeout, timeRemaining }) {
    if (didTimeout) {
      // The deadline elapsed before we got to run; retry next idle period.
      id = requestIdleCallback(runTask);
      return;
    }
    const start = Date.now();
    // NOTE(review): fn's returned promise is not awaited, so for an async
    // fn the elapsed-time check below only covers its synchronous portion.
    fn();
    if (reRunCount && Date.now() - start < timeRemaining()) {
      // Budget remains in this idle slice: consume one queued re-run.
      reRunCount -= 1;
      id = requestIdleCallback(runTask);
    } else {
      // NOTE(review): if the idle budget ran out, any remaining reRunCount
      // waits for the NEXT external trigger before being serviced — confirm
      // this deferral is acceptable for callers.
      id = null;
    }
  }

  return () => {
    if (id !== null) {
      // A run is already scheduled; just note that more work arrived.
      reRunCount += 1;
      return;
    }
    id = requestIdleCallback(runTask);
    return;
  };
}

View File

@ -0,0 +1,18 @@
const path = require('path');
const webpack = require('webpack');
module.exports = {
context: path.resolve(__dirname, './src'),
entry: {
app: './app.js'
},
output: {
path: path.resolve(__dirname, './dist'),
filename: '[name].bundle.js',
publicPath: '/assets'
},
devServer: {
contentBase: path.resolve(__dirname, './src')
},
devtool: 'source-map'
};