First pass at B2 backend support

Timothy Farrell 2018-06-13 04:31:36 -05:00
parent cee275d9b6
commit e67c9b0d98
7 changed files with 264 additions and 7 deletions

View File: package.json

@@ -14,14 +14,12 @@
     "date-fns": "~1.29.0",
     "domvm": "~3.2.1",
     "exif-parser": "~0.1.9",
-    "frptools": "3.1.1",
     "express": "~4.16.3",
+    "frptools": "3.2.0",
     "linear-partitioning": "0.3.2",
     "pica": "~2.0.8",
     "pouchdb-adapter-http": "~6.4.1",
     "pouchdb-adapter-idb": "~6.4.1",
-    "pouchdb-adapter-websql": "~6.4.1",
-    "pouchdb-binary-utils": "~6.4.1",
     "pouchdb-core": "~6.4.1",
     "pouchdb-find": "~6.4.1",
     "pouchdb-replication": "~6.4.1",

View File

@@ -1,7 +1,7 @@
 import { TypeSpec } from 'pouchorm';
 import { PouchDB } from '../services/db.js';
-import { sha256 } from '../utils/crypto.js';
+import { sha1 } from '../utils/crypto.js';
 import { blobToArrayBuffer } from '../utils/conversion.js';
 class FileSpec extends TypeSpec {
@@ -35,7 +35,7 @@ class FileSpec extends TypeSpec {
     }
     static async upload(blob) {
-        const digest = await sha256(await blobToArrayBuffer(blob));
+        const digest = await sha1(await blobToArrayBuffer(blob));
         const lastModified = blob.lastModified ? new Date(blob.lastModified) : new Date();
         return await FileType.getOrCreate({
             name: blob.name,
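
(The sha256 → sha1 switch lines up with Backblaze B2's upload API: the new adapter below sends the file's SHA-1 digest as the X-Bz-Content-Sha1 header on each upload, reading it from the digest this method stores on the document.)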

View File: services/b2.js

@@ -0,0 +1,133 @@
import { PouchDBAttachmentProxy } from '../utils/attachmentProxy.js';
import { deepAssign, blobToString } from '../utils/conversion.js';
import { prop, computed, stream } from 'frptools';

// Attachments handled by this adapter are stored on the document as a small
// JSON "storage map" (fileId/fileName) instead of the file bytes themselves.
const STORAGE_MIMETYPE = 'application/b2storagemap';

export const B2Adapter = function(b2apikey, b2secret, b2bucket) {
    const authDate = prop(null);
    // The session is valid while its expiry timestamp (set below) is still
    // in the future.
    const authValid = computed(d => !!d && Date.now() - d < 0, [authDate]);

    const session = stream(
        valid => {
            console.log(`Requesting b2 session: ${!valid}`);
            if (valid) {
                return Promise.resolve(null);
            }
            return fetch('/api/v1/authorize_account', {
                headers: new Headers({
                    Authorization: 'Basic ' + btoa(`${b2apikey}:${b2secret}`)
                })
            })
                .then(res => res.json())
                .then(data => {
                    // B2 authorization tokens last 24 hours.
                    const expires = new Date();
                    expires.setDate(expires.getDate() + 1);
                    authDate(expires);
                    console.log(`Session data: `, typeof data, data);
                    return data;
                })
                .catch(e => {
                    authDate(null);
                    return null;
                });
        },
        [authValid],
        s => s && s.authorizationToken
    );

    const uploadAuthorization = stream(
        async function(s) {
            const res = await fetch('/api/v1/get_upload_url', {
                headers: await headers(),
                body: `{"bucketId": "${b2bucket}"}`,
                cache: 'no-cache',
                method: 'post'
            });
            return await res.json();
        },
        [session],
        u => u && u.authorizationToken
    );

    async function headers(otherHeaders) {
        const s = await session();
        if (!s) {
            return new Headers();
        }
        const h = Object.assign(
            {
                Authorization: s.authorizationToken,
                apiUrl: s.apiUrl
            },
            otherHeaders || {}
        );
        return new Headers(h);
    }

    async function downloadUrl(fileId) {
        const s = await session();
        return s.downloadUrl + '/b2api/v1/b2_download_file_by_id?fileId=' + fileId;
    }

    async function readStorageMap(blob) {
        return JSON.parse(await blobToString(blob));
    }

    return PouchDBAttachmentProxy({
        getFn: async function getAttachment(docId, attName, att) {
            // Pass through attachments that are not B2 storage maps.
            if (att.type !== STORAGE_MIMETYPE) {
                return att;
            }
            const storagemap = await readStorageMap(att);
            const res = await fetch(await downloadUrl(storagemap.fileId), {
                headers: await headers()
            });
            return res.blob();
        },
        remove: async function removeAttachment(docId, attName, rev, att) {
            const storagemap = await readStorageMap(att);
            return fetch('/api/v1/remove_file', {
                headers: await headers(),
                method: 'POST',
                body: JSON.stringify({
                    fileName: storagemap.fileName,
                    fileId: storagemap.fileId
                })
            });
        },
        save: async function saveAttachment(doc, attName, obj) {
            try {
                const uploadAuth = await uploadAuthorization();
                const res = await fetch(uploadAuth.uploadUrl, {
                    method: 'POST',
                    headers: await headers({
                        // The upload URL takes its own token, not the session token.
                        Authorization: uploadAuth.authorizationToken,
                        'X-Bz-File-Name': encodeURIComponent(obj.data.name),
                        'Content-Type': obj.data.type,
                        'Content-Length': obj.data.size,
                        'X-Bz-Content-Sha1': doc.digest
                    }),
                    body: obj.data
                });
                const resData = await res.json();
                // Replace the attachment body with a storage map pointing at B2.
                deepAssign(doc, {
                    _attachments: {
                        [attName]: {
                            content_type: STORAGE_MIMETYPE,
                            data: btoa(JSON.stringify(resData))
                        }
                    }
                });
            } catch (e) {
                console.log('Error:', e);
                return { ok: false, error: e };
            }
            return { ok: true };
        }
    });
};
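
The adapter above never talks to Backblaze directly for auth and deletes; it fetches same-origin endpoints (/api/v1/authorize_account, /api/v1/get_upload_url, /api/v1/remove_file), which is presumably what the express dependency added to package.json is for. The server side of those routes is not in this commit; a minimal sketch of such a proxy, assuming B2's v1 REST API and a node-fetch dependency (route paths taken from the client code, everything else assumed):

// Hypothetical Express proxy for the three endpoints the adapter fetches.
// Not part of this commit; targets B2's v1 REST API.
const express = require('express');
const fetch = require('node-fetch'); // assumed extra dependency

const app = express();
// The client does not set a Content-Type on its JSON bodies,
// so parse JSON regardless of type.
app.use(express.json({ type: () => true }));

// b2_authorize_account: forward the client's Basic auth header.
app.get('/api/v1/authorize_account', async (req, res) => {
    const b2res = await fetch(
        'https://api.backblazeb2.com/b2api/v1/b2_authorize_account',
        { headers: { Authorization: req.get('Authorization') } }
    );
    res.status(b2res.status).json(await b2res.json());
});

// b2_get_upload_url: the client supplies the session token and apiUrl
// as headers (see headers() above) and the bucketId in the body.
app.post('/api/v1/get_upload_url', async (req, res) => {
    const b2res = await fetch(`${req.get('apiUrl')}/b2api/v1/b2_get_upload_url`, {
        method: 'POST',
        headers: { Authorization: req.get('Authorization') },
        body: JSON.stringify({ bucketId: req.body.bucketId })
    });
    res.status(b2res.status).json(await b2res.json());
});

// b2_delete_file_version, matching the client's remove_file call.
app.post('/api/v1/remove_file', async (req, res) => {
    const b2res = await fetch(`${req.get('apiUrl')}/b2api/v1/b2_delete_file_version`, {
        method: 'POST',
        headers: { Authorization: req.get('Authorization') },
        body: JSON.stringify({ fileName: req.body.fileName, fileId: req.body.fileId })
    });
    res.status(b2res.status).json(await b2res.json());
});

app.listen(3000);

Uploads themselves bypass the proxy: saveAttachment POSTs straight to the one-shot uploadUrl that B2 hands back.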

View File: services/db.js

@@ -5,10 +5,18 @@ import replication from 'pouchdb-replication';
 import find from 'pouchdb-find';
 import { PouchORM } from 'pouchorm';
+import { B2Adapter } from './b2.js';

 export const PouchDB = core
     .plugin(idb)
     .plugin(http)
     .plugin(replication)
     .plugin(find)
+    // .plugin(
+    //     B2Adapter(
+    //         B2_ACCOUNT,
+    //         B2_API_KEY,
+    //         B2_BUCKET_ID
+    //     )
+    // )
     .plugin(PouchORM);
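
(B2_ACCOUNT, B2_API_KEY, and B2_BUCKET_ID are not defined anywhere in this commit; presumably they would be injected at build time before the registration is uncommented.)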

View File: utils/attachmentProxy.js

@@ -0,0 +1,103 @@
import core from 'pouchdb-core';
import { backgroundTask } from '../utils/event.js';

// Keep references to the stock PouchDB methods so overrides can delegate.
const pouchBulkDocs = core.prototype.bulkDocs;
const pouchGetAttachment = core.prototype.getAttachment;
const pouchRemoveAttachment = core.prototype.removeAttachment;

export function PouchDBAttachmentProxy({ save, getFn, remove }) {
    const override = {};

    if (getFn) {
        override.getAttachment = async function getAttachment(...args) {
            const att = await pouchGetAttachment.apply(this, args);
            return await getFn.apply(this, args.concat(att));
        };
    }

    if (remove) {
        override.removeAttachment = async function removeAttachment(...args) {
            const att = await pouchGetAttachment.apply(this, args);
            try {
                await remove.apply(this, args.concat(att));
            } catch (e) {
                console.log('Error:', e);
                return;
            }
            return await pouchRemoveAttachment.apply(this, args);
        };
    }

    const cleanupFiles = backgroundTask(
        ([id, attName, rev, attPromise]) => attPromise.then(att => remove(id, attName, rev, att)),
        false
    );

    if (save || remove) {
        override.bulkDocs = function bulkDocs(...args) {
            let docs;
            if (Array.isArray(args[0])) {
                docs = args[0];
            } else {
                docs = args[0].docs;
            }
            const deletedFiles = new Set();
            const attachments = [];
            docs.filter(d => d.$$type === 'file').forEach(f => {
                if (f._deleted) {
                    // Fetch the attachment before the delete lands so the
                    // backing-store entry can be cleaned up afterwards.
                    const deleteArgs = [f._id, 'data', f._rev];
                    deletedFiles.add(deleteArgs.concat(pouchGetAttachment.call(this, f._id, 'data')));
                    return;
                }
                if (f._attachments && f._attachments.data.data instanceof Blob) {
                    console.log(`Saving File ${f._id} attachment`);
                    attachments.push([f, 'data', f._attachments.data]);
                    delete f._attachments.data;
                }
            });
            deletedFiles.forEach(cleanupFiles);
            // Persist attachments to the backing store first, then write the docs.
            return Promise.all(
                attachments.map(([doc, attName, obj]) => save.call(this, doc, attName, obj))
            ).then(() => {
                return pouchBulkDocs.call(this, ...args);
            });
        };
    }

    return override;
}
// export const LocalStorageExampleAdapter = function() {
//     return PouchDBAttachmentProxy({
//         getFn: async function getAttachment(docId, attName) {
//             const data = localStorage[`${docId}-${attName}`].split(';base64,');
//             var byteCharacters = atob(data[1]);
//             var byteNumbers = new Array(byteCharacters.length);
//             for (var i = 0; i < byteCharacters.length; i++) {
//                 byteNumbers[i] = byteCharacters.charCodeAt(i);
//             }
//             var byteArray = new Uint8Array(byteNumbers);
//             return Promise.resolve(new Blob([byteArray], {type: data[0].substr(5)}));
//         },
//         remove: async function removeAttachment(docId, attName, rev) {
//             delete localStorage[`${docId}-${attName}`];
//             return Promise.resolve({"ok": true});
//         },
//         save: async function saveAttachment(docId, attName, obj) {
//             return new Promise((resolve) => {
//                 var reader = new FileReader();
//                 reader.onloadend = function() {
//                     localStorage[`${docId}-${attName}`] = reader.result;
//                     resolve({"ok": true});
//                 }
//                 reader.readAsDataURL(obj.data);
//             });
//         }
//     });
// };
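
For reference, the object PouchDBAttachmentProxy returns is an ordinary PouchDB plugin (a map of method overrides), which is how services/db.js registers the B2 adapter above; the commented localStorage example, if restored, would hook in the same way:

PouchDB.plugin(LocalStorageExampleAdapter());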

View File: utils/conversion.js

@@ -1,4 +1,3 @@
-import { readAsArrayBuffer } from 'pouchdb-binary-utils';
 import { pick } from 'frptools';
 import { isObject } from './comparators';
@@ -20,7 +19,19 @@ export function bufferToHexString(buffer) {
 }

 export function blobToArrayBuffer(blob) {
-    return new Promise(resolve => readAsArrayBuffer(blob, resolve));
+    return new Promise((resolve, reject) => {
+        const f = new FileReader();
+        f.onload = _ => resolve(f.result);
+        f.onerror = () => reject(f.error);
+        f.readAsArrayBuffer(blob);
+    });
+}
+
+export function blobToString(blob) {
+    return new Promise((resolve, reject) => {
+        const f = new FileReader();
+        f.onload = _ => resolve(f.result);
+        f.onerror = () => reject(f.error);
+        f.readAsText(blob);
+    });
 }

 export const arrayHashWrapper = hash => arr => (Array.isArray(arr) ? arr.map(hash).join('?') : arr);

View File: utils/crypto.js

@@ -3,3 +3,7 @@ import { bufferToHexString } from './conversion.js';
 export async function sha256(buffer) {
     return bufferToHexString(await crypto.subtle.digest('sha-256', buffer));
 }
+
+export async function sha1(buffer) {
+    return bufferToHexString(await crypto.subtle.digest('sha-1', buffer));
+}