diff --git a/package-lock.json b/package-lock.json index 68ff14c2..d670a928 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,6 +12,7 @@ "packages/create-field-module", "packages/field-components", "packages/field-kit", + "packages/field-module-observations", "packages/field-module-tasks", "packages/field-scripts" ], @@ -2319,7 +2320,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9001,7 +9001,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=12" } @@ -9017,7 +9016,6 @@ "os": [ "android" ], - "peer": true, "engines": { "node": ">=12" } @@ -9033,7 +9031,6 @@ "os": [ "darwin" ], - "peer": true, "engines": { "node": ">=12" } @@ -9049,7 +9046,6 @@ "os": [ "darwin" ], - "peer": true, "engines": { "node": ">=12" } @@ -9065,7 +9061,6 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=12" } @@ -9081,7 +9076,6 @@ "os": [ "freebsd" ], - "peer": true, "engines": { "node": ">=12" } @@ -9097,7 +9091,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9113,7 +9106,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9129,7 +9121,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9145,7 +9136,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9161,7 +9151,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9177,7 +9166,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9193,7 +9181,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9209,7 +9196,6 @@ "os": [ "linux" ], - "peer": true, "engines": { "node": ">=12" } @@ -9225,7 +9211,6 @@ "os": [ "netbsd" ], - "peer": true, "engines": { "node": ">=12" } @@ -9241,7 +9226,6 @@ "os": [ "openbsd" ], - "peer": true, "engines": { "node": ">=12" } @@ -9275,7 +9259,6 @@ "os": [ "sunos" ], - "peer": true, "engines": { "node": ">=12" } @@ -9291,7 +9274,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=12" } @@ -9307,7 +9289,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=12" } @@ -9323,7 +9304,6 @@ "os": [ "win32" ], - "peer": true, "engines": { "node": ">=12" } @@ -10136,6 +10116,10 @@ "integrity": "sha512-3yurQZ2hD9VISAhJJP9bpYFNQrHHBXE2JxxjY5aLEcDi46RmAzJE2OC9FAde0yis5ElW0jTTzs0zfg/Cca4XqQ==", "dev": true }, + "node_modules/field-module-observations": { + "resolved": "packages/field-module-observations", + "link": true + }, "node_modules/field-module-tasks": { "resolved": "packages/field-module-tasks", "link": true @@ -18189,6 +18173,13 @@ "node": ">= 16.13.2" } }, + "packages/field-module-observations": { + "version": "2.0.0-alpha.1", + "license": "GPL-3.0-or-later", + "devDependencies": { + "@farmos.org/field-kit": "2.0.0-alpha.8" + } + }, "packages/field-module-tasks": { "version": "2.0.0-beta.2", "license": "GPL-3.0-or-later", diff --git a/package.json b/package.json index e1939214..92e70778 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,9 @@ "build": "npm run build --workspace=@farmos.org/field-kit", "clean": "npx -y rimraf node_modules **/node_modules/ package-lock.json && npm i", "dev:field-kit": "npm run start --workspace=@farmos.org/field-kit", - "dev:field-module-tasks": "npm run start --workspace=field-module-tasks", + "dev:field-module-tasks": "npm run dev:fm-tasks", + "dev:fm-obs": "npm run start --workspace=field-module-observations", + "dev:fm-tasks": "npm run start --workspace=field-module-tasks", "start": "npm run dev:field-kit", "storybook": "npm run 
--workspace=@farmos.org/field-components storybook", "test": "echo \"Error: no test specified\" && exit 1" @@ -18,6 +20,7 @@ "packages/create-field-module", "packages/field-components", "packages/field-kit", + "packages/field-module-observations", "packages/field-module-tasks", "packages/field-scripts" ], diff --git a/packages/field-kit/src/entities/index.js b/packages/field-kit/src/entities/index.js index 4397cd60..01783236 100644 --- a/packages/field-kit/src/entities/index.js +++ b/packages/field-kit/src/entities/index.js @@ -1,9 +1,10 @@ import { isProxy, isRef, reactive, readonly, ref, shallowReactive, unref, watch, watchEffect, } from 'vue'; +import { useObjectUrl } from '@vueuse/core'; import { - assoc, clone, complement, compose, curryN, equals, is, - map, mapObjIndexed, pick, propEq, when, + assoc, clone, complement, compose, curryN, equals, filter as rFilter, + is, map, mapObjIndexed, pick, propEq, when, } from 'ramda'; import { validate, v4 as uuidv4 } from 'uuid'; import { splitFilterByType } from 'farmos'; @@ -15,11 +16,13 @@ import { syncEntities } from '../http/sync'; import SyncScheduler from '../http/SyncScheduler'; import { getRecords } from '../idb'; import { cacheEntity } from '../idb/cache'; -import asArray from '../utils/asArray'; +import { cacheFileData, fmtFileData, loadFileEntity } from '../idb/files'; +import { isArrayLike } from '../utils/asArray'; import diff from '../utils/diff'; import parseFilter from '../utils/parseFilter'; import { PromiseQueue } from '../utils/promises'; import flattenEntity from '../utils/flattenEntity'; +import upsert from '../utils/upsert'; import { alert } from '../warnings/alert'; import nomenclature from './nomenclature'; import { @@ -72,7 +75,7 @@ const replay = (previous, transactions) => { const syncHandler = revision => interceptor((evaluation) => { const { - entity, type, id, state, + entity, type, id, state, files, } = revision; const { loginRequired, connectivity, repeatable, warnings, data: [value] = [], @@ -82,7 +85,7 @@ const syncHandler = revision => interceptor((evaluation) => { alert(warnings); } if (repeatable.length > 0) { - const subscribe = scheduler.push(entity, type, id); + const subscribe = scheduler.push(entity, type, id, { files }); subscribe((data) => { emit(state, data); }); @@ -180,7 +183,7 @@ export default function useEntities(options = {}) { const route = identifyRoute(); const [backupURI, transactions] = restoreTransactions(entity, type, _id, route); const revision = { - entity, type, id: _id, state, transactions, queue, backupURI, + entity, type, id: _id, state, transactions, queue, backupURI, files: {}, }; revisions.set(reference, revision); return [reference, revision]; @@ -280,9 +283,9 @@ export default function useEntities(options = {}) { const { queue, state, transactions } = revision; queue.push(() => { updateStatus(STATUS_IN_PROGRESS); - return getRecords('entities', _entity, id).then((data) => { - if (data) emit(state, data); - const syncOptions = { cache: asArray(data), filter: { id, type } }; + return getRecords('entities', _entity, id).then((cache) => { + if (cache) emit(state, cache); + const syncOptions = { cache, filter: { id, type } }; return syncEntities(shortName, syncOptions) .then(syncHandler(revision)) .then(({ data: [value] = [] } = {}) => { @@ -417,6 +420,87 @@ export default function useEntities(options = {}) { if (watcher && typeof watcher.unwatch === 'function') watcher.unwatch(); } + function useFile(fileData) { + const file = clone(fileData); + if (file.data instanceof Blob) 
file.url = useObjectUrl(file.data); + return reactive(file); + } + + function restoreFiles(reference, field) { + const revision = revisions.get(reference); + if (!is(Object, revision.files)) revision.files = {}; + if (!Array.isArray(revision.files[field]) || !isProxy(revision.files[field])) { + revision.files[field] = reactive([]); + } + + // A safe helper for getting file entity identifiers from the host entity. + const getResources = state => state?.relationships?.[field] || []; + revision.queue.push(refState => Promise.all(getResources(refState).map(async (resource) => { + const init = await loadFileEntity(resource); + let fileData = init === null ? fmtFileData(null, null, resource) : init; + let file = useFile(fileData); + upsert(revision.files[field], 'id', file); + if (init === null) fileData = await cacheFileData(null, null, resource); + if (validate(fileData.file_entity?.id)) return fileData; + // Normally the fileData's id and type can't be assumed to be the same as + // the file_entity, but if there's no file_entity, then we can be sure the + // resource identifier was provided as the fileData's id and type above. + const { id, type } = fileData; + const { data: [file_entity] } = await farm.file.fetch({ filter: { id, type } }); + if (!file_entity?.attributes?.uri?.url) { + return cacheFileData(null, file_entity, { id, type }); + } + const { attributes: { uri: { url }, filemime } } = file_entity; + const headers = { + Accept: filemime || 'application/octet-stream', + }; + const { data } = await farm.remote.request({ + headers, responseType: 'blob', url, + }); + const updated = fmtFileData(data, file_entity, { id, type }); + file = useFile(updated); + upsert(revision.files[field], 'id', file); + return cacheFileData(data, file_entity, { id, type }); + })).then(() => refState).catch(() => refState)); + + return readonly(revision.files[field]); + } + + function attachFile(reference, field, file) { + const revision = revisions.get(reference); + if (!revision?.files?.[field]) restoreFiles(reference, field); + const recur = f => attachFile(reference, field, f); + if (isArrayLike(file)) return Array.from(file).map(recur); + const fileData = fmtFileData(file); + const fileState = useFile(fileData); + // Use the queue to push the file state onto the actual revision files, + // in case files are still being loaded or synced by restoreFiles. In the + // meantime, a readonly copy of the state can be returned synchronously. + revision.queue.push((previous) => { + revision.files[field].push(fileState); + return previous; + }); + return readonly(fileState); + } + + function removeFile(reference, field, file) { + const revision = revisions.get(reference); + if (!is(Object, revision.files)) return -1; + if (!Array.isArray(revision.files[field])) return -1; + const findAndSplice = (predicate) => { + const i = revision.files[field].findIndex(predicate); + if (i < 0) return i; + revision.files[field].splice(i, 1); + return i; + }; + if (typeof file === 'string') return findAndSplice(f => file === f.url); + if (file instanceof Blob) return findAndSplice(f => Object.is(file, f.data)); + if (Array.isArray(file)) return file.map(f => removeFile(reference, field, f)); + if (isProxy(file)) return removeFile(reference, field, file.url); + return -1; + } + // An ASYNCHRONOUS operation, which replays all transactions based on the // current state of the entity at the time of calling, then writes those // changes to the local database and sends them on to remote systems. 
@@ -440,6 +524,19 @@ export default function useEntities(options = {}) { emit(state, fields); const next = farm[shortName].update(previous, fields); return cacheEntity(entity, next) + .then(() => { + const cacheByField = (field) => { + const references = [{ id, type, fields: [field] }]; + return (file, i) => cacheFileData(file.data, null, { references }) + .then(({ id: fileId, type: fileType }) => { + revision.files[field][i].id = fileId; + revision.files[field][i].type = fileType; + }); + }; + const fileCachingRequests = Object.entries(revision.files || {}) + .flatMap(([field, files]) => files.map(cacheByField(field))); + return Promise.allSettled(fileCachingRequests); + }) .then(() => { clearBackup(backupURI); if (!is(Map, dependencies)) return Promise.resolve({}); @@ -482,16 +579,31 @@ export default function useEntities(options = {}) { emit(state, finalFields); return farm[shortName].update(next, finalFields); }) - .then((final) => { - const syncOptions = { cache: asArray(final), filter: { id, type } }; - return syncEntities(shortName, syncOptions); - }) - .then(syncHandler(revision)) - .then(({ data: [value] = [] } = {}) => value); + .then((cache) => { + const syncOptions = { cache, filter: { id, type } }; + // File data should only be synced once. + const takeUnsyncedFiles = rFilter(fileData => !fileData.file_entity); + const files = map(takeUnsyncedFiles, revision.files); + const { length: fileCount } = Object.values(files || {}).flat(); + if (fileCount > 0) syncOptions.files = { [id]: files }; + return syncEntities(shortName, syncOptions) + .then(syncHandler(revision)) + .then(({ data: [final] = [] } = {}) => final || cache); + }); }); } return { - add, append, checkout, commit, drop, link, revise, unlink, + add, + append, + attachFile, + checkout, + commit, + drop, + link, + removeFile, + restoreFiles, + revise, + unlink, }; } diff --git a/packages/field-kit/src/http/SyncScheduler.js b/packages/field-kit/src/http/SyncScheduler.js index 93fb9048..8d0b49e7 100644 --- a/packages/field-kit/src/http/SyncScheduler.js +++ b/packages/field-kit/src/http/SyncScheduler.js @@ -1,5 +1,7 @@ import { validate } from 'uuid'; -import { is } from 'ramda'; +import { + compose, concat, is, map, mergeDeepWith, pick, uniq, +} from 'ramda'; import { syncEntities } from './sync'; import interceptor from './interceptor'; import { getRecords } from '../idb'; @@ -10,6 +12,7 @@ import { STATUS_IN_PROGRESS, updateStatus } from './connection'; import { alert } from '../warnings/alert'; import { asFlatArray } from '../utils/asArray'; import parseFilter from '../utils/parseFilter'; +import { loadFilesByHostId } from '../idb/files'; // An array of shortNames to ensure only valid entities are pushed onto the scheduler. 
const entities = Object.keys(nomenclature.entities); @@ -18,8 +21,10 @@ const stringifyID = (entity, type, id) => JSON.stringify({ entity, type, id }); const parseID = string => JSON.parse(string); const FILTER_ID = 'FILTER_ID'; -const makeNewGroup = type => ({ id: null, type, filter: null }); -function groupFilters(pendingIdStrings, pendingFilters) { +const makeNewGroup = type => ({ + id: null, type, files: null, filter: null, +}); +function groupFilters(pendingIdStrings, pendingFilters, pendingFiles) { const groupMap = new Map(); pendingIdStrings.forEach((idString) => { const { type, id } = parseID(idString); @@ -45,19 +50,26 @@ function groupFilters(pendingIdStrings, pendingFilters) { group.id.push(id); } } + const concatUniq = compose(uniq, concat); + const mergeIdentifiers = mergeDeepWith(concatUniq); + const fileIdentifiers = pendingFiles.get(idString); + if (is(Object, fileIdentifiers) && validate(id)) { + const files = { [id]: fileIdentifiers }; + group.files = mergeIdentifiers(group.files || {}, files); + } groupMap.set(type, group); }); // Iterate through the ESM Map, add the id's as a separate filter, and then // return a plain, flat array. const groups = Array.from(groupMap.values()).map((group) => { - const { id, type } = group; + const { id, type, files } = group; let { filter } = group; if (validate(id) || Array.isArray(id)) { filter = asFlatArray(filter); filter.push({ id, type }); } if (!filter) filter = { type }; - return { type, filter }; + return { type, files, filter }; }); return groups; } @@ -87,21 +99,27 @@ export default function SyncScheduler(intervals = defaultIntervals) { const pending = new Set(); const listeners = new Map(); const pendingFilters = new Map(); + const pendingFiles = new Map(); async function retry() { const retrying = new Set(pending); pending.clear(); const retryingFilters = new Map(pendingFilters); pendingFilters.clear(); - const filterGroups = groupFilters(retrying, retryingFilters); + const retryingFiles = new Map(pendingFiles); + pendingFiles.clear(); + const filterGroups = groupFilters(retrying, retryingFilters, retryingFiles); const requests = filterGroups.map(async (group) => { updateStatus(STATUS_IN_PROGRESS); const { type, filter } = group; const [entity] = type.split('--'); const { shortName } = nomenclature.entities[entity]; - const query = parseFilter(filter); - const cache = await getRecords('entities', entity, query); - const results = await syncEntities(shortName, { cache, filter }); + const syncOptions = { + cache: await getRecords('entities', entity, parseFilter(filter)), + files: await loadFilesByHostId(group.files || {}), + filter, + }; + const results = await syncEntities(shortName, syncOptions); const handler = ({ connectivity, warnings }) => { updateStatus(connectivity); if (warnings.length > 0) { @@ -151,7 +167,7 @@ export default function SyncScheduler(intervals = defaultIntervals) { }, interval); } - this.push = function push(entity, type, target) { + this.push = function push(entity, type, target, options = {}) { if (!entities.includes(entity)) { throw new Error(`Invalid entity name: ${entity}`); } @@ -170,6 +186,10 @@ export default function SyncScheduler(intervals = defaultIntervals) { } // If the target is invalid, return false instead of a subscriber function. 
if (!idString) return false; + if (is(Object, options.files)) { + const fileIdentifiers = map(map(pick(['id', 'type'])), options.files); + pendingFiles.set(idString, fileIdentifiers); + } if (pending.size === 0) startClock(); pending.add(idString); return function subscribe(listener) { diff --git a/packages/field-kit/src/http/interceptor.js b/packages/field-kit/src/http/interceptor.js index 77c3842c..8ea85b18 100644 --- a/packages/field-kit/src/http/interceptor.js +++ b/packages/field-kit/src/http/interceptor.js @@ -2,7 +2,7 @@ import { clone, curryN, evolve, partition, reduce, } from 'ramda'; import { getHost } from '../farm/remote'; -import asArray from '../utils/asArray'; +import { asArray } from '../utils/asArray'; import Warning from '../warnings/Warning'; function evaluateResponse(response = {}, errorMsg) { diff --git a/packages/field-kit/src/http/sync.js b/packages/field-kit/src/http/sync.js index 6928cf3f..3c159d36 100644 --- a/packages/field-kit/src/http/sync.js +++ b/packages/field-kit/src/http/sync.js @@ -1,22 +1,41 @@ import { - allPass, insert, reduce, + allPass, anyPass, chain, complement, compose, defaultTo, filter as rFilter, + insert, isNil, map, path, prop, reduce, } from 'ramda'; import farm from '../farm'; +import { asFlatArray } from '../utils/asArray'; import parseFilter from '../utils/parseFilter'; -export const fetchEntities = (shortName, { cache = [], filter, limit }) => - farm[shortName].fetch({ filter, limit }).then((results) => { +const isNotNil = complement(isNil); +const getIncludedIds = compose( + map(prop('id')), + rFilter(isNotNil), + chain(compose(defaultTo([]), path(['data', 'included']))), +); +function getIncluded(data, fulfilled) { + const ids = getIncludedIds(fulfilled); + const included = data.filter(d => ids.includes(d.id)); + return included; +} + +export const fetchEntities = (shortName, { cache, ...rest }) => + farm[shortName].fetch({ ...rest }).then((results) => { const { data, fulfilled, rejected } = results; + const included = getIncluded(data, fulfilled); const entities = data.reduce((collection, remote) => { const i = collection.findIndex(ent => ent.id === remote.id); const merged = farm[shortName].merge(collection[i], remote); return insert(i, merged, collection); - }, cache); - return { data: entities, fulfilled, rejected }; + }, asFlatArray(cache)); + return { + data: entities, included, fulfilled, rejected, + }; }); -export const syncEntities = (shortName, { cache = [], filter, limit }) => - fetchEntities(shortName, { cache, filter, limit }).then((fetchResults) => { +export const syncEntities = (shortName, options) => + fetchEntities(shortName, options).then((fetchResults) => { + const { filter, files = {} } = options; const { data: mergedEntities } = fetchResults; const failedBundleNames = fetchResults.rejected.map(({ response = {} }) => { const { config: { url = '' } = {} } = response; @@ -26,11 +45,14 @@ export const syncEntities = (shortName, { cache = [], filter, limit }) => const predicate = allPass([ parseFilter(filter), entity => failedBundleNames.every(b => b !== entity.type), - farm.meta.isUnsynced, + anyPass([farm.meta.isUnsynced, e => e.id in files]), ]); const syncables = mergedEntities.filter(predicate); - const sendRequests = syncables.map(farm[shortName].send); + const withOptions = e => (e.id in files ? 
{ files: files[e.id] } : {}); + const sendWithOptions = e => farm[shortName].send(e, withOptions(e)); + const sendRequests = syncables.map(sendWithOptions); const handleSendResults = reduce((result, { status, reason, value: remote }) => { + // No need to destructure 'included', since there's no reason to modify it. const { data, fulfilled, rejected } = result; if (status === 'rejected') { return { @@ -41,9 +63,9 @@ export const syncEntities = (shortName, { cache = [], filter, limit }) => const i = syncables.findIndex(ent => ent.id === remote.id); const merged = farm[shortName].merge(syncables[i], remote); return { + ...result, data: insert(i, merged, data), fulfilled: [...fulfilled, remote], - rejected, }; }, fetchResults); return Promise.allSettled(sendRequests).then(handleSendResults); diff --git a/packages/field-kit/src/idb/databases.js b/packages/field-kit/src/idb/databases.js index 9cd3bdcc..7ce34602 100644 --- a/packages/field-kit/src/idb/databases.js +++ b/packages/field-kit/src/idb/databases.js @@ -1,10 +1,21 @@ +function createIndices(store, indices) { + if (store instanceof IDBObjectStore && Array.isArray(indices)) { + indices.forEach(({ name, keyPath, options }) => { + store.createIndex(name, keyPath, options); + }); + } +} + function onUpgradeCreateObjectStore(event, options) { - const { name, keyPath = 'id', autoIncrement = false } = options; + const { + name, keyPath = 'id', autoIncrement = false, indices, + } = options; return new Promise((resolve, reject) => { const db = event.target.result; try { - db.createObjectStore(name, { keyPath, autoIncrement }); - resolve(); + const store = db.createObjectStore(name, { keyPath, autoIncrement }); + createIndices(store, indices); + resolve(store); } catch (error) { reject(error); } @@ -83,4 +94,28 @@ export default { }, ], }, + binary_data: { + version: 1, + name: 'binary_data', + stores: [ + { + name: 'file', + indices: [{ + name: 'file_entity_id', + keyPath: 'file_entity.id', + }], + keyPath: 'id', + upgrades, + }, + { + name: 'image', + indices: [{ + name: 'file_entity_id', + keyPath: 'file_entity.id', + }], + keyPath: 'id', + upgrades, + }, + ], + }, }; diff --git a/packages/field-kit/src/idb/files.js b/packages/field-kit/src/idb/files.js new file mode 100644 index 00000000..1462a8c0 --- /dev/null +++ b/packages/field-kit/src/idb/files.js @@ -0,0 +1,105 @@ +import { validate, v4 as uuidv4 } from 'uuid'; +import { getRecords, getRecordsFromIndex, saveRecord } from '.'; +import dbs from './databases.js'; + +const validStores = dbs.binary_data.stores.map(s => s.name); +const defaultMime = 'application/octet-stream'; + +/** + * @typedef {Object} EntityFileData + * @property {String} id + * @property {String} type + * @property {Object|File|Blob} [data] + * @property {Object} [file_entity] + * @property {String} filename + * @property {String} mime + * @property {[{ id, type }]} references + * @property {String} [url] + */ + +/** @typedef {Object.<String, EntityFileData[]>} FileDataByField */ +/** @typedef {Object.<String, FileDataByField>} FileDataByHostId */ + +/** + * @type {Function} + * @param {Object|File|Blob} [data] + * @param {Object} [file_entity] + * @param {Object} [options] + * @returns {EntityFileData} + */ +export function fmtFileData(data = null, file_entity = null, options = {}) { + const { id: entity_id, attributes = {} } = file_entity || {}; + const { filename: entity_filename, filemime, uri = {} } = attributes; + const { + mime = data?.type || filemime || defaultMime, + references = [], + url = uri.url || null, + } = options; + let { filename = data?.name || 
entity_filename, id = entity_id, type } = options; + if (!validate(id)) id = uuidv4(); + if (!validStores.includes(type)) { + const [f1, ...fields] = references.flatMap(r => r?.fields || []); + if (mime.startsWith('image')) type = 'image'; + else if (validStores.includes(f1) && fields.every(fN => f1 === fN)) type = f1; + else type = 'file'; + } + if (!filename) filename = `${type}-${id}`; + return { + id, type, data, file_entity, filename, mime, references, url, + }; +} + +/** + * @type {Function} + * @param {Object|File|Blob} [data] + * @param {Object} [file_entity] + * @param {Object} [options] + * @returns {Promise<EntityFileData>} + */ +export function cacheFileData(data, file_entity, options = {}) { + const _options = { ...options }; + // Never store object URL's, which share a lifespan with the window document. + // See https://developer.mozilla.org/en-US/docs/Web/API/URL/createObjectURL + // and https://w3c.github.io/FileAPI/#url-model for more details. + if (_options.url?.startsWith('blob:')) delete _options.url; + const record = fmtFileData(data, file_entity, _options); + return saveRecord('binary_data', record.type, record).then(() => record); +} + +export async function loadFileData(fileIdentifier) { + const { type: store, id } = fileIdentifier; + return getRecords('binary_data', store, id); +} + +export async function loadFilesByField(fileIdentifiers) { + const loader = async ([field, fileIds]) => { + const requests = fileIds.map(loadFileData); + const fileData = await Promise.all(requests); + return [field, fileData]; + }; + const requestsByField = Object.entries(fileIdentifiers).map(loader); + const resultsByField = await Promise.all(requestsByField); + return Object.fromEntries(resultsByField); +} + +export async function loadFilesByHostId(fileIdentifiers) { + const loader = async ([hostId, filesByField]) => { + const resultsByField = await loadFilesByField(filesByField); + return [hostId, resultsByField]; + }; + const requestsByHostId = Object.entries(fileIdentifiers).map(loader); + const resultsByHostId = await Promise.all(requestsByHostId); + return Object.fromEntries(resultsByHostId); +} + +export function loadFileEntity(fileEntity) { + const db = 'binary_data'; + const index = 'file_entity_id'; + function loader(entity, stores) { + if (stores.length <= 0) return null; + const [store, ...tail] = stores; + return getRecordsFromIndex(db, store, index, entity.id) + .catch(() => loader(entity, tail)); + } + return loader(fileEntity, validStores); +} diff --git a/packages/field-kit/src/idb/index.js b/packages/field-kit/src/idb/index.js index e2c376a2..b6b05742 100644 --- a/packages/field-kit/src/idb/index.js +++ b/packages/field-kit/src/idb/index.js @@ -53,19 +53,31 @@ function cursorQuery(store, query) { request.onerror = event => reject(event); }); } +function getter(storeOrIndex, query) { + if (!query) { + return getAllRecords(storeOrIndex); + } + if (Array.isArray(query)) { + return getManyByPrimaryKeys(storeOrIndex, query); + } + if (typeof query === 'function') { + return cursorQuery(storeOrIndex, query); + } + return getOneByPrimaryKey(storeOrIndex, query).then(([, data]) => data); +} export function getRecords(dbName, storeName, query) { return openDatabase(dbName).then((db) => { - const store = db.transaction(storeName, 'readonly').objectStore(storeName); - if (!query) { - return getAllRecords(store); - } - if (Array.isArray(query)) { - return getManyByPrimaryKeys(query); - } - if (typeof query === 'function') { - return cursorQuery(store, query); - } - return 
getOneByPrimaryKey(query).then(([, data]) => data); + const store = db.transaction(storeName, 'readonly') + .objectStore(storeName); + return getter(store, query); + }); +} +export function getRecordsFromIndex(dbName, storeName, indexName, query) { + return openDatabase(dbName).then((db) => { + const index = db.transaction(storeName, 'readonly') + .objectStore(storeName) + .index(indexName); + return getter(index, query); }); } diff --git a/packages/field-kit/src/utils/asArray.js b/packages/field-kit/src/utils/asArray.js index 830cfc75..90ddbe46 100644 --- a/packages/field-kit/src/utils/asArray.js +++ b/packages/field-kit/src/utils/asArray.js @@ -1,5 +1,5 @@ // Wrap a value in an array, unless it's nullish, then return an empty array. -const asArray = value => (value ? [value] : []); +export const asArray = value => (value ? [value] : []); // Like above, but checks if the value is already an array and if so returns it as is. export const asFlatArray = value => (Array.isArray(value) ? value : asArray(value)); @@ -15,5 +15,3 @@ export const isArrayLike = value => typeof value?.[Symbol.iterator] === 'functio export const fromFlatArray = value => (isArrayLike(value) ? Array.from(value) : asArray(value)) .flat(Infinity); - -export default asArray; diff --git a/packages/field-kit/src/utils/upsert.js b/packages/field-kit/src/utils/upsert.js index 10683780..066c1140 100644 --- a/packages/field-kit/src/utils/upsert.js +++ b/packages/field-kit/src/utils/upsert.js @@ -3,11 +3,8 @@ const upsert = (arr, identifier, el) => { const i = arr.findIndex(_el => el[identifier] === _el[identifier]); const isNew = i < 0; - if (isNew) { - arr.push(el); - } else { - arr.splice(i, 1, el); - } + if (isNew) arr.push(el); + else arr.splice(i, 1, el); }; export default upsert; diff --git a/packages/field-module-observations/module.config.js b/packages/field-module-observations/module.config.js new file mode 100644 index 00000000..8c17622b --- /dev/null +++ b/packages/field-module-observations/module.config.js @@ -0,0 +1,7 @@ +export default { + name: 'observations', + label: 'Observations', + description: 'Take photos and quick notes from the farm.', + widget: './src/ObservationsWidget.vue', + routes: './src/routes', +}; diff --git a/packages/field-module-observations/package.json b/packages/field-module-observations/package.json new file mode 100644 index 00000000..1e56a78a --- /dev/null +++ b/packages/field-module-observations/package.json @@ -0,0 +1,31 @@ +{ + "name": "field-module-observations", + "version": "2.0.0-alpha.1", + "description": "Take photos and quick notes from the farm.", + "main": "module.config.js", + "type": "module", + "scripts": { + "start": "npm run dev", + "dev": "field-scripts develop-module", + "build": "field-scripts build-module --config module.config.js", + "preview": "vite preview", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/farmOS/field-kit.git" + }, + "keywords": [ + "farmOS" + ], + "author": "Jamie Gaehring (https://jgaehring.com)", + "license": "GPL-3.0-or-later", + "dependencies": {}, + "devDependencies": { + "@farmos.org/field-kit": "2.0.0-alpha.8" + }, + "bugs": { + "url": "https://github.com/farmOS/field-kit/issues" + }, + "homepage": "https://github.com/farmOS/field-kit#readme" +} diff --git a/packages/field-module-observations/src/ObservationsContainer.vue b/packages/field-module-observations/src/ObservationsContainer.vue new file mode 100644 index 00000000..17fe6707 --- /dev/null +++ 
b/packages/field-module-observations/src/ObservationsContainer.vue @@ -0,0 +1,132 @@ + + + + + diff --git a/packages/field-module-observations/src/ObservationsWidget.vue b/packages/field-module-observations/src/ObservationsWidget.vue new file mode 100644 index 00000000..f071252f --- /dev/null +++ b/packages/field-module-observations/src/ObservationsWidget.vue @@ -0,0 +1,14 @@ + + + + + diff --git a/packages/field-module-observations/src/routes.js b/packages/field-module-observations/src/routes.js new file mode 100644 index 00000000..510b109e --- /dev/null +++ b/packages/field-module-observations/src/routes.js @@ -0,0 +1,11 @@ +import ObservationsContainer from './ObservationsContainer.vue'; + +const routes = [ + { + path: '/observations', + name: 'observations', + component: ObservationsContainer, + }, +]; + +export default routes; diff --git a/packages/field-scripts/bin/develop-module/index.js b/packages/field-scripts/bin/develop-module/index.js index a05dcf85..bf94d994 100644 --- a/packages/field-scripts/bin/develop-module/index.js +++ b/packages/field-scripts/bin/develop-module/index.js @@ -96,6 +96,7 @@ export default async function develop(options = {}) { '/api': proxyPort(80), '/oauth': proxyPort(80), '/fieldkit': proxyPort(80), + '/system': proxyPort(80), }, ...serverOptions, },
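
Below is a minimal usage sketch of the file API that this diff adds to useEntities. It is illustrative only: the attachFile, restoreFiles, and removeFile signatures come from the changes above, while the import path, the 'image' field name, and the checkout/revise/commit wiring are assumptions about the surrounding field-kit API.

```js
import { useEntities } from '@farmos.org/field-kit'; // assumed entry point

const {
  checkout, revise, commit, attachFile, restoreFiles, removeFile,
} = useEntities();

// Check out an observation log and restore any attachments already
// cached in the binary_data database (or fetchable from the server).
const log = checkout('log', 'observation'); // assumed signature
const images = restoreFiles(log, 'image'); // readonly reactive array

// Attach files from an <input type="file"> change event; attachFile
// accepts a single File/Blob or any array-like, such as a FileList.
function onFileInput(event) {
  attachFile(log, 'image', event.target.files);
}

// removeFile accepts an object URL string, a Blob, a reactive file
// proxy, or an array of those, and returns the spliced index (or -1).
function onRemoveClick(file) {
  removeFile(log, 'image', file);
}

// Committing caches each file's binary data alongside the log itself,
// and only files without a file_entity (i.e. not yet synced) are
// included in the sync request or scheduled for retry.
function onSave(notes) {
  revise(log, { notes }); // assumed signature
  commit(log);
}
```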
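A second hedged sketch, showing how fmtFileData (packages/field-kit/src/idb/files.js above) routes records between the two binary_data stores. The File inputs and identifiers here are made up; the routing rules are taken directly from the function body.

```js
import { fmtFileData } from './idb/files'; // path relative to field-kit/src

// A MIME type beginning with 'image' routes to the 'image' store.
const photo = fmtFileData(new File(['...'], 'leaf.jpg', { type: 'image/jpeg' }));
console.assert(photo.type === 'image');
console.assert(photo.filename === 'leaf.jpg');

// Any other MIME type falls back to the generic 'file' store.
const report = fmtFileData(new File(['...'], 'soil.pdf', { type: 'application/pdf' }));
console.assert(report.type === 'file');

// With no data, the referencing fields can still determine the store,
// so long as they all name the same valid store (here, 'image').
const placeholder = fmtFileData(null, null, {
  references: [{ id: 'a-log-uuid', type: 'log--observation', fields: ['image'] }],
});
console.assert(placeholder.type === 'image');
// A missing filename defaults to `${type}-${id}`, with a freshly
// generated UUID whenever no valid id is supplied.
```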