We have a web application (built with AngularJS) to which we are gradually adding PWA 'features' (service worker, installable, notifications, etc.). One of the features our web app has is the ability to complete a web form while offline. At the moment, we store the data in IndexedDB while offline and simply encourage the user to push that data to the server once they are back online ("This form is saved to your device. Now that you're back online, you should save it to the cloud..."). We will automate this at some point, but it isn't necessary for now.
We are adding a feature to these web forms whereby the user will be able to attach files (images, documents) to the form, perhaps at several points throughout the form.
My question is this: is there a way for a service worker to handle file uploads? To somehow, perhaps, store the path to the file to be uploaded while offline, and push that file up once the connection is restored? Would this work on mobile devices, and do we have access to that 'path' on those devices? Any help, advice or references would be much appreciated.
Answer 0 (score: 6)
One way to handle file uploads/deletions, and almost everything else, is to keep track of all the changes made during offline requests. We can create a sync object that holds two arrays: one for the pending files that need to be uploaded, and one for the deleted files that will need to be removed from the server once we are back online.
In the service worker fetch event, if the fetch fails, we have to handle the request for the file listing, the requests that upload a file to the server, and the requests that delete a file from the server. If the request is none of these, we return a match from the default cache.
- GET (/uploads): if the listing of files fails to load from the network, we take the cached listing data and the sync object. We concat the pending files with the default listing files, remove the deleted files, and return a new Response object with the JSON result, just as the server would have returned it.
- PUT: we take the cached uploads listing and the sync pending files. If the file does not already exist, we create a new cache entry for it, using the mime type and the blob from the request to build a new Response object, and save it to the default cache.
- DELETE: if the file is in the pending array, we remove that entry; otherwise, if it is not already in the deleted array, we add it there. Finally we update the listing, files and sync object caches.
(please read the inline comments)
const cacheName = 'pwasndbx';
const syncCacheName = 'pwasndbx-sync';
const pendingName = '__pending';
const syncName = '__sync';
const filesToCache = [
'/',
'/uploads',
'/styles.css',
'/main.js',
'/utils.js',
'/favicon.ico',
'/manifest.json',
];
/* Start the service worker and cache all of the app's content */
self.addEventListener('install', function(e) {
console.log('SW:install');
e.waitUntil(Promise.all([
caches.open(cacheName).then(async function(cache) {
let cacheAdds = [];
try {
// Get all the files from the uploads listing
const res = await fetch('/uploads');
const { data = [] } = await res.json();
const files = data.map(f => `/uploads/${f}`);
// Cache all uploads files urls
cacheAdds.push(cache.addAll(files));
} catch(err) {
console.warn('PWA:install:fetch(uploads):err', err);
}
// Also add our static files to the cache
cacheAdds.push(cache.addAll(filesToCache));
return Promise.all(cacheAdds);
}),
// Create the sync cache object
caches.open(syncCacheName).then(cache => cache.put(syncName, jsonResponse({
pending: [], // For storing the pending files that later will be synced
deleted: [] // For storing the files that later will be deleted on sync
}))),
])
);
});
self.addEventListener('fetch', function(event) {
// Clone request so we can consume data later
const request = event.request.clone();
const { method, url, headers } = event.request;
event.respondWith(
fetch(event.request).catch(async function(err) {
const { headers, method, url } = event.request;
// A custom header that we set to indicate the requests come from our syncing method
// so we won't try to fetch anything from cache, we need syncing to be done on the server
const xSyncing = headers.get('X-Syncing');
if(xSyncing && xSyncing.length) {
return caches.match(event.request);
}
switch(method) {
case 'GET':
// Handle listing data for /uploads and return JSON response
break;
case 'PUT':
// Handle upload to cache and return success response
break;
case 'DELETE':
// Handle delete from cache and return success response
break;
}
// If we meet no specific criteria, then lookup to the cache
return caches.match(event.request);
})
);
});
function jsonResponse(data, status = 200) {
return new Response(data && JSON.stringify(data), {
status,
headers: {'Content-Type': 'application/json'}
});
}
GET: handle the request for the uploads listing (case 'GET' in the switch above):
if(url.match(/\/uploads\/?$/)) { // Failed to get the uploads listing
// Get the uploads data from cache
const uploadsRes = await caches.match(event.request);
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// Return the files from uploads + pending files from sync - deleted files from sync
const data = files.concat(sync.pending).filter(f => sync.deleted.indexOf(f) < 0);
// Return a JSON response with the updated data
return jsonResponse({
success: true,
data
});
}
PUT: handle the file upload request (case 'PUT'):
// Get our custom headers
const filename = headers.get('X-Filename');
const mimetype = headers.get('X-Mimetype');
if(filename && mimetype) {
// Get the uploads data from cache
const uploadsRes = await caches.match('/uploads', { cacheName });
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// If the file exists in the uploads or in the pendings, then return a 409 Conflict response
if(files.indexOf(filename) >= 0 || sync.pending.indexOf(filename) >= 0) {
return jsonResponse({ success: false }, 409);
}
caches.open(cacheName).then(async (cache) => {
// Write the file to the cache using the request we cloned at the beginning
const data = await request.blob();
cache.put(`/uploads/${filename}`, new Response(data, {
headers: { 'Content-Type': mimetype }
}));
// Write the updated files data to the uploads cache
cache.put('/uploads', jsonResponse({ success: true, data: files }));
});
// Add the file to the sync pending data and update the sync cache object
sync.pending.push(filename);
caches.open(syncCacheName).then(cache => cache.put(new Request(syncName), jsonResponse(sync)));
// Return a success response with fromSw set to true so we know this response came from the service worker
return jsonResponse({ success: true, fromSw: true });
}
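For reference, a page-side upload that would hit this PUT branch might look roughly like the sketch below. The answer only shows that the service worker expects the X-Filename and X-Mimetype headers and the raw file as the body, so the exact URL and helper name here are assumptions:
// Hypothetical upload helper (not part of the original answer); the service worker
// above reads the X-Filename and X-Mimetype headers and the raw file body.
async function uploadFile(file) {
  const res = await fetch(`/uploads/${encodeURIComponent(file.name)}`, {
    method: 'PUT',
    headers: {
      'X-Filename': file.name,
      'X-Mimetype': file.type
    },
    body: file
  });
  // { success: true, fromSw: true } when the request is answered by the service worker offline
  return res.json();
}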
DELETE: handle the file delete request (case 'DELETE'):
// Get our custom headers
const filename = headers.get('X-Filename');
if(filename) {
// Get the uploads data from cache
const uploadsRes = await caches.match('/uploads', { cacheName });
let { data: files = [] } = await uploadsRes.json();
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// Check if the file is already pending or deleted
const pendingIndex = sync.pending.indexOf(filename);
const uploadsIndex = files.indexOf(filename);
if(pendingIndex >= 0) {
// If it's pending, then remove it from pending sync data
sync.pending.splice(pendingIndex, 1);
} else if(sync.deleted.indexOf(filename) < 0) {
// If it's not in pending and not already in sync for deleting,
// then add it for delete when we'll sync with the server
sync.deleted.push(filename);
}
// Update the sync cache
caches.open(syncCacheName).then(cache => cache.put(new Request(syncName), jsonResponse(sync)));
// If the file is in the uploads data
if(uploadsIndex >= 0) {
// Update the uploads data
files.splice(uploadsIndex, 1);
caches.open(cacheName).then(async (cache) => {
// Remove the file from the cache
cache.delete(`/uploads/${filename}`);
// Update the uploads data cache
cache.put('/uploads', jsonResponse({ success: true, data: files }));
});
}
// Return a JSON success response
return jsonResponse({ success: true });
}
Syncing: when the connection is back, we read the sync object from its cache and replay the pending uploads and deletions against the server:
// Get the sync data from cache
const syncRes = await caches.match(new Request(syncName), { cacheName: syncCacheName });
const sync = await syncRes.json();
// If there are pending files, send them to the server
if(sync.pending && sync.pending.length) {
sync.pending.forEach(async (file) => {
const url = `/uploads/${file}`;
const fileRes = await caches.match(url);
const data = await fileRes.blob();
fetch(url, {
method: 'PUT',
headers: {
'X-Filename': file,
'X-Syncing': 'syncing' // Tell SW fetch that we are synching so to ignore this fetch
},
body: data
}).catch(err => console.log('sync:pending:PUT:err', file, err));
});
}
// If there are deleted files, send delete requests to the server
if(sync.deleted && sync.deleted.length) {
sync.deleted.forEach(async (file) => {
const url = `/uploads/${file}`;
fetch(url, {
method: 'DELETE',
headers: {
'X-Filename': file,
'X-Syncing': 'syncing' // Tell SW fetch that we are synching so to ignore this fetch
}
}).catch(err => console.log('sync:deleted:DELETE:err', file, err));
});
}
// Update and reset the sync cache object
caches.open(syncCacheName).then(cache => cache.put(syncName, jsonResponse({
pending: [],
deleted: []
})));
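The answer does not show where this replay code is triggered from. A minimal sketch, assuming it is wrapped in a function with the made-up name syncWithServer, could hook it to the browser's online event:
// Hypothetical trigger for the replay code above; syncWithServer() is an assumed
// async wrapper around it, not part of the original answer.
window.addEventListener('online', () => {
  syncWithServer().catch(err => console.warn('sync:err', err));
});
// It could equally be wired to a button click, or run on a timer while navigator.onLine is true.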
I have created a PWA example that implements all of this, which you can find and test here. I have tested it using Chrome and Firefox, and with Firefox Android on a mobile device.
You can find the full source code of the application (including the express server) in this GitHub repository: https://github.com/clytras/pwa-sandbox.
Answer 1 (score: 4)
When the user selects files via an <input type="file"> element, we can get the selected files via fileInput.files. This gives us a FileList object, each item of which is a File object representing a selected file. FileList and File are supported by HTML5's Structured Clone Algorithm.
When adding items to an IndexedDB store, it creates a structured clone of the stored value. Since FileList and File objects are supported by the structured clone algorithm, this means that we can store these objects in IndexedDB directly.
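A minimal sketch of that idea; the database and store names used here (offline-forms, attachments) are made up for illustration:
// Store the FileList from a file input directly in IndexedDB (sketch only).
function saveAttachments(fileInput) {
  const open = indexedDB.open('offline-forms', 1);
  open.onupgradeneeded = () => open.result.createObjectStore('attachments');
  open.onsuccess = () => {
    const tx = open.result.transaction('attachments', 'readwrite');
    // The File objects inside fileInput.files are structured-cloneable,
    // so the FileList can be written as-is.
    tx.objectStore('attachments').put(fileInput.files, 'form-1');
    tx.oncomplete = () => console.log('Attachments saved for later upload');
  };
}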
To perform these file uploads once the user comes back online, you can use the Background Sync feature of service workers. Here's an introductory article on how to do that. There are plenty of other resources for it as well.
In order to be able to include file attachments in the request once your background sync code runs, you can use FormData. FormData allows adding File objects to the request that will be sent to your backend, and it is available from within the service worker context.
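Putting those pieces together, a rough sketch could look like this; the sync tag, the /api/attachments endpoint and the readAttachments helper are assumptions for illustration, not part of the original answer:
// Page: ask for a background sync once the form is saved offline (sketch).
navigator.serviceWorker.ready.then(reg => reg.sync.register('upload-attachments'));

// Service worker: when the sync fires, read the stored File objects back
// (readAttachments() is an assumed helper that returns them, e.g. from the
// hypothetical IndexedDB store above) and upload them with FormData.
self.addEventListener('sync', event => {
  if (event.tag === 'upload-attachments') {
    event.waitUntil(
      readAttachments().then(files => {
        const formData = new FormData();
        for (const file of files) {
          formData.append('attachments', file, file.name);
        }
        return fetch('/api/attachments', { method: 'POST', body: formData });
      })
    );
  }
});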
Answer 2 (score: 2)
The Cache API is designed to store requests (as keys) and responses (as values) in order to cache content coming from the server for web pages. Here we are talking about caching user input for future dispatch to the server. In other words, we are not trying to implement a cache, but rather a message broker, and that is not something the service worker specification currently addresses.
You can find this out for yourself by trying the following code:
HTML:
<button id="get">GET</button>
<button id="post">POST</button>
<button id="put">PUT</button>
<button id="patch">PATCH</button>
JavaScript:
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register('/service-worker.js', { scope: '/' }).then(function (reg) {
console.log('Registration succeeded. Scope is ' + reg.scope);
}).catch(function (error) {
console.log('Registration failed with ' + error);
});
}
document.getElementById('get').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html'));
});
document.getElementById('post').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'POST' }));
});
document.getElementById('put').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'PUT' }));
});
document.getElementById('patch').addEventListener('click', async function () {
console.log('Response: ', await fetch('50x.html', { method: 'PATCH' }));
});
Service worker:
self.addEventListener('fetch', function (event) {
event.respondWith(fetch(event.request).then(function (response) {
// Try to cache the request/response pair; this is the call that throws for non-GET requests
caches.open('v1').then(function (cache) {
cache.put(event.request, response.clone());
}).catch(e => console.error(e));
return response;
}));
});
Which throws:
TypeError: Request method 'POST' is unsupported
TypeError: Request method 'PUT' is unsupported
TypeError: Request method 'PATCH' is unsupported
Since the Cache API cannot be used, then following Source, IndexedDB is the best solution as a data store for in-flight requests. The implementation of the message broker is then the responsibility of the developer, and there is no single universal implementation that will cover all use cases. Many parameters will determine the solution:
- What should trigger the synchronization: window.navigator.onLine? A certain timeout? Something else?
- self.addEventListener('online', ...)? navigator.connection?

This is really very broad for a single StackOverflow answer.
That being said, here is a minimal working solution:
HTML:
<input id="file" type="file">
<button id="sync">SYNC</button>
<button id="get">GET</button>
JavaScript:
if ('serviceWorker' in navigator) {
navigator.serviceWorker.register('/service-worker.js', { scope: '/' }).then(function (reg) {
console.log('Registration succeeded. Scope is ' + reg.scope);
}).catch(function (error) {
console.log('Registration failed with ' + error);
});
}
document.getElementById('get').addEventListener('click', async function () {
fetch('api');
});
document.getElementById('file').addEventListener('change', function () {
fetch('api', { method: 'PUT', body: document.getElementById('file').files[0] });
});
document.getElementById('sync').addEventListener('click', async function () {
navigator.serviceWorker.controller.postMessage('sync');
});
Service worker:
self.importScripts('https://unpkg.com/idb@5.0.1/build/iife/index-min.js');
const { openDB, deleteDB, wrap, unwrap } = idb;
const dbPromise = openDB('put-store', 1, {
upgrade(db) {
db.createObjectStore('put');
},
});
const idbKeyval = {
async get(key) {
return (await dbPromise).get('put', key);
},
async set(key, val) {
return (await dbPromise).put('put', val, key);
},
async delete(key) {
return (await dbPromise).delete('put', key);
},
async clear() {
return (await dbPromise).clear('put');
},
async keys() {
return (await dbPromise).getAllKeys('put');
},
};
self.addEventListener('fetch', function (event) {
if (event.request.method === 'PUT') {
let body;
event.respondWith(event.request.blob().then(file => {
// Retrieve the body then clone the request, to avoid "body already used" errors
body = file;
return fetch(new Request(event.request.url, { method: event.request.method, body }));
}).then(response => handleResult(response, event, body)).catch(() => handleResult(null, event, body)));
} else if (event.request.method === 'GET') {
event.respondWith(fetch(event.request).then(response => {
return response.ok ? response : caches.match(event.request);
}).catch(() => caches.match(event.request)));
}
});
async function handleResult(response, event, body) {
const getRequest = new Request(event.request.url, { method: 'GET' });
const cache = await caches.open('v1');
await idbKeyval.set(event.request.method + '.' + event.request.url, { url: event.request.url, method: event.request.method, body });
const returnResponse = response && response.ok ? response : new Response(body);
cache.put(getRequest, returnResponse.clone());
return returnResponse;
}
// Function to call when the network is supposed to be available
async function sync() {
const keys = await idbKeyval.keys();
for (const key of keys) {
try {
const { url, method, body } = await idbKeyval.get(key);
const response = await fetch(url, { method, body });
if (response && response.ok)
await idbKeyval.delete(key);
}
catch (e) {
console.warn(`An error occurred while trying to sync the request: ${key}`, e);
}
}
}
self.addEventListener('message', sync);
A few words about this solution: it allows caching PUT requests for future GET requests, and it also stores the PUT requests in an IndexedDB database for future syncing. Regarding the keys, I was inspired by the way Angular serializes backend requests made on a server-side rendered page so that the browser-rendered page can reuse them. It uses <verb>.<url> as the key, assuming that a request will overwrite another request with the same verb and URL.
This solution also assumes that the backend does not return 204 No Content in response to the PUT request, but a 200 with the entity in the body.
Answer 3 (score: 0)
I also stumbled upon this recently. Here is what I do to store the upload in IndexedDB and return a response while offline.
const storeFileAndReturnResponse = async function (request, urlSearchParams) {
let requestClone = request.clone();
let formData = await requestClone.formData();
let tableStore = "fileUploads";
let fileList = [];
let formDataToStore = [];
//Use formData.entries to iterate the collection - this assumes you used input type=file
for (const pair of formData.entries()) {
let fileObjectUploaded = pair[1];
//content holds the arrayBuffer (blob) of the uploaded file
formDataToStore.push({
key: pair[0],
value: fileObjectUploaded,
content: await fileObjectUploaded.arrayBuffer(),
});
let fileName = fileObjectUploaded.name;
fileList.push({
fileName: fileName,
});
}
let payloadToStore = {
parentId: parentId, // parentId is assumed to be defined in the surrounding scope
fileList: fileList,
formDataKeyValue: formDataToStore,
};
// idbContext is assumed to be a promise resolving to an (idb-style) IndexedDB database
(await idbContext).put(tableStore, payloadToStore);
return {
UploadedFileList: fileList,
};
};
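A hedged sketch of how this helper might be called from a fetch handler; the /api/upload path and the JSON response shape are assumptions for illustration, not part of the original answer:
// Sketch: call the helper when an upload request fails while offline.
self.addEventListener('fetch', (event) => {
  const url = new URL(event.request.url);
  if (event.request.method === 'POST' && url.pathname === '/api/upload') {
    event.respondWith(
      fetch(event.request.clone()).catch(async () => {
        // Store the form data for later and answer the page with a JSON body
        // so it knows the upload was queued offline.
        const result = await storeFileAndReturnResponse(event.request, url.searchParams);
        return new Response(JSON.stringify(result), {
          headers: { 'Content-Type': 'application/json' }
        });
      })
    );
  }
});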