Mirror of https://github.com/idanoo/GoScrobble (synced 2025-07-01 13:42:20 +00:00)

Commit 7e38fdbd7d (parent 139e6a915e): 0.2.0 - Mid migration
42393 changed files with 5358157 additions and 62 deletions
27   web/node_modules/workbox-background-sync/BackgroundSyncPlugin.d.ts   (generated, vendored, normal file)
@@ -0,0 +1,27 @@
import { WorkboxPlugin } from 'workbox-core/types.js';
import { QueueOptions } from './Queue.js';
import './_version.js';
/**
 * A class implementing the `fetchDidFail` lifecycle callback. This makes it
 * easier to add failed requests to a background sync Queue.
 *
 * @memberof module:workbox-background-sync
 */
declare class BackgroundSyncPlugin implements WorkboxPlugin {
    private readonly _queue;
    /**
     * @param {string} name See the [Queue]{@link module:workbox-background-sync.Queue}
     * documentation for parameter details.
     * @param {Object} [options] See the
     * [Queue]{@link module:workbox-background-sync.Queue} documentation for
     * parameter details.
     */
    constructor(name: string, options: QueueOptions);
    /**
     * @param {Object} options
     * @param {Request} options.request
     * @private
     */
    fetchDidFail: WorkboxPlugin['fetchDidFail'];
}
export { BackgroundSyncPlugin };
36   web/node_modules/workbox-background-sync/BackgroundSyncPlugin.js   (generated, vendored, normal file)
@@ -0,0 +1,36 @@
/*
  Copyright 2018 Google LLC

  Use of this source code is governed by an MIT-style
  license that can be found in the LICENSE file or at
  https://opensource.org/licenses/MIT.
*/
import { Queue } from './Queue.js';
import './_version.js';
/**
 * A class implementing the `fetchDidFail` lifecycle callback. This makes it
 * easier to add failed requests to a background sync Queue.
 *
 * @memberof module:workbox-background-sync
 */
class BackgroundSyncPlugin {
    /**
     * @param {string} name See the [Queue]{@link module:workbox-background-sync.Queue}
     * documentation for parameter details.
     * @param {Object} [options] See the
     * [Queue]{@link module:workbox-background-sync.Queue} documentation for
     * parameter details.
     */
    constructor(name, options) {
        /**
         * @param {Object} options
         * @param {Request} options.request
         * @private
         */
        this.fetchDidFail = async ({ request }) => {
            await this._queue.pushRequest({ request });
        };
        this._queue = new Queue(name, options);
    }
}
export { BackgroundSyncPlugin };
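Context for the plugin above: in a Workbox service worker it is typically attached to a routing strategy so that failed requests are queued automatically. The sketch below is illustrative rather than part of this commit; the route pattern and the queue name 'apiQueue' are assumptions, and `registerRoute` / `NetworkOnly` come from the sibling workbox-routing and workbox-strategies packages, which this diff does not include.

// sw.js: minimal usage sketch (illustrative, not part of this commit)
import { BackgroundSyncPlugin } from 'workbox-background-sync';
import { registerRoute } from 'workbox-routing';
import { NetworkOnly } from 'workbox-strategies';

// Queue failed POSTs to /api/ and retry them for up to 24 hours (value is in minutes).
const bgSyncPlugin = new BackgroundSyncPlugin('apiQueue', {
    maxRetentionTime: 24 * 60,
});

registerRoute(
    ({ url }) => url.pathname.startsWith('/api/'),
    new NetworkOnly({ plugins: [bgSyncPlugin] }),
    'POST'
);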
1   web/node_modules/workbox-background-sync/BackgroundSyncPlugin.mjs   (generated, vendored, normal file)
@@ -0,0 +1 @@
export * from './BackgroundSyncPlugin.js';
19   web/node_modules/workbox-background-sync/LICENSE   (generated, vendored, normal file)
@@ -0,0 +1,19 @@
Copyright 2018 Google LLC

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
160   web/node_modules/workbox-background-sync/Queue.d.ts   (generated, vendored, normal file)
@@ -0,0 +1,160 @@
import './_version.js';
interface OnSyncCallbackOptions {
    queue: Queue;
}
interface OnSyncCallback {
    (options: OnSyncCallbackOptions): void | Promise<void>;
}
export interface QueueOptions {
    onSync?: OnSyncCallback;
    maxRetentionTime?: number;
}
interface QueueEntry {
    request: Request;
    timestamp?: number;
    metadata?: object;
}
/**
 * A class to manage storing failed requests in IndexedDB and retrying them
 * later. All parts of the storing and replaying process are observable via
 * callbacks.
 *
 * @memberof module:workbox-background-sync
 */
declare class Queue {
    private readonly _name;
    private readonly _onSync;
    private readonly _maxRetentionTime;
    private readonly _queueStore;
    private _syncInProgress;
    private _requestsAddedDuringSync;
    /**
     * Creates an instance of Queue with the given options
     *
     * @param {string} name The unique name for this queue. This name must be
     * unique as it's used to register sync events and store requests
     * in IndexedDB specific to this instance. An error will be thrown if
     * a duplicate name is detected.
     * @param {Object} [options]
     * @param {Function} [options.onSync] A function that gets invoked whenever
     * the 'sync' event fires. The function is invoked with an object
     * containing the `queue` property (referencing this instance), and you
     * can use the callback to customize the replay behavior of the queue.
     * When not set the `replayRequests()` method is called.
     * Note: if the replay fails after a sync event, make sure you throw an
     * error, so the browser knows to retry the sync event later.
     * @param {number} [options.maxRetentionTime=7 days] The amount of time (in
     * minutes) a request may be retried. After this amount of time has
     * passed, the request will be deleted from the queue.
     */
    constructor(name: string, { onSync, maxRetentionTime }?: QueueOptions);
    /**
     * @return {string}
     */
    get name(): string;
    /**
     * Stores the passed request in IndexedDB (with its timestamp and any
     * metadata) at the end of the queue.
     *
     * @param {Object} entry
     * @param {Request} entry.request The request to store in the queue.
     * @param {Object} [entry.metadata] Any metadata you want associated with the
     * stored request. When requests are replayed you'll have access to this
     * metadata object in case you need to modify the request beforehand.
     * @param {number} [entry.timestamp] The timestamp (Epoch time in
     * milliseconds) when the request was first added to the queue. This is
     * used along with `maxRetentionTime` to remove outdated requests. In
     * general you don't need to set this value, as it's automatically set
     * for you (defaulting to `Date.now()`), but you can update it if you
     * don't want particular requests to expire.
     */
    pushRequest(entry: QueueEntry): Promise<void>;
    /**
     * Stores the passed request in IndexedDB (with its timestamp and any
     * metadata) at the beginning of the queue.
     *
     * @param {Object} entry
     * @param {Request} entry.request The request to store in the queue.
     * @param {Object} [entry.metadata] Any metadata you want associated with the
     * stored request. When requests are replayed you'll have access to this
     * metadata object in case you need to modify the request beforehand.
     * @param {number} [entry.timestamp] The timestamp (Epoch time in
     * milliseconds) when the request was first added to the queue. This is
     * used along with `maxRetentionTime` to remove outdated requests. In
     * general you don't need to set this value, as it's automatically set
     * for you (defaulting to `Date.now()`), but you can update it if you
     * don't want particular requests to expire.
     */
    unshiftRequest(entry: QueueEntry): Promise<void>;
    /**
     * Removes and returns the last request in the queue (along with its
     * timestamp and any metadata). The returned object takes the form:
     * `{request, timestamp, metadata}`.
     *
     * @return {Promise<Object>}
     */
    popRequest(): Promise<QueueEntry | undefined>;
    /**
     * Removes and returns the first request in the queue (along with its
     * timestamp and any metadata). The returned object takes the form:
     * `{request, timestamp, metadata}`.
     *
     * @return {Promise<Object>}
     */
    shiftRequest(): Promise<QueueEntry | undefined>;
    /**
     * Returns all the entries that have not expired (per `maxRetentionTime`).
     * Any expired entries are removed from the queue.
     *
     * @return {Promise<Array<Object>>}
     */
    getAll(): Promise<QueueEntry[]>;
    /**
     * Adds the entry to the QueueStore and registers for a sync event.
     *
     * @param {Object} entry
     * @param {Request} entry.request
     * @param {Object} [entry.metadata]
     * @param {number} [entry.timestamp=Date.now()]
     * @param {string} operation ('push' or 'unshift')
     * @private
     */
    _addRequest({ request, metadata, timestamp, }: QueueEntry, operation: 'push' | 'unshift'): Promise<void>;
    /**
     * Removes and returns the first or last (depending on `operation`) entry
     * from the QueueStore that's not older than the `maxRetentionTime`.
     *
     * @param {string} operation ('pop' or 'shift')
     * @return {Object|undefined}
     * @private
     */
    _removeRequest(operation: 'pop' | 'shift'): Promise<QueueEntry | undefined>;
    /**
     * Loops through each request in the queue and attempts to re-fetch it.
     * If any request fails to re-fetch, it's put back in the same position in
     * the queue (which registers a retry for the next sync event).
     */
    replayRequests(): Promise<void>;
    /**
     * Registers a sync event with a tag unique to this instance.
     */
    registerSync(): Promise<void>;
    /**
     * In sync-supporting browsers, this adds a listener for the sync event.
     * In non-sync-supporting browsers, this will retry the queue on service
     * worker startup.
     *
     * @private
     */
    private _addSyncListener;
    /**
     * Returns the set of queue names. This is primarily used to reset the list
     * of queue names in tests.
     *
     * @return {Set}
     *
     * @private
     */
    static get _queueNames(): Set<unknown>;
}
export { Queue };
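The declarations above are the public surface of `Queue`. A minimal sketch of feeding it from a fetch handler follows; the queue name and the POST-only filter are illustrative assumptions, and the pattern simply exercises the `pushRequest()` contract documented above.

// sw.js: queueing failed POSTs by hand (illustrative sketch)
import { Queue } from 'workbox-background-sync';

const queue = new Queue('scrobbleQueue'); // name is an illustrative assumption

self.addEventListener('fetch', (event) => {
    if (event.request.method !== 'POST') {
        return; // let the browser handle everything else normally
    }
    event.respondWith((async () => {
        try {
            return await fetch(event.request.clone());
        } catch (error) {
            // Network failure: store the request so a later 'sync' can replay it.
            await queue.pushRequest({ request: event.request });
            return Response.error();
        }
    })());
});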
366   web/node_modules/workbox-background-sync/Queue.js   (generated, vendored, normal file)
@@ -0,0 +1,366 @@
|
|||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
import { WorkboxError } from 'workbox-core/_private/WorkboxError.js';
|
||||
import { logger } from 'workbox-core/_private/logger.js';
|
||||
import { assert } from 'workbox-core/_private/assert.js';
|
||||
import { getFriendlyURL } from 'workbox-core/_private/getFriendlyURL.js';
|
||||
import { QueueStore } from './lib/QueueStore.js';
|
||||
import { StorableRequest } from './lib/StorableRequest.js';
|
||||
import './_version.js';
|
||||
const TAG_PREFIX = 'workbox-background-sync';
|
||||
const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes
|
||||
const queueNames = new Set();
|
||||
/**
|
||||
* Converts a QueueStore entry into the format exposed by Queue. This entails
|
||||
* converting the request data into a real request and omitting the `id` and
|
||||
* `queueName` properties.
|
||||
*
|
||||
* @param {Object} queueStoreEntry
|
||||
* @return {Object}
|
||||
* @private
|
||||
*/
|
||||
const convertEntry = (queueStoreEntry) => {
|
||||
const queueEntry = {
|
||||
request: new StorableRequest(queueStoreEntry.requestData).toRequest(),
|
||||
timestamp: queueStoreEntry.timestamp,
|
||||
};
|
||||
if (queueStoreEntry.metadata) {
|
||||
queueEntry.metadata = queueStoreEntry.metadata;
|
||||
}
|
||||
return queueEntry;
|
||||
};
|
||||
/**
|
||||
* A class to manage storing failed requests in IndexedDB and retrying them
|
||||
* later. All parts of the storing and replaying process are observable via
|
||||
* callbacks.
|
||||
*
|
||||
* @memberof module:workbox-background-sync
|
||||
*/
|
||||
class Queue {
|
||||
/**
|
||||
* Creates an instance of Queue with the given options
|
||||
*
|
||||
* @param {string} name The unique name for this queue. This name must be
|
||||
* unique as it's used to register sync events and store requests
|
||||
* in IndexedDB specific to this instance. An error will be thrown if
|
||||
* a duplicate name is detected.
|
||||
* @param {Object} [options]
|
||||
* @param {Function} [options.onSync] A function that gets invoked whenever
|
||||
* the 'sync' event fires. The function is invoked with an object
|
||||
* containing the `queue` property (referencing this instance), and you
|
||||
* can use the callback to customize the replay behavior of the queue.
|
||||
* When not set the `replayRequests()` method is called.
|
||||
* Note: if the replay fails after a sync event, make sure you throw an
|
||||
* error, so the browser knows to retry the sync event later.
|
||||
* @param {number} [options.maxRetentionTime=7 days] The amount of time (in
|
||||
* minutes) a request may be retried. After this amount of time has
|
||||
* passed, the request will be deleted from the queue.
|
||||
*/
|
||||
constructor(name, { onSync, maxRetentionTime } = {}) {
|
||||
this._syncInProgress = false;
|
||||
this._requestsAddedDuringSync = false;
|
||||
// Ensure the store name is not already being used
|
||||
if (queueNames.has(name)) {
|
||||
throw new WorkboxError('duplicate-queue-name', { name });
|
||||
}
|
||||
else {
|
||||
queueNames.add(name);
|
||||
}
|
||||
this._name = name;
|
||||
this._onSync = onSync || this.replayRequests;
|
||||
this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
|
||||
this._queueStore = new QueueStore(this._name);
|
||||
this._addSyncListener();
|
||||
}
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
get name() {
|
||||
return this._name;
|
||||
}
|
||||
/**
|
||||
* Stores the passed request in IndexedDB (with its timestamp and any
|
||||
* metadata) at the end of the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request The request to store in the queue.
|
||||
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
||||
* stored request. When requests are replayed you'll have access to this
|
||||
* metadata object in case you need to modify the request beforehand.
|
||||
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
||||
* milliseconds) when the request was first added to the queue. This is
|
||||
* used along with `maxRetentionTime` to remove outdated requests. In
|
||||
* general you don't need to set this value, as it's automatically set
|
||||
* for you (defaulting to `Date.now()`), but you can update it if you
|
||||
* don't want particular requests to expire.
|
||||
*/
|
||||
async pushRequest(entry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'pushRequest',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert.isInstance(entry.request, Request, {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'pushRequest',
|
||||
paramName: 'entry.request',
|
||||
});
|
||||
}
|
||||
await this._addRequest(entry, 'push');
|
||||
}
|
||||
/**
|
||||
* Stores the passed request in IndexedDB (with its timestamp and any
|
||||
* metadata) at the beginning of the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request The request to store in the queue.
|
||||
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
||||
* stored request. When requests are replayed you'll have access to this
|
||||
* metadata object in case you need to modify the request beforehand.
|
||||
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
||||
* milliseconds) when the request was first added to the queue. This is
|
||||
* used along with `maxRetentionTime` to remove outdated requests. In
|
||||
* general you don't need to set this value, as it's automatically set
|
||||
* for you (defaulting to `Date.now()`), but you can update it if you
|
||||
* don't want particular requests to expire.
|
||||
*/
|
||||
async unshiftRequest(entry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'unshiftRequest',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert.isInstance(entry.request, Request, {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'unshiftRequest',
|
||||
paramName: 'entry.request',
|
||||
});
|
||||
}
|
||||
await this._addRequest(entry, 'unshift');
|
||||
}
|
||||
/**
|
||||
* Removes and returns the last request in the queue (along with its
|
||||
* timestamp and any metadata). The returned object takes the form:
|
||||
* `{request, timestamp, metadata}`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
*/
|
||||
async popRequest() {
|
||||
return this._removeRequest('pop');
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first request in the queue (along with its
|
||||
* timestamp and any metadata). The returned object takes the form:
|
||||
* `{request, timestamp, metadata}`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
*/
|
||||
async shiftRequest() {
|
||||
return this._removeRequest('shift');
|
||||
}
|
||||
/**
|
||||
* Returns all the entries that have not expired (per `maxRetentionTime`).
|
||||
* Any expired entries are removed from the queue.
|
||||
*
|
||||
* @return {Promise<Array<Object>>}
|
||||
*/
|
||||
async getAll() {
|
||||
const allEntries = await this._queueStore.getAll();
|
||||
const now = Date.now();
|
||||
const unexpiredEntries = [];
|
||||
for (const entry of allEntries) {
|
||||
// Ignore requests older than maxRetentionTime. Call this function
|
||||
// recursively until an unexpired request is found.
|
||||
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
||||
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
||||
await this._queueStore.deleteEntry(entry.id);
|
||||
}
|
||||
else {
|
||||
unexpiredEntries.push(convertEntry(entry));
|
||||
}
|
||||
}
|
||||
return unexpiredEntries;
|
||||
}
|
||||
/**
|
||||
* Adds the entry to the QueueStore and registers for a sync event.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request
|
||||
* @param {Object} [entry.metadata]
|
||||
* @param {number} [entry.timestamp=Date.now()]
|
||||
* @param {string} operation ('push' or 'unshift')
|
||||
* @private
|
||||
*/
|
||||
async _addRequest({ request, metadata, timestamp = Date.now(), }, operation) {
|
||||
const storableRequest = await StorableRequest.fromRequest(request.clone());
|
||||
const entry = {
|
||||
requestData: storableRequest.toObject(),
|
||||
timestamp,
|
||||
};
|
||||
// Only include metadata if it's present.
|
||||
if (metadata) {
|
||||
entry.metadata = metadata;
|
||||
}
|
||||
await this._queueStore[`${operation}Entry`](entry);
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Request for '${getFriendlyURL(request.url)}' has ` +
|
||||
`been added to background sync queue '${this._name}'.`);
|
||||
}
|
||||
// Don't register for a sync if we're in the middle of a sync. Instead,
|
||||
// we wait until the sync is complete and call register if
|
||||
// `this._requestsAddedDuringSync` is true.
|
||||
if (this._syncInProgress) {
|
||||
this._requestsAddedDuringSync = true;
|
||||
}
|
||||
else {
|
||||
await this.registerSync();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first or last (depending on `operation`) entry
|
||||
* from the QueueStore that's not older than the `maxRetentionTime`.
|
||||
*
|
||||
* @param {string} operation ('pop' or 'shift')
|
||||
* @return {Object|undefined}
|
||||
* @private
|
||||
*/
|
||||
async _removeRequest(operation) {
|
||||
const now = Date.now();
|
||||
const entry = await this._queueStore[`${operation}Entry`]();
|
||||
if (entry) {
|
||||
// Ignore requests older than maxRetentionTime. Call this function
|
||||
// recursively until an unexpired request is found.
|
||||
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
||||
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
||||
return this._removeRequest(operation);
|
||||
}
|
||||
return convertEntry(entry);
|
||||
}
|
||||
else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Loops through each request in the queue and attempts to re-fetch it.
|
||||
* If any request fails to re-fetch, it's put back in the same position in
|
||||
* the queue (which registers a retry for the next sync event).
|
||||
*/
|
||||
async replayRequests() {
|
||||
let entry;
|
||||
while (entry = await this.shiftRequest()) {
|
||||
try {
|
||||
await fetch(entry.request.clone());
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Request for '${getFriendlyURL(entry.request.url)}'` +
|
||||
`has been replayed in queue '${this._name}'`);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
await this.unshiftRequest(entry);
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Request for '${getFriendlyURL(entry.request.url)}'` +
|
||||
`failed to replay, putting it back in queue '${this._name}'`);
|
||||
}
|
||||
throw new WorkboxError('queue-replay-failed', { name: this._name });
|
||||
}
|
||||
}
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`All requests in queue '${this.name}' have successfully ` +
|
||||
`replayed; the queue is now empty!`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Registers a sync event with a tag unique to this instance.
|
||||
*/
|
||||
async registerSync() {
|
||||
if ('sync' in self.registration) {
|
||||
try {
|
||||
await self.registration.sync.register(`${TAG_PREFIX}:${this._name}`);
|
||||
}
|
||||
catch (err) {
|
||||
// This means the registration failed for some reason, possibly due to
|
||||
// the user disabling it.
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.warn(`Unable to register sync event for '${this._name}'.`, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* In sync-supporting browsers, this adds a listener for the sync event.
|
||||
* In non-sync-supporting browsers, this will retry the queue on service
|
||||
* worker startup.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
_addSyncListener() {
|
||||
if ('sync' in self.registration) {
|
||||
self.addEventListener('sync', (event) => {
|
||||
if (event.tag === `${TAG_PREFIX}:${this._name}`) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Background sync for tag '${event.tag}'` +
|
||||
`has been received`);
|
||||
}
|
||||
const syncComplete = async () => {
|
||||
this._syncInProgress = true;
|
||||
let syncError;
|
||||
try {
|
||||
await this._onSync({ queue: this });
|
||||
}
|
||||
catch (error) {
|
||||
syncError = error;
|
||||
// Rethrow the error. Note: the logic in the finally clause
|
||||
// will run before this gets rethrown.
|
||||
throw syncError;
|
||||
}
|
||||
finally {
|
||||
// New items may have been added to the queue during the sync,
|
||||
// so we need to register for a new sync if that's happened...
|
||||
// Unless there was an error during the sync, in which
|
||||
// case the browser will automatically retry later, as long
|
||||
// as `event.lastChance` is not true.
|
||||
if (this._requestsAddedDuringSync &&
|
||||
!(syncError && !event.lastChance)) {
|
||||
await this.registerSync();
|
||||
}
|
||||
this._syncInProgress = false;
|
||||
this._requestsAddedDuringSync = false;
|
||||
}
|
||||
};
|
||||
event.waitUntil(syncComplete());
|
||||
}
|
||||
});
|
||||
}
|
||||
else {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Background sync replaying without background sync event`);
|
||||
}
|
||||
// If the browser doesn't support background sync, retry
|
||||
// every time the service worker starts up as a fallback.
|
||||
this._onSync({ queue: this });
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns the set of queue names. This is primarily used to reset the list
|
||||
* of queue names in tests.
|
||||
*
|
||||
* @return {Set}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
static get _queueNames() {
|
||||
return queueNames;
|
||||
}
|
||||
}
|
||||
export { Queue };
|
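As the constructor documentation and `replayRequests()` above spell out, a custom `onSync` callback replaces the default replay behavior, and rethrowing on failure is what tells the browser to schedule another sync attempt. A minimal sketch of such a callback, with an illustrative queue name:

// sw.js: custom replay logic via onSync (illustrative sketch)
import { Queue } from 'workbox-background-sync';

const queue = new Queue('scrobbleQueue', {
    onSync: async ({ queue }) => {
        let entry;
        while ((entry = await queue.shiftRequest())) {
            try {
                await fetch(entry.request.clone());
            } catch (error) {
                // Put the entry back at the head and rethrow so the browser
                // retries the whole queue on a later 'sync' event.
                await queue.unshiftRequest(entry);
                throw error;
            }
        }
    },
});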
1   web/node_modules/workbox-background-sync/Queue.mjs   (generated, vendored, normal file)
@@ -0,0 +1 @@
export * from './Queue.js';
1   web/node_modules/workbox-background-sync/README.md   (generated, vendored, normal file)
@@ -0,0 +1 @@
This module's documentation can be found at https://developers.google.com/web/tools/workbox/modules/workbox-background-sync
0   web/node_modules/workbox-background-sync/_version.d.ts   (generated, vendored, normal file)
6   web/node_modules/workbox-background-sync/_version.js   (generated, vendored, normal file)
@@ -0,0 +1,6 @@
"use strict";
// @ts-ignore
try {
    self['workbox:background-sync:5.1.4'] && _();
}
catch (e) { }
1   web/node_modules/workbox-background-sync/_version.mjs   (generated, vendored, normal file)
@@ -0,0 +1 @@
try{self['workbox:background-sync:5.1.4']&&_()}catch(e){}// eslint-disable-line
818   web/node_modules/workbox-background-sync/build/workbox-background-sync.dev.js   (generated, vendored, normal file)
@@ -0,0 +1,818 @@
|
|||
this.workbox = this.workbox || {};
|
||||
this.workbox.backgroundSync = (function (exports, WorkboxError_js, logger_js, assert_js, getFriendlyURL_js, DBWrapper_js) {
|
||||
'use strict';
|
||||
|
||||
try {
|
||||
self['workbox:background-sync:5.1.4'] && _();
|
||||
} catch (e) {}
|
||||
|
||||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
const DB_VERSION = 3;
|
||||
const DB_NAME = 'workbox-background-sync';
|
||||
const OBJECT_STORE_NAME = 'requests';
|
||||
const INDEXED_PROP = 'queueName';
|
||||
/**
|
||||
* A class to manage storing requests from a Queue in IndexedDB,
|
||||
* indexed by their queue name for easier access.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
class QueueStore {
|
||||
/**
|
||||
* Associates this instance with a Queue instance, so entries added can be
|
||||
* identified by their queue name.
|
||||
*
|
||||
* @param {string} queueName
|
||||
* @private
|
||||
*/
|
||||
constructor(queueName) {
|
||||
this._queueName = queueName;
|
||||
this._db = new DBWrapper_js.DBWrapper(DB_NAME, DB_VERSION, {
|
||||
onupgradeneeded: this._upgradeDb
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Append an entry last in the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Object} entry.requestData
|
||||
* @param {number} [entry.timestamp]
|
||||
* @param {Object} [entry.metadata]
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async pushEntry(entry) {
|
||||
{
|
||||
assert_js.assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'pushEntry',
|
||||
paramName: 'entry'
|
||||
});
|
||||
assert_js.assert.isType(entry.requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'pushEntry',
|
||||
paramName: 'entry.requestData'
|
||||
});
|
||||
} // Don't specify an ID since one is automatically generated.
|
||||
|
||||
|
||||
delete entry.id;
|
||||
entry.queueName = this._queueName;
|
||||
await this._db.add(OBJECT_STORE_NAME, entry);
|
||||
}
|
||||
/**
|
||||
* Prepend an entry first in the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Object} entry.requestData
|
||||
* @param {number} [entry.timestamp]
|
||||
* @param {Object} [entry.metadata]
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async unshiftEntry(entry) {
|
||||
{
|
||||
assert_js.assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'unshiftEntry',
|
||||
paramName: 'entry'
|
||||
});
|
||||
assert_js.assert.isType(entry.requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'unshiftEntry',
|
||||
paramName: 'entry.requestData'
|
||||
});
|
||||
}
|
||||
|
||||
const [firstEntry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
count: 1
|
||||
});
|
||||
|
||||
if (firstEntry) {
|
||||
// Pick an ID one less than the lowest ID in the object store.
|
||||
entry.id = firstEntry.id - 1;
|
||||
} else {
|
||||
// Otherwise let the auto-incrementor assign the ID.
|
||||
delete entry.id;
|
||||
}
|
||||
|
||||
entry.queueName = this._queueName;
|
||||
await this._db.add(OBJECT_STORE_NAME, entry);
|
||||
}
|
||||
/**
|
||||
* Removes and returns the last entry in the queue matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async popEntry() {
|
||||
return this._removeEntry({
|
||||
direction: 'prev'
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first entry in the queue matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async shiftEntry() {
|
||||
return this._removeEntry({
|
||||
direction: 'next'
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Returns all entries in the store matching the `queueName`.
|
||||
*
|
||||
* @param {Object} options See {@link module:workbox-background-sync.Queue~getAll}
|
||||
* @return {Promise<Array<Object>>}
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async getAll() {
|
||||
return await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
index: INDEXED_PROP,
|
||||
query: IDBKeyRange.only(this._queueName)
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Deletes the entry for the given ID.
|
||||
*
|
||||
* WARNING: this method does not ensure the deleted enry belongs to this
|
||||
* queue (i.e. matches the `queueName`). But this limitation is acceptable
|
||||
* as this class is not publicly exposed. An additional check would make
|
||||
* this method slower than it needs to be.
|
||||
*
|
||||
* @private
|
||||
* @param {number} id
|
||||
*/
|
||||
|
||||
|
||||
async deleteEntry(id) {
|
||||
await this._db.delete(OBJECT_STORE_NAME, id);
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first or last entry in the queue (based on the
|
||||
* `direction` argument) matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async _removeEntry({
|
||||
direction
|
||||
}) {
|
||||
const [entry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
direction,
|
||||
index: INDEXED_PROP,
|
||||
query: IDBKeyRange.only(this._queueName),
|
||||
count: 1
|
||||
});
|
||||
|
||||
if (entry) {
|
||||
await this.deleteEntry(entry.id);
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Upgrades the database given an `upgradeneeded` event.
|
||||
*
|
||||
* @param {Event} event
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
_upgradeDb(event) {
|
||||
const db = event.target.result;
|
||||
|
||||
if (event.oldVersion > 0 && event.oldVersion < DB_VERSION) {
|
||||
if (db.objectStoreNames.contains(OBJECT_STORE_NAME)) {
|
||||
db.deleteObjectStore(OBJECT_STORE_NAME);
|
||||
}
|
||||
}
|
||||
|
||||
const objStore = db.createObjectStore(OBJECT_STORE_NAME, {
|
||||
autoIncrement: true,
|
||||
keyPath: 'id'
|
||||
});
|
||||
objStore.createIndex(INDEXED_PROP, INDEXED_PROP, {
|
||||
unique: false
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
const serializableProperties = ['method', 'referrer', 'referrerPolicy', 'mode', 'credentials', 'cache', 'redirect', 'integrity', 'keepalive'];
|
||||
/**
|
||||
* A class to make it easier to serialize and de-serialize requests so they
|
||||
* can be stored in IndexedDB.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
class StorableRequest {
|
||||
/**
|
||||
* Accepts an object of request data that can be used to construct a
|
||||
* `Request` but can also be stored in IndexedDB.
|
||||
*
|
||||
* @param {Object} requestData An object of request data that includes the
|
||||
* `url` plus any relevant properties of
|
||||
* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
|
||||
* @private
|
||||
*/
|
||||
constructor(requestData) {
|
||||
{
|
||||
assert_js.assert.isType(requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'StorableRequest',
|
||||
funcName: 'constructor',
|
||||
paramName: 'requestData'
|
||||
});
|
||||
assert_js.assert.isType(requestData.url, 'string', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'StorableRequest',
|
||||
funcName: 'constructor',
|
||||
paramName: 'requestData.url'
|
||||
});
|
||||
} // If the request's mode is `navigate`, convert it to `same-origin` since
|
||||
// navigation requests can't be constructed via script.
|
||||
|
||||
|
||||
if (requestData['mode'] === 'navigate') {
|
||||
requestData['mode'] = 'same-origin';
|
||||
}
|
||||
|
||||
this._requestData = requestData;
|
||||
}
|
||||
/**
|
||||
* Converts a Request object to a plain object that can be structured
|
||||
* cloned or JSON-stringified.
|
||||
*
|
||||
* @param {Request} request
|
||||
* @return {Promise<StorableRequest>}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
static async fromRequest(request) {
|
||||
const requestData = {
|
||||
url: request.url,
|
||||
headers: {}
|
||||
}; // Set the body if present.
|
||||
|
||||
if (request.method !== 'GET') {
|
||||
// Use ArrayBuffer to support non-text request bodies.
|
||||
// NOTE: we can't use Blobs becuse Safari doesn't support storing
|
||||
// Blobs in IndexedDB in some cases:
|
||||
// https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
|
||||
requestData.body = await request.clone().arrayBuffer();
|
||||
} // Convert the headers from an iterable to an object.
|
||||
|
||||
|
||||
for (const [key, value] of request.headers.entries()) {
|
||||
requestData.headers[key] = value;
|
||||
} // Add all other serializable request properties
|
||||
|
||||
|
||||
for (const prop of serializableProperties) {
|
||||
if (request[prop] !== undefined) {
|
||||
requestData[prop] = request[prop];
|
||||
}
|
||||
}
|
||||
|
||||
return new StorableRequest(requestData);
|
||||
}
|
||||
/**
|
||||
* Returns a deep clone of the instances `_requestData` object.
|
||||
*
|
||||
* @return {Object}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
toObject() {
|
||||
const requestData = Object.assign({}, this._requestData);
|
||||
requestData.headers = Object.assign({}, this._requestData.headers);
|
||||
|
||||
if (requestData.body) {
|
||||
requestData.body = requestData.body.slice(0);
|
||||
}
|
||||
|
||||
return requestData;
|
||||
}
|
||||
/**
|
||||
* Converts this instance to a Request.
|
||||
*
|
||||
* @return {Request}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
toRequest() {
|
||||
return new Request(this._requestData.url, this._requestData);
|
||||
}
|
||||
/**
|
||||
* Creates and returns a deep clone of the instance.
|
||||
*
|
||||
* @return {StorableRequest}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
clone() {
|
||||
return new StorableRequest(this.toObject());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
const TAG_PREFIX = 'workbox-background-sync';
|
||||
const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes
|
||||
|
||||
const queueNames = new Set();
|
||||
/**
|
||||
* Converts a QueueStore entry into the format exposed by Queue. This entails
|
||||
* converting the request data into a real request and omitting the `id` and
|
||||
* `queueName` properties.
|
||||
*
|
||||
* @param {Object} queueStoreEntry
|
||||
* @return {Object}
|
||||
* @private
|
||||
*/
|
||||
|
||||
const convertEntry = queueStoreEntry => {
|
||||
const queueEntry = {
|
||||
request: new StorableRequest(queueStoreEntry.requestData).toRequest(),
|
||||
timestamp: queueStoreEntry.timestamp
|
||||
};
|
||||
|
||||
if (queueStoreEntry.metadata) {
|
||||
queueEntry.metadata = queueStoreEntry.metadata;
|
||||
}
|
||||
|
||||
return queueEntry;
|
||||
};
|
||||
/**
|
||||
* A class to manage storing failed requests in IndexedDB and retrying them
|
||||
* later. All parts of the storing and replaying process are observable via
|
||||
* callbacks.
|
||||
*
|
||||
* @memberof module:workbox-background-sync
|
||||
*/
|
||||
|
||||
|
||||
class Queue {
|
||||
/**
|
||||
* Creates an instance of Queue with the given options
|
||||
*
|
||||
* @param {string} name The unique name for this queue. This name must be
|
||||
* unique as it's used to register sync events and store requests
|
||||
* in IndexedDB specific to this instance. An error will be thrown if
|
||||
* a duplicate name is detected.
|
||||
* @param {Object} [options]
|
||||
* @param {Function} [options.onSync] A function that gets invoked whenever
|
||||
* the 'sync' event fires. The function is invoked with an object
|
||||
* containing the `queue` property (referencing this instance), and you
|
||||
* can use the callback to customize the replay behavior of the queue.
|
||||
* When not set the `replayRequests()` method is called.
|
||||
* Note: if the replay fails after a sync event, make sure you throw an
|
||||
* error, so the browser knows to retry the sync event later.
|
||||
* @param {number} [options.maxRetentionTime=7 days] The amount of time (in
|
||||
* minutes) a request may be retried. After this amount of time has
|
||||
* passed, the request will be deleted from the queue.
|
||||
*/
|
||||
constructor(name, {
|
||||
onSync,
|
||||
maxRetentionTime
|
||||
} = {}) {
|
||||
this._syncInProgress = false;
|
||||
this._requestsAddedDuringSync = false; // Ensure the store name is not already being used
|
||||
|
||||
if (queueNames.has(name)) {
|
||||
throw new WorkboxError_js.WorkboxError('duplicate-queue-name', {
|
||||
name
|
||||
});
|
||||
} else {
|
||||
queueNames.add(name);
|
||||
}
|
||||
|
||||
this._name = name;
|
||||
this._onSync = onSync || this.replayRequests;
|
||||
this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
|
||||
this._queueStore = new QueueStore(this._name);
|
||||
|
||||
this._addSyncListener();
|
||||
}
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
|
||||
|
||||
get name() {
|
||||
return this._name;
|
||||
}
|
||||
/**
|
||||
* Stores the passed request in IndexedDB (with its timestamp and any
|
||||
* metadata) at the end of the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request The request to store in the queue.
|
||||
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
||||
* stored request. When requests are replayed you'll have access to this
|
||||
* metadata object in case you need to modify the request beforehand.
|
||||
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
||||
* milliseconds) when the request was first added to the queue. This is
|
||||
* used along with `maxRetentionTime` to remove outdated requests. In
|
||||
* general you don't need to set this value, as it's automatically set
|
||||
* for you (defaulting to `Date.now()`), but you can update it if you
|
||||
* don't want particular requests to expire.
|
||||
*/
|
||||
|
||||
|
||||
async pushRequest(entry) {
|
||||
{
|
||||
assert_js.assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'pushRequest',
|
||||
paramName: 'entry'
|
||||
});
|
||||
assert_js.assert.isInstance(entry.request, Request, {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'pushRequest',
|
||||
paramName: 'entry.request'
|
||||
});
|
||||
}
|
||||
|
||||
await this._addRequest(entry, 'push');
|
||||
}
|
||||
/**
|
||||
* Stores the passed request in IndexedDB (with its timestamp and any
|
||||
* metadata) at the beginning of the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request The request to store in the queue.
|
||||
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
||||
* stored request. When requests are replayed you'll have access to this
|
||||
* metadata object in case you need to modify the request beforehand.
|
||||
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
||||
* milliseconds) when the request was first added to the queue. This is
|
||||
* used along with `maxRetentionTime` to remove outdated requests. In
|
||||
* general you don't need to set this value, as it's automatically set
|
||||
* for you (defaulting to `Date.now()`), but you can update it if you
|
||||
* don't want particular requests to expire.
|
||||
*/
|
||||
|
||||
|
||||
async unshiftRequest(entry) {
|
||||
{
|
||||
assert_js.assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'unshiftRequest',
|
||||
paramName: 'entry'
|
||||
});
|
||||
assert_js.assert.isInstance(entry.request, Request, {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'unshiftRequest',
|
||||
paramName: 'entry.request'
|
||||
});
|
||||
}
|
||||
|
||||
await this._addRequest(entry, 'unshift');
|
||||
}
|
||||
/**
|
||||
* Removes and returns the last request in the queue (along with its
|
||||
* timestamp and any metadata). The returned object takes the form:
|
||||
* `{request, timestamp, metadata}`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
*/
|
||||
|
||||
|
||||
async popRequest() {
|
||||
return this._removeRequest('pop');
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first request in the queue (along with its
|
||||
* timestamp and any metadata). The returned object takes the form:
|
||||
* `{request, timestamp, metadata}`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
*/
|
||||
|
||||
|
||||
async shiftRequest() {
|
||||
return this._removeRequest('shift');
|
||||
}
|
||||
/**
|
||||
* Returns all the entries that have not expired (per `maxRetentionTime`).
|
||||
* Any expired entries are removed from the queue.
|
||||
*
|
||||
* @return {Promise<Array<Object>>}
|
||||
*/
|
||||
|
||||
|
||||
async getAll() {
|
||||
const allEntries = await this._queueStore.getAll();
|
||||
const now = Date.now();
|
||||
const unexpiredEntries = [];
|
||||
|
||||
for (const entry of allEntries) {
|
||||
// Ignore requests older than maxRetentionTime. Call this function
|
||||
// recursively until an unexpired request is found.
|
||||
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
||||
|
||||
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
||||
await this._queueStore.deleteEntry(entry.id);
|
||||
} else {
|
||||
unexpiredEntries.push(convertEntry(entry));
|
||||
}
|
||||
}
|
||||
|
||||
return unexpiredEntries;
|
||||
}
|
||||
/**
|
||||
* Adds the entry to the QueueStore and registers for a sync event.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request
|
||||
* @param {Object} [entry.metadata]
|
||||
* @param {number} [entry.timestamp=Date.now()]
|
||||
* @param {string} operation ('push' or 'unshift')
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async _addRequest({
|
||||
request,
|
||||
metadata,
|
||||
timestamp = Date.now()
|
||||
}, operation) {
|
||||
const storableRequest = await StorableRequest.fromRequest(request.clone());
|
||||
const entry = {
|
||||
requestData: storableRequest.toObject(),
|
||||
timestamp
|
||||
}; // Only include metadata if it's present.
|
||||
|
||||
if (metadata) {
|
||||
entry.metadata = metadata;
|
||||
}
|
||||
|
||||
await this._queueStore[`${operation}Entry`](entry);
|
||||
|
||||
{
|
||||
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(request.url)}' has ` + `been added to background sync queue '${this._name}'.`);
|
||||
} // Don't register for a sync if we're in the middle of a sync. Instead,
|
||||
// we wait until the sync is complete and call register if
|
||||
// `this._requestsAddedDuringSync` is true.
|
||||
|
||||
|
||||
if (this._syncInProgress) {
|
||||
this._requestsAddedDuringSync = true;
|
||||
} else {
|
||||
await this.registerSync();
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first or last (depending on `operation`) entry
|
||||
* from the QueueStore that's not older than the `maxRetentionTime`.
|
||||
*
|
||||
* @param {string} operation ('pop' or 'shift')
|
||||
* @return {Object|undefined}
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
async _removeRequest(operation) {
|
||||
const now = Date.now();
|
||||
const entry = await this._queueStore[`${operation}Entry`]();
|
||||
|
||||
if (entry) {
|
||||
// Ignore requests older than maxRetentionTime. Call this function
|
||||
// recursively until an unexpired request is found.
|
||||
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
||||
|
||||
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
||||
return this._removeRequest(operation);
|
||||
}
|
||||
|
||||
return convertEntry(entry);
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Loops through each request in the queue and attempts to re-fetch it.
|
||||
* If any request fails to re-fetch, it's put back in the same position in
|
||||
* the queue (which registers a retry for the next sync event).
|
||||
*/
|
||||
|
||||
|
||||
async replayRequests() {
|
||||
let entry;
|
||||
|
||||
while (entry = await this.shiftRequest()) {
|
||||
try {
|
||||
await fetch(entry.request.clone());
|
||||
|
||||
if ("dev" !== 'production') {
|
||||
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}'` + `has been replayed in queue '${this._name}'`);
|
||||
}
|
||||
} catch (error) {
|
||||
await this.unshiftRequest(entry);
|
||||
|
||||
{
|
||||
logger_js.logger.log(`Request for '${getFriendlyURL_js.getFriendlyURL(entry.request.url)}'` + `failed to replay, putting it back in queue '${this._name}'`);
|
||||
}
|
||||
|
||||
throw new WorkboxError_js.WorkboxError('queue-replay-failed', {
|
||||
name: this._name
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
logger_js.logger.log(`All requests in queue '${this.name}' have successfully ` + `replayed; the queue is now empty!`);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Registers a sync event with a tag unique to this instance.
|
||||
*/
|
||||
|
||||
|
||||
async registerSync() {
|
||||
if ('sync' in self.registration) {
|
||||
try {
|
||||
await self.registration.sync.register(`${TAG_PREFIX}:${this._name}`);
|
||||
} catch (err) {
|
||||
// This means the registration failed for some reason, possibly due to
|
||||
// the user disabling it.
|
||||
{
|
||||
logger_js.logger.warn(`Unable to register sync event for '${this._name}'.`, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* In sync-supporting browsers, this adds a listener for the sync event.
|
||||
* In non-sync-supporting browsers, this will retry the queue on service
|
||||
* worker startup.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
_addSyncListener() {
|
||||
if ('sync' in self.registration) {
|
||||
self.addEventListener('sync', event => {
|
||||
if (event.tag === `${TAG_PREFIX}:${this._name}`) {
|
||||
{
|
||||
logger_js.logger.log(`Background sync for tag '${event.tag}'` + `has been received`);
|
||||
}
|
||||
|
||||
const syncComplete = async () => {
|
||||
this._syncInProgress = true;
|
||||
let syncError;
|
||||
|
||||
try {
|
||||
await this._onSync({
|
||||
queue: this
|
||||
});
|
||||
} catch (error) {
|
||||
syncError = error; // Rethrow the error. Note: the logic in the finally clause
|
||||
// will run before this gets rethrown.
|
||||
|
||||
throw syncError;
|
||||
} finally {
|
||||
// New items may have been added to the queue during the sync,
|
||||
// so we need to register for a new sync if that's happened...
|
||||
// Unless there was an error during the sync, in which
|
||||
// case the browser will automatically retry later, as long
|
||||
// as `event.lastChance` is not true.
|
||||
if (this._requestsAddedDuringSync && !(syncError && !event.lastChance)) {
|
||||
await this.registerSync();
|
||||
}
|
||||
|
||||
this._syncInProgress = false;
|
||||
this._requestsAddedDuringSync = false;
|
||||
}
|
||||
};
|
||||
|
||||
event.waitUntil(syncComplete());
|
||||
}
|
||||
});
|
||||
} else {
|
||||
{
|
||||
logger_js.logger.log(`Background sync replaying without background sync event`);
|
||||
} // If the browser doesn't support background sync, retry
|
||||
// every time the service worker starts up as a fallback.
|
||||
|
||||
|
||||
this._onSync({
|
||||
queue: this
|
||||
});
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns the set of queue names. This is primarily used to reset the list
|
||||
* of queue names in tests.
|
||||
*
|
||||
* @return {Set}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
|
||||
|
||||
static get _queueNames() {
|
||||
return queueNames;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
/**
|
||||
* A class implementing the `fetchDidFail` lifecycle callback. This makes it
|
||||
* easier to add failed requests to a background sync Queue.
|
||||
*
|
||||
* @memberof module:workbox-background-sync
|
||||
*/
|
||||
|
||||
class BackgroundSyncPlugin {
|
||||
/**
|
||||
* @param {string} name See the [Queue]{@link module:workbox-background-sync.Queue}
|
||||
* documentation for parameter details.
|
||||
* @param {Object} [options] See the
|
||||
* [Queue]{@link module:workbox-background-sync.Queue} documentation for
|
||||
* parameter details.
|
||||
*/
|
||||
constructor(name, options) {
|
||||
/**
|
||||
* @param {Object} options
|
||||
* @param {Request} options.request
|
||||
* @private
|
||||
*/
|
||||
this.fetchDidFail = async ({
|
||||
request
|
||||
}) => {
|
||||
await this._queue.pushRequest({
|
||||
request
|
||||
});
|
||||
};
|
||||
|
||||
this._queue = new Queue(name, options);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
exports.BackgroundSyncPlugin = BackgroundSyncPlugin;
|
||||
exports.Queue = Queue;
|
||||
|
||||
return exports;
|
||||
|
||||
}({}, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private, workbox.core._private));
|
||||
//# sourceMappingURL=workbox-background-sync.dev.js.map
|
1   web/node_modules/workbox-background-sync/build/workbox-background-sync.dev.js.map   (generated, vendored, normal file)
File diff suppressed because one or more lines are too long
2   web/node_modules/workbox-background-sync/build/workbox-background-sync.prod.js   (generated, vendored, normal file)
@@ -0,0 +1,2 @@
this.workbox=this.workbox||{},this.workbox.backgroundSync=function(t,e,s,i,n){"use strict";try{self["workbox:background-sync:5.1.4"]&&_()}catch(t){}class a{constructor(t){this.t=t,this.s=new n.DBWrapper("workbox-background-sync",3,{onupgradeneeded:this.i})}async pushEntry(t){delete t.id,t.queueName=this.t,await this.s.add("requests",t)}async unshiftEntry(t){const[e]=await this.s.getAllMatching("requests",{count:1});e?t.id=e.id-1:delete t.id,t.queueName=this.t,await this.s.add("requests",t)}async popEntry(){return this.h({direction:"prev"})}async shiftEntry(){return this.h({direction:"next"})}async getAll(){return await this.s.getAllMatching("requests",{index:"queueName",query:IDBKeyRange.only(this.t)})}async deleteEntry(t){await this.s.delete("requests",t)}async h({direction:t}){const[e]=await this.s.getAllMatching("requests",{direction:t,index:"queueName",query:IDBKeyRange.only(this.t),count:1});if(e)return await this.deleteEntry(e.id),e}i(t){const e=t.target.result;t.oldVersion>0&&t.oldVersion<3&&e.objectStoreNames.contains("requests")&&e.deleteObjectStore("requests");e.createObjectStore("requests",{autoIncrement:!0,keyPath:"id"}).createIndex("queueName","queueName",{unique:!1})}}const r=["method","referrer","referrerPolicy","mode","credentials","cache","redirect","integrity","keepalive"];class c{constructor(t){"navigate"===t.mode&&(t.mode="same-origin"),this.u=t}static async fromRequest(t){const e={url:t.url,headers:{}};"GET"!==t.method&&(e.body=await t.clone().arrayBuffer());for(const[s,i]of t.headers.entries())e.headers[s]=i;for(const s of r)void 0!==t[s]&&(e[s]=t[s]);return new c(e)}toObject(){const t=Object.assign({},this.u);return t.headers=Object.assign({},this.u.headers),t.body&&(t.body=t.body.slice(0)),t}toRequest(){return new Request(this.u.url,this.u)}clone(){return new c(this.toObject())}}const h=new Set,u=t=>{const e={request:new c(t.requestData).toRequest(),timestamp:t.timestamp};return t.metadata&&(e.metadata=t.metadata),e};class o{constructor(t,{onSync:s,maxRetentionTime:i}={}){if(this.o=!1,this.q=!1,h.has(t))throw new e.WorkboxError("duplicate-queue-name",{name:t});h.add(t),this.l=t,this.m=s||this.replayRequests,this.p=i||10080,this.g=new a(this.l),this.R()}get name(){return this.l}async pushRequest(t){await this.k(t,"push")}async unshiftRequest(t){await this.k(t,"unshift")}async popRequest(){return this.D("pop")}async shiftRequest(){return this.D("shift")}async getAll(){const t=await this.g.getAll(),e=Date.now(),s=[];for(const i of t){const t=60*this.p*1e3;e-i.timestamp>t?await this.g.deleteEntry(i.id):s.push(u(i))}return s}async k({request:t,metadata:e,timestamp:s=Date.now()},i){const n={requestData:(await c.fromRequest(t.clone())).toObject(),timestamp:s};e&&(n.metadata=e),await this.g[i+"Entry"](n),this.o?this.q=!0:await this.registerSync()}async D(t){const e=Date.now(),s=await this.g[t+"Entry"]();if(s){const i=60*this.p*1e3;return e-s.timestamp>i?this.D(t):u(s)}}async replayRequests(){let t;for(;t=await this.shiftRequest();)try{await fetch(t.request.clone())}catch(s){throw await this.unshiftRequest(t),new e.WorkboxError("queue-replay-failed",{name:this.l})}}async registerSync(){if("sync"in self.registration)try{await self.registration.sync.register("workbox-background-sync:"+this.l)}catch(t){}}R(){"sync"in self.registration?self.addEventListener("sync",t=>{if(t.tag==="workbox-background-sync:"+this.l){const e=async()=>{let e;this.o=!0;try{await this.m({queue:this})}catch(t){throw e=t,e}finally{!this.q||e&&!t.lastChance||await 
this.registerSync(),this.o=!1,this.q=!1}};t.waitUntil(e())}}):this.m({queue:this})}static get _(){return h}}return t.BackgroundSyncPlugin=class{constructor(t,e){this.fetchDidFail=async({request:t})=>{await this.v.pushRequest({request:t})},this.v=new o(t,e)}},t.Queue=o,t}({},workbox.core._private,workbox.core._private,workbox.core._private,workbox.core._private);
//# sourceMappingURL=workbox-background-sync.prod.js.map
|
1
web/node_modules/workbox-background-sync/build/workbox-background-sync.prod.js.map
generated
vendored
Normal file
1
web/node_modules/workbox-background-sync/build/workbox-background-sync.prod.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
web/node_modules/workbox-background-sync/index.d.ts
generated
vendored
Normal file
7
web/node_modules/workbox-background-sync/index.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,7 @@
|
|||
import { Queue } from './Queue.js';
import { BackgroundSyncPlugin } from './BackgroundSyncPlugin.js';
import './_version.js';
/**
 * @module workbox-background-sync
 */
export { Queue, BackgroundSyncPlugin, };
|
14
web/node_modules/workbox-background-sync/index.js
generated
vendored
Normal file
14
web/node_modules/workbox-background-sync/index.js
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
/*
  Copyright 2018 Google LLC

  Use of this source code is governed by an MIT-style
  license that can be found in the LICENSE file or at
  https://opensource.org/licenses/MIT.
*/
import { Queue } from './Queue.js';
import { BackgroundSyncPlugin } from './BackgroundSyncPlugin.js';
import './_version.js';
/**
 * @module workbox-background-sync
 */
export { Queue, BackgroundSyncPlugin, };
|
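The two exports above are the whole public surface of this package. As a hedged usage sketch (assuming the matching workbox-routing and workbox-strategies 5.x packages, which are also vendored in this tree; the queue name and route pattern are invented for illustration), a service worker would typically attach the plugin to a network-only route so that POSTs which fail while offline get queued and replayed:

// sw.js (illustrative sketch, not part of this commit)
import {BackgroundSyncPlugin} from 'workbox-background-sync';
import {registerRoute} from 'workbox-routing';
import {NetworkOnly} from 'workbox-strategies';

// Keep failed submissions for up to 24 hours (value is in minutes).
const bgSyncPlugin = new BackgroundSyncPlugin('api-queue', {
  maxRetentionTime: 24 * 60,
});

// Any POST to /api/ that fails is added to the queue and replayed
// automatically when the browser fires the next 'sync' event.
registerRoute(
  ({url}) => url.pathname.startsWith('/api/'),
  new NetworkOnly({plugins: [bgSyncPlugin]}),
  'POST'
);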
1
web/node_modules/workbox-background-sync/index.mjs
generated
vendored
Normal file
1
web/node_modules/workbox-background-sync/index.mjs
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
export * from './index.js';
|
101
web/node_modules/workbox-background-sync/lib/QueueStore.d.ts
generated
vendored
Normal file
101
web/node_modules/workbox-background-sync/lib/QueueStore.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,101 @@
|
|||
import { RequestData } from './StorableRequest.js';
import '../_version.js';
export interface UnidentifiedQueueStoreEntry {
    requestData: RequestData;
    timestamp: number;
    id?: number;
    queueName?: string;
    metadata?: object;
}
export interface QueueStoreEntry extends UnidentifiedQueueStoreEntry {
    id: number;
}
/**
 * A class to manage storing requests from a Queue in IndexedDB,
 * indexed by their queue name for easier access.
 *
 * @private
 */
export declare class QueueStore {
    private readonly _queueName;
    private readonly _db;
    /**
     * Associates this instance with a Queue instance, so entries added can be
     * identified by their queue name.
     *
     * @param {string} queueName
     * @private
     */
    constructor(queueName: string);
    /**
     * Append an entry last in the queue.
     *
     * @param {Object} entry
     * @param {Object} entry.requestData
     * @param {number} [entry.timestamp]
     * @param {Object} [entry.metadata]
     * @private
     */
    pushEntry(entry: UnidentifiedQueueStoreEntry): Promise<void>;
    /**
     * Prepend an entry first in the queue.
     *
     * @param {Object} entry
     * @param {Object} entry.requestData
     * @param {number} [entry.timestamp]
     * @param {Object} [entry.metadata]
     * @private
     */
    unshiftEntry(entry: UnidentifiedQueueStoreEntry): Promise<void>;
    /**
     * Removes and returns the last entry in the queue matching the `queueName`.
     *
     * @return {Promise<Object>}
     * @private
     */
    popEntry(): Promise<QueueStoreEntry>;
    /**
     * Removes and returns the first entry in the queue matching the `queueName`.
     *
     * @return {Promise<Object>}
     * @private
     */
    shiftEntry(): Promise<QueueStoreEntry>;
    /**
     * Returns all entries in the store matching the `queueName`.
     *
     * @param {Object} options See {@link module:workbox-background-sync.Queue~getAll}
     * @return {Promise<Array<Object>>}
     * @private
     */
    getAll(): Promise<QueueStoreEntry[]>;
    /**
     * Deletes the entry for the given ID.
     *
     * WARNING: this method does not ensure the deleted enry belongs to this
     * queue (i.e. matches the `queueName`). But this limitation is acceptable
     * as this class is not publicly exposed. An additional check would make
     * this method slower than it needs to be.
     *
     * @private
     * @param {number} id
     */
    deleteEntry(id: number): Promise<void>;
    /**
     * Removes and returns the first or last entry in the queue (based on the
     * `direction` argument) matching the `queueName`.
     *
     * @return {Promise<Object>}
     * @private
     */
    _removeEntry({ direction }: {
        direction?: IDBCursorDirection;
    }): Promise<any>;
    /**
     * Upgrades the database given an `upgradeneeded` event.
     *
     * @param {Event} event
     * @private
     */
    private _upgradeDb;
}
|
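For orientation, an object satisfying `QueueStoreEntry` above is a plain record; a hypothetical queued POST would be persisted in IndexedDB roughly like this (all values invented for illustration):

// Illustrative QueueStoreEntry as it sits in the 'requests' object store.
const exampleEntry = {
  id: 7,                          // auto-incremented primary key
  queueName: 'api-queue',         // indexed, scopes reads to one Queue instance
  timestamp: 1609459200000,       // Date.now() at the moment the request failed
  metadata: {attempt: 1},         // optional, caller-supplied
  requestData: {                  // produced by StorableRequest.toObject()
    url: 'https://example.com/api/scrobble',
    method: 'POST',
    headers: {'content-type': 'application/json'},
    body: new ArrayBuffer(16),    // bodies are stored as ArrayBuffers
  },
};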
185
web/node_modules/workbox-background-sync/lib/QueueStore.js
generated
vendored
Normal file
185
web/node_modules/workbox-background-sync/lib/QueueStore.js
generated
vendored
Normal file
|
@ -0,0 +1,185 @@
|
|||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
import { assert } from 'workbox-core/_private/assert.js';
|
||||
import { DBWrapper } from 'workbox-core/_private/DBWrapper.js';
|
||||
import '../_version.js';
|
||||
const DB_VERSION = 3;
|
||||
const DB_NAME = 'workbox-background-sync';
|
||||
const OBJECT_STORE_NAME = 'requests';
|
||||
const INDEXED_PROP = 'queueName';
|
||||
/**
|
||||
* A class to manage storing requests from a Queue in IndexedDB,
|
||||
* indexed by their queue name for easier access.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
export class QueueStore {
|
||||
/**
|
||||
* Associates this instance with a Queue instance, so entries added can be
|
||||
* identified by their queue name.
|
||||
*
|
||||
* @param {string} queueName
|
||||
* @private
|
||||
*/
|
||||
constructor(queueName) {
|
||||
this._queueName = queueName;
|
||||
this._db = new DBWrapper(DB_NAME, DB_VERSION, {
|
||||
onupgradeneeded: this._upgradeDb,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Append an entry last in the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Object} entry.requestData
|
||||
* @param {number} [entry.timestamp]
|
||||
* @param {Object} [entry.metadata]
|
||||
* @private
|
||||
*/
|
||||
async pushEntry(entry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'pushEntry',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert.isType(entry.requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'pushEntry',
|
||||
paramName: 'entry.requestData',
|
||||
});
|
||||
}
|
||||
// Don't specify an ID since one is automatically generated.
|
||||
delete entry.id;
|
||||
entry.queueName = this._queueName;
|
||||
await this._db.add(OBJECT_STORE_NAME, entry);
|
||||
}
|
||||
/**
|
||||
* Prepend an entry first in the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Object} entry.requestData
|
||||
* @param {number} [entry.timestamp]
|
||||
* @param {Object} [entry.metadata]
|
||||
* @private
|
||||
*/
|
||||
async unshiftEntry(entry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'unshiftEntry',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert.isType(entry.requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'unshiftEntry',
|
||||
paramName: 'entry.requestData',
|
||||
});
|
||||
}
|
||||
const [firstEntry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
count: 1,
|
||||
});
|
||||
if (firstEntry) {
|
||||
// Pick an ID one less than the lowest ID in the object store.
|
||||
entry.id = firstEntry.id - 1;
|
||||
}
|
||||
else {
|
||||
// Otherwise let the auto-incrementor assign the ID.
|
||||
delete entry.id;
|
||||
}
|
||||
entry.queueName = this._queueName;
|
||||
await this._db.add(OBJECT_STORE_NAME, entry);
|
||||
}
|
||||
/**
|
||||
* Removes and returns the last entry in the queue matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
async popEntry() {
|
||||
return this._removeEntry({ direction: 'prev' });
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first entry in the queue matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
async shiftEntry() {
|
||||
return this._removeEntry({ direction: 'next' });
|
||||
}
|
||||
/**
|
||||
* Returns all entries in the store matching the `queueName`.
|
||||
*
|
||||
* @param {Object} options See {@link module:workbox-background-sync.Queue~getAll}
|
||||
* @return {Promise<Array<Object>>}
|
||||
* @private
|
||||
*/
|
||||
async getAll() {
|
||||
return await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
index: INDEXED_PROP,
|
||||
query: IDBKeyRange.only(this._queueName),
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Deletes the entry for the given ID.
|
||||
*
|
||||
* WARNING: this method does not ensure the deleted enry belongs to this
|
||||
* queue (i.e. matches the `queueName`). But this limitation is acceptable
|
||||
* as this class is not publicly exposed. An additional check would make
|
||||
* this method slower than it needs to be.
|
||||
*
|
||||
* @private
|
||||
* @param {number} id
|
||||
*/
|
||||
async deleteEntry(id) {
|
||||
await this._db.delete(OBJECT_STORE_NAME, id);
|
||||
}
|
||||
/**
|
||||
* Removes and returns the first or last entry in the queue (based on the
|
||||
* `direction` argument) matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
async _removeEntry({ direction }) {
|
||||
const [entry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
direction,
|
||||
index: INDEXED_PROP,
|
||||
query: IDBKeyRange.only(this._queueName),
|
||||
count: 1,
|
||||
});
|
||||
if (entry) {
|
||||
await this.deleteEntry(entry.id);
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Upgrades the database given an `upgradeneeded` event.
|
||||
*
|
||||
* @param {Event} event
|
||||
* @private
|
||||
*/
|
||||
_upgradeDb(event) {
|
||||
const db = event.target.result;
|
||||
if (event.oldVersion > 0 && event.oldVersion < DB_VERSION) {
|
||||
if (db.objectStoreNames.contains(OBJECT_STORE_NAME)) {
|
||||
db.deleteObjectStore(OBJECT_STORE_NAME);
|
||||
}
|
||||
}
|
||||
const objStore = db.createObjectStore(OBJECT_STORE_NAME, {
|
||||
autoIncrement: true,
|
||||
keyPath: 'id',
|
||||
});
|
||||
objStore.createIndex(INDEXED_PROP, INDEXED_PROP, { unique: false });
|
||||
}
|
||||
}
|
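The `_upgradeDb` method above is the whole on-disk schema: a single `requests` store keyed by an auto-incrementing `id`, with a non-unique `queueName` index. A minimal debugging sketch that reads that store with the raw IndexedDB API (database, store and index names come from the constants above; the queue name is a made-up example):

// List every entry queued under one queue name (read-only, for debugging).
function dumpQueue(queueName) {
  return new Promise((resolve, reject) => {
    const open = indexedDB.open('workbox-background-sync', 3);
    open.onerror = () => reject(open.error);
    open.onsuccess = () => {
      const store = open.result.transaction('requests').objectStore('requests');
      const query = store.index('queueName').getAll(IDBKeyRange.only(queueName));
      query.onsuccess = () => resolve(query.result);
      query.onerror = () => reject(query.error);
    };
  });
}

// e.g. in the service worker console: dumpQueue('api-queue').then(console.log);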
1
web/node_modules/workbox-background-sync/lib/QueueStore.mjs
generated
vendored
Normal file
1
web/node_modules/workbox-background-sync/lib/QueueStore.mjs
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
export * from './QueueStore.js';
|
63
web/node_modules/workbox-background-sync/lib/StorableRequest.d.ts
generated
vendored
Normal file
63
web/node_modules/workbox-background-sync/lib/StorableRequest.d.ts
generated
vendored
Normal file
|
@ -0,0 +1,63 @@
|
|||
import '../_version.js';
export interface RequestData {
    url: string;
    headers: {
        [headerName: string]: any;
    };
    body?: ArrayBuffer;
    [propName: string]: any;
}
/**
 * A class to make it easier to serialize and de-serialize requests so they
 * can be stored in IndexedDB.
 *
 * @private
 */
declare class StorableRequest {
    private readonly _requestData;
    /**
     * Converts a Request object to a plain object that can be structured
     * cloned or JSON-stringified.
     *
     * @param {Request} request
     * @return {Promise<StorableRequest>}
     *
     * @private
     */
    static fromRequest(request: Request): Promise<StorableRequest>;
    /**
     * Accepts an object of request data that can be used to construct a
     * `Request` but can also be stored in IndexedDB.
     *
     * @param {Object} requestData An object of request data that includes the
     * `url` plus any relevant properties of
     * [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
     * @private
     */
    constructor(requestData: RequestData);
    /**
     * Returns a deep clone of the instances `_requestData` object.
     *
     * @return {Object}
     *
     * @private
     */
    toObject(): RequestData;
    /**
     * Converts this instance to a Request.
     *
     * @return {Request}
     *
     * @private
     */
    toRequest(): Request;
    /**
     * Creates and returns a deep clone of the instance.
     *
     * @return {StorableRequest}
     *
     * @private
     */
    clone(): StorableRequest;
}
export { StorableRequest };
|
129
web/node_modules/workbox-background-sync/lib/StorableRequest.js
generated
vendored
Normal file
129
web/node_modules/workbox-background-sync/lib/StorableRequest.js
generated
vendored
Normal file
|
@ -0,0 +1,129 @@
|
|||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
import { assert } from 'workbox-core/_private/assert.js';
|
||||
import '../_version.js';
|
||||
const serializableProperties = [
|
||||
'method',
|
||||
'referrer',
|
||||
'referrerPolicy',
|
||||
'mode',
|
||||
'credentials',
|
||||
'cache',
|
||||
'redirect',
|
||||
'integrity',
|
||||
'keepalive',
|
||||
];
|
||||
/**
|
||||
* A class to make it easier to serialize and de-serialize requests so they
|
||||
* can be stored in IndexedDB.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class StorableRequest {
|
||||
/**
|
||||
* Accepts an object of request data that can be used to construct a
|
||||
* `Request` but can also be stored in IndexedDB.
|
||||
*
|
||||
* @param {Object} requestData An object of request data that includes the
|
||||
* `url` plus any relevant properties of
|
||||
* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
|
||||
* @private
|
||||
*/
|
||||
constructor(requestData) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert.isType(requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'StorableRequest',
|
||||
funcName: 'constructor',
|
||||
paramName: 'requestData',
|
||||
});
|
||||
assert.isType(requestData.url, 'string', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'StorableRequest',
|
||||
funcName: 'constructor',
|
||||
paramName: 'requestData.url',
|
||||
});
|
||||
}
|
||||
// If the request's mode is `navigate`, convert it to `same-origin` since
|
||||
// navigation requests can't be constructed via script.
|
||||
if (requestData['mode'] === 'navigate') {
|
||||
requestData['mode'] = 'same-origin';
|
||||
}
|
||||
this._requestData = requestData;
|
||||
}
|
||||
/**
|
||||
* Converts a Request object to a plain object that can be structured
|
||||
* cloned or JSON-stringified.
|
||||
*
|
||||
* @param {Request} request
|
||||
* @return {Promise<StorableRequest>}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
static async fromRequest(request) {
|
||||
const requestData = {
|
||||
url: request.url,
|
||||
headers: {},
|
||||
};
|
||||
// Set the body if present.
|
||||
if (request.method !== 'GET') {
|
||||
// Use ArrayBuffer to support non-text request bodies.
|
||||
// NOTE: we can't use Blobs becuse Safari doesn't support storing
|
||||
// Blobs in IndexedDB in some cases:
|
||||
// https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
|
||||
requestData.body = await request.clone().arrayBuffer();
|
||||
}
|
||||
// Convert the headers from an iterable to an object.
|
||||
for (const [key, value] of request.headers.entries()) {
|
||||
requestData.headers[key] = value;
|
||||
}
|
||||
// Add all other serializable request properties
|
||||
for (const prop of serializableProperties) {
|
||||
if (request[prop] !== undefined) {
|
||||
requestData[prop] = request[prop];
|
||||
}
|
||||
}
|
||||
return new StorableRequest(requestData);
|
||||
}
|
||||
/**
|
||||
* Returns a deep clone of the instances `_requestData` object.
|
||||
*
|
||||
* @return {Object}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
toObject() {
|
||||
const requestData = Object.assign({}, this._requestData);
|
||||
requestData.headers = Object.assign({}, this._requestData.headers);
|
||||
if (requestData.body) {
|
||||
requestData.body = requestData.body.slice(0);
|
||||
}
|
||||
return requestData;
|
||||
}
|
||||
/**
|
||||
* Converts this instance to a Request.
|
||||
*
|
||||
* @return {Request}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
toRequest() {
|
||||
return new Request(this._requestData.url, this._requestData);
|
||||
}
|
||||
/**
|
||||
* Creates and returns a deep clone of the instance.
|
||||
*
|
||||
* @return {StorableRequest}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
clone() {
|
||||
return new StorableRequest(this.toObject());
|
||||
}
|
||||
}
|
||||
export { StorableRequest };
|
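The round trip this class provides looks like the following sketch: turn a live Request into a structured-cloneable object, and later rebuild an equivalent Request from that object (the endpoint and body are invented; run inside an async function in a service worker):

// Serialize a failed POST, then reconstruct an equivalent Request for replay.
async function roundTripExample() {
  const original = new Request('https://example.com/api/scrobble', {
    method: 'POST',
    headers: {'content-type': 'application/json'},
    body: JSON.stringify({track: 'example'}),
  });

  const storable = await StorableRequest.fromRequest(original);
  const plain = storable.toObject();   // plain object, safe to put in IndexedDB
  const replay = new StorableRequest(plain).toRequest();
  return fetch(replay);                // hypothetical replay attempt
}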
1
web/node_modules/workbox-background-sync/lib/StorableRequest.mjs
generated
vendored
Normal file
1
web/node_modules/workbox-background-sync/lib/StorableRequest.mjs
generated
vendored
Normal file
|
@ -0,0 +1 @@
|
|||
export * from './StorableRequest.js';
|
35
web/node_modules/workbox-background-sync/package.json
generated
vendored
Normal file
35
web/node_modules/workbox-background-sync/package.json
generated
vendored
Normal file
|
@ -0,0 +1,35 @@
|
|||
{
  "name": "workbox-background-sync",
  "version": "5.1.4",
  "license": "MIT",
  "author": "Google's Web DevRel Team",
  "description": "Queues failed requests and uses the Background Sync API to replay them when the network is available",
  "repository": "googlechrome/workbox",
  "bugs": "https://github.com/googlechrome/workbox/issues",
  "homepage": "https://github.com/GoogleChrome/workbox",
  "keywords": [
    "workbox",
    "workboxjs",
    "service worker",
    "sw",
    "background",
    "sync",
    "workbox-plugin"
  ],
  "scripts": {
    "build": "gulp build-packages --package workbox-background-sync",
    "version": "npm run build",
    "prepare": "npm run build"
  },
  "workbox": {
    "browserNamespace": "workbox.backgroundSync",
    "packageType": "browser"
  },
  "main": "index.js",
  "module": "index.mjs",
  "types": "index.d.ts",
  "dependencies": {
    "workbox-core": "^5.1.4"
  },
  "gitHead": "a95b6fd489c2a66574f1655b2de3acd2ece35fb3"
}
|
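The `main`/`module`/`types` fields cover bundled service workers, while `workbox.browserNamespace` is how the CDN loader exposes this build. A hedged sketch of the non-bundled path (the CDN URL follows the documented workbox-cdn pattern for this version; the queue name is an example):

// Classic (importScripts-based) service worker using the browser namespace.
importScripts('https://storage.googleapis.com/workbox-cdn/releases/5.1.4/workbox-sw.js');

// Accessing workbox.backgroundSync makes the loader pull in this package's
// workbox-background-sync.prod.js build on demand.
const {Queue, BackgroundSyncPlugin} = workbox.backgroundSync;
const queue = new Queue('api-queue');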
43
web/node_modules/workbox-background-sync/src/BackgroundSyncPlugin.ts
generated
vendored
Normal file
43
web/node_modules/workbox-background-sync/src/BackgroundSyncPlugin.ts
generated
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
/*
  Copyright 2018 Google LLC

  Use of this source code is governed by an MIT-style
  license that can be found in the LICENSE file or at
  https://opensource.org/licenses/MIT.
*/

import {WorkboxPlugin} from 'workbox-core/types.js';
import {Queue, QueueOptions} from './Queue.js';
import './_version.js';

/**
 * A class implementing the `fetchDidFail` lifecycle callback. This makes it
 * easier to add failed requests to a background sync Queue.
 *
 * @memberof module:workbox-background-sync
 */
class BackgroundSyncPlugin implements WorkboxPlugin {
  private readonly _queue: Queue;

  /**
   * @param {string} name See the [Queue]{@link module:workbox-background-sync.Queue}
   * documentation for parameter details.
   * @param {Object} [options] See the
   * [Queue]{@link module:workbox-background-sync.Queue} documentation for
   * parameter details.
   */
  constructor(name: string, options: QueueOptions) {
    this._queue = new Queue(name, options);
  }

  /**
   * @param {Object} options
   * @param {Request} options.request
   * @private
   */
  fetchDidFail: WorkboxPlugin['fetchDidFail'] = async ({request}) => {
    await this._queue.pushRequest({request});
  }
}

export {BackgroundSyncPlugin};
|
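As the source shows, the plugin is a thin wrapper: its only job is to forward `fetchDidFail` to `Queue.pushRequest()`. It is roughly equivalent to handing a strategy an inline plugin like the sketch below (queue name invented), which can be handy when several routes should share one queue:

import {Queue} from 'workbox-background-sync';

const sharedQueue = new Queue('api-queue');

// Hand-rolled equivalent of BackgroundSyncPlugin for a shared queue.
const inlineBackgroundSync = {
  fetchDidFail: async ({request}) => {
    await sharedQueue.pushRequest({request});
  },
};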
432
web/node_modules/workbox-background-sync/src/Queue.ts
generated
vendored
Normal file
432
web/node_modules/workbox-background-sync/src/Queue.ts
generated
vendored
Normal file
|
@ -0,0 +1,432 @@
|
|||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
|
||||
import {WorkboxError} from 'workbox-core/_private/WorkboxError.js';
|
||||
import {logger} from 'workbox-core/_private/logger.js';
|
||||
import {assert} from 'workbox-core/_private/assert.js';
|
||||
import {getFriendlyURL} from 'workbox-core/_private/getFriendlyURL.js';
|
||||
import {UnidentifiedQueueStoreEntry, QueueStore} from './lib/QueueStore.js';
|
||||
import {StorableRequest} from './lib/StorableRequest.js';
|
||||
import './_version.js';
|
||||
|
||||
|
||||
// Give TypeScript the correct global.
|
||||
declare let self: ServiceWorkerGlobalScope;
|
||||
|
||||
interface OnSyncCallbackOptions {
|
||||
queue: Queue;
|
||||
}
|
||||
|
||||
interface OnSyncCallback {
|
||||
(options: OnSyncCallbackOptions): void|Promise<void>;
|
||||
}
|
||||
|
||||
export interface QueueOptions {
|
||||
onSync?: OnSyncCallback;
|
||||
maxRetentionTime?: number;
|
||||
}
|
||||
|
||||
interface QueueEntry {
|
||||
request: Request;
|
||||
timestamp?: number;
|
||||
metadata?: object;
|
||||
}
|
||||
|
||||
const TAG_PREFIX = 'workbox-background-sync';
|
||||
const MAX_RETENTION_TIME = 60 * 24 * 7; // 7 days in minutes
|
||||
|
||||
const queueNames = new Set();
|
||||
|
||||
/**
|
||||
* Converts a QueueStore entry into the format exposed by Queue. This entails
|
||||
* converting the request data into a real request and omitting the `id` and
|
||||
* `queueName` properties.
|
||||
*
|
||||
* @param {Object} queueStoreEntry
|
||||
* @return {Object}
|
||||
* @private
|
||||
*/
|
||||
const convertEntry = (queueStoreEntry: UnidentifiedQueueStoreEntry): QueueEntry => {
|
||||
const queueEntry: QueueEntry = {
|
||||
request: new StorableRequest(queueStoreEntry.requestData).toRequest(),
|
||||
timestamp: queueStoreEntry.timestamp,
|
||||
};
|
||||
if (queueStoreEntry.metadata) {
|
||||
queueEntry.metadata = queueStoreEntry.metadata;
|
||||
}
|
||||
return queueEntry;
|
||||
};
|
||||
|
||||
/**
|
||||
* A class to manage storing failed requests in IndexedDB and retrying them
|
||||
* later. All parts of the storing and replaying process are observable via
|
||||
* callbacks.
|
||||
*
|
||||
* @memberof module:workbox-background-sync
|
||||
*/
|
||||
class Queue {
|
||||
private readonly _name: string;
|
||||
private readonly _onSync: OnSyncCallback;
|
||||
private readonly _maxRetentionTime: number;
|
||||
private readonly _queueStore: QueueStore;
|
||||
private _syncInProgress = false;
|
||||
private _requestsAddedDuringSync = false;
|
||||
|
||||
/**
|
||||
* Creates an instance of Queue with the given options
|
||||
*
|
||||
* @param {string} name The unique name for this queue. This name must be
|
||||
* unique as it's used to register sync events and store requests
|
||||
* in IndexedDB specific to this instance. An error will be thrown if
|
||||
* a duplicate name is detected.
|
||||
* @param {Object} [options]
|
||||
* @param {Function} [options.onSync] A function that gets invoked whenever
|
||||
* the 'sync' event fires. The function is invoked with an object
|
||||
* containing the `queue` property (referencing this instance), and you
|
||||
* can use the callback to customize the replay behavior of the queue.
|
||||
* When not set the `replayRequests()` method is called.
|
||||
* Note: if the replay fails after a sync event, make sure you throw an
|
||||
* error, so the browser knows to retry the sync event later.
|
||||
* @param {number} [options.maxRetentionTime=7 days] The amount of time (in
|
||||
* minutes) a request may be retried. After this amount of time has
|
||||
* passed, the request will be deleted from the queue.
|
||||
*/
|
||||
constructor(name: string, {
|
||||
onSync,
|
||||
maxRetentionTime
|
||||
}: QueueOptions = {}) {
|
||||
// Ensure the store name is not already being used
|
||||
if (queueNames.has(name)) {
|
||||
throw new WorkboxError('duplicate-queue-name', {name});
|
||||
} else {
|
||||
queueNames.add(name);
|
||||
}
|
||||
|
||||
this._name = name;
|
||||
this._onSync = onSync || this.replayRequests;
|
||||
this._maxRetentionTime = maxRetentionTime || MAX_RETENTION_TIME;
|
||||
this._queueStore = new QueueStore(this._name);
|
||||
|
||||
this._addSyncListener();
|
||||
}
|
||||
|
||||
/**
|
||||
* @return {string}
|
||||
*/
|
||||
get name() {
|
||||
return this._name;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores the passed request in IndexedDB (with its timestamp and any
|
||||
* metadata) at the end of the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request The request to store in the queue.
|
||||
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
||||
* stored request. When requests are replayed you'll have access to this
|
||||
* metadata object in case you need to modify the request beforehand.
|
||||
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
||||
* milliseconds) when the request was first added to the queue. This is
|
||||
* used along with `maxRetentionTime` to remove outdated requests. In
|
||||
* general you don't need to set this value, as it's automatically set
|
||||
* for you (defaulting to `Date.now()`), but you can update it if you
|
||||
* don't want particular requests to expire.
|
||||
*/
|
||||
async pushRequest(entry: QueueEntry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert!.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'pushRequest',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert!.isInstance(entry.request, Request, {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'pushRequest',
|
||||
paramName: 'entry.request',
|
||||
});
|
||||
}
|
||||
|
||||
await this._addRequest(entry, 'push');
|
||||
}
|
||||
|
||||
/**
|
||||
* Stores the passed request in IndexedDB (with its timestamp and any
|
||||
* metadata) at the beginning of the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request The request to store in the queue.
|
||||
* @param {Object} [entry.metadata] Any metadata you want associated with the
|
||||
* stored request. When requests are replayed you'll have access to this
|
||||
* metadata object in case you need to modify the request beforehand.
|
||||
* @param {number} [entry.timestamp] The timestamp (Epoch time in
|
||||
* milliseconds) when the request was first added to the queue. This is
|
||||
* used along with `maxRetentionTime` to remove outdated requests. In
|
||||
* general you don't need to set this value, as it's automatically set
|
||||
* for you (defaulting to `Date.now()`), but you can update it if you
|
||||
* don't want particular requests to expire.
|
||||
*/
|
||||
async unshiftRequest(entry: QueueEntry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert!.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'unshiftRequest',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert!.isInstance(entry.request, Request, {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'Queue',
|
||||
funcName: 'unshiftRequest',
|
||||
paramName: 'entry.request',
|
||||
});
|
||||
}
|
||||
|
||||
await this._addRequest(entry, 'unshift');
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes and returns the last request in the queue (along with its
|
||||
* timestamp and any metadata). The returned object takes the form:
|
||||
* `{request, timestamp, metadata}`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
*/
|
||||
async popRequest() {
|
||||
return this._removeRequest('pop');
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes and returns the first request in the queue (along with its
|
||||
* timestamp and any metadata). The returned object takes the form:
|
||||
* `{request, timestamp, metadata}`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
*/
|
||||
async shiftRequest() {
|
||||
return this._removeRequest('shift');
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all the entries that have not expired (per `maxRetentionTime`).
|
||||
* Any expired entries are removed from the queue.
|
||||
*
|
||||
* @return {Promise<Array<Object>>}
|
||||
*/
|
||||
async getAll() {
|
||||
const allEntries = await this._queueStore.getAll();
|
||||
const now = Date.now();
|
||||
|
||||
const unexpiredEntries = [];
|
||||
for (const entry of allEntries) {
|
||||
// Ignore requests older than maxRetentionTime. Call this function
|
||||
// recursively until an unexpired request is found.
|
||||
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
||||
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
||||
await this._queueStore.deleteEntry(entry.id);
|
||||
} else {
|
||||
unexpiredEntries.push(convertEntry(entry));
|
||||
}
|
||||
}
|
||||
|
||||
return unexpiredEntries;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Adds the entry to the QueueStore and registers for a sync event.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Request} entry.request
|
||||
* @param {Object} [entry.metadata]
|
||||
* @param {number} [entry.timestamp=Date.now()]
|
||||
* @param {string} operation ('push' or 'unshift')
|
||||
* @private
|
||||
*/
|
||||
async _addRequest({
|
||||
request,
|
||||
metadata,
|
||||
timestamp = Date.now(),
|
||||
}: QueueEntry, operation: 'push' | 'unshift') {
|
||||
const storableRequest = await StorableRequest.fromRequest(request.clone());
|
||||
const entry: UnidentifiedQueueStoreEntry = {
|
||||
requestData: storableRequest.toObject(),
|
||||
timestamp,
|
||||
};
|
||||
|
||||
// Only include metadata if it's present.
|
||||
if (metadata) {
|
||||
entry.metadata = metadata;
|
||||
}
|
||||
|
||||
await this._queueStore[
|
||||
`${operation}Entry` as 'pushEntry' | 'unshiftEntry'](entry);
|
||||
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Request for '${getFriendlyURL(request.url)}' has ` +
|
||||
`been added to background sync queue '${this._name}'.`);
|
||||
}
|
||||
|
||||
// Don't register for a sync if we're in the middle of a sync. Instead,
|
||||
// we wait until the sync is complete and call register if
|
||||
// `this._requestsAddedDuringSync` is true.
|
||||
if (this._syncInProgress) {
|
||||
this._requestsAddedDuringSync = true;
|
||||
} else {
|
||||
await this.registerSync();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes and returns the first or last (depending on `operation`) entry
|
||||
* from the QueueStore that's not older than the `maxRetentionTime`.
|
||||
*
|
||||
* @param {string} operation ('pop' or 'shift')
|
||||
* @return {Object|undefined}
|
||||
* @private
|
||||
*/
|
||||
async _removeRequest(operation: 'pop' | 'shift'): Promise<QueueEntry | undefined> {
|
||||
const now = Date.now();
|
||||
const entry = await this._queueStore[
|
||||
`${operation}Entry` as 'popEntry' | 'shiftEntry']();
|
||||
|
||||
if (entry) {
|
||||
// Ignore requests older than maxRetentionTime. Call this function
|
||||
// recursively until an unexpired request is found.
|
||||
const maxRetentionTimeInMs = this._maxRetentionTime * 60 * 1000;
|
||||
if (now - entry.timestamp > maxRetentionTimeInMs) {
|
||||
return this._removeRequest(operation);
|
||||
}
|
||||
|
||||
return convertEntry(entry);
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Loops through each request in the queue and attempts to re-fetch it.
|
||||
* If any request fails to re-fetch, it's put back in the same position in
|
||||
* the queue (which registers a retry for the next sync event).
|
||||
*/
|
||||
async replayRequests() {
|
||||
let entry;
|
||||
while (entry = await this.shiftRequest()) {
|
||||
try {
|
||||
await fetch(entry.request.clone());
|
||||
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Request for '${getFriendlyURL(entry.request.url)}'` +
|
||||
`has been replayed in queue '${this._name}'`);
|
||||
}
|
||||
} catch (error) {
|
||||
await this.unshiftRequest(entry);
|
||||
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Request for '${getFriendlyURL(entry.request.url)}'` +
|
||||
`failed to replay, putting it back in queue '${this._name}'`);
|
||||
}
|
||||
throw new WorkboxError('queue-replay-failed', {name: this._name});
|
||||
}
|
||||
}
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`All requests in queue '${this.name}' have successfully ` +
|
||||
`replayed; the queue is now empty!`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a sync event with a tag unique to this instance.
|
||||
*/
|
||||
async registerSync() {
|
||||
if ('sync' in self.registration) {
|
||||
try {
|
||||
await self.registration.sync.register(`${TAG_PREFIX}:${this._name}`);
|
||||
} catch (err) {
|
||||
// This means the registration failed for some reason, possibly due to
|
||||
// the user disabling it.
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.warn(
|
||||
`Unable to register sync event for '${this._name}'.`, err);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* In sync-supporting browsers, this adds a listener for the sync event.
|
||||
* In non-sync-supporting browsers, this will retry the queue on service
|
||||
* worker startup.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
private _addSyncListener() {
|
||||
if ('sync' in self.registration) {
|
||||
self.addEventListener('sync', (event: SyncEvent) => {
|
||||
if (event.tag === `${TAG_PREFIX}:${this._name}`) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Background sync for tag '${event.tag}'` +
|
||||
`has been received`);
|
||||
}
|
||||
|
||||
const syncComplete = async () => {
|
||||
this._syncInProgress = true;
|
||||
|
||||
let syncError;
|
||||
try {
|
||||
await this._onSync({queue: this});
|
||||
} catch (error) {
|
||||
syncError = error;
|
||||
|
||||
// Rethrow the error. Note: the logic in the finally clause
|
||||
// will run before this gets rethrown.
|
||||
throw syncError;
|
||||
} finally {
|
||||
// New items may have been added to the queue during the sync,
|
||||
// so we need to register for a new sync if that's happened...
|
||||
// Unless there was an error during the sync, in which
|
||||
// case the browser will automatically retry later, as long
|
||||
// as `event.lastChance` is not true.
|
||||
if (this._requestsAddedDuringSync &&
|
||||
!(syncError && !event.lastChance)) {
|
||||
await this.registerSync();
|
||||
}
|
||||
|
||||
this._syncInProgress = false;
|
||||
this._requestsAddedDuringSync = false;
|
||||
}
|
||||
};
|
||||
event.waitUntil(syncComplete());
|
||||
}
|
||||
});
|
||||
} else {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
logger.log(`Background sync replaying without background sync event`);
|
||||
}
|
||||
// If the browser doesn't support background sync, retry
|
||||
// every time the service worker starts up as a fallback.
|
||||
this._onSync({queue: this});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the set of queue names. This is primarily used to reset the list
|
||||
* of queue names in tests.
|
||||
*
|
||||
* @return {Set}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
static get _queueNames() {
|
||||
return queueNames;
|
||||
}
|
||||
}
|
||||
|
||||
export {Queue};
|
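Used directly, the class above boils down to three moves: construct a named Queue, push the requests that failed, and optionally override `onSync` to control replay. A hedged end-to-end sketch (endpoint, queue name and retention period are invented):

import {Queue} from 'workbox-background-sync';

const queue = new Queue('api-queue', {
  maxRetentionTime: 48 * 60,               // minutes; drop entries after 2 days
  onSync: async ({queue}) => {
    let entry;
    while ((entry = await queue.shiftRequest())) {
      try {
        await fetch(entry.request.clone());
      } catch (err) {
        // Put it back at the front and rethrow so the browser retries later.
        await queue.unshiftRequest(entry);
        throw err;
      }
    }
  },
});

self.addEventListener('fetch', (event) => {
  if (event.request.method !== 'POST') {
    return;
  }
  event.respondWith(
    fetch(event.request.clone()).catch(async () => {
      await queue.pushRequest({request: event.request});
      return new Response('Queued for background sync', {status: 503});
    })
  );
});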
2
web/node_modules/workbox-background-sync/src/_version.ts
generated
vendored
Normal file
2
web/node_modules/workbox-background-sync/src/_version.ts
generated
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
// @ts-ignore
try{self['workbox:background-sync:5.1.4']&&_()}catch(e){}
|
21
web/node_modules/workbox-background-sync/src/index.ts
generated
vendored
Normal file
21
web/node_modules/workbox-background-sync/src/index.ts
generated
vendored
Normal file
|
@ -0,0 +1,21 @@
|
|||
/*
  Copyright 2018 Google LLC

  Use of this source code is governed by an MIT-style
  license that can be found in the LICENSE file or at
  https://opensource.org/licenses/MIT.
*/

import {Queue} from './Queue.js';
import {BackgroundSyncPlugin} from './BackgroundSyncPlugin.js';
import './_version.js';


/**
 * @module workbox-background-sync
 */

export {
  Queue,
  BackgroundSyncPlugin,
};
|
220
web/node_modules/workbox-background-sync/src/lib/QueueStore.ts
generated
vendored
Normal file
220
web/node_modules/workbox-background-sync/src/lib/QueueStore.ts
generated
vendored
Normal file
|
@ -0,0 +1,220 @@
|
|||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
|
||||
import {assert} from 'workbox-core/_private/assert.js';
|
||||
import {DBWrapper} from 'workbox-core/_private/DBWrapper.js';
|
||||
import {RequestData} from './StorableRequest.js';
|
||||
import '../_version.js';
|
||||
|
||||
|
||||
const DB_VERSION = 3;
|
||||
const DB_NAME = 'workbox-background-sync';
|
||||
const OBJECT_STORE_NAME = 'requests';
|
||||
const INDEXED_PROP = 'queueName';
|
||||
|
||||
export interface UnidentifiedQueueStoreEntry {
|
||||
requestData: RequestData;
|
||||
timestamp: number;
|
||||
id?: number;
|
||||
queueName?: string;
|
||||
metadata?: object;
|
||||
}
|
||||
|
||||
export interface QueueStoreEntry extends UnidentifiedQueueStoreEntry {
|
||||
id: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* A class to manage storing requests from a Queue in IndexedDB,
|
||||
* indexed by their queue name for easier access.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
export class QueueStore {
|
||||
private readonly _queueName: string;
|
||||
private readonly _db: DBWrapper;
|
||||
|
||||
/**
|
||||
* Associates this instance with a Queue instance, so entries added can be
|
||||
* identified by their queue name.
|
||||
*
|
||||
* @param {string} queueName
|
||||
* @private
|
||||
*/
|
||||
constructor(queueName: string) {
|
||||
this._queueName = queueName;
|
||||
this._db = new DBWrapper(DB_NAME, DB_VERSION, {
|
||||
onupgradeneeded: this._upgradeDb,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Append an entry last in the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Object} entry.requestData
|
||||
* @param {number} [entry.timestamp]
|
||||
* @param {Object} [entry.metadata]
|
||||
* @private
|
||||
*/
|
||||
async pushEntry(entry: UnidentifiedQueueStoreEntry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert!.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'pushEntry',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert!.isType(entry.requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'pushEntry',
|
||||
paramName: 'entry.requestData',
|
||||
});
|
||||
}
|
||||
|
||||
// Don't specify an ID since one is automatically generated.
|
||||
delete entry.id;
|
||||
entry.queueName = this._queueName;
|
||||
|
||||
await this._db.add!(OBJECT_STORE_NAME, entry);
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepend an entry first in the queue.
|
||||
*
|
||||
* @param {Object} entry
|
||||
* @param {Object} entry.requestData
|
||||
* @param {number} [entry.timestamp]
|
||||
* @param {Object} [entry.metadata]
|
||||
* @private
|
||||
*/
|
||||
async unshiftEntry(entry: UnidentifiedQueueStoreEntry) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert!.isType(entry, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'unshiftEntry',
|
||||
paramName: 'entry',
|
||||
});
|
||||
assert!.isType(entry.requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'QueueStore',
|
||||
funcName: 'unshiftEntry',
|
||||
paramName: 'entry.requestData',
|
||||
});
|
||||
}
|
||||
|
||||
const [firstEntry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
count: 1,
|
||||
});
|
||||
|
||||
if (firstEntry) {
|
||||
// Pick an ID one less than the lowest ID in the object store.
|
||||
entry.id = firstEntry.id - 1;
|
||||
} else {
|
||||
// Otherwise let the auto-incrementor assign the ID.
|
||||
delete entry.id;
|
||||
}
|
||||
entry.queueName = this._queueName;
|
||||
|
||||
await this._db.add!(OBJECT_STORE_NAME, entry);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes and returns the last entry in the queue matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
async popEntry(): Promise<QueueStoreEntry> {
|
||||
return this._removeEntry({direction: 'prev'});
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes and returns the first entry in the queue matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
async shiftEntry(): Promise<QueueStoreEntry> {
|
||||
return this._removeEntry({direction: 'next'});
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns all entries in the store matching the `queueName`.
|
||||
*
|
||||
* @param {Object} options See {@link module:workbox-background-sync.Queue~getAll}
|
||||
* @return {Promise<Array<Object>>}
|
||||
* @private
|
||||
*/
|
||||
async getAll(): Promise<QueueStoreEntry[]> {
|
||||
return await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
index: INDEXED_PROP,
|
||||
query: IDBKeyRange.only(this._queueName),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes the entry for the given ID.
|
||||
*
|
||||
* WARNING: this method does not ensure the deleted enry belongs to this
|
||||
* queue (i.e. matches the `queueName`). But this limitation is acceptable
|
||||
* as this class is not publicly exposed. An additional check would make
|
||||
* this method slower than it needs to be.
|
||||
*
|
||||
* @private
|
||||
* @param {number} id
|
||||
*/
|
||||
async deleteEntry(id: number) {
|
||||
await this._db.delete!(OBJECT_STORE_NAME, id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes and returns the first or last entry in the queue (based on the
|
||||
* `direction` argument) matching the `queueName`.
|
||||
*
|
||||
* @return {Promise<Object>}
|
||||
* @private
|
||||
*/
|
||||
async _removeEntry({direction}: {direction?: IDBCursorDirection}) {
|
||||
const [entry] = await this._db.getAllMatching(OBJECT_STORE_NAME, {
|
||||
direction,
|
||||
index: INDEXED_PROP,
|
||||
query: IDBKeyRange.only(this._queueName),
|
||||
count: 1,
|
||||
});
|
||||
|
||||
if (entry) {
|
||||
await this.deleteEntry(entry.id);
|
||||
return entry;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Upgrades the database given an `upgradeneeded` event.
|
||||
*
|
||||
* @param {Event} event
|
||||
* @private
|
||||
*/
|
||||
private _upgradeDb(event: IDBVersionChangeEvent) {
|
||||
const db = (event.target as IDBOpenDBRequest).result;
|
||||
|
||||
if (event.oldVersion > 0 && event.oldVersion < DB_VERSION) {
|
||||
if (db.objectStoreNames.contains(OBJECT_STORE_NAME)) {
|
||||
db.deleteObjectStore(OBJECT_STORE_NAME);
|
||||
}
|
||||
}
|
||||
|
||||
const objStore = db.createObjectStore(OBJECT_STORE_NAME, {
|
||||
autoIncrement: true,
|
||||
keyPath: 'id',
|
||||
});
|
||||
objStore.createIndex(INDEXED_PROP, INDEXED_PROP, {unique: false});
|
||||
}
|
||||
}
|
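One detail worth noting in `unshiftEntry` above: because `id` auto-increments and cursor order follows the key, prepending is implemented by assigning an id one less than the current minimum. A tiny worked trace of that bookkeeping (ids are illustrative):

// pushEntry()    -> ids in store: [1]
// pushEntry()    -> ids in store: [1, 2]
// unshiftEntry() -> ids in store: [0, 1, 2]    (0 = min(1) - 1, so it iterates first)
// unshiftEntry() -> ids in store: [-1, 0, 1, 2]
// shiftEntry()   -> returns id -1; popEntry() -> returns id 2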
158
web/node_modules/workbox-background-sync/src/lib/StorableRequest.ts
generated
vendored
Normal file
158
web/node_modules/workbox-background-sync/src/lib/StorableRequest.ts
generated
vendored
Normal file
|
@ -0,0 +1,158 @@
|
|||
/*
|
||||
Copyright 2018 Google LLC
|
||||
|
||||
Use of this source code is governed by an MIT-style
|
||||
license that can be found in the LICENSE file or at
|
||||
https://opensource.org/licenses/MIT.
|
||||
*/
|
||||
|
||||
import {assert} from 'workbox-core/_private/assert.js';
|
||||
import '../_version.js';
|
||||
|
||||
|
||||
type SerializableProperties = 'method' | 'referrer' | 'referrerPolicy' | 'mode'
|
||||
| 'credentials' | 'cache' | 'redirect' | 'integrity' | 'keepalive';
|
||||
|
||||
const serializableProperties: SerializableProperties[] = [
|
||||
'method',
|
||||
'referrer',
|
||||
'referrerPolicy',
|
||||
'mode',
|
||||
'credentials',
|
||||
'cache',
|
||||
'redirect',
|
||||
'integrity',
|
||||
'keepalive',
|
||||
];
|
||||
|
||||
export interface RequestData {
|
||||
url: string;
|
||||
headers: {[headerName: string]: any};
|
||||
body?: ArrayBuffer;
|
||||
[propName: string]: any;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* A class to make it easier to serialize and de-serialize requests so they
|
||||
* can be stored in IndexedDB.
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
class StorableRequest {
|
||||
private readonly _requestData: RequestData;
|
||||
|
||||
/**
|
||||
* Converts a Request object to a plain object that can be structured
|
||||
* cloned or JSON-stringified.
|
||||
*
|
||||
* @param {Request} request
|
||||
* @return {Promise<StorableRequest>}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
static async fromRequest(request: Request): Promise<StorableRequest> {
|
||||
const requestData: RequestData = {
|
||||
url: request.url,
|
||||
headers: {},
|
||||
};
|
||||
|
||||
// Set the body if present.
|
||||
if (request.method !== 'GET') {
|
||||
// Use ArrayBuffer to support non-text request bodies.
|
||||
// NOTE: we can't use Blobs becuse Safari doesn't support storing
|
||||
// Blobs in IndexedDB in some cases:
|
||||
// https://github.com/dfahlander/Dexie.js/issues/618#issuecomment-398348457
|
||||
requestData.body = await request.clone().arrayBuffer();
|
||||
}
|
||||
|
||||
// Convert the headers from an iterable to an object.
|
||||
for (const [key, value] of request.headers.entries()) {
|
||||
requestData.headers[key] = value;
|
||||
}
|
||||
|
||||
// Add all other serializable request properties
|
||||
for (const prop of serializableProperties) {
|
||||
if (request[prop] !== undefined) {
|
||||
requestData[prop] = request[prop];
|
||||
}
|
||||
}
|
||||
|
||||
return new StorableRequest(requestData);
|
||||
}
|
||||
|
||||
/**
|
||||
* Accepts an object of request data that can be used to construct a
|
||||
* `Request` but can also be stored in IndexedDB.
|
||||
*
|
||||
* @param {Object} requestData An object of request data that includes the
|
||||
* `url` plus any relevant properties of
|
||||
* [requestInit]{@link https://fetch.spec.whatwg.org/#requestinit}.
|
||||
* @private
|
||||
*/
|
||||
constructor(requestData: RequestData) {
|
||||
if (process.env.NODE_ENV !== 'production') {
|
||||
assert!.isType(requestData, 'object', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'StorableRequest',
|
||||
funcName: 'constructor',
|
||||
paramName: 'requestData',
|
||||
});
|
||||
assert!.isType(requestData.url, 'string', {
|
||||
moduleName: 'workbox-background-sync',
|
||||
className: 'StorableRequest',
|
||||
funcName: 'constructor',
|
||||
paramName: 'requestData.url',
|
||||
});
|
||||
}
|
||||
|
||||
// If the request's mode is `navigate`, convert it to `same-origin` since
|
||||
// navigation requests can't be constructed via script.
|
||||
if (requestData['mode'] === 'navigate') {
|
||||
requestData['mode'] = 'same-origin';
|
||||
}
|
||||
|
||||
this._requestData = requestData;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a deep clone of the instances `_requestData` object.
|
||||
*
|
||||
* @return {Object}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
toObject(): RequestData {
|
||||
const requestData = Object.assign({}, this._requestData);
|
||||
requestData.headers = Object.assign({}, this._requestData.headers);
|
||||
if (requestData.body) {
|
||||
requestData.body = requestData.body.slice(0);
|
||||
}
|
||||
|
||||
return requestData;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts this instance to a Request.
|
||||
*
|
||||
* @return {Request}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
toRequest(): Request {
|
||||
return new Request(this._requestData.url, this._requestData);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates and returns a deep clone of the instance.
|
||||
*
|
||||
* @return {StorableRequest}
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
clone(): StorableRequest {
|
||||
return new StorableRequest(this.toObject());
|
||||
}
|
||||
}
|
||||
|
||||
export {StorableRequest};
|
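The `serializableProperties` allow-list above is the contract for what survives the IndexedDB round trip; anything not listed (for example an `AbortSignal`) is intentionally dropped. A small sketch of what `toObject()` yields for a GET under those rules (values are illustrative; run inside an async function):

async function inspectSerializedShape() {
  const req = new Request('https://example.com/ping', {
    method: 'GET',
    cache: 'no-store',
  });
  const data = (await StorableRequest.fromRequest(req)).toObject();
  // data.url     -> 'https://example.com/ping'
  // data.method  -> 'GET'
  // data.cache   -> 'no-store'
  // data.body    -> undefined (GET requests never store a body)
  return data;
}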
14
web/node_modules/workbox-background-sync/tsconfig.json
generated
vendored
Normal file
14
web/node_modules/workbox-background-sync/tsconfig.json
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
{
  "extends": "../../tsconfig",
  "compilerOptions": {
    "outDir": "./",
    "rootDir": "./src",
    "tsBuildInfoFile": "./tsconfig.tsbuildinfo"
  },
  "include": [
    "src/**/*.ts"
  ],
  "references": [
    { "path": "../workbox-core/" }
  ]
}
|
2503
web/node_modules/workbox-background-sync/tsconfig.tsbuildinfo
generated
vendored
Normal file
2503
web/node_modules/workbox-background-sync/tsconfig.tsbuildinfo
generated
vendored
Normal file
File diff suppressed because it is too large