mirror of
https://github.com/LadybirdBrowser/ladybird.git
synced 2025-12-07 21:59:54 +00:00
LibWeb/IndexedDB: Prevent copying and moving of RequestList
RequestList cannot be copied or moved, because m_pending_request_queue contains lambdas that store pointers to the original RequestList and completion steps that we don't have a reference to. Fixes a bunch of WPT regressions and imports the ones that work.
This commit is contained in:
parent
4ede2cdf18
commit
85e8d2ba38
Notes:
github-actions[bot]
2025-10-23 12:44:02 +00:00
Author: https://github.com/Lubrsi
Commit: 85e8d2ba38
Pull-request: https://github.com/LadybirdBrowser/ladybird/pull/6429
Reviewed-by: https://github.com/gmta ✅
88 changed files with 5924 additions and 11 deletions
|
|
@ -12,7 +12,17 @@
|
|||
|
||||
namespace Web::IndexedDB {
|
||||
|
||||
using ConnectionMap = HashMap<StorageAPI::StorageKey, HashMap<String, RequestList>>;
|
||||
struct Connection final : public RefCounted<Connection> {
|
||||
Connection(StorageAPI::StorageKey storage_key, String name)
|
||||
: storage_key(move(storage_key))
|
||||
, name(move(name))
|
||||
{
|
||||
}
|
||||
|
||||
StorageAPI::StorageKey storage_key;
|
||||
String name;
|
||||
RequestList request_list;
|
||||
};
|
||||
|
||||
// https://w3c.github.io/IndexedDB/#connection-queues
|
||||
class ConnectionQueueHandler {
|
||||
|
|
@ -25,7 +35,7 @@ public:
|
|||
}
|
||||
|
||||
private:
|
||||
ConnectionMap m_open_requests;
|
||||
Vector<NonnullRefPtr<Connection>> m_open_requests;
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -65,12 +65,17 @@ Vector<GC::Root<Database>> Database::for_key(StorageAPI::StorageKey const& key)
|
|||
|
||||
RequestList& ConnectionQueueHandler::for_key_and_name(StorageAPI::StorageKey const& key, String const& name)
|
||||
{
|
||||
return ConnectionQueueHandler::the().m_open_requests.ensure(key, [] {
|
||||
return HashMap<String, RequestList>();
|
||||
})
|
||||
.ensure(name, [] {
|
||||
return RequestList();
|
||||
});
|
||||
auto& instance = ConnectionQueueHandler::the();
|
||||
auto maybe_connection = instance.m_open_requests.find_if([&key, &name](Connection const& connection) {
|
||||
return connection.storage_key == key && connection.name == name;
|
||||
});
|
||||
|
||||
if (!maybe_connection.is_end())
|
||||
return (*maybe_connection)->request_list;
|
||||
|
||||
auto new_connection = adopt_ref(*new Connection(key, name));
|
||||
instance.m_open_requests.append(new_connection);
|
||||
return new_connection->request_list;
|
||||
}
|
||||
|
||||
Optional<GC::Root<Database> const&> Database::for_key_and_name(StorageAPI::StorageKey const& key, String const& name)
|
||||
|
|
|
|||
|
|
@ -29,7 +29,8 @@ void RequestList::all_requests_processed(GC::Heap& heap, GC::Ref<GC::Function<vo
|
|||
|
||||
if (pending_request_process) {
|
||||
pending_request_process->after_all = GC::create_function(heap, [this, pending_request_process, on_complete] {
|
||||
bool was_removed = m_pending_request_queue.remove_first_matching([pending_request_process](GC::Root<PendingRequestProcess> stored_pending_connection_process) {
|
||||
VERIFY(!m_pending_request_queue.is_empty());
|
||||
bool was_removed = m_pending_request_queue.remove_first_matching([pending_request_process](GC::Root<PendingRequestProcess> const& stored_pending_connection_process) {
|
||||
return stored_pending_connection_process.ptr() == pending_request_process.ptr();
|
||||
});
|
||||
VERIFY(was_removed);
|
||||
|
|
@ -60,7 +61,8 @@ void RequestList::all_previous_requests_processed(GC::Heap& heap, GC::Ref<IDBReq
|
|||
|
||||
if (pending_request_process) {
|
||||
pending_request_process->after_all = GC::create_function(heap, [this, pending_request_process, on_complete] {
|
||||
bool was_removed = m_pending_request_queue.remove_first_matching([pending_request_process](GC::Root<PendingRequestProcess> stored_pending_connection_process) {
|
||||
VERIFY(!m_pending_request_queue.is_empty());
|
||||
bool was_removed = m_pending_request_queue.remove_first_matching([pending_request_process](GC::Root<PendingRequestProcess> const& stored_pending_connection_process) {
|
||||
return stored_pending_connection_process.ptr() == pending_request_process.ptr();
|
||||
});
|
||||
VERIFY(was_removed);
|
||||
|
|
|
|||
|
|
@ -13,8 +13,13 @@
|
|||
|
||||
namespace Web::IndexedDB {
|
||||
|
||||
class RequestList : public AK::Vector<GC::Root<IDBRequest>> {
|
||||
class RequestList final : public AK::Vector<GC::Root<IDBRequest>> {
|
||||
AK_MAKE_NONMOVABLE(RequestList);
|
||||
AK_MAKE_NONCOPYABLE(RequestList);
|
||||
|
||||
public:
|
||||
RequestList() = default;
|
||||
|
||||
void all_requests_processed(GC::Heap&, GC::Ref<GC::Function<void()>> on_complete);
|
||||
void all_previous_requests_processed(GC::Heap&, GC::Ref<IDBRequest> const& request, GC::Ref<GC::Function<void()>> on_complete);
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,9 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 4 tests
|
||||
|
||||
4 Pass
|
||||
Pass Active during success handlers
|
||||
Pass Active during success listeners
|
||||
Pass Active during error handlers
|
||||
Pass Active during error listeners
|
||||
|
|
@ -0,0 +1,23 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 17 tests
|
||||
|
||||
7 Pass
|
||||
10 Fail
|
||||
Pass Exception in error event handler on request
|
||||
Pass Exception in error event handler on request, with preventDefault
|
||||
Pass Exception in error event listener on request
|
||||
Pass Exception in error event listener ("handleEvent" lookup) on request
|
||||
Pass Exception in error event listener (non-callable "handleEvent") on request
|
||||
Pass Exception in second error event listener on request
|
||||
Pass Exception in first error event listener on request, transaction active in second
|
||||
Fail Exception in error event handler on transaction
|
||||
Fail Exception in error event handler on transaction, with preventDefault
|
||||
Fail Exception in error event listener on transaction
|
||||
Fail Exception in second error event listener on transaction
|
||||
Fail Exception in first error event listener on transaction, transaction active in second
|
||||
Fail Exception in error event handler on connection
|
||||
Fail Exception in error event handler on connection, with preventDefault
|
||||
Fail Exception in error event listener on connection
|
||||
Fail Exception in second error event listener on connection
|
||||
Fail Exception in first error event listener on connection, transaction active in second
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
6 Pass
|
||||
Pass Exception in success event handler on request
|
||||
Pass Exception in success event listener on request
|
||||
Pass Exception in success event listener ("handleEvent" lookup) on request
|
||||
Pass Exception in success event listener (non-callable "handleEvent") on request
|
||||
Pass Exception in second success event listener on request
|
||||
Pass Exception in first success event listener, tx active in second
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
6 Fail
|
||||
Fail Exception in upgradeneeded handler
|
||||
Fail Exception in upgradeneeded listener
|
||||
Fail Exception in upgradeneeded "handleEvent" lookup
|
||||
Fail Exception in upgradeneeded due to non-callable "handleEvent"
|
||||
Fail Exception in second upgradeneeded listener
|
||||
Fail Exception in first upgradeneeded listener, tx active in second
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 16 tests
|
||||
|
||||
16 Pass
|
||||
Pass Binary keys can be supplied using the view type Uint8Array
|
||||
Pass Binary keys can be supplied using the view type Uint8ClampedArray
|
||||
Pass Binary keys can be supplied using the view type Int8Array
|
||||
Pass Binary keys can be supplied using the view type Uint16Array
|
||||
Pass Binary keys can be supplied using the view type Int16Array
|
||||
Pass Binary keys can be supplied using the view type Uint32Array
|
||||
Pass Binary keys can be supplied using the view type Int32Array
|
||||
Pass Binary keys can be supplied using the view type Float16Array
|
||||
Pass Binary keys can be supplied using the view type Float32Array
|
||||
Pass Binary keys can be supplied using the view type Float64Array
|
||||
Pass ArrayBuffer can be used to supply a binary key
|
||||
Pass DataView can be used to supply a binary key
|
||||
Pass DataView with explicit offset can be used to supply a binary key
|
||||
Pass DataView with explicit offset and length can be used to supply a binary key
|
||||
Pass Uint8Array with explicit offset can be used to supply a binary key
|
||||
Pass Uint8Array with explicit offset and length can be used to supply a binary key
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
4 Pass
|
||||
2 Fail
|
||||
Fail IDBCursor.advance() - invalid - attempt to call advance twice
|
||||
Pass IDBCursor.advance() - invalid - pass something other than number
|
||||
Pass IDBCursor.advance() - invalid - pass null/undefined
|
||||
Pass IDBCursor.advance() - invalid - missing argument
|
||||
Pass IDBCursor.advance() - invalid - pass negative numbers
|
||||
Fail IDBCursor.advance() - invalid - got value not set on exception
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
2 Pass
|
||||
4 Fail
|
||||
Fail IDBCursor.advance() - advances
|
||||
Fail IDBCursor.advance() - advances backwards
|
||||
Pass IDBCursor.advance() - skip far forward
|
||||
Fail IDBCursor.advance() - within range
|
||||
Pass IDBCursor.advance() - within single key range
|
||||
Fail IDBCursor.advance() - within single key range, with several results
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
2 Pass
|
||||
4 Fail
|
||||
Fail IDBCursor.continue() - continues
|
||||
Fail IDBCursor.continue() - with given key
|
||||
Pass IDBCursor.continue() - skip far forward
|
||||
Fail IDBCursor.continue() - within range
|
||||
Pass IDBCursor.continue() - within single key range
|
||||
Fail IDBCursor.continue() - within single key range, with several results
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 8 tests
|
||||
|
||||
6 Pass
|
||||
2 Fail
|
||||
Pass IDBObjectStore::openCursor's request source must be the IDBObjectStore instance that opened the cursor
|
||||
Pass IDBObjectStore::openKeyCursor's request source must be the IDBObjectStore instance that opened the cursor
|
||||
Fail IDBIndex::openCursor's request source must be the IDBIndex instance that opened the cursor
|
||||
Fail IDBIndex::openKeyCursor's request source must be the IDBIndex instance that opened the cursor
|
||||
Pass The source of the request from IDBObjectStore::update() is the cursor itself
|
||||
Pass The source of the request from IDBObjectStore::delete() is the cursor itself
|
||||
Pass The source of the request from IDBIndex::update() is the cursor itself
|
||||
Pass The source of the request from IDBIndex::delete() is the cursor itself
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 12 tests
|
||||
|
||||
6 Pass
|
||||
6 Fail
|
||||
Fail IDBIndex.get exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBIndex.get exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBIndex.getAll exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBIndex.getAll exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBIndex.getAllKeys exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBIndex.getAllKeys exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBIndex.count exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBIndex.count exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBIndex.openCursor exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBIndex.openCursor exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBIndex.openKeyCursor exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBIndex.openKeyCursor exception order: TransactionInactiveError vs. DataError
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 7 tests
|
||||
|
||||
7 Pass
|
||||
Pass The source of the request from index => index.get(0) is the index itself
|
||||
Pass The source of the request from index => index.getKey(0) is the index itself
|
||||
Pass The source of the request from index => index.getAll() is the index itself
|
||||
Pass The source of the request from index => index.getAllKeys() is the index itself
|
||||
Pass The source of the request from index => index.count() is the index itself
|
||||
Pass The source of the request from index => index.openCursor() is the index itself
|
||||
Pass The source of the request from index => index.openKeyCursor() is the index itself
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 19 tests
|
||||
|
||||
18 Pass
|
||||
1 Fail
|
||||
Pass Single item get
|
||||
Pass Empty object store
|
||||
Pass Get all
|
||||
Pass Get all with generated keys
|
||||
Pass Get all with large values
|
||||
Pass maxCount=10
|
||||
Pass Get bound range
|
||||
Pass Get bound range with maxCount
|
||||
Pass Get upper excluded
|
||||
Pass Get lower excluded
|
||||
Pass Get bound range (generated) with maxCount
|
||||
Pass Non existent key
|
||||
Pass maxCount=0
|
||||
Pass Max value count
|
||||
Pass Query with empty range where first key < upperBound
|
||||
Pass Query with empty range where lowerBound < last key
|
||||
Pass Retrieve multiEntry key
|
||||
Pass Retrieve one key multiple values
|
||||
Fail Get all values with invalid query keys
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 18 tests
|
||||
|
||||
17 Pass
|
||||
1 Fail
|
||||
Pass Single item get
|
||||
Pass Empty object store
|
||||
Pass Get all keys
|
||||
Pass Get all generated keys
|
||||
Pass maxCount=10
|
||||
Pass Get bound range
|
||||
Pass Get bound range with maxCount
|
||||
Pass Get upper excluded
|
||||
Pass Get lower excluded
|
||||
Pass Get bound range (generated) with maxCount
|
||||
Pass Non existent key
|
||||
Pass maxCount=0
|
||||
Pass Max value count
|
||||
Pass Query with empty range where first key < upperBound
|
||||
Pass Query with empty range where lowerBound < last key
|
||||
Pass Retrieve multiEntry key
|
||||
Pass Retrieve one key multiple values
|
||||
Fail Get all keys with invalid query keys
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
4 Pass
|
||||
2 Fail
|
||||
Fail IDBObjectStore.put exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.put exception order: TransactionInactiveError vs. ReadOnlyError
|
||||
Pass IDBObjectStore.put exception order: ReadOnlyError vs. DataError
|
||||
Fail IDBObjectStore.add exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.add exception order: TransactionInactiveError vs. ReadOnlyError
|
||||
Pass IDBObjectStore.add exception order: ReadOnlyError vs. DataError
|
||||
|
|
@ -0,0 +1,18 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 12 tests
|
||||
|
||||
6 Pass
|
||||
6 Fail
|
||||
Fail IDBObjectStore.get exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.get exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBObjectStore.getAll exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.getAll exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBObjectStore.getAllKeys exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.getAllKeys exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBObjectStore.count exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.count exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBObjectStore.openCursor exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.openCursor exception order: TransactionInactiveError vs. DataError
|
||||
Fail IDBObjectStore.openKeyCursor exception order: InvalidStateError vs. TransactionInactiveError
|
||||
Pass IDBObjectStore.openKeyCursor exception order: TransactionInactiveError vs. DataError
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 11 tests
|
||||
|
||||
11 Pass
|
||||
Pass The source of the request from store => store.put(0) is the object store itself
|
||||
Pass The source of the request from store => store.add(0) is the object store itself
|
||||
Pass The source of the request from store => store.delete(0) is the object store itself
|
||||
Pass The source of the request from store => store.clear() is the object store itself
|
||||
Pass The source of the request from store => store.get(0) is the object store itself
|
||||
Pass The source of the request from store => store.getKey(0) is the object store itself
|
||||
Pass The source of the request from store => store.getAll() is the object store itself
|
||||
Pass The source of the request from store => store.getAllKeys() is the object store itself
|
||||
Pass The source of the request from store => store.count() is the object store itself
|
||||
Pass The source of the request from store => store.openCursor() is the object store itself
|
||||
Pass The source of the request from store => store.openKeyCursor() is the object store itself
|
||||
|
|
@ -0,0 +1,30 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 24 tests
|
||||
|
||||
6 Pass
|
||||
18 Fail
|
||||
Fail Single item get
|
||||
Fail Single item get (generated key)
|
||||
Fail getAll on empty object store
|
||||
Fail Get all values
|
||||
Fail Get all with large values
|
||||
Fail Test maxCount
|
||||
Fail Get bound range
|
||||
Fail Get bound range with maxCount
|
||||
Fail Get upper excluded
|
||||
Fail Get lower excluded
|
||||
Fail Get bound range (generated) with maxCount
|
||||
Fail Non existent key
|
||||
Fail zero maxCount
|
||||
Fail Max value count
|
||||
Fail Query with empty range where first key < upperBound
|
||||
Fail Query with empty range where lowerBound < last key
|
||||
Pass Direction: next
|
||||
Pass Direction: prev
|
||||
Pass Direction: nextunique
|
||||
Pass Direction: prevunique
|
||||
Pass Direction and query
|
||||
Pass Direction, query and count
|
||||
Fail Get all values with both options and count
|
||||
Fail Get all values with invalid query keys
|
||||
|
|
@ -0,0 +1,24 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 18 tests
|
||||
|
||||
17 Pass
|
||||
1 Fail
|
||||
Pass Single item get
|
||||
Pass Single item get (generated key)
|
||||
Pass getAll on empty object store
|
||||
Pass Get all values
|
||||
Pass Get all with large values
|
||||
Pass Test maxCount
|
||||
Pass Get bound range
|
||||
Pass Get bound range with maxCount
|
||||
Pass Get upper excluded
|
||||
Pass Get lower excluded
|
||||
Pass Get bound range (generated) with maxCount
|
||||
Pass Non existent key
|
||||
Pass zero maxCount
|
||||
Pass Max value count
|
||||
Pass Query with empty range where first key < upperBound
|
||||
Pass Query with empty range where lowerBound < last key
|
||||
Pass Get all values with transaction.commit()
|
||||
Fail Get all values with invalid query keys
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 23 tests
|
||||
|
||||
8 Pass
|
||||
15 Fail
|
||||
Fail Single item get
|
||||
Fail Single item get (generated key)
|
||||
Pass getAllKeys on empty object store
|
||||
Pass Get all keys
|
||||
Fail Test maxCount
|
||||
Fail Get bound range
|
||||
Fail Get bound range with maxCount
|
||||
Fail Get upper excluded
|
||||
Fail Get lower excluded
|
||||
Fail Get bound range (generated) with maxCount
|
||||
Fail Non existent key
|
||||
Fail zero maxCount
|
||||
Fail Max value count
|
||||
Fail Query with empty range where first key < upperBound
|
||||
Fail Query with empty range where lowerBound < last key
|
||||
Pass Direction: next
|
||||
Pass Direction: prev
|
||||
Pass Direction: nextunique
|
||||
Pass Direction: prevunique
|
||||
Pass Direction and query
|
||||
Pass Direction, query and count
|
||||
Fail Get all keys with both options and count
|
||||
Fail Get all keys with invalid query keys
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 16 tests
|
||||
|
||||
15 Pass
|
||||
1 Fail
|
||||
Pass Single item get
|
||||
Pass Single item get (generated key)
|
||||
Pass getAllKeys on empty object store
|
||||
Pass Get all keys
|
||||
Pass Test maxCount
|
||||
Pass Get bound range
|
||||
Pass Get bound range with maxCount
|
||||
Pass Get upper excluded
|
||||
Pass Get lower excluded
|
||||
Pass Get bound range (generated) with maxCount
|
||||
Pass Non existent key
|
||||
Pass zero maxCount
|
||||
Pass Max value count
|
||||
Pass Query with empty range where first key < upperBound
|
||||
Pass Query with empty range where lowerBound < last key
|
||||
Fail Get all keys with invalid query keys
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 17 tests
|
||||
|
||||
17 Pass
|
||||
Pass IDBObjectStore.getKey() - invalid parameters
|
||||
Pass IDBObjectStore.getKey() - basic - key
|
||||
Pass IDBObjectStore.getKey() - basic - range
|
||||
Pass IDBObjectStore.getKey() - basic - key - no match
|
||||
Pass IDBObjectStore.getKey() - basic - range - no match
|
||||
Pass IDBObjectStore.getKey() - key path - key
|
||||
Pass IDBObjectStore.getKey() - key path - range
|
||||
Pass IDBObjectStore.getKey() - key path - key - no match
|
||||
Pass IDBObjectStore.getKey() - key path - range - no match
|
||||
Pass IDBObjectStore.getKey() - key generator - key
|
||||
Pass IDBObjectStore.getKey() - key generator - range
|
||||
Pass IDBObjectStore.getKey() - key generator - key - no match
|
||||
Pass IDBObjectStore.getKey() - key generator - range - no match
|
||||
Pass IDBObjectStore.getKey() - key generator and key path - key
|
||||
Pass IDBObjectStore.getKey() - key generator and key path - range
|
||||
Pass IDBObjectStore.getKey() - key generator and key path - key - no match
|
||||
Pass IDBObjectStore.getKey() - key generator and key path - range - no match
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 8 tests
|
||||
|
||||
3 Pass
|
||||
5 Fail
|
||||
Fail IDBTransaction.objectStoreNames - during upgrade transaction
|
||||
Pass IDBTransaction.objectStoreNames - value after close
|
||||
Pass IDBTransaction.objectStoreNames - transaction scope
|
||||
Fail IDBTransaction.objectStoreNames - value after commit
|
||||
Fail IDBTransaction.objectStoreNames - value after abort
|
||||
Pass IDBTransaction.objectStoreNames - sorting
|
||||
Fail IDBTransaction.objectStoreNames - no duplicates
|
||||
Fail IDBTransaction.objectStoreNames - unusual names
|
||||
|
|
@ -0,0 +1,33 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 27 tests
|
||||
|
||||
24 Pass
|
||||
3 Fail
|
||||
Pass IDBFactory cmp() static with throwing/invalid keys
|
||||
Pass IDBCursor continue() method with throwing/invalid keys
|
||||
Pass IndexedDB: Exceptions thrown during key conversion
|
||||
Fail IDBCursor update() method with throwing/invalid keys
|
||||
Pass IDBKeyRange only() static with throwing/invalid keys
|
||||
Pass IDBKeyRange lowerBound() static with throwing/invalid keys
|
||||
Pass IDBKeyRange upperBound() static with throwing/invalid keys
|
||||
Pass IDBKeyRange bound() static with throwing/invalid keys
|
||||
Fail IDBObjectStore add() method with throwing/invalid keys
|
||||
Fail IDBObjectStore put() method with throwing/invalid keys
|
||||
Pass IDBObjectStore delete() method with throwing/invalid keys
|
||||
Pass IDBObjectStore get() method with throwing/invalid keys
|
||||
Pass IDBObjectStore getKey() method with throwing/invalid keys
|
||||
Pass IDBObjectStore count() method with throwing/invalid keys
|
||||
Pass IDBObjectStore openCursor() method with throwing/invalid keys
|
||||
Pass IDBObjectStore openKeyCursor() method with throwing/invalid keys
|
||||
Pass IDBIndex get() method with throwing/invalid keys
|
||||
Pass IDBIndex getKey() method with throwing/invalid keys
|
||||
Pass IDBIndex count() method with throwing/invalid keys
|
||||
Pass IDBIndex openCursor() method with throwing/invalid keys
|
||||
Pass IDBIndex openKeyCursor() method with throwing/invalid keys
|
||||
Pass IDBObjectStore getAll() method with throwing/invalid keys
|
||||
Pass IDBObjectStore getAllKeys() method with throwing/invalid keys
|
||||
Pass IDBObjectStore getAllRecords() method with throwing/invalid keys
|
||||
Pass IDBIndex getAll() method with throwing/invalid keys
|
||||
Pass IDBIndex getAllKeys() method with throwing/invalid keys
|
||||
Pass IDBIndex getAllRecords() method with throwing/invalid keys
|
||||
|
|
@ -0,0 +1,27 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 21 tests
|
||||
|
||||
15 Pass
|
||||
6 Fail
|
||||
Pass Keygenerator - starts at one, and increments by one
|
||||
Pass Keygenerator - increments by one from last set key
|
||||
Pass Keygenerator - don't increment when new key is not bigger than current
|
||||
Pass Keygenerator ConstraintError when using same id as already generated
|
||||
Fail Key generator vs. explicit key 53 bits
|
||||
Fail Key generator vs. explicit key greater than 53 bits, less than 64 bits
|
||||
Pass Key generator vs. explicit key greater than 53 bits, less than 64 bits (negative)
|
||||
Fail Key generator vs. explicit key 63 bits
|
||||
Pass Key generator vs. explicit key 63 bits (negative)
|
||||
Fail Key generator vs. explicit key 64 bits
|
||||
Pass Key generator vs. explicit key 64 bits (negative)
|
||||
Fail Key generator vs. explicit key greater than 64 bits, but still finite
|
||||
Pass Key generator vs. explicit key greater than 64 bits, but still finite (negative)
|
||||
Fail Key generator vs. explicit key equal to Infinity
|
||||
Pass Key generator vs. explicit key equal to -Infinity
|
||||
Pass Key is injected into value - single segment path
|
||||
Pass Key is injected into value - multi-segment path
|
||||
Pass Key is injected into value - multi-segment path, partially populated
|
||||
Pass put() throws if key cannot be injected - single segment path
|
||||
Pass put() throws if key cannot be injected - multi-segment path
|
||||
Pass Keygenerator overflow
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
5 Pass
|
||||
1 Fail
|
||||
Pass The last element of keypath is validated
|
||||
Pass Key path evaluation: Exceptions from non-enumerable getters
|
||||
Fail Key path evaluation: Exceptions from enumerable getters
|
||||
Pass Key path evaluation: Exceptions from non-enumerable getters on prototype
|
||||
Pass Key path evaluation: Exceptions from enumerable getters on prototype
|
||||
Pass Array key conversion should not invoke prototype getters
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
Harness status: OK
|
||||
|
||||
Found 6 tests
|
||||
|
||||
6 Pass
|
||||
Pass Type: String, identifier: length
|
||||
Pass Type: Array, identifier: length
|
||||
Pass Type: Blob, identifier: size
|
||||
Pass Type: Blob, identifier: type
|
||||
Pass Type: File, identifier: name
|
||||
Pass Type: File, identifier: lastModified
|
||||
Binary file not shown.
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB Transaction - active flag is set during event dispatch</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/event-dispatch-active-flag.any.js"></script>
|
||||
|
|
@ -0,0 +1,91 @@
|
|||
// META: title=IndexedDB Transaction - active flag is set during event dispatch
|
||||
// META: global=window,worker
|
||||
// META: script=resources/support.js
|
||||
|
||||
'use strict';
|
||||
|
||||
function createObjectStore() {
|
||||
return (t, db) => {
|
||||
db.createObjectStore('store');
|
||||
};
|
||||
}
|
||||
|
||||
function initializeTransaction(t, db, mode = 'readonly') {
|
||||
const tx = db.transaction('store', mode);
|
||||
const release_tx = keep_alive(tx, 'store');
|
||||
assert_true(
|
||||
is_transaction_active(tx, 'store'),
|
||||
'Transaction should be active after creation');
|
||||
return {tx, release_tx};
|
||||
}
|
||||
|
||||
function assertLifetimeInMicrotasksAndEventLoop(
|
||||
t, tx, release_tx, handlerMessage) {
|
||||
assert_true(is_transaction_active(tx, 'store'), handlerMessage);
|
||||
|
||||
let saw_promise = false;
|
||||
Promise.resolve().then(t.step_func(() => {
|
||||
saw_promise = true;
|
||||
assert_true(
|
||||
is_transaction_active(tx, 'store'),
|
||||
'Transaction should be active in microtasks');
|
||||
}));
|
||||
|
||||
setTimeout(
|
||||
t.step_func(() => {
|
||||
assert_true(saw_promise);
|
||||
assert_false(
|
||||
is_transaction_active(tx, 'store'),
|
||||
'Transaction should be inactive in next task');
|
||||
release_tx();
|
||||
t.done();
|
||||
}),
|
||||
0);
|
||||
};
|
||||
|
||||
indexeddb_test(createObjectStore(), (t, db) => {
|
||||
const {tx, release_tx} = initializeTransaction(t, db);
|
||||
const request = tx.objectStore('store').get(0);
|
||||
request.onerror = t.unreached_func('request should succeed');
|
||||
request.onsuccess = t.step_func((e) => {
|
||||
assertLifetimeInMicrotasksAndEventLoop(
|
||||
t, tx, release_tx,
|
||||
'Transaction should be active during success handler');
|
||||
});
|
||||
}, 'Active during success handlers');
|
||||
|
||||
indexeddb_test(createObjectStore(), (t, db) => {
|
||||
const {tx, release_tx} = initializeTransaction(t, db);
|
||||
const request = tx.objectStore('store').get(0);
|
||||
request.onerror = t.unreached_func('request should succeed');
|
||||
request.onsuccess = t.step_func((e) => {
|
||||
assertLifetimeInMicrotasksAndEventLoop(
|
||||
t, tx, release_tx,
|
||||
'Transaction should be active during success listener');
|
||||
});
|
||||
}, 'Active during success listeners');
|
||||
|
||||
indexeddb_test(createObjectStore(), (t, db) => {
|
||||
const {tx, release_tx} = initializeTransaction(t, db, 'readwrite');
|
||||
tx.objectStore('store').put(0, 0);
|
||||
const request = tx.objectStore('store').add(0, 0);
|
||||
request.onsuccess = t.unreached_func('request should fail');
|
||||
request.onerror = t.step_func((e) => {
|
||||
e.preventDefault();
|
||||
assertLifetimeInMicrotasksAndEventLoop(
|
||||
t, tx, release_tx, 'Transaction should be active during error handler');
|
||||
});
|
||||
}, 'Active during error handlers');
|
||||
|
||||
indexeddb_test(createObjectStore(), (t, db) => {
|
||||
const {tx, release_tx} = initializeTransaction(t, db, 'readwrite');
|
||||
tx.objectStore('store').put(0, 0);
|
||||
const request = tx.objectStore('store').add(0, 0);
|
||||
request.onsuccess = t.unreached_func('request should fail');
|
||||
request.onerror = t.step_func((e) => {
|
||||
e.preventDefault();
|
||||
assertLifetimeInMicrotasksAndEventLoop(
|
||||
t, tx, release_tx,
|
||||
'Transaction should be active during error listener');
|
||||
});
|
||||
}, 'Active during error listeners');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>Fire error event - Exception thrown</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/fire-error-event-exception.any.js"></script>
|
||||
|
|
@ -0,0 +1,194 @@
|
|||
// META: global=window,worker
|
||||
// META: title=Fire error event - Exception thrown
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: "https://w3c.github.io/IndexedDB/#fire-error-event"
|
||||
|
||||
'use strict';
|
||||
|
||||
setup({allow_uncaught_exception: true});
|
||||
|
||||
// Runs an indexeddb_test in which a put()/add() key collision forces the
// request to fail. |func| installs the (usually throwing) error listeners
// under test; the transaction must then abort with an AbortError.
function fire_error_event_test(func, description) {
  const setup_database = (t, db) => {
    db.createObjectStore('s');
  };

  const run_test = (t, db) => {
    const tx = db.transaction('s', 'readwrite');
    tx.oncomplete = t.unreached_func('transaction should abort');

    const store = tx.objectStore('s');
    store.put(0, 0);
    // Adding under the same key as the put() above guarantees failure.
    const request = store.add(0, 0);
    request.onsuccess = t.unreached_func('request should fail');

    func(t, db, tx, request);

    tx.addEventListener('abort', t.step_func_done(() => {
      assert_equals(tx.error.name, 'AbortError');
    }));
  };

  indexeddb_test(setup_database, run_test, description);
}
|
||||
|
||||
// Listeners on the request.
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
request.onerror = () => {
|
||||
throw Error();
|
||||
};
|
||||
}, 'Exception in error event handler on request');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
request.onerror = e => {
|
||||
e.preventDefault();
|
||||
throw Error();
|
||||
};
|
||||
}, 'Exception in error event handler on request, with preventDefault');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
request.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in error event listener on request');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
request.addEventListener('error', {
|
||||
get handleEvent() {
|
||||
throw new Error();
|
||||
},
|
||||
});
|
||||
}, 'Exception in error event listener ("handleEvent" lookup) on request');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
request.addEventListener('error', {});
|
||||
}, 'Exception in error event listener (non-callable "handleEvent") on request');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
request.addEventListener(
|
||||
'error',
|
||||
() => {
|
||||
// no-op
|
||||
});
|
||||
request.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in second error event listener on request');
|
||||
|
||||
fire_error_event_test(
|
||||
(t, db, tx, request) => {
|
||||
let second_listener_called = false;
|
||||
request.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
request.addEventListener('error', t.step_func(() => {
|
||||
second_listener_called = true;
|
||||
assert_true(
|
||||
is_transaction_active(tx, 's'),
|
||||
'Transaction should be active until dispatch completes');
|
||||
}));
|
||||
tx.addEventListener('abort', t.step_func(() => {
|
||||
assert_true(second_listener_called);
|
||||
}));
|
||||
},
|
||||
'Exception in first error event listener on request, ' +
|
||||
'transaction active in second');
|
||||
|
||||
// Listeners on the transaction.
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
tx.onerror = () => {
|
||||
throw Error();
|
||||
};
|
||||
}, 'Exception in error event handler on transaction');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
tx.onerror = e => {
|
||||
e.preventDefault();
|
||||
throw Error();
|
||||
};
|
||||
}, 'Exception in error event handler on transaction, with preventDefault');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
tx.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in error event listener on transaction');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
tx.addEventListener(
|
||||
'error',
|
||||
() => {
|
||||
// no-op
|
||||
});
|
||||
tx.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in second error event listener on transaction');
|
||||
|
||||
fire_error_event_test(
|
||||
(t, db, tx, request) => {
|
||||
let second_listener_called = false;
|
||||
tx.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
tx.addEventListener('error', t.step_func(() => {
|
||||
second_listener_called = true;
|
||||
assert_true(
|
||||
is_transaction_active(tx, 's'),
|
||||
'Transaction should be active until dispatch completes');
|
||||
}));
|
||||
tx.addEventListener('abort', t.step_func(() => {
|
||||
assert_true(second_listener_called);
|
||||
}));
|
||||
},
|
||||
'Exception in first error event listener on transaction, ' +
|
||||
'transaction active in second');
|
||||
|
||||
// Listeners on the connection.
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
db.onerror = () => {
|
||||
throw Error();
|
||||
};
|
||||
}, 'Exception in error event handler on connection');
|
||||
|
||||
// Exception thrown from the connection's onerror handler after cancelling
// the event: the throw must still abort the transaction.
fire_error_event_test((t, db, tx, request) => {
  db.onerror = e => {
    // Semicolon added for consistency with the request/transaction variants
    // of this test; previously relied on automatic semicolon insertion.
    e.preventDefault();
    throw Error();
  };
}, 'Exception in error event handler on connection, with preventDefault');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
db.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in error event listener on connection');
|
||||
|
||||
fire_error_event_test((t, db, tx, request) => {
|
||||
db.addEventListener(
|
||||
'error',
|
||||
() => {
|
||||
// no-op
|
||||
});
|
||||
db.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in second error event listener on connection');
|
||||
|
||||
fire_error_event_test(
|
||||
(t, db, tx, request) => {
|
||||
let second_listener_called = false;
|
||||
db.addEventListener('error', () => {
|
||||
throw Error();
|
||||
});
|
||||
db.addEventListener('error', t.step_func(() => {
|
||||
second_listener_called = true;
|
||||
assert_true(
|
||||
is_transaction_active(tx, 's'),
|
||||
'Transaction should be active until dispatch completes');
|
||||
}));
|
||||
tx.addEventListener('abort', t.step_func(() => {
|
||||
assert_true(second_listener_called);
|
||||
}));
|
||||
},
|
||||
'Exception in first error event listener on connection, ' +
|
||||
'transaction active in second');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>Fire success event - Exception thrown</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/fire-success-event-exception.any.js"></script>
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
// META: global=window,worker
|
||||
// META: title=Fire success event - Exception thrown
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: "https://w3c.github.io/IndexedDB/#fire-success-event"
|
||||
|
||||
'use strict';
|
||||
|
||||
setup({allow_uncaught_exception: true});
|
||||
|
||||
// Runs an indexeddb_test that issues a get() request which will succeed.
// |func| installs the (usually throwing) success listeners under test;
// the transaction must then abort with an AbortError.
function fire_success_event_test(func, description) {
  const setup_database = (t, db) => {
    db.createObjectStore('s');
  };

  const run_test = (t, db) => {
    const tx = db.transaction('s', 'readonly');
    tx.oncomplete = t.unreached_func('transaction should abort');

    const store = tx.objectStore('s');
    const request = store.get(0);

    func(t, db, tx, request);

    tx.addEventListener('abort', t.step_func_done(() => {
      assert_equals(tx.error.name, 'AbortError');
    }));
  };

  indexeddb_test(setup_database, run_test, description);
}
|
||||
|
||||
fire_success_event_test((t, db, tx, request) => {
|
||||
request.onsuccess = () => {
|
||||
throw Error();
|
||||
};
|
||||
}, 'Exception in success event handler on request');
|
||||
|
||||
fire_success_event_test((t, db, tx, request) => {
|
||||
request.addEventListener('success', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in success event listener on request');
|
||||
|
||||
fire_success_event_test((t, db, tx, request) => {
|
||||
request.addEventListener('success', {
|
||||
get handleEvent() {
|
||||
throw new Error();
|
||||
},
|
||||
});
|
||||
}, 'Exception in success event listener ("handleEvent" lookup) on request');
|
||||
|
||||
fire_success_event_test((t, db, tx, request) => {
|
||||
request.addEventListener('success', {
|
||||
handleEvent: null,
|
||||
});
|
||||
}, 'Exception in success event listener (non-callable "handleEvent") on request');
|
||||
|
||||
fire_success_event_test((t, db, tx, request) => {
|
||||
request.addEventListener(
|
||||
'success',
|
||||
() => {
|
||||
// no-op
|
||||
});
|
||||
request.addEventListener('success', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in second success event listener on request');
|
||||
|
||||
fire_success_event_test((t, db, tx, request) => {
|
||||
let second_listener_called = false;
|
||||
request.addEventListener('success', () => {
|
||||
throw Error();
|
||||
});
|
||||
request.addEventListener('success', t.step_func(() => {
|
||||
second_listener_called = true;
|
||||
assert_true(
|
||||
is_transaction_active(tx, 's'),
|
||||
'Transaction should be active until dispatch completes');
|
||||
}));
|
||||
tx.addEventListener('abort', t.step_func(() => {
|
||||
assert_true(second_listener_called);
|
||||
}));
|
||||
}, 'Exception in first success event listener, tx active in second');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>Fire upgradeneeded event - Exception thrown</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/fire-upgradeneeded-event-exception.any.js"></script>
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
// META: global=window,worker
|
||||
// META: title=Fire upgradeneeded event - Exception thrown
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: "https://w3c.github.io/IndexedDB/#fire-a-version-change-event"
|
||||
|
||||
setup({allow_uncaught_exception: true});
|
||||
|
||||
// Opens a fresh database at version 1 so that upgradeneeded fires. |func|
// installs the (usually throwing) upgradeneeded listeners under test; the
// open must then fail, with the upgrade transaction aborted (AbortError).
function fire_upgradeneeded_event_test(func, description) {
  async_test(t => {
    const db_name = self.location + '-' + t.name;

    const delete_request = indexedDB.deleteDatabase(db_name);
    delete_request.onerror = t.unreached_func('deleteDatabase should succeed');

    const open_request = indexedDB.open(db_name, 1);
    open_request.onsuccess = t.unreached_func('open should fail');

    // Captured during upgradeneeded so the abort reason can be checked later.
    let upgrade_transaction;
    open_request.addEventListener('upgradeneeded', () => {
      upgrade_transaction = open_request.transaction;
    });

    func(t, open_request);

    open_request.addEventListener('error', t.step_func_done(() => {
      assert_equals(upgrade_transaction.error.name, 'AbortError');
    }));
  }, description);
}
|
||||
|
||||
fire_upgradeneeded_event_test((t, open) => {
|
||||
open.onupgradeneeded = () => {
|
||||
throw Error();
|
||||
};
|
||||
}, 'Exception in upgradeneeded handler');
|
||||
|
||||
fire_upgradeneeded_event_test((t, open) => {
|
||||
open.addEventListener('upgradeneeded', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in upgradeneeded listener');
|
||||
|
||||
fire_upgradeneeded_event_test((t, open) => {
|
||||
open.addEventListener('upgradeneeded', {
|
||||
get handleEvent() {
|
||||
throw new Error();
|
||||
},
|
||||
});
|
||||
}, 'Exception in upgradeneeded "handleEvent" lookup');
|
||||
|
||||
fire_upgradeneeded_event_test((t, open) => {
|
||||
open.addEventListener('upgradeneeded', {
|
||||
get handleEvent() {
|
||||
return 10;
|
||||
},
|
||||
});
|
||||
}, 'Exception in upgradeneeded due to non-callable "handleEvent"');
|
||||
|
||||
fire_upgradeneeded_event_test((t, open) => {
|
||||
open.addEventListener(
|
||||
'upgradeneeded',
|
||||
() => {
|
||||
// No-op.
|
||||
});
|
||||
open.addEventListener('upgradeneeded', () => {
|
||||
throw Error();
|
||||
});
|
||||
}, 'Exception in second upgradeneeded listener');
|
||||
|
||||
fire_upgradeneeded_event_test((t, open) => {
|
||||
let second_listener_called = false;
|
||||
open.addEventListener('upgradeneeded', () => {
|
||||
open.result.createObjectStore('s');
|
||||
throw Error();
|
||||
});
|
||||
open.addEventListener('upgradeneeded', t.step_func(() => {
|
||||
second_listener_called = true;
|
||||
assert_true(
|
||||
is_transaction_active(open.transaction, 's'),
|
||||
'Transaction should be active until dispatch completes');
|
||||
}));
|
||||
open.addEventListener('error', t.step_func(() => {
|
||||
assert_true(second_listener_called);
|
||||
}));
|
||||
}, 'Exception in first upgradeneeded listener, tx active in second');
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Binary keys written to a database and read back</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idb-binary-key-roundtrip.any.js"></script>
|
||||
|
|
@ -0,0 +1,112 @@
|
|||
// META: title=IndexedDB: Binary keys written to a database and read back
|
||||
// META: global=window,worker
|
||||
// META: timeout=long
|
||||
// META: script=resources/support.js
|
||||
|
||||
'use strict';
|
||||
|
||||
const sample = [0x44, 0x33, 0x22, 0x11, 0xFF, 0xEE, 0xDD, 0xCC];
|
||||
const buffer = new Uint8Array(sample).buffer;
|
||||
|
||||
// Asserts that |a| is a valid IndexedDB key: indexedDB.cmp() must accept
// it, and a key always compares equal (0) to itself.
function assert_key_valid(a, message) {
  const self_comparison = indexedDB.cmp(a, a);
  assert_equals(self_comparison, 0, message);
}
|
||||
|
||||
// Asserts that two ArrayBuffers hold identical bytes by comparing their
// contents element-wise through Uint8Array views.
function assert_buffer_equals(a, b, message) {
  const bytes_a = Array.from(new Uint8Array(a));
  const bytes_b = Array.from(new Uint8Array(b));
  assert_array_equals(bytes_a, bytes_b, message);
}
|
||||
|
||||
// Verifies that a JavaScript value round-trips through IndexedDB as a key.
|
||||
// Writes 'value' under |key|, reads it back, then iterates a cursor to
// confirm the stored key is surfaced as an ArrayBuffer whose bytes equal
// |key_buffer|. Calls t.done() once every check has passed.
function check_key_roundtrip_and_done(t, db, key, key_buffer) {
  const tx = db.transaction('store', 'readwrite');
  const object_store = tx.objectStore('store');

  // put() must accept the binary key.
  const put_request = object_store.put('value', key);
  put_request.onerror = t.unreached_func('put should succeed');

  // get() must find the record under the same key.
  const get_request = object_store.get(key);
  get_request.onerror = t.unreached_func('get should succeed');
  get_request.onsuccess = t.step_func(() => {
    assert_equals(
        get_request.result, 'value',
        'get should retrieve the value given to put');

    // A cursor over the store must surface the key as an ArrayBuffer.
    const cursor_request = object_store.openCursor();
    cursor_request.onerror = t.unreached_func('openCursor should succeed');
    cursor_request.onsuccess = t.step_func(() => {
      const cursor = cursor_request.result;
      assert_not_equals(cursor, null, 'cursor should be present');

      const retrieved_key = cursor.key;
      assert_true(
          retrieved_key instanceof ArrayBuffer,
          'IndexedDB binary keys should be returned in ArrayBuffer instances');
      assert_key_equals(
          retrieved_key, key,
          'The key returned by IndexedDB should equal the key given to put()');
      assert_buffer_equals(
          retrieved_key, key_buffer,
          'The ArrayBuffer returned by IndexedDB should equal the buffer ' +
              'backing the key given to put()');

      t.done();
    });
  });
}
|
||||
|
||||
// Checks that IndexedDB handles the given view type for binary keys correctly.
|
||||
// Exercises binary-key round-tripping for one typed-array view |type|
// (e.g. 'Uint8Array') constructed over the shared sample buffer.
function view_type_test(type) {
  const setup_database = (t, db) => {
    db.createObjectStore('store');
  };

  const run_test = (t, db) => {
    const view = new self[type](buffer);
    assert_key_valid(view, `${type} should be usable as an IndexedDB key`);
    assert_key_equals(
        view, buffer,
        'Binary keys with the same data but different view types should be ' +
            ' equal');
    check_key_roundtrip_and_done(t, db, view, buffer);
  };

  indexeddb_test(
      setup_database, run_test,
      `Binary keys can be supplied using the view type ${type}`);
}
|
||||
|
||||
['Uint8Array', 'Uint8ClampedArray', 'Int8Array', 'Uint16Array', 'Int16Array',
|
||||
'Uint32Array', 'Int32Array', 'Float16Array', 'Float32Array', 'Float64Array']
|
||||
.forEach((type) => {
|
||||
view_type_test(type);
|
||||
});
|
||||
|
||||
// Checks that IndexedDB accepts |value| as a binary key and round-trips it
// through the database as |value_buffer|.
|
||||
// Verifies that |value| (an ArrayBuffer or a view over one) is accepted as
// a binary key and round-trips through the database as |value_buffer|.
function value_test(value_description, value, value_buffer) {
  const setup_database = (t, db) => {
    db.createObjectStore('store');
  };

  const run_test = (t, db) => {
    assert_key_valid(
        value, value_description + ' should be usable as an valid key');
    check_key_roundtrip_and_done(t, db, value, value_buffer);
  };

  indexeddb_test(
      setup_database, run_test,
      `${value_description} can be used to supply a binary key`);
}
|
||||
|
||||
value_test('ArrayBuffer', buffer, buffer);
|
||||
value_test('DataView', new DataView(buffer), buffer);
|
||||
value_test(
|
||||
'DataView with explicit offset', new DataView(buffer, 3),
|
||||
new Uint8Array([0x11, 0xFF, 0xEE, 0xDD, 0xCC]).buffer);
|
||||
value_test(
|
||||
'DataView with explicit offset and length', new DataView(buffer, 3, 4),
|
||||
new Uint8Array([0x11, 0xFF, 0xEE, 0xDD]).buffer);
|
||||
value_test(
|
||||
'Uint8Array with explicit offset', new Uint8Array(buffer, 3),
|
||||
new Uint8Array([0x11, 0xFF, 0xEE, 0xDD, 0xCC]).buffer);
|
||||
value_test(
|
||||
'Uint8Array with explicit offset and length', new Uint8Array(buffer, 3, 4),
|
||||
new Uint8Array([0x11, 0xFF, 0xEE, 0xDD]).buffer);
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IDBCursor.advance() - invalid</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbcursor-advance-invalid.any.js"></script>
|
||||
|
|
@ -0,0 +1,196 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IDBCursor.advance() - invalid
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec:
|
||||
// https://w3c.github.io/IndexedDB/#widl-IDBCursor-advance-void-unsigned-long-count
|
||||
|
||||
'use strict';
|
||||
|
||||
// Upgrade step: creates the 'test' object store with an 'index' index over
// the value itself, then seeds two records with out-of-line keys.
function upgrade_func(t, db, tx) {
  const store = db.createObjectStore('test');
  store.createIndex('index', '');

  store.add('data', 1);
  store.add('data2', 2);
}
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 2, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
let cursor = e.target.result;
|
||||
|
||||
cursor.advance(1);
|
||||
|
||||
// Second try
|
||||
assert_throws_dom('InvalidStateError', function() {
|
||||
cursor.advance(1);
|
||||
}, 'second advance');
|
||||
|
||||
assert_throws_dom('InvalidStateError', function() {
|
||||
cursor.advance(3);
|
||||
}, 'third advance');
|
||||
|
||||
count++;
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - invalid - attempt to call advance twice');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(self);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance({});
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance([]);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance('');
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance('1 2');
|
||||
});
|
||||
|
||||
t.done();
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - invalid - pass something other than number');
|
||||
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(null);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(undefined);
|
||||
});
|
||||
|
||||
let mylet = null;
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(mylet);
|
||||
});
|
||||
|
||||
t.done();
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - invalid - pass null/undefined');
|
||||
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance();
|
||||
});
|
||||
|
||||
t.done();
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - invalid - missing argument');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(-1);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(NaN);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(0);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(-0);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(Infinity);
|
||||
});
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(-Infinity);
|
||||
});
|
||||
|
||||
let mylet = -999999;
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(mylet);
|
||||
});
|
||||
|
||||
t.done();
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - invalid - pass negative numbers');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
if (!cursor) {
|
||||
assert_equals(count, 2, 'count runs');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
|
||||
assert_throws_js(TypeError, function() {
|
||||
cursor.advance(0);
|
||||
});
|
||||
|
||||
cursor.advance(1);
|
||||
count++;
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - invalid - got value not set on exception');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IDBCursor.advance()</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbcursor-advance.any.js"></script>
|
||||
|
|
@ -0,0 +1,232 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IDBCursor.advance()
|
||||
// META: script=resources/support.js
|
||||
|
||||
'use strict';
|
||||
|
||||
// Upgrade step: creates the 'test' object store with an 'index' index over
// the value itself, then seeds five records with out-of-line keys.
function upgrade_func(t, db, tx) {
  const store = db.createObjectStore('test');
  store.createIndex('index', '');

  const records = [
    ['cupcake', 5],
    ['pancake', 3],
    ['pie', 1],
    ['pie', 4],
    ['taco', 2],
  ];
  for (const [value, key] of records)
    store.add(value, key);
}
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 3, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
let cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'cupcake');
|
||||
assert_equals(cursor.primaryKey, 5);
|
||||
break;
|
||||
|
||||
case 1:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 1);
|
||||
break;
|
||||
|
||||
case 2:
|
||||
assert_equals(cursor.value, 'taco');
|
||||
assert_equals(cursor.primaryKey, 2);
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
cursor.advance(2);
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - advances');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor(null, 'prev');
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 3, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
let cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'taco');
|
||||
assert_equals(cursor.primaryKey, 2);
|
||||
break;
|
||||
|
||||
case 1:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 1);
|
||||
break;
|
||||
|
||||
case 2:
|
||||
assert_equals(cursor.value, 'cupcake');
|
||||
assert_equals(cursor.primaryKey, 5);
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
cursor.advance(2);
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - advances backwards');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 1, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
let cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'cupcake');
|
||||
assert_equals(cursor.primaryKey, 5);
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
cursor.advance(100000);
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - skip far forward');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor(IDBKeyRange.lowerBound('cupcake', true));
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 2, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
let cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'pancake');
|
||||
assert_equals(cursor.primaryKey, 3);
|
||||
break;
|
||||
|
||||
case 1:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 4);
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
cursor.advance(2);
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - within range');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor('pancake');
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 1, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
let cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'pancake');
|
||||
assert_equals(cursor.primaryKey, 3);
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
cursor.advance(1);
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - within single key range');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
let rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor('pie');
|
||||
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 2, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
let cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 1);
|
||||
break;
|
||||
|
||||
case 1:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 4);
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
cursor.advance(1);
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.advance() - within single key range, with several results');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IDBCursor.continue()</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbcursor-continue.any.js"></script>
|
||||
|
|
@ -0,0 +1,233 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IDBCursor.continue()
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbcursor-continue
|
||||
|
||||
'use strict';
|
||||
|
||||
const store = [
|
||||
{ value: 'cupcake', key: 5 },
|
||||
{ value: 'pancake', key: 3 },
|
||||
{ value: 'pie', key: 1 },
|
||||
{ value: 'pie', key: 4 },
|
||||
{ value: 'taco', key: 2 }
|
||||
];
|
||||
|
||||
function upgrade_func(t, db, tx) {
|
||||
let os;
|
||||
let i;
|
||||
os = db.createObjectStore('test');
|
||||
os.createIndex('index', '');
|
||||
|
||||
for (i = 0; i < store.length; i++)
|
||||
os.add(store[i].value, store[i].key);
|
||||
}
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
const rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func((e) => {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 5, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
const cursor = e.target.result;
|
||||
|
||||
assert_equals(cursor.value, store[count].value);
|
||||
assert_equals(cursor.primaryKey, store[count].key);
|
||||
|
||||
cursor.continue();
|
||||
|
||||
count++;
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.continue() - continues');
|
||||
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
const rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func((e) => {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 3, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
const cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'cupcake');
|
||||
assert_equals(cursor.primaryKey, 5);
|
||||
cursor.continue('pie');
|
||||
break;
|
||||
|
||||
case 1:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 1);
|
||||
cursor.continue('taco');
|
||||
break;
|
||||
|
||||
case 2:
|
||||
assert_equals(cursor.value, 'taco');
|
||||
assert_equals(cursor.primaryKey, 2);
|
||||
cursor.continue();
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error');
|
||||
}, 'IDBCursor.continue() - with given key');
|
||||
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
const rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor();
|
||||
|
||||
rq.onsuccess = t.step_func((e) => {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 1, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
const cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'cupcake');
|
||||
assert_equals(cursor.primaryKey, 5);
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
cursor.continue([]); // Arrays are always bigger than strings
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error2');
|
||||
}, 'IDBCursor.continue() - skip far forward');
|
||||
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
const rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor(IDBKeyRange.lowerBound('cupcake', true));
|
||||
|
||||
rq.onsuccess = t.step_func((e) => {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 2, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
const cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'pancake');
|
||||
assert_equals(cursor.primaryKey, 3);
|
||||
cursor.continue('pie');
|
||||
break;
|
||||
|
||||
case 1:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 1);
|
||||
cursor.continue('zzz');
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error1');
|
||||
}, 'IDBCursor.continue() - within range');
|
||||
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
const rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor('pancake');
|
||||
|
||||
rq.onsuccess = t.step_func((e) => {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 1, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
const cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'pancake');
|
||||
assert_equals(cursor.primaryKey, 3);
|
||||
cursor.continue('pie');
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error1');
|
||||
}, 'IDBCursor.continue() - within single key range');
|
||||
|
||||
indexeddb_test(upgrade_func, function(t, db) {
|
||||
let count = 0;
|
||||
const rq = db.transaction('test', 'readonly')
|
||||
.objectStore('test')
|
||||
.index('index')
|
||||
.openCursor('pie');
|
||||
|
||||
rq.onsuccess = t.step_func((e) => {
|
||||
if (!e.target.result) {
|
||||
assert_equals(count, 2, 'count');
|
||||
t.done();
|
||||
return;
|
||||
}
|
||||
const cursor = e.target.result;
|
||||
|
||||
switch (count) {
|
||||
case 0:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 1);
|
||||
cursor.continue();
|
||||
break;
|
||||
|
||||
case 1:
|
||||
assert_equals(cursor.value, 'pie');
|
||||
assert_equals(cursor.primaryKey, 4);
|
||||
cursor.continue();
|
||||
break;
|
||||
|
||||
default:
|
||||
assert_unreached('Unexpected count: ' + count);
|
||||
}
|
||||
|
||||
count++;
|
||||
});
|
||||
rq.onerror = t.unreached_func('unexpected error1');
|
||||
}, 'IDBCursor.continue() - within single key range, with several results');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: The source of requests made against cursors</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbcursor-request-source.any.js"></script>
|
||||
|
|
@ -0,0 +1,116 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IndexedDB: The source of requests made against cursors
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbrequest-source
|
||||
|
||||
'use strict';
|
||||
|
||||
// Setup each test by populating an object store with an index for the cursor to
|
||||
// iterate and manipulate.
|
||||
function initializeDatabase(db) {
|
||||
const store = db.createObjectStore('store', {autoIncrement: true});
|
||||
store.createIndex('index', /*keypath=*/ 'value');
|
||||
store.put({value: 'z'});
|
||||
store.put({value: 'y'});
|
||||
store.put({value: 'x'});
|
||||
store.put({value: 'w'});
|
||||
}
|
||||
|
||||
function isIndex(cursorSourceType) {
|
||||
return cursorSourceType === 'IDBIndex';
|
||||
}
|
||||
|
||||
// Return the object store or index, depending on the test's `cursorSourceType`.
|
||||
function getCursorSource(transaction, cursorSourceType) {
|
||||
let cursorSource = transaction.objectStore('store');
|
||||
if (isIndex(cursorSourceType)) {
|
||||
cursorSource = cursorSource.index('index');
|
||||
}
|
||||
return cursorSource;
|
||||
}
|
||||
|
||||
// Verify the request source after calling delete() or update() on the cursor.
|
||||
function cursor_request_source_test(
|
||||
cursorSourceType, createRequestFunctionName, createRequestFunctionArgs) {
|
||||
indexeddb_test(
|
||||
(t, db) => initializeDatabase(db),
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
const cursorSource = getCursorSource(tx, cursorSourceType);
|
||||
|
||||
// Open the cursor.
|
||||
const openCursorRequest = cursorSource.openCursor();
|
||||
openCursorRequest.onerror =
|
||||
t.unreached_func('The cursor must not fail to open.');
|
||||
|
||||
openCursorRequest.onsuccess = t.step_func(e => {
|
||||
// Use the cursor to create a new request.
|
||||
const cursor = e.target.result;
|
||||
const request =
|
||||
cursor[createRequestFunctionName](...createRequestFunctionArgs);
|
||||
assert_equals(
|
||||
request.source, cursor,
|
||||
`The request's source must be the cursor itself.`);
|
||||
t.done();
|
||||
});
|
||||
},
|
||||
`The source of the request from ${cursorSourceType}::${
|
||||
createRequestFunctionName}() is the cursor itself`);
|
||||
}
|
||||
|
||||
// Verify the request source after calling openCursor() or openKeyCursor() and
|
||||
// then using the cursor to iterate.
|
||||
function open_cursor_request_source_test(
|
||||
cursorSourceType, openCursorFunctionName) {
|
||||
indexeddb_test(
|
||||
(t, db) => initializeDatabase(db),
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readonly');
|
||||
const cursorSource = getCursorSource(tx, cursorSourceType);
|
||||
|
||||
// Open the cursor.
|
||||
const openCursorRequest = cursorSource[openCursorFunctionName]();
|
||||
openCursorRequest.onerror =
|
||||
t.unreached_func('The cursor must not fail to open or iterate.');
|
||||
|
||||
assert_equals(
|
||||
openCursorRequest.source, cursorSource,
|
||||
'The request source must be the opener of the cursor.');
|
||||
|
||||
// Verify the cursor's `request.source` after iterating with
|
||||
// `advance()`, `continue()`, and `continuePrimaryKey()`.
|
||||
let iterationCount = 0;
|
||||
openCursorRequest.onsuccess = t.step_func(e => {
|
||||
assert_equals(
|
||||
openCursorRequest.source, cursorSource,
|
||||
'The request source must be the opener of the cursor after iterating.');
|
||||
|
||||
const cursor = e.target.result;
|
||||
++iterationCount;
|
||||
|
||||
if (iterationCount == 1) {
|
||||
cursor.advance(1);
|
||||
} else if (iterationCount == 2) {
|
||||
cursor.continue();
|
||||
} else if (iterationCount == 3 && isIndex(cursorSourceType)) {
|
||||
cursor.continuePrimaryKey('z', 0);
|
||||
} else {
|
||||
t.done();
|
||||
}
|
||||
});
|
||||
},
|
||||
`${cursorSourceType}::${
|
||||
openCursorFunctionName}'s request source must be the ${
|
||||
cursorSourceType} instance that opened the cursor`);
|
||||
}
|
||||
|
||||
open_cursor_request_source_test('IDBObjectStore', 'openCursor');
|
||||
open_cursor_request_source_test('IDBObjectStore', 'openKeyCursor');
|
||||
open_cursor_request_source_test('IDBIndex', 'openCursor');
|
||||
open_cursor_request_source_test('IDBIndex', 'openKeyCursor');
|
||||
|
||||
cursor_request_source_test('IDBObjectStore', 'update', /*args=*/[0]);
|
||||
cursor_request_source_test('IDBObjectStore', 'delete', /*args=*/[]);
|
||||
cursor_request_source_test('IDBIndex', 'update', /*args=*/[0]);
|
||||
cursor_request_source_test('IDBIndex', 'delete', /*args=*/[]);
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: IDBIndex query method Ordering</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbindex-query-exception-order.any.js"></script>
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
// META: title=IndexedDB: IDBIndex query method Ordering
|
||||
// META: global=window,worker
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbindex-get
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbindex-getall
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbindex-getallkeys
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbindex-count
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbindex-opencursor
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbindex-openkeycursor
|
||||
|
||||
'use strict';
|
||||
|
||||
[ 'get',
|
||||
'getAll',
|
||||
'getAllKeys',
|
||||
'count',
|
||||
'openCursor',
|
||||
'openKeyCursor'
|
||||
].forEach(method => {
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
const store = db.createObjectStore('s');
|
||||
const store2 = db.createObjectStore('s2');
|
||||
const index = store2.createIndex('i', 'keyPath');
|
||||
store2.deleteIndex('i');
|
||||
|
||||
setTimeout(t.step_func(() => {
|
||||
assert_throws_dom(
|
||||
'InvalidStateError', () => { index[method]('key'); },
|
||||
'"has been deleted" check (InvalidStateError) should precede ' +
|
||||
'"not active" check (TransactionInactiveError)');
|
||||
t.done();
|
||||
}), 0);
|
||||
},
|
||||
(t, db) => {},
|
||||
`IDBIndex.${method} exception order: ` +
|
||||
'InvalidStateError vs. TransactionInactiveError'
|
||||
);
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
const store = db.createObjectStore('s');
|
||||
const index = store.createIndex('i', 'keyPath');
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('s', 'readonly');
|
||||
const store = tx.objectStore('s');
|
||||
const index = store.index('i');
|
||||
|
||||
setTimeout(t.step_func(() => {
|
||||
assert_throws_dom(
|
||||
'TransactionInactiveError', () => { index[method]({}); },
|
||||
'"not active" check (TransactionInactiveError) should precede ' +
|
||||
'query check (DataError)');
|
||||
t.done();
|
||||
}), 0);
|
||||
},
|
||||
`IDBIndex.${method} exception order: ` +
|
||||
'TransactionInactiveError vs. DataError'
|
||||
);
|
||||
});
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: The source of requests made against indexes</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbindex-request-source.any.js"></script>
|
||||
|
|
@ -0,0 +1,32 @@
|
|||
// META: title=IndexedDB: The source of requests made against indexes
|
||||
// META: global=window,worker
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbrequest-source
|
||||
|
||||
'use strict';
|
||||
|
||||
[
|
||||
index => index.get(0),
|
||||
index => index.getKey(0),
|
||||
index => index.getAll(),
|
||||
index => index.getAllKeys(),
|
||||
index => index.count(),
|
||||
index => index.openCursor(),
|
||||
index => index.openKeyCursor()
|
||||
].forEach(func => indexeddb_test(
|
||||
(t, db) => {
|
||||
const store =
|
||||
db.createObjectStore('store', {autoIncrement: true});
|
||||
store.createIndex('index', 'kp');
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
const index = tx.objectStore('store').index('index');
|
||||
assert_equals(
|
||||
func(index).source, index,
|
||||
`${func}.source should be the index itself`);
|
||||
t.done();
|
||||
},
|
||||
`The source of the request from ${func} is the index itself`
|
||||
));
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Test IDBIndex.getAll</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/nested-cloning-common.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<script src="resources/support-get-all.js"></script>
|
||||
<script src="resources/support-promises.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbindex_getAll.any.js"></script>
|
||||
|
|
@ -0,0 +1,90 @@
|
|||
// META: title=IndexedDB: Test IDBIndex.getAll
|
||||
// META: global=window,worker
|
||||
// META: script=resources/nested-cloning-common.js
|
||||
// META: script=resources/support.js
|
||||
// META: script=resources/support-get-all.js
|
||||
// META: script=resources/support-promises.js
|
||||
// META: timeout=long
|
||||
|
||||
'use_strict';
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {query: 'C'}, 'Single item get');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'empty', /*options=*/ undefined, 'Empty object store');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ undefined, 'Get all');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'generated', /*options=*/ undefined,
|
||||
'Get all with generated keys');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'large-values', /*options=*/ undefined,
|
||||
'Get all with large values');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 10}, 'maxCount=10');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('G', 'M')}, 'Get bound range');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('G', 'M'), count: 3},
|
||||
'Get bound range with maxCount');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('G', 'K', /*lowerOpen=*/ false, /*upperOpen=*/ true)
|
||||
},
|
||||
'Get upper excluded');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('G', 'K', /*lowerOpen=*/ true, /*upperOpen=*/ false)
|
||||
},
|
||||
'Get lower excluded');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'generated',
|
||||
/*options=*/ {query: IDBKeyRange.bound(4, 15), count: 3},
|
||||
'Get bound range (generated) with maxCount');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {query: 'Doesn\'t exist'},
|
||||
'Non existent key');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 0}, 'maxCount=0');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 4294967295},
|
||||
'Max value count');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.upperBound('0')},
|
||||
'Query with empty range where first key < upperBound');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.lowerBound('ZZ')},
|
||||
'Query with empty range where lowerBound < last key');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line-not-unique', /*options=*/ {query: 'first'},
|
||||
'Retrieve multiEntry key');
|
||||
|
||||
index_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line-multi', /*options=*/ {query: 'vowel'},
|
||||
'Retrieve one key multiple values');
|
||||
|
||||
get_all_with_invalid_keys_test(
|
||||
'getAll', /*storeName=*/ 'out-of-line', /*indexName=*/ 'test_idx',
|
||||
/*shouldUseDictionary=*/ false, 'Get all values with invalid query keys');
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Test IDBIndex.getAllKeys.</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/nested-cloning-common.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<script src="resources/support-get-all.js"></script>
|
||||
<script src="resources/support-promises.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbindex_getAllKeys.any.js"></script>
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
// META: title=IndexedDB: Test IDBIndex.getAllKeys.
|
||||
// META: global=window,worker
|
||||
// META: script=resources/nested-cloning-common.js
|
||||
// META: script=resources/support.js
|
||||
// META: script=resources/support-get-all.js
|
||||
// META: script=resources/support-promises.js
|
||||
// META: timeout=long
|
||||
|
||||
'use_strict';
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {query: 'C'}, 'Single item get');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'empty', /*options=*/ undefined, 'Empty object store');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ undefined, 'Get all keys');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'generated', /*options=*/ undefined,
|
||||
'Get all generated keys');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 10}, 'maxCount=10');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('G', 'M')}, 'Get bound range');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('G', 'M'), count: 3},
|
||||
'Get bound range with maxCount');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('G', 'K', /*lowerOpen=*/ false, /*upperOpen=*/ true)
|
||||
},
|
||||
'Get upper excluded');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('G', 'K', /*lowerOpen=*/ true, /*upperOpen=*/ false)
|
||||
},
|
||||
'Get lower excluded');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'generated',
|
||||
/*options=*/ {query: IDBKeyRange.bound(4, 15), count: 3},
|
||||
'Get bound range (generated) with maxCount');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: 'Doesn\'t exist'}, 'Non existent key');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: 0}, 'maxCount=0');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: 4294967295}, 'Max value count');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.upperBound('0')},
|
||||
'Query with empty range where first key < upperBound');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.lowerBound('ZZ')},
|
||||
'Query with empty range where lowerBound < last key');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line-not-unique', /*options=*/ {query: 'first'},
|
||||
'Retrieve multiEntry key');
|
||||
|
||||
index_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line-multi',
|
||||
/*options=*/ {query: 'vowel'}, 'Retrieve one key multiple values');
|
||||
|
||||
get_all_with_invalid_keys_test(
|
||||
'getAllKeys', /*storeName=*/ 'out-of-line', /*indexName=*/ 'test_idx',
|
||||
/*shouldUseDictionary=*/ false, 'Get all keys with invalid query keys');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: IDBObjectStore add()/put() Exception Ordering</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore-add-put-exception-order.any.js"></script>
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IndexedDB: IDBObjectStore add()/put() Exception Ordering
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-put
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-add
|
||||
|
||||
'use strict';
|
||||
|
||||
['put', 'add'].forEach(method => {
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
const store = db.createObjectStore('s');
|
||||
const store2 = db.createObjectStore('s2');
|
||||
|
||||
db.deleteObjectStore('s2');
|
||||
|
||||
setTimeout(
|
||||
t.step_func(() => {
|
||||
assert_throws_dom(
|
||||
'InvalidStateError',
|
||||
() => {
|
||||
store2[method]('key', 'value');
|
||||
},
|
||||
'"has been deleted" check (InvalidStateError) should precede ' +
|
||||
'"not active" check (TransactionInactiveError)');
|
||||
t.done();
|
||||
}),
|
||||
0);
|
||||
},
|
||||
(t, db) => {},
|
||||
`IDBObjectStore.${method} exception order: ` +
|
||||
'InvalidStateError vs. TransactionInactiveError');
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
const store = db.createObjectStore('s');
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('s', 'readonly');
|
||||
const store = tx.objectStore('s');
|
||||
|
||||
setTimeout(
|
||||
t.step_func(() => {
|
||||
assert_throws_dom(
|
||||
'TransactionInactiveError',
|
||||
() => {
|
||||
store[method]('key', 'value');
|
||||
},
|
||||
'"not active" check (TransactionInactiveError) should precede ' +
|
||||
'"read only" check (ReadOnlyError)');
|
||||
t.done();
|
||||
}),
|
||||
0);
|
||||
},
|
||||
|
||||
`IDBObjectStore.${method} exception order: ` +
|
||||
'TransactionInactiveError vs. ReadOnlyError');
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
const store = db.createObjectStore('s');
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('s', 'readonly');
|
||||
const store = tx.objectStore('s');
|
||||
|
||||
assert_throws_dom(
|
||||
'ReadOnlyError',
|
||||
() => {
|
||||
store[method]({}, 'value');
|
||||
},
|
||||
'"read only" check (ReadOnlyError) should precede ' +
|
||||
'key/data check (DataError)');
|
||||
|
||||
t.done();
|
||||
},
|
||||
|
||||
`IDBObjectStore.${method} exception order: ` +
|
||||
'ReadOnlyError vs. DataError');
|
||||
});
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: IDBObjectStore query method Ordering</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore-query-exception-order.any.js"></script>
|
||||
|
|
@ -0,0 +1,64 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IndexedDB: IDBObjectStore query method Ordering
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-get
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-getall
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-getallkeys
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-count
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-opencursor
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbobjectstore-openkeycursor
|
||||
|
||||
'use strict';
|
||||
|
||||
['get', 'getAll', 'getAllKeys', 'count', 'openCursor', 'openKeyCursor'].forEach(
|
||||
method => {
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
const store = db.createObjectStore('s');
|
||||
const store2 = db.createObjectStore('s2');
|
||||
|
||||
db.deleteObjectStore('s2');
|
||||
|
||||
setTimeout(
|
||||
t.step_func(() => {
|
||||
assert_throws_dom(
|
||||
'InvalidStateError',
|
||||
() => {
|
||||
store2[method]('key');
|
||||
},
|
||||
'"has been deleted" check (InvalidStateError) should precede ' +
|
||||
'"not active" check (TransactionInactiveError)');
|
||||
|
||||
t.done();
|
||||
}),
|
||||
0);
|
||||
},
|
||||
(t, db) => {},
|
||||
`IDBObjectStore.${method} exception order: ` +
|
||||
'InvalidStateError vs. TransactionInactiveError');
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
const store = db.createObjectStore('s');
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('s', 'readonly');
|
||||
const store = tx.objectStore('s');
|
||||
|
||||
setTimeout(
|
||||
t.step_func(() => {
|
||||
assert_throws_dom(
|
||||
'TransactionInactiveError',
|
||||
() => {
|
||||
store[method]({});
|
||||
},
|
||||
'"not active" check (TransactionInactiveError) should precede ' +
|
||||
'query check (DataError)');
|
||||
t.done();
|
||||
}),
|
||||
0);
|
||||
},
|
||||
`IDBObjectStore.${method} exception order: ` +
|
||||
'TransactionInactiveError vs. DataError');
|
||||
});
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: The source of requests made against object stores</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore-request-source.any.js"></script>
|
||||
|
|
@ -0,0 +1,39 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IndexedDB: The source of requests made against object stores
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#dom-idbrequest-source
|
||||
|
||||
'use strict';
|
||||
|
||||
[
|
||||
store => store.put(0),
|
||||
store => store.add(0),
|
||||
store => store.delete(0),
|
||||
store => store.clear(),
|
||||
|
||||
store => store.get(0),
|
||||
store => store.getKey(0),
|
||||
store => store.getAll(),
|
||||
store => store.getAllKeys(),
|
||||
store => store.count(),
|
||||
|
||||
store => store.openCursor(),
|
||||
store => store.openKeyCursor()
|
||||
|
||||
].forEach(
|
||||
func => indexeddb_test(
|
||||
(t, db) => {
|
||||
db.createObjectStore('store', {autoIncrement: true});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
const store = tx.objectStore('store');
|
||||
|
||||
assert_equals(
|
||||
func(store).source, store,
|
||||
`${func}.source should be the object store itself`);
|
||||
t.done();
|
||||
},
|
||||
`The source of the request from ${
|
||||
func} is the object store itself`));
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Test IDBObjectStore.getAll with options dictionary.</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/nested-cloning-common.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<script src="resources/support-get-all.js"></script>
|
||||
<script src="resources/support-promises.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore_getAll-options.any.js"></script>
|
||||
|
|
@ -0,0 +1,119 @@
|
|||
// META: title=IndexedDB: Test IDBObjectStore.getAll with options dictionary.
|
||||
// META: global=window,worker
|
||||
// META: script=resources/nested-cloning-common.js
|
||||
// META: script=resources/support.js
|
||||
// META: script=resources/support-get-all.js
|
||||
// META: script=resources/support-promises.js
|
||||
// META: timeout=long
|
||||
|
||||
'use strict';
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {query: 'c'}, 'Single item get');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'generated', /*options=*/ {query: 3},
|
||||
'Single item get (generated key)');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'empty', /*options=*/ {}, 'getAll on empty object store');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {}, 'Get all values');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'large-values', /*options=*/ {},
|
||||
'Get all with large values');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 10}, 'Test maxCount');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm')}, 'Get bound range');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm'), count: 3},
|
||||
'Get bound range with maxCount');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ false, /*upperOpen=*/ true)
|
||||
},
|
||||
'Get upper excluded');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ true, /*upperOpen=*/ false)
|
||||
},
|
||||
'Get lower excluded');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'generated',
|
||||
/*options=*/ {query: IDBKeyRange.bound(4, 15), count: 3},
|
||||
'Get bound range (generated) with maxCount');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: 'Doesn\'t exist'}, 'Non existent key');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 0}, 'zero maxCount');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 4294967295},
|
||||
'Max value count');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.upperBound('0')},
|
||||
'Query with empty range where first key < upperBound');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.lowerBound('zz')},
|
||||
'Query with empty range where lowerBound < last key');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'next'},
|
||||
'Direction: next');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'prev'},
|
||||
'Direction: prev');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'nextunique'},
|
||||
'Direction: nextunique');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'prevunique'},
|
||||
'Direction: prevunique');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
direction: 'prev',
|
||||
query: IDBKeyRange.bound('b', 'x'),
|
||||
},
|
||||
'Direction and query');
|
||||
|
||||
object_store_get_all_values_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
direction: 'prev',
|
||||
query: IDBKeyRange.bound('b', 'x'),
|
||||
count: 4
|
||||
},
|
||||
'Direction, query and count');
|
||||
|
||||
get_all_with_options_and_count_test(
|
||||
'getAll', /*storeName=*/ 'out-of-line', /*indexName=*/ undefined,
|
||||
'Get all values with both options and count');
|
||||
|
||||
get_all_with_invalid_keys_test(
|
||||
'getAll', /*storeName=*/ 'out-of-line', /*indexName=*/ undefined,
|
||||
/*shouldUseDictionary=*/ true, 'Get all values with invalid query keys');
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Test IDBObjectStore.getAll</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/nested-cloning-common.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<script src="resources/support-get-all.js"></script>
|
||||
<script src="resources/support-promises.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore_getAll.any.js"></script>
|
||||
|
|
@ -0,0 +1,106 @@
|
|||
// META: title=IndexedDB: Test IDBObjectStore.getAll
|
||||
// META: global=window,worker
|
||||
// META: script=resources/nested-cloning-common.js
|
||||
// META: script=resources/support.js
|
||||
// META: script=resources/support-get-all.js
|
||||
// META: script=resources/support-promises.js
|
||||
// META: timeout=long
|
||||
|
||||
'use strict';
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {query: 'c'}, 'Single item get');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'generated', /*options=*/ {query: 3},
|
||||
'Single item get (generated key)');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'empty', /*options=*/ undefined,
|
||||
'getAll on empty object store');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ undefined, 'Get all values');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'large-values', /*options=*/ undefined,
|
||||
'Get all with large values');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 10}, 'Test maxCount');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm')}, 'Get bound range');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm'), count: 3},
|
||||
'Get bound range with maxCount');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ false, /*upperOpen=*/ true)
|
||||
},
|
||||
'Get upper excluded');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ true, /*upperOpen=*/ false)
|
||||
},
|
||||
'Get lower excluded');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'generated',
|
||||
/*options=*/ {query: IDBKeyRange.bound(4, 15), count: 3},
|
||||
'Get bound range (generated) with maxCount');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: 'Doesn\'t exist'}, 'Non existent key');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 0}, 'zero maxCount');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 4294967295},
|
||||
'Max value count');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.upperBound('0')},
|
||||
'Query with empty range where first key < upperBound');
|
||||
|
||||
object_store_get_all_values_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.lowerBound('zz')},
|
||||
'Query with empty range where lowerBound < last key');
|
||||
|
||||
object_store_get_all_test_setup(
|
||||
/*storeName=*/ 'out-of-line', (test, connection, expectedRecords) => {
|
||||
const transaction = connection.transaction('out-of-line', 'readonly');
|
||||
const store = transaction.objectStore('out-of-line');
|
||||
const request = store.getAll();
|
||||
transaction.commit();
|
||||
transaction.oncomplete =
|
||||
test.unreached_func('transaction completed before request succeeded');
|
||||
|
||||
request.onerror = test.unreached_func('getAll request should succeed');
|
||||
request.onsuccess = test.step_func(event => {
|
||||
// Convert the expected array of records to an array of IDB values.
|
||||
const expectedResults =
|
||||
calculateExpectedGetAllResults('getAll', expectedRecords);
|
||||
|
||||
const actualResults = event.target.result;
|
||||
verifyGetAllResults('getAll', actualResults, expectedResults);
|
||||
transaction.oncomplete = test.step_func_done();
|
||||
});
|
||||
}, 'Get all values with transaction.commit()');
|
||||
|
||||
get_all_with_invalid_keys_test(
|
||||
'getAll', /*storeName=*/ 'out-of-line', /*indexName=*/ undefined,
|
||||
/*shouldUseDictionary=*/ false, 'Get all values with invalid query keys');
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Test IDBObjectStore.getAllKeys with options dictionary.</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/nested-cloning-common.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<script src="resources/support-get-all.js"></script>
|
||||
<script src="resources/support-promises.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore_getAllKeys-options.any.js"></script>
|
||||
|
|
@ -0,0 +1,115 @@
|
|||
// META: title=IndexedDB: Test IDBObjectStore.getAllKeys with options dictionary.
|
||||
// META: global=window,worker
|
||||
// META: script=resources/nested-cloning-common.js
|
||||
// META: script=resources/support.js
|
||||
// META: script=resources/support-get-all.js
|
||||
// META: script=resources/support-promises.js
|
||||
// META: timeout=long
|
||||
|
||||
'use strict';
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {query: 'c'}, 'Single item get');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'generated', /*options=*/ {query: 3},
|
||||
'Single item get (generated key)');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'empty', /*options=*/ undefined,
|
||||
'getAllKeys on empty object store');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ undefined, 'Get all keys');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 10}, 'Test maxCount');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm')}, 'Get bound range');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm'), count: 3},
|
||||
'Get bound range with maxCount');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ false, /*upperOpen=*/ true)
|
||||
},
|
||||
'Get upper excluded');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ true, /*upperOpen=*/ false)
|
||||
},
|
||||
'Get lower excluded');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'generated',
|
||||
/*options=*/ {query: IDBKeyRange.bound(4, 15), count: 3},
|
||||
'Get bound range (generated) with maxCount');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: 'Doesn\'t exist'}, 'Non existent key');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {count: 0}, 'zero maxCount');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {count: 4294967295}, 'Max value count');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.upperBound('0')},
|
||||
'Query with empty range where first key < upperBound');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.lowerBound('zz')},
|
||||
'Query with empty range where lowerBound < last key');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'next'},
|
||||
'Direction: next');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'prev'},
|
||||
'Direction: prev');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'nextunique'},
|
||||
'Direction: nextunique');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {direction: 'prevunique'},
|
||||
'Direction: prevunique');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
direction: 'prev',
|
||||
query: IDBKeyRange.bound('b', 'x'),
|
||||
},
|
||||
'Direction and query');
|
||||
|
||||
object_store_get_all_keys_with_options_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
direction: 'prev',
|
||||
query: IDBKeyRange.bound('b', 'x'),
|
||||
count: 4
|
||||
},
|
||||
'Direction, query and count');
|
||||
|
||||
get_all_with_options_and_count_test(
|
||||
'getAllKeys', /*storeName=*/ 'out-of-line', /*indexName=*/ undefined,
|
||||
'Get all keys with both options and count');
|
||||
|
||||
get_all_with_invalid_keys_test(
|
||||
'getAllKeys', /*storeName=*/ 'out-of-line', /*indexName=*/ undefined,
|
||||
/*shouldUseDictionary=*/ true, 'Get all keys with invalid query keys');
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Test IDBObjectStore.getAllKeys</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/nested-cloning-common.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<script src="resources/support-get-all.js"></script>
|
||||
<script src="resources/support-promises.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore_getAllKeys.any.js"></script>
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
// META: title=IndexedDB: Test IDBObjectStore.getAllKeys
|
||||
// META: global=window,worker
|
||||
// META: script=resources/nested-cloning-common.js
|
||||
// META: script=resources/support.js
|
||||
// META: script=resources/support-get-all.js
|
||||
// META: script=resources/support-promises.js
|
||||
// META: timeout=long
|
||||
|
||||
'use strict';
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {query: 'c'}, 'Single item get');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'generated', /*options=*/ {query: 3},
|
||||
'Single item get (generated key)');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'empty', /*options=*/ undefined,
|
||||
'getAllKeys on empty object store');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ undefined, 'Get all keys');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {count: 10}, 'Test maxCount');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm')}, 'Get bound range');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.bound('g', 'm'), count: 3},
|
||||
'Get bound range with maxCount');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ false, /*upperOpen=*/ true)
|
||||
},
|
||||
'Get upper excluded');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line', /*options=*/ {
|
||||
query:
|
||||
IDBKeyRange.bound('g', 'k', /*lowerOpen=*/ true, /*upperOpen=*/ false)
|
||||
},
|
||||
'Get lower excluded');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'generated',
|
||||
/*options=*/ {query: IDBKeyRange.bound(4, 15), count: 3},
|
||||
'Get bound range (generated) with maxCount');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: 'Doesn\'t exist'}, 'Non existent key');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {count: 0}, 'zero maxCount');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {count: 4294967295}, 'Max value count');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.upperBound('0')},
|
||||
'Query with empty range where first key < upperBound');
|
||||
|
||||
object_store_get_all_keys_test(
|
||||
/*storeName=*/ 'out-of-line',
|
||||
/*options=*/ {query: IDBKeyRange.lowerBound('zz')},
|
||||
'Query with empty range where lowerBound < last key');
|
||||
|
||||
get_all_with_invalid_keys_test(
|
||||
'getAllKeys', /*storeName=*/ 'out-of-line', /*indexName=*/ undefined,
|
||||
/*shouldUseDictionary=*/ false, 'Get all keys with invalid query keys');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Test IDBObjectStore.getKey()</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbobjectstore_getKey.any.js"></script>
|
||||
|
|
@ -0,0 +1,89 @@
|
|||
// META: title=IndexedDB: Test IDBObjectStore.getKey()
|
||||
// META: script=resources/support.js
|
||||
|
||||
'use strict';
|
||||
|
||||
function getkey_test(func, name) {
|
||||
indexeddb_test(
|
||||
(t, db, tx) => {
|
||||
const basic = db.createObjectStore('basic');
|
||||
const key_path_store = db.createObjectStore('key path',
|
||||
{ keyPath: 'id' });
|
||||
const key_generator_store = db.createObjectStore('key generator',
|
||||
{ autoIncrement: true });
|
||||
const key_generator_and_path_store = db.createObjectStore(
|
||||
'key generator and key path',
|
||||
{ autoIncrement: true, key_path: 'id' });
|
||||
|
||||
for (let i = 1; i <= 10; ++i) {
|
||||
basic.put(`value: ${i}`, i);
|
||||
key_path_store.put({ id: i });
|
||||
key_generator_store.put(`value: ${i}`);
|
||||
key_generator_and_path_store.put({});
|
||||
}
|
||||
},
|
||||
func,
|
||||
name
|
||||
);
|
||||
}
|
||||
|
||||
getkey_test((t, db) => {
|
||||
const tx = db.transaction('basic', 'readonly');
|
||||
const store = tx.objectStore('basic');
|
||||
assert_throws_js(TypeError, () => store.getKey());
|
||||
assert_throws_dom('DataError', () => store.getKey(null));
|
||||
assert_throws_dom('DataError', () => store.getKey({}));
|
||||
t.done();
|
||||
}, 'IDBObjectStore.getKey() - invalid parameters');
|
||||
|
||||
[
|
||||
'basic',
|
||||
'key path',
|
||||
'key generator',
|
||||
'key generator and key path'
|
||||
].forEach(store_name => {
|
||||
getkey_test((t, db) => {
|
||||
const tx = db.transaction(store_name);
|
||||
const store = tx.objectStore(store_name);
|
||||
const request = store.getKey(5);
|
||||
request.onerror = t.unreached_func('request failed');
|
||||
request.onsuccess = t.step_func(() =>
|
||||
assert_equals(request.result, 5));
|
||||
tx.onabort = t.unreached_func('transaction aborted');
|
||||
tx.oncomplete = t.step_func(() => t.done());
|
||||
}, `IDBObjectStore.getKey() - ${store_name} - key`);
|
||||
|
||||
getkey_test((t, db) => {
|
||||
const tx = db.transaction(store_name);
|
||||
const store = tx.objectStore(store_name);
|
||||
const request = store.getKey(IDBKeyRange.lowerBound(4.5));
|
||||
request.onerror = t.unreached_func('request failed');
|
||||
request.onsuccess = t.step_func(() =>
|
||||
assert_equals(request.result, 5));
|
||||
tx.onabort = t.unreached_func('transaction aborted');
|
||||
tx.oncomplete = t.step_func(() => t.done());
|
||||
}, `IDBObjectStore.getKey() - ${store_name} - range`);
|
||||
|
||||
getkey_test((t, db) => {
|
||||
const tx = db.transaction(store_name);
|
||||
const store = tx.objectStore(store_name);
|
||||
const request = store.getKey(11);
|
||||
request.onerror = t.unreached_func('request failed');
|
||||
request.onsuccess = t.step_func(() =>
|
||||
assert_equals(request.result, undefined));
|
||||
tx.onabort = t.unreached_func('transaction aborted');
|
||||
tx.oncomplete = t.step_func(() => t.done());
|
||||
}, `IDBObjectStore.getKey() - ${store_name} - key - no match`);
|
||||
|
||||
getkey_test((t, db) => {
|
||||
const tx = db.transaction(store_name);
|
||||
const store = tx.objectStore(store_name);
|
||||
const request = store.getKey(IDBKeyRange.lowerBound(11));
|
||||
request.onerror = t.unreached_func('request failed');
|
||||
request.onsuccess = t.step_func(() =>
|
||||
assert_equals(request.result, undefined)
|
||||
);
|
||||
tx.onabort = t.unreached_func('transaction aborted');
|
||||
tx.oncomplete = t.step_func(() => t.done());
|
||||
}, `IDBObjectStore.getKey() - ${store_name} - range - no match`);
|
||||
});
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: IDBTransaction.objectStoreNames attribute</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/idbtransaction_objectStoreNames.any.js"></script>
|
||||
|
|
@ -0,0 +1,192 @@
|
|||
// META: title=IndexedDB: IDBTransaction.objectStoreNames attribute
|
||||
// META: global=window,worker
|
||||
// META: script=resources/support.js
|
||||
|
||||
'use strict';
|
||||
|
||||
function with_stores_test(store_names, open_func, description) {
|
||||
indexeddb_test(function(t, db, tx) {
|
||||
store_names.forEach(function(name) {
|
||||
db.createObjectStore(name);
|
||||
});
|
||||
}, open_func, description);
|
||||
}
|
||||
|
||||
indexeddb_test(
|
||||
function(t, db, tx) {
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, [],
|
||||
'transaction objectStoreNames should be empty');
|
||||
assert_array_equals(
|
||||
db.objectStoreNames, tx.objectStoreNames,
|
||||
'connection and transacton objectStoreNames should match');
|
||||
|
||||
db.createObjectStore('s1');
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, ['s1'],
|
||||
'transaction objectStoreNames should have new store');
|
||||
assert_array_equals(
|
||||
db.objectStoreNames, tx.objectStoreNames,
|
||||
'connection and transacton objectStoreNames should match');
|
||||
|
||||
db.createObjectStore('s3');
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, ['s1', 's3'],
|
||||
'transaction objectStoreNames should have new store');
|
||||
assert_array_equals(
|
||||
db.objectStoreNames, tx.objectStoreNames,
|
||||
'connection and transacton objectStoreNames should match');
|
||||
|
||||
db.createObjectStore('s2');
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, ['s1', 's2', 's3'],
|
||||
'transaction objectStoreNames should be sorted');
|
||||
assert_array_equals(
|
||||
db.objectStoreNames, tx.objectStoreNames,
|
||||
'connection and transacton objectStoreNames should match');
|
||||
|
||||
db.deleteObjectStore('s1');
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, ['s2', 's3'],
|
||||
'transaction objectStoreNames should be updated after delete');
|
||||
assert_array_equals(
|
||||
db.objectStoreNames, tx.objectStoreNames,
|
||||
'connection and transacton objectStoreNames should match');
|
||||
},
|
||||
function(t, db) {
|
||||
t.done();
|
||||
},
|
||||
'IDBTransaction.objectStoreNames - during upgrade transaction');
|
||||
|
||||
(function() {
|
||||
let saved_tx;
|
||||
indexeddb_test(
|
||||
function(t, db, tx) {
|
||||
saved_tx = tx;
|
||||
db.createObjectStore('s2');
|
||||
db.createObjectStore('s3');
|
||||
},
|
||||
function(t, db) {
|
||||
db.close();
|
||||
let open2 = indexedDB.open(db.name, db.version + 1);
|
||||
open2.onerror = t.unreached_func('open should succeed');
|
||||
open2.onupgradeneeded = t.step_func(function() {
|
||||
let db2 = open2.result;
|
||||
let tx2 = open2.transaction;
|
||||
assert_array_equals(
|
||||
tx2.objectStoreNames, ['s2', 's3'],
|
||||
'transaction should have previous stores in scope');
|
||||
assert_array_equals(
|
||||
db2.objectStoreNames, tx2.objectStoreNames,
|
||||
'connection and transacton objectStoreNames should match');
|
||||
|
||||
db2.createObjectStore('s4');
|
||||
assert_array_equals(
|
||||
tx2.objectStoreNames, ['s2', 's3', 's4'],
|
||||
'transaction should have new store in scope');
|
||||
assert_array_equals(
|
||||
db2.objectStoreNames, tx2.objectStoreNames,
|
||||
'connection and transacton objectStoreNames should match');
|
||||
|
||||
assert_array_equals(
|
||||
saved_tx.objectStoreNames, ['s2', 's3'],
|
||||
'previous transaction objectStoreNames should be unchanged');
|
||||
assert_array_equals(
|
||||
db.objectStoreNames, saved_tx.objectStoreNames,
|
||||
'connection and transaction objectStoreNames should match');
|
||||
db2.close();
|
||||
t.done();
|
||||
});
|
||||
},
|
||||
'IDBTransaction.objectStoreNames - value after close');
|
||||
}());
|
||||
|
||||
with_stores_test(['s1', 's2'], function(t, db) {
|
||||
assert_array_equals(
|
||||
db.transaction('s1', 'readonly').objectStoreNames, ['s1'],
|
||||
'transaction should have one store in scope');
|
||||
assert_array_equals(
|
||||
db.transaction(['s1', 's2']).objectStoreNames, ['s1', 's2'],
|
||||
'transaction should have two stores in scope');
|
||||
t.done();
|
||||
}, 'IDBTransaction.objectStoreNames - transaction scope');
|
||||
|
||||
with_stores_test(['s1', 's2'], function(t, db) {
|
||||
let tx = db.transaction(['s1', 's2'], 'readwrite');
|
||||
tx.objectStore('s1').put(0, 0);
|
||||
tx.onabort = t.unreached_func('transaction should complete');
|
||||
tx.oncomplete = t.step_func(function() {
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, ['s1', 's2'],
|
||||
'objectStoreNames should return scope after transaction commits');
|
||||
t.done();
|
||||
});
|
||||
}, 'IDBTransaction.objectStoreNames - value after commit');
|
||||
|
||||
with_stores_test(['s1', 's2'], function(t, db) {
|
||||
let tx = db.transaction(['s1', 's2'], 'readwrite');
|
||||
tx.objectStore('s1').put(0, 0);
|
||||
tx.objectStore('s1').add(0, 0);
|
||||
tx.oncomplete = t.unreached_func('transaction should abort');
|
||||
tx.onabort = t.step_func(function() {
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, ['s1', 's2'],
|
||||
'objectStoreNames should return scope after transaction aborts');
|
||||
t.done();
|
||||
});
|
||||
}, 'IDBTransaction.objectStoreNames - value after abort');
|
||||
|
||||
with_stores_test(['s1', 's2', 's3'], function(t, db) {
|
||||
assert_array_equals(
|
||||
db.transaction(['s3', 's2', 's1']).objectStoreNames, ['s1', 's2', 's3'],
|
||||
'transaction objectStoreNames should be sorted');
|
||||
t.done();
|
||||
}, 'IDBTransaction.objectStoreNames - sorting');
|
||||
|
||||
with_stores_test(['s1', 's2'], function(t, db) {
|
||||
assert_array_equals(
|
||||
db.transaction(['s2', 's1', 's2']).objectStoreNames, ['s1', 's2'],
|
||||
'transaction objectStoreNames should not have duplicates');
|
||||
t.done();
|
||||
}, 'IDBTransaction.objectStoreNames - no duplicates');
|
||||
|
||||
let unusual_names = [
|
||||
'', // empty string
|
||||
|
||||
'\x00', // U+0000 NULL
|
||||
'\xFF', // U+00FF LATIN SMALL LETTER Y WITH DIAERESIS
|
||||
|
||||
'1', // basic ASCII
|
||||
'12', // basic ASCII
|
||||
'123', // basic ASCII
|
||||
'abc', // basic ASCII
|
||||
'ABC', // basic ASCII
|
||||
|
||||
'\xA2', // U+00A2 CENT SIGN
|
||||
'\u6C34', // U+6C34 CJK UNIFIED IDEOGRAPH (water)
|
||||
'\uD834\uDD1E', // U+1D11E MUSICAL SYMBOL G-CLEF (UTF-16 surrogate pair)
|
||||
'\uFFFD', // U+FFFD REPLACEMENT CHARACTER
|
||||
|
||||
'\uD800', // UTF-16 surrogate lead
|
||||
'\uDC00', // UTF-16 surrogate trail
|
||||
];
|
||||
unusual_names.sort();
|
||||
|
||||
indexeddb_test(
|
||||
function(t, db, tx) {
|
||||
unusual_names.slice().reverse().forEach(function(name) {
|
||||
db.createObjectStore(name);
|
||||
});
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, unusual_names,
|
||||
'transaction should have names sorted');
|
||||
},
|
||||
function(t, db) {
|
||||
let tx =
|
||||
db.transaction(unusual_names.slice().reverse().concat(unusual_names));
|
||||
assert_array_equals(
|
||||
tx.objectStoreNames, unusual_names,
|
||||
'transaction should have names sorted with no duplicates');
|
||||
t.done();
|
||||
},
|
||||
'IDBTransaction.objectStoreNames - unusual names');
|
||||
|
|
@ -0,0 +1,16 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Exceptions thrown during key conversion</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/key-conversion-exceptions.any.js"></script>
|
||||
|
|
@ -0,0 +1,256 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IndexedDB: Exceptions thrown during key conversion
|
||||
// META: script=resources/support.js
|
||||
// META: timeout=long
|
||||
|
||||
'use strict';
|
||||
|
||||
// Convenience function for tests that only need to run code in onupgradeneeded.
|
||||
function indexeddb_upgrade_only_test(upgrade_callback, description) {
|
||||
indexeddb_test(upgrade_callback, t => t.done(), description);
|
||||
}
|
||||
|
||||
// Key that throws during conversion.
|
||||
function throwing_key(name) {
|
||||
const throws = [];
|
||||
throws.length = 1;
|
||||
const err = new Error('throwing from getter');
|
||||
err.name = name;
|
||||
Object.defineProperty(throws, '0', {
|
||||
get: function() {
|
||||
throw err;
|
||||
},
|
||||
enumerable: true,
|
||||
});
|
||||
return [throws, err];
|
||||
}
|
||||
|
||||
const valid_key = [];
|
||||
const invalid_key = {};
|
||||
|
||||
// Calls method on receiver with the specified number of args (default 1)
|
||||
// and asserts that the method fails appropriately (rethrowing if
|
||||
// conversion throws, or DataError if not a valid key), and that
|
||||
// the first argument is fully processed before the second argument
|
||||
// (if appropriate).
|
||||
function check_method(receiver, method, args) {
|
||||
args = args || 1;
|
||||
if (args < 2) {
|
||||
const [key, err] = throwing_key('getter');
|
||||
assert_throws_exactly(err, () => {
|
||||
receiver[method](key);
|
||||
}, 'key conversion with throwing getter should rethrow');
|
||||
|
||||
assert_throws_dom('DataError', () => {
|
||||
receiver[method](invalid_key);
|
||||
}, 'key conversion with invalid key should throw DataError');
|
||||
} else {
|
||||
const [key1, err1] = throwing_key('getter 1');
|
||||
const [key2, err2] = throwing_key('getter 2');
|
||||
assert_throws_exactly(err1, () => {
|
||||
receiver[method](key1, key2);
|
||||
}, 'first key conversion with throwing getter should rethrow');
|
||||
|
||||
assert_throws_dom('DataError', () => {
|
||||
receiver[method](invalid_key, key2);
|
||||
}, 'first key conversion with invalid key should throw DataError');
|
||||
|
||||
assert_throws_exactly(err2, () => {
|
||||
receiver[method](valid_key, key2);
|
||||
}, 'second key conversion with throwing getter should rethrow');
|
||||
|
||||
assert_throws_dom('DataError', () => {
|
||||
receiver[method](valid_key, invalid_key);
|
||||
}, 'second key conversion with invalid key should throw DataError');
|
||||
}
|
||||
}
|
||||
|
||||
// Verifies that invalid keys throw when used with the `IDBGetAllOptions`
|
||||
// dictionary. `getAllRecords()` added `IDBGetAllOptions`, which `getAll()` and
|
||||
// `getAllKeys()` also support.
|
||||
function check_method_with_get_all_options(receiver, method) {
|
||||
assert_throws_dom('DataError', () => {
|
||||
receiver[method]({query: invalid_key});
|
||||
}, 'options query key conversion with invalid key should throw DataError');
|
||||
|
||||
const [key, err] = throwing_key('getter');
|
||||
assert_throws_exactly(err, () => {
|
||||
receiver[method]({query: key});
|
||||
}, 'options query key conversion with throwing getter should rethrow');
|
||||
|
||||
// Verify `getAll()` and `getAllKeys()` throw when given an invalid key range
|
||||
// directly without the options dictionary. `getAllRecords()` only supports
|
||||
// the options dictionary.
|
||||
if (method !== 'getAllRecords') {
|
||||
assert_throws_exactly(err, () => {
|
||||
receiver[method](key);
|
||||
}, 'query key conversion with throwing getter should rethrow');
|
||||
}
|
||||
}
|
||||
|
||||
// Static key comparison utility on IDBFactory.
|
||||
test(
|
||||
t => check_method(indexedDB, 'cmp', 2),
|
||||
'IDBFactory cmp() static with throwing/invalid keys');
|
||||
|
||||
// Continue methods on IDBCursor.
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const store = db.createObjectStore('store');
|
||||
store.put('a', 1).onerror = t.unreached_func('put should succeed');
|
||||
|
||||
const request = store.openCursor();
|
||||
request.onerror = t.unreached_func('openCursor should succeed');
|
||||
request.onsuccess = t.step_func(() => {
|
||||
const cursor = request.result;
|
||||
assert_not_equals(cursor, null, 'cursor should find a value');
|
||||
check_method(cursor, 'continue');
|
||||
});
|
||||
}, 'IDBCursor continue() method with throwing/invalid keys');
|
||||
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const store = db.createObjectStore('store');
|
||||
const index = store.createIndex('index', 'prop');
|
||||
store.put({prop: 'a'}, 1).onerror = t.unreached_func('put should succeed');
|
||||
|
||||
const request = index.openCursor();
|
||||
request.onerror = t.unreached_func('openCursor should succeed');
|
||||
request.onsuccess = t.step_func(() => {
|
||||
const cursor = request.result;
|
||||
assert_not_equals(cursor, null, 'cursor should find a value');
|
||||
|
||||
check_method(cursor, 'continuePrimaryKey', 2);
|
||||
});
|
||||
}, null, 'IDBCursor continuePrimaryKey() method with throwing/invalid keys');
|
||||
|
||||
// Mutation methods on IDBCursor.
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const store = db.createObjectStore('store', {keyPath: 'prop'});
|
||||
store.put({prop: 1}).onerror = t.unreached_func('put should succeed');
|
||||
|
||||
const request = store.openCursor();
|
||||
request.onerror = t.unreached_func('openCursor should succeed');
|
||||
request.onsuccess = t.step_func(() => {
|
||||
const cursor = request.result;
|
||||
assert_not_equals(cursor, null, 'cursor should find a value');
|
||||
|
||||
const value = {};
|
||||
let err;
|
||||
[value.prop, err] = throwing_key('getter');
|
||||
assert_throws_exactly(err, () => {
|
||||
cursor.update(value);
|
||||
}, 'throwing getter should rethrow during clone');
|
||||
|
||||
// Throwing from the getter during key conversion is
|
||||
// not possible since (1) a clone is used, (2) only own
|
||||
// properties are cloned, and (3) only own properties
|
||||
// are used for key path evaluation.
|
||||
|
||||
value.prop = invalid_key;
|
||||
assert_throws_dom('DataError', () => {
|
||||
cursor.update(value);
|
||||
}, 'key conversion with invalid key should throw DataError');
|
||||
});
|
||||
}, 'IDBCursor update() method with throwing/invalid keys');
|
||||
|
||||
// Static constructors on IDBKeyRange
|
||||
['only', 'lowerBound', 'upperBound'].forEach((method) => {
|
||||
test(
|
||||
t => check_method(IDBKeyRange, method),
|
||||
'IDBKeyRange ' + method + '() static with throwing/invalid keys');
|
||||
});
|
||||
|
||||
test(
|
||||
t => check_method(IDBKeyRange, 'bound', 2),
|
||||
'IDBKeyRange bound() static with throwing/invalid keys');
|
||||
|
||||
// Insertion methods on IDBObjectStore.
|
||||
['add', 'put'].forEach((method) => {
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const out_of_line = db.createObjectStore('out-of-line keys');
|
||||
const in_line = db.createObjectStore('in-line keys', {keyPath: 'prop'});
|
||||
let [key, err] = throwing_key('getter');
|
||||
assert_throws_exactly(err, () => {
|
||||
out_of_line[method]('value', key);
|
||||
}, 'key conversion with throwing getter should rethrow');
|
||||
|
||||
assert_throws_dom('DataError', () => {
|
||||
out_of_line[method]('value', invalid_key);
|
||||
}, 'key conversion with invalid key should throw DataError');
|
||||
|
||||
const value = {};
|
||||
[value.prop, err] = throwing_key('getter');
|
||||
assert_throws_exactly(err, () => {
|
||||
in_line[method](value);
|
||||
}, 'throwing getter should rethrow during clone');
|
||||
|
||||
// Throwing from the getter during key conversion is
|
||||
// not possible since (1) a clone is used, (2) only own
|
||||
// properties are cloned, and (3) only own properties
|
||||
// are used for key path evaluation.
|
||||
|
||||
value.prop = invalid_key;
|
||||
assert_throws_dom('DataError', () => {
|
||||
in_line[method](value);
|
||||
}, 'key conversion with invalid key should throw DataError');
|
||||
}, `IDBObjectStore ${method}() method with throwing/invalid keys`);
|
||||
});
|
||||
|
||||
// Generic (key-or-key-path) methods on IDBObjectStore.
|
||||
['delete',
|
||||
'get',
|
||||
'getKey',
|
||||
'count',
|
||||
'openCursor',
|
||||
'openKeyCursor',
|
||||
].forEach(method => {
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const store = db.createObjectStore('store');
|
||||
|
||||
check_method(store, method);
|
||||
}, `IDBObjectStore ${method}() method with throwing/invalid keys`);
|
||||
});
|
||||
|
||||
// Generic (key-or-key-path) methods on IDBIndex.
|
||||
['get',
|
||||
'getKey',
|
||||
'count',
|
||||
'openCursor',
|
||||
'openKeyCursor',
|
||||
].forEach((method) => {
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const store = db.createObjectStore('store');
|
||||
const index = store.createIndex('index', 'keyPath');
|
||||
|
||||
check_method(index, method);
|
||||
}, `IDBIndex ${method}() method with throwing/invalid keys`);
|
||||
});
|
||||
|
||||
// Verify methods that take `IDBGetAllOptions` on `IDBObjectStore`.
|
||||
['getAll',
|
||||
'getAllKeys',
|
||||
'getAllRecords',
|
||||
].forEach(method => {
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const store = db.createObjectStore('store');
|
||||
if ('getAllRecords' in store) {
|
||||
check_method_with_get_all_options(store, method);
|
||||
} else if (method !== 'getAllRecords') {
|
||||
// This browser does not support `getAllRecords()` or the
|
||||
// `IDBGetAllOptions` dictionary.
|
||||
check_method(store, method);
|
||||
}
|
||||
}, `IDBObjectStore ${method}() method with throwing/invalid keys`);
|
||||
});
|
||||
|
||||
// Verify methods that take `IDBGetAllOptions` on `IDBIndex`.
|
||||
['getAll', 'getAllKeys', 'getAllRecords'].forEach(method => {
|
||||
indexeddb_upgrade_only_test((t, db) => {
|
||||
const store = db.createObjectStore('store');
|
||||
const index = store.createIndex('index', 'keyPath');
|
||||
if ('getAllRecords' in index) {
|
||||
check_method_with_get_all_options(index, method);
|
||||
} else if (method !== 'getAllRecords') {
|
||||
check_method(store, method);
|
||||
}
|
||||
}, `IDBIndex ${method}() method with throwing/invalid keys`);
|
||||
});
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/keygenerator.any.js"></script>
|
||||
389
Tests/LibWeb/Text/input/wpt-import/IndexedDB/keygenerator.any.js
Normal file
389
Tests/LibWeb/Text/input/wpt-import/IndexedDB/keygenerator.any.js
Normal file
|
|
@ -0,0 +1,389 @@
|
|||
// META: global=window,worker
|
||||
// META: script=resources/support.js
|
||||
|
||||
'use strict';
|
||||
|
||||
function keygenerator(objects, expected_keys, desc, func) {
|
||||
let db;
|
||||
let t = async_test("Keygenerator" + " - " + desc);
|
||||
let open_rq = createdb(t);
|
||||
open_rq.onupgradeneeded = function(e) {
|
||||
db = e.target.result;
|
||||
let objStore = db.createObjectStore("store", { keyPath: "id", autoIncrement: true });
|
||||
for (let i = 0; i < objects.length; i++)
|
||||
{
|
||||
if (objects[i] === null)
|
||||
objStore.add({});
|
||||
else
|
||||
objStore.add({ id: objects[i] });
|
||||
}
|
||||
};
|
||||
|
||||
open_rq.onsuccess = function(e) {
|
||||
let actual_keys = [];
|
||||
let rq = db.transaction("store", "readonly")
|
||||
.objectStore("store")
|
||||
.openCursor();
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
if (cursor) {
|
||||
actual_keys.push(cursor.key.valueOf());
|
||||
cursor.continue();
|
||||
}
|
||||
else {
|
||||
assert_key_equals(actual_keys, expected_keys, "keygenerator array - " + desc);
|
||||
t.done();
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
keygenerator([null, null, null, null], [1, 2, 3, 4],
|
||||
"starts at one, and increments by one");
|
||||
|
||||
keygenerator([2, null, 5, null, 6.66, 7], [2, 3, 5, 6, 6.66, 7],
|
||||
"increments by one from last set key");
|
||||
|
||||
keygenerator([-10, null, "6", 6.3, [10], -2, 4, null], [-10, -2, 1, 4, 6.3, 7, "6", [10]],
|
||||
"don't increment when new key is not bigger than current");
|
||||
|
||||
async_test(t => {
|
||||
let db;
|
||||
let objects = [1, null, { id: 2 }, null, 2.00001, 5, null, { id: 6 }];
|
||||
let expected = [1, 2, 2.00001, 3, 5, 6];
|
||||
let errors = 0;
|
||||
let open_rq = createdb(t);
|
||||
open_rq.onupgradeneeded = function(e) {
|
||||
db = e.target.result;
|
||||
let objStore = db.createObjectStore("store", { keyPath: "id", autoIncrement: true });
|
||||
|
||||
for (let i = 0; i < objects.length; i++)
|
||||
{
|
||||
if (objects[i] === null)
|
||||
{
|
||||
objStore.add({});
|
||||
}
|
||||
else if (typeof objects[i] === "object")
|
||||
{
|
||||
let rq = objStore.add(objects[i]);
|
||||
rq.onerror = t.step_func(function(e) {
|
||||
errors++;
|
||||
assert_equals(e.target.error.name, "ConstraintError");
|
||||
assert_equals(e.type, "error");
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
});
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
assert_unreached("Got rq.success when adding duplicate id " + objects[i]);
|
||||
});
|
||||
}
|
||||
else
|
||||
objStore.add({ id: objects[i] });
|
||||
}
|
||||
};
|
||||
|
||||
open_rq.onsuccess = function(e) {
|
||||
let actual_keys = [];
|
||||
let rq = db.transaction("store", "readonly")
|
||||
.objectStore("store")
|
||||
.openCursor();
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
if (cursor) {
|
||||
actual_keys.push(cursor.key.valueOf());
|
||||
cursor.continue();
|
||||
}
|
||||
else {
|
||||
assert_equals(errors, 2, "expected ConstraintError's");
|
||||
assert_array_equals(actual_keys, expected, "keygenerator array");
|
||||
t.done();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
}, "Keygenerator ConstraintError when using same id as already generated");
|
||||
|
||||
function big_key_test(key, description) {
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
assert_equals(indexedDB.cmp(key, key), 0, 'Key is valid');
|
||||
db.createObjectStore('store', {autoIncrement: true});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
const store = tx.objectStore('store');
|
||||
const value = 0;
|
||||
let request;
|
||||
request = store.put(value);
|
||||
request.onerror = t.unreached_func('put should succeed');
|
||||
request.onsuccess = t.step_func(e => {
|
||||
assert_equals(e.target.result, 1,
|
||||
'Key generator should initially be 1');
|
||||
});
|
||||
|
||||
request = store.put(value);
|
||||
request.onerror = t.unreached_func('put should succeed');
|
||||
request.onsuccess = t.step_func(e => {
|
||||
assert_equals(e.target.result, 2,
|
||||
'Key generator should increment');
|
||||
});
|
||||
|
||||
request = store.put(value, 1000);
|
||||
request.onerror = t.unreached_func('put should succeed');
|
||||
request.onsuccess = t.step_func(e => {
|
||||
assert_equals(e.target.result, 1000,
|
||||
'Explicit key should be used');
|
||||
});
|
||||
|
||||
request = store.put(value);
|
||||
request.onerror = t.unreached_func('put should succeed');
|
||||
request.onsuccess = t.step_func(e => {
|
||||
assert_equals(e.target.result, 1001,
|
||||
'Key generator should have updated');
|
||||
});
|
||||
|
||||
request = store.put(value, key);
|
||||
request.onerror = t.unreached_func('put should succeed');
|
||||
request.onsuccess = t.step_func(e => {
|
||||
assert_equals(e.target.result, key,
|
||||
'Explicit key should be used');
|
||||
});
|
||||
|
||||
if (key >= 0) {
|
||||
// Large positive values will max out the key generator, so it
|
||||
// can no longer produce keys.
|
||||
request = store.put(value);
|
||||
request.onsuccess = t.unreached_func('put should fail');
|
||||
request.onerror = t.step_func(e => {
|
||||
e.preventDefault();
|
||||
assert_equals(e.target.error.name, 'ConstraintError',
|
||||
'Key generator should have returned failure');
|
||||
});
|
||||
} else {
|
||||
// Large negative values are always lower than the key generator's
|
||||
// current number, so have no effect on the generator.
|
||||
request = store.put(value);
|
||||
request.onerror = t.unreached_func('put should succeed');
|
||||
request.onsuccess = t.step_func(e => {
|
||||
assert_equals(e.target.result, 1002,
|
||||
'Key generator should have updated');
|
||||
});
|
||||
}
|
||||
|
||||
request = store.put(value, 2000);
|
||||
request.onerror = t.unreached_func('put should succeed');
|
||||
request.onsuccess = t.step_func(e => {
|
||||
assert_equals(e.target.result, 2000,
|
||||
'Explicit key should be used');
|
||||
});
|
||||
tx.onabort = t.step_func(() => {
|
||||
assert_unreached(`Transaction aborted: ${tx.error.message}`);
|
||||
});
|
||||
tx.oncomplete = t.step_func(() => { t.done(); });
|
||||
},
|
||||
description);
|
||||
}
|
||||
|
||||
[
|
||||
{
|
||||
key: Number.MAX_SAFE_INTEGER + 1,
|
||||
description: '53 bits'
|
||||
},
|
||||
{
|
||||
key: Math.pow(2, 60),
|
||||
description: 'greater than 53 bits, less than 64 bits'
|
||||
},
|
||||
{
|
||||
key: -Math.pow(2, 60),
|
||||
description: 'greater than 53 bits, less than 64 bits (negative)'
|
||||
},
|
||||
{
|
||||
key: Math.pow(2, 63),
|
||||
description: '63 bits'
|
||||
},
|
||||
{
|
||||
key: -Math.pow(2, 63),
|
||||
description: '63 bits (negative)'
|
||||
},
|
||||
{
|
||||
key: Math.pow(2, 64),
|
||||
description: '64 bits'
|
||||
},
|
||||
{
|
||||
key: -Math.pow(2, 64),
|
||||
description: '64 bits (negative)'
|
||||
},
|
||||
{
|
||||
key: Math.pow(2, 70),
|
||||
description: 'greater than 64 bits, but still finite'
|
||||
},
|
||||
{
|
||||
key: -Math.pow(2, 70),
|
||||
description: 'greater than 64 bits, but still finite (negative)'
|
||||
},
|
||||
{
|
||||
key: Infinity,
|
||||
description: 'equal to Infinity'
|
||||
},
|
||||
{
|
||||
key: -Infinity,
|
||||
description: 'equal to -Infinity'
|
||||
}
|
||||
].forEach(function(testCase) {
|
||||
big_key_test(testCase.key,
|
||||
`Key generator vs. explicit key ${testCase.description}`);
|
||||
});
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
db.createObjectStore('store', {autoIncrement: true, keyPath: 'id'});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
t.onabort = t.unreached_func('transaction should not abort');
|
||||
const store = tx.objectStore('store');
|
||||
store.put({name: 'n'}).onsuccess = t.step_func(e => {
|
||||
const key = e.target.result;
|
||||
assert_equals(key, 1, 'Key generator initial value should be 1');
|
||||
store.get(key).onsuccess = t.step_func(e => {
|
||||
const value = e.target.result;
|
||||
assert_equals(typeof value, 'object', 'Result should be object');
|
||||
assert_equals(value.name, 'n', 'Result should have name property');
|
||||
assert_equals(value.id, key, 'Key should be injected');
|
||||
t.done();
|
||||
});
|
||||
});
|
||||
},
|
||||
'Key is injected into value - single segment path');
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
db.createObjectStore('store', {autoIncrement: true, keyPath: 'a.b.id'});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
t.onabort = t.unreached_func('transaction should not abort');
|
||||
const store = tx.objectStore('store');
|
||||
store.put({name: 'n'}).onsuccess = t.step_func(e => {
|
||||
const key = e.target.result;
|
||||
assert_equals(key, 1, 'Key generator initial value should be 1');
|
||||
store.get(key).onsuccess = t.step_func(e => {
|
||||
const value = e.target.result;
|
||||
assert_equals(typeof value, 'object', 'Result should be object');
|
||||
assert_equals(value.name, 'n', 'Result should have name property');
|
||||
assert_equals(value.a.b.id, key, 'Key should be injected');
|
||||
t.done();
|
||||
});
|
||||
});
|
||||
},
|
||||
'Key is injected into value - multi-segment path');
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
db.createObjectStore('store', {autoIncrement: true, keyPath: 'a.b.id'});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
t.onabort = t.unreached_func('transaction should not abort');
|
||||
const store = tx.objectStore('store');
|
||||
store.put({name: 'n1', b: {name: 'n2'}}).onsuccess = t.step_func(e => {
|
||||
const key = e.target.result;
|
||||
assert_equals(key, 1, 'Key generator initial value should be 1');
|
||||
store.get(key).onsuccess = t.step_func(e => {
|
||||
const value = e.target.result;
|
||||
assert_equals(typeof value, 'object', 'Result should be object');
|
||||
assert_equals(value.name, 'n1', 'Result should have name property');
|
||||
assert_equals(value.b.name, 'n2', 'Result should have name property');
|
||||
assert_equals(value.a.b.id, key, 'Key should be injected');
|
||||
t.done();
|
||||
});
|
||||
});
|
||||
},
|
||||
'Key is injected into value - multi-segment path, partially populated');
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
db.createObjectStore('store', {autoIncrement: true, keyPath: 'id'});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
const store = tx.objectStore('store');
|
||||
|
||||
assert_throws_dom('DataError', () => {
|
||||
store.put(123);
|
||||
}, 'Key path should be checked against value');
|
||||
|
||||
t.done();
|
||||
},
|
||||
'put() throws if key cannot be injected - single segment path');
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
db.createObjectStore('store', {autoIncrement: true, keyPath: 'a.b.id'});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite');
|
||||
const store = tx.objectStore('store');
|
||||
|
||||
assert_throws_dom('DataError', () => {
|
||||
store.put({a: 123});
|
||||
}, 'Key path should be checked against value');
|
||||
|
||||
assert_throws_dom('DataError', () => {
|
||||
store.put({a: {b: 123} });
|
||||
}, 'Key path should be checked against value');
|
||||
|
||||
t.done();
|
||||
},
|
||||
'put() throws if key cannot be injected - multi-segment path');
|
||||
|
||||
async_test(t => {
|
||||
let db;
|
||||
let overflow_error_fired = false;
|
||||
let objects = [9007199254740991, null, "error", 2, "error"];
|
||||
let expected_keys = [2, 9007199254740991, 9007199254740992];
|
||||
let open_rq = createdb(t);
|
||||
open_rq.onupgradeneeded = function(e) {
|
||||
db = e.target.result;
|
||||
let objStore = db.createObjectStore("store", { keyPath: "id", autoIncrement: true });
|
||||
for (let i = 0; i < objects.length; i++)
|
||||
{
|
||||
if (objects[i] === null)
|
||||
{
|
||||
objStore.add({});
|
||||
}
|
||||
else if (objects[i] === "error")
|
||||
{
|
||||
let rq = objStore.add({});
|
||||
rq.onsuccess = fail(t, 'When "current number" overflows, error event is expected');
|
||||
rq.onerror = t.step_func(function(e) {
|
||||
overflow_error_fired = true;
|
||||
assert_equals(e.target.error.name, "ConstraintError", "error name");
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
});
|
||||
}
|
||||
else
|
||||
objStore.add({ id: objects[i] });
|
||||
}
|
||||
};
|
||||
|
||||
open_rq.onsuccess = function(e) {
|
||||
let actual_keys = [];
|
||||
let rq = db.transaction("store", "readonly")
|
||||
.objectStore("store")
|
||||
.openCursor();
|
||||
rq.onsuccess = t.step_func(function(e) {
|
||||
let cursor = e.target.result;
|
||||
if (cursor) {
|
||||
actual_keys.push(cursor.key.valueOf());
|
||||
cursor.continue();
|
||||
}
|
||||
else {
|
||||
assert_true(overflow_error_fired, "error fired on 'current number' overflow");
|
||||
assert_array_equals(actual_keys, expected_keys, "keygenerator array");
|
||||
|
||||
t.done();
|
||||
}
|
||||
});
|
||||
};
|
||||
}, "Keygenerator overflow");
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Exceptions in extracting keys from values (ES bindings)</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/keypath-exceptions.any.js"></script>
|
||||
|
|
@ -0,0 +1,288 @@
|
|||
// META: global=window,worker
|
||||
// META: title=IndexedDB: Exceptions in extracting keys from values (ES bindings)
|
||||
// META: script=resources/support.js
|
||||
|
||||
// Spec: https://w3c.github.io/IndexedDB/#extract-key-from-value
|
||||
|
||||
'use strict';
|
||||
|
||||
indexeddb_test(
|
||||
(t, db) => {
|
||||
db.createObjectStore('store', {autoIncrement: true, keyPath: 'a.b.c'});
|
||||
},
|
||||
(t, db) => {
|
||||
const tx = db.transaction('store', 'readwrite', {durability: 'relaxed'});
|
||||
assert_throws_dom('DataError', () => {
|
||||
tx.objectStore('store').put({a: {b: 'foo'}});
|
||||
}, 'Put should throw if key can not be inserted at key path location.');
|
||||
t.done();
|
||||
},
|
||||
'The last element of keypath is validated');
|
||||
|
||||
const err = Error();
|
||||
err.name = 'getter';
|
||||
|
||||
function throwingGetter() {
|
||||
throw err;
|
||||
}
|
||||
|
||||
indexeddb_test(
|
||||
function(t, db) {
|
||||
const o = {};
|
||||
Object.defineProperty(
|
||||
o, 'throws',
|
||||
{get: throwingGetter, enumerable: false, configurable: true});
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and non-enumerable getter will be ignored. The clone
|
||||
// will have no such property, so key path evaluation
|
||||
// will fail.
|
||||
const s1 = db.createObjectStore('s1', {keyPath: 'throws'});
|
||||
assert_throws_dom('DataError', () => {
|
||||
s1.put(o);
|
||||
}, 'Key path failing to resolve should throw');
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and non-enumerable getter will be ignored. The clone
|
||||
// will have no such property, so key path evaluation
|
||||
// will fail.
|
||||
const s2 = db.createObjectStore('s2', {keyPath: 'throws.x'});
|
||||
assert_throws_dom('DataError', () => {
|
||||
s2.put(o);
|
||||
}, 'Key path failing to resolve should throw');
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and non-enumerable getter will be ignored. The clone
|
||||
// will have no such property, so generated key can be
|
||||
// inserted.
|
||||
const s3 =
|
||||
db.createObjectStore('s3', {keyPath: 'throws', autoIncrement: true});
|
||||
assert_class_string(
|
||||
s3.put(o), 'IDBRequest',
|
||||
'Key injectability test at throwing getter should succeed');
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and non-enumerable getter will be ignored. The clone
|
||||
// will have no such property, so intermediate object
|
||||
// and generated key can be inserted.
|
||||
const s4 = db.createObjectStore(
|
||||
's4', {keyPath: 'throws.x', autoIncrement: true});
|
||||
assert_class_string(
|
||||
s4.put(o), 'IDBRequest',
|
||||
'Key injectability test past throwing getter should succeed');
|
||||
},
|
||||
(t, db) => {
|
||||
t.done();
|
||||
},
|
||||
'Key path evaluation: Exceptions from non-enumerable getters');
|
||||
|
||||
indexeddb_test(
|
||||
function(t, db) {
|
||||
const o = {};
|
||||
Object.defineProperty(
|
||||
o, 'throws',
|
||||
{get: throwingGetter, enumerable: true, configurable: true});
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and enumerable getter will rethrow.
|
||||
const s1 = db.createObjectStore('s1', {keyPath: 'throws'});
|
||||
assert_throws_exactly(err, () => {
|
||||
s1.put(o);
|
||||
}, 'Key path resolving to throwing getter rethrows');
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and enumerable getter will rethrow.
|
||||
const s2 = db.createObjectStore('s2', {keyPath: 'throws.x'});
|
||||
assert_throws_exactly(err, () => {
|
||||
s2.put(o);
|
||||
}, 'Key path resolving past throwing getter rethrows');
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and enumerable getter will rethrow.
|
||||
const s3 =
|
||||
db.createObjectStore('s3', {keyPath: 'throws', autoIncrement: true});
|
||||
assert_throws_exactly(err, () => {
|
||||
s3.put(o);
|
||||
}, 'Key injectability test at throwing getter should rethrow');
|
||||
|
||||
// Value should be cloned before key path is evaluated,
|
||||
// and enumerable getter will rethrow.
|
||||
const s4 = db.createObjectStore(
|
||||
's4', {keyPath: 'throws.x', autoIncrement: true});
|
||||
assert_throws_exactly(err, () => {
|
||||
s4.put(o);
|
||||
}, 'Key injectability test past throwing getter should rethrow');
|
||||
},
|
||||
(t, db) => {
|
||||
t.done();
|
||||
},
|
||||
'Key path evaluation: Exceptions from enumerable getters');
|
||||
|
||||
indexeddb_test(
    (t, db) => {
      // Wraps `f` so it runs while Object.prototype carries a throwing,
      // NON-enumerable 'throws' getter. The getter is installed and removed
      // around each call so it cannot interfere with the test harness.
      const with_proto_getter = f => () => {
        Object.defineProperty(
            Object.prototype, 'throws',
            {get: throwingGetter, enumerable: false, configurable: true});
        try {
          f();
        } finally {
          delete Object.prototype['throws'];
        }
      };

      // The value is cloned before the key path is evaluated, and cloning
      // ignores the non-enumerable getter. The clone therefore has no own
      // 'throws' property, so key path evaluation fails with DataError.
      const s1 = db.createObjectStore('s1', {keyPath: 'throws'});
      assert_throws_dom(
          'DataError', with_proto_getter(() => s1.put({})),
          'Key path resolving to no own property throws DataError');

      // Same as above, but the key path walks past the missing property.
      const s2 = db.createObjectStore('s2', {keyPath: 'throws.x'});
      assert_throws_dom(
          'DataError', with_proto_getter(() => s2.put({})),
          'Key path resolving past no own property throws DataError');

      // With a key generator, the clone's missing own property means key
      // injection can succeed, so put() must not throw.
      const s3 = db.createObjectStore(
          's3', {keyPath: 'throws', autoIncrement: true});
      assert_equals(
          s3.put({}).readyState, 'pending',
          'put should not throw due to inherited property');

      // Same as above with a deeper key path.
      const s4 = db.createObjectStore(
          's4', {keyPath: 'throws.x', autoIncrement: true});
      assert_equals(
          s4.put({}).readyState, 'pending',
          'put should not throw due to inherited property');
    },
    (t, db) => {
      t.done();
    },
    'Key path evaluation: Exceptions from non-enumerable getters on prototype');
|
||||
|
||||
indexeddb_test(
    (t, db) => {
      // Wraps `f` so it runs while Object.prototype carries a throwing,
      // ENUMERABLE 'throws' getter. The getter is installed and removed
      // around each call so it cannot interfere with the test harness.
      const with_proto_getter = f => () => {
        Object.defineProperty(
            Object.prototype, 'throws',
            {get: throwingGetter, enumerable: true, configurable: true});
        try {
          f();
        } finally {
          delete Object.prototype['throws'];
        }
      };

      // The value is cloned before the key path is evaluated. The clone has
      // no own 'throws' property, so key path evaluation fails with
      // DataError.
      const s1 = db.createObjectStore('s1', {keyPath: 'throws'});
      assert_throws_dom(
          'DataError', with_proto_getter(() => s1.put({})),
          'Key path resolving to no own property throws DataError');

      // Same as above, but the key path walks past the missing property.
      const s2 = db.createObjectStore('s2', {keyPath: 'throws.x'});
      assert_throws_dom(
          'DataError', with_proto_getter(() => s2.put({})),
          'Key path resolving past throwing getter rethrows');

      // With a key generator, key path evaluation failing on the clone means
      // key injection can succeed, so put() must not throw.
      const s3 = db.createObjectStore(
          's3', {keyPath: 'throws', autoIncrement: true});
      assert_equals(
          s3.put({}).readyState, 'pending',
          'put should not throw due to inherited property');

      // Same as above with a deeper key path.
      const s4 = db.createObjectStore(
          's4', {keyPath: 'throws.x', autoIncrement: true});
      assert_equals(
          s4.put({}).readyState, 'pending',
          'put should not throw due to inherited property');
    },
    (t, db) => {
      t.done();
    },
    'Key path evaluation: Exceptions from enumerable getters on prototype');
|
||||
|
||||
indexeddb_test(
    (t, db) => {
      const store = db.createObjectStore('store');
      store.createIndex('index', 'index0');
    },
    (t, db) => {
      const tx = db.transaction('store', 'readwrite', {durability: 'relaxed'});

      // A sparse array: key conversion must not consult the prototype chain
      // to fill in the holes.
      const sparse = [];
      sparse[99] = 1;

      // Counts how often the prototype getter below is invoked; the test
      // asserts it is never called during key conversion.
      let getter_called = 0;

      // Runs `f` while Object.prototype has an enumerable getter for index
      // '50' (a hole in the sparse array); the getter is always removed
      // afterwards so it cannot interfere with the test harness.
      const with_proto_getter = f => {
        const prop = '50';
        Object.defineProperty(Object.prototype, prop, {
          enumerable: true,
          configurable: true,
          get: () => {
            ++getter_called;
            return 'foo';
          }
        });
        try {
          return f();
        } finally {
          delete Object.prototype[prop];
        }
      };

      const request = with_proto_getter(
          () => tx.objectStore('store').put({index0: sparse}, 'key'));
      request.onerror = t.unreached_func('put should not fail');
      request.onsuccess = t.step_func(() => {
        assert_equals(
            getter_called, 0, 'Prototype getter should not be called');
        t.done();
      });
    },
    'Array key conversion should not invoke prototype getters');
|
||||
|
|
@ -0,0 +1,15 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>IndexedDB: Special-cased identifiers in extracting keys from values (ES bindings)</title>
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/keypath-special-identifiers.any.js"></script>
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
// META: global=window,worker
// META: title=IndexedDB: Special-cased identifiers in extracting keys from values (ES bindings)
// META: script=resources/support.js

// Spec: https://w3c.github.io/IndexedDB/#extract-key-from-value

'use strict';

// Each case names a platform type, one of its special-cased key path
// identifiers, and an instance whose identifier value is used as the key.
const testCases = [
  {type: 'String', property: 'length', instance: 'abc'},
  {type: 'Array', property: 'length', instance: ['a', 'b', 'c']},
  {type: 'Blob', property: 'size', instance: new Blob(['abc'])},
  {type: 'Blob', property: 'type', instance: new Blob([''], {type: 'foo/bar'})},
  {type: 'File', property: 'name', instance: new File([''], 'foo')},
  {
    type: 'File',
    property: 'lastModified',
    instance: new File([''], '', {lastModified: 123})
  },
];

for (const testcase of testCases) {
  indexeddb_test(
      (t, db) => {
        db.createObjectStore(
            'store', {autoIncrement: true, keyPath: testcase.property});
      },
      (t, db) => {
        // The special-cased identifier's value should have been extracted as
        // the record's key; fetching by that key must return the instance.
        const key = testcase.instance[testcase.property];
        const tx =
            db.transaction('store', 'readwrite', {durability: 'relaxed'});
        tx.objectStore('store').put(testcase.instance);
        const request = tx.objectStore('store').get(key);
        request.onerror = t.unreached_func('request should not fail');
        request.onsuccess = t.step_func(() => {
          const result = request.result;
          assert_key_equals(
              result[testcase.property], key, 'Property should be used as key');
        });
        tx.oncomplete = t.step_func(() => {
          t.done();
        });
      },
      `Type: ${testcase.type}, identifier: ${testcase.property}`);
}
|
||||
|
|
@ -0,0 +1,212 @@
|
|||
'use strict';
|
||||
|
||||
// Should be large enough to trigger large value handling in the IndexedDB
|
||||
// engines that have special code paths for large values.
|
||||
const wrapThreshold = 128 * 1024;
|
||||
|
||||
// Returns an IndexedDB value created from a descriptor.
|
||||
//
|
||||
// See the bottom of the file for descriptor samples.
|
||||
function createValue(descriptor) {
|
||||
if (typeof(descriptor) != 'object')
|
||||
return descriptor;
|
||||
|
||||
if (Array.isArray(descriptor))
|
||||
return descriptor.map((element) => createValue(element));
|
||||
|
||||
if (!descriptor.hasOwnProperty('type')) {
|
||||
const value = {};
|
||||
for (let property of Object.getOwnPropertyNames(descriptor))
|
||||
value[property] = createValue(descriptor[property]);
|
||||
return value;
|
||||
}
|
||||
|
||||
switch (descriptor.type) {
|
||||
case 'blob':
|
||||
return new Blob(
|
||||
[largeValue(descriptor.size, descriptor.seed)],
|
||||
{ type: descriptor.mimeType });
|
||||
case 'buffer':
|
||||
return largeValue(descriptor.size, descriptor.seed);
|
||||
}
|
||||
}
|
||||
|
||||
// Checks an IndexedDB value against a descriptor.
|
||||
//
|
||||
// Returns a Promise that resolves if the value passes the check.
|
||||
//
|
||||
// See the bottom of the file for descriptor samples.
|
||||
// Checks an IndexedDB value read back from the database against a descriptor.
//
// Returns a Promise that resolves if the value matches the descriptor.
// Mismatches fail synchronously via the testharness assert_* functions,
// except Blob content mismatches, which surface asynchronously through the
// returned Promise.
//
// @param testCase   testharness.js test object (used for step_func).
// @param value      the value IndexedDB returned.
// @param descriptor the descriptor the value was created from; see the bottom
//                   of the file for descriptor samples.
function checkValue(testCase, value, descriptor) {
  // Primitives are compared directly.
  if (typeof(descriptor) != 'object') {
    assert_equals(
        descriptor, value,
        'IndexedDB result should match put() argument');
    return Promise.resolve();
  }

  // Arrays are compared element-wise.
  if (Array.isArray(descriptor)) {
    assert_true(
        Array.isArray(value),
        'IndexedDB result type should match put() argument');
    assert_equals(
        descriptor.length, value.length,
        'IndexedDB result array size should match put() argument');

    const subChecks = [];
    for (let i = 0; i < descriptor.length; ++i)
      subChecks.push(checkValue(testCase, value[i], descriptor[i]));
    return Promise.all(subChecks);
  }

  // Untagged objects are compared property by property.
  if (!descriptor.hasOwnProperty('type')) {
    assert_array_equals(
        Object.getOwnPropertyNames(value).sort(),
        Object.getOwnPropertyNames(descriptor).sort(),
        'IndexedDB result object properties should match put() argument');
    return Promise.all(Object.getOwnPropertyNames(descriptor).map(property =>
        checkValue(testCase, value[property], descriptor[property])));
  }

  switch (descriptor.type) {
    case 'blob':
      assert_class_string(
          value, 'Blob',
          'IndexedDB result class should match put() argument');
      assert_equals(
          descriptor.mimeType, value.type,
          'IndexedDB result Blob MIME type should match put() argument');
      assert_equals(descriptor.size, value.size, 'incorrect Blob size');
      // Blob content can only be inspected asynchronously, so the comparison
      // happens inside the returned Promise.
      return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onloadend = testCase.step_func(() => {
          if (reader.error) {
            reject(reader.error);
            return;
          }
          const view = new Uint8Array(reader.result);
          assert_equals(
              view.join(','),
              largeValue(descriptor.size, descriptor.seed).join(','),
              'IndexedDB result Blob content should match put() argument');
          resolve();
        });
        reader.readAsArrayBuffer(value);
      });

    case 'buffer':
      assert_class_string(
          value, 'Uint8Array',
          'IndexedDB result type should match put() argument');
      assert_equals(
          value.join(','),
          largeValue(descriptor.size, descriptor.seed).join(','),
          'IndexedDB result typed array content should match put() argument');
      return Promise.resolve();
  }
}
|
||||
|
||||
// Registers a promise_test that writes every descriptor in `valueDescriptors`
// to a fresh database via put(), reads each value back via get() in write
// order, and finally reads everything via getAll(). Both the values and the
// order in which the success events fire are verified.
//
// `options.useKeyGenerator` selects between out-of-line keys (1, 2, ...) and
// an auto-incrementing inline `primaryKey`.
function cloningTestInternal(label, valueDescriptors, options) {
  promise_test(testCase => {
    return createDatabase(testCase, (database, transaction) => {
      testCase.add_cleanup(() => database.close());
      const store = options.useKeyGenerator
          ? database.createObjectStore(
                'test-store', { keyPath: 'primaryKey', autoIncrement: true })
          : database.createObjectStore('test-store');
      valueDescriptors.forEach((descriptor, index) => {
        if (options.useKeyGenerator) {
          store.put(createValue(descriptor));
        } else {
          store.put(createValue(descriptor), index + 1);
        }
      });
    }).then(database => {
      const transaction = database.transaction(['test-store'], 'readonly');
      const store = transaction.objectStore('test-store');
      const pendingChecks = [];

      // Counts fired success events so each handler can assert that events
      // arrive in the order the requests were issued.
      let firedEventCount = 0;

      // One get() per descriptor, issued in write order.
      valueDescriptors.forEach((descriptor, index) => {
        pendingChecks.push(new Promise((resolve, reject) => {
          const primaryKey = index + 1;
          const request = store.get(primaryKey);
          request.onerror =
              testCase.step_func(() => { reject(request.error); });
          request.onsuccess = testCase.step_func(() => {
            assert_equals(
                firedEventCount, index,
                'IDBRequest success events should be fired in request order');
            ++firedEventCount;

            const result = request.result;
            if (options.useKeyGenerator) {
              assert_equals(
                  result.primaryKey, primaryKey,
                  'IndexedDB result should have auto-incremented primary key');
              // Strip the injected key so checkValue() sees the original
              // descriptor shape.
              delete result.primaryKey;
            }
            resolve(checkValue(testCase, result, descriptor));
          });
        }));
      });

      // A final getAll(), expected to fire after all the get()s.
      pendingChecks.push(new Promise((resolve, reject) => {
        const request = store.getAll();
        request.onerror =
            testCase.step_func(() => { reject(request.error); });
        request.onsuccess = testCase.step_func(() => {
          assert_equals(
              firedEventCount, valueDescriptors.length,
              'IDBRequest success events should be fired in request order');
          ++firedEventCount;
          const result = request.result;
          if (options.useKeyGenerator) {
            for (let i = 0; i < valueDescriptors.length; ++i) {
              assert_equals(
                  result[i].primaryKey, i + 1,
                  'IndexedDB result should have auto-incremented primary key');
              delete result[i].primaryKey;
            }
          }
          resolve(checkValue(testCase, result, valueDescriptors));
        });
      }));

      return Promise.all(pendingChecks);
    });
  }, label);
}
|
||||
|
||||
// Performs a series of put()s and verifies that get()s and getAll() match.
|
||||
//
|
||||
// Each element of the valueDescriptors array is fed into createValue(), and the
|
||||
// resulting value is written to IndexedDB via a put() request. After the writes
|
||||
// complete, the values are read in the same order in which they were written.
|
||||
// Last, all the results are read one more time via a getAll().
|
||||
//
|
||||
// The test verifies that the get() / getAll() results match the arguments to
|
||||
// put() and that the order in which the get() result events are fired matches
|
||||
// the order of the get() requests.
|
||||
// Performs a series of put()s and verifies that get()s and getAll() match,
// using out-of-line keys (no key generator).
function cloningTest(label, valueDescriptors) {
  const cloningOptions = { useKeyGenerator: false };
  cloningTestInternal(label, valueDescriptors, cloningOptions);
}
|
||||
|
||||
// cloningTest, with coverage for key generators.
|
||||
//
|
||||
// This creates two tests. One test performs a series of put()s and verifies
|
||||
// that get()s and getAll() match, exactly like cloningTestWithoutKeyGenerator.
|
||||
// The other test performs the same put()s in an object store with a key
|
||||
// generator, and checks that the key generator works properly.
|
||||
// cloningTest, with coverage for key generators.
//
// Registers two tests: the plain out-of-line-key test (identical to
// cloningTest), followed by the same writes against an object store with a
// key generator, verifying that the generator works properly.
function cloningTestWithKeyGenerator(label, valueDescriptors) {
  for (const useKeyGenerator of [false, true]) {
    const testLabel =
        useKeyGenerator ? label + " with key generator" : label;
    cloningTestInternal(testLabel, valueDescriptors, { useKeyGenerator });
  }
}
|
||||
|
|
@ -0,0 +1,581 @@
|
|||
// META: script=nested-cloning-common.js
|
||||
// META: script=support.js
|
||||
// META: script=support-promises.js
|
||||
|
||||
'use strict';
|
||||
|
||||
// Define constants used to populate object stores and indexes.
|
||||
const alphabet = 'abcdefghijklmnopqrstuvwxyz'.split('');
|
||||
const ALPHABET = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'.split('');
|
||||
const vowels = 'aeiou'.split('');
|
||||
|
||||
// Setup the object store identified by `storeName` to test `getAllKeys()`,
|
||||
// `getAll()` and `getAllRecords()`.
|
||||
// - `callback` is a function that runs after setup with the arguments: `test`,
|
||||
// `connection`, and `expectedRecords`.
|
||||
// - The `expectedRecords` callback argument records all of the keys and values
|
||||
// added to the object store during setup. It is an array of records where
|
||||
// each element contains a `key`, `primaryKey` and `value`. Tests can use
|
||||
// `expectedRecords` to verify the actual results from a get all request.
|
||||
// Populates the object store `storeName` for getAllKeys()/getAll()/
// getAllRecords() tests, then invokes `callback(test, connection,
// expectedRecords)`.
//
// `expectedRecords` is an array of {key, primaryKey, value} entries that
// mirrors everything written during setup, so tests can use it to verify the
// actual results of a get all request.
function object_store_get_all_test_setup(storeName, callback, testDescription) {
  const expectedRecords = [];

  indexeddb_test(
      (test, connection) => {
        switch (storeName) {
          case 'generated': {
            // Auto-generated, auto-incrementing, inline keys.
            const store = connection.createObjectStore(
                storeName, {autoIncrement: true, keyPath: 'id'});
            alphabet.forEach((letter, letterIndex) => {
              store.put({ch: letter});

              const generatedKey = letterIndex + 1;
              expectedRecords.push({
                key: generatedKey,
                primaryKey: generatedKey,
                value: {ch: letter}
              });
            });
            break;
          }
          case 'out-of-line': {
            // Out-of-line keys.
            const store = connection.createObjectStore(storeName);
            alphabet.forEach(letter => {
              store.put(`value-${letter}`, letter);

              expectedRecords.push(
                  {key: letter, primaryKey: letter, value: `value-${letter}`});
            });
            break;
          }
          case 'empty': {
            // No records at all.
            connection.createObjectStore(storeName);
            break;
          }
          case 'large-values': {
            // Three large values. `largeValue()` seeds each value with its
            // key; the keys start at 0 and increment by 1.
            const store = connection.createObjectStore(storeName);
            for (let i = 0; i < 3; i++) {
              const value = largeValue(/*size=*/ wrapThreshold, /*seed=*/ i);
              store.put(value, i);

              expectedRecords.push({key: i, primaryKey: i, value});
            }
            break;
          }
        }
      },
      // Forward `expectedRecords` to the caller's callback.
      (test, connection) => {
        callback(test, connection, expectedRecords);
      },
      testDescription);
}
|
||||
|
||||
// Similar to `object_store_get_all_test_setup()` above, but also creates an
|
||||
// index named `test_idx` for each object store.
|
||||
// Like `object_store_get_all_test_setup()`, but each object store also gets
// an index named `test_idx`, and each entry of `expectedRecords` is keyed by
// the index key instead of the primary key.
function index_get_all_test_setup(storeName, callback, testDescription) {
  const expectedRecords = [];

  indexeddb_test(
      (test, connection) => {
        switch (storeName) {
          case 'generated': {
            // Auto-incrementing inline primary keys; the index covers the
            // uppercase letter property `upper`.
            const store = connection.createObjectStore(
                storeName, {autoIncrement: true, keyPath: 'id'});
            store.createIndex('test_idx', 'upper');
            alphabet.forEach((letter, letterIndex) => {
              const value = {ch: letter, upper: letter.toUpperCase()};
              store.put(value);

              expectedRecords.push(
                  {key: value.upper, primaryKey: letterIndex + 1, value});
            });
            break;
          }
          case 'out-of-line': {
            // Out-of-line primary keys; the index covers `upper`.
            const store = connection.createObjectStore(storeName);
            store.createIndex('test_idx', 'upper');
            alphabet.forEach(letter => {
              const value = {ch: letter, upper: letter.toUpperCase()};
              store.put(value, letter);

              expectedRecords.push(
                  {key: value.upper, primaryKey: letter, value});
            });
            break;
          }
          case 'out-of-line-not-unique': {
            // Non-unique index on `half`, which is either 'first' or
            // 'second'.
            const store = connection.createObjectStore(storeName);
            store.createIndex('test_idx', 'half');
            alphabet.forEach(letter => {
              const half = letter > 'm' ? 'second' : 'first';
              const value = {ch: letter, half};
              store.put(value, letter);

              expectedRecords.push({key: half, primaryKey: letter, value});
            });
            break;
          }
          case 'out-of-line-multi': {
            // Multi-entry index on `attribs`, an array of strings.
            const store = connection.createObjectStore(storeName);
            store.createIndex('test_idx', 'attribs', {multiEntry: true});
            alphabet.forEach(letter => {
              const attrs = [vowels.includes(letter) ? 'vowel' : 'consonant'];
              if (letter == 'a')
                attrs.push('first');
              if (letter == 'z')
                attrs.push('last');

              const value = {ch: letter, attribs: attrs};
              store.put(value, letter);

              // A multi-entry index produces one record per array element.
              for (const attr of attrs)
                expectedRecords.push({key: attr, primaryKey: letter, value});
            });
            break;
          }
          case 'empty': {
            // An index with no records.
            const store = connection.createObjectStore(storeName);
            store.createIndex('test_idx', 'upper');
            break;
          }
          case 'large-values': {
            // Three large values plus their seed; the seed doubles as the
            // index key.
            const store = connection.createObjectStore('large-values');
            store.createIndex('test_idx', 'seed');
            for (let i = 0; i < 3; i++) {
              const seed = i;
              const randomValue = largeValue(/*size=*/ wrapThreshold, seed);
              const recordValue = {seed, randomValue};
              store.put(recordValue, i);

              expectedRecords.push(
                  {key: seed, primaryKey: i, value: recordValue});
            }
            break;
          }
          default: {
            test.assert_unreached(`Unknown storeName: ${storeName}`);
          }
        }
      },
      // Forward `expectedRecords` to the caller's callback.
      (test, connection) => {
        callback(test, connection, expectedRecords);
      },
      testDescription);
}
|
||||
|
||||
// Test `getAll()`, `getAllKeys()` or `getAllRecords()` on either `storeName` or
|
||||
// `optionalIndexName` with the given `options`.
|
||||
//
|
||||
// - `getAllFunctionName` is name of the function to test, which must be
|
||||
// `getAll`, `getAllKeys` or `getAllRecords`.
|
||||
//
|
||||
// - `options` is an `IDBGetAllOptions` dictionary that may contain a `query`,
|
||||
// `direction` and `count`.
|
||||
//
|
||||
// - `shouldUseDictionaryArgument` is true when testing the get all function
|
||||
// overloads that takes an `IDBGetAllOptions` dictionary. False tests the
|
||||
// overloads that take two optional arguments: `query` and `count`.
|
||||
// Tests one get all function on `storeName`, or on its `optionalIndexName`
// index when given.
//
// - `getAllFunctionName` must be 'getAll', 'getAllKeys' or 'getAllRecords'.
// - `options` is an `IDBGetAllOptions` dictionary that may contain a `query`,
//   `direction` and `count`.
// - `shouldUseDictionaryArgument` is true to exercise the overload taking an
//   `IDBGetAllOptions` dictionary; false exercises the overloads taking the
//   two optional arguments `query` and `count`.
function get_all_test(
    getAllFunctionName, storeName, optionalIndexName, options,
    shouldUseDictionaryArgument, testDescription) {
  const runGetAllAndVerify = (test, connection, expectedRecords) => {
    // Create a transaction and issue the get all request.
    const transaction = connection.transaction(storeName, 'readonly');
    const queryTarget = optionalIndexName
        ? transaction.objectStore(storeName).index(optionalIndexName)
        : transaction.objectStore(storeName);
    const request = createGetAllRequest(
        getAllFunctionName, queryTarget, options, shouldUseDictionaryArgument);
    request.onerror = test.unreached_func('The get all request must succeed');

    // Verify the results once the request completes.
    request.onsuccess = test.step_func(event => {
      const expectedResults = calculateExpectedGetAllResults(
          getAllFunctionName, expectedRecords, options);
      verifyGetAllResults(
          getAllFunctionName, event.target.result, expectedResults);
      test.done();
    });
  };

  const setup = optionalIndexName ? index_get_all_test_setup
                                  : object_store_get_all_test_setup;
  setup(storeName, runGetAllAndVerify, testDescription);
}
|
||||
|
||||
// Thin wrappers around get_all_test() that pin the get all function name, the
// query target (object store vs. the `test_idx` index), and whether the
// `IDBGetAllOptions` dictionary overload is exercised.

function object_store_get_all_keys_test(storeName, options, testDescription) {
  get_all_test('getAllKeys', storeName, undefined, options, false,
               testDescription);
}

function object_store_get_all_values_test(storeName, options, testDescription) {
  get_all_test('getAll', storeName, undefined, options, false,
               testDescription);
}

function object_store_get_all_values_with_options_test(
    storeName, options, testDescription) {
  get_all_test('getAll', storeName, undefined, options, true, testDescription);
}

function object_store_get_all_keys_with_options_test(
    storeName, options, testDescription) {
  get_all_test('getAllKeys', storeName, undefined, options, true,
               testDescription);
}

function object_store_get_all_records_test(
    storeName, options, testDescription) {
  get_all_test('getAllRecords', storeName, undefined, options, true,
               testDescription);
}

function index_get_all_keys_test(storeName, options, testDescription) {
  get_all_test('getAllKeys', storeName, 'test_idx', options, false,
               testDescription);
}

function index_get_all_keys_with_options_test(
    storeName, options, testDescription) {
  get_all_test('getAllKeys', storeName, 'test_idx', options, true,
               testDescription);
}

function index_get_all_values_test(storeName, options, testDescription) {
  get_all_test('getAll', storeName, 'test_idx', options, false,
               testDescription);
}

function index_get_all_values_with_options_test(
    storeName, options, testDescription) {
  get_all_test('getAll', storeName, 'test_idx', options, true,
               testDescription);
}

function index_get_all_records_test(storeName, options, testDescription) {
  get_all_test('getAllRecords', storeName, 'test_idx', options, true,
               testDescription);
}
|
||||
|
||||
// Issues a get all request on `queryTarget` (an object store or index) and
// returns the resulting IDBRequest.
//
// When `shouldUseDictionaryArgument` is set, `options` is passed as a single
// `IDBGetAllOptions` dictionary (which requires getAllRecords() support);
// otherwise the classic optional `query`/`count` arguments are used, omitted
// when undefined.
function createGetAllRequest(
    getAllFunctionName, queryTarget, options, shouldUseDictionaryArgument) {
  const getAllFunction = queryTarget[getAllFunctionName].bind(queryTarget);

  if (options) {
    if (shouldUseDictionaryArgument) {
      assert_true(
          'getAllRecords' in queryTarget,
          `"${queryTarget}" must support "getAllRecords()" to use an "IDBGetAllOptions" dictionary with "${
              getAllFunctionName}".`);
      return getAllFunction(options);
    }
    if (options.count)
      return getAllFunction(options.query, options.count);
    if (options.query)
      return getAllFunction(options.query);
  }
  return getAllFunction();
}
|
||||
|
||||
// Returns the expected results when `getAllFunctionName` is called with
|
||||
// `options` to query an object store or index containing `records`.
|
||||
// Returns the expected results when `getAllFunctionName` is called with
// `options` to query an object store or index containing `records`.
function calculateExpectedGetAllResults(getAllFunctionName, records, options) {
  const matchingRecords = filterWithGetAllRecordsOptions(records, options);
  switch (getAllFunctionName) {
    case 'getAll':
      return matchingRecords.map(record => record.value);
    case 'getAllKeys':
      return matchingRecords.map(record => record.primaryKey);
    case 'getAllRecords':
      return matchingRecords;
    default:
      assert_unreached(`Unknown getAllFunctionName: "${getAllFunctionName}"`);
  }
}
|
||||
|
||||
// Asserts that the array of results from `getAllFunctionName` matches the
|
||||
// expected results.
|
||||
// Asserts that the array of results from `getAllFunctionName` matches the
// expected results, using the comparison appropriate for that function's
// result type (values, primary keys, or records).
function verifyGetAllResults(getAllFunctionName, actual, expected) {
  switch (getAllFunctionName) {
    case 'getAll':
      assert_idb_values_equals(actual, expected);
      break;
    case 'getAllKeys':
      assert_array_equals(actual, expected);
      break;
    case 'getAllRecords':
      assert_records_equals(actual, expected);
      break;
    default:
      assert_unreached(`Unknown getAllFunctionName: "${getAllFunctionName}"`);
  }
}
|
||||
|
||||
// Returns the array of `records` that satisfy `options`. Tests may use this to
|
||||
// generate expected results.
|
||||
// - `records` is an array of objects where each object has the properties:
|
||||
// `key`, `primaryKey`, and `value`.
|
||||
// - `options` is an `IDBGetAllRecordsOptions ` dictionary that may contain a
|
||||
// `query`, `direction` and `count`.
|
||||
function filterWithGetAllRecordsOptions(records, options) {
|
||||
if (!options) {
|
||||
return records;
|
||||
}
|
||||
|
||||
// Remove records that don't satisfy the query.
|
||||
if (options.query) {
|
||||
let query = options.query;
|
||||
if (!(query instanceof IDBKeyRange)) {
|
||||
// Create an IDBKeyRange for the query's key value.
|
||||
query = IDBKeyRange.only(query);
|
||||
}
|
||||
records = records.filter(record => query.includes(record.key));
|
||||
}
|
||||
|
||||
// Remove duplicate records.
|
||||
if (options.direction === 'nextunique' ||
|
||||
options.direction === 'prevunique') {
|
||||
const uniqueRecords = [];
|
||||
records.forEach(record => {
|
||||
if (!uniqueRecords.some(
|
||||
unique => IDBKeyRange.only(unique.key).includes(record.key))) {
|
||||
uniqueRecords.push(record);
|
||||
}
|
||||
});
|
||||
records = uniqueRecords;
|
||||
}
|
||||
|
||||
// Reverse the order of the records.
|
||||
if (options.direction === 'prev' || options.direction === 'prevunique') {
|
||||
records = records.slice().reverse();
|
||||
}
|
||||
|
||||
// Limit the number of records.
|
||||
if (options.count) {
|
||||
records = records.slice(0, options.count);
|
||||
}
|
||||
return records;
|
||||
}
|
||||
|
||||
function isArrayOrArrayBufferView(value) {
|
||||
return Array.isArray(value) || ArrayBuffer.isView(value);
|
||||
}
|
||||
|
||||
// This function compares the string representation of the arrays because
|
||||
// `assert_array_equals()` is too slow for large values.
|
||||
// Compares the string representation of the two arrays, because
// assert_array_equals() is too slow for large values.
function assert_large_array_equals(actual, expected, description) {
  const actualString = actual.join(',');
  const expectedString = expected.join(',');
  assert_equals(actualString, expectedString, description);
}
|
||||
|
||||
// Verifies two IDB values are equal. The expected value may be a primitive, an
|
||||
// object, or an array.
|
||||
// Verifies two IDB values are equal. The expected value may be a primitive,
// an object, or an array; object properties are compared one level deep.
function assert_idb_value_equals(actual_value, expected_value) {
  // Array (or typed array) value: compare contents wholesale.
  if (isArrayOrArrayBufferView(expected_value)) {
    assert_large_array_equals(
        actual_value, expected_value,
        'The record must have the expected value');
    return;
  }

  // Primitive value: compare directly.
  if (typeof expected_value !== 'object') {
    assert_equals(
        actual_value, expected_value,
        'The record must have the expected value');
    return;
  }

  // Object value: verify each property.
  for (const property_name of Object.keys(expected_value)) {
    const expected_property = expected_value[property_name];
    const actual_property = actual_value[property_name];
    if (isArrayOrArrayBufferView(expected_property)) {
      // Array-valued property.
      assert_large_array_equals(
          actual_property, expected_property,
          `The record must contain the array value "${
              JSON.stringify(
                  expected_value)}" with property "${property_name}"`);
    } else {
      // Primitive-valued property.
      assert_equals(
          actual_property, expected_property,
          `The record must contain the value "${
              JSON.stringify(
                  expected_value)}" with property "${property_name}"`);
    }
  }
}
|
||||
|
||||
// Verifies each record from the results of `getAllRecords()`.
|
||||
// Verifies a single record from the results of `getAllRecords()` against an
// expected { key, primaryKey, value } triple.
function assert_record_equals(actual_record, expected_record) {
  assert_class_string(
      actual_record, 'IDBRecord', 'The record must be an IDBRecord');

  // The IDBRecord interface must expose all three attributes.
  const required_attributes = [
    ['key', 'The record must have a key attribute'],
    ['primaryKey', 'The record must have a primaryKey attribute'],
    ['value', 'The record must have a value attribute'],
  ];
  for (const [attribute, message] of required_attributes) {
    assert_idl_attribute(actual_record, attribute, message);
  }

  // Verify the attributes: `key`, `primaryKey` and `value`.
  assert_equals(
      actual_record.primaryKey, expected_record.primaryKey,
      'The record must have the expected primaryKey');
  assert_equals(
      actual_record.key, expected_record.key,
      'The record must have the expected key');
  assert_idb_value_equals(actual_record.value, expected_record.value);
}
|
||||
|
||||
// Verifies the results from `getAllRecords()`, which is an array of records:
|
||||
// [
|
||||
// { 'key': key1, 'primaryKey': primary_key1, 'value': value1 },
|
||||
// { 'key': key2, 'primaryKey': primary_key2, 'value': value2 },
|
||||
// ...
|
||||
// ]
|
||||
// Verifies the results from `getAllRecords()`, which is an array of records:
// [
//   { 'key': key1, 'primaryKey': primary_key1, 'value': value1 },
//   { 'key': key2, 'primaryKey': primary_key2, 'value': value2 },
//   ...
// ]
function assert_records_equals(actual_records, expected_records) {
  assert_true(
      Array.isArray(actual_records),
      'The records must be an array of IDBRecords');
  assert_equals(
      actual_records.length, expected_records.length,
      'The records array must contain the expected number of records');

  actual_records.forEach((record, i) => {
    assert_record_equals(record, expected_records[i]);
  });
}
|
||||
|
||||
// Verifies the results from `getAll()`, which is an array of IndexedDB record
|
||||
// values.
|
||||
// Verifies the results from `getAll()`, which is an array of IndexedDB record
// values.
function assert_idb_values_equals(actual_values, expected_values) {
  assert_true(Array.isArray(actual_values), 'The values must be an array');
  assert_equals(
      actual_values.length, expected_values.length,
      'The values array must contain the expected number of values');

  actual_values.forEach((value, i) => {
    assert_idb_value_equals(value, expected_values[i]);
  });
}
|
||||
|
||||
// Test passing both an options dictionary and a count to `getAll()` and
|
||||
// `getAllKeys()`. The get all request must ignore the `count` argument, using
|
||||
// count from the options dictionary instead.
|
||||
// Tests passing both an options dictionary and a count to `getAll()` /
// `getAllKeys()`. The request must ignore the trailing `count` argument and
// use the count from the options dictionary instead.
function get_all_with_options_and_count_test(
    getAllFunctionName, storeName, optionalIndexName, testDescription) {
  // Set up the object store or index to query.
  const setupFunction = optionalIndexName ? index_get_all_test_setup :
                                            object_store_get_all_test_setup;

  setupFunction(storeName, (test, connection, expectedRecords) => {
    const transaction = connection.transaction(storeName, 'readonly');
    const objectStore = transaction.objectStore(storeName);
    const queryTarget =
        optionalIndexName ? objectStore.index(optionalIndexName) : objectStore;

    const options = {count: 10};
    // The extra positional count (17) must be ignored in favor of
    // `options.count`.
    const request = queryTarget[getAllFunctionName](options, /*count=*/ 17);

    request.onerror =
        test.unreached_func(`"${getAllFunctionName}()" request must succeed.`);

    request.onsuccess = test.step_func(event => {
      const expectedResults = calculateExpectedGetAllResults(
          getAllFunctionName, expectedRecords, options);
      verifyGetAllResults(
          getAllFunctionName, event.target.result, expectedResults);
      test.done();
    });
  }, testDescription);
}
|
||||
|
||||
// Get all operations must throw a `DataError` exception for invalid query keys.
|
||||
// See `get_all_test()` above for a description of the parameters.
|
||||
// Get all operations must throw a `DataError` exception for invalid query
// keys. `shouldUseDictionaryArgument` selects between passing the key bare or
// wrapped in an options dictionary as `{query: key}`.
function get_all_with_invalid_keys_test(
    getAllFunctionName, storeName, optionalIndexName,
    shouldUseDictionaryArgument, testDescription) {
  // Set up the object store or index to query.
  const setupFunction = optionalIndexName ? index_get_all_test_setup :
                                            object_store_get_all_test_setup;

  setupFunction(storeName, (test, connection, expectedRecords) => {
    const transaction = connection.transaction(storeName, 'readonly');
    let queryTarget = transaction.objectStore(storeName);
    if (optionalIndexName) {
      queryTarget = queryTarget.index(optionalIndexName);
    }

    // Values that are not valid IndexedDB keys.
    const invalidKeys = [
      {
        description: 'Date(NaN)',
        value: new Date(NaN),
      },
      {
        description: 'Array',
        value: [{}],
      },
      {
        description: 'detached TypedArray',
        value: createDetachedArrayBuffer(),
      },
      {
        description: 'detached ArrayBuffer',
        value: createDetachedArrayBuffer().buffer
      },
    ];

    for (const {description, value} of invalidKeys) {
      const argument = shouldUseDictionaryArgument ? {query: value} : value;
      assert_throws_dom('DataError', () => {
        queryTarget[getAllFunctionName](argument);
      }, `An invalid ${description} key must throw an exception.`);
    }
    test.done();
  }, testDescription);
}
|
||||
|
|
@ -0,0 +1,369 @@
|
|||
'use strict';
|
||||
|
||||
// Returns an IndexedDB database name that is unique to the test case.
|
||||
// Returns an IndexedDB database name that is unique to the test case
// (combines the page's path with the test case name).
function databaseName(testCase) {
  return `db${self.location.pathname}-${testCase.name}`;
}
|
||||
|
||||
// EventWatcher covering all the events defined on IndexedDB requests.
|
||||
//
|
||||
// The events cover IDBRequest and IDBOpenDBRequest.
|
||||
// EventWatcher covering all the events defined on IndexedDB requests
// (both IDBRequest and IDBOpenDBRequest).
function requestWatcher(testCase, request) {
  const requestEvents = ['blocked', 'error', 'success', 'upgradeneeded'];
  return new EventWatcher(testCase, request, requestEvents);
}
|
||||
|
||||
// EventWatcher covering all the events defined on IndexedDB transactions.
|
||||
//
|
||||
// The events cover IDBTransaction.
|
||||
// EventWatcher covering all the events defined on IDBTransaction.
function transactionWatcher(testCase, transaction) {
  const transactionEvents = ['abort', 'complete', 'error'];
  return new EventWatcher(testCase, transaction, transactionEvents);
}
|
||||
|
||||
// Promise that resolves with an IDBRequest's result.
|
||||
//
|
||||
// The promise only resolves if IDBRequest receives the "success" event. Any
|
||||
// other event causes the promise to reject with an error. This is correct in
|
||||
// most cases, but insufficient for indexedDB.open(), which issues
|
||||
// "upgradeneded" events under normal operation.
|
||||
// Promise that resolves with an IDBRequest's result.
//
// Resolves only on the "success" event; any other event rejects with an
// error. That is correct for most requests, but insufficient for
// indexedDB.open(), which fires "upgradeneeded" under normal operation.
function promiseForRequest(testCase, request) {
  return requestWatcher(testCase, request)
      .wait_for('success')
      .then(event => event.target.result);
}
|
||||
|
||||
// Promise that resolves when an IDBTransaction completes.
|
||||
//
|
||||
// The promise resolves with undefined if IDBTransaction receives the "complete"
|
||||
// event, and rejects with an error for any other event.
|
||||
//
|
||||
// NB: be careful NOT to invoke this after the transaction may have already
|
||||
// completed due to racing transaction auto-commit. A problematic sequence might
|
||||
// look like:
|
||||
//
|
||||
// const txn = db.transaction('store', 'readwrite');
|
||||
// txn.objectStore('store').put(value, key);
|
||||
// await foo();
|
||||
// await promiseForTransaction(t, txn);
|
||||
// Promise that resolves when an IDBTransaction completes.
//
// Resolves with undefined on the "complete" event and rejects with an error
// for any other event.
//
// NB: do NOT invoke this after the transaction may have already completed due
// to racing transaction auto-commit. A problematic sequence might look like:
//
//   const txn = db.transaction('store', 'readwrite');
//   txn.objectStore('store').put(value, key);
//   await foo();
//   await promiseForTransaction(t, txn);
function promiseForTransaction(testCase, transaction) {
  return transactionWatcher(testCase, transaction).wait_for('complete');
}
|
||||
|
||||
// Migrates an IndexedDB database whose name is unique for the test case.
|
||||
//
|
||||
// newVersion must be greater than the database's current version.
|
||||
//
|
||||
// migrationCallback will be called during a versionchange transaction and will
|
||||
// given the created database, the versionchange transaction, and the database
|
||||
// open request.
|
||||
//
|
||||
// Returns a promise. If the versionchange transaction goes through, the promise
|
||||
// resolves to an IndexedDB database that should be closed by the caller. If the
|
||||
// versionchange transaction is aborted, the promise resolves to an error.
|
||||
// Migrates the IndexedDB database whose name is unique to the test case.
// See migrateNamedDatabase() for the contract of `newVersion` and
// `migrationCallback`, and for the shape of the returned promise.
function migrateDatabase(testCase, newVersion, migrationCallback) {
  const name = databaseName(testCase);
  return migrateNamedDatabase(testCase, name, newVersion, migrationCallback);
}
|
||||
|
||||
// Migrates an IndexedDB database.
|
||||
//
|
||||
// newVersion must be greater than the database's current version.
|
||||
//
|
||||
// migrationCallback will be called during a versionchange transaction and will
|
||||
// given the created database, the versionchange transaction, and the database
|
||||
// open request.
|
||||
//
|
||||
// Returns a promise. If the versionchange transaction goes through, the promise
|
||||
// resolves to an IndexedDB database that should be closed by the caller. If the
|
||||
// versionchange transaction is aborted, the promise resolves to an error.
|
||||
// Migrates an IndexedDB database.
//
// `newVersion` must be greater than the database's current version.
// `migrationCallback` is called during the versionchange transaction with the
// created database, the versionchange transaction, and the open request.
//
// Returns a promise. If the versionchange transaction goes through, it
// resolves to an IndexedDB database that the caller should close. If the
// versionchange transaction is aborted, it resolves to an error.
function migrateNamedDatabase(
    testCase, databaseName, newVersion, migrationCallback) {
  // We cannot use eventWatcher.wait_for('upgradeneeded') here, because the
  // versionchange transaction auto-commits before the Promise's then callback
  // gets called.
  return new Promise((resolve, reject) => {
    const request = indexedDB.open(databaseName, newVersion);
    request.onupgradeneeded = testCase.step_func(event => {
      const database = event.target.result;
      const transaction = event.target.transaction;
      let shouldBeAborted = false;
      let requestEventPromise = null;

      // Wrap IDBTransaction.abort so the correct event listeners and
      // expectations are installed if the test chooses to abort the
      // versionchange transaction.
      const originalAbort = transaction.abort.bind(transaction);
      transaction.abort = () => {
        transaction._willBeAborted();
        originalAbort();
      };
      transaction._willBeAborted = () => {
        // An aborted versionchange transaction fires "error" on the open
        // request; success would be a bug.
        requestEventPromise = new Promise((innerResolve, innerReject) => {
          request.onerror = event => {
            event.preventDefault();
            innerResolve(event.target.error);
          };
          request.onsuccess = () => innerReject(new Error(
              'indexedDB.open should not succeed for an aborted ' +
              'versionchange transaction'));
        });
        shouldBeAborted = true;
      };

      // If the migration callback returns a promise, wait for it to resolve.
      // This simplifies some tests.
      const callbackResult = migrationCallback(database, transaction, request);
      if (!shouldBeAborted) {
        request.onerror = null;
        request.onsuccess = null;
        requestEventPromise = promiseForRequest(testCase, request);
      }

      // requestEventPromise must be the last promise in the chain, because we
      // want the event that it resolves to.
      resolve(Promise.resolve(callbackResult).then(() => requestEventPromise));
    });
    request.onerror = event => reject(event.target.error);
    request.onsuccess = () => {
      // Reaching "success" without "upgradeneeded" means no migration ran.
      const database = request.result;
      testCase.add_cleanup(() => { database.close(); });
      reject(new Error(
          'indexedDB.open should not succeed without creating a ' +
          'versionchange transaction'));
    };
  }).then(databaseOrError => {
    if (databaseOrError instanceof IDBDatabase)
      testCase.add_cleanup(() => { databaseOrError.close(); });
    return databaseOrError;
  });
}
|
||||
|
||||
// Creates an IndexedDB database whose name is unique for the test case.
|
||||
//
|
||||
// setupCallback will be called during a versionchange transaction, and will be
|
||||
// given the created database, the versionchange transaction, and the database
|
||||
// open request.
|
||||
//
|
||||
// Returns a promise that resolves to an IndexedDB database. The caller should
|
||||
// close the database.
|
||||
// Creates an IndexedDB database whose name is unique for the test case.
// See createNamedDatabase() for the setupCallback contract; returns a promise
// that resolves to a database the caller should close.
function createDatabase(testCase, setupCallback) {
  const name = databaseName(testCase);
  return createNamedDatabase(testCase, name, setupCallback);
}
|
||||
|
||||
// Creates an IndexedDB database.
|
||||
//
|
||||
// setupCallback will be called during a versionchange transaction, and will be
|
||||
// given the created database, the versionchange transaction, and the database
|
||||
// open request.
|
||||
//
|
||||
// Returns a promise that resolves to an IndexedDB database. The caller should
|
||||
// close the database.
|
||||
// Creates an IndexedDB database with the given name.
//
// `setupCallback` is called during a versionchange transaction with the
// created database, the transaction, and the open request.
//
// Returns a promise that resolves to an IndexedDB database. The caller should
// close the database.
function createNamedDatabase(testCase, databaseName, setupCallback) {
  // Delete any leftover database first so version 1 is a fresh create.
  const deleteRequest = indexedDB.deleteDatabase(databaseName);
  return promiseForRequest(testCase, deleteRequest).then(() => {
    testCase.add_cleanup(() => { indexedDB.deleteDatabase(databaseName); });
    return migrateNamedDatabase(testCase, databaseName, 1, setupCallback);
  });
}
|
||||
|
||||
// Opens an IndexedDB database without performing schema changes.
|
||||
//
|
||||
// The given version number must match the database's current version.
|
||||
//
|
||||
// Returns a promise that resolves to an IndexedDB database. The caller should
|
||||
// close the database.
|
||||
// Opens the test case's uniquely-named IndexedDB database without performing
// schema changes. `version` must match the database's current version.
// Returns a promise resolving to a database the caller should close.
function openDatabase(testCase, version) {
  return openNamedDatabase(testCase, databaseName(testCase), version);
}
|
||||
|
||||
// Opens an IndexedDB database without performing schema changes.
|
||||
//
|
||||
// The given version number must match the database's current version.
|
||||
//
|
||||
// Returns a promise that resolves to an IndexedDB database. The caller should
|
||||
// close the database.
|
||||
// Opens an IndexedDB database without performing schema changes.
//
// The given version number must match the database's current version.
//
// Returns a promise that resolves to an IndexedDB database. The caller should
// close the database.
function openNamedDatabase(testCase, databaseName, version) {
  const openRequest = indexedDB.open(databaseName, version);
  return promiseForRequest(testCase, openRequest).then(database => {
    testCase.add_cleanup(() => { database.close(); });
    return database;
  });
}
|
||||
|
||||
// The data in the 'books' object store records in the first example of the
|
||||
// IndexedDB specification.
|
||||
// The data in the 'books' object store records in the first example of the
// IndexedDB specification.
const BOOKS_RECORD_DATA = [
  {title: 'Quarry Memories', author: 'Fred', isbn: 123456},
  {title: 'Water Buffaloes', author: 'Fred', isbn: 234567},
  {title: 'Bedrock Nights', author: 'Barney', isbn: 345678},
];
|
||||
|
||||
// Creates a 'books' object store whose contents closely resembles the first
|
||||
// example in the IndexedDB specification.
|
||||
// Creates a 'books' object store whose contents closely resembles the first
// example in the IndexedDB specification (keyPath 'isbn', auto-incrementing,
// with 'by_author' and unique 'by_title' indexes).
const createBooksStore = (testCase, database) => {
  const store = database.createObjectStore(
      'books', {keyPath: 'isbn', autoIncrement: true});
  store.createIndex('by_author', 'author');
  store.createIndex('by_title', 'title', {unique: true});
  for (const record of BOOKS_RECORD_DATA) {
    store.put(record);
  }
  return store;
};
|
||||
|
||||
// Creates a 'books' object store whose contents closely resembles the first
|
||||
// example in the IndexedDB specification, just without autoincrementing.
|
||||
// Creates a 'books' object store whose contents closely resembles the first
// example in the IndexedDB specification, just without autoincrementing.
const createBooksStoreWithoutAutoIncrement = (testCase, database) => {
  const store = database.createObjectStore('books', {keyPath: 'isbn'});
  store.createIndex('by_author', 'author');
  store.createIndex('by_title', 'title', {unique: true});
  for (const record of BOOKS_RECORD_DATA) {
    store.put(record);
  }
  return store;
};
|
||||
|
||||
// Creates a 'not_books' object store used to test renaming into existing or
|
||||
// deleted store names.
|
||||
// Creates a 'not_books' object store used to test renaming into existing or
// deleted store names.
function createNotBooksStore(testCase, database) {
  const store = database.createObjectStore('not_books');
  store.createIndex('not_by_author', 'author');
  store.createIndex('not_by_title', 'title', {unique: true});
  return store;
}
|
||||
|
||||
// Verifies that an object store's indexes match the indexes used to create the
|
||||
// books store in the test database's version 1.
|
||||
//
|
||||
// The errorMessage is used if the assertions fail. It can state that the
|
||||
// IndexedDB implementation being tested is incorrect, or that the testing code
|
||||
// is using it incorrectly.
|
||||
// Verifies that an object store's indexes match the indexes used to create
// the books store in the test database's version 1.
//
// The errorMessage is used if the assertions fail. It can state that the
// IndexedDB implementation being tested is incorrect, or that the testing
// code is using it incorrectly.
function checkStoreIndexes(testCase, store, errorMessage) {
  assert_array_equals(
      store.indexNames, ['by_author', 'by_title'], errorMessage);
  return Promise.all([
    checkAuthorIndexContents(testCase, store.index('by_author'), errorMessage),
    checkTitleIndexContents(testCase, store.index('by_title'), errorMessage),
  ]);
}
|
||||
|
||||
// Verifies that an object store's key generator is in the same state as the
|
||||
// key generator created for the books store in the test database's version 1.
|
||||
//
|
||||
// The errorMessage is used if the assertions fail. It can state that the
|
||||
// IndexedDB implementation being tested is incorrect, or that the testing code
|
||||
// is using it incorrectly.
|
||||
// Verifies that an object store's key generator is in the same state as the
// key generator created for the books store in the test database's version 1,
// by inserting a record and checking the key it was assigned.
//
// The errorMessage is used if the assertion fails.
function checkStoreGenerator(testCase, store, expectedKey, errorMessage) {
  const record = {title: 'Bedrock Nights ' + expectedKey, author: 'Barney'};
  return promiseForRequest(testCase, store.put(record)).then(result => {
    assert_equals(result, expectedKey, errorMessage);
  });
}
|
||||
|
||||
// Verifies that an object store's contents matches the contents used to create
|
||||
// the books store in the test database's version 1.
|
||||
//
|
||||
// The errorMessage is used if the assertions fail. It can state that the
|
||||
// IndexedDB implementation being tested is incorrect, or that the testing code
|
||||
// is using it incorrectly.
|
||||
// Verifies that an object store's contents matches the contents used to
// create the books store in the test database's version 1.
//
// The errorMessage is used if the assertions fail.
function checkStoreContents(testCase, store, errorMessage) {
  const expected = BOOKS_RECORD_DATA[0];
  return promiseForRequest(testCase, store.get(123456)).then(result => {
    assert_equals(result.isbn, expected.isbn, errorMessage);
    assert_equals(result.author, expected.author, errorMessage);
    assert_equals(result.title, expected.title, errorMessage);
  });
}
|
||||
|
||||
// Verifies that index matches the 'by_author' index used to create the
|
||||
// by_author books store in the test database's version 1.
|
||||
//
|
||||
// The errorMessage is used if the assertions fail. It can state that the
|
||||
// IndexedDB implementation being tested is incorrect, or that the testing code
|
||||
// is using it incorrectly.
|
||||
// Verifies that an index matches the 'by_author' index used to create the
// books store in the test database's version 1.
//
// The errorMessage is used if the assertions fail.
function checkAuthorIndexContents(testCase, index, errorMessage) {
  const expected = BOOKS_RECORD_DATA[2];
  return promiseForRequest(testCase, index.get(expected.author)).then(result => {
    assert_equals(result.isbn, expected.isbn, errorMessage);
    assert_equals(result.title, expected.title, errorMessage);
  });
}
|
||||
|
||||
// Verifies that an index matches the 'by_title' index used to create the books
|
||||
// store in the test database's version 1.
|
||||
//
|
||||
// The errorMessage is used if the assertions fail. It can state that the
|
||||
// IndexedDB implementation being tested is incorrect, or that the testing code
|
||||
// is using it incorrectly.
|
||||
// Verifies that an index matches the 'by_title' index used to create the
// books store in the test database's version 1.
//
// The errorMessage is used if the assertions fail.
function checkTitleIndexContents(testCase, index, errorMessage) {
  const expected = BOOKS_RECORD_DATA[2];
  return promiseForRequest(testCase, index.get(expected.title)).then(result => {
    assert_equals(result.isbn, expected.isbn, errorMessage);
    assert_equals(result.author, expected.author, errorMessage);
  });
}
|
||||
|
||||
// Returns an Uint8Array.
|
||||
// When `seed` is non-zero, the data is pseudo-random, otherwise it is repetitive.
|
||||
// The PRNG should be sufficient to defeat compression schemes, but it is not
|
||||
// cryptographically strong.
|
||||
// Returns a Uint8Array of `size` bytes.
//
// When `seed` is non-zero, the data is pseudo-random (32-bit xorshift);
// otherwise it is repetitive. The PRNG should be sufficient to defeat
// compression schemes, but it is not cryptographically strong.
function largeValue(size, seed) {
  const buffer = new Uint8Array(size);

  // Fill with a lot of the same byte. Note that the final byte is left as 0,
  // since fill() stops at `size - 1`.
  // Use strict equality: a loose `==` would also match '0', null, etc.,
  // silently taking the repetitive path for non-numeric seeds.
  if (seed === 0) {
    buffer.fill(0x11, 0, size - 1);
    return buffer;
  }

  // 32-bit xorshift - the seed can't be zero.
  let state = 1000 + seed;

  for (let i = 0; i < size; ++i) {
    state ^= state << 13;
    state ^= state >> 17;
    state ^= state << 5;
    buffer[i] = state & 0xff;
  }

  return buffer;
}
|
||||
|
||||
// Deletes every database reported by indexedDB.databases(), waiting for each
// deletion to succeed before starting the next.
async function deleteAllDatabases(testCase) {
  const databases = await indexedDB.databases();
  for (const databaseInfo of databases) {
    const request = indexedDB.deleteDatabase(databaseInfo.name);
    const watcher = requestWatcher(testCase, request);
    await watcher.wait_for('success');
  }
}
|
||||
|
||||
// Keeps the passed transaction alive indefinitely (by making requests
|
||||
// against the named store). Returns a function that asserts that the
|
||||
// transaction has not already completed and then ends the request loop so that
|
||||
// the transaction may autocommit and complete.
|
||||
// Keeps the passed transaction alive indefinitely (by making requests
// against the named store). Returns a function that asserts that the
// transaction has not already completed and then ends the request loop so
// that the transaction may autocommit and complete.
function keepAlive(testCase, transaction, storeName) {
  let completed = false;
  transaction.addEventListener('complete', () => { completed = true; });

  let keepSpinning = true;

  // Each get() success schedules another get(), so the transaction always has
  // a pending request and cannot auto-commit until the loop is stopped.
  const spin = () => {
    if (!keepSpinning)
      return;
    transaction.objectStore(storeName).get(0).onsuccess = spin;
  };
  spin();

  return testCase.step_func(() => {
    assert_false(completed, 'Transaction completed while kept alive');
    keepSpinning = false;
  });
}
|
||||
|
||||
// Return a promise that resolves after a setTimeout finishes to break up the
|
||||
// scope of a function's execution.
|
||||
// Returns a promise that resolves after a setTimeout finishes, to break up
// the scope of a function's execution.
function timeoutPromise(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
<!doctype html>
|
||||
<meta charset=utf-8>
|
||||
<title>Indexed DB and Structured Serializing/Deserializing</title>
|
||||
<meta name="timeout" content="long">
|
||||
<script>
|
||||
self.GLOBAL = {
|
||||
isWindow: function() { return true; },
|
||||
isWorker: function() { return false; },
|
||||
isShadowRealm: function() { return false; },
|
||||
};
|
||||
</script>
|
||||
<script src="../resources/testharness.js"></script>
|
||||
<script src="../resources/testharnessreport.js"></script>
|
||||
<script src="resources/support-promises.js"></script>
|
||||
<script src="../common/subset-tests.js"></script>
|
||||
<div id=log></div>
|
||||
<script src="../IndexedDB/structured-clone.any.js"></script>
|
||||
|
|
@ -0,0 +1,334 @@
|
|||
// META: title=Indexed DB and Structured Serializing/Deserializing
|
||||
// META: timeout=long
|
||||
// META: script=resources/support-promises.js
|
||||
// META: script=/common/subset-tests.js
|
||||
// META: variant=?1-20
|
||||
// META: variant=?21-40
|
||||
// META: variant=?41-60
|
||||
// META: variant=?61-80
|
||||
// META: variant=?81-100
|
||||
// META: variant=?101-last
|
||||
|
||||
// Tests Indexed DB coverage of HTML's Safe "passing of structured data"
|
||||
// https://html.spec.whatwg.org/multipage/structured-data.html
|
||||
|
||||
// Produces a human-readable "<type>: <value>" label for `value`, used as the
// test case name. Negative zero is special-cased because String(-0) is '0'.
function describe(value) {
  const stringify = v => (Object.is(v, -0) ? '-0' : String(v));
  if (typeof value === 'object' && value) {
    // Wrapper objects (e.g. Number(-0)) are labeled by their constructor.
    const type = Object.getPrototypeOf(value).constructor.name;
    return `${type}: ${stringify(value.valueOf())}`;
  }
  // Primitives (and null, which is typeof 'object' but falsy).
  return `${typeof value}: ${stringify(value)}`;
}
|
||||
|
||||
// Registers a subset test that round-trips `value` through an IndexedDB
// object store and hands (original, clone) to `verifyFunc`.
function cloneTest(value, verifyFunc) {
  subsetTest(promise_test, async t => {
    const db = await createDatabase(t, db => {
      const store = db.createObjectStore('store');
      // This index is not used, but evaluating key path on each put()
      // call will exercise (de)serialization.
      store.createIndex('index', 'dummyKeyPath');
    });
    t.add_cleanup(() => {
      if (db) {
        db.close();
        indexedDB.deleteDatabase(db.name);
      }
    });

    const tx = db.transaction('store', 'readwrite');
    const store = tx.objectStore('store');
    await promiseForRequest(t, store.put(value, 'key'));
    const result = await promiseForRequest(t, store.get('key'));

    // Because the async verifyFunc may await async values that are
    // independent of the transaction lifetime (ex: blob.text()), we must only
    // await it after adding listeners to the transaction.
    await promiseForTransaction(t, tx);
    await verifyFunc(value, result);
  }, describe(value));
}
|
||||
|
||||
// Specialization of cloneTest() for objects, with common asserts.
|
||||
function cloneObjectTest(value, verifyFunc) {
|
||||
cloneTest(value, async (orig, clone) => {
|
||||
assert_not_equals(orig, clone);
|
||||
assert_equals(typeof clone, 'object');
|
||||
assert_equals(Object.getPrototypeOf(orig), Object.getPrototypeOf(clone));
|
||||
await verifyFunc(orig, clone);
|
||||
});
|
||||
}
|
||||
|
||||
// Registers a subset test asserting that storing `value` in IndexedDB throws
// a DataCloneError (i.e. the value is not structured-serializable).
function cloneFailureTest(value) {
  subsetTest(promise_test, async t => {
    const db = await createDatabase(t, database => {
      database.createObjectStore('store');
    });
    t.add_cleanup(() => {
      if (db) {
        db.close();
        indexedDB.deleteDatabase(db.name);
      }
    });

    const tx = db.transaction('store', 'readwrite');
    const store = tx.objectStore('store');
    assert_throws_dom('DataCloneError', () => store.put(value, 'key'));
  }, 'Not serializable: ' + describe(value));
}
|
||||
|
||||
//
|
||||
// ECMAScript types
|
||||
//
|
||||
|
||||
// Primitive values: Undefined, Null, Boolean, Number, BigInt, String
|
||||
const booleans = [false, true];
|
||||
const numbers = [
|
||||
NaN,
|
||||
-Infinity,
|
||||
-Number.MAX_VALUE,
|
||||
-0xffffffff,
|
||||
-0x80000000,
|
||||
-0x7fffffff,
|
||||
-1,
|
||||
-Number.MIN_VALUE,
|
||||
-0,
|
||||
0,
|
||||
1,
|
||||
Number.MIN_VALUE,
|
||||
0x7fffffff,
|
||||
0x80000000,
|
||||
0xffffffff,
|
||||
Number.MAX_VALUE,
|
||||
Infinity,
|
||||
];
|
||||
const bigints = [
|
||||
-12345678901234567890n,
|
||||
-1n,
|
||||
0n,
|
||||
1n,
|
||||
12345678901234567890n,
|
||||
];
|
||||
const strings = [
|
||||
'',
|
||||
'this is a sample string',
|
||||
'null(\0)',
|
||||
];
|
||||
|
||||
[undefined, null].concat(booleans, numbers, bigints, strings)
|
||||
.forEach(value => cloneTest(value, (orig, clone) => {
|
||||
assert_equals(orig, clone);
|
||||
}));
|
||||
|
||||
// "Primitive" Objects (Boolean, Number, BigInt, String)
|
||||
[].concat(booleans, numbers, bigints, strings)
|
||||
.forEach(value => cloneObjectTest(Object(value), (orig, clone) => {
|
||||
assert_equals(orig.valueOf(), clone.valueOf());
|
||||
}));
|
||||
|
||||
// Dates
|
||||
[
|
||||
new Date(-1e13),
|
||||
new Date(-1e12),
|
||||
new Date(-1e9),
|
||||
new Date(-1e6),
|
||||
new Date(-1e3),
|
||||
new Date(0),
|
||||
new Date(1e3),
|
||||
new Date(1e6),
|
||||
new Date(1e9),
|
||||
new Date(1e12),
|
||||
new Date(1e13)
|
||||
].forEach(value => cloneTest(value, (orig, clone) => {
|
||||
assert_not_equals(orig, clone);
|
||||
assert_equals(typeof clone, 'object');
|
||||
assert_equals(Object.getPrototypeOf(orig), Object.getPrototypeOf(clone));
|
||||
assert_equals(orig.valueOf(), clone.valueOf());
|
||||
}));
|
||||
|
||||
// Regular Expressions
|
||||
[
|
||||
new RegExp(),
|
||||
/abc/,
|
||||
/abc/g,
|
||||
/abc/i,
|
||||
/abc/gi,
|
||||
/abc/m,
|
||||
/abc/mg,
|
||||
/abc/mi,
|
||||
/abc/mgi,
|
||||
/abc/gimsuy,
|
||||
].forEach(value => cloneObjectTest(value, (orig, clone) => {
|
||||
assert_equals(orig.toString(), clone.toString());
|
||||
}));
|
||||
|
||||
// ArrayBuffer
|
||||
cloneObjectTest(new Uint8Array([0, 1, 254, 255]).buffer, (orig, clone) => {
|
||||
assert_array_equals(new Uint8Array(orig), new Uint8Array(clone));
|
||||
});
|
||||
|
||||
// TODO SharedArrayBuffer
|
||||
|
||||
// Array Buffer Views
|
||||
let byteArrays = [
|
||||
new Uint8Array([]),
|
||||
new Uint8Array([0, 1, 254, 255]),
|
||||
new Uint16Array([0x0000, 0x0001, 0xFFFE, 0xFFFF]),
|
||||
new Uint32Array([0x00000000, 0x00000001, 0xFFFFFFFE, 0xFFFFFFFF]),
|
||||
new Int8Array([0, 1, 254, 255]),
|
||||
new Int16Array([0x0000, 0x0001, 0xFFFE, 0xFFFF]),
|
||||
new Int32Array([0x00000000, 0x00000001, 0xFFFFFFFE, 0xFFFFFFFF]),
|
||||
new Uint8ClampedArray([0, 1, 254, 255]),
|
||||
new Float32Array([-Infinity, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, Infinity, NaN]),
|
||||
new Float64Array([-Infinity, -Number.MAX_VALUE, -Number.MIN_VALUE, 0,
|
||||
Number.MIN_VALUE, Number.MAX_VALUE, Infinity, NaN])
|
||||
]
|
||||
|
||||
if (typeof Float16Array !== 'undefined') {
|
||||
byteArrays.push(
|
||||
new Float16Array([-Infinity, -1.5, -1, -0.5, 0, 0.5, 1, 1.5, Infinity, NaN]));
|
||||
}
|
||||
|
||||
byteArrays.forEach(value => cloneObjectTest(value, (orig, clone) => {
|
||||
assert_array_equals(orig, clone);
|
||||
}));
|
||||
|
||||
// Map
|
||||
cloneObjectTest(new Map([[1,2],[3,4]]), (orig, clone) => {
|
||||
assert_array_equals([...orig.keys()], [...clone.keys()]);
|
||||
assert_array_equals([...orig.values()], [...clone.values()]);
|
||||
});
|
||||
|
||||
// Set
|
||||
cloneObjectTest(new Set([1,2,3,4]), (orig, clone) => {
|
||||
assert_array_equals([...orig.values()], [...clone.values()]);
|
||||
});
|
||||
|
||||
// Error
|
||||
[
|
||||
new Error(),
|
||||
new Error('abc', 'def'),
|
||||
new EvalError(),
|
||||
new EvalError('ghi', 'jkl'),
|
||||
new RangeError(),
|
||||
new RangeError('ghi', 'jkl'),
|
||||
new ReferenceError(),
|
||||
new ReferenceError('ghi', 'jkl'),
|
||||
new SyntaxError(),
|
||||
new SyntaxError('ghi', 'jkl'),
|
||||
new TypeError(),
|
||||
new TypeError('ghi', 'jkl'),
|
||||
new URIError(),
|
||||
new URIError('ghi', 'jkl'),
|
||||
].forEach(value => cloneObjectTest(value, (orig, clone) => {
|
||||
assert_equals(orig.name, clone.name);
|
||||
assert_equals(orig.message, clone.message);
|
||||
}));
|
||||
|
||||
// Arrays
|
||||
[
|
||||
[],
|
||||
[1,2,3],
|
||||
Object.assign(
|
||||
['foo', 'bar'],
|
||||
{10: true, 11: false, 20: 123, 21: 456, 30: null}),
|
||||
Object.assign(
|
||||
['foo', 'bar'],
|
||||
{a: true, b: false, foo: 123, bar: 456, '': null}),
|
||||
].forEach(value => cloneObjectTest(value, (orig, clone) => {
|
||||
assert_array_equals(orig, clone);
|
||||
assert_array_equals(Object.keys(orig), Object.keys(clone));
|
||||
Object.keys(orig).forEach(key => {
|
||||
assert_equals(orig[key], clone[key], `Property ${key}`);
|
||||
});
|
||||
}));
|
||||
|
||||
// Objects
|
||||
cloneObjectTest({foo: true, bar: false}, (orig, clone) => {
|
||||
assert_array_equals(Object.keys(orig), Object.keys(clone));
|
||||
Object.keys(orig).forEach(key => {
|
||||
assert_equals(orig[key], clone[key], `Property ${key}`);
|
||||
});
|
||||
});
|
||||
|
||||
//
// [Serializable] Platform objects
//

// TODO: Test these additional interfaces:
// * DOMQuad
// * DOMException
// * RTCCertificate

// Geometry types
for (const value of [
  new DOMMatrix(),
  new DOMMatrixReadOnly(),
  new DOMPoint(),
  new DOMPointReadOnly(),
  new DOMRect(),
  new DOMRectReadOnly(),
]) {
  cloneObjectTest(value, (source, copy) => {
    // Enumerate the prototype's keys to visit every reflected attribute
    // of the interface and compare it on both sides.
    for (const prop of Object.keys(Object.getPrototypeOf(source))) {
      assert_equals(source[prop], copy[prop], `Property ${prop}`);
    }
  });
}
|
||||
// ImageData
const image_data = new ImageData(8, 8);
// Fill all 256 bytes (8 x 8 pixels x 4 channels) with distinct values so
// a lossy clone would be detected.
for (let i = 0; i < 256; ++i) {
  image_data.data[i] = i;
}
cloneObjectTest(image_data, (source, copy) => {
  assert_equals(source.width, copy.width);
  assert_equals(source.height, copy.height);
  assert_array_equals(source.data, copy.data);
});
|
||||
// Blob
cloneObjectTest(
    new Blob(['This is a test.'], {type: 'a/b'}),
    async (source, copy) => {
      // Size, MIME type, and contents must all round-trip.
      assert_equals(source.size, copy.size);
      assert_equals(source.type, copy.type);
      assert_equals(await source.text(), await copy.text());
    });
|
||||
// File
cloneObjectTest(
    new File(['This is a test.'], 'foo.txt', {type: 'c/d'}),
    async (source, copy) => {
      // Size, MIME type, name, timestamp, and contents must all round-trip.
      assert_equals(source.size, copy.size);
      assert_equals(source.type, copy.type);
      assert_equals(source.name, copy.name);
      assert_equals(source.lastModified, copy.lastModified);
      assert_equals(await source.text(), await copy.text());
    });
|
||||
|
||||
// FileList - exposed in Workers, but not constructable.
if ('document' in self) {
  // TODO: Test with populated list.
  const file_input = Object.assign(document.createElement('input'),
                                   {type: 'file', multiple: true});
  cloneObjectTest(file_input.files, async (source, copy) => {
    assert_equals(source.length, copy.length);
  });
}
|
||||
//
// Non-serializable types
//
[
  // ECMAScript types that structured clone must reject.
  function() {},
  Symbol('desc'),

  // Non-[Serializable] platform objects.
  self,
  new Event(''),
  new MessageChannel(),
].forEach(cloneFailureTest);
|
||||
Loading…
Add table
Add a link
Reference in a new issue