// Ext.data.Store — annotated source listing (commit 6527f429)
1 | /**\r |
2 | * The Store class encapsulates a client side cache of {@link Ext.data.Model Model} objects. Stores load data via a\r | |
3 | * {@link Ext.data.proxy.Proxy Proxy}, and also provide functions for {@link #method-sort sorting}, {@link #filter filtering}\r | |
4 | * and querying the {@link Ext.data.Model model} instances contained within it.\r | |
5 | *\r | |
6 | * Creating a Store is easy - we just tell it the Model and the Proxy to use for loading and saving its data:\r | |
7 | *\r | |
8 | * // Set up a {@link Ext.data.Model model} to use in our Store\r | |
9 | * Ext.define('User', {\r | |
10 | * extend: 'Ext.data.Model',\r | |
11 | * fields: [\r | |
12 | * {name: 'firstName', type: 'string'},\r | |
13 | * {name: 'lastName', type: 'string'},\r | |
14 | * {name: 'age', type: 'int'},\r | |
15 | * {name: 'eyeColor', type: 'string'}\r | |
16 | * ]\r | |
17 | * });\r | |
18 | *\r | |
19 | * var myStore = Ext.create('Ext.data.Store', {\r | |
20 | * model: 'User',\r | |
21 | * proxy: {\r | |
22 | * type: 'ajax',\r | |
23 | * url: '/users.json',\r | |
24 | * reader: {\r | |
25 | * type: 'json',\r | |
26 | * rootProperty: 'users'\r | |
27 | * }\r | |
28 | * },\r | |
29 | * autoLoad: true\r | |
30 | * });\r | |
31 | *\r | |
32 | * In the example above we configured an AJAX proxy to load data from the url '/users.json'. We told our Proxy to use a\r | |
 * {@link Ext.data.reader.Json JsonReader} to parse the response from the server into Model objects - {@link
34 | * Ext.data.reader.Json see the docs on JsonReader} for details.\r | |
35 | *\r | |
36 | * ## Inline data\r | |
37 | *\r | |
38 | * Stores can also load data inline. Internally, Store converts each of the objects we pass in as {@link #cfg-data} into\r | |
39 | * Model instances:\r | |
40 | *\r | |
41 | * Ext.create('Ext.data.Store', {\r | |
42 | * model: 'User',\r | |
43 | * data : [\r | |
44 | * {firstName: 'Peter', lastName: 'Venkman'},\r | |
45 | * {firstName: 'Egon', lastName: 'Spengler'},\r | |
46 | * {firstName: 'Ray', lastName: 'Stantz'},\r | |
47 | * {firstName: 'Winston', lastName: 'Zeddemore'}\r | |
48 | * ]\r | |
49 | * });\r | |
50 | *\r | |
51 | * Loading inline data using the method above is great if the data is in the correct format already (e.g. it doesn't\r | |
52 | * need to be processed by a {@link Ext.data.reader.Reader reader}). If your inline data requires processing to decode\r | |
53 | * the data structure, use a {@link Ext.data.proxy.Memory MemoryProxy} instead (see the {@link Ext.data.proxy.Memory\r | |
54 | * MemoryProxy} docs for an example).\r | |
55 | *\r | |
56 | * Additional data can also be loaded locally using {@link #method-add}.\r | |
57 | * \r | |
58 | * ## Dynamic Loading\r | |
59 | *\r | |
60 | * Stores can be dynamically updated by calling the {@link #method-load} method:\r | |
61 | *\r | |
62 | * store.load({\r | |
63 | * params: {\r | |
64 | * group: 3,\r | |
65 | * type: 'user'\r | |
66 | * },\r | |
67 | * callback: function(records, operation, success) {\r | |
68 | * // do something after the load finishes\r | |
69 | * },\r | |
70 | * scope: this\r | |
71 | * });\r | |
72 | *\r | |
73 | * Here a bunch of arbitrary parameters is passed along with the load request and a callback function is set\r | |
74 | * up to do something after the loading is over.\r | |
75 | *\r | |
76 | * ## Loading Nested Data\r | |
77 | *\r | |
78 | * Applications often need to load sets of associated data - for example a CRM system might load a User and her Orders.\r | |
79 | * Instead of issuing an AJAX request for the User and a series of additional AJAX requests for each Order, we can load\r | |
80 | * a nested dataset and allow the Reader to automatically populate the associated models. Below is a brief example, see\r | |
81 | * the {@link Ext.data.reader.Reader} intro docs for a full explanation:\r | |
82 | *\r | |
83 | * var store = Ext.create('Ext.data.Store', {\r | |
84 | * autoLoad: true,\r | |
85 | * model: "User",\r | |
86 | * proxy: {\r | |
87 | * type: 'ajax',\r | |
88 | * url: 'users.json',\r | |
89 | * reader: {\r | |
90 | * type: 'json',\r | |
91 | * rootProperty: 'users'\r | |
92 | * }\r | |
93 | * }\r | |
94 | * });\r | |
95 | *\r | |
96 | * Which would consume a response like this:\r | |
97 | *\r | |
98 | * {\r | |
99 | * "users": [{\r | |
100 | * "id": 1,\r | |
101 | * "name": "Peter",\r | |
102 | * "orders": [{\r | |
103 | * "id": 10,\r | |
104 | * "total": 10.76,\r | |
105 | * "status": "invoiced"\r | |
106 | * },{\r | |
107 | * "id": 11,\r | |
108 | * "total": 13.45,\r | |
109 | * "status": "shipped"\r | |
110 | * }]\r | |
111 | * }]\r | |
112 | * }\r | |
113 | *\r | |
114 | * See the {@link Ext.data.reader.Reader} intro docs for a full explanation.\r | |
115 | *\r | |
116 | * ## Filtering and Sorting\r | |
117 | *\r | |
118 | * Stores can be sorted and filtered - in both cases either remotely or locally. The {@link #cfg-sorters} and\r | |
119 | * {@link #cfg-filters} are held inside {@link Ext.util.MixedCollection MixedCollection} instances to make them easy to manage.\r | |
120 | * Usually it is sufficient to either just specify sorters and filters in the Store configuration or call {@link #method-sort}\r | |
121 | * or {@link #filter}:\r | |
122 | *\r | |
123 | * var store = Ext.create('Ext.data.Store', {\r | |
124 | * model: 'User',\r | |
125 | * sorters: [{\r | |
126 | * property: 'age',\r | |
127 | * direction: 'DESC'\r | |
128 | * }, {\r | |
129 | * property: 'firstName',\r | |
130 | * direction: 'ASC'\r | |
131 | * }],\r | |
132 | *\r | |
133 | * filters: [{\r | |
134 | * property: 'firstName',\r | |
135 | * value: /Peter/\r | |
136 | * }]\r | |
137 | * });\r | |
138 | *\r | |
139 | * The new Store will keep the configured sorters and filters in the MixedCollection instances mentioned above. By\r | |
140 | * default, sorting and filtering are both performed locally by the Store - see {@link #remoteSort} and\r | |
141 | * {@link #remoteFilter} to allow the server to perform these operations instead.\r | |
142 | *\r | |
143 | * Filtering and sorting after the Store has been instantiated is also easy. Calling {@link #filter} adds another filter\r | |
144 | * to the Store and automatically filters the dataset (calling {@link #filter} with no arguments simply re-applies all\r | |
145 | * existing filters).\r | |
146 | *\r | |
147 | * store.filter('eyeColor', 'Brown');\r | |
148 | *\r | |
149 | * Change the sorting at any time by calling {@link #method-sort}:\r | |
150 | *\r | |
151 | * store.sort('height', 'ASC');\r | |
152 | *\r | |
153 | * Note that all existing sorters will be removed in favor of the new sorter data (if {@link #method-sort} is called with no\r | |
154 | * arguments, the existing sorters are just reapplied instead of being removed). To keep existing sorters and add new\r | |
155 | * ones, just add them to the MixedCollection:\r | |
156 | *\r | |
157 | * store.sorters.add(new Ext.util.Sorter({\r | |
158 | * property : 'shoeSize',\r | |
159 | * direction: 'ASC'\r | |
160 | * }));\r | |
161 | *\r | |
162 | * store.sort();\r | |
163 | *\r | |
164 | * ## Registering with StoreManager\r | |
165 | *\r | |
166 | * Any Store that is instantiated with a {@link #storeId} will automatically be registered with the {@link\r | |
167 | * Ext.data.StoreManager StoreManager}. This makes it easy to reuse the same store in multiple views:\r | |
168 | *\r | |
169 | * //this store can be used several times\r | |
170 | * Ext.create('Ext.data.Store', {\r | |
171 | * model: 'User',\r | |
172 | * storeId: 'usersStore'\r | |
173 | * });\r | |
174 | *\r | |
175 | * new Ext.List({\r | |
176 | * store: 'usersStore',\r | |
177 | * //other config goes here\r | |
178 | * });\r | |
179 | *\r | |
180 | * new Ext.view.View({\r | |
181 | * store: 'usersStore',\r | |
182 | * //other config goes here\r | |
183 | * });\r | |
184 | *\r | |
185 | * ## Further Reading\r | |
186 | *\r | |
187 | * Stores are backed up by an ecosystem of classes that enables their operation. To gain a full understanding of these\r | |
188 | * pieces and how they fit together, see:\r | |
189 | *\r | |
190 | * - {@link Ext.data.proxy.Proxy Proxy} - overview of what Proxies are and how they are used\r | |
191 | * - {@link Ext.data.Model Model} - the core class in the data package\r | |
192 | * - {@link Ext.data.reader.Reader Reader} - used by any subclass of {@link Ext.data.proxy.Server ServerProxy} to read a response\r | |
193 | */\r | |
194 | Ext.define('Ext.data.Store', {\r | |
195 | extend: 'Ext.data.ProxyStore',\r | |
196 | \r | |
197 | alias: 'store.store',\r | |
198 | \r | |
199 | mixins: [\r | |
200 | 'Ext.data.LocalStore'\r | |
201 | ],\r | |
202 | \r | |
203 | // Required classes must be loaded before the definition callback runs\r | |
204 | // The class definition callback creates a dummy Store which requires that\r | |
205 | // all the classes below have been loaded.\r | |
206 | requires: [\r | |
207 | 'Ext.data.Model',\r | |
208 | 'Ext.data.proxy.Ajax',\r | |
209 | 'Ext.data.reader.Json',\r | |
210 | 'Ext.data.writer.Json',\r | |
211 | \r | |
212 | // This ensures that we have Ext.util.Collection and all of its requirements.\r | |
213 | 'Ext.util.GroupCollection',\r | |
214 | 'Ext.util.DelayedTask'\r | |
215 | ],\r | |
216 | \r | |
217 | uses: [\r | |
218 | 'Ext.data.StoreManager',\r | |
219 | 'Ext.util.Grouper'\r | |
220 | ],\r | |
221 | \r | |
    config: {
        /**
         * @cfg {Object[]/Ext.data.Model[]} data
         * Array of Model instances or data objects to load locally. See "Inline data"
         * above for details.
         */
        data: 0, // pass 0 (never null/undefined) to ensure applyData is always called

        /**
         * @cfg {Boolean} [clearRemovedOnLoad=true]
         * `true` to clear anything in the {@link #removed} record collection when the store loads.
         */
        clearRemovedOnLoad: true,

        /**
         * @cfg {Boolean} [clearOnPageLoad=true]
         * True to empty the store when loading another page via {@link #loadPage},
         * {@link #nextPage} or {@link #previousPage}. Setting to false keeps existing records, allowing
         * large data sets to be loaded one page at a time but rendered all together.
         */
        clearOnPageLoad: true,

        /**
         * @cfg {Ext.data.Model} [associatedEntity]
         * The owner of this store if the store is used as part of an association.
         *
         * @private
         */
        associatedEntity: null,

        /**
         * @cfg {Ext.data.schema.Role} [role]
         * The role for the {@link #associatedEntity}.
         *
         * @private
         */
        role: null,

        /**
         * @cfg {Ext.data.Session} session
         * The session for this store. By specifying a session, it ensures any records that are
         * added to this store are also included in the session. This store does not become a member
         * of the session itself.
         *
         * @since 5.0.0
         */
        session: null
    },
270 | \r | |
    /**
     * @property {Ext.util.Collection} data
     * The `data` property is a `Collection` which holds this store's local cache of records.
     * @private
     * @readonly
     */

    /**
     * @private
     * Used as a parameter to loadRecords
     */
    addRecordsOptions: {
        addRecords: true
    },

    /**
     * @property {Number} loadCount
     * The number of times records have been loaded into the store. This includes loads via
     * {@link #loadData} & {@link #loadRecords}.
     * @readonly
     */
    loadCount: 0,

    /**
     * `true` once the store has loaded data from the server.
     * @property {Boolean} complete
     *
     * @private
     */
    complete: false,

    // NOTE(review): appears to be a counter related to the record "moving" state
    // (see the setMoving calls in the collection add/remove handlers) — its
    // increment/decrement sites are not in this chunk; confirm against setMoving.
    // @private
    moveMapCount: 0,
304 | /**\r | |
305 | * Creates the store.\r | |
306 | * @param {Object} [config] Config object.\r | |
307 | */\r | |
308 | constructor: function (config) {\r | |
309 | var me = this,\r | |
310 | data;\r | |
311 | \r | |
312 | if (config) {\r | |
313 | if (config.buffered) {\r | |
314 | //<debug>\r | |
315 | if (this.self !== Ext.data.Store) {\r | |
316 | Ext.raise('buffered config not supported on derived Store classes. '+\r | |
317 | 'Please derive from Ext.data.BufferedStore.');\r | |
318 | }\r | |
319 | //</debug>\r | |
320 | \r | |
321 | return new Ext.data.BufferedStore(config);\r | |
322 | }\r | |
323 | \r | |
324 | //<debug>\r | |
325 | if (config.remoteGroup) {\r | |
326 | Ext.log.warn('Ext.data.Store: remoteGroup has been removed. Use remoteSort instead.');\r | |
327 | }\r | |
328 | //</debug>\r | |
329 | }\r | |
330 | \r | |
331 | /**\r | |
332 | * @event beforeprefetch\r | |
333 | * Fires before a prefetch occurs. Return `false` to cancel.\r | |
334 | * @param {Ext.data.Store} this\r | |
335 | * @param {Ext.data.operation.Operation} operation The associated operation.\r | |
336 | */\r | |
337 | /**\r | |
338 | * @event groupchange\r | |
339 | * Fired whenever the grouping in the grid changes.\r | |
340 | * @param {Ext.data.Store} store The store.\r | |
341 | * @param {Ext.util.Grouper} grouper The grouper object.\r | |
342 | */\r | |
343 | /**\r | |
344 | * @event prefetch\r | |
345 | * Fires whenever records have been prefetched.\r | |
346 | * @param {Ext.data.Store} this\r | |
347 | * @param {Ext.data.Model[]} records An array of records.\r | |
348 | * @param {Boolean} successful `true` if the operation was successful.\r | |
349 | * @param {Ext.data.operation.Operation} operation The associated operation.\r | |
350 | */\r | |
351 | /**\r | |
352 | * @event filterchange\r | |
353 | * Fired whenever the filter set changes.\r | |
354 | * @param {Ext.data.Store} store The store.\r | |
355 | * @param {Ext.util.Filter[]} filters The array of Filter objects.\r | |
356 | */\r | |
357 | \r | |
358 | me.callParent([config]);\r | |
359 | \r | |
360 | me.getData().addObserver(me);\r | |
361 | \r | |
362 | // See applyData for the details.\r | |
363 | data = me.inlineData;\r | |
364 | if (data) {\r | |
365 | delete me.inlineData;\r | |
366 | me.loadInlineData(data);\r | |
367 | }\r | |
368 | \r | |
369 | },\r | |
370 | \r | |
371 | /**\r | |
372 | * @method getData \r | |
373 | * Returns the store's records.\r | |
374 | *\r | |
375 | * **Note:** If your store has been filtered, getData() will return a filtered \r | |
376 | * collection. Use `getData().{@link Ext.util.Collection#getSource getSource()}` to \r | |
377 | * fetch all unfiltered records.\r | |
378 | *\r | |
379 | * @return {Ext.util.Collection} An Ext.util.Collection of records \r | |
380 | * (an empty Collection if no records are held by the store).\r | |
381 | */\r | |
382 | \r | |
383 | /**\r | |
384 | * @method setData\r | |
385 | * Loads an array of data directly into the Store.\r | |
386 | *\r | |
387 | * setData() is ideal if your data's format is already in its appropriate format (e.g. it doesn't need to be\r | |
388 | * processed by a reader). If your data's structure requires processing, use a\r | |
389 | * {@link Ext.data.proxy.Memory MemoryProxy} or {@link #loadRawData}.\r | |
390 | *\r | |
391 | * Use {@link #loadData}, {@link #method-add}, or {@link #insert} if records need to be\r | |
392 | * appended to the current recordset.\r | |
393 | *\r | |
394 | * @param {Ext.data.Model[]/Object[]} data Array of data to load. Any non-model instances will be cast\r | |
395 | * into model instances first.\r | |
396 | */\r | |
397 | \r | |
398 | onCollectionBeginUpdate: function() {\r | |
399 | this.beginUpdate();\r | |
400 | },\r | |
401 | \r | |
402 | onCollectionEndUpdate: function() {\r | |
403 | this.endUpdate();\r | |
404 | },\r | |
405 | \r | |
    applyData: function (data, dataCollection) {
        // We bring up the Collection for records which forms the bottom of the config
        // dependency graph. The appliers for "filters" and "sorters" depend on "data"
        // and "remoteFilter" and "remoteSort" depend on both in their updaters.
        var me = this;

        // Ensure that the model class exists (forcing fields/model resolution).
        me.getFields();
        me.getModel();

        // We might be configured with a Collection instance
        if (data && data.isCollection) {
            dataCollection = data;
        } else {
            if (!dataCollection) {
                dataCollection = me.constructDataCollection();
            }

            if (data) {
                if (me.isInitializing) {
                    // When data is configured on the instance of a Store we must wait for
                    // all the things to initialize (sorters, filters, groupers) so that we
                    // can properly process that data. All of those appliers, however, depend
                    // on the dataCollection (us) to get booted up first so we must defer
                    // this back to after initConfig. In previous versions this was hacked
                    // at by the constructor via "config.data" but "data" can also be set on
                    // the Ext.define level so best to pick it up here and store aside to be
                    // finished in the constructor.
                    me.inlineData = data;
                } else {
                    // If we are not constructing the Store then a setData call needs to be equivalent
                    // to the legacy loadData method with respect to events that fire, etc..
                    me.loadData(data);
                }
            }
        }

        return dataCollection;
    },
445 | \r | |
    // Finishes loading of inline "data" that applyData stashed on me.inlineData
    // during construction (invoked from the constructor once config is complete).
    // @private
    loadInlineData: function(data) {
        var me = this,
            proxy = me.getProxy();

        if (proxy && proxy.isMemoryProxy) {
            proxy.setData(data);

            // Allow a memory proxy to trigger a load initially
            me.suspendEvents();
            me.read();
            me.resumeEvents();

        } else {
            // We make it silent because we don't want to fire a refresh event
            me.removeAll(true);

            // We don't want to fire addrecords event since we will be firing
            // a refresh event later which will already take care of updating
            // any views bound to this store
            me.suspendEvents();
            me.loadData(data);
            me.resumeEvents();
        }
    },
470 | \r | |
471 | /**\r | |
472 | * @method insert\r | |
473 | * @inheritdoc Ext.data.LocalStore#insert\r | |
474 | */\r | |
475 | \r | |
476 | onCollectionAdd: function(collection, info) {\r | |
477 | this.onCollectionAddItems(collection, info.items, info);\r | |
478 | },\r | |
479 | \r | |
480 | onCollectionFilterAdd: function(collection, items) {\r | |
481 | this.onCollectionAddItems(collection, items);\r | |
482 | },\r | |
483 | \r | |
    // Shared handler for records entering the backing Collection (from adds,
    // filter changes or replacements). Joins records to the store, adopts them
    // into the session, and fires add/refresh/datachanged events as appropriate.
    onCollectionAddItems: function(collection, records, info) {
        var me = this,
            len = records.length,
            // lastChunk is true only for the final range of a multi-range
            // operation; whole-operation events wait for it.
            lastChunk = info ? !info.next : false,

            // Must use class-specific removed property.
            // Regular Stores add to the "removed" property on remove.
            // TreeStores are having records removed all the time; node collapse removes.
            // TreeStores add to the "removedNodes" property onNodeRemove
            removed = me.removed,
            ignoreAdd = me.ignoreCollectionAdd,
            session = me.getSession(),
            replaced = info && info.replaced,
            i, sync, record, replacedItems;

        for (i = 0; i < len; ++i) {
            record = records[i];

            if (session) {
                session.adopt(record);
            }

            // If ignoring, we don't want to do anything other than pull
            // the added records into the session
            if (!ignoreAdd) {
                record.join(me);
                // A re-added record cancels its own pending removal.
                if (removed && removed.length) {
                    Ext.Array.remove(removed, record);
                }
                // Any phantom or dirty record arriving means a sync is needed.
                sync = sync || record.phantom || record.dirty;
            }
        }

        if (ignoreAdd) {
            return;
        }

        if (replaced) {
            // Flatten the linked list of replaced ranges into one array.
            replacedItems = [];

            do {
                Ext.Array.push(replacedItems, replaced.items);
                replaced = replaced.next;
            } while (replaced);

            me.setMoving(replacedItems, true);
        }

        if (info) {
            // If this is a replacement operation, there will have been a
            // previous call to onCollectionRemove which will have fired no
            // events in anticipation of a final refresh event.
            // Here is where we inform interested parties of all the changes.
            if (info.replaced) {
                if (lastChunk) {
                    me.fireEvent('refresh', me);
                }
            } else {
                me.fireEvent('add', me, records, info.at);
                // If there is a next property, that means there is another range that needs
                // to be removed after this. Wait until everything is gone before firing datachanged
                // since it should be a bulk operation
                if (lastChunk) {
                    me.fireEvent('datachanged', me);
                }
            }
        }

        if (replacedItems) {
            me.setMoving(replacedItems, false);
        }

        // Addition means a sync is needed.
        me.needsSync = me.needsSync || sync;
    },
559 | \r | |
560 | // If our source collection informs us that a filtered out item has changed, we must still fire the events...\r | |
561 | onCollectionFilteredItemChange: function() {\r | |
562 | this.onCollectionItemChange.apply(this, arguments);\r | |
563 | },\r | |
564 | \r | |
    // Collection observer hook: a record held by the backing Collection mutated.
    // Fires the 'update' event only when fireChangeEvent confirms the record is
    // actually contained by this store's data source.
    onCollectionItemChange: function(collection, info) {
        var me = this,
            record = info.item,
            modifiedFieldNames = info.modified || null,
            type = info.meta;

        if (me.fireChangeEvent(record)) {
            // Inform any interested parties that a record has been mutated.
            // This will be invoked on TreeStores in which the invoking record
            // is a descendant of a collapsed node, and so *will not be contained by this store
            me.onUpdate(record, type, modifiedFieldNames, info);
            me.fireEvent('update', me, record, type, modifiedFieldNames, info);
        }
    },
579 | \r | |
580 | fireChangeEvent: function(record) {\r | |
581 | return this.getDataSource().contains(record);\r | |
582 | },\r | |
583 | \r | |
584 | afterChange: function(record, modifiedFieldNames, type) {\r | |
585 | this.getData().itemChanged(record, modifiedFieldNames || null, undefined, type);\r | |
586 | },\r | |
587 | \r | |
588 | afterCommit: function(record, modifiedFieldNames) {\r | |
589 | this.afterChange(record, modifiedFieldNames, Ext.data.Model.COMMIT);\r | |
590 | },\r | |
591 | \r | |
592 | afterEdit: function(record, modifiedFieldNames) {\r | |
593 | this.needsSync = this.needsSync || record.dirty;\r | |
594 | this.afterChange(record, modifiedFieldNames, Ext.data.Model.EDIT);\r | |
595 | },\r | |
596 | \r | |
597 | afterReject: function(record) {\r | |
598 | this.afterChange(record, null, Ext.data.Model.REJECT);\r | |
599 | },\r | |
600 | \r | |
601 | afterDrop: function(record) {\r | |
602 | this.getData().remove(record);\r | |
603 | },\r | |
604 | \r | |
605 | afterErase: function(record) {\r | |
606 | this.removeFromRemoved(record);\r | |
607 | },\r | |
608 | \r | |
609 | /**\r | |
610 | * @method add\r | |
611 | * @inheritdoc Ext.data.LocalStore#add\r | |
612 | */\r | |
613 | \r | |
    /**
     * (Local sort only) Inserts the passed Record into the Store at the index where it
     * should go based on the current sort information.
     *
     * @param {Ext.data.Record} record
     */
    addSorted: function(record) {
        var me = this,
            remote = me.getRemoteSort(),
            data = me.getData(),
            index;

        if (remote) {
            // With remote sort the local collection carries no sorters, so
            // temporarily apply the store's sorters to compute an index ...
            data.setSorters(me.getSorters());
        }
        index = data.findInsertionIndex(record);
        if (remote) {
            // ... then detach them again so local order is untouched.
            data.setSorters(null);
        }

        return me.insert(index, record);
    },
636 | \r | |
    /**
     * Removes the specified record(s) from the Store, firing the {@link #event-remove}
     * event for the removed records.
     *
     * After all records have been removed a single `datachanged` is fired.
     *
     * @param {Ext.data.Model/Ext.data.Model[]/Number/Number[]} records Model instance or
     * array of instances to remove or an array of indices from which to remove records.
     * @return {Ext.data.Model[]} The records actually removed (empty when none matched).
     */
    remove: function(records, /* private */ isMove, silent) {
        var me = this,
            data = me.getDataSource(),
            len, i, toRemove, record;

        if (records) {
            if (records.isModel) {
                // Single record: only removable if this store actually holds it.
                if (data.indexOf(records) > -1) {
                    toRemove = [records];
                    len = 1;
                } else {
                    len = 0;
                }
            } else {
                // Array input: normalize indices to records, skipping anything
                // not present in the store.
                toRemove = [];
                for (i = 0, len = records.length; i < len; ++i) {
                    record = records[i];

                    if (record && record.isEntity) {
                        if (!data.contains(record)) {
                            continue;
                        }
                    } else if (!(record = data.getAt(record))) { // an index
                        continue;
                    }

                    toRemove.push(record);
                }

                len = toRemove.length;
            }
        }

        if (!len) {
            return [];
        }

        // Flags consumed by onCollectionRemove, which fires the actual events;
        // removeIsSilent is reset immediately after the synchronous remove.
        me.removeIsMove = isMove === true;
        me.removeIsSilent = silent;
        data.remove(toRemove);
        me.removeIsSilent = false;
        return toRemove;
    },
689 | \r | |
    // Collection observer hook for records leaving the backing Collection.
    // Books non-phantom records into the "removed" collection (for later sync /
    // rejectChanges), unjoins the rest, and fires remove/datachanged events
    // unless this removal is the first half of a replacement operation.
    onCollectionRemove: function(collection, info) {
        var me = this,
            // Must use class-specific removed property.
            // Regular Stores add to the "removed" property on remove.
            // TreeStores are having records removed all the time; node collapse removes.
            // TreeStores add to the "removedNodes" property onNodeRemove
            removed = me.removed,
            records = info.items,
            len = records.length,
            index = info.at,
            replacement = info.replacement,
            isMove = me.removeIsMove || (replacement && Ext.Array.equals(records, replacement.items)),
            silent = me.removeIsSilent,
            lastChunk = !info.next,
            data = me.getDataSource(),
            i, record;

        if (me.ignoreCollectionRemove) {
            return;
        }

        if (replacement) {
            me.setMoving(replacement.items, true);
        }

        for (i = 0; i < len; ++i) {
            record = records[i];

            // If the data contains the record, that means the record is filtered out, so
            // it's not being removed, nor should it be unjoined
            if (!data.contains(record)) {
                // Don't push internally moving, or phantom (client side only),
                // erasing (informing server through its own proxy) records
                if (removed && !isMove && !record.phantom && !record.erasing) {
                    // Store the index the record was removed from so that rejectChanges can re-insert at the correct place.
                    // The record's index property won't do, as that is the index in the overall dataset when Store is buffered.
                    record.removedFrom = index + i;
                    removed.push(record);

                    // Removal of a non-phantom record which is NOT erasing (informing the server through its own proxy)
                    // requires that the store be synced at some point.
                    me.needsSync = true;
                } else {
                    // Only unjoin if we're not being pushed into the removed collection. We still
                    // have an interest in that record otherwise.
                    record.unjoin(me);
                }
            }
        }

        if (!silent) {
            // If this removal is just the first part of a replacement operation,
            // do not fire the events now.
            //
            // onCollectionAddItems will fire a refresh event, and convert multiple
            // remove and add operations to an atomic refresh event.
            // This will provide a better UI update.
            // Also, focus can only be preserved around one operation, so
            // editing a field which is the sorted field could result in
            // incorrect focus..
            if (!replacement || !replacement.items.length) {
                me.fireEvent('remove', me, records, index, isMove);
                // If there is a next property, that means there is another range that needs
                // to be removed after this. Wait until everything is gone before firing datachanged
                // since it should be a bulk operation
                if (lastChunk) {
                    me.fireEvent('datachanged', me);
                }
            }
        }

        if (replacement) {
            me.setMoving(replacement.items, false);
        }
    },
765 | \r | |
766 | onFilterEndUpdate: function() {\r | |
767 | this.callParent(arguments);\r | |
768 | this.callObservers('Filter');\r | |
769 | },\r | |
770 | \r | |
771 | /**\r | |
772 | * Removes the model instance(s) at the given index\r | |
773 | * @param {Number} index The record index\r | |
774 | * @param {Number} [count=1] The number of records to delete\r | |
775 | */\r | |
776 | removeAt: function(index, count) {\r | |
777 | var data = this.getData();\r | |
778 | \r | |
779 | // Sanity check input.\r | |
780 | index = Math.max(index, 0);\r | |
781 | \r | |
782 | if (index < data.length) {\r | |
783 | if (arguments.length === 1) {\r | |
784 | count = 1;\r | |
785 | } else if (!count) {\r | |
786 | return;\r | |
787 | }\r | |
788 | \r | |
789 | data.removeAt(index, count);\r | |
790 | }\r | |
791 | },\r | |
792 | \r | |
    /**
     * Removes all items from the store.
     *
     * Individual record `{@link #event-remove}` events are not fired by this method.
     *
     * @param {Boolean} [silent=false] Pass `true` to prevent the `{@link #event-clear}` event from being fired.
     *
     * This method is affected by filtering.
     *
     * @return {Ext.data.Model[]} The removed records.
     */
    removeAll: function(silent) {
        var me = this,
            data = me.getData(),
            // NOTE(review): hasClear is never read in this method — candidate
            // for removal; confirm no subclass relies on the evaluation.
            hasClear = me.hasListeners.clear,
            records = data.getRange();

        // We want to remove and mute any events here
        if (data.length) {
            // Explicit true here, we never want to fire remove events
            me.removeIsSilent = true;
            me.callObservers('BeforeRemoveAll');
            data.removeAll();
            me.removeIsSilent = false;
            if (!silent) {
                me.fireEvent('clear', me, records);
                me.fireEvent('datachanged', me);
            }
            me.callObservers('AfterRemoveAll', [!!silent]);
        }
        return records;
    },
825 | \r | |
826 | /**\r | |
827 | * Make a set of records be current in the store. This means that unneeded records\r | |
828 | * will be removed and new records will be added.\r | |
829 | * @param {Ext.data.Model[]} records The records to be current in the store.\r | |
830 | * \r | |
831 | * @private\r | |
832 | */\r | |
833 | setRecords: function(records) {\r | |
834 | var count = this.getCount();\r | |
835 | \r | |
836 | ++this.loadCount;\r | |
837 | if (count) {\r | |
838 | this.getData().splice(0, count, records);\r | |
839 | } else {\r | |
840 | this.add(records);\r | |
841 | }\r | |
842 | },\r | |
843 | \r | |
844 | /**\r | |
845 | * This method is basically the same as the JavaScript Array splice method.\r | |
846 | *\r | |
847 | * Negative indexes are interpreted starting at the end of the collection. That is,\r | |
848 | * a value of -1 indicates the last item, or equivalent to `length - 1`.\r | |
849 | *\r | |
850 | * @param {Number} index The index at which to add or remove items.\r | |
851 | * @param {Number/Object[]} toRemove The number of items to remove or an array of the\r | |
852 | * items to remove.\r | |
853 | * @param {Object[]} [toAdd] The items to insert at the given `index`.\r | |
854 | * @private\r | |
855 | */\r | |
856 | splice: function(index, toRemove, toAdd) {\r | |
857 | return this.getData().splice(index, toRemove, toAdd);\r | |
858 | },\r | |
859 | \r | |
860 | /**\r | |
861 | * @private\r | |
862 | * Called internally when a Proxy has completed a load request\r | |
863 | */\r | |
864 | onProxyLoad: function(operation) {\r | |
865 | var me = this,\r | |
866 | resultSet = operation.getResultSet(),\r | |
867 | records = operation.getRecords(),\r | |
868 | successful = operation.wasSuccessful();\r | |
869 | \r | |
870 | if (me.destroyed) {\r | |
871 | return;\r | |
872 | }\r | |
873 | \r | |
874 | if (resultSet) {\r | |
875 | me.totalCount = resultSet.getTotal();\r | |
876 | }\r | |
877 | \r | |
878 | if (successful) {\r | |
879 | records = me.processAssociation(records);\r | |
880 | me.loadRecords(records, operation.getAddRecords() ? {\r | |
881 | addRecords: true\r | |
882 | } : undefined);\r | |
883 | } else {\r | |
884 | me.loading = false;\r | |
885 | }\r | |
886 | \r | |
887 | if (me.hasListeners.load) {\r | |
888 | me.fireEvent('load', me, records, successful, operation);\r | |
889 | }\r | |
890 | me.callObservers('AfterLoad', [records, successful, operation]);\r | |
891 | },\r | |
892 | \r | |
893 | // private\r | |
894 | filterDataSource: function (fn) {\r | |
895 | var source = this.getDataSource(),\r | |
896 | items = source.items,\r | |
897 | len = items.length,\r | |
898 | ret = [],\r | |
899 | i;\r | |
900 | \r | |
901 | for (i = 0; i < len; i++) {\r | |
902 | if (fn.call(source, items[i])) {\r | |
903 | ret.push(items[i]);\r | |
904 | }\r | |
905 | }\r | |
906 | \r | |
907 | return ret;\r | |
908 | },\r | |
909 | \r | |
910 | getNewRecords: function() {\r | |
911 | return this.filterDataSource(this.filterNew);\r | |
912 | },\r | |
913 | \r | |
914 | getRejectRecords: function() {\r | |
915 | return this.filterDataSource(this.filterRejects);\r | |
916 | },\r | |
917 | \r | |
918 | getUpdatedRecords: function() {\r | |
919 | return this.filterDataSource(this.filterUpdated);\r | |
920 | },\r | |
921 | \r | |
922 | /**\r | |
923 | * Loads an array of data straight into the Store.\r | |
924 | *\r | |
925 | * Using this method is great if the data is in the correct format already (e.g. it doesn't need to be\r | |
926 | * processed by a reader). If your data requires processing to decode the data structure, use a\r | |
927 | * {@link Ext.data.proxy.Memory MemoryProxy} or {@link #loadRawData}.\r | |
928 | *\r | |
929 | * @param {Ext.data.Model[]/Object[]} data Array of data to load. Any non-model instances will be cast\r | |
930 | * into model instances first.\r | |
931 | * @param {Boolean} [append=false] `true` to add the records to the existing records in the store, `false`\r | |
932 | * to remove the old ones first.\r | |
933 | */\r | |
934 | loadData: function(data, append) {\r | |
935 | var me = this,\r | |
936 | length = data.length,\r | |
937 | newData = [],\r | |
938 | i;\r | |
939 | \r | |
940 | //make sure each data element is an Ext.data.Model instance\r | |
941 | for (i = 0; i < length; i++) {\r | |
942 | newData.push(me.createModel(data[i]));\r | |
943 | }\r | |
944 | \r | |
945 | newData = me.processAssociation(newData);\r | |
946 | \r | |
947 | me.loadRecords(newData, append ? me.addRecordsOptions : undefined);\r | |
948 | },\r | |
949 | \r | |
950 | /**\r | |
951 | * Loads data via the bound Proxy's reader\r | |
952 | *\r | |
953 | * Use this method if you are attempting to load data and want to utilize the configured data reader.\r | |
954 | *\r | |
955 | * As of 4.2, this method will no longer fire the {@link #event-load} event.\r | |
956 | *\r | |
957 | * @param {Object[]} data The full JSON object you'd like to load into the Data store.\r | |
958 | * @param {Boolean} [append=false] `true` to add the records to the existing records in the store, `false`\r | |
959 | * to remove the old ones first.\r | |
960 | * \r | |
961 | * @return {Boolean} `true` if the reader processed the records correctly. See {@link Ext.data.reader.Reader#successProperty}.\r | |
962 | * If the reader did not process the records, nothing will be added.\r | |
963 | */\r | |
964 | loadRawData : function(data, append) {\r | |
965 | var me = this,\r | |
966 | session = me.getSession(),\r | |
967 | result = me.getProxy().getReader().read(data, session ? {\r | |
968 | recordCreator: session.recordCreator\r | |
969 | } : undefined),\r | |
970 | records = result.getRecords(),\r | |
971 | success = result.getSuccess();\r | |
972 | \r | |
973 | if (success) {\r | |
974 | me.totalCount = result.getTotal();\r | |
975 | me.loadRecords(records, append ? me.addRecordsOptions : undefined);\r | |
976 | }\r | |
977 | return success;\r | |
978 | },\r | |
979 | \r | |
/**
 * Loads an array of {@link Ext.data.Model model} instances into the store, fires the datachanged event. This should only usually
 * be called internally when loading from the {@link Ext.data.proxy.Proxy Proxy}, when adding records manually use {@link #method-add} instead
 * @param {Ext.data.Model[]} records The array of records to load
 * @param {Object} options
 * @param {Boolean} [options.addRecords=false] Pass `true` to add these records to the existing records, `false` to remove the Store's existing records first.
 */
loadRecords: function(records, options) {
    var me = this,
        length = records.length,
        data = me.getData(),
        addRecords, i, skipSort;

    if (options) {
        addRecords = options.addRecords;
    }

    // When sorting is local and sortOnLoad is disabled, suspend auto-sorting
    // so the incoming records keep the order in which they arrived.
    if (!me.getRemoteSort() && !me.getSortOnLoad()) {
        skipSort = true;
        data.setAutoSort(false);
    }

    if (!addRecords) {
        // Pass true: this clear is part of a load (affects "removed" handling).
        me.clearData(true);
    }

    // Clear the flag AFTER the stores collection has been cleared down so that
    // observers of that collection know that it was due to a load, and a refresh is imminent.
    me.loading = false;

    // Bracket the add so collection observers know this add originates
    // from a load rather than a user-initiated add.
    me.ignoreCollectionAdd = true;
    me.callObservers('BeforePopulate');
    data.add(records);
    me.ignoreCollectionAdd = false;

    if (skipSort) {
        // Restore auto-sorting now that the records are in place.
        data.setAutoSort(true);
    }

    // Join every loaded record to this store so it notifies us of changes.
    for (i = 0; i < length; i++) {
        records[i].join(me);
    }

    ++me.loadCount;
    me.complete = true;
    me.fireEvent('datachanged', me);
    me.fireEvent('refresh', me);
    me.callObservers('AfterPopulate');
},
1029 | \r | |
1030 | // PAGING METHODS\r | |
1031 | /**\r | |
1032 | * Loads a given 'page' of data by setting the start and limit values appropriately. Internally this just causes a normal\r | |
1033 | * load operation, passing in calculated 'start' and 'limit' params.\r | |
1034 | * @param {Number} page The number of the page to load.\r | |
1035 | * @param {Object} [options] See options for {@link #method-load}.\r | |
1036 | */\r | |
1037 | loadPage: function(page, options) {\r | |
1038 | var me = this,\r | |
1039 | size = me.getPageSize();\r | |
1040 | \r | |
1041 | me.currentPage = page;\r | |
1042 | \r | |
1043 | // Copy options into a new object so as not to mutate passed in objects\r | |
1044 | options = Ext.apply({\r | |
1045 | page: page,\r | |
1046 | start: (page - 1) * size,\r | |
1047 | limit: size,\r | |
1048 | addRecords: !me.getClearOnPageLoad()\r | |
1049 | }, options);\r | |
1050 | \r | |
1051 | me.read(options);\r | |
1052 | },\r | |
1053 | \r | |
1054 | /**\r | |
1055 | * Loads the next 'page' in the current data set\r | |
1056 | * @param {Object} options See options for {@link #method-load}\r | |
1057 | */\r | |
1058 | nextPage: function(options) {\r | |
1059 | this.loadPage(this.currentPage + 1, options);\r | |
1060 | },\r | |
1061 | \r | |
1062 | /**\r | |
1063 | * Loads the previous 'page' in the current data set\r | |
1064 | * @param {Object} options See options for {@link #method-load}\r | |
1065 | */\r | |
1066 | previousPage: function(options) {\r | |
1067 | this.loadPage(this.currentPage - 1, options);\r | |
1068 | },\r | |
1069 | \r | |
/**
 * @private
 * Removes every record from the backing collection, unjoining each record
 * from this store first. When `isLoad` is true the clear is happening as
 * part of a load, which changes how the `removed` bookkeeping is treated.
 * @param {Boolean} [isLoad] `true` when called as part of a load operation.
 */
clearData: function(isLoad) {
    var me = this,
        removed = me.removed,
        data = me.getDataSource(),
        clearRemovedOnLoad = me.getClearRemovedOnLoad(),
        needsUnjoinCheck = removed && isLoad && !clearRemovedOnLoad,
        records, record, i, len;

    // We only have to do the unjoining if not buffered. PageMap will unjoin its records when it clears itself.
    // There is a potential for a race condition in stores configured with autoDestroy: true;
    // if loading was initiated but didn't complete by the time the store is destroyed,
    // the data MC may not have been created yet so we have to check for its existence
    // here and below.
    if (data) {
        records = data.items;
        for (i = 0, len = records.length; i < len; ++i) {
            record = records[i];
            // Records tracked in "removed" stay joined across a load when
            // clearRemovedOnLoad is off — presumably so they can still be
            // synced later; verify against sync() usage.
            if (needsUnjoinCheck && Ext.Array.contains(removed, record)) {
                continue;
            }
            record.unjoin(me);
        }
        // Bracket the removeAll so collection observers know this removal
        // is a wholesale clear, not a user-initiated remove.
        me.ignoreCollectionRemove = true;
        me.callObservers('BeforeClear');
        data.removeAll();
        me.ignoreCollectionRemove = false;
        me.callObservers('AfterClear');
    }

    // Reset the removed list except when loading with clearRemovedOnLoad off.
    if (removed && (!isLoad || clearRemovedOnLoad)) {
        removed.length = 0;
    }
},
1106 | \r | |
1107 | onIdChanged: function(rec, oldId, newId){\r | |
1108 | this.getData().updateKey(rec, oldId);\r | |
1109 | // This event is used internally\r | |
1110 | this.fireEvent('idchanged', this, rec, oldId, newId);\r | |
1111 | },\r | |
1112 | \r | |
1113 | /**\r | |
1114 | * Commits all Records with {@link #getModifiedRecords outstanding changes}. To handle updates for changes,\r | |
1115 | * subscribe to the Store's {@link #event-update update event}, and perform updating when the third parameter is\r | |
1116 | * Ext.data.Record.COMMIT.\r | |
1117 | */\r | |
1118 | commitChanges: function() {\r | |
1119 | var me = this,\r | |
1120 | recs = me.getModifiedRecords(),\r | |
1121 | len = recs.length,\r | |
1122 | i = 0;\r | |
1123 | \r | |
1124 | Ext.suspendLayouts();\r | |
1125 | me.beginUpdate();\r | |
1126 | for (; i < len; i++){\r | |
1127 | recs[i].commit();\r | |
1128 | }\r | |
1129 | \r | |
1130 | me.cleanRemoved();\r | |
1131 | \r | |
1132 | me.endUpdate();\r | |
1133 | Ext.resumeLayouts(true);\r | |
1134 | },\r | |
1135 | \r | |
1136 | filterNewOnly: function(item) {\r | |
1137 | return item.phantom === true;\r | |
1138 | },\r | |
1139 | \r | |
1140 | filterRejects: function(item) {\r | |
1141 | return item.phantom || item.dirty;\r | |
1142 | },\r | |
1143 | \r | |
/**
 * {@link Ext.data.Model#reject Rejects} outstanding changes on all {@link #getModifiedRecords modified records}
 * and re-insert any records that were removed locally. Any phantom records will be removed.
 */
rejectChanges: function() {
    var me = this,
        recs = me.getRejectRecords(),
        len = recs.length,
        i, rec, toRemove, sorted, data, currentAutoSort;

    // Batch everything so layouts/updates only run once at the end.
    Ext.suspendLayouts();
    me.beginUpdate();

    // Phantoms are removed entirely; persisted records just revert.
    for (i = 0; i < len; i++) {
        rec = recs[i];
        if (rec.phantom) {
            toRemove = toRemove || [];
            toRemove.push(rec);
        } else {
            rec.reject();
        }
    }

    if (toRemove) {
        me.remove(toRemove);
        // Reject after removal so the phantoms' modified state is cleared too.
        for (i = 0, len = toRemove.length; i < len; ++i) {
            toRemove[i].reject();
        }
    }

    // Restore removed records back to their original positions.
    recs = me.getRawRemovedRecords();
    if (recs) {
        len = recs.length;
        sorted = !me.getRemoteSort() && me.isSorted();

        if (sorted) {
            // Temporarily turn off sorting so .reject() doesn't attempt to sort the record.
            // It would throw b/c the record isn't yet in its collection.
            data = me.getData();
            currentAutoSort = data.getAutoSort();

            data.setAutoSort(false);
        }

        // Iterate backwards so each record's original removedFrom index
        // is still valid when it is re-inserted.
        for (i = len - 1; i >= 0; i--) {
            rec = recs[i];
            rec.reject();

            if (!sorted) {
                me.insert(rec.removedFrom || 0, rec);
            }
        }

        if (sorted) {
            // Turn sorting back on so the collection is auto-sorted when added.
            data.setAutoSort(currentAutoSort);
            me.add(recs);
        }

        // Don't need to call cleanRemoved because we've re-added everything, don't
        // need to unjoin the store
        recs.length = 0;
    }
    me.endUpdate();
    Ext.resumeLayouts(true);
},
1210 | \r | |
1211 | onDestroy: function() {\r | |
1212 | var me = this,\r | |
1213 | task = me.loadTask,\r | |
1214 | data = me.getData(),\r | |
1215 | source = data.getSource();\r | |
1216 | \r | |
1217 | // clearData ensures everything is unjoined\r | |
1218 | me.clearData();\r | |
1219 | me.callParent();\r | |
1220 | me.setSession(null);\r | |
1221 | me.observers = null;\r | |
1222 | if (task) {\r | |
1223 | task.cancel();\r | |
1224 | me.loadTask = null;\r | |
1225 | }\r | |
1226 | if (source) {\r | |
1227 | source.destroy();\r | |
1228 | }\r | |
1229 | },\r | |
1230 | \r | |
1231 | privates: {\r | |
1232 | /**\r | |
1233 | * Similar to a load, however no records are added to the store. This is useful\r | |
1234 | * in allowing the developer to decide what to do with the new records.\r | |
1235 | * @param {Object} [options] See {@link #method-load load options}.\r | |
1236 | *\r | |
1237 | * @private\r | |
1238 | */\r | |
1239 | fetch: function(options) {\r | |
1240 | options = Ext.apply({}, options);\r | |
1241 | \r | |
1242 | this.setLoadOptions(options);\r | |
1243 | var operation = this.createOperation('read', options);\r | |
1244 | operation.execute();\r | |
1245 | },\r | |
1246 | \r | |
1247 | onBeforeLoad: function(operation) {\r | |
1248 | this.callObservers('BeforeLoad', [operation]);\r | |
1249 | },\r | |
1250 | \r | |
1251 | onRemoteFilterSet: function (filters, remoteFilter) {\r | |
1252 | if (filters) {\r | |
1253 | this.getData().setFilters(remoteFilter ? null : filters);\r | |
1254 | }\r | |
1255 | this.callParent([filters, remoteFilter]);\r | |
1256 | },\r | |
1257 | \r | |
1258 | onRemoteSortSet: function (sorters, remoteSort) {\r | |
1259 | var data = this.getData();\r | |
1260 | if (sorters) {\r | |
1261 | data.setSorters(remoteSort ? null : sorters);\r | |
1262 | }\r | |
1263 | data.setAutoGroup(!remoteSort);\r | |
1264 | this.callParent([sorters, remoteSort]);\r | |
1265 | },\r | |
1266 | \r | |
1267 | /**\r | |
1268 | * Checks whether records are being moved within the store. This can be used in conjunction with the\r | |
1269 | * {@link #event-add} and {@link #event-remove} events to determine whether the records are being removed/added\r | |
1270 | * or just having the position changed.\r | |
1271 | * @param {Ext.data.Model[]/Ext.data.Model} [records] The record(s).\r | |
1272 | * @return {Number} The number of records being moved. `0` if no records are moving. If records are passed\r | |
1273 | * the number will refer to how many of the passed records are moving.\r | |
1274 | *\r | |
1275 | * @private\r | |
1276 | */\r | |
1277 | isMoving: function(records, getMap) {\r | |
1278 | var map = this.moveMap,\r | |
1279 | moving = 0,\r | |
1280 | len, i;\r | |
1281 | \r | |
1282 | if (map) {\r | |
1283 | if (records) {\r | |
1284 | if (Ext.isArray(records)) {\r | |
1285 | for (i = 0, len = records.length; i < len; ++i) {\r | |
1286 | moving += map[records[i].id] ? 1 : 0;\r | |
1287 | }\r | |
1288 | } else if (map[records.id]) {\r | |
1289 | ++moving;\r | |
1290 | }\r | |
1291 | } else {\r | |
1292 | moving = getMap ? map : this.moveMapCount;\r | |
1293 | }\r | |
1294 | }\r | |
1295 | return moving;\r | |
1296 | },\r | |
1297 | \r | |
1298 | setLoadOptions: function(options) {\r | |
1299 | // Only add grouping options if grouping is remote\r | |
1300 | var me = this,\r | |
1301 | pageSize = me.getPageSize(),\r | |
1302 | session, grouper;\r | |
1303 | \r | |
1304 | if (me.getRemoteSort() && !options.grouper) {\r | |
1305 | grouper = me.getGrouper();\r | |
1306 | if (grouper) {\r | |
1307 | options.grouper = grouper;\r | |
1308 | }\r | |
1309 | }\r | |
1310 | \r | |
1311 | if (pageSize || 'start' in options || 'limit' in options || 'page' in options) {\r | |
1312 | options.page = options.page != null ? options.page : me.currentPage;\r | |
1313 | options.start = (options.start !== undefined) ? options.start : (options.page - 1) * pageSize;\r | |
1314 | options.limit = options.limit != null ? options.limit : pageSize;\r | |
1315 | \r | |
1316 | me.currentPage = options.page;\r | |
1317 | }\r | |
1318 | \r | |
1319 | options.addRecords = options.addRecords || false;\r | |
1320 | \r | |
1321 | if (!options.recordCreator) {\r | |
1322 | session = me.getSession();\r | |
1323 | if (session) {\r | |
1324 | options.recordCreator = session.recordCreator;\r | |
1325 | }\r | |
1326 | }\r | |
1327 | me.callParent([options]);\r | |
1328 | },\r | |
1329 | \r | |
1330 | setMoving: function(records, isMoving) {\r | |
1331 | var me = this,\r | |
1332 | map = me.moveMap || (me.moveMap = {}),\r | |
1333 | len = records.length,\r | |
1334 | i, id;\r | |
1335 | \r | |
1336 | for (i = 0; i < len; ++i) {\r | |
1337 | id = records[i].id;\r | |
1338 | if (isMoving) {\r | |
1339 | if (map[id]) {\r | |
1340 | ++map[id];\r | |
1341 | } else {\r | |
1342 | map[id] = 1;\r | |
1343 | ++me.moveMapCount;\r | |
1344 | }\r | |
1345 | } else {\r | |
1346 | if (--map[id] === 0) {\r | |
1347 | delete map[id];\r | |
1348 | --me.moveMapCount;\r | |
1349 | }\r | |
1350 | }\r | |
1351 | }\r | |
1352 | \r | |
1353 | if (me.moveMapCount === 0) {\r | |
1354 | me.moveMap = null;\r | |
1355 | }\r | |
1356 | },\r | |
1357 | \r | |
1358 | processAssociation: function(records) {\r | |
1359 | var me = this,\r | |
1360 | associatedEntity = me.getAssociatedEntity();\r | |
1361 | \r | |
1362 | if (associatedEntity) {\r | |
1363 | records = me.getRole().processLoad(me, associatedEntity, records, me.getSession());\r | |
1364 | }\r | |
1365 | return records;\r | |
1366 | }\r | |
1367 | }\r | |
1368 | \r | |
1369 | // Provides docs from the mixin\r | |
1370 | /**\r | |
1371 | * @method each\r | |
1372 | * @inheritdoc Ext.data.LocalStore#each\r | |
1373 | */\r | |
1374 | \r | |
1375 | /**\r | |
1376 | * @method collect\r | |
1377 | * @inheritdoc Ext.data.LocalStore#collect\r | |
1378 | */\r | |
1379 | \r | |
1380 | /**\r | |
1381 | * @method getById\r | |
1382 | * @inheritdoc Ext.data.LocalStore#getById\r | |
1383 | */\r | |
1384 | \r | |
1385 | /**\r | |
1386 | * @method getByInternalId\r | |
1387 | * @inheritdoc Ext.data.LocalStore#getByInternalId\r | |
1388 | */\r | |
1389 | \r | |
1390 | /**\r | |
1391 | * @method indexOf\r | |
1392 | * @inheritdoc Ext.data.LocalStore#indexOf\r | |
1393 | */\r | |
1394 | \r | |
1395 | /**\r | |
1396 | * @method indexOfId\r | |
1397 | * @inheritdoc Ext.data.LocalStore#indexOfId\r | |
1398 | */\r | |
1399 | \r | |
1400 | /**\r | |
1401 | * @method queryBy\r | |
1402 | * @inheritdoc Ext.data.LocalStore#queryBy\r | |
1403 | */\r | |
1404 | \r | |
1405 | /**\r | |
1406 | * @method query\r | |
1407 | * @inheritdoc Ext.data.LocalStore#query\r | |
1408 | */\r | |
1409 | \r | |
1410 | /**\r | |
1411 | * @method first\r | |
1412 | * @inheritdoc Ext.data.LocalStore#first\r | |
1413 | */\r | |
1414 | \r | |
1415 | /**\r | |
1416 | * @method last\r | |
1417 | * @inheritdoc Ext.data.LocalStore#last\r | |
1418 | */\r | |
1419 | \r | |
1420 | /**\r | |
1421 | * @method sum\r | |
1422 | * @inheritdoc Ext.data.LocalStore#sum\r | |
1423 | */\r | |
1424 | \r | |
1425 | /**\r | |
1426 | * @method count\r | |
1427 | * @inheritdoc Ext.data.LocalStore#count\r | |
1428 | */\r | |
1429 | \r | |
1430 | /**\r | |
1431 | * @method min\r | |
1432 | * @inheritdoc Ext.data.LocalStore#min\r | |
1433 | */\r | |
1434 | \r | |
1435 | /**\r | |
1436 | * @method max\r | |
1437 | * @inheritdoc Ext.data.LocalStore#max\r | |
1438 | */\r | |
1439 | \r | |
1440 | /**\r | |
1441 | * @method average\r | |
1442 | * @inheritdoc Ext.data.LocalStore#average\r | |
1443 | */\r | |
1444 | \r | |
1445 | /**\r | |
1446 | * @method aggregate\r | |
1447 | * @inheritdoc Ext.data.LocalStore#aggregate\r | |
1448 | */\r | |
1449 | });\r |