react-native-storage
Comparing version 0.0.8 to 0.0.9
{ | ||
"name": "react-native-storage", | ||
"version": "0.0.8", | ||
"version": "0.0.9", | ||
"description": "This is a local storage wrapper for both react-native(AsyncStorage) and browser(localStorage). ES6/babel is needed.", | ||
"main": "storage.js", | ||
"scripts": { | ||
"test": "echo \"Error: no test specified\" && exit 1" | ||
"test": "jest --verbose" | ||
}, | ||
"jest": { | ||
"scriptPreprocessor": "node_modules/babel-jest", | ||
"setupEnvScriptFile": "jestSupport/mockStorage.js" | ||
}, | ||
"repository": { | ||
@@ -23,3 +27,7 @@ "type": "git", | ||
}, | ||
"homepage": "https://github.com/sunnylqm/react-native-storage#readme" | ||
"homepage": "https://github.com/sunnylqm/react-native-storage#readme", | ||
"devDependencies": { | ||
"babel-jest": "^5.3.0", | ||
"jest-cli": "^0.5.10" | ||
} | ||
} |
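The jest configuration above points to `jestSupport/mockStorage.js`, a setup file this diff does not include. A minimal sketch of what such a file might contain, assuming it only needs to expose a synchronous `localStorage`-style mock on the jsdom `window` so that storage.js takes the browser code path under jest:

```js
// Hypothetical sketch of jestSupport/mockStorage.js (the real file is not shown in this diff).
// Installs a minimal in-memory localStorage for the test environment.
var store = {};

window.localStorage = {
  setItem: function(key, value) {
    store[key] = String(value);
  },
  getItem: function(key) {
    return store.hasOwnProperty(key) ? store[key] : null;
  },
  removeItem: function(key) {
    delete store[key];
  }
};
```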
README.md
# react-native-storage | ||
This is a local storage wrapper for both react-native(AsyncStorage) and browser(localStorage). [ES6/babel](https://babeljs.io/) is needed. | ||
This is a local storage wrapper for both react-native(AsyncStorage) and browser(localStorage). [ES6](http://babeljs.io/docs/learn-es2015/) syntax, promise-based async loading, fully tested with jest. | ||
You may need a [Promise polyfill](https://github.com/jakearchibald/es6-promise) for [legacy iOS devices/browsers](http://caniuse.com/#search=promise). | ||
这是一个本地持久存储的封装,可以同时支持react-native(AsyncStorage)和浏览器(localStorage)。由于代码使用ES6语法编写,因而需要[babel库](https://babeljs.io/)的支持。 | ||
这是一个本地持久存储的封装,可以同时支持react-native(AsyncStorage)和浏览器(localStorage)。ES6语法,promise异步读取,使用jest进行了完整的单元测试。由于代码使用ES6语法编写,因而需要[babel库](http://babeljs.io/docs/setup/#browserify)的支持。 | ||
如果iOS设备或浏览器版本较老(不支持[Promise](http://caniuse.com/#search=promise)),则还需要一个Promise的[兼容库](https://github.com/jakearchibald/es6-promise)。 | ||
@@ -16,3 +16,4 @@ | ||
对于Web开发你需要使用[babel](https://babeljs.io/)来支持es6模块导入功能。(我会在下个版本中提供一个示例) 如果是React-Native开发,把这个[babel配置文件](https://github.com/brentvatne/react-native-animated-demo-tinder/blob/master/.babelrc)放到你的项目根目录中即可。 | ||
对于Web开发你需要使用[babel](https://babeljs.io/)来支持es6模块导入功能。 | ||
如果是React-Native开发,把这个[babel配置文件](https://github.com/brentvatne/react-native-animated-demo-tinder/blob/master/.babelrc)放到你的项目根目录中即可。 | ||
@@ -25,49 +26,159 @@ ### Import 导入 | ||
import Storage from 'react-native-storage'; | ||
### Init 初始化 | ||
var storage = new Storage({ | ||
//maximum capacity, default 1000 | ||
//最大容量,默认值1000条数据循环存储 | ||
size: 1000, | ||
//expire time, default 1 day(1000 * 3600 * 24 secs) | ||
//数据过期时间,默认一整天(1000 * 3600 * 24秒) | ||
defaultExpires: 1000 * 3600 * 24, | ||
//cache data in the memory. default is true. | ||
//读写时在内存中缓存数据。默认启用。 | ||
enableCache: true, | ||
//if data was not found in storage or expired, | ||
//the corresponding sync method will be invoked and return | ||
//the latest data. | ||
//如果storage中没有相应数据,或数据已过期, | ||
//则会调用相应的sync同步方法,无缝返回最新数据。 | ||
sync : { | ||
//we'll talk about the details later. | ||
//同步方法的具体说明会在后文提到 | ||
} | ||
}) | ||
//I suggest you have one(and only one) storage instance in global scope. | ||
//最好在全局范围内创建一个(且只有一个)storage实例,方便使用 | ||
//for web | ||
//window.storage = storage; | ||
//for react native | ||
//global.storage = storage; | ||
//or CMD | ||
//module.exports = storage; | ||
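For example, one common way to follow that advice (not something the library prescribes) is to create the instance in its own module and import it everywhere else. A sketch, using a hypothetical `appStorage.js` file:

```js
// appStorage.js (hypothetical file name, for illustration only).
// Creates the single shared Storage instance and exports it for the whole app.
import Storage from 'react-native-storage';

let storage = new Storage({
  size: 1000,
  defaultExpires: 1000 * 3600 * 24,
  enableCache: true,
  sync: {}
});

export default storage;
```

Other modules can then `import storage from './appStorage'` instead of relying on a global.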
### Save & Load 保存和读取 | ||
Storage.save(id, data, global, expires) | ||
Storage.load(id, global).then( data => { | ||
console.info(data); | ||
}).catch( error => { | ||
console.warn(error); | ||
}); | ||
Use these methods to save/load any legal type of JavaScript data. They will automatically transform the data into a JSON string and parse it back for you. | ||
使用这两个方法来保存和读取任意合法类型的js数据。它们会自动转换JSON字符串或将JSON解析为数据对象。 | ||
**Attention:** By default, there is a private property *map* which records the indices of stored data. It has a default SIZE of 1000, which means the capacity is 1000 records. If that limit is exceeded, new data will overwrite the first record without warning (but won't cause disorder), to keep the storage size from growing uncontrollably. You can alter the SIZE using: | ||
Storage.SIZE = 1500 | ||
//Save something with key only. | ||
//This suits data that is more unique and constantly being used. | ||
//They are permanently stored unless you remove them. | ||
//Even when expired, the data won't be removed; only the sync method will be invoked. | ||
//使用key来保存数据。这些数据一般是全局独有的,常常需要调用的。 | ||
//除非你手动移除,这些数据会被永久保存,而且默认不会过期。 | ||
//即便指定了且达到了过期时间,数据也不会被删除,而只是触发调用同步方法。 | ||
storage.save({ | ||
key: 'loginState', | ||
rawData: { | ||
from: 'some other site', | ||
userid: 'some userid', | ||
token: 'some token' | ||
}, | ||
//if not specified, the defaultExpires will be applied instead. | ||
//if set to null, then it will never expire. | ||
//如果不指定过期时间,则会使用defaultExpires参数 | ||
//如果设为null,则永不过期 | ||
expires: 1000 * 3600 | ||
}); | ||
注意:默认情况下,会使用一个**map**来记录所存数据的索引,其默认大小为1000。也就是说Storage的默认容量为1000条记录。如果超出了容量,那么新保存的数据就会在不提示的情况下覆盖第一条数据(但并不会引起数据紊乱),以避免Storage占用的空间不可控地持续增大。你可以修改这个默认SIZE的大小。 | ||
//load 读取 | ||
storage.load({ | ||
key: 'loginState', | ||
//autoSync(default true) means if data not found or expired, | ||
//then invoke the corresponding sync method | ||
//autoSync(默认为true)意味着在没有找到数据或数据过期时自动调用相应的同步方法 | ||
autoSync: true, | ||
//syncInBackground(default true) means if data expired, | ||
//return the outdated data first while invoking the sync method. | ||
//Set it to false to always return the data provided by the sync method when expired (which is, of course, slower). | ||
//syncInBackground(默认为true)意味着如果数据过期, | ||
//在调用同步方法的同时先返回已经过期的数据。 | ||
//设置为false的话,则始终强制返回同步方法提供的最新数据(当然会需要更多等待时间)。 | ||
syncInBackground: true | ||
}).then( ret => { //found data goes to then() | ||
console.log(ret.userid); //如果找到数据,则在then方法中返回 | ||
}).catch( err => { //any exception including data not found | ||
console.warn(err); //goes to catch() | ||
//如果没有找到数据且没有同步方法, | ||
//或者有其他异常,则在catch中返回 | ||
}) | ||
__________________________________________________________________ | ||
//Save something with key and id, typically many records of the same type (key). | ||
//There is a quota on "key-id" data (the size parameter you pass to the constructor). | ||
//By default the 1001st record will overwrite the 1st one. | ||
//If you then load the 1st record, catch (data not found) or the corresponding sync method will be invoked. | ||
//使用key和id来保存数据,一般是保存同类别(key)的大量数据。 | ||
//这些"key-id"数据有一个保存上限,即在初始化storage时传入的size参数。 | ||
//在默认上限参数下,第1001个数据会覆盖第1个数据。 | ||
//覆盖之后,再读取第1个数据,会返回catch或是相应的同步方法。 | ||
var userA = { | ||
name: 'A', | ||
age: 20, | ||
tags: [ | ||
'geek', | ||
'nerd', | ||
'otaku' | ||
] | ||
}; | ||
storage.save({ | ||
key: 'user', | ||
id: '1001', | ||
rawData: userA, | ||
expires: 1000 * 60 | ||
}); | ||
//load 读取 | ||
storage.load({ | ||
key: 'user', | ||
id: '1001' | ||
}).then( ret => { //found data goes to then() | ||
console.log(ret.name); //如果找到数据,则在then方法中返回 | ||
}).catch( err => { //any exception including data not found | ||
console.warn(err); //goes to catch() | ||
//如果没有找到数据且没有同步方法, | ||
//或者有其他异常,则在catch中返回 | ||
}) | ||
Or, if you don't want some long-term data to be "recycled", you can set the **global** param to true, which stores your data outside the **map**. When you load it, **global** should also be **true**. | ||
**Expires** has a default value of one whole day (24 * 3600 * 1000). When data expires, it won't be deleted, but a refresh operation will be triggered if available. Set it to *null* if you want it to never expire. | ||
或者如果你不想有些长期存放的数据被“回收”,那么可以将**global**参数设置为true,这样它就不会被记录到map中,也就不会被回收。而当你读取这种不会被回收的数据时,也需要将load方法中的global参数设为true. | ||
**Expires**参数的默认值为一整天(24 * 3600 * 1000)。过期的数据不会被删除,而是会触发一个刷新的动作(需要你自己实现)。如果你希望某个数据永不过期,则将此参数设置为*null* 即可。 | ||
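For instance, a token that should never be recycled or expire could be stored key-only with `expires: null`. A small sketch reusing the save API shown above (the key name is made up for illustration):

```js
// Key-only data is kept outside the size-limited map, and expires: null
// means it is never treated as expired; only an explicit removal drops it.
storage.save({
  key: 'deviceToken',               // hypothetical key
  rawData: { token: 'some token' },
  expires: null
});
```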
### Sync remote data(refresh) 同步远程数据(刷新) | ||
You can pass sync methods as one object parameter to the storage constructor, or add them at any time afterwards. | ||
### Sync remote data 同步远程数据(刷新) | ||
Following is an example. You must implement it yourself under Storage.sync. | ||
下面是一段示例代码。你必须在Storage.sync中自己实现。 | ||
Storage.sync = { | ||
user(id, resolve, reject){ | ||
fetch('user/', { | ||
method: 'GET', | ||
body: 'id=' + id | ||
}).then(function(response) { | ||
return response.json(); | ||
}).then(function(data) { | ||
//console.info(data); | ||
if(data && data.user){ | ||
data = data.user; | ||
Storage.save('user_' + data.id, data); | ||
resolve && resolve(data); | ||
} | ||
else{ | ||
reject && reject(); | ||
} | ||
}).catch((error) => { | ||
console.warn(error); | ||
}); | ||
} | ||
storage.sync = { | ||
//The name of the sync method must be the same as the data's key | ||
//And the passed params will be an all-in-one object. | ||
//同步方法的名字必须和所存数据的key完全相同 | ||
//方法接受的参数为一整个object,所有参数从object中解构取出 | ||
user(params){ | ||
let { id, resolve, reject } = params; | ||
fetch('user/?id=' + id, { | ||
method: 'GET' | ||
}).then( response => { | ||
return response.json(); | ||
}).then( json => { | ||
//console.log(json); | ||
if(json && json.user){ | ||
storage.save({ | ||
key: 'user', | ||
id, | ||
rawData: json.user | ||
}); | ||
resolve && resolve(json.user); | ||
} | ||
else{ | ||
reject && reject('data parse error'); | ||
} | ||
}).catch( err => { | ||
console.warn(err); | ||
reject && reject(err); | ||
}); | ||
} | ||
} | ||
@@ -77,25 +188,38 @@ | ||
Storage.load('user_123').then(...) | ||
If there is no user_123 stored currently, then Storage.sync.user would be triggered to fetch the remote data and return it. | ||
有了上面这个sync方法,以后再调用Storage.load时,如果本地并没有存储相应的user,那么会自动触发Storage.sync.user去远程取回数据并无缝返回。 | ||
storage.load({ | ||
key: 'user', | ||
id: '1002' | ||
}).then(...) | ||
If there is no user 1002 stored currently, then storage.sync.user would be invoked to fetch the remote data and return it seamlessly. | ||
有了上面这个sync方法,以后再调用storage.load时,如果本地并没有存储相应的user,那么会自动触发storage.sync.user去远程取回数据并无缝返回。 | ||
### Load batch data 读取批量数据 | ||
Storage.getBatchDataWithKeys(keys) | ||
Storage.getBatchDataWithIds(key, ids) | ||
If you want to get several pieces of data in one call, use these methods. Examples: | ||
如果你想在一次操作中获取多个数据,那就使用上面的两个方法。示例: | ||
Storage.getBatchDataWithKeys(['user_123', 'company_321', 'token']) | ||
.then( results => { | ||
//Load batch data with the same parameters as storage.load, but in an array. | ||
//It will invoke sync methods on demand, | ||
//and finally return them all in an ordered array. | ||
//使用和load方法一样的参数读取批量数据,但是参数是以数组的方式提供。 | ||
//会在需要时分别调用相应的同步方法,最后统一返回一个有序数组。 | ||
storage.getBatchData([ | ||
{ key: 'loginState' }, | ||
{ key: 'checkPoint', syncInBackground: false }, | ||
{ key: 'balance' }, | ||
{ key: 'user', id: '1009' } | ||
]) | ||
.then( results => { | ||
results.forEach( result => { | ||
console.info(result); | ||
console.log(result); | ||
}) | ||
}) | ||
Storage.getBatchDataWithIds('images', ['1001', '1002', '1003']) | ||
//Load batch data with one key and an array of ids. | ||
//根据key和一个id数组来读取批量数据 | ||
storage.getBatchDataWithIds({ | ||
key: 'user', | ||
ids: ['1001', '1002', '1003'] | ||
}) | ||
.then( ... ) | ||
There is a notable difference between the two methods besides the arguments. **getBatchDataWithKeys** will invoke different sync methods (since the keys are different) one by one when corresponding data is missing. However, **getBatchDataWithIds** will collect the missing data, push their ids into an array, then pass the array to the corresponding sync method (to avoid too many requests), so you need to implement an array query on the server end. | ||
这两个方法除了参数形式不同,还有个值得注意的差异。**getBatchDataWithKeys**会在数据缺失时挨个调用不同的sync方法(因为key不同)。但是**getBatchDataWithIds**却会把缺失的数据统计起来,将它们的id收集到一个数组中,然后一次传递给对应的sync方法(避免挨个查询导致同时发起大量请求),所以你需要在服务端实现通过数组来查询返回。 | ||
There is a notable difference between the two methods besides the arguments. **getBatchData** will invoke different sync methods (since the keys may be different) one by one when corresponding data is missing. However, **getBatchDataWithIds** will collect the missing data, push their ids into an array, then pass the whole array to the corresponding sync method in a single call (to avoid too many requests), so you need to implement an array query on the server end and handle the sync method's parameters properly (because the id parameter can be either a single string or an array of strings). | ||
这两个方法除了参数形式不同,还有个值得注意的差异。**getBatchData**会在数据缺失时挨个调用不同的sync方法(因为key不同)。但是**getBatchDataWithIds**却会把缺失的数据统计起来,将它们的id收集到一个数组中,然后一次传递给对应的sync方法(避免挨个查询导致同时发起大量请求),所以你需要在服务端实现通过数组来查询返回,还要注意对应的sync方法的参数处理(因为id参数可能是一个字符串,也可能是一个字符串数组)。 |
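A sync method that should serve both a single `load` call and `getBatchDataWithIds` therefore has to accept either form of `id`. A sketch along the lines of the example above, assuming a hypothetical endpoint that accepts a comma-separated id list and returns a `users` array:

```js
storage.sync = {
  user(params) {
    let { id, resolve, reject } = params;
    // id is a single string when called from load(),
    // or an array of strings when called from getBatchDataWithIds().
    let ids = Array.isArray(id) ? id : [id];
    fetch('user/?id=' + ids.join(','), { // assumes the server accepts a comma-separated list
      method: 'GET'
    }).then( response => {
      return response.json();
    }).then( json => {
      if(json && json.users){            // hypothetical response shape
        json.users.forEach( user => {
          storage.save({
            key: 'user',
            id: user.id,
            rawData: user
          });
        });
        // hand back a single object or an array, matching how we were called
        resolve && resolve(Array.isArray(id) ? json.users : json.users[0]);
      }
      else{
        reject && reject('data parse error');
      }
    }).catch( err => {
      console.warn(err);
      reject && reject(err);
    });
  }
}
```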
storage.js
/* | ||
* local storage(web/react native) wrapper | ||
* sunnylqm 2015-09-01 | ||
* version 0.0.8 | ||
* sunnylqm 2015-10-01 | ||
* version 0.0.9 | ||
*/ | ||
let cache = {}; | ||
let m; | ||
let _SIZE = 1000; | ||
let _s; | ||
let isBrowser = false; | ||
if(window && window.localStorage){ | ||
try { | ||
window.localStorage.setItem('test', 'test'); | ||
_s = window.localStorage; | ||
isBrowser = true; | ||
m = JSON.parse(_s.getItem('map')) || { index: 0 }; | ||
} | ||
catch(e){ | ||
console.warn(e); | ||
} | ||
} | ||
else{ | ||
_s = require('react-native').AsyncStorage; | ||
_s.getItem('map').then( map => { | ||
m = JSON.parse(map) || { index: 0 }; | ||
}); | ||
} | ||
export default class Storage { | ||
static get cache(){ | ||
return cache; | ||
constructor(options = {}) { | ||
let me = this; | ||
me._SIZE = options.size || 1000; // maximum capacity | ||
me.sync = options.sync || {}; // remote sync method | ||
me.defaultExpires = options.defaultExpires || 1000 * 3600 * 24; | ||
me.enableCache = options.enableCache || true; | ||
me._innerVersion = 9; | ||
me.cache = {}; | ||
//detect browser or ios javascriptCore | ||
me.isBrowser = false; | ||
if(window && window.localStorage) { | ||
try { | ||
window.localStorage.setItem('test', 'test'); | ||
me._s = window.localStorage; | ||
me.isBrowser = true; | ||
me._m = me._checkMap(JSON.parse(me._s.getItem('map'))); | ||
} | ||
catch(e) { | ||
console.warn(e); | ||
throw e; | ||
} | ||
} | ||
else { | ||
me.readyQueue = []; | ||
me._s = require('react-native').AsyncStorage; | ||
me._s.getItem('map').then( map => { | ||
me._m = me._checkMap(JSON.parse(map)); | ||
me.onReady(); | ||
}); | ||
} | ||
} | ||
static get _s() { | ||
return _s; | ||
onReady() { | ||
let me = this; | ||
let rq = me.readyQueue; | ||
for(let i = 0, task; task = rq[i++];) { | ||
me[task.method](task.params); | ||
} | ||
} | ||
static get _SIZE() { | ||
return _SIZE; | ||
_checkMap(map) { | ||
let me = this; | ||
if(map && map.innerVersion && map.innerVersion === me._innerVersion ) { | ||
return map; | ||
} | ||
else { | ||
return { | ||
innerVersion: me._innerVersion, | ||
index: 0 | ||
}; | ||
} | ||
} | ||
static set _SIZE(newSize) { | ||
_SIZE = newSize; | ||
_getId(key, id) { | ||
return key + '_' + id; | ||
} | ||
static _saveToMap(id, d) { | ||
let s = Storage._s; | ||
if(m[id]){ | ||
s.setItem('map_' + m[id],d); | ||
_saveToMap(params) { | ||
let me = this, | ||
s = me._s, | ||
m = me._m; | ||
if(!m) { | ||
return me.readyQueue.push({ | ||
method: '_saveToMap', | ||
params | ||
}); | ||
} | ||
else{ | ||
if(++m.index === Storage._SIZE){ | ||
m.index = 1; | ||
let { key, id, data } = params; | ||
//join key and id | ||
let newId = me._getId(key, id); | ||
//update existed data | ||
if(m[newId]) { | ||
s.setItem('map_' + m[newId], data); | ||
} | ||
//create new data | ||
else { | ||
if(m[m.index] !== undefined){ | ||
//loop over, delete old data | ||
let oldId = m[m.index]; | ||
delete m[oldId]; | ||
if(me.enableCache) { | ||
delete me.cache[oldId]; | ||
} | ||
} | ||
m[id] = m.index; | ||
s.setItem('map_' + m.index, d); | ||
m[newId] = m.index; | ||
m[m.index] = newId; | ||
if(me.enableCache) { | ||
const cacheData = JSON.parse(data); | ||
me.cache[newId] = cacheData; | ||
} | ||
s.setItem('map_' + m.index, data); | ||
s.setItem('map', JSON.stringify(m)); | ||
if(++m.index === me._SIZE) { | ||
m.index = 0; | ||
} | ||
} | ||
} | ||
static save(id, rawData, global, expires){ | ||
let s = Storage._s, d; | ||
if(s){ | ||
let data = { | ||
rawData | ||
}; | ||
let now = new Date().getTime(); | ||
if(expires === undefined){ | ||
expires = 24*3600*1000; | ||
//expires = 1000*60; | ||
save(params) { | ||
let me = this, | ||
s = me._s; | ||
let { key, id, rawData, expires } = params; | ||
let data = { | ||
rawData | ||
}; | ||
let now = new Date().getTime(); | ||
if(expires === undefined) { | ||
expires = me.defaultExpires; | ||
} | ||
if(expires !== null) { | ||
data.expires = now + expires; | ||
} | ||
data = JSON.stringify(data); | ||
if(id === undefined) { | ||
if(me.enableCache) { | ||
const cacheData = JSON.parse(data); | ||
me.cache[key] = cacheData; | ||
} | ||
if(expires !== null){ | ||
data.expires = now + expires; | ||
} | ||
d = JSON.stringify(data); | ||
if(global){ | ||
s.setItem(id, d); | ||
} | ||
else{ | ||
Storage._saveToMap(id, d); | ||
//if(isBrowser){ | ||
// let m = Storage.cache.map || (Storage.cache.map = JSON.parse(s.getItem('map'))); | ||
// Storage._saveToMap(m, id, d); | ||
//} | ||
//else{ | ||
// s.getItem('map').then( m => { | ||
// Storage._saveToMap(JSON.parse(m), id, d); | ||
// }) | ||
//} | ||
} | ||
s.setItem(key, data); | ||
} | ||
else { | ||
me._saveToMap({ | ||
key, | ||
id, | ||
data | ||
}); | ||
} | ||
} | ||
static getBatchDataWithKeys(keys){ | ||
//全局存储,不循环 | ||
let global = true; | ||
getBatchData(querys) { | ||
let me = this; | ||
let tasks = []; | ||
for(let i = 0, key; key = keys[i]; i++){ | ||
if(!Storage.cache[key]){ | ||
//含_字符,循环存储 | ||
global = (key.indexOf('_') === -1); | ||
tasks[i] = Storage.load(key, global); | ||
} | ||
else{ | ||
tasks[i] = Storage.cache[key]; | ||
} | ||
for(let i = 0, query; query = querys[i]; i++) { | ||
tasks[i] = me.load(query); | ||
//let { key, id } = query; | ||
//let newId = id === undefined ? key : me._getId(key, id); | ||
//if(me.enableCache && me.cache[newId] !== undefined) { | ||
// tasks[i] = me.cache[newId]; | ||
//} | ||
//else { | ||
// tasks[i] = me.load(query); | ||
//} | ||
} | ||
@@ -110,6 +157,8 @@ return new Promise((resolve, reject) => { | ||
} | ||
static getBatchDataWithIds(key, ids){ | ||
getBatchDataWithIds(params) { | ||
let me = this; | ||
let { key, ids, syncInBackground } = params; | ||
let tasks = []; | ||
for(var i = 0, id; id = ids[i]; i++){ | ||
tasks[i] = Storage.load(key + '_' + id, false, false); | ||
for(var i = 0, id; id = ids[i]; i++) { | ||
tasks[i] = me.load({ key, id, syncInBackground, autoSync: false, batched: true }); | ||
} | ||
@@ -119,17 +168,21 @@ return new Promise((resolve, reject) => { | ||
Promise.all(tasks).then(values => { | ||
values = values.filter(value =>{ | ||
if(value.syncId !== undefined){ | ||
values = values.filter(value => { | ||
if(value.syncId !== undefined) { | ||
missed.push(value.syncId); | ||
return false; | ||
} | ||
else{ | ||
else { | ||
return true; | ||
} | ||
}); | ||
if(missed.length){ | ||
Storage.sync[key](missed, data => { | ||
resolve(values.concat(data)); | ||
}, reject); | ||
if(missed.length) { | ||
me.sync[key]({ | ||
id : missed, | ||
resolve: data => { | ||
resolve(values.concat(data)); | ||
}, | ||
reject | ||
}); | ||
} | ||
else{ | ||
else { | ||
resolve(values); | ||
@@ -142,47 +195,92 @@ } | ||
} | ||
static _loadGlobalItem(id, ret, resolve, reject){ | ||
if(ret === null || ret === 'undefined'){ | ||
if(Storage.sync[id]){ | ||
Storage.sync[id](resolve, reject); | ||
_lookupGlobalItem(params) { | ||
let me = this, | ||
s = me._s, | ||
ret; | ||
let { key } = params; | ||
if(me.enableCache && me.cache[key] !== undefined) { | ||
ret = me.cache[key]; | ||
me._loadGlobalItem({ ret, ...params }); | ||
} | ||
else { | ||
if(me.isBrowser) { | ||
ret = s.getItem(key); | ||
me._loadGlobalItem({ ret, ...params }); | ||
} | ||
else{ | ||
else { | ||
s.getItem(key).then(ret => { | ||
me._loadGlobalItem({ ret, ...params }); | ||
}) | ||
} | ||
} | ||
} | ||
_loadGlobalItem(params) { | ||
let me = this; | ||
let { key, ret, resolve, reject, autoSync, syncInBackground } = params; | ||
if(ret === null || ret === undefined) { | ||
if(autoSync && me.sync[key]) { | ||
me.sync[key]({resolve, reject}); | ||
} | ||
else { | ||
reject(); | ||
} | ||
} | ||
else{ | ||
ret = JSON.parse(ret); | ||
if(ret.expires < new Date().getTime()){ | ||
Storage.sync[id] && Storage.sync[id](); | ||
else { | ||
if(typeof ret === 'string') { | ||
ret = JSON.parse(ret); | ||
} | ||
let now = new Date().getTime(); | ||
if(autoSync && ret.expires < now && me.sync[key]) { | ||
if(syncInBackground) { | ||
me.sync[key]({}); | ||
} | ||
else { | ||
me.sync[key]({resolve, reject}); | ||
return; | ||
} | ||
} | ||
resolve(ret.rawData); | ||
} | ||
} | ||
static _noItemFound(kv, resolve, reject, autoSync){ | ||
if(kv.length > 1 && Storage.sync[kv[0]]){ | ||
if(autoSync){ | ||
Storage.sync[kv[0]](kv[1], resolve, reject); | ||
_noItemFound(params) { | ||
let me = this; | ||
let { key, id, resolve, reject, autoSync } = params; | ||
if(me.sync[key]) { | ||
if(autoSync) { | ||
me.sync[key]({id, resolve, reject}); | ||
} | ||
else{ | ||
else { | ||
resolve({ | ||
syncId: kv[1] | ||
syncId: id | ||
}); | ||
} | ||
} | ||
else{ | ||
else { | ||
reject(); | ||
} | ||
} | ||
static _loadMapItem(ret, kv, resolve, reject, autoSync){ | ||
if(ret === null || ret === 'undefined'){ | ||
Storage._noItemFound(kv, resolve, reject, autoSync); | ||
_loadMapItem(params) { | ||
let me = this; | ||
let { ret, key, id, resolve, reject, autoSync, batched, syncInBackground } = params; | ||
if(ret === null || ret === undefined) { | ||
me._noItemFound(params); | ||
} | ||
else{ | ||
ret = JSON.parse(ret); | ||
if(ret.expires < new Date().getTime()){ | ||
if(autoSync && kv.length > 1 && Storage.sync[kv[0]]) { | ||
Storage.sync[kv[0]](kv[1]); | ||
else { | ||
if(typeof ret === 'string'){ | ||
ret = JSON.parse(ret); | ||
} | ||
let now = new Date().getTime(); | ||
if(autoSync && ret.expires < now) { | ||
if(me.sync[key]) { | ||
if(syncInBackground){ | ||
me.sync[key]({id}); | ||
} | ||
else{ | ||
me.sync[key]({id, resolve, reject}); | ||
return; | ||
} | ||
} | ||
else{ | ||
else if(batched) { | ||
resolve({ | ||
syncId: kv[1] | ||
syncId: id | ||
}); | ||
@@ -195,48 +293,91 @@ return; | ||
} | ||
static _lookUpInMap(id, resolve, reject, autoSync){ | ||
let s = Storage._s, | ||
kv = id.split('_'), | ||
_lookUpInMap(params) { | ||
let me = this, | ||
s = me._s, | ||
m = me._m, | ||
ret; | ||
if(m[id] !== undefined){ | ||
if(isBrowser){ | ||
ret = s.getItem('map_' + m[id]); | ||
Storage._loadMapItem(ret, kv, resolve, reject, autoSync); | ||
let { key, id } = params; | ||
let newId = me._getId(key, id); | ||
if(me.enableCache && me.cache[newId]) { | ||
ret = me.cache[newId]; | ||
me._loadMapItem( {ret, ...params } ); | ||
} | ||
else if(m[newId] !== undefined) { | ||
if(me.isBrowser) { | ||
ret = s.getItem('map_' + m[newId]); | ||
me._loadMapItem( {ret, ...params } ); | ||
} | ||
else{ | ||
s.getItem('map_' + m[id]).then( ret => { | ||
Storage._loadMapItem(ret, kv, resolve, reject, autoSync); | ||
else { | ||
s.getItem('map_' + m[newId]).then( ret => { | ||
me._loadMapItem( {ret, ...params } ); | ||
}) | ||
} | ||
} | ||
else{ | ||
Storage._noItemFound(kv, resolve, reject, autoSync); | ||
else { | ||
me._noItemFound( {ret, ...params } ); | ||
} | ||
} | ||
static load(id, global, autoSync = true){ | ||
let s = Storage._s, | ||
ret; | ||
remove(params) { | ||
let me = this, | ||
m = me._m, | ||
s = me._s; | ||
let { key, id } = params; | ||
if(!m) { | ||
me.readyQueue.push({ | ||
method: 'remove', | ||
params | ||
}); | ||
} | ||
else if(id === undefined) { | ||
if(me.enableCache && me.cache[key]) { | ||
delete me.cache[key]; | ||
} | ||
s.removeItem(key); | ||
} | ||
else { | ||
//join key and id | ||
let newId = me._getId(key, id); | ||
//remove existed data | ||
if(m[newId]) { | ||
if(me.enableCache && me.cache[newId]) { | ||
delete me.cache[newId]; | ||
} | ||
delete m[newId]; | ||
s.removeItem('map_' + m[newId]); | ||
} | ||
} | ||
} | ||
load(params) { | ||
let me = this, | ||
m = me._m; | ||
let { key, id, autoSync, syncInBackground } = params; | ||
if(autoSync === undefined) { | ||
autoSync = true; | ||
} | ||
if(syncInBackground === undefined) { | ||
syncInBackground = true; | ||
} | ||
let promise = new Promise((resolve, reject) => { | ||
if(global){ | ||
if(isBrowser){ | ||
ret = s.getItem(id); | ||
Storage._loadGlobalItem(id, ret, resolve, reject); | ||
if(id === undefined) { | ||
if(!m) { | ||
me.readyQueue.push({ | ||
method: '_lookupGlobalItem', | ||
params: {key, resolve, reject, autoSync, syncInBackground} | ||
}); | ||
} | ||
else{ | ||
s.getItem(id).then(ret => { | ||
Storage._loadGlobalItem(id, ret, resolve, reject); | ||
}) | ||
else { | ||
me._lookupGlobalItem({key, resolve, reject, autoSync, syncInBackground}); | ||
} | ||
} | ||
else{ | ||
Storage._lookUpInMap(id, resolve, reject, autoSync); | ||
//if(isBrowser){ | ||
// let m = JSON.parse(s.getItem('map')); | ||
// Storage._lookUpInMap(m, id, resolve, reject, autoSync); | ||
//} | ||
//else{ | ||
// s.getItem('map').then( m => { | ||
// Storage._lookUpInMap(JSON.parse(m), id, resolve, reject, autoSync); | ||
// }) | ||
//} | ||
else { | ||
if(!m) { | ||
me.readyQueue.push({ | ||
method: '_lookUpInMap', | ||
params: {key, id, resolve, reject, autoSync, syncInBackground} | ||
}); | ||
} | ||
else { | ||
me._lookUpInMap({key, id, resolve, reject, autoSync, syncInBackground}); | ||
} | ||
} | ||
@@ -246,56 +387,11 @@ }); | ||
} | ||
static clearMap(){ | ||
let s = Storage._s; | ||
clearMap(){ | ||
let me = this, | ||
s = me._s; | ||
s.removeItem('map'); | ||
m = { index: 0 }; | ||
//var s = Storage._s, | ||
// m = JSON.parse(s.getItem('map')); | ||
//if(m){ | ||
// for(var i = 1;i <= m.index;i++){ | ||
// s.removeItem('map_' + i); | ||
// } | ||
// m.index = 0; | ||
//} | ||
me._m = { | ||
innerVersion: me._innerVersion, | ||
index: 0 | ||
}; | ||
} | ||
static getGlobal(key){ | ||
if(!Storage.cache[key]){ | ||
Storage.load(key, true).then(ret => { | ||
Storage.cache[key] = ret; | ||
}) | ||
} | ||
return Storage.cache[key]; | ||
} | ||
static setGlobal(key, value, expires = null){ | ||
Storage.save(key, value, true, expires); | ||
Storage.cache[key] = value; | ||
} | ||
static removeGlobal(key){ | ||
Storage._s.removeItem(key); | ||
delete Storage.cache[key]; | ||
} | ||
} | ||
Storage.sync = { | ||
//TODO implement your own sync methods like the following. | ||
//TODO Do not forget append **resolve** and **reject** | ||
//user(id, resolve, reject){ | ||
// fetch('user/', { | ||
// method: 'GET', | ||
// body: 'id=' + id | ||
// }).then(function(response) { | ||
// return response.json(); | ||
// }).then(function(data) { | ||
// //console.info(data); | ||
// if(data && data.user){ | ||
// data = data.user; | ||
// Storage.save('user_' + data.id, data); | ||
// resolve && resolve(data); | ||
// } | ||
// else{ | ||
// reject && reject(); | ||
// } | ||
// }).catch((error) => { | ||
// console.warn(error); | ||
// }); | ||
//} | ||
} |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
No tests
Quality: Package does not have any tests. This is a strong signal of a poorly maintained or low quality package.
Found 1 instance in 1 package