7

有没有办法增加 chrome.storage.sync.QUOTA_BYTES_PER_ITEM ?

对我来说,默认的 4096 字节有点短。

我试图执行

chrome.storage.sync.QUOTA_BYTES_PER_ITEM = 8192;

但是,实际限制似乎没有改变。

我怎样才能做到这一点?

4

2 回答 2

9

不,QUOTA_BYTES_PER_ITEM 仅供参考;它不是一个可设置的值。但是,您可以利用 QUOTA_BYTES_PER_ITEM 的值,将一个项目拆分为多个项目来存储:

/**
 * Stores `objectToStore` in chrome.storage.sync, split into chunks that each
 * fit within QUOTA_BYTES_PER_ITEM. Chunks are stored under keys `key_0`,
 * `key_1`, ... (per-item quota counts the key length plus the JSON
 * stringification of the value).
 *
 * @param {string} key - base key; chunk keys are derived from it
 * @param {*} objectToStore - any JSON-serializable value
 * @param {function} callback - passed through to chrome.storage.sync.set
 * @throws {Error} if the quota is too small to store even one character
 */
function syncStore(key, objectToStore, callback) {
    var jsonstr = JSON.stringify(objectToStore);
    var i = 0;
    var storageObj = {};

    // split jsonstr into chunks and store them in an object indexed by `key_i`
    while (jsonstr.length > 0) {
        var index = key + "_" + i++;

        // since the key uses up some per-item quota, see how much is left for
        // the value; also trim off 2 for the quotes added by storage-time
        // `stringify`
        var maxValueLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;

        var valueLength = Math.min(jsonstr.length, maxValueLength);
        var segment = jsonstr.substr(0, valueLength);

        // Characters that `stringify` escapes (quotes, backslashes, control
        // chars) grow the serialized form, so shrink the segment until the
        // stringified value fits the FIXED budget (quota minus key length).
        // The original compared against the decremented `valueLength` itself;
        // the two quote chars added by `stringify` then kept the condition
        // true forever for a full chunk, producing an infinite loop.
        while (valueLength > 0 && JSON.stringify(segment).length > maxValueLength + 2) {
            segment = jsonstr.substr(0, --valueLength);
        }

        if (valueLength === 0) {
            // quota too small to hold even a single character under this key
            throw new Error("syncStore: QUOTA_BYTES_PER_ITEM too small for key " + index);
        }

        storageObj[index] = segment;
        jsonstr = jsonstr.substr(valueLength);
    }

    // store all the chunks
    chrome.storage.sync.set(storageObj, callback);
}

然后编写一个类似的 fetch 函数,通过键获取并将对象重新粘合在一起。

于 2012-11-14T15:04:53.360 回答
0

只需修改 @apsillers 的答案

/**
 * Stores `objectToStore` in chrome.storage.sync split into quota-sized chunks
 * under keys `key_0`, `key_1`, ..., plus a meta record under `key` itself of
 * the form { count, prefix } that the companion `syncGet` uses to find and
 * reassemble the chunks.
 *
 * NOTE(review): the transcribed original was truncated — the function body was
 * never closed, `chrome.storage.sync.set` was never called, and the meta
 * record that `syncGet` reads (`data[key].count` / `data[key].prefix`) was
 * never written. This completes the function accordingly.
 *
 * @param {string} key - base key; chunk keys and the meta record derive from it
 * @param {*} objectToStore - any JSON-serializable value
 */
function syncStore(key, objectToStore) {
    var jsonstr = JSON.stringify(objectToStore);
    var i = 0;
    var storageObj = {};

    // split jsonstr into chunks and store them in an object indexed by `key_i`
    while (jsonstr.length > 0) {
        var index = key + "_" + i++;

        // since the key uses up some per-item quota, see how much is left for
        // the value; also trim off 2 for quotes added by storage-time `stringify`
        const maxLength = chrome.storage.sync.QUOTA_BYTES_PER_ITEM - index.length - 2;
        var valueLength = jsonstr.length;
        if (valueLength > maxLength) {
            valueLength = maxLength;
        }

        // trim down segment so it is small enough even after the storage-time
        // `JSON.stringify` (escaped characters expand); iteration count is
        // capped at QUOTA_BYTES_PER_ITEM to avoid any chance of an infinite loop
        var segment = jsonstr.substr(0, valueLength);
        for (let tries = 0; tries < chrome.storage.sync.QUOTA_BYTES_PER_ITEM; tries++) {
            const jsonLength = JSON.stringify(segment).length;
            if (jsonLength > maxLength) {
                segment = jsonstr.substr(0, --valueLength);
            } else {
                break;
            }
        }

        storageObj[index] = segment;
        jsonstr = jsonstr.substr(valueLength);
    }

    // meta record so syncGet can rebuild the chunk key list
    storageObj[key] = { count: i - 1, prefix: key + "_" };

    // persist all chunks plus the meta record in one call
    chrome.storage.sync.set(storageObj);
}

还可以读取每个分区并再次合并

/**
 * Reads back a value stored by the chunked `syncStore`: fetches the meta
 * record under `key` ({ count, prefix }), then fetches chunks
 * `prefix0` ... `prefixcount`, concatenates them in index order, and parses
 * the result.
 *
 * @param {string} key - base key whose meta record locates the chunks
 * @param {function} callback - receives the parsed object, or `undefined`
 *   when the key, the meta record, or any chunk is missing
 */
function syncGet(key, callback) {
    chrome.storage.sync.get(key, (data) => {
        const meta = data && data[key];
        // The original guard included `data != {}`, which compares against a
        // fresh object literal and is therefore always true; validate the
        // fields this function actually uses instead.
        if (!meta || typeof meta.count !== "number" || typeof meta.prefix !== "string") {
            callback(undefined);
            return;
        }

        // build the full chunk key list from the meta record
        const keyArr = [];
        for (let i = 0; i <= meta.count; i++) {
            keyArr.push(`${meta.prefix}${i}`);
        }

        chrome.storage.sync.get(keyArr, (items) => {
            // join chunks strictly in index order; the original re-derived the
            // prefix from Object.keys(items)[0] and joined by key count, which
            // silently produced garbage when a chunk was missing
            let results = "";
            for (let i = 0; i <= meta.count; i++) {
                const segment = items[`${meta.prefix}${i}`];
                if (segment === undefined) {
                    callback(undefined);
                    return;
                }
                results += segment;
            }
            callback(JSON.parse(results));
        });
    });
}

它经过测试,适用于我的情况

于 2021-07-18T09:25:17.023 回答