// Build an inline worker by serializing the function into a Blob URL.
function createWorker(main) {
  var blob = new Blob(
    ["(" + main.toString() + ")(self)"],
    {type: "text/javascript"}
  );
  return new Worker(window.URL.createObjectURL(blob));
}
// Worker: posts a timestamp back to the main thread every 250 ms.
var worker = createWorker(function(self) {
  setInterval(function() {
    self.postMessage(Date.now());
  }, 250);
});
worker.onmessage = function(e) {
  console.log("Worker: " + e.data);
};
This works as expected.

t2 has 3 million rows and t4 has 3 million rows, so in total I have 6 million records. For some reason my temp space keeps filling up, so what I want is to commit after every 10,000 records inserted.
How can I do this?
Answer 0 (score: 0)
I would use BULK COLLECT and FORALL.
See Incremental Commit Processing with FORALL and Bulk Processing with BULK COLLECT and FORALL for examples that can be adapted to your case.
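As a rough illustration of the incremental-commit pattern those articles describe, here is a minimal sketch. The source table t2 comes from the question, but the target table t_target is a hypothetical name, since the question does not show the actual DDL; adapt the names and columns to your schema.

DECLARE
  -- Assumes t_target has the same column layout as t2.
  CURSOR c_src IS
    SELECT * FROM t2;
  TYPE t_rows IS TABLE OF t2%ROWTYPE;
  l_rows t_rows;
BEGIN
  OPEN c_src;
  LOOP
    -- Fetch at most 10,000 rows per round trip.
    FETCH c_src BULK COLLECT INTO l_rows LIMIT 10000;
    EXIT WHEN l_rows.COUNT = 0;
    -- Insert the whole batch with one FORALL statement.
    FORALL i IN 1 .. l_rows.COUNT
      INSERT INTO t_target VALUES l_rows(i);
    -- Commit after each 10,000-row batch so undo/temp space is released.
    COMMIT;
  END LOOP;
  CLOSE c_src;
END;
/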
Answer 1 (score: 0)
Hope this code helps.
--Test data creation
CREATE TABLE TEST_SO_BULK
AS
SELECT LEVEL COL1,'AVRAJIT'||LEVEL COL2 FROM DUAL
CONNECT BY LEVEL < 100000;
--Create another table to insert
CREATE TABLE TEST_SO1
AS
SELECT * FROM TEST_SO_BULK
WHERE 1=2;
--Bulk collect with limit clause
set serveroutput on;
DECLARE
  TYPE lv IS TABLE OF TEST_SO_BULK%ROWTYPE;
  lv_tab lv;
  CURSOR lvsql IS
    SELECT * FROM TEST_SO_BULK;
BEGIN
  OPEN lvsql;
  LOOP
    -- Fetch at most 10,000 rows per iteration.
    FETCH lvsql BULK COLLECT INTO lv_tab LIMIT 10000;
    EXIT WHEN lv_tab.COUNT = 0;   -- stop when nothing is left to process
    dbms_output.put_line(lv_tab.COUNT);
    -- Insert the whole batch with a single FORALL statement.
    FORALL i IN 1..lv_tab.COUNT
      INSERT INTO TEST_SO1 VALUES
      (
        lv_tab(i).col1,
        lv_tab(i).col2
      );
    -- Commit after each batch so undo/temp space is released incrementally.
    COMMIT;
  END LOOP;
  CLOSE lvsql;
END;
/
--Check data count
select count(1) from test_so1;
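A note on the design choice: the LIMIT value on the FETCH is what sets the batch size, so 10000 matches the requirement of committing every 10,000 inserted rows. Committing inside the loop keeps undo/temp pressure low, but it also means a failure mid-run leaves the target table partially loaded, so the load should be written so it can be safely restarted or resumed.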