2

我正在尝试将所有 googlebooks-1gram 文件导入 postgresql 数据库。为此,我编写了以下 Java 代码:

public class ToPostgres {

    /**
     * Imports the Google Books 1-gram CSV files (tab-separated columns:
     * word, year, total_count, total_pages, total_books) into PostgreSQL:
     * one row per distinct word in "words", one row per (word, year) in
     * "wordcounts".
     *
     * NOTE(review): auto-commit is left enabled here, so every INSERT runs in
     * its own transaction — presumably the main reason this import is slow.
     * See the batched-commit version below.
     */
    public static void main(String[] args) throws Exception {
        String filePath = "./";
        List<String> files = new ArrayList<String>();
        // The 1-gram data set ships as ten numbered CSV files.
        for (int i =0; i < 10; i++) {
            files.add(filePath+"googlebooks-eng-all-1gram-20090715-"+i+".csv");
        }
        Connection c = null;
        try {
            c = DriverManager.getConnection("jdbc:postgresql://localhost/googlebooks",
                    "postgres", "xxxxxx");
        } catch (SQLException e) {
            e.printStackTrace();
        }

        if (c != null) {
            try {
                PreparedStatement wordInsert = c.prepareStatement(
                    "INSERT INTO words (word) VALUES (?)", Statement.RETURN_GENERATED_KEYS
                );
                PreparedStatement countInsert = c.prepareStatement(
                    "INSERT INTO wordcounts (word_id, \"year\", total_count, total_pages, total_books) " +
                    "VALUES (?,?,?,?,?)"
                );
                String lastWord = "";
                Long lastId = -1L;
                for (String filename: files) {
                    BufferedReader input =  new BufferedReader(new FileReader(new File(filename)));
                    try {
                        String line = "";
                        while ((line = input.readLine()) != null) {
                            String[] data = line.split("\t");
                            Long id = -1L;
                            // The input is sorted by word, so consecutive lines for
                            // the same word reuse the previously generated id instead
                            // of inserting the word again.
                            if (lastWord.equals(data[0])) {
                                id = lastId;
                            } else {
                                wordInsert.setString(1, data[0]);
                                wordInsert.executeUpdate();
                                ResultSet resultSet = wordInsert.getGeneratedKeys();
                                try {
                                    if (resultSet != null && resultSet.next())
                                    {
                                        id = resultSet.getLong(1);
                                    }
                                } finally {
                                    // Fix: the original never closed the ResultSet.
                                    if (resultSet != null) {
                                        resultSet.close();
                                    }
                                }
                            }
                            countInsert.setLong(1, id);
                            countInsert.setInt(2, Integer.parseInt(data[1])); // year
                            countInsert.setInt(3, Integer.parseInt(data[2])); // total_count
                            countInsert.setInt(4, Integer.parseInt(data[3])); // total_pages
                            countInsert.setInt(5, Integer.parseInt(data[4])); // total_books
                            countInsert.executeUpdate();
                            lastWord = data[0];
                            lastId = id;
                        }
                    } finally {
                        // Fix: the original leaked one BufferedReader per file.
                        input.close();
                    }
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

}

然而,当运行这个大约 3 小时时,它只在 wordcounts 表中放置了 1.000.000 个条目。当我检查整个 1 克数据集中的行数时,它是 500.000.000 行。所以导入所有东西大约需要 62.5 天,我可以接受它在大约一周内导入,但是 2 个月?我想我在这里做错了(我确实有一个 24/7 运行的服务器,所以我实际上可以运行这么长时间,但更快会很好 XD)

编辑:这段代码是我解决它的方法:

public class ToPostgres {

    /**
     * Faster variant of the 1-gram import: auto-commit is disabled and work is
     * committed every 10 000 lines, and word ids are generated client-side
     * with a simple counter instead of round-tripping to the database for
     * generated keys after each INSERT.
     */
    public static void main(String[] args) throws Exception {
        String filePath = "./";
        List<String> files = new ArrayList<String>();
        // The 1-gram data set ships as ten numbered CSV files.
        for (int i =0; i < 10; i++) {
            files.add(filePath+"googlebooks-eng-all-1gram-20090715-"+i+".csv");
        }
        Connection c = null;
        try {
            c = DriverManager.getConnection("jdbc:postgresql://localhost/googlebooks",
                    "postgres", "xxxxxx");
        } catch (SQLException e) {
            e.printStackTrace();
        }

        if (c != null) {
            // One transaction per batch instead of one per INSERT.
            c.setAutoCommit(false);
            try {
                PreparedStatement wordInsert = c.prepareStatement(
                    "INSERT INTO words (id, word) VALUES (?,?)"
                );
                PreparedStatement countInsert = c.prepareStatement(
                    "INSERT INTO wordcounts (word_id, \"year\", total_count, total_pages, total_books) " +
                    "VALUES (?,?,?,?,?)"
                );
                String lastWord = "";
                Long id = 0L;
                for (String filename: files) {
                    BufferedReader input =  new BufferedReader(new FileReader(new File(filename)));
                    try {
                        String line = "";
                        int i = 0;
                        while ((line = input.readLine()) != null) {
                            String[] data = line.split("\t");
                            // The input is sorted by word: only insert into "words"
                            // when the word changes, assigning the next id ourselves
                            // (no RETURN_GENERATED_KEYS round trip needed).
                            if (!lastWord.equals(data[0])) {
                                id++;
                                wordInsert.setLong(1, id);
                                wordInsert.setString(2, data[0]);
                                wordInsert.executeUpdate();
                            }
                            countInsert.setLong(1, id);
                            countInsert.setInt(2, Integer.parseInt(data[1])); // year
                            countInsert.setInt(3, Integer.parseInt(data[2])); // total_count
                            countInsert.setInt(4, Integer.parseInt(data[3])); // total_pages
                            countInsert.setInt(5, Integer.parseInt(data[4])); // total_books
                            countInsert.executeUpdate();
                            lastWord = data[0];
                            // Commit every 10 000 lines; print progress every 100 000.
                            if (i % 10000 == 0) {
                                c.commit();
                            }
                            if (i % 100000 == 0) {
                                System.out.println(i+" mark file "+filename);
                            }
                            i++;
                        }
                    } finally {
                        // Fix: the original leaked one BufferedReader per file.
                        input.close();
                    }
                    // Commit whatever remains from the last partial batch.
                    c.commit();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

}

我现在在大约 15 分钟内达到了 150 万行。这对我来说已经足够快了,谢谢大家!

4

6 回答 6

4

JDBC 连接默认启用自动提交,这会产生每个语句的开销。尝试禁用它:

c.setAutoCommit(false)

然后分批提交,类似于:

// Running count of executed statements, used to commit in fixed-size
// batches (requires auto-commit to be disabled on the connection first).
long ops = 0;

for(String filename : files) {
    // ...
    while ((line = input.readLine()) != null) {
        // insert some stuff...

        ops ++;

        // Commit every 1000 operations; tune the batch size to taste.
        if(ops % 1000 == 0) {
            c.commit();
        }
    }
}

// Final commit for whatever remains of the last partial batch.
c.commit();
于 2011-04-09T15:33:06.803 回答
3

如果您的表有索引,则删除它们、插入数据并稍后重新创建索引可能会更快。

设置自动提交关闭,并每隔 10 000 条左右的记录进行一次手动提交(查看文档以获得合理的值 - 有一些限制)也可以加快速度。

自己生成主键/外键并在代码中自行跟踪,应该会比调用 wordInsert.getGeneratedKeys(); 更快,但仅从您提供的内容来看,我不确定这样做是否可行。

有一种方法称为“批量插入”。我不记得细节,但它是搜索的起点。

于 2011-04-09T15:37:01.223 回答
2

编写它来进行线程化,同时运行 4 个线程,或者将其拆分为多个部分(从配置文件中读取)并将其分发到 X 台机器并让它们一起获取数据。

于 2011-04-09T15:22:22.527 回答
0

使用批处理语句同时执行多个插入,而不是一次执行一个 INSERT。

此外,我会删除算法中每次向 words 表插入后都更新字数的部分,改为在所有插入完成后再一次性计算 words 表中的全部字数。

于 2011-04-09T15:34:08.210 回答
0

另一种方法是进行批量插入而不是单次插入。参见这个问题《什么是批量插入 Postgres 的最快方法?》以了解更多信息。

于 2011-04-09T15:36:56.137 回答
0

创建线程

// NOTE(review): as pasted, these fields sit outside any class declaration, so
// this snippet does not compile as-is. More importantly, they are shared by
// every MyThread instance without any synchronization: lastWord/lastId can be
// read stale across threads, and a single PreparedStatement must not be used
// concurrently from multiple threads (JDBC statements are not thread-safe).
// Each thread should own its own Connection and statements — verify before use.
String lastWord = "";
    Long lastId = -1L;
    PreparedStatement wordInsert;
    PreparedStatement countInsert ;
    public class ToPostgres {
        // NOTE(review): declared as an instance method — the JVM entry point
        // must be "public static void main"; as written this will not launch.
        public void main(String[] args) throws Exception {
            String filePath = "./";
            List<String> files = new ArrayList<String>();
            // The 1-gram data set ships as ten numbered CSV files.
            for (int i =0; i < 10; i++) {
                files.add(filePath+"googlebooks-eng-all-1gram-20090715-"+i+".csv");
            }
            Connection c = null;
            try {
                c = DriverManager.getConnection("jdbc:postgresql://localhost/googlebooks",
                        "postgres", "xxxxxx");
            } catch (SQLException e) {
                e.printStackTrace();
            }

            if (c != null) {
                try {
                    wordInsert = c.prepareStatement(
                        "INSERT INTO words (word) VALUES (?)", Statement.RETURN_GENERATED_KEYS
                    );
                    countInsert = c.prepareStatement(
                        "INSERT INTO wordcounts (word_id, \"year\", total_count, total_pages, total_books) " +
                        "VALUES (?,?,?,?,?)"
                    );
                    // One thread per input file, all sharing the statements above.
                    for (String filename: files) {
                        new MyThread(filename). start();
                    }
                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }
        }

    }
    // Worker thread: imports one CSV file using the shared connection/statements.
    // NOTE(review): the closing brace of this class is missing from the paste.
    class MyThread extends Thread{
        String file;
        public MyThread(String file) {
            this.file = file;
        }
        @Override
        public void run() {         
            try {
                super.run();
                // NOTE(review): this reader is never closed (resource leak).
                BufferedReader input =  new BufferedReader(new FileReader(new File(file)));
                String line = "";
                while ((line = input.readLine()) != null) {
                    String[] data = line.split("\t");
                    Long id = -1L;
                    // Reuse the previous word's id when consecutive lines repeat
                    // the word — unreliable here, since lastWord/lastId are
                    // mutated by all threads concurrently.
                    if (lastWord.equals(data[0])) {
                        id = lastId;
                    } else {
                        wordInsert.setString(1, data[0]);
                        wordInsert.executeUpdate();
                        ResultSet resultSet = wordInsert.getGeneratedKeys();
                        if (resultSet != null && resultSet.next()) 
                        {
                            id = resultSet.getLong(1);
                        }
                    }
                    countInsert.setLong(1, id);
                    countInsert.setInt(2, Integer.parseInt(data[1])); // year
                    countInsert.setInt(3, Integer.parseInt(data[2])); // total_count
                    countInsert.setInt(4, Integer.parseInt(data[3])); // total_pages
                    countInsert.setInt(5, Integer.parseInt(data[4])); // total_books
                    countInsert.executeUpdate();
                    lastWord = data[0];
                    lastId = id;
                }
            } catch (NumberFormatException e) {
                e.printStackTrace();
            } catch (FileNotFoundException e) {
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
于 2011-04-09T15:43:36.533 回答