
I have recently started testing the OTL library against SQL Server using Visual Studio 2013. My tests show that a simple SELECT over a table of 10,000 rows runs about 40% slower than a similar .NET 4.0 test application. All tests were performed with all optimizations turned on for both platforms.

Both apps perform the following tasks:

- open the DB connection
- create (and reserve space for) the container object
- execute the SELECT statement
- for each record fetched from the DB, create an entity using the DB stream/reader object and add it to the container
- close

The .NET C# app needs 0.5 s to complete this task, while the OTL C++ app takes 0.7 s. Is it possible to optimize the C++ app to perform faster?

Snippet of C++ code:

#define OTL_ODBC_MSSQL_2008     // Compile OTL 4/ODBC, MS SQL 2008
#define OTL_CPP_11_ON
#define OTL_STL                 // Turn on STL features
#define OTL_ANSI_CPP            // Turn on ANSI C++ typecasts
#define OTL_UNICODE             // Enable Unicode OTL for ODBC
#include "otlv4.h"    
 class Employee

{
private:
    int employeeId;
    wstring regno;
    wstring name;
    wstring surname;

public:
    Employee()
        : employeeId(0)   // avoid an uninitialized id in default-constructed objects
    {
    }

    // Construct an Employee by reading the next row's columns from the stream.
    // With OTL_UNICODE, strings are fetched into unsigned short buffers and
    // then converted to wstring via wchar_t (16-bit on Windows).
    Employee(otl_stream& stream)
    {
        unsigned short _regno[32];
        unsigned short _name[32];
        unsigned short _surname[32];

        if (!stream.is_null())
        {
            stream >> employeeId;
        }

        if (!stream.is_null())
        {
            stream >> (unsigned char*)_regno;
            regno = (wchar_t*)_regno;
        }

        if (!stream.is_null())
        {
            stream >> (unsigned char*)_name;
            name = (wchar_t*)_name;
        }

        if (!stream.is_null())
        {
            stream >> (unsigned char*)_surname;
            surname = (wchar_t*)_surname;
        }
    }

    int GetEmployeeId() const
    {
        return employeeId;
    }
};



otl_connect db;
int main()
{
    otl_connect::otl_initialize();
    try
    {
        // connect
        db.rlogon("DSN=SQLODBC");

        // start timing
        clock_t begin = clock();
        otl_stream i(10000, "SELECT Id, Field1, Field2, Field3 FROM Test", db);

        // create container
        vector<Employee> employeeList;
        employeeList.reserve(10000);

        // iterate and fill container
        while (!i.eof())
        {
            Employee employee(i);
            employeeList.push_back(employee);
        }
        i.close();

        // stop timing and report
        size_t size = employeeList.size();
        clock_t end = clock();
        double elapsed_secs = double(end - begin) / CLOCKS_PER_SEC;
        cout << "Total records: " << size << endl;
        cout << "Time elapsed to read all records: " << elapsed_secs << endl;
    }

    catch (otl_exception& p){ // intercept OTL exceptions
        cerr << p.msg << endl; // print out error message
        cerr << p.stm_text << endl; // print out SQL that caused the error
        cerr << p.sqlstate << endl; // print out SQLSTATE message
        cerr << p.var_info << endl; // print out the variable that caused the error
    }

    db.logoff();
    return EXIT_SUCCESS;
}

1 Answer


I don't think so. When you look at OTL's source code, it uses the ODBC API for SQL Server and is optimized only as a layer on top of ODBC. For performance reasons, .NET 4.0 talks to SQL Server through its native SQL driver API rather than through the ODBC API.

Also, if you don't pre-allocate the memory you consume, you will always lose to .NET and Java because of the SysAllocMem calls. It is like measuring 4000 calls to SysAlloc against a single call to SysAlloc. Your performance problem is directly tied to these allocations.
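Along those lines, here is a minimal sketch of how the read loop could cut per-row work: reuse the fetch buffers across rows and construct each Employee in place with emplace_back, so each row costs only the three string allocations instead of an additional full copy of the object. The readEmployees helper name and the assumption that the columns are NOT NULL are mine, not from the original post:

#define OTL_ODBC_MSSQL_2008
#define OTL_CPP_11_ON
#define OTL_STL
#define OTL_ANSI_CPP
#define OTL_UNICODE
#include "otlv4.h"

#include <string>
#include <vector>

struct Employee
{
    int employeeId;
    std::wstring regno, name, surname;

    Employee(int id, const wchar_t* r, const wchar_t* n, const wchar_t* s)
        : employeeId(id), regno(r), name(n), surname(s) {}
};

// Hypothetical helper: fetch all rows with one reserve() and no
// intermediate Employee copies. Assumes the columns are NOT NULL,
// so the per-field is_null() checks are omitted.
std::vector<Employee> readEmployees(otl_connect& db)
{
    std::vector<Employee> list;
    list.reserve(10000);                  // single container allocation up front

    otl_stream in(10000, "SELECT Id, Field1, Field2, Field3 FROM Test", db);

    int id;
    unsigned short regno[32], name[32], surname[32];   // reused for every row
    while (!in.eof())
    {
        in >> id;
        in >> (unsigned char*)regno;      // OTL_UNICODE string fetch
        in >> (unsigned char*)name;
        in >> (unsigned char*)surname;

        // Construct the element in place; only the three wstring
        // allocations remain as per-row heap work.
        list.emplace_back(id,
                          (const wchar_t*)regno,
                          (const wchar_t*)name,
                          (const wchar_t*)surname);
    }
    return list;
}

Whether this closes the remaining gap is another question, since part of it comes from the driver layer itself, as noted above.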

Answered 2015-03-25T12:08:13.230