SELECT tb1.id
FROM table1 AS tb1
INNER JOIN table2 AS tb2 ON tb1.id = tb2.id
INNER JOIN table3 AS tb3 ON tb1.id = tb3.id
WHERE tb1.item_id = '1'
  AND tb1.user_id = '20'
  AND tb1.type IN ('UPDATE1','UPDATE2','UPDATE3')
  AND tb1.status = 'DELIVERED'
ORDER BY tb1.date DESC
LIMIT 100;
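For reference, the optimizer's plan for this query can be inspected by prefixing it with EXPLAIN (output not included here):

EXPLAIN SELECT tb1.id
FROM table1 AS tb1
INNER JOIN table2 AS tb2 ON tb1.id = tb2.id
INNER JOIN table3 AS tb3 ON tb1.id = tb3.id
WHERE tb1.item_id = '1'
  AND tb1.user_id = '20'
  AND tb1.type IN ('UPDATE1','UPDATE2','UPDATE3')
  AND tb1.status = 'DELIVERED'
ORDER BY tb1.date DESC
LIMIT 100;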
CREATE TABLE `table1` (
`id` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT,
`user_id` INT(11) UNSIGNED NOT NULL,
`item_id` INT(11) UNSIGNED NULL DEFAULT NULL,
`source` ENUM('CPAS','UNIQUE_KEY','BILLING_PARTNER','GAME','MERCURY') NOT NULL,
`date` DATETIME NOT NULL,
`status` ENUM('PENDING','DELIVERED','FAILED','REFUNDED') NOT NULL,
`source_transaction_id` VARCHAR(127) NULL DEFAULT NULL,
`type` ENUM('UPDATE1','UPDATE2','UPDATE3','UPDATE4') NULL DEFAULT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `table2` (
`id_p` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT,
`id` INT(11) UNSIGNED NOT NULL,
`amount` DECIMAL(18,2) NOT NULL,
`old_balance` DECIMAL(18,2) NULL DEFAULT NULL,
`description` VARCHAR(255) NULL DEFAULT NULL,
`date` DATETIME NULL DEFAULT NULL,
`wallet_currency_id` INT(11) NULL DEFAULT NULL,
`wallet_currency_code` VARCHAR(50) NULL DEFAULT NULL,
`wallet_currency_name` VARCHAR(100) NULL DEFAULT NULL,
`type` ENUM('GAIN','SPENT') NULL DEFAULT NULL,
PRIMARY KEY (`id_p`),
INDEX `id` (`id`)
);
CREATE TABLE `table3` (
`id_p` INT(11) UNSIGNED NOT NULL AUTO_INCREMENT,
`id` INT(11) UNSIGNED NOT NULL,
`amount` DECIMAL(18,2) NOT NULL,
`old_balance` DECIMAL(18,2) NULL DEFAULT NULL,
`description` VARCHAR(255) NULL DEFAULT NULL,
`date` DATETIME NULL DEFAULT NULL,
`wallet_currency_id` INT(11) NULL DEFAULT NULL,
`wallet_currency_code` VARCHAR(50) NULL DEFAULT NULL,
`wallet_currency_name` VARCHAR(100) NULL DEFAULT NULL,
`type` ENUM('GAIN','SPENT') NULL DEFAULT NULL,
PRIMARY KEY (`id_p`),
INDEX `id` (`id`)
);
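Note that table1 currently has no secondary index on the WHERE columns, only the primary key. One idea I had (not applied yet; the index name and column order below are just my guess) is a composite index with the equality columns first, then the IN column, then the ORDER BY column:

ALTER TABLE `table1`
  ADD INDEX `idx_user_item_status_type_date` (`user_id`, `item_id`, `status`, `type`, `date`);

I am not sure how much this helps on NDB, since NDB ordered indexes are in-memory T-tree structures, and the IN () on type probably still prevents the index from satisfying the ORDER BY without a filesort.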
What optimizations are possible for the above query? table1 contains more than 500,000 rows; table2 and table3 can each have more than 100,000 rows.
For a particular player and game, table1 alone can have more than 100,000 rows matching the query.
Is the above query OK for tables this large, or should I split it into multiple queries (something like the two-step version sketched below)?
The tables use the NDB storage engine.
Please suggest possible optimizations.
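To clarify what I mean by splitting, roughly this two-step approach (a sketch only; the ids in step 2 are placeholders that application code would fill in from the step 1 result):

-- Step 1: filter and sort on table1 alone
SELECT tb1.id
FROM table1 AS tb1
WHERE tb1.item_id = '1'
  AND tb1.user_id = '20'
  AND tb1.type IN ('UPDATE1','UPDATE2','UPDATE3')
  AND tb1.status = 'DELIVERED'
ORDER BY tb1.date DESC
LIMIT 100;

-- Step 2: fetch the related rows for those ids
SELECT * FROM table2 WHERE id IN (1, 2, 3);  -- placeholder ids from step 1
SELECT * FROM table3 WHERE id IN (1, 2, 3);  -- placeholder ids from step 1

One difference from the joined version: the original INNER JOINs also drop table1 rows that have no match in table2/table3, so the split version is not exactly equivalent.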
Thanks, Shiv