I have a JSON file of roughly 50 MB containing about 10k records. I'm using the code below to insert them into Joomla programmatically, and it works nicely in the sense that all the related tables, e.g. #_assets, are updated together. The problem is that it is extremely slow: about 40 hours have passed and only around 4k articles have been processed. Is there some way to speed this up?
I do know it would be much faster if I inserted directly into #_content, #_tags, etc., but that route has some fiddly complications I'm trying to avoid (a rough sketch of what I mean is at the end of this post).
<?php
define('_JEXEC', 1);
define('JPATH_BASE', dirname(dirname(__FILE__)));
define('DS', DIRECTORY_SEPARATOR);

// Bootstrap the Joomla framework so the com_content admin model can be used.
require_once (JPATH_BASE . DS . 'includes' . DS . 'defines.php');
require_once (JPATH_BASE . DS . 'includes' . DS . 'framework.php');
require_once (JPATH_BASE . DS . 'libraries' . DS . 'joomla' . DS . 'factory.php');

define('JPATH_COMPONENT_ADMINISTRATOR', JPATH_BASE . DS . 'administrator' . DS . 'components' . DS . 'com_content');

$mainframe = JFactory::getApplication('site');

require_once (JPATH_ADMINISTRATOR . '/components/com_content/models/article.php');

// Load and decode the whole 50 MB JSON file into an array of records.
$string   = file_get_contents("items.json");
$json_str = json_decode($string, true);

foreach ($json_str as $row) {
    // One article per JSON record, saved through the admin model so that
    // #__assets, #__tags, etc. stay in sync.
    $new_article = new ContentModelArticle();

    $data = array(
        'title'            => $row['title'][0],
        'alias'            => $row['alias'],
        'introtext'        => $row['content'],
        'state'            => 1,
        'catid'            => 8, /* don't hard code here! */
        'created'          => $row['pdate'][0],
        'created_by'       => 798,
        'created_by_alias' => $row['poster'][0],
        'publish_up'       => $row['pdate'][0],
        'urls'             => $row['urls'],
        'access'           => 1,
        'metadata'         => array(
            'tags'       => $row['tags'],
            'robots'     => "",
            'author'     => implode(" ", $row['poster']),
            'rights'     => "",
            'xreference' => "",
        ),
    );

    $new_article->save($data);
}
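
For reference, this is roughly what the direct-insert route mentioned above would look like (just a sketch, assuming the same Joomla bootstrap and $json_str as in the script above, and using JDatabaseDriver::insertObject). It writes only to #__content, which is exactly the complication: #__assets, #__tags and the rest are not touched.

$db = JFactory::getDbo();

foreach ($json_str as $row) {
    $article = new stdClass();
    $article->title      = $row['title'][0];
    $article->alias      = $row['alias'];
    $article->introtext  = $row['content'];
    $article->state      = 1;
    $article->catid      = 8;
    $article->created    = $row['pdate'][0];
    $article->created_by = 798;
    $article->access     = 1;
    $article->language   = '*';
    // Several #__content text columns are NOT NULL without a default,
    // so a raw insert needs explicit (here: empty) values for them.
    $article->fulltext = '';
    $article->images   = '';
    $article->urls     = '';
    $article->attribs  = '';
    $article->metakey  = '';
    $article->metadesc = '';
    $article->metadata = '';

    // One plain INSERT into #__content per record. Fast, but it skips
    // everything ContentModelArticle::save() does for #__assets,
    // #__tags and so on, which is what I'm trying to avoid dealing with.
    $db->insertObject('#__content', $article, 'id');
}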