I have started playing with the idea of testing webpage load times using R. I put together a small R function to do it:
page.load.time <- function(theURL, N = 10, wait_time = 0.05)
{
    require(RCurl)
    require(XML)
    TIME <- numeric(N)
    for(i in seq_len(N))
    {
        Sys.sleep(wait_time)    # pause between requests so the server isn't hammered
        # time the full download; [3] picks the "elapsed" (wall-clock) component
        TIME[i] <- system.time(webpage <- getURL(theURL, header = FALSE,
                                                 verbose = TRUE))[3]
    }
    return(TIME)
}
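For example, to collect ten timings of a page and get a quick feel for their spread (a hypothetical call, assuming the site is reachable):

    times <- page.load.time("http://www.r-bloggers.com/")
    summary(times)    # elapsed seconds per fetch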
I would welcome your help with it in several ways:
- Is it possible to do this in a way that also shows which parts of the page took how long to load? (something like Yahoo!'s YSlow)
- I sometimes get the following error:

      Error in curlPerform(curl = curl, .opts = opts, .encoding = .encoding) :
        Failure when receiving data from the peer
      Timing stopped at: 0.03 0 43.72

  Any suggestions on what causes it, and how to catch such errors and discard them? (A sketch of the direction I have in mind follows this list.)
- Can you think of ways to improve the above function?
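On the error point, here is a minimal, untested sketch of what I mean by "catch and discard": wrap the timed fetch in tryCatch() so a failed request yields NA instead of stopping the whole run.

    safe.load.time <- function(theURL) {
        require(RCurl)
        # a curl failure (e.g. "Failure when receiving data from the peer")
        # becomes NA rather than aborting the loop
        tryCatch(system.time(getURL(theURL, header = FALSE))[3],
                 error = function(e) NA)
    }

Dropping safe.load.time() into the loop above, in place of the direct getURL() call, would let the remaining repetitions continue after a failed request.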
Update: I have reworked the function. It is now painfully slow...
one.page.load.time <- function(theURL, HTML = TRUE, JavaScript = TRUE, Images = TRUE, CSS = TRUE)
{
    require(RCurl)
    require(XML)
    require(stringr)    # for str_detect() below
    TIME <- NULL
    # the parsed page is needed to locate the scripts/images/CSS,
    # so always parse it (and only report the timing if HTML = TRUE)
    TIME["HTML"] <- system.time(doc <- htmlParse(theURL))[3]
    if(!HTML) TIME["HTML"] <- NA
    if(JavaScript) {
        theJS <- xpathSApply(doc, "//script/@src")    # find all JavaScript files
        theJS <- getRelativeURL(theJS, theURL)    # resolve relative src paths against the page URL
        TIME["JavaScript"] <- system.time(for(f in theJS) getBinaryURL(f))[3]
    } else { TIME["JavaScript"] <- NA }
    if(Images) {
        theIMG <- xpathSApply(doc, "//img/@src")    # find all image files
        theIMG <- getRelativeURL(theIMG, theURL)
        TIME["Images"] <- system.time(for(f in theIMG) getBinaryURL(f))[3]
    } else { TIME["Images"] <- NA }
    if(CSS) {
        theCSS <- xpathSApply(doc, "//link/@href")    # find all "link" types
        theCSS <- theCSS[str_detect(tolower(theCSS), fixed(".css"))]    # keep only the CSS files
        theCSS <- getRelativeURL(theCSS, theURL)
        TIME["CSS"] <- system.time(for(f in theCSS) getBinaryURL(f))[3]
    } else { TIME["CSS"] <- NA }
    return(TIME)
}
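I suspect much of the slowness comes from fetching every component with its own blocking request. If so, RCurl's getURIAsynchronous(), which downloads a vector of URLs concurrently, might help; a rough, untested sketch:

    # time one concurrent batch of component URLs instead of
    # one blocking getBinaryURL() call per file
    batch.load.time <- function(urls) {
        require(RCurl)
        system.time(getURIAsynchronous(urls))[3]
    }

And the wrapper that repeats the whole measurement N times: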
page.load.time <- function(theURL, N = 3, wait_time = 0.05, ...)
{
    require(RCurl)
    require(XML)
    require(plyr)
    TIME <- vector("list", N)
    for(i in seq_len(N))
    {
        Sys.sleep(wait_time)    # pause between repetitions
        TIME[[i]] <- one.page.load.time(theURL, ...)
    }
    # stack the N named vectors into a data frame, one row per repetition
    TIME <- data.frame(URL = theURL, ldply(TIME, function(x) x))
    return(TIME)
}
a <- page.load.time("http://www.r-bloggers.com/", 2)
a
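If everything works, a should be a data frame with one row per repetition and columns URL, HTML, JavaScript, Images and CSS, holding the elapsed seconds for each component type (and NA for any type that was switched off).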