I am trying to scrape http://www.nscb.gov.ph/ggi/database.asp, specifically all the tables you get when selecting a municipality/province. I am using Python with lxml.html and mechanize. So far my scraper works fine, but I get HTTP Error 500: Internal Server Error when submitting municipality [19], "Peñarrubia, Abra". I suspect this is caused by character encoding; my guess is that the eñe (the n with a tilde) triggers the problem. How can I fix this?
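To check whether the eñe really is the culprit, a quick diagnostic sketch (not part of the script below) would be to print the raw bytes mechanize reads for each option value of the strMunicipality2 select, since repr() makes the byte-level encoding visible:

import mechanize

br = mechanize.Browser()
br.open("http://www.nscb.gov.ph/ggi/database.asp")
br.select_form(name="form2")
# repr() exposes the raw bytes of each option value, so a multi-byte
# encoding of the enye (e.g. '\xc3\xb1' for UTF-8 vs '\xf1' for Latin-1)
# would show up directly
for pos, item in enumerate(br.find_control("strMunicipality2", type="select").items):
    print pos, repr(item.name)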
A working example of this part of my script is shown below. Since I have only just started with Python (and often use snippets I find on SO), any further comments are very welcome.
from BeautifulSoup import BeautifulSoup
import mechanize
import lxml.html
import csv
class PrettifyHandler(mechanize.BaseHandler):
    def http_response(self, request, response):
        if not hasattr(response, "seek"):
            response = mechanize.response_seek_wrapper(response)
        # only use BeautifulSoup if response is html
        if response.info().dict.has_key('content-type') and ('html' in response.info().dict['content-type']):
            soup = BeautifulSoup(response.get_data())
            response.set_data(soup.prettify())
        return response
site = "http://www.nscb.gov.ph/ggi/database.asp"
output_mun = csv.writer(open(r'output-municipalities.csv','wb'))
output_prov = csv.writer(open(r'output-provinces.csv','wb'))
br = mechanize.Browser()
br.add_handler(PrettifyHandler())
# gets municipality stats
response = br.open(site)
br.select_form(name="form2")
muns = br.find_control("strMunicipality2", type="select").items
# municipality #19 is not working, those before do
for pos, item in enumerate(muns[19:]):
    br.select_form(name="form2")
    br["strMunicipality2"] = [item.name]
    print pos, item.name
    response = br.submit(id="button2", type="submit")
    html = response.read()
    root = lxml.html.fromstring(html)
    table = root.xpath('//table')[1]
    data = [
        [td.text_content().strip() for td in row.findall("td")]
        for row in table.findall("tr")
    ]
    print data, "\n"
    for row in data[2:]:
        if row:
            row.append(item.name)
            output_mun.writerow([s.encode('utf8') if type(s) is unicode else s for s in row])
    response = br.open(site)  # go back button not working
# provinces follow here
Thank you very much!
EDIT: To be specific, the error occurs on this line:
response = br.submit(id="button2", type="submit")
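One thing I am considering for the failing entry (a rough, untested sketch): bypass br.submit() and POST the form data myself, so I control how the value is percent-encoded. The choice of windows-1252 as the encoding the ASP page expects is a guess, "button2" as the submit button's name is an assumption, and any hidden fields in form2 (which could be read from br.form after select_form) would need to be added as well.

import urllib
import urllib2

site = "http://www.nscb.gov.ph/ggi/database.asp"

def submit_manually(value):
    # re-encode the option value; windows-1252 is a guess for what the
    # ASP backend expects (UTF-8 would be the other candidate)
    if isinstance(value, unicode):
        value = value.encode('windows-1252')
    # field names taken from form2; 'button2': 'Submit' is an assumption,
    # and any hidden inputs from the form would need to be included too
    post_data = urllib.urlencode({
        'strMunicipality2': value,
        'button2': 'Submit',
    })
    return urllib2.urlopen(urllib2.Request(site, post_data)).read()

Would that be a reasonable direction, or is there a cleaner way to make mechanize send the eñe in the encoding the server expects?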