
I'm running into a problem with StreamWriter while writing a scraper for a current project of mine. The loop I've coded is below.

I've debugged all the variables going into the loop and everything is set correctly. When I pass in the URL and the range to search (based on the ID GET variable in the URL), it fails to write the second sourceCode string.

Can someone tell me whether I'm forgetting to flush something, or whether something else is going on here?

I've racked my brains trying to find the root cause, but it's proving very stubborn.

using System;
using System.IO;
using System.Windows.Forms;

namespace Scraper
{
    public partial class Form1 : Form
    {
        Scraper scraper = new Scraper();
        private StreamWriter sw;

        public Form1()
        {
            InitializeComponent();
        }

        private void button1_Click(object sender, EventArgs e)
        {
            string url = textBox1.Text;
            string[] urlBits = url.Split('.');
            string[] domain = urlBits[2].Split('/');

            string filepath = @"C:\Users\Herbaldinho\Desktop\"+urlBits[1]+"-"+domain[0];
            string parentPath = @"C:\Users\Herbaldinho\Desktop\";
            string newPath = Path.Combine(parentPath, filepath);

            if (File.Exists(filepath))
            {}
            else
            {
                Directory.CreateDirectory(newPath);
            }
            DateTime today = DateTime.Today;
            string curDate = String.Format("{0:ddd-MMM-dd-yyyy}", today);
            string subPath = newPath + "\\" + curDate;
            string newSubPath = Path.Combine(newPath, subPath);

            if (File.Exists(subPath))
            { }
            else
            {
                Directory.CreateDirectory(newSubPath);
            }

            string lower = textBox2.Text;
            int lowerValue;
            int.TryParse(lower, out lowerValue);

            string upper = textBox3.Text;
            int upperValue;
            int.TryParse(upper, out upperValue);

            int i;
            for (i = lowerValue; i < upperValue; i++)
            {
                string filename = newSubPath+"\\Advert-"+i+".html";
                string adPage = url + i;
                bool write = scraper.UrlExists(adPage);
                if (write)
                {
                    string sourceCode = scraper.getSourceCode(adPage);
                    using (sw = new StreamWriter(filename))
                    {
                        sw.Write(sourceCode);
                    }
                }
            }
            MessageBox.Show("Scrape Complete");

        }
    }
}

#### This is the Scraper Object
using System.Net;

namespace Scraper
{
    class Scraper
    {
        WebClient w = new WebClient();

        public bool UrlExists(string url)
        {
            try
            {
                HttpWebRequest request = WebRequest.Create(url) as HttpWebRequest;
                request.Method = "HEAD";
                HttpWebResponse response = request.GetResponse() as HttpWebResponse;
                return (response.StatusCode == HttpStatusCode.OK);
            }
            catch
            {
                return false;
            }
        }

        public string getSourceCode(string url)
        {
            string s = w.DownloadString(url);
            return s;
        }
    }
}


1 Answer


Found the answer to the problem this morning. For anyone else with a similar issue: the try/catch logic in the UrlExists method needs to close the response (response.Close()). As far as I knew it was closed automatically, but that is not the case. Hope this helps.
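The likely reason this matters: .NET limits the number of concurrent HTTP connections per host, so a response that is never closed keeps its connection tied up and later requests stall until they time out into the catch block. For reference, here is a minimal sketch of what the corrected UrlExists might look like, wrapping the response in a using block (equivalent to calling response.Close() on every path); the rest of the class is unchanged:

using System.Net;

namespace Scraper
{
    class Scraper
    {
        WebClient w = new WebClient();

        public bool UrlExists(string url)
        {
            try
            {
                HttpWebRequest request = WebRequest.Create(url) as HttpWebRequest;
                request.Method = "HEAD";

                // Dispose (and therefore close) the response even if the status
                // check throws, so the underlying connection is released.
                using (HttpWebResponse response = request.GetResponse() as HttpWebResponse)
                {
                    return response.StatusCode == HttpStatusCode.OK;
                }
            }
            catch
            {
                // Any failure (404, timeout, DNS error, ...) is treated as "does not exist".
                return false;
            }
        }

        public string getSourceCode(string url)
        {
            return w.DownloadString(url);
        }
    }
}

Alternatively, keep the original structure and call response.Close() before returning; the effect is the same as long as every code path closes the response.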

Many thanks to everyone who replied and helped me work through this.

Answered 2012-05-14T14:38:23.170