Answer 0: (score: 1)
You can achieve this in a number of ways:
1) Use a PowerShell script: http://blogs.iis.net/carlosag/archive/2008/02/10/using-microsoft-web-administration-in-windows-powershell.aspx
PS C:\> $iis = new-object Microsoft.Web.Administration.ServerManager
PS C:\> $iis.Sites | foreach { $_.Applications | where { $_.ApplicationPoolName -eq 'DefaultAppPool' } | select-object Path, @{Name="AnonymousEnabled"; Expression={ $_.GetWebConfiguration().GetSection("system.webServer/security/authentication/anonymousAuthentication").GetAttributeValue("enabled") }} }
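If you prefer to stay in C#, the same Microsoft.Web.Administration API used by the script above can be called directly. Here is a minimal sketch, assuming a reference to Microsoft.Web.Administration.dll and that it runs with administrative rights on the IIS machine (class and variable names are just illustrative):
using System;
using Microsoft.Web.Administration;
class ListAnonymousAuth {
static void Main() {
// Open the local IIS configuration store
using (ServerManager iis = new ServerManager()) {
foreach (Site site in iis.Sites) {
foreach (Application app in site.Applications) {
// Only look at applications in DefaultAppPool, like the script above
if (app.ApplicationPoolName != "DefaultAppPool") continue;
// Read the anonymousAuthentication section for this application
var section = app.GetWebConfiguration()
.GetSection("system.webServer/security/authentication/anonymousAuthentication");
Console.WriteLine("{0} - AnonymousEnabled: {1}",
app.Path, section.GetAttributeValue("enabled"));
}
}
}
}
}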
2) You can create a small C# program like this:
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using Microsoft.Web.Management.SEO.Crawler;
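// Note: the Microsoft.Web.Management.SEO.Crawler namespace comes from the IIS SEO Toolkit,
// so the project needs a reference to that toolkit's assembly (installed along with the IIS SEO Toolkit).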
命名空间SEORunner { 课程计划{
static void Main(string[] args) {
if (args.Length != 1) {
Console.WriteLine("Please specify the URL.");
return;
}
// Create a URI class
Uri startUrl = new Uri(args[0]);
// Run the analysis
CrawlerReport report = RunAnalysis(startUrl);
// Run a few queries...
LogSummary(report);
LogStatusCodeSummary(report);
LogBrokenLinks(report);
}
private static CrawlerReport RunAnalysis(Uri startUrl) {
CrawlerSettings settings = new CrawlerSettings(startUrl);
settings.ExternalLinkCriteria = ExternalLinkCriteria.SameFolderAndDeeper;
// Generate a unique name
settings.Name = startUrl.Host + " " + DateTime.Now.ToString("yy-MM-dd hh-mm-ss");
// Use the same directory as the default used by the UI
string path = Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments),
"IIS SEO Reports");
settings.DirectoryCache = Path.Combine(path, settings.Name);
// Create a new crawler and start running
WebCrawler crawler = new WebCrawler(settings);
crawler.Start();
Console.WriteLine("Processed - Remaining - Download Size");
while (crawler.IsRunning) {
Thread.Sleep(1000);
Console.WriteLine("{0,9:N0} - {1,9:N0} - {2,9:N2} MB",
crawler.Report.GetUrlCount(),
crawler.RemainingUrls,
crawler.BytesDownloaded / 1048576.0f);
}
// Save the report
crawler.Report.Save(path);
Console.WriteLine("Crawling complete!!!");
return crawler.Report;
}
private static void LogSummary(CrawlerReport report) {
Console.WriteLine();
Console.WriteLine("----------------------------");
Console.WriteLine(" Overview");
Console.WriteLine("----------------------------");
Console.WriteLine("Start URL: {0}", report.Settings.StartUrl);
Console.WriteLine("Start Time: {0}", report.Settings.StartTime);
Console.WriteLine("End Time: {0}", report.Settings.EndTime);
Console.WriteLine("URLs: {0}", report.GetUrlCount());
Console.WriteLine("Links: {0}", report.Settings.LinkCount);
Console.WriteLine("Violations: {0}", report.Settings.ViolationCount);
}
private static void LogBrokenLinks(CrawlerReport report) {
Console.WriteLine();
Console.WriteLine("----------------------------");
Console.WriteLine(" Broken links");
Console.WriteLine("----------------------------");
foreach (var item in from url in report.GetUrls()
where url.StatusCode == HttpStatusCode.NotFound &&
!url.IsExternal
orderby url.Url.AbsoluteUri ascending
select url) {
Console.WriteLine(item.Url.AbsoluteUri);
}
}
private static void LogStatusCodeSummary(CrawlerReport report) {
Console.WriteLine();
Console.WriteLine("----------------------------");
Console.WriteLine(" Status Code summary");
Console.WriteLine("----------------------------");
foreach (var item in from url in report.GetUrls()
group url by url.StatusCode into g
orderby g.Key
select g) {
Console.WriteLine("{0,20} - {1,5:N0}", item.Key, item.Count());
}
}
}
}
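Once compiled (assuming you name the executable SEORunner.exe), you run it with the start URL as its only argument, for example SEORunner.exe http://www.example.com/ (the URL is just a placeholder). The report is saved under the "IIS SEO Reports" folder in My Documents, the same location the IIS SEO Toolkit UI uses.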
Then configure it to run on a schedule with the Windows scheduler.
We use the same toolkit at http://www.seo-genie.com and could run those tests for you on a weekly basis in case you can't work it out yourself, or you can just use the code I pasted above plus the Windows scheduler, or perhaps the PowerShell approach.
Answer 1: (score: 0)
I posted a blog post on how to build a command-line tool that uses the engine. You can then schedule it to run with the Task Scheduler in Windows.