How to Schedule the IIS SEO Toolkit to Run Daily
I have installed the Microsoft SEO Toolkit on IIS: http://www.iis.net/download/seotoolkit
I want to be able to schedule it to run daily and generate a report.
Does anyone know how to do this?
You can do this in various ways:
1) Using PowerShell scripts: http://blogs.iis.net/carlosag/archive/2008/02/10/using-microsoft-web-administration-in-windows-powershell.aspx
PS C:\> $iis = new-object Microsoft.Web.Administration.ServerManager
PS C:\> $iis.Sites | foreach { $_.Applications | where { $_.ApplicationPoolName -eq 'DefaultAppPool' } | select-object Path, @{Name="AnonymousEnabled"; Expression={ $_.GetWebConfiguration().GetSection("system.webServer/security/authentication/anonymousAuthentication").GetAttributeValue("enabled") }} }
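The snippet above is a general example of driving IIS from Microsoft.Web.Administration; the toolkit's crawler itself can also be scripted. As a minimal sketch, assuming the Microsoft.Web.Management.SEO.Crawler assembly installed by the toolkit can be loaded by that name (adjust the load call if it differs on your machine), a crawl equivalent to the C# program below could be saved as, say, RunSeoCrawl.ps1 (the file name and URL are placeholders):
# Load the SEO Toolkit crawler assembly (assembly name assumed to match the namespace)
[System.Reflection.Assembly]::LoadWithPartialName("Microsoft.Web.Management.SEO.Crawler") | Out-Null

# Configure a crawl of the target site and store the report next to the UI's reports
$startUrl = [Uri]"http://www.example.com/"
$settings = New-Object Microsoft.Web.Management.SEO.Crawler.CrawlerSettings -ArgumentList $startUrl
$settings.Name = $startUrl.Host + " " + (Get-Date -Format "yy-MM-dd hh-mm-ss")
$path = Join-Path ([Environment]::GetFolderPath("MyDocuments")) "IIS SEO Reports"
$settings.DirectoryCache = Join-Path $path $settings.Name

# Start the crawler, wait for it to finish, then save the report
$crawler = New-Object Microsoft.Web.Management.SEO.Crawler.WebCrawler -ArgumentList $settings
$crawler.Start()
while ($crawler.IsRunning) { Start-Sleep -Seconds 1 }
$crawler.Report.Save($path)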
2) You can create a small C# program like this:
using System;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading;
using Microsoft.Web.Management.SEO.Crawler;

namespace SEORunner {
    class Program {

        static void Main(string[] args) {
            if (args.Length != 1) {
                Console.WriteLine("Please specify the URL.");
                return;
            }

            // Create a URI class
            Uri startUrl = new Uri(args[0]);

            // Run the analysis
            CrawlerReport report = RunAnalysis(startUrl);

            // Run a few queries...
            LogSummary(report);
            LogStatusCodeSummary(report);
            LogBrokenLinks(report);
        }

        private static CrawlerReport RunAnalysis(Uri startUrl) {
            CrawlerSettings settings = new CrawlerSettings(startUrl);
            settings.ExternalLinkCriteria = ExternalLinkCriteria.SameFolderAndDeeper;

            // Generate a unique name
            settings.Name = startUrl.Host + " " + DateTime.Now.ToString("yy-MM-dd hh-mm-ss");

            // Use the same directory as the default used by the UI
            string path = Path.Combine(
                Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments),
                "IIS SEO Reports");
            settings.DirectoryCache = Path.Combine(path, settings.Name);

            // Create a new crawler and start running
            WebCrawler crawler = new WebCrawler(settings);
            crawler.Start();

            Console.WriteLine("Processed - Remaining - Download Size");
            while (crawler.IsRunning) {
                Thread.Sleep(1000);
                Console.WriteLine("{0,9:N0} - {1,9:N0} - {2,9:N2} MB",
                    crawler.Report.GetUrlCount(),
                    crawler.RemainingUrls,
                    crawler.BytesDownloaded / 1048576.0f);
            }

            // Save the report
            crawler.Report.Save(path);
            Console.WriteLine("Crawling complete!!!");

            return crawler.Report;
        }

        private static void LogSummary(CrawlerReport report) {
            Console.WriteLine();
            Console.WriteLine("----------------------------");
            Console.WriteLine(" Overview");
            Console.WriteLine("----------------------------");
            Console.WriteLine("Start URL: {0}", report.Settings.StartUrl);
            Console.WriteLine("Start Time: {0}", report.Settings.StartTime);
            Console.WriteLine("End Time: {0}", report.Settings.EndTime);
            Console.WriteLine("URLs: {0}", report.GetUrlCount());
            Console.WriteLine("Links: {0}", report.Settings.LinkCount);
            Console.WriteLine("Violations: {0}", report.Settings.ViolationCount);
        }

        private static void LogBrokenLinks(CrawlerReport report) {
            Console.WriteLine();
            Console.WriteLine("----------------------------");
            Console.WriteLine(" Broken links");
            Console.WriteLine("----------------------------");
            foreach (var item in from url in report.GetUrls()
                                 where url.StatusCode == HttpStatusCode.NotFound &&
                                       !url.IsExternal
                                 orderby url.Url.AbsoluteUri ascending
                                 select url) {
                Console.WriteLine(item.Url.AbsoluteUri);
            }
        }

        private static void LogStatusCodeSummary(CrawlerReport report) {
            Console.WriteLine();
            Console.WriteLine("----------------------------");
            Console.WriteLine(" Status Code summary");
            Console.WriteLine("----------------------------");
            foreach (var item in from url in report.GetUrls()
                                 group url by url.StatusCode into g
                                 orderby g.Key
                                 select g) {
                Console.WriteLine("{0,20} - {1,5:N0}", item.Key, item.Count());
            }
        }
    }
}
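To build the program, compile it as a console application and add a reference to the Microsoft.Web.Management.SEO.Crawler assembly that the toolkit installs (the DLL path below is only a placeholder; point it at wherever the toolkit put the assembly on your machine):
csc.exe /out:SEORunner.exe /r:"<path to Microsoft.Web.Management.SEO.Crawler.dll>" SEORunner.cs
Running SEORunner.exe http://www.example.com/ by hand once is a good way to confirm the report lands in the "IIS SEO Reports" folder before scheduling it.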
And then configure it to run with the Windows Task Scheduler.
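As a sketch (the task name, executable path, URL, and start time are all placeholders), a daily run can be registered from an elevated command prompt with schtasks:
schtasks /Create /TN "IIS SEO Daily Crawl" /TR "C:\Tools\SEORunner.exe http://www.example.com/" /SC DAILY /ST 03:00
The same approach works for the PowerShell variant above by pointing /TR at powershell.exe -File RunSeoCrawl.ps1.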
We use the same toolkit at http://www.seo-genie.com and could run the tests for you on a weekly basis, if that would work for you; otherwise just use the code I pasted above plus the Windows Task Scheduler, or do it with PowerShell as described above.