Hardening wordpress on IIS7+ web.config equivalent of .htaccess - wordpress

On Linux servers, we can benefit from .htaccess rules in order to make wordpress installations more secure.
How is that possible on IIS7+?

Using Better WP Security .htaccess rules and the rule converter wizard on IIS Manager, I got the following for the web.config file.
This file includes:
usual wordpress rewrite
denying blacklisted agents
file leeching protection
trace | delete | track protection
forbidden access to some directories
In addition to these, another tip: wordpress does work if you move your wp-config.php file one level up (Do not keep it under /www/)
<!-- NOTE(review): no <?xml version="1.0" encoding="UTF-8"?> declaration; IIS
     accepts this, but adding one is best practice for web.config files. -->
<configuration>
<system.webServer>
<rewrite>
<rules>
<!-- Standard WordPress front controller: requests that do not map to a
     physical file or directory are rewritten to index.php (permalinks). -->
<rule name="wordpress" patternSyntax="Wildcard">
<match url="*" />
<conditions>
<add input="{REQUEST_FILENAME}" matchType="IsFile" negate="true" />
<add input="{REQUEST_FILENAME}" matchType="IsDirectory" negate="true" />
</conditions>
<action type="Rewrite" url="index.php" />
</rule>
<!-- Returns 403 when the User-Agent matches ANY condition below
     (logicalGrouping="MatchAny" = logical OR across all patterns). -->
<rule name="Abuse Agent Blocking from HackRepair.com" stopProcessing="true">
<match url="^.*" ignoreCase="false" />
<conditions logicalGrouping="MatchAny">
<!--# BEGIN Better WP Security-->
<!--# Begin HackRepair.com Blacklist-->
<!--# Abuse Agent Blocking-->
<add input="{HTTP_USER_AGENT}" pattern="^BlackWidow" />
<add input="{HTTP_USER_AGENT}" pattern="^Bolt\ 0" />
<add input="{HTTP_USER_AGENT}" pattern="^Bot\ mailto:craftbot\#yahoo\.com" />
<add input="{HTTP_USER_AGENT}" pattern="CazoodleBot" />
<add input="{HTTP_USER_AGENT}" pattern="^ChinaClaw" />
<add input="{HTTP_USER_AGENT}" pattern="^Custo" />
<add input="{HTTP_USER_AGENT}" pattern="^Default\ Browser\ 0" />
<add input="{HTTP_USER_AGENT}" pattern="^DIIbot" />
<add input="{HTTP_USER_AGENT}" pattern="^DISCo" />
<add input="{HTTP_USER_AGENT}" pattern="discobot" />
<add input="{HTTP_USER_AGENT}" pattern="^Download\ Demon" />
<add input="{HTTP_USER_AGENT}" pattern="^eCatch" />
<add input="{HTTP_USER_AGENT}" pattern="ecxi" />
<add input="{HTTP_USER_AGENT}" pattern="^EirGrabber" />
<add input="{HTTP_USER_AGENT}" pattern="^EmailCollector" />
<add input="{HTTP_USER_AGENT}" pattern="^EmailSiphon" />
<add input="{HTTP_USER_AGENT}" pattern="^EmailWolf" />
<add input="{HTTP_USER_AGENT}" pattern="^Express\ WebPictures" />
<add input="{HTTP_USER_AGENT}" pattern="^ExtractorPro" />
<add input="{HTTP_USER_AGENT}" pattern="^EyeNetIE" />
<add input="{HTTP_USER_AGENT}" pattern="^FlashGet" />
<add input="{HTTP_USER_AGENT}" pattern="^GetRight" />
<add input="{HTTP_USER_AGENT}" pattern="^GetWeb!" />
<add input="{HTTP_USER_AGENT}" pattern="^Go!Zilla" />
<add input="{HTTP_USER_AGENT}" pattern="^Go-Ahead-Got-It" />
<add input="{HTTP_USER_AGENT}" pattern="^GrabNet" />
<add input="{HTTP_USER_AGENT}" pattern="^Grafula" />
<add input="{HTTP_USER_AGENT}" pattern="GT::WWW" />
<add input="{HTTP_USER_AGENT}" pattern="heritrix" />
<add input="{HTTP_USER_AGENT}" pattern="^HMView" />
<add input="{HTTP_USER_AGENT}" pattern="HTTP::Lite" />
<add input="{HTTP_USER_AGENT}" pattern="HTTrack" />
<add input="{HTTP_USER_AGENT}" pattern="ia_archiver" />
<add input="{HTTP_USER_AGENT}" pattern="IDBot" />
<add input="{HTTP_USER_AGENT}" pattern="id-search" />
<add input="{HTTP_USER_AGENT}" pattern="id-search\.org" />
<add input="{HTTP_USER_AGENT}" pattern="^Image\ Stripper" />
<add input="{HTTP_USER_AGENT}" pattern="^Image\ Sucker" />
<add input="{HTTP_USER_AGENT}" pattern="Indy\ Library" />
<add input="{HTTP_USER_AGENT}" pattern="^InterGET" />
<add input="{HTTP_USER_AGENT}" pattern="^Internet\ Ninja" />
<add input="{HTTP_USER_AGENT}" pattern="^InternetSeer\.com" />
<add input="{HTTP_USER_AGENT}" pattern="IRLbot" />
<add input="{HTTP_USER_AGENT}" pattern="ISC\ Systems\ iRc\ Search\ 2\.1" />
<add input="{HTTP_USER_AGENT}" pattern="^Java" />
<add input="{HTTP_USER_AGENT}" pattern="^JetCar" />
<add input="{HTTP_USER_AGENT}" pattern="^JOC\ Web\ Spider" />
<add input="{HTTP_USER_AGENT}" pattern="^larbin" />
<add input="{HTTP_USER_AGENT}" pattern="^LeechFTP" />
<add input="{HTTP_USER_AGENT}" pattern="libwww" />
<!-- NOTE(review): the unanchored "libwww" above already matches any agent
     containing "libwww-perl", so the next condition is redundant. -->
<add input="{HTTP_USER_AGENT}" pattern="libwww-perl" />
<add input="{HTTP_USER_AGENT}" pattern="^Link" />
<add input="{HTTP_USER_AGENT}" pattern="LinksManager.com_bot" />
<add input="{HTTP_USER_AGENT}" pattern="linkwalker" />
<add input="{HTTP_USER_AGENT}" pattern="lwp-trivial" />
<add input="{HTTP_USER_AGENT}" pattern="^Mass\ Downloader" />
<add input="{HTTP_USER_AGENT}" pattern="^Maxthon$" />
<add input="{HTTP_USER_AGENT}" pattern="MFC_Tear_Sample" />
<add input="{HTTP_USER_AGENT}" pattern="^microsoft\.url" />
<add input="{HTTP_USER_AGENT}" pattern="Microsoft\ URL\ Control" />
<add input="{HTTP_USER_AGENT}" pattern="^MIDown\ tool" />
<add input="{HTTP_USER_AGENT}" pattern="^Mister\ PiX" />
<add input="{HTTP_USER_AGENT}" pattern="Missigua\ Locator" />
<add input="{HTTP_USER_AGENT}" pattern="^Mozilla\.*Indy" />
<add input="{HTTP_USER_AGENT}" pattern="^Mozilla\.*NEWT" />
<add input="{HTTP_USER_AGENT}" pattern="^MSFrontPage" />
<add input="{HTTP_USER_AGENT}" pattern="^Navroad" />
<add input="{HTTP_USER_AGENT}" pattern="^NearSite" />
<add input="{HTTP_USER_AGENT}" pattern="^NetAnts" />
<add input="{HTTP_USER_AGENT}" pattern="^NetSpider" />
<add input="{HTTP_USER_AGENT}" pattern="^Net\ Vampire" />
<add input="{HTTP_USER_AGENT}" pattern="^NetZIP" />
<add input="{HTTP_USER_AGENT}" pattern="^Nutch" />
<add input="{HTTP_USER_AGENT}" pattern="^Octopus" />
<add input="{HTTP_USER_AGENT}" pattern="^Offline\ Explorer" />
<add input="{HTTP_USER_AGENT}" pattern="^Offline\ Navigator" />
<add input="{HTTP_USER_AGENT}" pattern="^PageGrabber" />
<add input="{HTTP_USER_AGENT}" pattern="panscient.com" />
<add input="{HTTP_USER_AGENT}" pattern="^Papa\ Foto" />
<add input="{HTTP_USER_AGENT}" pattern="^pavuk" />
<add input="{HTTP_USER_AGENT}" pattern="PECL::HTTP" />
<add input="{HTTP_USER_AGENT}" pattern="^PeoplePal" />
<add input="{HTTP_USER_AGENT}" pattern="^pcBrowser" />
<add input="{HTTP_USER_AGENT}" pattern="PHPCrawl" />
<add input="{HTTP_USER_AGENT}" pattern="PleaseCrawl" />
<add input="{HTTP_USER_AGENT}" pattern="^psbot" />
<add input="{HTTP_USER_AGENT}" pattern="^RealDownload" />
<add input="{HTTP_USER_AGENT}" pattern="^ReGet" />
<add input="{HTTP_USER_AGENT}" pattern="^Rippers\ 0" />
<add input="{HTTP_USER_AGENT}" pattern="SBIder" />
<add input="{HTTP_USER_AGENT}" pattern="^SeaMonkey$" />
<add input="{HTTP_USER_AGENT}" pattern="^sitecheck\.internetseer\.com" />
<add input="{HTTP_USER_AGENT}" pattern="^SiteSnagger" />
<add input="{HTTP_USER_AGENT}" pattern="^SmartDownload" />
<add input="{HTTP_USER_AGENT}" pattern="Snoopy" />
<add input="{HTTP_USER_AGENT}" pattern="Steeler" />
<add input="{HTTP_USER_AGENT}" pattern="^SuperBot" />
<add input="{HTTP_USER_AGENT}" pattern="^SuperHTTP" />
<add input="{HTTP_USER_AGENT}" pattern="^Surfbot" />
<add input="{HTTP_USER_AGENT}" pattern="^tAkeOut" />
<add input="{HTTP_USER_AGENT}" pattern="^Teleport\ Pro" />
<add input="{HTTP_USER_AGENT}" pattern="^Toata\ dragostea\ mea\ pentru\ diavola" />
<add input="{HTTP_USER_AGENT}" pattern="URI::Fetch" />
<add input="{HTTP_USER_AGENT}" pattern="urllib" />
<add input="{HTTP_USER_AGENT}" pattern="User-Agent" />
<add input="{HTTP_USER_AGENT}" pattern="^VoidEYE" />
<add input="{HTTP_USER_AGENT}" pattern="^Web\ Image\ Collector" />
<add input="{HTTP_USER_AGENT}" pattern="^Web\ Sucker" />
<!-- NOTE(review): the unanchored "Web\ Sucker" below subsumes the anchored
     "^Web\ Sucker" above; one of the two conditions is redundant. -->
<add input="{HTTP_USER_AGENT}" pattern="Web\ Sucker" />
<add input="{HTTP_USER_AGENT}" pattern="webalta" />
<add input="{HTTP_USER_AGENT}" pattern="^WebAuto" />
<add input="{HTTP_USER_AGENT}" pattern="^[Ww]eb[Bb]andit" />
<add input="{HTTP_USER_AGENT}" pattern="WebCollage" />
<add input="{HTTP_USER_AGENT}" pattern="^WebCopier" />
<add input="{HTTP_USER_AGENT}" pattern="^WebFetch" />
<add input="{HTTP_USER_AGENT}" pattern="^WebGo\ IS" />
<add input="{HTTP_USER_AGENT}" pattern="^WebLeacher" />
<add input="{HTTP_USER_AGENT}" pattern="^WebReaper" />
<add input="{HTTP_USER_AGENT}" pattern="^WebSauger" />
<add input="{HTTP_USER_AGENT}" pattern="^Website\ eXtractor" />
<add input="{HTTP_USER_AGENT}" pattern="^Website\ Quester" />
<add input="{HTTP_USER_AGENT}" pattern="^WebStripper" />
<add input="{HTTP_USER_AGENT}" pattern="^WebWhacker" />
<add input="{HTTP_USER_AGENT}" pattern="^WebZIP" />
<add input="{HTTP_USER_AGENT}" pattern="Wells\ Search\ II" />
<add input="{HTTP_USER_AGENT}" pattern="WEP\ Search" />
<add input="{HTTP_USER_AGENT}" pattern="^Wget" />
<add input="{HTTP_USER_AGENT}" pattern="^Widow" />
<add input="{HTTP_USER_AGENT}" pattern="^WWW-Mechanize" />
<add input="{HTTP_USER_AGENT}" pattern="^WWWOFFLE" />
<add input="{HTTP_USER_AGENT}" pattern="^Xaldon\ WebSpider" />
<add input="{HTTP_USER_AGENT}" pattern="zermelo" />
<add input="{HTTP_USER_AGENT}" pattern="^Zeus" />
<add input="{HTTP_USER_AGENT}" pattern="^Zeus\.*Webster" />
<add input="{HTTP_USER_AGENT}" pattern="ZyBorg" />
</conditions>
<action type="CustomResponse" statusCode="403" statusReason="Forbidden" statusDescription="Forbidden" />
</rule>
<!-- Deny direct requests into wp-admin/includes/ with 403. -->
<rule name="Imported Rule 2" stopProcessing="true">
<match url="^wp-admin/includes/" ignoreCase="false" />
<action type="CustomResponse" statusCode="403" statusReason="Forbidden" statusDescription="Forbidden" />
</rule>
<!-- Deny direct requests to any .php file directly under wp-includes/,
     except ms-files.php (needed by multisite file serving). -->
<rule name="Imported Rule 3" stopProcessing="true">
<match url="^wp-includes/[^/]+\.php$" ignoreCase="false" />
<conditions>
<!--# RewriteRule !^wp-includes/ - [S=3]-->
<add input="{SCRIPT_FILENAME}" pattern="^(.*)wp-includes/ms-files.php" ignoreCase="false" negate="true" />
</conditions>
<action type="CustomResponse" statusCode="403" statusReason="Forbidden" statusDescription="Forbidden" />
</rule>
<!-- Deny TinyMCE language .php files under wp-includes/. -->
<rule name="Imported Rule 4" stopProcessing="true">
<match url="^wp-includes/js/tinymce/langs/.+\.php" ignoreCase="false" />
<action type="CustomResponse" statusCode="403" statusReason="Forbidden" statusDescription="Forbidden" />
</rule>
<!-- Deny the legacy theme-compat directory. -->
<rule name="Imported Rule 5" stopProcessing="true">
<match url="^wp-includes/theme-compat/" ignoreCase="false" />
<action type="CustomResponse" statusCode="403" statusReason="Forbidden" statusDescription="Forbidden" />
</rule>
<!-- Deny the TRACE, DELETE and TRACK HTTP methods with 403. -->
<rule name="Imported Rule 6" stopProcessing="true">
<match url="^(.*)$" ignoreCase="false" />
<conditions>
<add input="{REQUEST_METHOD}" pattern="^(TRACE|DELETE|TRACK)" />
</conditions>
<action type="CustomResponse" statusCode="403" statusReason="Forbidden" statusDescription="Forbidden" />
</rule>
</rules>
</rewrite>
</system.webServer>
</configuration>

Related

Disable OPTIONS, TRACE Method through web server configuration in IIS 8.5

I am using this, but it is still returning a 200 response code when I check it with Burp Suite.
<!-- Request-filtering verb list: deny every verb except GET and POST.
     NOTE(review): this <verbs> element only takes effect when nested inside
     <system.webServer><security><requestFiltering>; pasted on its own it is
     ignored, which would explain TRACE/OPTIONS still returning 200 — confirm
     where this fragment sits in the actual web.config.
     NOTE(review): denying HEAD will also break monitoring tools and caches
     that probe with HEAD — verify this is intended. -->
<verbs allowUnlisted="false">
<clear />
<add verb="GET" allowed="true" />
<add verb="POST" allowed="true" />
<add verb="TRACE" allowed="false" />
<add verb="OPTIONS" allowed="false" />
<add verb="HEAD" allowed="false" />
<add verb="PUT" allowed="false" />
<add verb="CONNECT" allowed="false" />
<add verb="DELETE" allowed="false" />
<add verb="TRACK" allowed="false" />
</verbs>

How to fix "Choosing your permalink structure" on WordPress for an IIS server on Azure

Need your help.
There is one IIS server in Azure.
It contains the Website and the Angular SPA application.
I need to install in a separate directory WordPress blog.
The blog deployed successfully.
But there was a problem with Choosing your permalink structure.
I have an error:
403 - Forbidden: Access is denied.
You do not have permission to view this directory or page using the credentials that you supplied.
I have two web.config files
1. ROOT web.config
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<location path=".">
<system.webServer>
<security>
<authentication>
<!-- Allow anonymous access site-wide. -->
<anonymousAuthentication enabled="true" />
</authentication>
</security>
<rewrite>
<rules>
<!-- Discard any inherited rules before defining this site's rule set. -->
<clear />
<!-- Force HTTPS: redirect any request arriving over plain HTTP. -->
<rule name="Redirect http to https" stopProcessing="true">
<match url=".*" />
<conditions logicalGrouping="MatchAll" trackAllCaptures="false">
<add input="{HTTPS}" pattern="^OFF$" />
</conditions>
<action type="Redirect" url="https://{HTTP_HOST}/{R:0}" />
</rule>
<!-- Send /login to the SPA's login route. -->
<rule name="login redirect" patternSyntax="ECMAScript" stopProcessing="true">
<match url="^login" />
<conditions logicalGrouping="MatchAll" trackAllCaptures="false" />
<action type="Redirect" url="ANGULAR-SPA-DIR/login" />
</rule>
<!-- SPA catch-all: rewrite non-physical paths to /index.html.
     NOTE(review): the exclusion list below covers /ROOT-DIR/, /ANGULAR-SPA-DIR/
     and /TEST/ but NOT /blog/, so non-physical blog permalink URLs are
     rewritten to /index.html here (stopProcessing="true") before the blog's
     own web.config can handle them — likely the cause of the permalink
     problem described above; confirm by adding a /blog/ exclusion. -->
<rule name="AngularJS Routes" stopProcessing="true">
<match url=".*" />
<conditions logicalGrouping="MatchAll" trackAllCaptures="false">
<add input="{REQUEST_FILENAME}" matchType="IsFile" negate="true" />
<add input="{REQUEST_FILENAME}" matchType="IsDirectory" negate="true" />
<add input="{REQUEST_URI}" matchType="Pattern" pattern="/ROOT-DIR/" ignoreCase="true" negate="true" />
<add input="{REQUEST_URI}" matchType="Pattern" pattern="/ANGULAR-SPA-DIR/" ignoreCase="true" negate="true" />
<add input="{REQUEST_URI}" matchType="Pattern" pattern="/TEST/" ignoreCase="true" negate="true" />
</conditions>
<action type="Rewrite" url="/index.html" />
</rule>
<!-- SPA fallback for the ANGULAR-SPA-DIR sub-application. -->
<rule name="AngularJS Routes1" stopProcessing="true">
<match url="^ANGULAR-SPA-DIR/" />
<conditions logicalGrouping="MatchAll" trackAllCaptures="false">
<add input="{REQUEST_FILENAME}" matchType="IsFile" negate="true" />
<add input="{REQUEST_FILENAME}" matchType="IsDirectory" negate="true" />
</conditions>
<action type="Rewrite" url="/ANGULAR-SPA-DIR/index.html" />
</rule>
<!-- SPA fallback for the TEST sub-application. -->
<rule name="AngularJS Routes2" stopProcessing="true">
<match url="^TEST/" />
<conditions logicalGrouping="MatchAll" trackAllCaptures="false">
<add input="{REQUEST_FILENAME}" matchType="IsFile" negate="true" />
<add input="{REQUEST_FILENAME}" matchType="IsDirectory" negate="true" />
</conditions>
<action type="Rewrite" url="/TEST/index.html" />
</rule>
<!-- BUG(review): logicalGrouping="MatchAll" requires BOTH conditions to hold,
     but a request path can never be a file AND a directory at the same time,
     so this rule never matches and blog requests fall through to the
     "AngularJS Routes" catch-all above. -->
<rule name="WordPress Blog" stopProcessing="true">
<match url="blog/.*" />
<conditions logicalGrouping="MatchAll" trackAllCaptures="true">
<add input="{REQUEST_FILENAME}" matchType="IsFile" />
<add input="{REQUEST_FILENAME}" matchType="IsDirectory" />
</conditions>
<action type="None" logRewrittenUrl="true" />
</rule>
</rules>
</rewrite>
<caching>
<profiles>
<add extension=".css" policy="CacheForTimePeriod" kernelCachePolicy="DontCache" duration="00:30:00" />
<add extension=".svg" policy="CacheForTimePeriod" kernelCachePolicy="DontCache" duration="01:00:00" />
<add extension=".jpg" policy="CacheForTimePeriod" kernelCachePolicy="DontCache" duration="01:00:00" />
<add extension=".js" policy="CacheForTimePeriod" kernelCachePolicy="DontCache" duration="00:30:00" />
<add extension=".woff2" policy="CacheForTimePeriod" kernelCachePolicy="DontCache" duration="24.00:00:00" />
</profiles>
</caching>
<!-- NOTE(review): directory browsing is enabled site-wide; this exposes
     directory listings and is usually undesirable on a production site. -->
<directoryBrowse enabled="true" />
</system.webServer>
</location>
</configuration>
Blog web.config
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<system.webServer>
<rewrite>
<!-- Drop the "WordPress Blog" rule inherited from the parent web.config,
     then apply the standard WordPress front-controller rewrite: requests
     that do not map to a physical file or directory go to index.php. -->
<rules><remove name="WordPress Blog"/>
<rule name="WordPress: https://ROOT-URL/blog" patternSyntax="Wildcard">
<match url="*"/>
<conditions>
<add input="{REQUEST_FILENAME}" matchType="IsFile" negate="true"/>
<add input="{REQUEST_FILENAME}" matchType="IsDirectory" negate="true"/>
</conditions>
<action type="Rewrite" url="index.php"/>
</rule></rules>
</rewrite>
</system.webServer>
</configuration>
What can you experienced friends suggest?
Add this string to the ROOT web.config:
<conditions logicalGrouping="MatchAll" trackAllCaptures="false">
...
<add input="{QUERY_STRING}" pattern="/blog/" />
</conditions>
After that, "Choosing your permalink structure" works correctly.

How to block bots with IIS?

I have web.config for asp.net core configured like this to block bots MJ12bot|spbot|YandexBot
I'm using IIS 7.5 with Url Rewrite Module 2.1 installed.
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<system.webServer>
<handlers>
<add name="aspNetCore" path="*" verb="*" modules="AspNetCoreModule" resourceType="Unspecified" />
</handlers>
<aspNetCore processPath="dotnet" arguments=".\myproject.dll" stdoutLogEnabled="false" stdoutLogFile=".\logs\stdout">
</aspNetCore>
<rewrite>
<rules>
<!-- Block abusive crawlers by User-Agent and answer 403.
     FIX(review): the original rule declared patternSyntax="Wildcard", which
     makes the condition pattern "MJ12bot|spbot|YandexBot" a literal wildcard
     string (| is not an alternation operator in wildcard syntax), so the rule
     never matched and the bots kept receiving 200 responses. Dropping
     patternSyntax selects the default ECMAScript regular-expression syntax,
     where the alternation works as intended; match url=".*" keeps the rule
     applying to every request. -->
<rule name="RequestBlockingRule1" stopProcessing="true">
<match url=".*" />
<conditions logicalGrouping="MatchAny">
<add input="{HTTP_USER_AGENT}" pattern="MJ12bot|spbot|YandexBot" />
</conditions>
<action type="CustomResponse" statusCode="403" statusReason="Forbidden: Access is denied." statusDescription="You do not have permission to view this directory or page using the credentials that you supplied." />
</rule>
</rules>
</rewrite>
</system.webServer>
</configuration>
I tried deleting the website's log files and restarting the app pool to monitor the new traffic, but the bot is still crawling my site.
2017-07-01 14:29:52 W3SVC33 125.212.217.6 GET /phim/mune-ve-binh-mat-trang-hoat-hinh q=HD2 80 - 144.76.30.241 Mozilla/5.0+(compatible;+MJ12bot/v1.4.7;+http://mj12bot.com/) 200 0 0 7888 403 21995
2017-07-01 14:30:10 W3SVC33 125.212.217.6 GET /phim/tay-du-ky-1-dai-nao-thien-cung q=HD1 80 - 5.9.63.162 Mozilla/5.0+(compatible;+MJ12bot/v1.4.7;+http://mj12bot.com/) 200 0 0 8278 401 33541
2017-07-01 14:30:13 W3SVC33 125.212.217.6 GET /phim/su-that-kinh-hoang q=HD2 80 - 5.9.156.74 Mozilla/5.0+(compatible;+MJ12bot/v1.4.7;+http://mj12bot.com/) 200 0 0 8425 389 19474
I use this rewrite rule, maybe it will work for you. I did not create it myself, found it at https://www.saotn.org/hackrepair-bad-bots-htaccess-web-config-iis/
<rule name="Abuse User Agents Blocking" stopProcessing="true">
<match url=".*" ignoreCase="false" />
<conditions logicalGrouping="MatchAny">
<add input="{HTTP_USER_AGENT}" pattern="^.*(1Noonbot|1on1searchBot|3D_SEARCH|3DE_SEARCH2|3GSE|50.nu|192.comAgent|360Spider|A6-Indexer|AASP|ABACHOBot|Abonti|abot|AbotEmailSearch|Aboundex|AboutUsBot|AccMonitor\ Compliance|accoona|AChulkov.NET\ page\ walker|Acme.Spider|AcoonBot|acquia-crawler|ActiveTouristBot|Acunetix|Ad\ Muncher|AdamM|adbeat_bot|adminshop.com|Advanced\ Email|AESOP_com_SpiderMan|AESpider|AF\ Knowledge\ Now\ Verity|aggregator:Vocus|ah-ha.com|AhrefsBot|AIBOT|aiHitBot|aipbot|AISIID|AITCSRobot|Akamai-SiteSnapshot|AlexaWebSearchPlatform|AlexfDownload|Alexibot|AlkalineBOT|All\ Acronyms|Amfibibot|AmPmPPC.com|AMZNKAssocBot|Anemone|Anonymous|Anonymouse.org|AnotherBot|AnswerBot|AnswerBus|AnswerChase\ PROve|AntBot|antibot-|AntiSantyWorm|Antro.Net|AONDE-Spider|Aport|Aqua_Products|AraBot|Arachmo|Arachnophilia|archive.org_bot|aria\ eQualizer|arianna.libero.it|Arikus_Spider|Art-Online.com|ArtavisBot|Artera|ASpider|ASPSeek|asterias|AstroFind|athenusbot|AtlocalBot|Atomic_Email_Hunter|attach|attrakt|attributor|Attributor.comBot|augurfind|AURESYS|AutoBaron|autoemailspider|autowebdir|AVSearch-|axfeedsbot|Axonize-bot|Ayna|b2w|BackDoorBot|BackRub|BackStreet\ Browser|BackWeb|Baiduspider-video|Bandit|BatchFTP|baypup|BDFetch|BecomeBot|BecomeJPBot|BeetleBot|Bender|besserscheitern-crawl|betaBot|Big\ Brother|Big\ Data|Bigado.com|BigCliqueBot|Bigfoot|BIGLOTRON|Bilbo|BilgiBetaBot|BilgiBot|binlar|bintellibot|bitlybot|BitvoUserAgent|Bizbot003|BizBot04|BizBot04\ kirk.overleaf.com|Black.Hole|Black\ Hole|Blackbird|BlackWidow|bladder\ fusion|Blaiz-Bee|BLEXBot|Blinkx|BlitzBOT|Blog\ Conversation\ Project|BlogMyWay|BlogPulseLive|BlogRefsBot|BlogScope|Blogslive|BloobyBot|BlowFish|BLT|bnf.fr_bot|BoaConstrictor|BoardReader-Image-Fetcher|BOI_crawl_00|BOIA-Scan-Agent|BOIA.ORG-Scan-Agent|boitho.com-dc|Bookmark\ Buddy|bosug|Bot\ 
Apoena|BotALot|BotRightHere|Botswana|bottybot|BpBot|BRAINTIME_SEARCH|BrokenLinkCheck.com|BrowserEmulator|BrowserMob|BruinBot|BSearchR&D|BSpider|btbot|Btsearch|Buddy|Buibui|BuildCMS|BuiltBotTough|Bullseye|bumblebee|BunnySlippers|BuscadorClarin|Butterfly|BuyHawaiiBot|BuzzBot|byindia|BySpider|byteserver|bzBot|c\ r\ a\ w\ l\ 3\ r|CacheBlaster|CACTVS\ Chemistry|Caddbot|Cafi|Camcrawler|CamelStampede|Canon-WebRecord|Canon-WebRecordPro|CareerBot|casper|cataguru|CatchBot|CazoodleBot|CCBot|CCGCrawl|ccubee|CD-Preload|CE-Preload|Cegbfeieh|Cerberian\ Drtrs|CERT\ FigleafBot|cfetch|CFNetwork|Chameleon|ChangeDetection|Charlotte|Check&Get|Checkbot|Checklinks|checkprivacy|CheeseBot|ChemieDE-NodeBot|CherryPicker|CherryPickerElite|CherryPickerSE|Chilkat|ChinaClaw|CipinetBot|cis455crawler|citeseerxbot|cizilla.com|ClariaBot|clshttp|Clushbot|cmsworldmap|coccoc|CollapsarWEB|Collector|combine|comodo|conceptbot|ConnectSearch|conpilot|ContentSmartz|ContextAd|contype|cookieNET|CoolBott|CoolCheck|Copernic|Copier|CopyRightCheck|core-project|cosmos|Covario-IDS|Cowbot-|Cowdog|crabbyBot|crawl|Crawl_Application|crawl.UserAgent|CrawlConvera|crawler|crawler_for_infomine|CRAWLER-ALTSE.VUNET.ORG-Lynx|crawler-upgrade-config|crawler.kpricorn.org|crawler#|crawler4j|crawler43.ejupiter.com|Crawly|CreativeCommons|Crescent|Crescent\ Internet\ ToolPak\ HTTP\ OLE\ Control|cs-crawler|CSE\ HTML\ Validator|CSHttpClient|Cuasarbot|culsearch|Curl|Custo|Cutbot|cvaulev|Cyberdog|CyberNavi_WebGet|CyberSpyder|CydralSpider).*$" />
<add input="{HTTP_USER_AGENT}" pattern="^.*(D1GArabicEngine|DA|DataCha0s|DataFountains|DataparkSearch|DataSpearSpiderBot|DataSpider|Dattatec.com|Dattatec.com-Sitios-Top|Daumoa|DAUMOA-video|DAUMOA-web|Declumbot|Deepindex|deepnet|DeepTrawl|dejan|del.icio.us-thumbnails|DelvuBot|Deweb|DiaGem|Diamond|DiamondBot|diavol|DiBot|didaxusbot|DigExt|Digger|DiGi-RSSBot|DigitalArchivesBot|DigOut4U|DIIbot|Dillo|Dir_Snatch.exe|DISCo|DISCo\ Pump|discobot|DISCoFinder|Distilled-Reputation-Monitor|Dit|DittoSpyder|DjangoTraineeBot|DKIMRepBot|DoCoMo|DOF-Verify|domaincrawler|DomainScan|DomainWatcher|dotbot|DotSpotsBot|Dow\ Jonesbot|Download|Download\ Demon|Downloader|DOY|dragonfly|Drip|drone|DTAAgent|dtSearchSpider|dumbot|Dwaar|Dwaarbot|DXSeeker|EAH|EasouSpider|EasyDL|ebingbong|EC2LinkFinder|eCairn-Grabber|eCatch|eChooseBot|ecxi|EdisterBot|EduGovSearch|egothor|eidetica.com|EirGrabber|ElisaBot|EllerdaleBot|EMail\ Exractor|EmailCollector|EmailLeach|EmailSiphon|EmailWolf|EMPAS_ROBOT|EnaBot|endeca|EnigmaBot|Enswer\ Neuro|EntityCubeBot|EroCrawler|eStyleSearch|eSyndiCat|Eurosoft-Bot|Evaal|Eventware|Everest-Vulcan|Exabot|Exabot-Images|Exabot-Test|Exabot-XXX|ExaBotTest|ExactSearch|exactseek.com|exooba|Exploder|explorersearch|extract|Extractor|ExtractorPro|EyeNetIE|ez-robot|Ezooms|factbot|FairAd\ Client|falcon|Falconsbot|fast-search-engine|FAST\ Data\ Document|FAST\ ESP|fastbot|fastbot.de|FatBot|Favcollector|Faviconizer|FDM|FedContractorBot|feedfinder|FelixIDE|fembot|fetch_ici|Fetch\ API\ Request|fgcrawler|FHscan|fido|Filangy|FileHound|FindAnISP.com_ISP_Finder|findlinks|FindWeb|Firebat|Fish-Search-Robot|Flaming\ 
AttackBot|Flamingo_SearchEngine|FlashCapture|FlashGet|flicky|FlickySearchBot|flunky|focused_crawler|FollowSite|Foobot|Fooooo_Web_Video_Crawl|Fopper|FormulaFinderBot|Forschungsportal|fr_crawler|Francis|Freecrawl|FreshDownload|freshlinks.exe|FriendFeedBot|frodo.at|froGgle|FrontPage|Froola|FU-NBI|full_breadth_crawler|FunnelBack|FunWebProducts|FurlBot|g00g1e|G10-Bot|Gaisbot|GalaxyBot|gazz|gcreep|generate_infomine_category_classifiers|genevabot|genieBot|GenieBotRD_SmallCrawl|Genieo|Geomaxenginebot|geometabot|GeonaBot|GeoVisu|GermCrawler|GetHTMLContents|Getleft|GetRight|GetSmart|GetURL.rexx|GetWeb!|Giant|GigablastOpenSource|Gigabot|Girafabot|GleameBot|gnome-vfs|Go-Ahead-Got-It|Go!Zilla|GoForIt.com|GOFORITBOT|gold|Golem|GoodJelly|Gordon-College-Google-Mini|goroam|GoSeebot|gotit|Govbot|GPU\ p2p|grab|Grabber|GrabNet|Grafula|grapeFX|grapeshot|GrapeshotCrawler|grbot|GreenYogi\ [ZSEBOT]|Gromit|GroupMe|grub|grub-client|Grubclient-|GrubNG|GruBot|gsa|GSLFbot|GT::WWW|Gulliver|GulperBot|GurujiBot|GVC|GVC\ BUSINESS|gvcbot.com|HappyFunBot|harvest|HarvestMan|Hatena\ Antenna|Hawler|Hazel's\ Ferret\ hopper|hcat|hclsreport-crawler|HD\ nutch\ agent|Header_Test_Client|healia|Helix|heritrix|hijbul-heritrix-crawler|HiScan|HiSoftware\ AccMonitor|HiSoftware\ AccVerify|hitcrawler_|hivaBot|hloader|HMSEbot|HMView|hoge|holmes|HomePageSearch|Hooblybot-Image|HooWWWer|Hostcrawler|HSFT\ -\ Link|HSFT\ -\ LVU|HSlide|ht:|htdig|Html\ Link\ Validator|HTMLParser|HTTP::Lite|httplib|HTTrack|Huaweisymantecspider|hul-wax|humanlinks|HyperEstraier|Hyperix).*$" />
<add input="{HTTP_USER_AGENT}" pattern="^.*(ia_archiver|IAArchiver-|ibuena|iCab|ICDS-Ingestion|ichiro|iCopyright\ Conductor|id-search|IDBot|IEAutoDiscovery|IECheck|iHWebChecker|IIITBOT|iim_405|IlseBot|IlTrovatore|Iltrovatore-Setaccio|ImageBot|imagefortress|ImagesHereImagesThereImagesEverywhere|ImageVisu|imds_monitor|imo-google-robot-intelink|IncyWincy|Industry\ Cortexcrawler|Indy\ Library|indylabs_marius|InelaBot|Inet32\ Ctrl|inetbot|InfoLink|INFOMINE|infomine.ucr.edu|InfoNaviRobot|Informant|Infoseek|InfoTekies|InfoUSABot|INGRID|Inktomi|InsightsCollector|InsightsWorksBot|InspireBot|InsumaScout|Intelix|InterGET|Internet\ Ninja|InternetLinkAgent|Interseek|IOI|ip-web-crawler.com|Ipselonbot|Iria|IRLbot|Iron33|Isara|iSearch|iSiloX|IsraeliSearch|IstellaBot|its-learning|IU_CSCI_B659_class_crawler|iVia|iVia\ Page\ Fetcher|JadynAve|JadynAveBot|jakarta|Jakarta\ Commons-HttpClient|Java|Jbot|JemmaTheTourist|JennyBot|Jetbot|JetBrains\ Omea\ Pro|JetCar|Jim|JoBo|JobSpider_BA|JOC|JoeDog|JoyScapeBot|JSpyda|JubiiRobot|jumpstation|Junut|JustView|Jyxobot|K.S.Bot|KakcleBot|kalooga|KaloogaBot|kanagawa|KATATUDO-Spider|Katipo|kbeta1|Kenjin.Spider|KeywenBot|Keyword.Density|Keyword\ Density|kinjabot|KIT-Fireball|Kitenga-crawler-bot|KiwiStatus|kmbot-|kmccrew|Knight|KnowItAll|Knowledge.com|Knowledge\ Engine|KoepaBot|Koninklijke|KrOWLer|KSbot|kuloko-bot|kulturarw3|KummHttp|Kurzor|Kyluka|L.webis|LabelGrab|Labhoo|labourunions411|lachesis|Lament|LamerExterminator|LapozzBot|larbin|LARBIN-EXPERIMENTAL|LBot|LeapTag|LeechFTP|LeechGet|LetsCrawl.com|LexiBot|LexxeBot|lftp|libcrawl|libiViaCore|libWeb|libwww|libwww-perl|likse|Linguee|Link|link_checker|LinkAlarm|linkbot|LinkCheck\ by\ Siteimprove.com|LinkChecker|linkdex.com|LinkextractorPro|LinkLint|linklooker|Linkman|LinkScan|LinksCrawler|LinksManager.com_bot|LinkSweeper|linkwalker|LiteFinder|LitlrBot|Little\ Grabber\ at\ Skanktale.com|Livelapbot|LM\ 
Harvester|LMQueueBot|LNSpiderguy|LoadTimeBot|LocalcomBot|locust|LolongBot|LookBot|Lsearch|lssbot|LWP|lwp-request|lwp-trivial|LWP::Simple|Lycos_Spider|Lydia\ Entity|LynnBot|Lytranslate|Mag-Net|Magnet|magpie-crawler|Magus|Mail.Ru|Mail.Ru_Bot|MAINSEEK_BOT|Mammoth|MarkWatch|MaSagool|masidani_bot_|Mass|Mata.Hari|Mata\ Hari|matentzn\ at\ cs\ dot\ man\ dot\ ac\ dot\ uk|maxamine.com--robot|maxamine.com-robot|maxomobot|Maxthon$|McBot|MediaFox|medrabbit|Megite|MemacBot|Memo|MendeleyBot|Mercator-|mercuryboard_user_agent_sql_injection.nasl|MerzScope|metacarta|Metager2|metager2-verification-bot|MetaGloss|METAGOPHER|metal|metaquerier.cs.uiuc.edu|METASpider|Metaspinner|MetaURI|MetaURI\ API|MFC_Tear_Sample|MFcrawler|MFHttpScan|Microsoft.URL|MIIxpc|miner|mini-robot|minibot|miniRank|Mirror|Missigua\ Locator|Mister.PiX|Mister\ PiX|Miva|MJ12bot|mnoGoSearch|mod_accessibility|moduna.com|moget|MojeekBot|MOMspider|MonkeyCrawl|MOSES|Motor|mowserbot|MQbot|MSE360|MSFrontPage|MSIECrawler|MSIndianWebcrawl|MSMOBOT|Msnbot|msnbot-products|MSNPTC|MSRBOT|MT-Soft|MultiText|My_Little_SearchEngine_Project|my-heritrix-crawler|MyApp|MYCOMPANYBOT|mycrawler|MyEngines-US-Bot|MyFamilyBot|Myra|nabot|nabot_|Najdi.si|Nambu|NAMEPROTECT|NatchCVS|naver|naverbookmarkcrawler|NaverBot|Navroad|NearSite|NEC-MeshExplorer|NeoScioCrawler|NerdByNature.Bot|NerdyBot|Nerima-crawl-).*$" />
<add input="{HTTP_USER_AGENT}" pattern="^.*(T-H-U-N-D-E-R-S-T-O-N-E|Tailrank|tAkeOut|TAMU_CRAWLER|TapuzBot|Tarantula|targetblaster.com|TargetYourNews.com|TAUSDataBot|taxinomiabot|Tecomi|TeezirBot|Teleport|Teleport\ Pro|TeleportPro|Telesoft|Teradex\ Mapper|TERAGRAM_CRAWLER|TerrawizBot|testbot|testing\ of|TextBot|thatrobotsite.com|The.Intraformant|The\ Dyslexalizer|The\ Intraformant|TheNomad|Theophrastus|theusefulbot|TheUsefulbot_|ThumbBot|thumbshots-de-bot|tigerbot|TightTwatBot|TinEye|Titan|to-dress_ru_bot_|to-night-Bot|toCrawl|Topicalizer|topicblogs|Toplistbot|TopServer\ PHP|topyx-crawler|Touche|TourlentaScanner|TPSystem|TRAAZI|TranSGeniKBot|travel-search|TravelBot|TravelLazerBot|Treezy|TREX|TridentSpider|Trovator|True_Robot|tScholarsBot|TsWebBot|TulipChain|turingos|turnit|TurnitinBot|TutorGigBot|TweetedTimes|TweetmemeBot|TwengaBot|TwengaBot-Discover|Twiceler|Twikle|twinuffbot|Twisted\ PageGetter|Twitturls|Twitturly|TygoBot|TygoProwler|Typhoeus|U.S.\ Government\ Printing\ Office|uberbot|ucb-nutch|UCSD-Crawler|UdmSearch|UFAM-crawler-|Ultraseek|UnChaos|unchaos_crawler_|UnisterBot|UniversalSearch|UnwindFetchor|UofTDB_experiment|updated|URI::Fetch|url_gather|URL-Checker|URL\ Control|URLAppendBot|URLBlaze|urlchecker|urlck|UrlDispatcher|urllib|URLSpiderPro|URLy.Warning|USAF\ AFKN\|usasearch|USS-Cosmix|USyd-NLP-Spider|Vacobot|Vacuum|VadixBot|Vagabondo|Validator|Valkyrie|vBSEO|VCI|VerbstarBot|VeriCiteCrawler|Verifactrola|Verity-URL-Gateway|vermut|versus|versus.integis.ch|viasarchivinginformation.html|vikspider|VIP|VIPr|virus-detector|VisBot|Vishal\ For\ CLIA|VisWeb|vlad|vlsearch|VMBot|VocusBot|VoidEYE|VoilaBot|Vortex|voyager|voyager-hc|voyager-partner-deep|VSE|vspider).*$" />
<add input="{HTTP_USER_AGENT}" pattern="^.*(W3C_Unicorn|W3C-WebCon|w3m|w3search|wacbot|wastrix|Water\ Conserve|Water\ Conserve\ Portal|WatzBot|wauuu\ engine|Wavefire|Waypath|Wazzup|Wazzup1.0.4800|wbdbot|web-agent|Web-Sniffer|Web.Image.Collector|Web\ CEO\ Online|Web\ Image\ Collector|Web\ Link\ Validator|Web\ Magnet|webalta|WebaltBot|WebAuto|webbandit|webbot|webbul-bot|WebCapture|webcheck|Webclipping.com|webcollage|WebCopier|WebCopy|WebCorp|webcrawl.net|webcrawler|WebDownloader\ for|Webdup|WebEMailExtrac|WebEMailExtrac.*|WebEnhancer|WebFerret|webfetch|WebFetcher|WebGather|WebGo\ IS|webGobbler|WebImages|Webinator-search2.fasthealth.com|Webinator-WBI|WebIndex|WebIndexer|weblayers|WebLeacher|WeblexBot|WebLinker|webLyzard|WebmasterCoffee|WebmasterWorld|WebmasterWorldForumBot|WebMiner|WebMoose|WeBot|WebPix|WebReaper|WebRipper|WebSauger|Webscan|websearchbench|WebSite|websitemirror|WebSpear|websphinx.test|WebSpider|Webster|Webster.Pro|Webster\ Pro|WebStripper|WebTrafficExpress|WebTrends\ Link\ Analyzer|webvac|webwalk|WebWalker|Webwasher|WebWatch|WebWhacker|WebXM|WebZip|Weddings.info|wenbin|WEPA|WeRelateBot|Whacker|Widow|WikiaBot|Wikio|wikiwix-bot-|WinHttp.WinHttpRequest|WinHTTP\ Example|WIRE|wired-digital-newsbot|WISEbot|WISENutbot|wish-la|wish-project|wisponbot|WMCAI-robot|wminer|WMSBot|woriobot|worldshop|WorQmada|Wotbox|WPScan|wume_crawler|WWW-Mechanize|www.freeloader.com.|WWW\ Collector|WWWOFFLE|wwwrobot|wwwster|WWWWanderer|wwwxref|Wysigot|X-clawler|Xaldon|Xenu|Xenu's|Xerka\ MetaBot|XGET|xirq|XmarksFetch|XoviBot|xqrobot|Y!J|Y!TunnelPro|yacy.net|yacybot|yarienavoir.net|Yasaklibot|yBot|YebolBot|yellowJacket|yes|YesupBot|Yeti|YioopBot|YisouSpider|yolinkBot|yoogliFetchAgent|yoono|Yoriwa|YottaCars_Bot|you-dir|Z-Add\ Link|zagrebin|Zao|zedzo.digest|zedzo.validate|zermelo|Zeus|Zeus\ Link\ Scout|zibber-v|zimeno|Zing-BottaBot|ZipppBot|zmeu|ZoomSpider|ZuiBot|ZumBot|Zyborg|Zyte).*$" />
<add input="{HTTP_USER_AGENT}" pattern="^.*(Nessus|NESSUS::SOAP|nestReader|Net::Trackback|NetAnts|NetCarta\ CyberPilot\ Pro|Netcraft|NetID.com|NetMechanic|Netprospector|NetResearchServer|NetScoop|NetSeer|NetShift=|NetSongBot|Netsparker|NetSpider|NetSrcherP|NetZip|NetZip-Downloader|NewMedhunt|news|News_Search_App|NewsGatherer|Newsgroupreporter|NewsTroveBot|NextGenSearchBot|nextthing.org|NHSEWalker|nicebot|NICErsPRO|niki-bot|NimbleCrawler|nimbus-1|ninetowns|Ninja|NjuiceBot|NLese|Nogate|Nomad-V2.x|NoteworthyBot|NPbot|NPBot-|NRCan\ intranet|NSDL_Search_Bot|nu_tch-princeton|nuggetize.com|nutch|nutch1|NutchCVS|NutchOrg|NWSpider|Nymesis|nys-crawler|ObjectsSearch|oBot|Obvius\ external\ linkcheck|Occam|Ocelli|Octopus|ODP\ entries|Offline.Explorer|Offline\ Explorer|Offline\ Navigator|OGspider|OmiExplorer_Bot|OmniExplorer_Bot|omnifind|OmniWeb|OnetSzukaj|online\ link\ validator|OOZBOT|Openbot|Openfind|Openfind\ data|OpenHoseBot|OpenIntelligenceData|OpenISearch|OpenSearchServer_Bot|OpiDig|optidiscover|OrangeBot|ORISBot|ornl_crawler_1|ORNL_Mercury|osis-project.jp|OsO|OutfoxBot|OutfoxMelonBot|OWLER-BOT|owsBot|ozelot|P3P\ Client|page_verifier|PageBitesHyperBot|Pagebull|PageDown|PageFetcher|PageGrabber|PagePeeker|PageRank\ Monitor|pamsnbot.htm|Panopy|panscient.com|Pansophica|Papa\ Foto|PaperLiBot|parasite|parsijoo|Pathtraq|Pattern|Patwebbot|pavuk|PaxleFramework|PBBOT|pcBrowser|pd-crawler|PECL::HTTP|penthesila|PeoplePal|perform_crawl|PerMan|PGP-KA|PHPCrawl|PhpDig|PicoSearch|pipBot|pipeLiner|Pita|pixfinder|PiyushBot|planetwork|PleaseCrawl|Plucker|Plukkie|Plumtree|Pockey|Pockey-GetHTML|PoCoHTTP|pogodak.ba|Pogodak.co.yu|Poirot|polybot|Pompos|Poodle\ predictor|PopScreenBot|PostPost|PrivacyFinder|ProjectWF-java-test-crawler|ProPowerBot|ProWebWalker|PROXY|psbot|psbot-page|PSS-Bot|psycheclone|pub-crawler|pucl|pulseBot\ \(pulse|Pump|purebot|PWeBot|pycurl|Python-urllib|pythonic-crawler|PythonWikipediaBot|q1|QEAVis\ agent|QFKBot|qualidade|Qualidator.com|QuepasaCreep|QueryN.Metasearch|QueryN\ 
Metasearch|quest.durato|Quintura-Crw|QunarBot|Qweery_robot.txt_CheckBot|QweeryBot|r2iBot|R6_CommentReader|R6_FeedFetcher|R6_VoteReader|RaBot|Radian6|radian6_linkcheck|RAMPyBot|RankurBot|RcStartBot|RealDownload|Reaper|REBI-shoveler|Recorder|RedBot|RedCarpet|ReGet|RepoMonkey|RepoMonkey\ Bait|Riddler|RIIGHTBOT|RiseNetBot|RiverglassScanner|RMA|RoboPal|Robosourcer|robot|robotek|robots|Robozilla|rogerBot|Rome\ Client|Rondello|Rotondo|Roverbot|RPT-HTTPClient|rtgibot|RufusBot|Runnk\ online\ rss\ reader|s~stremor-crawler|S2Bot|SafariBookmarkChecker|SaladSpoon|Sapienti|SBIder|SBL-BOT|SCFCrawler|Scich|ScientificCommons.org|ScollSpider|ScooperBot|Scooter|ScoutJet|ScrapeBox|Scrapy|SCrawlTest|Scrubby|scSpider|Scumbot|SeaMonkey$|Search-Channel|Search-Engine-Studio|search.KumKie.com|search.msn.com|search.updated.com|search.usgs.gov|Search\ Publisher|Searcharoo.NET|SearchBlox|searchbot|searchengine|searchhippo.com|SearchIt-Bot|searchmarking|searchmarks|searchmee_v|SearchmetricsBot|searchmining|SearchnowBot_v1|searchpreview|SearchSpider.com|SearQuBot|Seekbot|Seeker.lookseek.com|SeeqBot|seeqpod-vertical-crawler|Selflinkchecker|Semager|semanticdiscovery|Semantifire1|semisearch|SemrushBot|Senrigan|SEOENGWorldBot|SeznamBot|ShablastBot|ShadowWebAnalyzer|Shareaza|Shelob|sherlock|ShopWiki|ShowLinks|ShowyouBot|siclab|silk|Siphon|SiteArchive|SiteCheck-sitecrawl|sitecheck.internetseer.com|SiteFinder|SiteGuardBot|SiteOrbiter|SiteSnagger|SiteSucker|SiteSweeper|SiteXpert|SkimBot|SkimWordsBot|SkreemRBot|skygrid|Skywalker|Sleipnir|slow-crawler|SlySearch|smart-crawler|SmartDownload|Smarte|smartwit.com|Snake|Snapbot|SnapPreviewBot|Snappy|snookit|Snooper|Snoopy|SocialSearcher|SocSciBot|SOFT411\ Directory|sogou|sohu-search|sohu\ agent|Sokitomi|Solbot|sootle|Sosospider|Space\ Bison|Space\ Fung|SpaceBison|SpankBot|spanner|Spatineo\ Monitor\ 
Controller|special_archiver|SpeedySpider|Sphider|Sphider2|spider|Spider.TerraNautic.net|SpiderEngine|SpiderKU|SpiderMan|Spinn3r|Spinne|sportcrew-Bot|spyder3.microsys.com|sqlmap|Squid-Prefetch|SquidClamAV_Redirector|Sqworm|SrevBot|sslbot|SSM\ Agent|StackRambler|StarDownloader|statbot|statcrawler|statedept-crawler|Steeler|STEGMANN-Bot|stero|Stripper|Stumbler|suchclip|sucker|SumeetBot|SumitBot|SummizeBot|SummizeFeedReader|SuperBot|superbot.com|SuperHTTP|SuperLumin|SuperPagesBot|Supybot|SURF|Surfbot|SurfControl|SurveyBot|suzuran|SWEBot|swish-e|SygolBot|SynapticWalker|Syntryx\ ANT\ Scout\ Chassis\ Pheromone|SystemSearch-robot|Szukacz).*$" />
</conditions>
<action type="CustomResponse" statusCode="403" statusReason="Forbidden" statusDescription="Forbidden" />
</rule>
Your pattern MJ12bot|spbot|YandexBot is a regular-expression pattern, but the rule's pattern syntax is configured as Wildcard, so no matches are found.
Remove the patternSyntax="Wildcard" attribute from your configuration and replace <match url="*" /> with <match url=".*" />; it will then work as expected.
<!-- Blocks requests from the listed crawlers by matching the User-Agent
     header against a regex alternation, returning 403 Forbidden.
     NOTE: the "|" alternation only works when the rule uses the default
     ECMAScript regex pattern syntax; under patternSyntax="Wildcard" the
     pipe is treated as a literal character and nothing ever matches. -->
<rule name="RequestBlockingRule1" stopProcessing="true">
<match url=".*" />
<conditions logicalGrouping="MatchAny">
<add input="{HTTP_USER_AGENT}" pattern="MJ12bot|spbot|YandexBot" />
</conditions>
<action type="CustomResponse" statusCode="403" statusReason="Forbidden: Access is denied." statusDescription="You do not have permission to view this directory or page using the credentials that you supplied." />
</rule>

URL rewriting does not work on the server, but works fine on localhost with ASP.NET — why?

<urlrewritingnet rewriteOnlyVirtualUrls="true" contextItemsPrefix="QueryString" defaultPage="default.aspx" defaultProvider="RegEx" xmlns="http://www.urlrewriting.net/schemas/config/2006/07">
  <!-- UrlRewritingNet rule set: maps friendly virtual URLs onto .aspx pages.
       FIX: a literal "&" inside an attribute value is not well-formed XML and
       makes the parser reject web.config; every "&" in a destinationUrl query
       string must be escaped as "&amp;". -->
  <rewrites>
    <add name="book" virtualUrl="^~/book/(.*?)/([0-9]+)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/book.aspx?title=$1&amp;id=$2" ignoreCase="true" processing="stop" />
    <add name="page" virtualUrl="^~/page/(.*?)/([0-9]+)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/page/default.aspx?title=$1&amp;id=$2" ignoreCase="true" processing="stop" />
    <add name="register" virtualUrl="^~/register/(.*?)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/register.aspx?title=$1" ignoreCase="true" processing="stop" />
    <add name="login" virtualUrl="^~/login$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/login.aspx" ignoreCase="true" processing="stop" />
    <add name="download" virtualUrl="^~/download/(.*?)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/download.aspx?title=$1" ignoreCase="true" processing="stop" />
    <add name="forgotpass" virtualUrl="^~/passwordrecovery$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/forgotPassword.aspx" ignoreCase="true" processing="stop" />
    <add name="newsarchive" virtualUrl="^~/news-archive$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/newsarchive.aspx" ignoreCase="true" processing="stop" />
    <add name="newsarchivekeywords" virtualUrl="^~/news-archive/(.*?)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/newsarchive.aspx?title=$1" ignoreCase="true" processing="stop" />
    <add name="news" virtualUrl="^~/news/(.*?)/([0-9]+)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/news.aspx?title=$1&amp;id=$2" ignoreCase="true" processing="stop" />
    <add name="articlearchive" virtualUrl="^~/article-archive$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/articlearchive.aspx" ignoreCase="true" processing="stop" />
    <add name="articlearchivekeywords" virtualUrl="^~/article-archive/(.*?)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/articlearchive.aspx?title=$1" ignoreCase="true" processing="stop" />
    <add name="article" virtualUrl="^~/article/(.*?)/([0-9]+)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/article.aspx?title=$1&amp;id=$2" ignoreCase="true" processing="stop" />
    <add name="search" virtualUrl="^~/search/(.*?)$" rewriteUrlParameter="ExcludeFromClientQueryString" destinationUrl="~/search.aspx?s=$1" ignoreCase="true" processing="stop" />
  </rewrites>
</urlrewritingnet>
Here is my web.config. Everything works on localhost (IIS), but after publishing to the server I just got this:
404 - File or directory not found.
The resource you are looking for might have been removed, had its name changed, or is temporarily unavailable.
Just add the following to your web.config and the problem is solved:
<system.web>
<httpModules>
<!-- Registers the UrlRewritingNet HTTP module so virtual URLs are rewritten
     on the server. NOTE(review): <httpModules> is honored by the Classic
     pipeline; under IIS7+ integrated mode the module presumably must also be
     registered in <system.webServer><modules> - confirm against the server's
     application-pool pipeline mode. -->
<add name="UrlRewriteModule"
type="UrlRewritingNet.Web.UrlRewriteModule, UrlRewritingNet.UrlRewriter" />
</httpModules>
</system.web>

Azure Web Role MVC5 does not start in integrated mode

After upgrading my Azure Web Role project from MVC4 and SDK 2.2 to SDK 2.4 and MVC5, the application starts only in the emulator, but not in the cloud (empty page or "Page cannot be displayed").
After logging in remotely on the VM and changing the application pool mode from integrated to classic, the page also works with the new settings. Changes to the handler settings in web.config were unsuccessful.
Current web.config with MVC5 and SDK2.4:
<system.webServer>
  <!-- Suppress integrated-pipeline validation errors for this custom config. -->
  <validation validateIntegratedModeConfiguration="false" />
  <modules>
    <add name="PageSpeedModule" type="CheckMyBus.Web.Frontend.Modules.ProtectionModule" preCondition="managedHandler" />
  </modules>
  <handlers>
    <!-- Re-register the extensionless-URL handler with an extended verb list
         for MVC routing; OPTIONS/TRACE verb handlers are removed. -->
    <remove name="ExtensionlessUrlHandler-Integrated-4.0" />
    <remove name="OPTIONSVerbHandler" />
    <remove name="TRACEVerbHandler" />
    <remove name="ExtensionlessUrlHandler-ISAPI-4.0_32bit" />
    <remove name="ExtensionlessUrlHandler-ISAPI-4.0_64bit" />
    <add name="ExtensionlessUrlHandler-Integrated-4.0" path="*." verb="GET,HEAD,POST,DEBUG,PUT,DELETE,PATCH,OPTIONS" type="System.Web.Handlers.TransferRequestHandler" preCondition="integratedMode,runtimeVersionv4.0" />
  </handlers>
  <urlCompression doStaticCompression="true" doDynamicCompression="true" />
  <httpCompression>
    <dynamicTypes>
      <add mimeType="application/json" enabled="true" />
      <add mimeType="application/json; charset=utf-8" enabled="true" />
    </dynamicTypes>
  </httpCompression>
  <staticContent>
    <clientCache setEtag="false" />
  </staticContent>
  <httpErrors errorMode="DetailedLocalOnly" existingResponse="PassThrough" />
  <rewrite>
    <rules>
      <!-- FIX: the original pattern contained a literal double quote inside
           the attribute value ((.*)("|%22)(.*)), which is not well-formed XML
           and makes IIS reject the whole web.config; it must be escaped as
           &quot;. Rule strips quote characters from the URL. -->
      <rule name="RemoveIllegalCharacters">
        <match url="(.*)(&quot;|%22)(.*)" />
        <action type="Rewrite" url="{R:1}{R:3}" />
      </rule>
      <!-- Redirect "/path/" to "/path" when it is neither a real file nor directory. -->
      <rule name="RemoveTrailingSlashRule1" stopProcessing="true">
        <match url="(.*)/$" />
        <conditions>
          <add input="{REQUEST_FILENAME}" matchType="IsDirectory" negate="true" />
          <add input="{REQUEST_FILENAME}" matchType="IsFile" negate="true" />
        </conditions>
        <action type="Redirect" url="{R:1}" />
      </rule>
      <!-- Map cache-busted asset URLs (e.g. app;v12.css) back to the real file. -->
      <rule name="Remove Version from static files" stopProcessing="true">
        <match url="(styles|scripts|images)/(.+);v[0-9]+\.(css|js|gif|png|jpg|ico)" />
        <action type="Rewrite" url="{R:1}/{R:2}.{R:3}" />
      </rule>
      <!-- Canonicalize any URL containing uppercase letters to lowercase. -->
      <rule name="EnforceLowerCase" stopProcessing="true">
        <match url="[A-Z]" ignoreCase="false" />
        <action type="Redirect" url="{ToLower:{URL}}" />
      </rule>
    </rules>
  </rewrite>
</system.webServer>
Last web.config works fine with MVC4 and SDK2.2:
<system.webServer>
  <!-- Suppress integrated-pipeline validation errors for this custom config. -->
  <validation validateIntegratedModeConfiguration="false" />
  <modules>
    <add name="PageSpeedModule" type="CheckMyBus.Web.Frontend.Modules.ProtectionModule" preCondition="managedHandler" />
  </modules>
  <handlers>
    <!-- Handlers are registered for both Classic (ISAPI) and Integrated
         pipelines. FIX: the original added ExtensionlessUrlHandler-Integrated-4.0
         once, then removed and re-added it with verb="*"; a single add with
         verb="*" has the same net effect. -->
    <remove name="ExtensionlessUrlHandler-ISAPI-4.0_32bit" />
    <remove name="ExtensionlessUrlHandler-ISAPI-4.0_64bit" />
    <remove name="ExtensionlessUrlHandler-Integrated-4.0" />
    <remove name="OPTIONSVerbHandler" />
    <remove name="TRACEVerbHandler" />
    <add name="ExtensionlessUrlHandler-ISAPI-4.0_32bit" path="*." verb="GET,HEAD,POST,DEBUG,PUT,DELETE,PATCH,OPTIONS" modules="IsapiModule" scriptProcessor="%windir%\Microsoft.NET\Framework\v4.0.30319\aspnet_isapi.dll" preCondition="classicMode,runtimeVersionv4.0,bitness32" responseBufferLimit="0" />
    <add name="ExtensionlessUrlHandler-ISAPI-4.0_64bit" path="*." verb="GET,HEAD,POST,DEBUG,PUT,DELETE,PATCH,OPTIONS" modules="IsapiModule" scriptProcessor="%windir%\Microsoft.NET\Framework64\v4.0.30319\aspnet_isapi.dll" preCondition="classicMode,runtimeVersionv4.0,bitness64" responseBufferLimit="0" />
    <add name="ExtensionlessUrlHandler-Integrated-4.0" path="*." verb="*" type="System.Web.Handlers.TransferRequestHandler" preCondition="integratedMode,runtimeVersionv4.0" />
  </handlers>
  <urlCompression doStaticCompression="true" doDynamicCompression="true" />
  <httpCompression>
    <dynamicTypes>
      <add mimeType="application/json" enabled="true" />
      <add mimeType="application/json; charset=utf-8" enabled="true" />
    </dynamicTypes>
  </httpCompression>
  <staticContent>
    <clientCache setEtag="false" />
  </staticContent>
  <httpErrors errorMode="DetailedLocalOnly" existingResponse="PassThrough" />
  <rewrite>
    <rules>
      <!-- FIX: literal double quote inside the match url attribute is not
           well-formed XML; escaped as &quot;. Rule strips quote characters. -->
      <rule name="RemoveIllegalCharacters">
        <match url="(.*)(&quot;|%22)(.*)" />
        <action type="Rewrite" url="{R:1}{R:3}" />
      </rule>
      <!-- Redirect "/path/" to "/path" when it is neither a real file nor directory. -->
      <rule name="RemoveTrailingSlashRule1" stopProcessing="true">
        <match url="(.*)/$" />
        <conditions>
          <add input="{REQUEST_FILENAME}" matchType="IsDirectory" negate="true" />
          <add input="{REQUEST_FILENAME}" matchType="IsFile" negate="true" />
        </conditions>
        <action type="Redirect" url="{R:1}" />
      </rule>
      <!-- Map cache-busted asset URLs (e.g. app;v12.css) back to the real file. -->
      <rule name="Remove Version from static files" stopProcessing="true">
        <match url="(styles|scripts|images)/(.+);v[0-9]+\.(css|js|gif|png|jpg|ico)" />
        <action type="Rewrite" url="{R:1}/{R:2}.{R:3}" />
      </rule>
      <!-- Canonicalize any URL containing uppercase letters to lowercase. -->
      <rule name="EnforceLowerCase" stopProcessing="true">
        <match url="[A-Z]" ignoreCase="false" />
        <action type="Redirect" url="{ToLower:{URL}}" />
      </rule>
    </rules>
  </rewrite>
</system.webServer>
Can anyone help me?
Having upgraded to MVC5 manually, I didn't realize that the pages clause in the Views web.config had THREE references to System.Web.Mvc. That made the web role recycle forever.
Fixing this solved my recycling issue.

Resources