I mentioned that v1.1 of ASP.NET by default validates input received from QueryString, Form and Cookie scope. You can turn off this validation site wide by tweaking the web.config:
<configuration>
<system.web>
<pages validateRequest="false" />
</system.web>
</configuration>
But then you’re left with no validation right? Wrong. You can use the ValidateInput() method of the HttpRequest object programmatically in any code that has access to the HttpRequest instance. Very useful stuff.
One question though: What is potentially dangerous data according to Microsoft? And can you modify that definition? I’m guessing the answers are: a) we’ll never know and b) no. Given their track record, does it make sense to trust Microsoft to validate the input you receive from client browsers when the browser they created can’t be trusted?
I decompiled the method with Reflector; here are the results.
// Programmatically re-enables request validation for this request by
// setting the internal validation flag bits. Indices 1, 2 and 4
// presumably correspond to the QueryString, Form and Cookies
// collections — confirm against the _flags bit-vector definition.
public void ValidateInput()
{
    int[] validationFlagIndices = { 1, 2, 4 };
    foreach (int flagIndex in validationFlagIndices)
    {
        this._flags[flagIndex] = true;
    }
}
// Runs the dangerous-input check over every key/value pair of one request
// collection (QueryString, Form or Cookies); throws via ValidateString on
// the first suspicious value.
// NOTE(review): the published snippet was corrupted by HTML angle-bracket
// stripping (the loop condition and guard were eaten); restored here from
// the known .NET 1.1 System.Web decompilation — verify against Reflector.
private void ValidateNameValueCollection(NameValueCollection nvc, string collectionName)
{
    int num1 = nvc.Count;
    for (int num2 = 0; num2 < num1; num2++)
    {
        string text1 = nvc.GetKey(num2);
        string text2 = nvc.Get(num2);
        // Null or empty values cannot contain dangerous markup; skip them.
        if ((text2 != null) && (text2.Length > 0))
        {
            this.ValidateString(text2, text1, collectionName);
        }
    }
}
// Validates one request value. If IsDangerousString flags it, builds a
// short excerpt (up to 10 chars before and 20 after the match) for the
// error message and throws HttpRequestValidationException.
// NOTE(review): the published snippet was corrupted by HTML angle-bracket
// stripping and smart-quote substitution ("if (num2 = s.Length)" is a
// fused remnant of two separate bounds checks); restored from the known
// .NET 1.1 System.Web decompilation — verify against Reflector.
private void ValidateString(string s, string valueName, string collectionName)
{
    s = this.RemoveNullCharacters(s);
    int num1 = 0;
    if (CrossSiteScriptingValidation.IsDangerousString(s, out num1))
    {
        string text1 = valueName + "=\"";
        // Start the excerpt 10 chars before the match, clamped to 0.
        int num2 = num1 - 10;
        if (num2 < 0)
        {
            num2 = 0;
        }
        // End the excerpt 20 chars past the match; append "..." only
        // when the value was actually truncated.
        int num3 = num1 + 20;
        if (num3 >= s.Length)
        {
            num3 = s.Length;
            text1 = text1 + s.Substring(num2, num3 - num2) + "\"";
        }
        else
        {
            text1 = text1 + s.Substring(num2, num3 - num2) + "...\"";
        }
        throw new HttpRequestValidationException(HttpRuntime.FormatResourceString("Dangerous_input_detected", collectionName, text1));
    }
}
As you can see, it makes use of the CrossSiteScriptingValidation class, an internal class in System.Web.dll. Its declaration is as follows.
// Reflector member listing (signatures only) of the internal class that
// implements ASP.NET 1.1 request validation. Because it is internal to
// System.Web.dll, user code cannot extend or reconfigure its definition
// of "dangerous" input.
internal class CrossSiteScriptingValidation
{
// Methods
static CrossSiteScriptingValidation(); // type initializer — presumably populates startingChars; confirm
internal CrossSiteScriptingValidation();
private static bool IsAtoZ(char c); // ASCII letter test used by the scanners below
private static bool IsDangerousExpressionString(string s, int index); // CSS expression(...) check
private static bool IsDangerousOnString(string s, int index); // on*= event-handler attribute check
private static bool IsDangerousScriptString(string s, int index); // script: protocol check
internal static bool IsDangerousString(string s, out int matchIndex); // entry point; matchIndex = offset of the hit
// Fields
private static char[] startingChars; // trigger characters that start a scan — contents not visible here
}
All of its methods are static. The implementations are:
// NOTE(review): in the published snippet these two methods were fused into
// one garbled body by HTML angle-bracket stripping; both are restored here
// from the known .NET 1.1 System.Web decompilation — verify against Reflector.

// Returns true only for ASCII letters a-z / A-Z (no Unicode letters).
private static bool IsAtoZ(char c)
{
    if ((c >= 'a') && (c <= 'z'))
    {
        return true;
    }
    return ((c >= 'A') && (c <= 'Z'));
}

// Called with index at a suspected 'e'; returns true when the text at
// index spells "expression(" (case-insensitive) — the CSS expression()
// vector. Needs at least 11 chars from index, hence the bounds guard.
private static bool IsDangerousExpressionString(string s, int index)
{
    if ((index + 10) >= s.Length)
    {
        return false;
    }
    if ((s[index + 1] != 'x') && (s[index + 1] != 'X'))
    {
        return false;
    }
    // Case-insensitive, culture-invariant tail comparison for "pression(".
    return (string.Compare(s, index + 2, "pression(", 0, 9, true, CultureInfo.InvariantCulture) == 0);
}
// NOTE(review): in the published snippet these two methods were fused at the
// first while-loop and truncated at the end by HTML angle-bracket stripping;
// both are restored here from the known .NET 1.1 System.Web decompilation —
// verify against Reflector.

// Called with index at a suspected 'o'; returns true for an HTML event
// handler pattern: "on", more letters, optional whitespace, then '='
// (e.g. "onclick ="). A letter immediately before "on" means it is part
// of a longer word, so it is not flagged.
private static bool IsDangerousOnString(string s, int index)
{
    if ((s[index + 1] != 'n') && (s[index + 1] != 'N'))
    {
        return false;
    }
    if ((index > 0) && CrossSiteScriptingValidation.IsAtoZ(s[index - 1]))
    {
        return false;
    }
    int num1 = s.Length;
    index += 2;
    // Skip the rest of the attribute name (letters only).
    while ((index < num1) && CrossSiteScriptingValidation.IsAtoZ(s[index]))
    {
        index++;
    }
    // Skip whitespace between the name and the '='.
    while ((index < num1) && char.IsWhiteSpace(s[index]))
    {
        index++;
    }
    return ((index < num1) && (s[index] == '='));
}

// Called with index at a suspected 's'; returns true for "script",
// optional whitespace, then ':' (the javascript:/vbscript: protocol
// pattern). Needs at least 7 chars from index, hence the bounds guard.
private static bool IsDangerousScriptString(string s, int index)
{
    int num1 = s.Length;
    if ((index + 6) >= num1)
    {
        return false;
    }
    if (((s[index + 1] != 'c') && (s[index + 1] != 'C')) ||
        ((s[index + 2] != 'r') && (s[index + 2] != 'R')) ||
        ((s[index + 3] != 'i') && (s[index + 3] != 'I')) ||
        ((s[index + 4] != 'p') && (s[index + 4] != 'P')) ||
        ((s[index + 5] != 't') && (s[index + 5] != 'T')))
    {
        return false;
    }
    index += 6;
    // Skip whitespace between "script" and the ':'.
    while ((index < num1) && char.IsWhiteSpace(s[index]))
    {
        index++;
    }
    return ((index < num1) && (s[index] == ':'));
}
I developed a custom HttpModule for this issue, with which you can write rules for your own filtering purposes and are no longer forced to use the Microsoft ones.
you can get the latest version from workspaces.gotdotnet.com/defapp or http://www.yazilimguvenligi.com