Method function at line 200 of .../js/general.js gets user input for the
data element. This element’s value then flows through client-side code without being properly sanitized or
validated, and is eventually displayed to the user in loadXML at line 241 of .../js/general.js. This may enable a DOM XSS attack.
The script is as follows:
success: function(data){
var xmlObj = loadXML(data);
........}
function loadXML(xmlString){
xmlString = xmlString.replace(/^\s*|\s(?=\s)|\s*$/g, "");
xmlString = xmlString.replace(/(&)(?!amp;)/gi, "&amp;");
try{
xmlDoc=new ActiveXObject("Microsoft.XMLDOM");
xmlDoc.async="false";
xmlDoc.loadXML(xmlString);
}catch(e){
try{
parser=new DOMParser();
xmlDoc=parser.parseFromString(xmlString,"text/xml");
}catch(e){
alert(e.message);
return;
}
}
return xmlDoc.documentElement;
}
The possible DOM XSS sink appears to be xmlDoc.loadXML(xmlString).
Could someone help me figure out how to sanitize the above scenario to satisfy Checkmarx?
Thanks!!
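For reference, a common mitigation pattern for this kind of finding (only a sketch, not a guaranteed Checkmarx-satisfying fix) is to treat everything extracted from the parsed XML as plain text when it is rendered: write values into the page with textContent/createTextNode instead of innerHTML, or HTML-encode them first. The "output" element, the "title" node and the encodeHTML helper below are all hypothetical:
// Minimal sketch, assuming values from the parsed XML are later written into the DOM.
function encodeHTML(str) {
    return String(str)
        .replace(/&/g, "&amp;")
        .replace(/</g, "&lt;")
        .replace(/>/g, "&gt;")
        .replace(/"/g, "&quot;")
        .replace(/'/g, "&#39;");
}
function renderResult(xmlRoot) {
    var titleNode = xmlRoot ? xmlRoot.getElementsByTagName("title")[0] : null;
    var text = titleNode ? (titleNode.textContent || titleNode.text || "") : "";
    // Safe: textContent treats the value as plain text, never as markup.
    document.getElementById("output").textContent = text;
    // If innerHTML is unavoidable, encode first:
    // document.getElementById("output").innerHTML = encodeHTML(text);
}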
In our Android project (a download manager) we need to show a built-in web browser so that we can catch downloads there with all of their data (headers, cookies, POST data) and handle them properly.
Unfortunately, the WebView control we use does not provide any way to access the POST data of the requests it makes.
So we use a hacky way to get this data: we inject the following JavaScript code into each HTML page the browser loads:
<script language="JavaScript">
HTMLFormElement.prototype._submit = HTMLFormElement.prototype.submit;
HTMLFormElement.prototype.submit = formSubmitMonitor;
window.addEventListener('submit', function(e) {
formSubmitMonitor(e);
}, true);
function formSubmitMonitor(e) {
var frm = e ? e.target : this;
formSubmitMonitor_onsubmit(frm);
frm._submit();
}
function formSubmitMonitor_onsubmit(f) {
var data = "";
for (var i = 0; i < f.elements.length; i++) {
var name = f.elements[i].name;
var value = f.elements[i].value;
//var type = f.elements[i].type;
if (name)
{
if (data !== "")
data += '&';
data += encodeURIComponent(name) + '=' + encodeURIComponent(value);
}
}
postDataMonitor.onBeforeSendPostData(
f.attributes['method'] === undefined ? null : f.attributes['method'].nodeValue,
new URL(f.action, document.baseURI).href,
data,
f.attributes['enctype'] === undefined ? null : f.attributes['enctype'].nodeValue);
}
XMLHttpRequest.prototype.origOpen = XMLHttpRequest.prototype.open;
XMLHttpRequest.prototype.open = function(method, url, async, user, password) {
// these will be the key to retrieve the payload
this.recordedMethod = method;
this.recordedUrl = url;
this.origOpen(method, url, async, user, password);
};
XMLHttpRequest.prototype.origSend = XMLHttpRequest.prototype.send;
XMLHttpRequest.prototype.send = function(body) {
if (body)
{
postDataMonitor.onBeforeSendPostData(
this.recordedMethod,
this.recordedUrl,
body,
null);
}
this.origSend(body);
};
const origFetch = window.fetch;
window.fetch = function()
{
postDataMonitor.onBeforeSendPostData(
"POST",
"test",
"TEST",
null);
return origFetch.apply(this, arguments);
}
</script>
Generally, it works fine.
But in the Google Mail web interface it does not work, for some unknown reason, e.g. when the user enters their login name and presses Next. I thought it was using the Fetch API, so I added interception for that too, but this did not help. Please note that we do not need to intercept the user credentials, but we need to be able to intercept everything or nothing. Unfortunately, this is the way the whole system works there...
Addition #1.
I've found another way: don't override shouldInterceptRequest, but override onPageStarted instead and call evaluateJavascript there. That way it works even on the Google Mail web site! But why is the first method not working then? Are we breaking the HTML code somehow?
I am trying to get my webhook to return a parsed JSON response from an API. I can log it on the console, but when I try to use app.tell, it gives me: TypeError: Cannot read property 'tell' of undefined. I am basically able to successfully get the data from the API, but I'm not able to use it in a response for some reason. Thanks for the help!
[Actions.API_TRY] () {
var request = http.get(url2, function (response) {
// data is streamed in chunks from the server
// so we have to handle the "data" event
var buffer = "",
data,
route;
response.on("data", function (chunk) {
buffer += chunk;
});
response.on("end", function (err) {
// finished transferring data
// dump the raw data
console.log(buffer);
console.log("\n");
data = JSON.parse(buffer);
route = data.routes[0];
// extract the distance and time
console.log("Walking Distance: " + route.legs[0].distance.text);
console.log("Time: " + route.legs[0].duration.text);
this.app.tell(route.legs[0].distance.text);
});
});
}
This looks to me to be more of a JavaScript scoping issue than anything else. The error message is telling you that app is undefined. Often in Actions, you find code like yours embedded in a function that is defined inside the intent handler, which is passed the instance of your Actions app (SDK or Dialogflow).
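For illustration, one common way to fix it is to capture the app instance (or use arrow functions, so that `this` is preserved) before the asynchronous callbacks run. This is only a sketch against the code above; the surrounding class/handler structure is assumed:
[Actions.API_TRY] () {
    const app = this.app;              // capture the Actions app instance up front
    http.get(url2, (response) => {     // arrow functions also keep the outer `this`
        let buffer = "";
        response.on("data", (chunk) => {
            buffer += chunk;
        });
        response.on("end", () => {
            const data = JSON.parse(buffer);
            const route = data.routes[0];
            console.log("Walking Distance: " + route.legs[0].distance.text);
            app.tell(route.legs[0].distance.text);   // `app` is defined here now
        });
    });
}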
I am trying to send a request parameter through to an 'exports' method for a MongoDB find in an Express.js/Backbone.js application. I am having a difficult time getting the parameters to pass through to MongoDB, and the '#' in the URL also seems to be involved.
The breakage is the passing of parameters into the exported mongodb function.
Here is the flow of data:
First the request is successfully routed to the 'upcoming' function:
"upcoming/uni/:uni" : "upcoming",
It flows on to the 'upcoming' function without a problem.
upcoming: function(uni) {
console.log("uni: "+uni);
pag.reset();
console.log("Hit upcoming list target");
setCollectionType('upcoming');
var upcomingCourses = buildCollection();
// ------------------------------------------------------------------------
// here is the problem how do I pass the parameter value through the fetch?
// Although it may also have to do with '#' please read on.
// ------------------------------------------------------------------------
upcomingCourses.fetch({success: function(){
$("#content").html(new ListView({model: upcomingCourses, page: 1}).el);
}});
this.headerView.selectMenuItem('home-menu');
},
The routing for the mongo methods is:
app.get('/upcoming/uni/:uni', mongomod.findUpcoming);
So the following method is exported from the MongoDB JS file and is executed reliably. However, the req.params are not passed through.
Interspersed in the code I have described its runtime behaviour:
exports.findUpcoming = function(req, res) {
console.log("university", req.params.uni); // This consistently is unpopulated
var uni = req.params.uni;
console.log("Size: "+req.params.length); // This will always be 0
for (var i=0; i < req.params.length; i++) {
console.log("Parameters: "+req.params[i]);
}
db.collection('upcoming', function(err, collection) {
if (typeof uni === 'undefined') {
console.log("The value is undefined");
uni = "Princeton University"; // here we add a string to test it it will work.
}
collection.find({university:uni}).toArray(function(err, items) {
if (err) {
console.log("Error: "+err);
} else {
console.log("No Error");
console.log("Count: "+items.length);
console.log(items[0]['university']);
res.send(items);
}
});
});
};
One additional and important note:
The url, in a working, runtime environment would be:
http://localhost:3000/#upcoming/uni/Exploratorium
This one fails, but the following URL will pass the params through these functions; however, it returns raw JSON to the screen rather than the rendered version:
http://localhost:3000/upcoming/uni/Exploratorium
The problem could be a misunderstanding of # and templates. If you can see the error, enlightenment would be greatly appreciated.
Nothing after the # gets passed to the server. See How to get hash in a server side language? or https://stackoverflow.com/a/318581/711902.
I found a solution to the problem of passing the parameters from the client side to the server side. By changing the url of the collection, the parameters will be passed through to the server:
upcomingCourses.url = "/upcoming/uni/"+uni; // <-- here's the ticket where uni is param
upcomingCourses.fetch({success: function(){
$("#content").html(new ListView({model: upcomingCourses, page: 1}).el);
}});
This can be made more elegant but it is a way to pass the parameters on to the server.
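For instance, a slightly more idiomatic variant (just a sketch; the collection and option names are assumptions) is to define url as a function on the collection, so the parameter is always included when fetch() builds the request:
var UpcomingCourses = Backbone.Collection.extend({
    initialize: function (models, options) {
        this.uni = options && options.uni;   // remember the university for this collection
    },
    url: function () {
        return "/upcoming/uni/" + encodeURIComponent(this.uni);
    }
});
var upcomingCourses = new UpcomingCourses([], { uni: uni });
upcomingCourses.fetch({
    success: function () {
        $("#content").html(new ListView({model: upcomingCourses, page: 1}).el);
    }
});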
Thanks
1. Enter/change something in a textarea
2. Before submitting the form, leave the page (e.g. by clicking the browser's back button)
3. Go back to the edit page (e.g. by clicking the forward button)
Expected result: the content entered in the textarea should still be there.
Actual result:
- with HTTPS: all changes are gone (bad!)
- with HTTP: the changes are still there (good!)
Why is this happening when using HTTPS? How can I prevent this? Is the browser or the website responsible?
You can consider the following solutions:
The autocomplete Attribute (HTML5)
This seems unrelated at first, since autocomplete tells the browser to complete fields with values based on earlier user input that was "submitted" with the form. But in my tests I saw that, after filling out the form without submitting, when I hit the forward (history) button and then hit back again, the form fields were auto-filled if I had set autocomplete="on", and all were cleared when it was set to "off".
So (if targeting HTML5 users) you can use this attribute to "cache" your form data. (This works in all major browsers, except Opera.)
<form action="/update" method="post" autocomplete="on">
Email: <input type="text" id="email" /><br />
Username: <input type="text" id="uname" /><br />
Password: <input type="password" id="pwd" autocomplete="off"/><br />
<input type="submit" />
</form>
Notice that you can set the auto-complete feature off for a specific field (password in this case) when the rest of the form controls are on.
MSDN Remarks:
If the autocomplete attribute is missing, the field will default to an 'on' state if the element has no parent form, or if the form has autocomplete set to 'on'.
Information provided by the AutoComplete feature is not exposed to the object model, and is not visible to a Web page until the user selects one of the suggestions as a value for the text field.
Save the Un-submitted Form Data Locally:
You can store the input data locally, right before the page redirect or on the focus-out event of every form control:
Cookies
Good old cookies can come in handy in this case, but you should consider the downsides:
Even though you can encrypt the values programmatically, since we will be working on the client side, cookies are not truly secure for this. Http-Only and Secure marked cookies will not help us here, because these options are used to enforce SSL when the cookie is "sent" (Secure) and to make it inaccessible from Javascript (Http-Only).
Browsers have a cookie size limit. From MSDN: "Most browsers support cookies of up to 4096 bytes. Because of this small limit, cookies are best used to store small amounts of data." So, if you don't watch this size (when you write the cookie, and/or by limiting the control's value via maxlength attributes), that could be a problem (and trimming the value is the worst thing you could do in this case).
Browsers also have a limit on the number of cookies that can be set per domain. So, when storing the form data in cookies, instead of setting a cookie for each form field value, you should merge them into one or a few cookies so that your site does not exceed this limit.
Still, the bright side is that cookies are supported by all browsers, and if you don't plan to "cache" sensitive or overly long data in them, you can use the following solution. If that is not the case, you had better go with the next suggestion: localStorage.
// Below is just a demonstration and is not tested thoroughly for
// production-ready web applications by any means.
// But it should give you an idea.
/**
* Caches the user-input data from the targeted form, stores it in the cookies
* and fetches back to the form when requested or needed.
*/
var formCache = (function () {
var _form = null,
_formData = [],
_strFormElements = "input[type='text'],"
+ "input[type='checkbox'],"
+ "input[type='radio'],"
// + "input[type='password']," // leave password field out
+ "input[type='hidden'],"
// + "input[type='image'],"
+ "input[type='file'],"
// more input types...
+ "input[type='email'],"
+ "input[type='tel'],"
+ "input[type='url'],"
+ "select,"
+ "textarea";
function _warn() {
console.log('formCache is not initialized.');
}
return {
/**
* Initializes the formCache with a target form (id).
* You can pass any container id for the formId parameter, formCache will
* still look for form elements inside the given container. If no form id
* is passed, it will target the first <form> element in the DOM.
*/
init: function (formId) {
var f = (typeof formId === 'undefined' || formId === null || $.trim(formId) === '')
? $('form').first()
: $('#' + formId);
_form = f.length > 0 ? f : null;
console.log(_form);
return formCache; // make it chainable
},
/**
* Stores the form data in the cookies.
*/
save: function () {
if (_form === null) return _warn();
_form
.find(_strFormElements)
.each(function() {
var f = $(this).attr('id') + ':' + formCache.getFieldValue($(this));
_formData.push(f);
});
docCookies.setItem('formData', _formData.join(), 31536e3); // 1 year expiration (persistent)
console.log('Cached form data:', _formData);
return formCache;
},
/**
* Fills out the form elements from the data previously stored in the cookies.
*/
fetch: function () {
if (_form === null) return _warn();
if (!docCookies.hasItem('formData')) return;
var fd = _formData.length < 1 ? docCookies.getItem('formData').split(',') : _formData;
$.each(fd, function (i, item) {
var s = item.split(':');
var elem = $('#' + s[0]);
formCache.setFieldValue(elem, s[1]);
});
return formCache;
},
/**
* Sets the value of the specified form field from previously stored data.
*/
setFieldValue: function (elem, value) {
if (_form === null) return _warn();
if (elem.is('input:text') || elem.is('input:hidden') || elem.is('input:image') ||
elem.is('input:file') || elem.is('textarea')) {
elem.val(value);
} else if (elem.is('input:checkbox') || elem.is('input:radio')) {
elem.prop('checked', value);
} else if (elem.is('select')) {
elem.prop('selectedIndex', value);
}
return formCache;
},
/**
* Gets the previously stored value of the specified form field.
*/
getFieldValue: function (elem) {
if (_form === null) return _warn();
if (elem.is('input:text') || elem.is('input:hidden') || elem.is('input:image') ||
elem.is('input:file') || elem.is('textarea')) {
return elem.val();
} else if (elem.is('input:checkbox') || elem.is('input:radio')) {
return elem.prop('checked');
} else if (elem.is('select')) {
return elem.prop('selectedIndex');
}
else return null;
},
/**
* Clears the cache and removes the previously stored form data from cookies.
*/
clear: function () {
_formData = [];
docCookies.removeItem('formData');
return formCache;
},
/**
* Clears all the form fields.
* This is different from form.reset() which only re-sets the fields
* to their initial values.
*/
clearForm: function () {
_form
.find(_strFormElements)
.each(function() {
var elem = $(this);
if (elem.is('input:text') || elem.is('input:password') || elem.is('input:hidden') ||
elem.is('input:image') || elem.is('input:file') || elem.is('textarea')) {
elem.val('');
} else if (elem.is('input:checkbox') || elem.is('input:radio')) {
elem.prop('checked', false);
} else if (elem.is('select')) {
elem.prop('selectedIndex', -1);
}
});
return formCache;
}
};
})();
// Save form data right before we unload the form-page
$(window).on('beforeunload', function (event) {
formCache.save();
return false;
});
// Initialize and fetch form data (if exists) when we load the form-page back
$(document).on('ready', function (event) {
formCache.init().fetch();
});
Here is a working demo on jsFiddle.
Note: The "cookies reader/writer" script from developer.mozilla.org should be included with the code above. You can also use Yahoo's YUI 2: Cookie Utility which has a useful setSub() method for setting sub-cookies inside a single cookie, for the browser limit that I previously mentioned.
localStorage
You can also use more modern techniques like localStorage (HTML5). It is more secure and faster. All major browsers support this feature, including IE 8+ (and it is also supported on iOS and Android!).
if (typeof Storage !== 'undefined') { // We have local storage support
localStorage.username = 'Onur'; // to save to local storage
document.getElementById('uname').value = localStorage.username; // to fetch from local storage
}
So, just like in the cookies example:
$(window).on('beforeunload', function (event) {
saveFormToLocalStorage();
return false;
});
$(document).on('ready', function (event) {
fillFormFromLocalStorage()
});
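The two helpers referenced above are not defined anywhere; here is a minimal sketch of what they could look like (it only handles text-like fields, and assumes every field has an id):
function saveFormToLocalStorage() {
    if (typeof Storage === 'undefined') return;
    $('form').first()
        .find("input[type='text'], input[type='email'], textarea")
        .each(function () {
            var id = $(this).attr('id');
            if (id) localStorage.setItem('form:' + id, $(this).val());
        });
}
function fillFormFromLocalStorage() {
    if (typeof Storage === 'undefined') return;
    $('form').first()
        .find("input[type='text'], input[type='email'], textarea")
        .each(function () {
            var id = $(this).attr('id');
            var saved = id ? localStorage.getItem('form:' + id) : null;
            if (saved !== null) $(this).val(saved);
        });
}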
SessionStorage
This works pretty much the same way. From W3C: The sessionStorage object is equal to the localStorage object, except that it stores the data for only one session.
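Usage is the same as with localStorage; only the lifetime differs (a quick sketch, field id assumed):
if (typeof sessionStorage !== 'undefined') {
    // save on the way out...
    sessionStorage.setItem('uname', document.getElementById('uname').value);
    // ...and restore on the way back in
    document.getElementById('uname').value = sessionStorage.getItem('uname') || '';
}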
Save Form Data to Server/DB via Silent AJAX Post(s):
This is not a very efficient way, but you might want to use it where the others are not feasible. You can make the post on the beforeunload event and prompt a message to the user.
$(window).on('beforeunload', function (event) {
//check if at least one field is filled out.
//make the AJAX post if filled out.
return "You are leaving the page without submitting the form...";
});
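Filling in those two comments, a hedged sketch could look like the following (the /save-draft endpoint is hypothetical, and note that browsers may cancel asynchronous requests started in beforeunload, so navigator.sendBeacon is more reliable where available):
$(window).on('beforeunload', function (event) {
    var draft = $('form').first().serialize();       // all named form fields
    if ($('#email').val() || $('#uname').val()) {    // at least one field is filled out
        if (navigator.sendBeacon) {
            navigator.sendBeacon('/save-draft', draft);  // survives the page unload
        } else {
            $.post('/save-draft', draft);                // may be cancelled by the browser
        }
    }
    return "You are leaving the page without submitting the form...";
});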
Retrieve Previously Saved Data from Server on Page Load:
Just to remind you; if the user is filling out an "update" form, for example; you can always fetch the previously saved data from the server and automatically fill in the form (non-sensitive fields).
Conclusion
If you really need this and it is worth the trouble, you should consider a cross-browser solution that implements a fall-back mechanism, such as:
IF you have support for HTML5 features, use the HTML5 autocomplete attribute. (You can embed the attribute in the HTML beforehand, or set it via Javascript/jQuery when you test for browser support.)
ELSE IF you have support for the Storage object, go with localStorage;
ELSE IF [cookies your current session stores] + [cookie size your form data needs] < 4096 bytes, then use cookies.
ELSE IF you have a server-side web app, make silent AJAX requests to store the data on the server.
ELSE don't do it.
Note: For HTML5 feature detection, take a look at this page or this page or you can use Modernizr.
HTTPS Problem:
The reason all form changes are gone when using HTTPS is that it is a secure protocol. Forms are mostly used for user input and can (probably) contain sensitive data, so this behavior seems natural and expected. The solution(s) I offer above will work the same over HTTPS as they do over HTTP, so that should cover all your concerns.
Further reading:
Autofilling form controls: the autocomplete attribute
HTML5 form autocomplete attribute
DOM Storage
HTML5 Web Storage
Future of Local Storage for Web
Cookies
This is what worked for me.
<select
class="form-select custom-select page-number-select"
(change)="onPageChange($event)"
data-test="XXXX"
[attr.aria-labelledby]="XXXX"
[value]="pageNumber" <---- This fixed the problem
>
<ng-container
*ngFor="let pageNumber of totalPageCount"
>
<option value="{{ pageNumber }}" [attr.selected]="pageNumber == page ? '' : null" >
{{ t('pageN', { pageNumber: pageNumber }) }}
</option>
</ng-container>
</select>
Binding the value coming from the stream to the value attribute ensured that the correct value is shown at all times, even on the browser's popstate events (back and forward button clicks).
How to submit dojo form using AJAX and if there are errors, print errors near incorrectly filled fields?
Right now I am doing something like this:
dojo.ready(function() {
var form = dojo.byId("user_profile_form");
dojo.connect(form, "onsubmit", function(event){
dojo.stopEvent(event);
var xhrArgs = {
form: form,
handleAs: "json",
load: function(responseText){
var result_data = zen.json.getResult(responseText);
dojo.byId("response").innerHTML = "Form posted.";
},
error: function(error){
// We'll 404 in the demo, but that's okay. We don't have a 'postIt' service on the
// docs server.
dojo.byId("response").innerHTML = "Form posted.";
}
};
// Call the asynchronous xhrPost
dojo.byId("response").innerHTML = "Form being sent...";
var deferred = dojo.xhrPost(xhrArgs);
});
But I don't know how to print the errors.
There are a few ways that you can do this. The one that I prefer is to subscribe to the IO Pipeline Topics.
For errors, subscribe to the /dojo/io/error topic. Here's an example that will Growl the errors.
dojo.subscribe("/dojo/io/error", function(/*dojo.Deferred*/ dfd, /*Object*/ error){
// Triggered whenever an IO request has errored.
// It passes the error and the dojo.Deferred
// for the request with the topic.
var responseTextObject = dojo.fromJson(error.responseText)
var growlMessage = '';
if (responseTextObject && responseTextObject.message) {
growlMessage += responseTextObject.message
} else {
// Don't Growl the xhr cancelled messages.
if (error.message == 'xhr cancelled') {
return;
}
growlMessage = error.message
}
new ext.Growl({
message: growlMessage
});
});
The server should provide all the error details in the response. In this example, a JSON formatted response is expected but if it's not provided, the error is still shown.
If you want to see the nice invalid-field styling, put the widgets in a dijit.form.Form.
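To actually place the messages next to the fields, a rough sketch (assuming dijit validation widgets whose ids match the field names in the server's JSON error response, e.g. {"errors": {"email": "Invalid address"}} — that response shape is an assumption):
function showFieldErrors(errors) {
    for (var fieldName in errors) {
        if (!errors.hasOwnProperty(fieldName)) { continue; }
        var widget = dijit.byId(fieldName);           // assumes the widget id equals the field name
        if (widget && widget.displayMessage) {
            widget.focus();                           // move focus to the offending field
            widget.displayMessage(errors[fieldName]); // show the message in the field's tooltip
        }
    }
}
// e.g. inside the load callback of the xhrPost:
// var result = dojo.fromJson(responseText);
// if (result.errors) { showFieldErrors(result.errors); }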