Azure Mobile Services Node.js: update a column's count during a read query

I would like to update a column in a specific row in Azure Mobile Services using server side code (node.js).
The idea is that column A (which stores a number) will increase its count by 1 (i++) every time a user runs a read query from my mobile apps.
How can I accomplish that from the read script in Azure Mobile Services?
Thanks in advance,

Check out the examples in the online reference. In the read script for the table you're tracking, you will need to do something like the following. It's not clear whether you're tracking reads in the same table the user is reading from or in a separate counts table, but the flow is the same.
Note that if you really want to track this, you should log read requests to another table and tally them after the fact, or use an external analytics system (Google Analytics, Flurry, MixPanel, Azure Mobile Engagement, etc.). Updating a single count field in a record will not be accurate if multiple phones read from the table at the same time: both will read the same value x from the tracking table, increment it, and update the record with the same value x + 1.
function read(query, user, request) {
    var myTable = tables.getTable('counting');
    myTable.where({
        tableName: 'verses'
    }).read({
        success: updateCount
    });

    function updateCount(results) {
        if (results.length > 0) {
            // tracking record was found. update and continue normal execution.
            var trackingRecord = results[0];
            trackingRecord.count = trackingRecord.count + 1;
            myTable.update(trackingRecord, {
                success: function () {
                    request.execute();
                }
            });
        } else {
            // no tracking record was found for this table name
            console.log('error updating count');
            request.respond(500, 'unable to update read count');
        }
    }
}
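If the count must live in a single record, one way to avoid the lost-update problem noted above is to do the increment in one SQL statement, which the database applies atomically. A minimal sketch using the mssql object available to Azure Mobile Services server scripts (assuming the same 'counting' table with tableName and count columns):
function read(query, user, request) {
    // One statement: the database performs read-increment-write atomically,
    // so concurrent readers cannot overwrite each other's updates.
    var sql = "UPDATE counting SET count = count + 1 WHERE tableName = ?";
    mssql.query(sql, ['verses'], {
        success: function () {
            request.execute();
        },
        error: function (err) {
            console.error(err);
            request.execute(); // still serve the read even if tracking fails
        }
    });
}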
Hope this helps.
Edit: fixed function signature and table names above, adding another example below
If you want to track which verses were read (if your app can request one at a time), you need to do the "counting" lookup and update after the "verses" request executes, because the script doesn't tell you up front which verse records the user requested.
function read(query, user, request) {
    request.execute({
        success: function (verseResults) {
            request.respond();
            if (verseResults.length === 1) {
                var countTable = tables.getTable('counting');
                countTable.where({
                    verseId: verseResults[0].id
                }).read({
                    success: updateCount
                });

                function updateCount(results) {
                    if (results.length > 0) {
                        // tracking record was found, so update its count
                        var trackingRecord = results[0];
                        trackingRecord.count = trackingRecord.count + 1;
                        countTable.update(trackingRecord);
                    } else {
                        // no tracking record exists for this verse
                        console.log('error updating count');
                    }
                }
            }
        }
    });
}
Another note: make sure your counting table has an index on the column you're selecting by (tableName in the first example, verseId in the second).
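For example (hypothetical index names, assuming the SQL Server database behind Azure Mobile Services): CREATE INDEX IX_counting_tableName ON counting (tableName) for the first case, and CREATE INDEX IX_counting_verseId ON counting (verseId) for the second.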

Related

MSCRM RetrieveMultiple plugin limits the other RetrieveMultiple query

In my scenario, there is a plugin (RetrieveMultiple) on Annotation. This plugin is just part of a BLOB storage solution (used for the attachment management solution provided by Microsoft). So, it is clear that our CRM uses MicrosoftLabsAzureBlobstorage.
Now, I am executing a console app which retrieves multiple annotations through a QueryExpression. When it tries to fetch around 500 or 600 records, it throws the error below.
{The plug-in execution failed because no Sandbox Hosts are currently
available. Please check that you have a Sandbox server configured and
that it is running.\r\nSystem.ServiceModel.CommunicationException:
Microsoft Dynamics CRM has experienced an error. Reference number for
administrators or support: #AFF51A0F"}
When I fetch specific records or fewer records, it executes fine.
So my question is: is there any limit on the number of records for a RetrieveMultiple query when a RetrieveMultiple plugin exists?
Is there any other clue that I am not able to find?
To work around this conflict, in your console application code you may want to try retrieving smaller pages of annotations, say 50 at a time, and loop through the pages to process them all.
This article provides sample code for paging a QueryExpression.
Here's the abridged version of that sample:
// The number of records per page to retrieve.
int queryCount = 3;

// Initialize the page number.
int pageNumber = 1;

// Initialize the number of records.
int recordCount = 0;

// Create the query expression.
QueryExpression pagequery = new QueryExpression();
pagequery.EntityName = "account";
pagequery.ColumnSet.AddColumns("name", "emailaddress1");

// Assign the pageinfo properties to the query expression.
pagequery.PageInfo = new PagingInfo();
pagequery.PageInfo.Count = queryCount;
pagequery.PageInfo.PageNumber = pageNumber;

// The current paging cookie. When retrieving the first page,
// pagingCookie should be null.
pagequery.PageInfo.PagingCookie = null;

while (true)
{
    // Retrieve the page.
    EntityCollection results = _serviceProxy.RetrieveMultiple(pagequery);
    if (results.Entities != null)
    {
        // Retrieve all records from the result set.
        foreach (Account acct in results.Entities)
        {
            Console.WriteLine("{0}.\t{1}\t{2}", ++recordCount, acct.Name,
                acct.EMailAddress1);
        }
    }

    // Check for more records.
    if (results.MoreRecords)
    {
        // Increment the page number to retrieve the next page.
        pagequery.PageInfo.PageNumber++;

        // Set the paging cookie to the paging cookie returned from current results.
        pagequery.PageInfo.PagingCookie = results.PagingCookie;
    }
    else
    {
        // If no more records are in the result nodes, exit the loop.
        break;
    }
}
This page has more info and another sample.

Get ALL Facebook posts using facebook api and paging

It's my intention to get all past posts. I have created a Facebook app. When this code is executed, I have properly logged in with the correct appid and secret.
function loadPage() { // calls the first batch of records
    FB.api("/me/feed", {}, function (response) { procBatch(response); });
}

function procBatch(dat) { // handle this batch, request the next batch
    for (var i = 0; i < dat.data.length; i++) {
        procRow(dat.data[i]); // process this row
    }
    if (typeof dat.paging != 'undefined') {
        // Pass the new response, not the current batch, to the callback.
        FB.api(dat.paging.next, {}, function (response) { procBatch(response); });
    } else {
        alert("No more records expected");
    }
}
Unfortunately, this only takes me back about six months.
Is it possible to go back further?
Thanks in advance.
Yes, that's possible.
This can be done by increasing the number of objects per page. I've not tried retrieving ALL the posts, but I am pretty sure you can easily retrieve posts that are even a few years old.
You can call /me/feed?limit=numberOfObjectsPerPage instead of just /me/feed to increase this limit.
You can find more information on this here. Do have a look at the Time Based Pagination section on that link; it explains how to manage the output for the feeds.
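For instance, the loadPage function from the question could request larger batches like this (a sketch; 100 is an arbitrary value, and the Graph API may cap the limit server-side):
function loadPage() {
    // Ask for more objects per page; following paging.next then walks
    // further back in time with fewer round trips.
    FB.api("/me/feed", { limit: 100 }, function (response) {
        procBatch(response);
    });
}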

Updating MongoDB in Meteor Router Filter Methods

I am currently trying to log user page views in a Meteor app by storing the userId, Meteor.Router.page() and a timestamp when a user clicks through to other pages.
//userlog.js
Meteor.methods({
    createLog: function (page) {
        var timeStamp = Meteor.user().lastActionTimestamp;
        //Set variable to store validation if user is logging in
        var hasLoggedIn = false;
        //Checks if lastActionTimestamp of user is more than an hour ago
        if (moment(new Date().getTime()).diff(moment(timeStamp), 'hours') >= 1) {
            hasLoggedIn = true;
        }
        console.log("this ran");
        var log = {
            submitted: new Date().getTime(),
            userId: Meteor.userId(),
            page: page,
            login: hasLoggedIn
        };
        var logId = Userlogs.insert(log);
        Meteor.users.update(Meteor.userId(), {$set: {lastActionTimestamp: log.submitted}});
        return logId;
    }
});
//router.js This method runs as a filter on every page
'checkLoginStatus': function (page) {
    if (Meteor.userId()) {
        //Logs the page that the user has switched to
        Meteor.call('createLog', page);
        return page;
    } else if (Meteor.loggingIn()) {
        return 'loading';
    } else {
        return 'loginPage';
    }
}
However, this does not work, and it ends up recursively creating userlogs. I believe this is due to the fact that I did a Collection.find in a router filter method. Does anyone have a workaround for this issue?
When you're updating Meteor.users and setting lastActionTimestamp, Meteor.user will be updated and will send the invalidation signal to all reactive contexts which depend on it. If Meteor.user is used in a filter, then that filter and all consecutive ones, including checkLoginStatus, will rerun, causing a loop.
Best practices that I've found:
Avoid using reactive data sources as much as possible within filters.
Use Meteor.userId() where possible instead of Meteor.user()._id because the former will not trigger an invalidation when an attribute of the user object changes.
Order your filters so that they run with the most frequently updated reactive data source first. For example, if you have a trackPage filter that requires a user, let it run after another filter called requireUser so that you are certain you have a user before you track. Otherwise, if you'd track first and check the user second, then when Meteor.loggingIn changes from false to true you'd track the page again (see the sketch after this list).
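A sketch of that ordering, assuming Meteor-Router's filters()/filter() registration API (requireUser and trackPage are hypothetical filter names, and filters run in the order they are applied with .filter()):
Meteor.Router.filters({
    'requireUser': function (page) {
        // Runs first: redirect until a user exists.
        return Meteor.userId() ? page : 'loginPage';
    },
    'trackPage': function (page) {
        // Only reached once requireUser passes, so a login transition
        // doesn't double-log the page.
        Meteor.call('createLog', page);
        return page;
    }
});
Meteor.Router.filter('requireUser');
Meteor.Router.filter('trackPage');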
This is the main reason we switched to meteor-mini-pages instead of Meteor-Router: it handles reactive data sources much more easily. A filter can redirect, it can stop() the router from running, etc.
Lastly, cmather and others are working on a new router which is a merger of mini-pages and Meteor.Router. It will be called Iron Router and I recommend using it once it's out!

Exporting large data in csv for users

I am trying to make a system where users are allowed to export data in CSV format. Formatting the data as CSV is no problem. The problem is processing the request for the CSV export. Suppose a user requests an export of all their data and it is huge. I think it would not be best to ask the user to wait until the request is complete, right? Should I tell users that the export is in progress and notify them once it is complete? If yes, should I use a background process for it?
If possible, define some kind of configurable setting which might default to 10 MB. If you can tell the export will be more than 10 MB, ask the user whether they want to export such a large rowset; if possible, estimate the size and let them know how large it will be. If you show them a progress bar while running a background export, you'll need some sort of process-to-process communication to pass progress back to the user's form. Have an option on the form to change that setting to a number closer to the one they want as a warning level.
I finally went with streaming the data to users in CSV format using Node.js.
function csvExport(req, res) {
    mysqlPool.getConnection(function (err, connection) {
        if (err) {
            throw err;
        }
        res.header('Content-disposition', 'attachment; filename=connects.csv');
        res.header('Content-Type', 'text/csv');
        var csv_header_row = "Email,First Name,Last Name,Status,Created\n";
        res.write(csv_header_row);
        var query = connection.query('SELECT * FROM contacts WHERE user_id = ? AND deleted = 0', [req.params.user_id]);
        query
            .on('error', function (err) {
                //handle error
            })
            .on('fields', function (fields) {
            })
            .on('result', function (row) {
                // Note: values are not escaped here; fields containing commas
                // or quotes would need proper CSV escaping.
                var csv_row = row.email + ',' + row.first_name + ',' + row.last_name + ',' + row.status + ',' + row.created + "\n";
                res.write(csv_row);
            })
            .on('end', function () {
                connection.release(); // return the connection to the pool
                res.end('', function () {
                    console.log('done');
                });
            });
    });
}
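One refinement worth considering: res.write() returns false when the client can't keep up, and node-mysql lets you pause and resume a streaming query. The result handler above could apply backpressure like this (a sketch; toCsvRow is a hypothetical escaping helper):
.on('result', function (row) {
    var ok = res.write(toCsvRow(row));
    if (!ok) {
        // Stop pulling rows until the HTTP response buffer drains,
        // keeping memory bounded for very large exports.
        connection.pause();
        res.once('drain', function () {
            connection.resume();
        });
    }
})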

Insert or update record in HighScore table

I have a table that receives high score entries. However, if the user already has an entry in the table (tracked through a GUID field, not the user parameter), I want to update it if the new entry has a better time; otherwise, don't change the existing record. If the user doesn't have a record in the high score table, then add a new record. I also have two query parameters to pass to the query.
I want the insert operation to handle this for the table. I have this so far, but I get an exception when I call InsertAsync(...) on the HighScore table:
function insert(item, user, request) {
    var sql = "select Id from HighScore where PlayerGUID=? AND PlayerBadge=?";
    // mssql.query takes a single array of parameters.
    mssql.query(sql, [item.PlayerGUID, item.PlayerBadge], {
        success: function (results) {
            if (results.length > 0) {
                // leader board record exists so update the current record
                // Check the existing record and update it if the new time is better
                console.log("Found existing entry");
            } else {
                // no record exists for this user so insert one
                request.execute();
                console.log("No existing entry found");
            }
        }
    });
}
Can anyone offer me any assistance with achieving my goal?
Many thanks,
J.
It took some time and some help but here's where I ended up. It works just as I intended it to.
function insert(item, user, request) {
    // Store the passed-in item object for use when inserting or updating
    resultsItem = item;
    // Store the request object to allow called functions to send respond commands
    thisRequest = request;
    // Retrieve the HighScore table so we can check it for an existing record
    hsTable = tables.getTable('HighScore');
    // Update the leaderboard
    updateLeaderboard(item);
}

// Global variables
var resultsItem, hsTable, thisRequest;

function updateLeaderboard(item) {
    // Filter the table using the where operator to only include those
    // records for the current PlayerGUID and PlayerBadge fields
    hsTable.where({
        PlayerGUID: item.PlayerGUID,
        PlayerBadge: item.PlayerBadge
    }).read({
        success: updateScore,
        error: errorHandler
    });
}

function updateScore(results) {
    if (results.length > 0) {
        // If a record already exists then check the PlayerTime
        if (results[0].PlayerTime > resultsItem.PlayerTime) {
            // Update the PlayerTime if the new time is less than the currently saved value
            hsTable.update({
                id: results[0].id,
                PlayerTime: resultsItem.PlayerTime
            }, {
                success: logSuccess,
                error: errorHandler
            });
        } else {
            // Send them OK. Could change this and use the returned code/text to display
            // a custom message that tells the user that a previous time is faster.
            thisRequest.respond(statusCodes.OK);
        }
    } else {
        // No record exists for this PlayerGUID and PlayerBadge, so write one
        hsTable.insert({
            PlayerName: resultsItem.PlayerName,
            PlayerCountry: resultsItem.PlayerCountry,
            PlayerTime: resultsItem.PlayerTime,
            PlayerBadge: resultsItem.PlayerBadge,
            PlayerGender: resultsItem.PlayerGender,
            PlayerDOB: resultsItem.PlayerDOB,
            PlayerGUID: resultsItem.PlayerGUID
        }, {
            success: logSuccess,
            error: errorHandler
        });
    }
}

// Called if there is an error
function errorHandler(error) {
    console.error("An error occurred trying to update leaderboard info for player " +
        resultsItem.PlayerName);
    thisRequest.respond(statusCodes.BAD_REQUEST);
}

// Called if things work out OK
function logSuccess() {
    thisRequest.respond(statusCodes.OK);
}
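As an aside: like the read-count example at the top of this page, this read-then-write flow can race if the same player submits twice concurrently. A single conditional UPDATE through the mssql object would make the compare-and-write atomic. A rough sketch only (updateTimeIfBetter is a hypothetical helper; it assumes the column names above, and the insert-when-no-row-exists path would still need handling):
function updateTimeIfBetter(item, request) {
    // The comparison and the write happen in one statement, so a slower
    // concurrent submission cannot overwrite a faster one.
    var sql = "UPDATE HighScore SET PlayerTime = ? " +
              "WHERE PlayerGUID = ? AND PlayerBadge = ? AND PlayerTime > ?";
    mssql.query(sql, [item.PlayerTime, item.PlayerGUID, item.PlayerBadge, item.PlayerTime], {
        success: function () { request.respond(statusCodes.OK); },
        error: function () { request.respond(statusCodes.BAD_REQUEST); }
    });
}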