IAsyncEnumerator item becomes null - azure-service-fabric

While iterating over my ReliableDictionary with an enumerator, Current.Value is apparently null: the line "totalLength += val.Length;" throws a NullReferenceException.
How is this possible?
public async Task RemoveItemsPeriodically()
{
    try
    {
        var totalLength = 0;
        var dict = await this.stateManager.GetOrAddAsync<IReliableDictionary<string, Item>>("dict");
        using (ITransaction tx = this.stateManager.CreateTransaction())
        {
            IAsyncEnumerable<KeyValuePair<string, Item>> asyncEnumerable = await dict.CreateEnumerableAsync(tx);
            List<string> removeKeys = new List<string>();
            using (IAsyncEnumerator<KeyValuePair<string, Item>> asyncEnumerator = asyncEnumerable.GetAsyncEnumerator())
            {
                while (await asyncEnumerator.MoveNextAsync(CancellationToken.None))
                {
                    if (asyncEnumerator == null || asyncEnumerator.Current.Value == null)
                    {
                        Log();
                    }
                    else
                    {
                        var keyValue = asyncEnumerator.Current;
                        var key = keyValue.Key;
                        var val = keyValue.Value;
                        totalLength += val.Length;
                        if (this.checkToDelete(val.DeleteValue))
                        {
                            removeKeys.Add(key);
                        }
                    }
                }
                foreach (string keyToRemove in removeKeys)
                {
                    dict = await this.stateManager.GetOrAddAsync<IReliableDictionary<string, Item>>("dict");
                    await dict.TryRemoveAsync(tx, keyToRemove);
                }
            }
            await tx.CommitAsync();
        }
    }
    catch (Exception)
    {
        // exception handling omitted in the question; rethrow so the failure surfaces
        throw;
    }
}

Related

Listen to a new value of a bloc and generate a list of listened items

I have this StreamSubscription field called followsSubscription. It listens for new followers and then calls populateFollows to load the follower's profile.
followsSubscription =
    getBloc(context).followsHandler.stream.listen((value) async {
  if (value.status == Status.success) {
    await populateFollows();
  }
});
populateFollows() async {
  if (getBloc(context).followsModel.length > 0) {
    for (var i = 0; i < getBloc(context).followsModel.length; i++) {
      getBloc(context).loadFollowsProfile(getBloc(context).followsModel[i].userId);
      break;
    }
  }
}
This works fine, but I want each profile that gets loaded to be added to a list. How do I do that?
loadFollowsProfile method
loadFollowsProfile(int id, List<UserProfileModel> profileList) {
  getFollowsProfileHandler.addNetworkTransformerStream(
      Network.getInstance().getUserProfile(id), (_) {
    userProfileModelBloc = UserProfileModel.fromJson(_);
    profileList.add(userProfileModelBloc);
    return userProfileModelBloc;
  });
}
You can do this by setting up loadFollowsProfile() to return a UserProfileModel, adding that to a list in the for loop of populateFollows(), and then returning that list from populateFollows().
Future<List<UserProfileModel>> populateFollows() async {
  List<UserProfileModel> profileList = [];
  if (getBloc(context).followsModel.length > 0) {
    for (var i = 0; i < getBloc(context).followsModel.length; i++) {
      profileList.add(getBloc(context).loadFollowsProfile(
        getBloc(context).followsModel[i].userId,
      ));
      break;
    }
  }
  return profileList;
}
followsSubscription =
    getBloc(context).followsHandler.stream.listen((value) async {
  if (value.status == Status.success) {
    profileList = await populateFollows();
  }
});
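For the first step this relies on (making loadFollowsProfile() actually return the profile), here is a minimal sketch of what that could look like, assuming Network.getInstance().getUserProfile(id) can be awaited to get the profile JSON; the stream-handler plumbing from the original method is left out, and the names are taken from the question:
// Sketch only: return the loaded profile instead of pushing it into a passed-in list.
// Assumes getUserProfile(id) resolves to the profile JSON map.
Future<UserProfileModel> loadFollowsProfile(int id) async {
  final json = await Network.getInstance().getUserProfile(id);
  final profile = UserProfileModel.fromJson(json);
  userProfileModelBloc = profile; // keep the existing bloc field in sync
  return profile;
}
With an async version like this, the call inside populateFollows() would need an await: profileList.add(await getBloc(context).loadFollowsProfile(...)).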

for-in using dart Exception: type '_InternalLinkedHashMap<String, dynamic>

I am not familiar with for-in in Dart. I use similar code in TypeScript/Angular and it works as expected, but I get lost using it in Flutter and need more guidance.
Future<int> datetransaction(String mid) async {
  var url = 'http://callsomehttp.com/$mid';
  var res = await http.get(url);
  if (res.statusCode == 200) {
    var dtpoint = json.decode(res.body);
    var availabledate = [];
    for (var key in dtpoint) {
      var dateEle = dtpoint[key]['Balance']['date'];
      if (availabledate.indexOf(dateEle) == -1) {
        availabledate.add(dateEle);
        totalSpending(dateEle, mid);
      }
    }
    print('available data $availabledate');
    var spen = totalTransaction.reduce((a, b) => a + b);
    return spen;
  } else {
    throw Exception(
        "Request to $url failed with status ${res.statusCode}: ${res.body}");
  }
}
Future totalSpending(String date, String ckey) async {
  var total = 0;
  final String url = 'http://otherurl.com/$date';
  final res = await http.get(url);
  if (res.statusCode == 200) {
    var pdata = json.decode(res.body);
    for (var key in pdata) {
      var el = pdata[key]['Transaction']['customer_code'];
      var ttl = int.parse(pdata[key]['Transaction']['total']);
      if (el == ckey) {
        totalTransaction.add(ttl);
        total = ttl;
      }
    }
    return total;
  } else {
    throw Exception(
        "Request to $url failed with status ${res.statusCode}: ${res.body}");
  }
}
Any guidance to shed some light, or another way to get the result, would be really appreciated. Thank you.
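A hedged guess at what is going on, based on the error in the title: json.decode(res.body) returns a Map<String, dynamic> when the response is a JSON object, and for-in only iterates over Iterables, so looping over the map itself throws. Iterating dtpoint.keys (or dtpoint.entries) instead should work; a minimal sketch, assuming the response shape used above:
// Sketch only: iterate the keys of the decoded map rather than the map itself.
var dtpoint = json.decode(res.body) as Map<String, dynamic>;
var availabledate = [];
for (var key in dtpoint.keys) {
  var dateEle = dtpoint[key]['Balance']['date'];
  if (!availabledate.contains(dateEle)) {
    availabledate.add(dateEle);
    await totalSpending(dateEle, mid); // await so totals exist before reduce() runs
  }
}
The same change would apply to the pdata loop inside totalSpending().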

Dart : Convert From Iterable<ActivityModel> To ActivityModel

I have a model like this:
Model
class ActivityModel {
  String idActivity;
  String titleActivity;
  String dateTimeActivity;
  int isDoneActivity;
  int codeIconActivity;
  String informationActivity;
  String createdDateActivity;

  ActivityModel({
    this.idActivity,
    this.titleActivity,
    this.dateTimeActivity,
    this.isDoneActivity,
    this.codeIconActivity,
    this.informationActivity,
    this.createdDateActivity,
  });

  ActivityModel.fromSqflite(Map<String, dynamic> map)
      : idActivity = map['id_activity'],
        titleActivity = map['title_activity'],
        dateTimeActivity = map['datetime_activity'],
        isDoneActivity = map['is_done_activity'],
        codeIconActivity = map['code_icon_activity'],
        informationActivity = map['information_activity'],
        createdDateActivity = map['created_date'];

  Map<String, dynamic> toMapForSqflite() {
    return {
      'id_activity': this.idActivity,
      'title_activity': this.titleActivity,
      'datetime_activity': this.dateTimeActivity,
      'is_done_activity': this.isDoneActivity,
      'code_icon_activity': this.codeIconActivity,
      'information_activity': this.informationActivity,
      'created_date': this.createdDateActivity,
    };
  }
}
I want to get the items whose dateTimeActivity is earlier than the current date and time, and then update isDoneActivity = 1 with this code:
Source code
final passedDateItem = _selectedActivityItem.where((element) {
  DateTime convertStringToDateTime =
      DateTime.parse(element.dateTimeActivity);
  return convertStringToDateTime.isBefore(DateTime.now());
});
if (passedDateItem != null) {
  print('Not Null');
} else {
  print('Nulledd');
  return null;
}
The problem is that passedDateItem is an Iterable<ActivityModel>. Is it possible to convert it to an ActivityModel, so I can easily update it like this?
if (passedDateItem != null) {
  passedDateItem.isDoneActivity = 1; // <<<
  // return passedDateItem.map((e) => e.isDoneActivity = 1);
  // final testtt = passedDateItem.
  print('Not Null');
} else {
  print('Nulledd');
  return null;
}
Iterate through passedDateItem:
for (var activityModel in passedDateItem) {
  //..conditions
  activityModel.isDoneActivity = 1;
}
If you are only interested in the first/last element of passedDateItem, use
passedDateItem.first.isDoneActivity = 1;
or
passedDateItem.last.isDoneActivity = 1;
Make sure passedDateItem is not empty in that case.
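As a side note, where() never returns null; it returns a lazy (possibly empty) Iterable, so the null checks above never take the else branch. A minimal sketch of the whole update, assuming _selectedActivityItem is a List<ActivityModel>:
// Sketch only: filter the passed activities, then mark each one as done.
final passedDateItem = _selectedActivityItem
    .where((element) =>
        DateTime.parse(element.dateTimeActivity).isBefore(DateTime.now()))
    .toList();
if (passedDateItem.isEmpty) {
  print('No passed activities');
} else {
  for (final activityModel in passedDateItem) {
    activityModel.isDoneActivity = 1;
  }
}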

opcua session was closed by client

I have written the attached OpcUaConnector class for OPC UA connection-related activities, but it is not handling the session. For example, when I
disable the endpoint in the OPC UA configuration, or
do Runtime > Reinitialize in the KepServer configuration,
the Windows service throws
Source: System.Reactive.Core
InnerException: The session was closed by client
and stops, because this error goes unhandled.
Can someone suggest how to handle the session in OPC UA?
public class OpcUaConnector
{
private static SimplerAES simplerAES = new SimplerAES();
private DataContainer dataCointainer = null;
private UaTcpSessionChannel channel;
private string opcServerName = string.Empty;
private string opcUserId = string.Empty;
private string opcPassword = string.Empty;
private static ILog LogOpcStore;
private static System.IDisposable token;
private static uint id;
public OpcUaConnector(ILog Log)
{
IntializeLogOpcStore(Log);
}
private static void IntializeLogOpcStore(ILog Log)
{
LogOpcStore = Log;
}
public async Task OpenOpcConnection()
{
try
{
if ((!string.IsNullOrEmpty(this.opcServerName) & (this.opcServerName != AppMain.MyAppSettings.OpcServer)) ||
(!string.IsNullOrEmpty(this.opcUserId) & (this.opcUserId != AppMain.MyAppSettings.OpcUserId)) ||
(!string.IsNullOrEmpty(this.opcPassword) & (this.opcPassword != AppMain.MyAppSettings.OpcPassword)))
{
await channel.CloseAsync();
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
if (channel==null || (channel != null && (channel.State == CommunicationState.Closed || channel.State == CommunicationState.Faulted)))
{
var appDescription = new ApplicationDescription()
{
ApplicationName = "MyAppName",
ApplicationUri = $"urn:{System.Net.Dns.GetHostName()}:MyAppName",
ApplicationType = ApplicationType.Client,
};
//application data won't be deleted when uninstall
var certificateStore = new DirectoryStore(
Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "MyAppName", "pki"),
true, true
);
//if the Ethernet cable unplugs or the Wifi drops out,
//you have some timeouts that can keep the session open for a while.
//There is a SessionTimeout (default of 2 min).
this.channel = new UaTcpSessionChannel(
appDescription,
certificateStore,
SignInOpc,
AppMain.MyAppSettings.OpcServer,
null,
options: new UaTcpSessionChannelOptions { SessionTimeout = 120000 });
await channel.OpenAsync();
//LogOpcStore.Info(String.Format("Opc connection sucessful"));
}
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.HResult + " " + ex.Message, "C052");
throw serviceException;
}
}
private static async Task RecursivelyFindNode(UaTcpSessionChannel channel, NodeId nodeid)
{
BrowseRequest browseRequest = new BrowseRequest
{
NodesToBrowse = new BrowseDescription[] { new BrowseDescription { NodeId = nodeid, BrowseDirection = BrowseDirection.Forward, ReferenceTypeId = NodeId.Parse(ReferenceTypeIds.HierarchicalReferences), NodeClassMask = (uint)NodeClass.Variable | (uint)NodeClass.Object, IncludeSubtypes = true, ResultMask = (uint)BrowseResultMask.All } },
};
BrowseResponse browseResponse = await channel.BrowseAsync(browseRequest);
foreach (var rd1 in browseResponse.Results[0].References ?? new ReferenceDescription[0])
{
uint chid = AppMain.MyTagDatabase.GetClientHandleByTag(rd1.DisplayName.ToString());
if (chid > 0)
{
AppMain.MyTagDatabase.UpdateNodeByClientHandle(chid, rd1.NodeId.ToString());
}
await RecursivelyFindNode(channel, ExpandedNodeId.ToNodeId(rd1.NodeId, channel.NamespaceUris));
}
}
public async Task CreateSubscription(DataContainer dc)
{
double curReadingValue;
try
{
dataCointainer = dc;
await RecursivelyFindNode(channel, NodeId.Parse(ObjectIds.RootFolder));
if (AppMain.MyTagDatabase.GetCntTagsNotInOpcServer() == AppMain.MyTagDatabase.GetTagCount())
{
//no need to create subscription
return;
}
//subscription timeout that is the product of PublishingInterval * LifetimeCount:
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest);
id = subscriptionResponse.SubscriptionId;
var itemsToCreate = new MonitoredItemCreateRequest[AppMain.MyTagDatabase.GetTagHavingNodeCount()];
int i = 0;
foreach (var item in AppMain.MyTagDatabase.GetMyTagDatabase())
{
var itemKey = item.Key;
var itemValue = item.Value;
itemsToCreate[i] = new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse(itemValue.NodeId), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = itemKey, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } };
i++;
}
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = itemsToCreate,
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
}
catch (Exception ex)
{
//If the interruption lasts longer than these timeouts then the SessionChannel and Subscriptions will need to be recreated.
channel = null;
FatalServiceException fatalserviceException = new FatalServiceException(ex.Message, "C052");
throw fatalserviceException;
}
}
public async Task DeleteSubscription()
{
try
{
var request = new DeleteSubscriptionsRequest
{
SubscriptionIds = new uint[] { id }
};
await channel.DeleteSubscriptionsAsync(request);
token.Dispose();
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.Message, "C052");
throw serviceException;
}
}
private static async Task<IUserIdentity> SignInOpc(EndpointDescription endpoint)
{
IUserIdentity userIdentity = null;
if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.Anonymous))
{
userIdentity = new AnonymousIdentity();
}
else if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.UserName))
{
var userName = AppMain.MyAppSettings.OpcUserId;
var password = simplerAES.Decrypt(AppMain.MyAppSettings.OpcPassword);
userIdentity = new UserNameIdentity(userName, password);
}
return userIdentity;
}
private void AddDataPointToContainer(int dataType, string source, DateTime SourceTimestampUTC, double value)
{
ConditionValue conditionValue = new ConditionValue();
long timestamp = AppMain.ServerSyncTimeStore.ConvertDateTimeToTimeStampUTC(SourceTimestampUTC);
conditionValue.dataType = dataType;
conditionValue.source = source;
conditionValue.timestamp = timestamp;
conditionValue.SourceTimestampUTC = SourceTimestampUTC;
conditionValue.LocalTime = SourceTimestampUTC.ToLocalTime();
conditionValue.value = value;
//LogOpcStore.Info(String.Format("TagName : {0} SourceTimestampUTC : {1} timestamp : {2} LocalTime : {3} curReadingValue : {4}", source, SourceTimestampUTC, timestamp, SourceTimestampUTC.ToLocalTime(), value));
dataCointainer.AddDataPoint(conditionValue);
}
}
I see you are using the project https://github.com/convertersystems/opc-ua-client.
When a server closes the session and socket (as happens when you reinitialize Kepware) the client receives immediate notification that causes the client channel to fault. A faulted channel cannot be reopened, it should be aborted and a new channel should be created.
I made this standalone test, to show that you may have to catch an exception and recreate the channel and subscription. The point of this test is to subscribe to the CurrentTime node and collect 60 datachanges. The test should last a minute. If you re-init the Kepware server in the middle of the test, the code catches the exception and recreates the channel and subscription.
[TestMethod]
public async Task OpcConnectorTest()
{
var count = 0;
UaTcpSessionChannel channel = null;
while (count < 60)
{
try
{
channel = new UaTcpSessionChannel(
this.localDescription,
this.certificateStore,
new AnonymousIdentity(),
EndpointUrl,
SecurityPolicyUris.None,
loggerFactory: this.loggerFactory);
await channel.OpenAsync();
// create the keep alive subscription.
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest).ConfigureAwait(false);
var id = subscriptionResponse.SubscriptionId;
var token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
Console.WriteLine($"sub: {pr.SubscriptionId}; handle: {min.ClientHandle}; value: {min.Value}");
count++;
}
}
});
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = new MonitoredItemCreateRequest[]
{
new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse("i=2258"), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = 12345, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } }
},
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
while (channel.State == CommunicationState.Opened && count < 60)
{
await Task.Delay(1000);
}
}
catch (Exception ex)
{
Console.WriteLine($"Exception: {ex.GetType()}. {ex.Message}");
}
}
if (channel != null)
{
Console.WriteLine($"Closing session '{channel.SessionId}'.");
await channel.CloseAsync();
}
}
I know this is an old post, but I stumbled upon this problem as well. For those interested:
The problem is related to the subscription(s).
When the following code is run:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
Observable.subscribe() takes multiple arguments. You should include what to do in case of an error. For example:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(
pr => { code to run normally... },
ex => { Log.Info(ex.Message); },
() => { }
);
See http://reactivex.io/documentation/operators/subscribe.html for more information.

Refresh sticky mobile leaderboard ad slot by changing the content URL in infinite scroll

I want to refresh the sticky mobile leaderboard slot when the URL changes during infinite scroll. What do you think is the best way to go about this? Please let me know if you have any suggestions.
class DFPAds {
constructor() {
this.slots = [];
this.onScroll = throttle(this.loopAds, 300);
this.addEvents();
this.createAdObject = this.createAdObject.bind(this);
this.createSlotForAd = this.createSlotForAd.bind(this);
this.displayAd = this.displayAd.bind(this);
}
static get() {
return DFPAds._instance;
}
static set() {
if (!DFPAds._instance) {
DFPAds._instance = new DFPAds();
return DFPAds._instance;
} else {
throw new Error("DFPAds: instance already initialized");
}
}
addEvents() {
window.addEventListener("scroll", e => this.onScroll());
}
loopAds() {
this.slots.map(slot => this.displayAd(slot));
}
createAdObject(ad) {
let id = ad.id;
let attributes = getDataSet(ad);
let sizes = JSON.parse(attributes.sizes);
let sizeMapping;
if (attributes.sizemapping) {
attributes.sizemapping.length
? (sizeMapping = JSON.parse(attributes.sizemapping))
: (sizeMapping = null);
}
attributes.id = id;
attributes.sizes = sizes;
attributes.sizemapping = sizeMapping;
return attributes;
}
createSlotForAd(adObject) {
let {
id,
adtype,
position,
slotname,
sizes,
sizemapping,
shouldlazyload,
pagenumber,
pageid
} = adObject;
googletag.cmd.push(() => {
let slot = googletag.defineSlot(slotname, sizes, id);
if(position){
slot.setTargeting("position", position);
}
if(pagenumber){
slot.setTargeting("pagenumber", pagenumber);
}
if(pageid){
slot.setTargeting("PageID", pageid)
}
if (sizemapping) {
let mapping = googletag.sizeMapping();
sizemapping.map(size => {
mapping.addSize(size[0], size[1])
});
slot.defineSizeMapping(mapping.build());
}
slot.addService(googletag.pubads());
googletag.display(id);
shouldlazyload
? this.slots.push({ slot: slot, id: id })
: googletag.pubads().refresh([slot]);
console.log("SlotTop", slot)
});
}
displayAd(slot) {
console.log("Slottwo", slot)
let item = document.getElementById(slot.id);
if (item) {
let parent = item.parentElement;
let index = this.slots.indexOf(slot);
let parentDimensions = parent.getBoundingClientRect();
if (
(parentDimensions.top - 300) < window.innerHeight &&
parentDimensions.top + 150 > 0 &&
parent.offsetParent != null
) {
googletag.cmd.push(function() {
googletag.pubads().refresh([slot.slot]);
});
this.slots.splice(index, 1);
}
}
}
setUpAdSlots(context = document) {
let ads = [].slice.call(context.getElementsByClassName("Ad-data"));
if (ads.length) {
ads.map(ad => compose(this.createSlotForAd, this.createAdObject)(ad));
}
}
}
export default DFPAds;
And this is my Infinite Scroll code:
export default class InfiniteScroll {
constructor() {
this._end = document.getElementById('InfiniteScroll-End');
this._container = document.getElementById('InfiniteScroll-Container');
if(!this._end || !this._container)
return;
this._articles = { };
this._triggeredArticles = [];
this._currentArticle = '';
this._apiUrl = '';
this._count = 1;
this._timedOut = false;
this._loading = false;
this._ended = false;
this._viewedParameter = "&alreadyViewedContentIds=";
this._articleSelector = "InfiniteScroll-Article-";
this._articleClass = "Article-Container";
this.setStartData();
this.onScroll = throttle(this.eventScroll, 200);
this.addEvents();
}
addEvents(){
setTimeout(()=>{
window.addEventListener('scroll', (e) => this.onScroll() );
}, 2000);
}
addToStore(article){
this._articles["a" + article.id] = article;
}
addToTriggeredArticles(id){
this._triggeredArticles.push(id);
}
getRequestUrl(){
return this._apiUrl + this._viewedParameter + Object.keys(this._articles).map(key => this._articles[key].id).join();
}
getLastArticle(){
return this._articles[Object.keys(this._articles)[Object.keys(this._articles).length-1]];
}
setStartData() {
let dataset = getDataSet(this._container);
if(dataset.hasOwnProperty('apiurl')){
let article = Article.get();
if(article){
this._apiUrl = dataset.apiurl;
this._currentArticle = "a" + article.id;
this.addToStore(article);
}else{
throw(new Error('Infinite Scroll: Article not initialized.'));
}
}else{
throw(new Error('Infinite Scroll: Start object missing "apiurl" property.'));
}
}
eventScroll() {
if(this.isApproachingNext()){
this.requestNextArticle();
}
if(!this.isMainArticle(this._articles[this._currentArticle].node)){
this.updateCurrentArticle();
}
}
eventRequestSuccess(data){
this._loading = false;
if(data != ''){
if(data.hasOwnProperty('Id') && data.hasOwnProperty('Html') && data.hasOwnProperty('Url')){
this.incrementCount();
let node = document.createElement('div');
node.id = this._articleSelector + data.Id;
node.innerHTML = data.Html;
node.className = this._articleClass;
this._container.appendChild(node);
this.initArticleUpNext({
img: data.Img,
title: data.ArticleTitle,
category: data.Category,
target: node.id
});
this.addToStore(
new Article({
id: data.Id,
node: node,
url: data.Url,
title: data.Title,
count: this._count,
nielsenProps: {
section: data.NeilsenSection,
sega: data.NeilsenSegmentA,
segb: data.NeilsenSegmentB,
segc: data.NeilsenSegmentC
}
})
);
}else{
this._ended = true;
throw(new Error('Infinite Scroll: Response does not have an ID, Url and HTML property'));
}
}else{
this._ended = true;
throw(new Error('Infinite Scroll: No new article was received.'));
}
}
eventRequestError(response){
this._loading = false;
this._ended = true;
throw(new Error("Infinite Scroll: New article request failed."));
}
requestNextArticle(){
if(!this._loading){
this._loading = true;
return API.requestJSON(this.getRequestUrl())
.then(
(response)=>{this.eventRequestSuccess(response)},
(response)=>{this.eventRequestError(response)}
);
}
}
triggerViewEvent(article){
if(article.count > 1 && !this.isAlreadyTriggeredArticle(article.id)){
this.addToTriggeredArticles(article.id);
Tracker.pushView(article.url, article.count);
if(isFeatureEnabled('Nielsen') && Nielsen.get()){
Nielsen.get().eventInfiniteScroll({
id: article.id,
url: article.url,
section: article.nielsenProps.section,
sega: article.nielsenProps.sega,
segb: article.nielsenProps.segb,
segc: article.nielsenProps.segc
});
NielsenV60.trackEvent(article.title);
}
}
}
updateCurrentArticle(){
Object.keys(this._articles).map( key => {
if(this._currentArticle !== key && this.isMainArticle(this._articles[key].node)){
this._currentArticle = key;
this.updateUrl(this._articles[key]);
this.triggerViewEvent(this._articles[key]);
}
});
}
updateUrl(article){
try{
if(history.replaceState){
if(window.location.pathname !== article.url){
history.replaceState('', article.title, article.url);
}
}
}catch(e){}
}
incrementCount(){
this._count = this._count + 1;
}
initArticleUpNext(data){
this.getLastArticle().initUpNext(data);
}
isApproachingNext(){
return window.pageYOffset > this._end.offsetTop - (window.innerHeight * 2) && !this._ended && this._end.offsetTop >= 100;
}
isMainArticle(node){
if(node.getBoundingClientRect){
return (node.getBoundingClientRect().top < 80 && node.getBoundingClientRect().bottom > 70);
}else{
return false;
}
}
isAlreadyTriggeredArticle(id){
return this._triggeredArticles.indexOf(id) > -1;
}
}