OPC UA session was closed by client

I have written the attached OpcUaConnector class for OPC UA connection-related activities,
but it does not handle the session. For example, when I
disable the endpoint in the OPC UA configuration, or
do Runtime > Reinitialize in the KEPServer configuration,
the Windows service throws:
Source : System.Reactive.Core
InnerException : The session was closed by client
and the service stops, because this error goes unhandled.
Can someone suggest how to handle the session in OPC UA?
public class OpcUaConnector
{
private static SimplerAES simplerAES = new SimplerAES();
private DataContainer dataContainer = null;
private UaTcpSessionChannel channel;
private string opcServerName = string.Empty;
private string opcUserId = string.Empty;
private string opcPassword = string.Empty;
private static ILog LogOpcStore;
private static System.IDisposable token;
private static uint id;
public OpcUaConnector(ILog Log)
{
InitializeLogOpcStore(Log);
}
private static void InitializeLogOpcStore(ILog Log)
{
LogOpcStore = Log;
}
public async Task OpenOpcConnection()
{
try
{
if ((!string.IsNullOrEmpty(this.opcServerName) && (this.opcServerName != AppMain.MyAppSettings.OpcServer)) ||
(!string.IsNullOrEmpty(this.opcUserId) && (this.opcUserId != AppMain.MyAppSettings.OpcUserId)) ||
(!string.IsNullOrEmpty(this.opcPassword) && (this.opcPassword != AppMain.MyAppSettings.OpcPassword)))
{
if (channel != null) await channel.CloseAsync();
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
if (channel == null || channel.State == CommunicationState.Closed || channel.State == CommunicationState.Faulted)
{
var appDescription = new ApplicationDescription()
{
ApplicationName = "MyAppName",
ApplicationUri = $"urn:{System.Net.Dns.GetHostName()}:MyAppName",
ApplicationType = ApplicationType.Client,
};
//application data won't be deleted on uninstall
var certificateStore = new DirectoryStore(
Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "MyAppName", "pki"),
true, true
);
//if the Ethernet cable unplugs or the Wifi drops out,
//you have some timeouts that can keep the session open for a while.
//There is a SessionTimeout (default of 2 min).
this.channel = new UaTcpSessionChannel(
appDescription,
certificateStore,
SignInOpc,
AppMain.MyAppSettings.OpcServer,
null,
options: new UaTcpSessionChannelOptions { SessionTimeout = 120000 });
await channel.OpenAsync();
//LogOpcStore.Info(String.Format("Opc connection successful"));
}
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.HResult + " " + ex.Message, "C052");
throw serviceException;
}
}
private static async Task RecursivelyFindNode(UaTcpSessionChannel channel, NodeId nodeid)
{
BrowseRequest browseRequest = new BrowseRequest
{
NodesToBrowse = new BrowseDescription[] { new BrowseDescription { NodeId = nodeid, BrowseDirection = BrowseDirection.Forward, ReferenceTypeId = NodeId.Parse(ReferenceTypeIds.HierarchicalReferences), NodeClassMask = (uint)NodeClass.Variable | (uint)NodeClass.Object, IncludeSubtypes = true, ResultMask = (uint)BrowseResultMask.All } },
};
BrowseResponse browseResponse = await channel.BrowseAsync(browseRequest);
foreach (var rd1 in browseResponse.Results[0].References ?? new ReferenceDescription[0])
{
uint chid = AppMain.MyTagDatabase.GetClientHandleByTag(rd1.DisplayName.ToString());
if (chid > 0)
{
AppMain.MyTagDatabase.UpdateNodeByClientHandle(chid, rd1.NodeId.ToString());
}
await RecursivelyFindNode(channel, ExpandedNodeId.ToNodeId(rd1.NodeId, channel.NamespaceUris));
}
}
public async Task CreateSubscription(DataContainer dc)
{
double curReadingValue;
try
{
dataContainer = dc;
await RecursivelyFindNode(channel, NodeId.Parse(ObjectIds.RootFolder));
if (AppMain.MyTagDatabase.GetCntTagsNotInOpcServer() == AppMain.MyTagDatabase.GetTagCount())
{
//no need to create subscription
return;
}
//subscription timeout that is the product of PublishingInterval * LifetimeCount:
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest);
id = subscriptionResponse.SubscriptionId;
var itemsToCreate = new MonitoredItemCreateRequest[AppMain.MyTagDatabase.GetTagHavingNodeCount()];
int i = 0;
foreach (var item in AppMain.MyTagDatabase.GetMyTagDatabase())
{
var itemKey = item.Key;
var itemValue = item.Value;
itemsToCreate[i] = new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse(itemValue.NodeId), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = itemKey, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } };
i++;
}
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = itemsToCreate,
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
}
catch (Exception ex)
{
//If the interruption lasts longer than these timeouts then the SessionChannel and Subscriptions will need to be recreated.
channel = null;
FatalServiceException fatalserviceException = new FatalServiceException(ex.Message, "C052");
throw fatalserviceException;
}
}
public async Task DeleteSubscription()
{
try
{
var request = new DeleteSubscriptionsRequest
{
SubscriptionIds = new uint[] { id }
};
await channel.DeleteSubscriptionsAsync(request);
token.Dispose();
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.Message, "C052");
throw serviceException;
}
}
private static async Task<IUserIdentity> SignInOpc(EndpointDescription endpoint)
{
IUserIdentity userIdentity = null;
if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.Anonymous))
{
userIdentity = new AnonymousIdentity();
}
else if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.UserName))
{
var userName = AppMain.MyAppSettings.OpcUserId;
var password = simplerAES.Decrypt(AppMain.MyAppSettings.OpcPassword);
userIdentity = new UserNameIdentity(userName, password);
}
return userIdentity;
}
private void AddDataPointToContainer(int dataType, string source, DateTime SourceTimestampUTC, double value)
{
ConditionValue conditionValue = new ConditionValue();
long timestamp = AppMain.ServerSyncTimeStore.ConvertDateTimeToTimeStampUTC(SourceTimestampUTC);
conditionValue.dataType = dataType;
conditionValue.source = source;
conditionValue.timestamp = timestamp;
conditionValue.SourceTimestampUTC = SourceTimestampUTC;
conditionValue.LocalTime = SourceTimestampUTC.ToLocalTime();
conditionValue.value = value;
//LogOpcStore.Info(String.Format("TagName : {0} SourceTimestampUTC : {1} timestamp : {2} LocalTime : {3} curReadingValue : {4}", source, SourceTimestampUTC, timestamp, SourceTimestampUTC.ToLocalTime(), value));
dataContainer.AddDataPoint(conditionValue);
}
}

I see you are using the project https://github.com/convertersystems/opc-ua-client.
When a server closes the session and socket (as happens when you reinitialize Kepware), the client receives an immediate notification that causes the client channel to fault. A faulted channel cannot be reopened; it should be aborted and a new channel should be created.
I made this standalone test to show that you may have to catch the exception and recreate the channel and subscription. The point of the test is to subscribe to the CurrentTime node and collect 60 data changes, so the test should last about a minute. If you re-initialize the Kepware server in the middle of the test, the code catches the exception and recreates the channel and subscription.
[TestMethod]
public async Task OpcConnectorTest()
{
var count = 0;
UaTcpSessionChannel channel = null;
while (count < 60)
{
try
{
channel = new UaTcpSessionChannel(
this.localDescription,
this.certificateStore,
new AnonymousIdentity(),
EndpointUrl,
SecurityPolicyUris.None,
loggerFactory: this.loggerFactory);
await channel.OpenAsync();
// create the keep alive subscription.
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest).ConfigureAwait(false);
var id = subscriptionResponse.SubscriptionId;
var token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
Console.WriteLine($"sub: {pr.SubscriptionId}; handle: {min.ClientHandle}; value: {min.Value}");
count++;
}
}
});
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = new MonitoredItemCreateRequest[]
{
new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse("i=2258"), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = 12345, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } }
},
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
while (channel.State == CommunicationState.Opened && count < 60)
{
await Task.Delay(1000);
}
}
catch (Exception ex)
{
Console.WriteLine($"Exception: {ex.GetType()}. {ex.Message}");
}
}
if (channel != null)
{
Console.WriteLine($"Closing session '{channel.SessionId}'.");
await channel.CloseAsync();
}
}
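Applied to the OpcUaConnector above, the same pattern amounts to wrapping connection and subscription setup in a retry loop and dropping the faulted channel before reconnecting. The following is a minimal sketch only, reusing the class's existing OpenOpcConnection/CreateSubscription methods and its private channel field; the RunAsync name, the back-off delays and the stopping CancellationToken are illustrative additions, not part of the original class or the library API.
public async Task RunAsync(DataContainer dc, CancellationToken stopping)
{
    while (!stopping.IsCancellationRequested)
    {
        try
        {
            await OpenOpcConnection();      // builds a new UaTcpSessionChannel when channel is null or faulted
            await CreateSubscription(dc);   // recreates the subscription and monitored items
            // Stay here while the channel is healthy.
            while (channel.State == CommunicationState.Opened && !stopping.IsCancellationRequested)
            {
                await Task.Delay(1000);
            }
        }
        catch (Exception ex)
        {
            // Log instead of letting the exception escape and stop the Windows service.
            LogOpcStore.Error(ex);
        }
        // A faulted channel cannot be reopened; drop the reference so the next iteration creates a new one.
        channel = null;
        await Task.Delay(5000);             // simple back-off before reconnecting
    }
}
The hosting service would call RunAsync once at startup and cancel the token on stop, instead of invoking OpenOpcConnection and CreateSubscription directly.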

I know this is an old post, but I stumbled upon this problem as well. For those interested:
The problem is related to the subscription(s).
When the following code is run:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
Observable.Subscribe() has overloads that take multiple arguments; you should include what to do in case of an error. For example:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(
pr => { /* code to run normally */ },
ex => { Log.Info(ex.Message); },
() => { }
);
See http://reactivex.io/documentation/operators/subscribe.html for more information.
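Tying this back to the reconnect advice above: the error callback is also a natural place to trigger re-creation of the channel and subscription, because the publish-response observable terminates with an error when the server closes the session. A minimal sketch, where ScheduleReconnect() is a hypothetical placeholder for whatever reconnect logic you already have:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(
    onNext: pr =>
    {
        // handle the DataChangeNotifications exactly as in the original code
    },
    onError: ex =>
    {
        LogOpcStore.Warn("Subscription ended: " + ex.Message);
        channel = null;       // a faulted channel cannot be reused
        ScheduleReconnect();  // hypothetical: recreate the channel and the subscription
    },
    onCompleted: () => { });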

Related

OPC-DA AddGroup not working by using "TitaniumAS.Opc.Client" c# library

I'm getting a null reference error while adding the group, i.e. on the line opcDaGroup = server.AddGroup("Group");
Please help me understand what the issue is and how to overcome it.
Could this be a DCOM access issue?
Code:
public OpcDaGroup opcDaGroup;
private void StartConnection(string Cls_Id, string Machine_IpAddress)
{
try
{
log.Debug("START - Starting the connection");
Uri url = UrlBuilder.Build(Cls_Id, Machine_IpAddress);
server = new OpcDaServer(url);
server.Connect();
log.Debug("END - Starting the connection");
}
catch (Exception e)
{
log.Error("Could not connect to the OPC server for the IpAddress: {0}", e);
}
}
public OpcDaAdapterThreadImpl(string Cls_Id, string Machine_IpAddress, string name, ICollection<TagConfigurationModel> tags)
{
this.Cls_Id = Cls_Id;
this.Machine_IpAddress = Machine_IpAddress;
this.name = name;
this.AdapterReady = true;
Tags = tags;
var group = tags.GroupBy(c => new { ItemName = c.TagName }).Select(c => c.Key).Distinct();
grouptags = group.Select(c => new TagConfigurationModel { TagName = c.ItemName }).ToList();
opcTagItems = grouptags.Select(c => new OpcDaItemDefinition { ItemId = c.TagName, IsActive = true }).ToList();
// Connection Initializing
StartConnection(Cls_Id, Machine_IpAddress);
if (server.IsConnected)
{
// Add group to the server
opcDaGroup = server.AddGroup("Group");
opcDaGroup.IsActive = true;
// Add Items to group
opcDaGroup.AddItems(opcTagItems);
}
}

Chrome Developer Tools - Performance profiling

In the image below (from the Chrome Performance profiling tab for an API call), what is the "resource loading" that costs 719 ms?
If I visit the Network tab for the same API call, I see only 10.05 seconds.
What does "resource loading" mean here? Is there any specific activity the browser does after receiving the data?
As @wOxxOM stated, buildNetworkRequestDetails is being called from the DevTools source code.
From the statement by @Sanju singh:
"that statement doesn't tell anything, why it is taking that much time to make the resource available?"
I think it's necessary to break down exactly what is happening.
Summary:
Activity Browser and Network Activity use different algorithms for calculating completion: Network Activity calculates the response time of the request, while Activity Browser calculates the response time plus the time it took to add it into the WebInspector tracer.
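To put the asker's numbers into that split (assuming both figures refer to the same request): the Duration row in the Performance panel is endTime - startTime, which the code below divides into finishTime - startTime (the value the Network tab reports) and endTime - finishTime (shown as "resource loading"). With roughly 10.05 s reported in the Network tab and 719 ms of resource loading, the Duration row would therefore read about 10.77 s, rendered as "(10.05 s network transfer + 719 ms resource loading)".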
Looking at
/**
* @param {!TimelineModel.TimelineModel.NetworkRequest} request
* @param {!TimelineModel.TimelineModel.TimelineModelImpl} model
* @param {!Components.Linkifier.Linkifier} linkifier
* @return {!Promise<!DocumentFragment>}
*/
static async buildNetworkRequestDetails(request, model, linkifier) {
const target = model.targetByEvent(request.children[0]);
const contentHelper = new TimelineDetailsContentHelper(target, linkifier);
const category = TimelineUIUtils.networkRequestCategory(request);
const color = TimelineUIUtils.networkCategoryColor(category);
contentHelper.addSection(ls`Network request`, color);
if (request.url) {
contentHelper.appendElementRow(ls`URL`, Components.Linkifier.Linkifier.linkifyURL(request.url));
}
// The time from queueing the request until resource processing is finished.
const fullDuration = request.endTime - (request.getStartTime() || -Infinity);
if (isFinite(fullDuration)) {
let textRow = Number.millisToString(fullDuration, true);
// The time from queueing the request until the download is finished. This
// corresponds to the total time reported for the request in the network tab.
const networkDuration = request.finishTime - request.getStartTime();
// The time it takes to make the resource available to the renderer process.
const processingDuration = request.endTime - request.finishTime;
if (isFinite(networkDuration) && isFinite(processingDuration)) {
const networkDurationStr = Number.millisToString(networkDuration, true);
const processingDurationStr = Number.millisToString(processingDuration, true);
const cacheOrNetworkLabel = request.cached() ? ls`load from cache` : ls`network transfer`;
textRow += ls` (${networkDurationStr} ${cacheOrNetworkLabel} + ${processingDurationStr} resource loading)`;
}
contentHelper.appendTextRow(ls`Duration`, textRow);
}
if (request.requestMethod) {
contentHelper.appendTextRow(ls`Request Method`, request.requestMethod);
}
if (typeof request.priority === 'string') {
const priority = PerfUI.NetworkPriorities.uiLabelForNetworkPriority(
/** @type {!Protocol.Network.ResourcePriority} */ (request.priority));
contentHelper.appendTextRow(ls`Priority`, priority);
}
if (request.mimeType) {
contentHelper.appendTextRow(ls`Mime Type`, request.mimeType);
}
let lengthText = '';
if (request.memoryCached()) {
lengthText += ls` (from memory cache)`;
} else if (request.cached()) {
lengthText += ls` (from cache)`;
} else if (request.timing && request.timing.pushStart) {
lengthText += ls` (from push)`;
}
if (request.fromServiceWorker) {
lengthText += ls` (from service worker)`;
}
if (request.encodedDataLength || !lengthText) {
lengthText = `${Number.bytesToString(request.encodedDataLength)}${lengthText}`;
}
contentHelper.appendTextRow(ls`Encoded Data`, lengthText);
if (request.decodedBodyLength) {
contentHelper.appendTextRow(ls`Decoded Body`, Number.bytesToString(request.decodedBodyLength));
}
const title = ls`Initiator`;
const sendRequest = request.children[0];
const topFrame = TimelineModel.TimelineModel.TimelineData.forEvent(sendRequest).topFrame();
if (topFrame) {
const link = linkifier.maybeLinkifyConsoleCallFrame(target, topFrame, {tabStop: true});
if (link) {
contentHelper.appendElementRow(title, link);
}
} else {
const initiator = TimelineModel.TimelineModel.TimelineData.forEvent(sendRequest).initiator();
if (initiator) {
const initiatorURL = TimelineModel.TimelineModel.TimelineData.forEvent(initiator).url;
if (initiatorURL) {
const link = linkifier.maybeLinkifyScriptLocation(target, null, initiatorURL, 0, {tabStop: true});
if (link) {
contentHelper.appendElementRow(title, link);
}
}
}
}
if (!request.previewElement && request.url && target) {
request.previewElement = await Components.ImagePreview.ImagePreview.build(
target, request.url, false,
{imageAltText: Components.ImagePreview.ImagePreview.defaultAltTextForImageURL(request.url)});
}
if (request.previewElement) {
contentHelper.appendElementRow(ls`Preview`, request.previewElement);
}
return contentHelper.fragment;
}
We can easily see that the request parameter is of type
`TimelineModel.TimelineModel.NetworkRequest`
NetworkRequest has the following code:
_didStopRecordingTraceEvents: function()
{
var metadataEvents = this._processMetadataEvents();
this._injectCpuProfileEvents(metadataEvents);
this._tracingModel.tracingComplete();
this._resetProcessingState();
var startTime = 0;
for (var i = 0, length = metadataEvents.page.length; i < length; i++) {
var metaEvent = metadataEvents.page[i];
var process = metaEvent.thread.process();
var endTime = i + 1 < length ? metadataEvents.page[i + 1].startTime : Infinity;
this._currentPage = metaEvent.args["data"] && metaEvent.args["data"]["page"];
for (var thread of process.sortedThreads()) {
if (thread.name() === WebInspector.TimelineModel.WorkerThreadName && !metadataEvents.workers.some(function(e) { return e.args["data"]["workerThreadId"] === thread.id(); }))
continue;
this._processThreadEvents(startTime, endTime, metaEvent.thread, thread);
}
startTime = endTime;
}
this._inspectedTargetEvents.sort(WebInspector.TracingModel.Event.compareStartTime);
this._cpuProfiles = null;
this._buildTimelineRecords();
this._buildGPUTasks();
this._insertFirstPaintEvent();
this._resetProcessingState();
this.dispatchEventToListeners(WebInspector.TimelineModel.Events.RecordingStopped);
},
We can see that endTime is taken from the start time of the next page metadata event:
metadataEvents.page[i + 1].startTime
We can see that metadataEvents.page is being set by:
_processMetadataEvents: function()
{
var metadataEvents = this._tracingModel.devToolsMetadataEvents();
var pageDevToolsMetadataEvents = [];
var workersDevToolsMetadataEvents = [];
for (var event of metadataEvents) {
if (event.name === WebInspector.TimelineModel.DevToolsMetadataEvent.TracingStartedInPage)
pageDevToolsMetadataEvents.push(event);
else if (event.name === WebInspector.TimelineModel.DevToolsMetadataEvent.TracingSessionIdForWorker)
workersDevToolsMetadataEvents.push(event);
}
if (!pageDevToolsMetadataEvents.length) {
// The trace is probably coming not from DevTools. Make a mock Metadata event.
var pageMetaEvent = this._loadedFromFile ? this._makeMockPageMetadataEvent() : null;
if (!pageMetaEvent) {
console.error(WebInspector.TimelineModel.DevToolsMetadataEvent.TracingStartedInPage + " event not found.");
return {page: [], workers: []};
}
pageDevToolsMetadataEvents.push(pageMetaEvent);
}
var sessionId = pageDevToolsMetadataEvents[0].args["sessionId"] || pageDevToolsMetadataEvents[0].args["data"]["sessionId"];
this._sessionId = sessionId;
var mismatchingIds = new Set();
/**
* @param {!WebInspector.TracingModel.Event} event
* @return {boolean}
*/
function checkSessionId(event)
{
var args = event.args;
// FIXME: put sessionId into args["data"] for TracingStartedInPage event.
if (args["data"])
args = args["data"];
var id = args["sessionId"];
if (id === sessionId)
return true;
mismatchingIds.add(id);
return false;
}
var result = {
page: pageDevToolsMetadataEvents.filter(checkSessionId).sort(WebInspector.TracingModel.Event.compareStartTime),
workers: workersDevToolsMetadataEvents.filter(checkSessionId).sort(WebInspector.TracingModel.Event.compareStartTime)
};
if (mismatchingIds.size)
WebInspector.console.error("Timeline recording was started in more than one page simultaneously. Session id mismatch: " + this._sessionId + " and " + mismatchingIds.valuesArray() + ".");
return result;
}

moq mongodb InsertOneAsync method

I am using a MongoDB database with .NET Core. I just want to mock the insert method that uses the MongoDB context. Here is what I am trying to do, but it's not working:
public void InsertEventAsync_Test()
{
//Arrange
var eventRepository = EventRepository();
var pEvent = new PlanEvent
{
ID = "testEvent",
WorkOrderID = "WorkOrderID",
IsDeleted = false,
IsActive = true,
EquipmentID = "EquipmentID"
};
////Act
//mockEventContext.Setup(mr => mr.PlanEvent.InsertOne(It.IsAny<PlanEvent>(), It.IsAny<InsertOneOptions>()))
mockEventContext.Setup(s => s.PlanEvent.InsertOneAsync(It.IsAny<PlanEvent>(), It.IsAny<InsertOneOptions>())).Returns("sdad");
var result = eventRepository.InsertEventAsync(pEvent);
////Assert
result.Should().NotBeNull();
}
Below is the method that I need to mock:
public EventRepository(IFMPContext eventContext)
{
_eventContext = eventContext;
}
public async Task<string> InsertEventAsync(Model.EventDataModel.PlanEvent eventobj)
{
eventobj._id = ObjectId.GenerateNewId();
eventobj.CreatedDateTime = DateTime.UtcNow.ToString();
try
{
_eventContext.PlanEvent.InsertOne(eventobj);
return eventobj.ID;
}
catch (Exception ex)
{
string x = ex.Message;
}
return "";
}
Assuming
public class EventRepository {
private readonly IFMPContext eventContext;
public EventRepository(IFMPContext eventContext) {
this.eventContext = eventContext;
}
public async Task<string> InsertEventAsync(Model.EventDataModel.PlanEvent eventobj) {
eventobj._id = ObjectId.GenerateNewId();
eventobj.CreatedDateTime = DateTime.UtcNow.ToString();
try {
await eventContext.PlanEvent.InsertOneAsync(eventobj);
return eventobj.ID;
} catch (Exception ex) {
string x = ex.Message;
}
return "";
}
}
You need to configure the test to support the async nature of the method under test
public async Task InsertEventAsync_Test()
{
//Arrange
var expected = "testEvent";
var pEvent = new PlanEvent {
ID = expected,
WorkOrderID = "WorkOrderID",
IsDeleted = false,
IsActive = true,
EquipmentID = "EquipmentID"
};
var mockEventContext = new Mock<IFMPContext>();
mockEventContext
.Setup(_ => _.PlanEvent.InsertOneAsync(It.IsAny<PlanEvent>(), It.IsAny<InsertOneOptions>()))
.Returns(Task.CompletedTask);
var eventRepository = new EventRepository(mockEventContext.Object);
//Act
var actual = await eventRepository.InsertEventAsync(pEvent);
//Assert
actual.Should().NotBeNull();
actual.Should().Be(expected);
}
The test method definition needed to be updated to be asynchronous to allow the method under test to be awaited. The mock dependency also needed to be setup in such a way to allow the async flow to continue as expected when invoked.
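If you also want to assert that the repository actually attempted the insert, Moq can verify the call against the same chained member used in the setup above (a sketch mirroring that setup, so it relies on the same IFMPContext shape):
mockEventContext.Verify(
    _ => _.PlanEvent.InsertOneAsync(It.IsAny<PlanEvent>(), It.IsAny<InsertOneOptions>()),
    Times.Once());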
@Nkosi Thanks a lot for your help. I finally found the way: I was missing the extra Moq parameter It.IsAny<System.Threading.CancellationToken>(). Below is the working test:
public async Task InsertEventAsync_Test()
{
//Arrange
var eventRepository = EventRepository();
var pEvent = new PlanEvent
{
ID = "testEvent",
WorkOrderID = "WorkOrderID",
IsDeleted = false,
IsActive = true,
EquipmentID = "EquipmentID"
};
////Act
mockEventContext.Setup(s => s.PlanEvent.InsertOne(It.IsAny<PlanEvent>(), It.IsAny<InsertOneOptions>(),It.IsAny<System.Threading.CancellationToken>()));
var result = await eventRepository.InsertEventAsync(pEvent);
////Assert
result.Should().NotBeNull();
Assert.AreEqual(pEvent.ID, result);
}

DB2ResultSet.Read(). ERROR [24000] [IBM] CLI0115E Invalid cursor state. SQLSTATE=24000

We have a problem with the application when it runs DB2ResultSet.Read(). Sometimes we get the error:
ERROR [24000] [IBM] CLI0115E Invalid cursor state. SQLSTATE=24000
Database : DB2 for Linux, UNIX and Windows V10.5
Client: Windows 7 64bit
Method:
public int EventGetEvSegmentCnt(string SegmentID, string strEvntGroup)
{
int strGroupCnt = 0;
string strSQL = string.Empty;
DB2ResultSet objRs;
if (string.IsNullOrEmpty(SegmentID) || string.IsNullOrEmpty(strEvntGroup))
{
strGroupCnt = 0;
}
else
{
strSQL = " SELECT COUNT(EVNT_CODE) AS EVNT_GROUP_COUNT FROM E_SEGMENT_EVENT WHERE C_SEGMENT_ID = " + SegmentID + " AND EVNT_GROUP = " + strEvntGroup;
Common.DatabaseHelper helper = new Common.DatabaseHelper();
objRs = helper.ExecuteResultSet(strSQL);
if (objRs.Read())
{
strGroupCnt = 0;
}
else
{
strGroupCnt = int.Parse(objRs["EVNT_GROUP_COUNT"].ToString());
}
}
return strGroupCnt;
}
Error Message:
[Information] System.Web.HttpUnhandledException (0x80004005):
Exception of type 'System.Web.HttpUnhandledException' was thrown. ---> IBM.Data.DB2.DB2Exception (0x80004005): ERROR [24000] [IBM] CLI0115E Invalid cursor state. SQLSTATE=24000
at IBM.Data.DB2.DB2DataBuffer.FetchScroll(FetchType fetchType, Int64 offset, Int32 numRows)
at IBM.Data.DB2.DB2DataBuffer.FetchNext()
at IBM.Data.DB2.DB2DataReader.Fetch(FetchDirection direction, Int64 offset, Boolean& isDeleted)
at IBM.Data.DB2.DB2ResultSet.Read()
Please help.
DatabaseHelper:
public class DatabaseHelper
{
public DatabaseHelper()
{
}
public DataSet ExecuteDataSet(string commandText, List<DB2Parameter> parameters = null)
{
var command = GetCommand(commandText, parameters);
var adapter = new DB2DataAdapter();
adapter.SelectCommand = command;
var ds = new DataSet();
adapter.Fill(ds);
adapter.Dispose();
return ds;
}
public DB2DataReader ExecuteReader(string commandText, List<DB2Parameter> parameters = null)
{
var command = GetCommand(commandText, parameters);
return command.ExecuteReader(CommandBehavior.CloseConnection);
}
public DB2ResultSet ExecuteResultSet(string commandText, List<DB2Parameter> parameters = null)
{
var command = GetCommand(commandText, parameters);
//DB2ResultSet result = command.ExecuteResultSet(CommandBehavior.Default, DB2CursorType.Dynamic);
DB2ResultSet result = command.ExecuteResultSet(CommandBehavior.CloseConnection, DB2CursorType.Dynamic);
return result;
}
public DB2ResultSet ExecuteResultSetStatic(string commandText, List<DB2Parameter> parameters = null)
{
var command = GetCommand(commandText, parameters);
// DB2ResultSet result = command.ExecuteResultSet(CommandBehavior.Default, DB2CursorType.Static);
DB2ResultSet result = command.ExecuteResultSet(CommandBehavior.CloseConnection, DB2CursorType.Static);
return result;
}
public int ExecuteNonQuery(string commandText, List<DB2Parameter> parameters = null)
{
var command = GetCommand(commandText, parameters);
int result = command.ExecuteNonQuery();
command.Connection.Close();
return result;
}
public void ExecuteSQLArray(string[] arrSQL)
{
var command = new DB2Command();
command.Connection = GetConnection();
command.CommandType = CommandType.Text;
command.CommandTimeout = 600;
command.Transaction = command.Connection.BeginTransaction();
try
{
foreach (string strSQL in arrSQL)
{
if (!string.IsNullOrEmpty(strSQL))
{
command.CommandText = strSQL;
command.ExecuteNonQuery();
}
}
command.Transaction.Commit();
}
catch (Exception)
{
command.Transaction.Rollback();
throw;
}
command.Connection.Close();
command.Dispose();
}
public DB2Connection GetConnection()
{
var conn = new DB2Connection(System.Configuration.ConfigurationManager.ConnectionStrings["DB2_Conn"].ConnectionString);
conn.Open();
return conn;
}
public DB2Command GetCommand(string commandText, List<DB2Parameter> parameters)
{
var command = new DB2Command(commandText);
command.Connection = GetConnection();
command.CommandTimeout = 600;
if (parameters != null)
{
foreach (var parameter in parameters)
{
command.Parameters.Add(parameter);
}
}
return command;
}
}
try this :
public int EventGetEvSegmentCnt(string SegmentID, string strEvntGroup)
{
if (string.IsNullOrEmpty(SegmentID) || string.IsNullOrEmpty(strEvntGroup)) return 0;
int strGroupCnt = 0;
DataSet objDS=null;
string strSQL = string.Format("SELECT COUNT(EVNT_CODE) AS EVNT_GROUP_COUNT FROM E_SEGMENT_EVENT WHERE C_SEGMENT_ID = {0} AND EVNT_GROUP = {1}", SegmentID, strEvntGroup);
try
{
Common.DatabaseHelper helper = new Common.DatabaseHelper();
objDS = helper.ExecuteDataSet(strSQL);
if (objDS.Tables[0].Rows.Count > 0)
{
strGroupCnt = int.Parse(objDS.Tables[0].Rows[0]["EVNT_GROUP_COUNT"].ToString());
}
}
finally
{
if (objDS != null) objDS.Dispose();
}
return strGroupCnt;
}
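As a side note, DatabaseHelper.GetCommand already accepts a List<DB2Parameter>, so the same query can be issued with parameters instead of concatenating the values into the SQL text. A minimal sketch under that assumption; the @segmentId/@evntGroup marker names are illustrative, and if your provider version only accepts positional markers, use ? placeholders instead:
// Sketch: the same count query through the existing helper, using parameters
// (DB2Parameter(name, value) constructor assumed available in IBM.Data.DB2).
string strSQL = "SELECT COUNT(EVNT_CODE) AS EVNT_GROUP_COUNT " +
                "FROM E_SEGMENT_EVENT WHERE C_SEGMENT_ID = @segmentId AND EVNT_GROUP = @evntGroup";
var parameters = new List<DB2Parameter>
{
    new DB2Parameter("@segmentId", SegmentID),
    new DB2Parameter("@evntGroup", strEvntGroup)
};
int strGroupCnt = 0;
using (DataSet objDS = new Common.DatabaseHelper().ExecuteDataSet(strSQL, parameters))
{
    if (objDS.Tables[0].Rows.Count > 0)
        strGroupCnt = int.Parse(objDS.Tables[0].Rows[0]["EVNT_GROUP_COUNT"].ToString());
}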

chrome.serial receiveTimeout not working

The code below is a copy, with minor edits, of https://github.com/GoogleChrome/chrome-app-samples/tree/master/serial/ledtoggle. I am able to send a byte and receive a reply, but I am not able to get a timeout error event when the client does not send a reply. I have set the timeout to 50 ms:
this.receiveTimeout = 50;
The entire code follows.
const DEVICE_PATH = 'COM1';
const serial = chrome.serial;
var ab2str = function(buf) {
var bufView = new Uint8Array(buf);
var encodedString = String.fromCharCode.apply(null, bufView);
return decodeURIComponent(escape(encodedString));
};
var str2ab = function(str) {
var encodedString = unescape(str);
var bytes = new Uint8Array(1);
bytes[0] = parseInt(encodedString);
return bytes.buffer;
};
var SerialConnection = function() {
this.connectionId = -1;
this.lineBuffer = "";
this.receiveTimeout =50;
this.boundOnReceive = this.onReceive.bind(this);
this.boundOnReceiveError = this.onReceiveError.bind(this);
this.onConnect = new chrome.Event();
this.onReadLine = new chrome.Event();
this.onError = new chrome.Event();
};
SerialConnection.prototype.onConnectComplete = function(connectionInfo) {
if (!connectionInfo) {
log("Connection failed.");
return;
}
this.connectionId = connectionInfo.connectionId;
chrome.serial.onReceive.addListener(this.boundOnReceive);
chrome.serial.onReceiveError.addListener(this.boundOnReceiveError);
this.onConnect.dispatch();
};
SerialConnection.prototype.onReceive = function(receiveInfo) {
if (receiveInfo.connectionId !== this.connectionId) {
return;
}
this.lineBuffer += ab2str(receiveInfo.data);
var index;
while ((index = this.lineBuffer.indexOf('$')) >= 0) {
var line = this.lineBuffer.substr(0, index + 1);
this.onReadLine.dispatch(line);
this.lineBuffer = this.lineBuffer.substr(index + 1);
}
};
SerialConnection.prototype.onReceiveError = function(errorInfo) {
log('Error');
if (errorInfo.connectionId === this.connectionId) {
log('Error');
this.onError.dispatch(errorInfo.error);
log('Error');
}
log('Error');
};
SerialConnection.prototype.connect = function(path) {
serial.connect(path, this.onConnectComplete.bind(this))
};
SerialConnection.prototype.send = function(msg) {
if (this.connectionId < 0) {
throw 'Invalid connection';
}
serial.send(this.connectionId, str2ab(msg), function() {});
};
SerialConnection.prototype.disconnect = function() {
if (this.connectionId < 0) {
throw 'Invalid connection';
}
serial.disconnect(this.connectionId, function() {});
};
var connection = new SerialConnection();
connection.onConnect.addListener(function() {
log('connected to: ' + DEVICE_PATH);
});
connection.onReadLine.addListener(function(line) {
log('read line: ' + line);
});
connection.onError.addListener(function() {
log('Error: ');
});
connection.connect(DEVICE_PATH);
function log(msg) {
var buffer = document.querySelector('#buffer');
buffer.innerHTML += msg + '<br/>';
}
document.querySelector('button').addEventListener('click', function() {
connection.send(2);
});
Maybe I'm reading the code incorrectly, but at no point do you pass receiveTimeout into chrome.serial. The method signature is chrome.serial.connect(string path, ConnectionOptions options, function callback), where options is an optional parameter. You never pass anything into options. Fix that and let us know what happens.