Optimized constraint scheduling with Google OR-Tools slow and not finding a solution - or-tools

Our team is trying to do constraint scheduling, requiring the model to fill in who will do which task on which day. We started from the https://developers.google.com/optimization/scheduling/employee_scheduling example.
We succeeded in scheduling simple, obvious things, e.g. assigning a task after another task, but the moment we increase complexity or size the model is unable to find a feasible solution. We tried modelling the constraints differently in the ModelWaitingForConstraintsOptimized method, but to no avail.
using Binocs.Scheduling.AI.Host.Data;
using Binocs.Scheduling.AI.Host.Domain;
using Google.OrTools.Sat;
using Constraint = Binocs.Scheduling.AI.Host.Domain.Constraint;
namespace Binocs.Scheduling.AI.Host;
public class Program
{
public static void Main()
{
var scheduleItems = new ScheduleItemQuery().Query().ToArray();
var teamMembers = (new TeamMembersQuery().Query()).ToDictionary(_ => _.Id);
var equipments = (new EquipmentsQuery().Query()).ToDictionary(_ => _.Id);
var teamMemberAvailabilities = new TeamMembersAvailabilitiesQuery().Query(teamMembers.Keys);
var equipmentAvailabilities = new EquipmentAvailabilitiesQuery().Query(equipments.Keys);
var constraints = (new ConstraintsQuery().Query()).GroupBy(_ => _.From).ToDictionary(_ => _.Key, _ => _.ToArray());
#region AIData
var horizon = new Horizon(8);
var time = new Time(DateTime.Today);
var aiScheduleItemFactory = new AIScheduleItemFactory(time);
var aiAvailabilityFactory = new AIAvailabilityFactory(time);
var aiScheduledItemFactory = new AIScheduledItemFactory(time);
var aiItems = scheduleItems.Select(aiScheduleItemFactory.Create).ToArray();
var aiScheduledItems = scheduleItems.Select(aiScheduledItemFactory.Create).ToArray();
var allAIAvailabilities = teamMemberAvailabilities.Select(aiAvailabilityFactory.Create)
.Concat(equipmentAvailabilities.Select(aiAvailabilityFactory.Create)).ToArray();
var availabilitiesPerResource = allAIAvailabilities
.Where(_ => _.Offset >= 0 && horizon.WithinHorizon(_.Offset))
.GroupBy(_ => _.Id)
.OrderBy(_ => _.Key)
.ToArray();
var horizonCompensatedAvailabilities = availabilitiesPerResource.ToDictionary(_ => _.Key, _ =>
{
var arr = horizon.EmptyHorizon();
foreach (var aiTeamMemberAvailability in _)
{
arr[aiTeamMemberAvailability.Offset] = aiTeamMemberAvailability.Hours;
}
return arr;
});
var taskSizes = aiItems.Select(_ => _.PlannedHours).ToArray();
var dueIntervals = aiItems.Select(_ => _.Deadline).ToArray();
var allAvailabilities = horizonCompensatedAvailabilities.Select(_ => _.Value).ToArray();
var totalDistinctWorkers = allAvailabilities.Length;
var totalDistinctTasks = taskSizes.Length;
var totalDistinctIntervals = horizon.Intervals;
var indexedDistinctWorkers = Enumerable.Range(0, totalDistinctWorkers).ToArray();
var indexedDistinctTasks = Enumerable.Range(0, totalDistinctTasks).ToArray();
var indexedDistinctIntervals = Enumerable.Range(0, totalDistinctIntervals).ToArray();
#endregion
#region AIModel
var model = new CpModel();
var allVariablesTheAINeedsToFillIn = new IntVar[totalDistinctWorkers, totalDistinctTasks, totalDistinctIntervals];
var flattenedAllVariablesTheAINeedsToFillIn = new IntVar[totalDistinctWorkers * totalDistinctTasks * totalDistinctIntervals];
ModelCompetences(indexedDistinctWorkers, indexedDistinctTasks, indexedDistinctIntervals, allAIAvailabilities, scheduleItems, allVariablesTheAINeedsToFillIn, model, totalDistinctWorkers, totalDistinctTasks, flattenedAllVariablesTheAINeedsToFillIn);
ModelAvailabilities(indexedDistinctIntervals, indexedDistinctWorkers, totalDistinctTasks, indexedDistinctTasks, allVariablesTheAINeedsToFillIn, model, taskSizes, allAvailabilities);
ModelAssignments(indexedDistinctTasks, totalDistinctWorkers, totalDistinctIntervals, indexedDistinctWorkers, indexedDistinctIntervals, allVariablesTheAINeedsToFillIn, model);
var taskIntervals = new IntVar[totalDistinctTasks];
ModelDates(indexedDistinctTasks, taskIntervals, model, totalDistinctIntervals, indexedDistinctWorkers, indexedDistinctIntervals, allVariablesTheAINeedsToFillIn);
ModelWaitingForConstraints(scheduleItems, constraints, model, taskIntervals);
//ModelWaitingForConstraintsOptimized(scheduleItems, constraints, model, horizon, indexedDistinctWorkers, flattenedAllVariablesTheAINeedsToFillIn);
ModelEquipmentAtSameTimeAsTeammember(scheduleItems, model, taskIntervals);
ModelObjective(flattenedAllVariablesTheAINeedsToFillIn, indexedDistinctWorkers, indexedDistinctTasks, indexedDistinctIntervals, totalDistinctWorkers, totalDistinctTasks, dueIntervals, taskSizes, model);
#endregion
#region Solve
var solver = new CpSolver();
var status = solver.Solve(model);
Console.WriteLine($"Solve status: {status}");
if (status == CpSolverStatus.Optimal || status == CpSolverStatus.Feasible)
{
var resourceLookup = availabilitiesPerResource.Select(_ => _.First()).ToArray();
Console.WriteLine($"Total cost: {solver.ObjectiveValue}\n");
foreach (var interval in indexedDistinctIntervals)
{
foreach (var worker in indexedDistinctWorkers)
{
foreach (var task in indexedDistinctTasks)
{
if (solver.Value(allVariablesTheAINeedsToFillIn[worker, task, interval]) > 0.5)
{
var late = interval > dueIntervals[task] ? "L" : "T";
aiScheduledItems[task].WithPlanned(interval);
aiScheduledItems[task].WithResource(resourceLookup[worker].Id);
Console.WriteLine($"{late}{taskSizes[task]} interval {interval} - Due {dueIntervals[task]} - Size {taskSizes[task]} - Worker {(resourceLookup[worker].ForEquipment ? equipments[resourceLookup[worker].Id].Code : teamMembers[resourceLookup[worker].Id].Code)} assigned to task {scheduleItems[task].ScheduleItemId} - {scheduleItems[task].ServiceItemActivityId} {(scheduleItems[task].EquipmentPlannedHours == 0m ? "" : "EQ")}");
}
}
}
}
}
else
{
Console.WriteLine("No solution found.");
}
Console.WriteLine("Statistics");
Console.WriteLine($" - conflicts : {solver.NumConflicts()}");
Console.WriteLine($" - branches : {solver.NumBranches()}");
Console.WriteLine($" - wall time : {solver.WallTime()}s");
#endregion
}
#region ModelMethods
static void ModelCompetences(int[] indexedDistinctWorkers, int[] indexedDistinctTasks, int[] indexedDistinctIntervals,
AIAvailability[] allAiAvailabilities, ScheduleItem[] scheduleItems, IntVar[,,] allVariablesTheAiNeedsToFillIn,
CpModel cpModel, int totalDistinctWorkers, int totalDistinctTasks,
IntVar[] flattenedAllVariablesTheAiNeedsToFillIn)
{
foreach (var worker in indexedDistinctWorkers)
{
foreach (var task in indexedDistinctTasks)
{
foreach (var interval in indexedDistinctIntervals)
{
if (allAiAvailabilities[worker].IsCompetentFor(scheduleItems[task]))
{
allVariablesTheAiNeedsToFillIn[worker, task, interval] =
cpModel.NewBoolVar($"allVariablesTheAINeedsToFillIn[{worker},{task},{interval}]");
}
else
{
allVariablesTheAiNeedsToFillIn[worker, task, interval] =
cpModel.NewConstant(0, $"allVariablesTheAINeedsToFillIn[{worker},{task},{interval}]");
}
var flattenedIndex =
MatrixOperations.Flatten(worker, task, interval, totalDistinctWorkers, totalDistinctTasks);
flattenedAllVariablesTheAiNeedsToFillIn[flattenedIndex] =
allVariablesTheAiNeedsToFillIn[worker, task, interval];
}
}
}
}
static void ModelAvailabilities(int[] indexedDistinctIntervals, int[] indexedDistinctWorkers, int totalDistinctTasks, int[] indexedDistinctTasks,
IntVar[,,] intVars, CpModel model, int[] taskSizes, int[][] allAvailabilities)
{
foreach (var interval in indexedDistinctIntervals)
{
foreach (var worker in indexedDistinctWorkers)
{
var vars = new IntVar[totalDistinctTasks];
foreach (var task in indexedDistinctTasks)
{
vars[task] = intVars[worker, task, interval];
}
model.Add(LinearExpr.ScalProd(vars, taskSizes) <= allAvailabilities[worker][interval]);
}
}
}
static void ModelAssignments(int[] indexedDistinctTasks, int totalDistinctWorkers, int totalDistinctIntervals,
int[] indexedDistinctWorkers, int[] indexedDistinctIntervals, IntVar[,,] allVariablesTheAiNeedsToFillIn,
CpModel model)
{
foreach (var task in indexedDistinctTasks)
{
var vars = new IntVar[totalDistinctWorkers * totalDistinctIntervals];
foreach (var worker in indexedDistinctWorkers)
{
foreach (var interval in indexedDistinctIntervals)
{
vars[MatrixOperations.Flatten(worker, interval, totalDistinctWorkers)] = allVariablesTheAiNeedsToFillIn[worker, task, interval];
}
}
model.Add(LinearExpr.Sum(vars) == 1);
}
}
static void ModelWaitingForConstraints(ScheduleItem[] scheduleItems, Dictionary<int, Constraint[]> constraintsMap, CpModel model,
IntVar[] taskIntervals)
{
for (var i = 0; i < scheduleItems.Length; i++)
{
var current = scheduleItems[i];
if (!constraintsMap.TryGetValue(current.ServiceItemActivityId, out var applicableConstraints)) continue;
foreach (var c in applicableConstraints)
{
var related = scheduleItems.Select((si, index) => new { si, index }).Where(_ =>
_.si.ScheduleItemId == current.ScheduleItemId && _.si.ServiceItemActivityId == c.To);
foreach (var otherSi in related)
{
switch (c.WaitingForType)
{
case 0:
model.Add(taskIntervals[i] >= taskIntervals[otherSi.index] + c.Value);
break;
case 1:
model.Add(taskIntervals[i] == taskIntervals[otherSi.index] + c.Value);
break;
case 2:
model.Add(taskIntervals[i] <= taskIntervals[otherSi.index] + c.Value);
break;
}
}
}
}
}
static void ModelWaitingForConstraintsOptimized(ScheduleItem[] scheduleItems, Dictionary<int, Constraint[]> constraintsMap, CpModel model, Horizon horizon, int[] indexedDistinctWorkers, IntVar[] flattenedAllVariablesTheAINeedsToFillIn)
{
for (var scheduleItemIndex = 0; scheduleItemIndex < scheduleItems.Length; scheduleItemIndex++)
{
var current = scheduleItems[scheduleItemIndex];
if (!constraintsMap.TryGetValue(current.ServiceItemActivityId, out var applicableConstraints)) continue;
foreach (var c in applicableConstraints)
{
var related = scheduleItems.Select((si, i) => new { si, i }).Where(_ =>
_.si.ScheduleItemId == current.ScheduleItemId && _.si.ServiceItemActivityId == c.To).ToList();
for (var horizonDayIndex = 0; horizonDayIndex < horizon.Value.Length; horizonDayIndex++)
{
for (var workerIndex = 0; workerIndex < indexedDistinctWorkers.Length; workerIndex++)
{
var d1 =
MatrixOperations.Flatten(workerIndex, scheduleItemIndex, horizonDayIndex, indexedDistinctWorkers.Length, scheduleItems.Length);
foreach (var otherSi in related)
{
switch (c.WaitingForType)
{
case 0:
var endDayAtLeast = Math.Min(horizonDayIndex + c.Value, horizon.Value.Length - 1);
for (var day = 0; day < endDayAtLeast; day++)
{
var d2Atleast =
MatrixOperations.Flatten(workerIndex, otherSi.i, day, indexedDistinctWorkers.Length, scheduleItems.Length);
model.AddImplication(flattenedAllVariablesTheAINeedsToFillIn[d1], flattenedAllVariablesTheAINeedsToFillIn[d2Atleast].Not());
}
break;
case 1:
var exactDay = horizonDayIndex + c.Value;
if (exactDay > horizon.Value.Length - 1) break;
var d2Exact =
MatrixOperations.Flatten(workerIndex, otherSi.i, exactDay, indexedDistinctWorkers.Length, scheduleItems.Length);
model.AddImplication(flattenedAllVariablesTheAINeedsToFillIn[d1], flattenedAllVariablesTheAINeedsToFillIn[d2Exact]);
break;
case 2:
var startDayAtMost = horizonDayIndex + c.Value;
var endDayAtMost = horizon.Value.Length;
for (var day = startDayAtMost; day < endDayAtMost; day++)
{
var d2AtMost =
MatrixOperations.Flatten(workerIndex, otherSi.i, day, indexedDistinctWorkers.Length, scheduleItems.Length);
model.AddImplication(flattenedAllVariablesTheAINeedsToFillIn[d1], flattenedAllVariablesTheAINeedsToFillIn[d2AtMost].Not());
}
break;
}
}
}
}
}
}
}
static void ModelEquipmentAtSameTimeAsTeammember(ScheduleItem[] scheduleItems, CpModel model, IntVar[] taskIntervals)
{
foreach (var g in scheduleItems.Select((si, i) => new { si, i })
.GroupBy(_ => new { _.si.ScheduleItemId, _.si.ServiceItemActivityId }).Where(_ => _.Count() > 1))
{
model.Add(taskIntervals[g.First().i] == taskIntervals[g.Skip(1).First().i]);
}
}
static void ModelDates(int[] indexedDistinctTasks, IntVar[] taskIntervals, CpModel model, int totalDistinctIntervals,
int[] indexedDistinctWorkers, int[] indexedDistinctIntervals, IntVar[,,] allVariablesTheAiNeedsToFillIn)
{
foreach (var task in indexedDistinctTasks)
{
taskIntervals[task] = model.NewIntVar(0, totalDistinctIntervals - 1, $"taskInterval_{task}");
foreach (var worker in indexedDistinctWorkers)
{
foreach (var interval in indexedDistinctIntervals)
{
model.Add(taskIntervals[task] == interval).OnlyEnforceIf(allVariablesTheAiNeedsToFillIn[worker, task, interval]);
}
}
}
}
static void ModelObjective(IntVar[] flattenedAllVariablesTheAiNeedsToFillIn, int[] indexedDistinctWorkers, int[] indexedDistinctTasks,
int[] indexedDistinctIntervals, int totalDistinctWorkers, int totalDistinctTasks, int[] dueIntervals, int[] taskSizes,
CpModel model)
{
var xDuePassedFlat = new int[flattenedAllVariablesTheAiNeedsToFillIn.Length];
foreach (var worker in indexedDistinctWorkers)
{
foreach (var task in indexedDistinctTasks)
{
foreach (var interval in indexedDistinctIntervals)
{
xDuePassedFlat[MatrixOperations.Flatten(worker, task, interval, totalDistinctWorkers, totalDistinctTasks)] = Math.Max(0, (interval - dueIntervals[task])) - dueIntervals[task];
}
}
}
model.Minimize(LinearExpr.ScalProd(flattenedAllVariablesTheAiNeedsToFillIn, xDuePassedFlat));
}
#endregion
}

Just try:
solver.StringParameters = "num_search_workers:8,log_search_progress:true";
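With log_search_progress enabled you can see where the time goes (presolve, LP, LNS workers), and while experimenting a hard time limit keeps runs short. A sketch; the parameter names come from CP-SAT's sat_parameters proto:
var solver = new CpSolver();
// Parallel search, progress logging, and a 60-second cap while debugging.
solver.StringParameters = "num_search_workers:8,log_search_progress:true,max_time_in_seconds:60";
var status = solver.Solve(model);
Console.WriteLine($"Solve status: {status}");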
Furthermore, this example assumes that num_shift_types == num_nurses. If that does not hold, there are no solutions.
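Relatedly, if your instance can have more tasks than the available capacity, the hard == 1 per task in ModelAssignments is enough on its own to make the whole model infeasible. One standard way out is to let a task go unassigned at a cost; a sketch reusing task and vars from that method (the unassignedPenalties list is hypothetical, to be weighted into the objective):
// Either exactly one (worker, interval) slot is chosen for the task,
// or the task is explicitly unassigned and penalized in the objective.
var assigned = model.NewBoolVar($"task_{task}_assigned");
model.Add(LinearExpr.Sum(vars) == 1).OnlyEnforceIf(assigned);
model.Add(LinearExpr.Sum(vars) == 0).OnlyEnforceIf(assigned.Not());
unassignedPenalties.Add(assigned.Not()); // weight these into Minimize(...)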
When I run the model you sent me, I get:
Starting Search at 14.24s with 16 workers.
10 full subsolvers: [default_lp, no_lp, max_lp, core, reduced_costs, pseudo_costs, quick_restart, quick_restart_no_lp, lb_tree_search, probing]
Interleaved subsolvers: [feasibility_pump, rnd_var_lns_default, rnd_cst_lns_default, graph_var_lns_default, graph_cst_lns_default, rins_lns_default, rens_lns_default]
#Bound 17.99s best:inf next:[-91170,1793684] am1_presolve num_literals:163919 num_am1:129 increase:357955 work_done:100382751
#1 18.23s best:-514 next:[-91170,-515] core fixed_bools:63/171500
#Bound 18.24s best:-514 next:[-90570,-515] bool_core num_cores:0 [] assumptions:139518 depth:0 fixed_bools:63/171500
#2 18.30s best:-1043 next:[-90570,-1044] core fixed_bools:83/171500
#Bound 18.31s best:-1043 next:[-90370,-1044] bool_core num_cores:0 [] assumptions:139498 depth:0 fixed_bools:83/171500
#3 18.36s best:-1504 next:[-90370,-1505] quick_restart_no_lp fixed_bools:185/171371
#Bound 18.62s best:-1504 next:[-2305,-1505] probing initial_propagation
#Bound 20.50s best:-1504 next:[-1880,-1505] probing
#4 21.48s best:-1593 next:[-1880,-1594] graph_cst_lns_default(d=0.29 s=219 t=0.10 p=0.00)
#Bound 22.11s best:-1593 next:[-1600,-1594] max_lp
#5 22.38s best:-1594 next:[-1600,-1595] graph_cst_lns_default(d=0.28 s=224 t=0.10 p=0.33)
#6 24.04s best:-1595 next:[-1600,-1596] graph_var_lns_default(d=0.94 s=233 t=0.10 p=1.00)
#7 25.90s best:-1596 next:[-1600,-1597] rnd_var_lns_default(d=0.57 s=246 t=0.10 p=0.50)
#8 26.53s best:-1597 next:[-1600,-1598] graph_cst_lns_default(d=0.53 s=249 t=0.10 p=0.57)
#9 28.21s best:-1598 next:[-1600,-1599] rnd_var_lns_default(d=0.75 s=256 t=0.10 p=0.60)
#10 28.45s best:-1599 next:[-1600,-1600] rnd_cst_lns_default(d=0.58 s=257 t=0.10 p=0.56)
#11 30.27s best:-1600 next:[] graph_cst_lns_default(d=0.80 s=259 t=0.10 p=0.70)
#Done 30.28s core
#Done 30.31s probing
#Done 30.45s pseudo_costs
...
CpSolverResponse summary:
status: OPTIMAL
objective: -1600
best_bound: -1600
booleans: 171371
conflicts: 0
branches: 10123
propagations: 10736218
integer_propagations: 10978261
restarts: 10077
lp_iterations: 24982
walltime: 31.5911
usertime: 252.682
deterministic_time: 67.9784
gap_integral: 7.71199
For a more general problem, you should look at the shift scheduling example.
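Also, when a model keeps coming back INFEASIBLE, a standard CP-SAT debugging pattern (not specific to your code) is to soften the suspect constraints with violation literals and minimize them; any literal forced to true points at a conflicting constraint. A minimal self-contained sketch:
using Google.OrTools.Sat;
var model = new CpModel();
var taskA = model.NewIntVar(0, 7, "taskA");
var taskB = model.NewIntVar(0, 7, "taskB");
// Soft version of "taskB runs at least one day after taskA": the precedence
// only has to hold when the violation literal is false.
var violated = model.NewBoolVar("precedence_violated");
model.Add(taskB >= taskA + 1).OnlyEnforceIf(violated.Not());
model.Minimize(violated);
var solver = new CpSolver();
Console.WriteLine(solver.Solve(model)); // here: Optimal with violated == 0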

Related

Open Xml - File Needs To Be Repaired To Open

I am having an issue where the file generation process works as expected, but when I open the Excel file it says that it is corrupt and needs to be repaired. When the repair is complete, the file opens and all of the data is there.
The error message that I am receiving is as follows:
Removed Records: Cell information from /xl/worksheets/sheet1.xml part
My code is as follows:
using (var workbookDocument = SpreadsheetDocument.Create(staging, DocumentFormat.OpenXml.SpreadsheetDocumentType.Workbook))
{
var count = query.Count();
var worksheetNumber = 1;
var worksheetCapacity = Convert.ToInt32(100000);
var worksheetCount = Convert.ToInt32(Math.Ceiling(Convert.ToDouble(count) / worksheetCapacity));
var workbookPart = workbookDocument.AddWorkbookPart();
var worksheetInfo = new List<WorksheetData>();
OpenXmlWriter worksheetWriter;
while (worksheetNumber <= worksheetCount)
{
var worksheetLine = 1;
var worksheetName = sheet + "_" + Convert.ToString(worksheetNumber);
var worksheetPart = workbookDocument.WorkbookPart.AddNewPart<WorksheetPart>();
var worksheetId = workbookDocument.WorkbookPart.GetIdOfPart(worksheetPart);
var worksheetKey = Convert.ToUInt32(worksheetNumber);
var worksheetAttributes = new List<OpenXmlAttribute>();
worksheetAttributes.Add(new OpenXmlAttribute("r", null, worksheetLine.ToString()));
worksheetInfo.Add(new WorksheetData() { Id = worksheetId, Key = worksheetKey, Name = worksheetName });
worksheetWriter = OpenXmlWriter.Create(worksheetPart);
worksheetWriter.WriteStartElement(new Worksheet());
worksheetWriter.WriteStartElement(new SheetData());
worksheetWriter.WriteStartElement(new Row(), worksheetAttributes);
for (var i = 0; i < headers.Count; i++)
{
var worksheetCell = new DocumentFormat.OpenXml.Spreadsheet.Cell();
var worksheetValue = new DocumentFormat.OpenXml.Spreadsheet.CellValue(headers[i]);
worksheetAttributes.Clear();
worksheetAttributes.Add(new OpenXmlAttribute("t", null, "str"));
worksheetAttributes.Add(new OpenXmlAttribute("r", null, GetColumnReference(worksheetLine, i)));
worksheetWriter.WriteStartElement(worksheetCell, worksheetAttributes);
worksheetWriter.WriteElement(worksheetValue);
worksheetWriter.WriteEndElement();
}
worksheetWriter.WriteEndElement();
worksheetLine++;
var skip = ((worksheetNumber - 1) * worksheetCapacity);
var results = query.SelectProperties(columns).Skip(skip).Take(worksheetCapacity).ToList();
for (var j = 0; j < results.Count; j++)
{
worksheetAttributes.Clear();
worksheetAttributes.Add(new OpenXmlAttribute("r", null, worksheetLine.ToString()));
worksheetWriter.WriteStartElement(new Row());
for (var k = 0; k < columns.Count(); k++)
{
var column = columns[k].Split((".").ToCharArray()).Last();
var value = results[j].GetType().GetField(column).GetValue(results[j]);
var type = value?.GetType().Name;
var text = ExportFormatter.Format(type, value);
worksheetAttributes.Clear();
worksheetAttributes.Add(new OpenXmlAttribute("t", null, "str"));
worksheetAttributes.Add(new OpenXmlAttribute("r", null, GetColumnReference(worksheetLine, j)));
worksheetWriter.WriteStartElement(new Cell());
worksheetWriter.WriteElement(new CellValue(text));
worksheetWriter.WriteEndElement();
}
worksheetWriter.WriteEndElement();
worksheetLine++;
}
worksheetWriter.WriteEndElement();
worksheetWriter.WriteEndElement();
worksheetWriter.Close();
worksheetNumber++;
}
worksheetWriter = OpenXmlWriter.Create(workbookDocument.WorkbookPart);
worksheetWriter.WriteStartElement(new Workbook());
worksheetWriter.WriteStartElement(new Sheets());
for (var i = 0; i < worksheetInfo.Count; i++)
{
worksheetWriter.WriteElement(new Sheet()
{
Name = worksheetInfo[i].Name,
SheetId = worksheetInfo[i].Key,
Id = worksheetInfo[i].Id
});
}
worksheetWriter.WriteEndElement();
worksheetWriter.WriteEndElement();
worksheetWriter.Close();
}
I use the below class to track the worksheet information:
private class WorksheetData
{
public String Id { get; set; }
public UInt32 Key { get; set; }
public String Name { get; set; }
}
Can anyone identify why this is happening?
Perhaps an extra ending tag, or one that's missing?
Finally got this to work, there were a few issues.
The cell references A1 A2 A3, etc... were not correct in the code.
The row numbers were not being tracked correctly.
The attributes being applied to the cell elements were not correct because they weren't being cleared prior to writing.
The usage of the CellValue was not functioning as expected. Not sure exactly why, but when the CellValue is written as a separate element the file doesn't open properly in Excel. Simply using the Cell and setting the DataType and CellValue properties did work. Note - the underlying XML looked exactly the same between the two, but only the second worked.
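Side by side, the difference between the two (both fragments are taken from the code in this post):
// What failed: writing the CellValue as a separate child element.
worksheetWriter.WriteStartElement(new Cell(), worksheetAttributes);
worksheetWriter.WriteElement(new CellValue(text));
worksheetWriter.WriteEndElement();
// What worked: populating the Cell object and writing it in one call.
var worksheetCell = new Cell();
worksheetCell.DataType = CellValues.String;
worksheetCell.CellValue = new CellValue(text);
worksheetWriter.WriteElement(worksheetCell);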
The final code from this is as follows:
public static ExportInfo Export<T>(this IQueryable<T> query, String temp, String path, List<ExportField> fields)
{
var worker = new ExportWorker();
return worker.Export<T>(query, temp, path, fields);
}
public static class ExportFormatter
{
public static String Format(String type, Object value)
{
if (value == null)
{
return "";
}
else
{
var text = "";
switch (type)
{
case "Decimal":
var decimalValue = (Decimal)value;
text = decimal.Round(decimalValue, 2, MidpointRounding.AwayFromZero).ToString();
break;
case "DateTimeOffset":
var dateTimeOffset = (DateTimeOffset)value;
text = dateTimeOffset.ToUniversalTime().ToString("MM/dd/yyyy");
break;
case "DateTime":
var dateTime = (DateTime)value;
text = dateTime.ToUniversalTime().ToString("MM/dd/yyyy");
break;
default:
text = Convert.ToString(value);
break;
}
return text;
}
}
}
public class ExportWorker
{
String record;
String staging;
String destination;
Thread thread;
Timer timer;
public ExportInfo Export<T>(IQueryable<T> query, String temp, String path, List<ExportField> fields)
{
var selections = from a in fields group a by new { a.Field } into g select new { g.Key.Field, Header = g.Max(x => x.Header) };
var headers = (from a in selections select a.Header).ToList();
var columns = (from a in selections select a.Field).Distinct().ToList();
var entity = query.ElementType.ToString();
var array = entity.Split((".").ToCharArray());
var sheet = array[array.Length - 1];
var key = Guid.NewGuid().ToString().Replace("-", "_");
var name = key + ".xlsx";
var log = key + ".txt";
var timeout = 60 * 60000;
staging = temp + name;
destination = path + name;
record = path + log;
thread = new Thread(
new ThreadStart(() =>
{
try
{
using (var workbookDocument = SpreadsheetDocument.Create(staging, DocumentFormat.OpenXml.SpreadsheetDocumentType.Workbook))
{
var count = query.Count();
var worksheetNumber = 1;
var worksheetCapacity = Convert.ToInt32(100000);
var worksheetCount = Convert.ToInt32(Math.Ceiling(Convert.ToDouble(count) / worksheetCapacity));
var workbookPart = workbookDocument.AddWorkbookPart();
var worksheetInfo = new List<WorksheetData>();
OpenXmlWriter worksheetWriter;
while (worksheetNumber <= worksheetCount)
{
var worksheetLine = 1;
var worksheetThrottle = 0;
var worksheetName = sheet + "_" + Convert.ToString(worksheetNumber);
var worksheetPart = workbookDocument.WorkbookPart.AddNewPart<WorksheetPart>();
var worksheetId = workbookDocument.WorkbookPart.GetIdOfPart(worksheetPart);
var worksheetKey = Convert.ToUInt32(worksheetNumber);
var worksheetAttributes = new List<OpenXmlAttribute>();
worksheetAttributes.Add(new OpenXmlAttribute("r", null, worksheetLine.ToString()));
worksheetInfo.Add(new WorksheetData() { Id = worksheetId, Key = worksheetKey, Name = worksheetName });
worksheetWriter = OpenXmlWriter.Create(worksheetPart);
worksheetWriter.WriteStartElement(new Worksheet());
worksheetWriter.WriteStartElement(new SheetData());
worksheetWriter.WriteStartElement(new Row(), worksheetAttributes);
for (var i = 0; i < headers.Count; i++)
{
var worksheetCell = new Cell();
worksheetCell.DataType = CellValues.String;
worksheetCell.CellValue = new CellValue(headers[i]);
worksheetWriter.WriteElement(worksheetCell);
worksheetAttributes.Clear();
}
worksheetWriter.WriteEndElement();
worksheetLine++;
var skip = ((worksheetNumber - 1) * worksheetCapacity);
var results = query.SelectProperties(columns).Skip(skip).Take(worksheetCapacity).ToList();
for (var j = 0; j < results.Count; j++)
{
if (worksheetThrottle >= 5) { worksheetThrottle = 0; System.Threading.Thread.Sleep(1); }
worksheetAttributes.Clear();
worksheetAttributes.Add(new OpenXmlAttribute("r", null, worksheetLine.ToString()));
worksheetWriter.WriteStartElement(new Row(), worksheetAttributes);
for (var k = 0; k < columns.Count(); k++)
{
var column = columns[k].Split((".").ToCharArray()).Last();
var value = results[j].GetType().GetField(column).GetValue(results[j]);
var type = value?.GetType().Name;
var text = (String)ExportFormatter.Format(type, value);
var worksheetCell = new Cell();
worksheetCell.DataType = CellValues.String;
worksheetCell.CellValue = new CellValue(text);
worksheetWriter.WriteElement(worksheetCell);
worksheetAttributes.Clear();
}
worksheetWriter.WriteEndElement();
worksheetLine++;
worksheetThrottle++;
}
worksheetWriter.WriteEndElement();
worksheetWriter.WriteEndElement();
worksheetWriter.Close();
worksheetNumber++;
}
worksheetWriter = OpenXmlWriter.Create(workbookDocument.WorkbookPart);
worksheetWriter.WriteStartElement(new Workbook());
worksheetWriter.WriteStartElement(new Sheets());
for (var i = 0; i < worksheetInfo.Count; i++)
{
worksheetWriter.WriteElement(new Sheet()
{
Name = worksheetInfo[i].Name,
SheetId = worksheetInfo[i].Key,
Id = worksheetInfo[i].Id
});
}
worksheetWriter.WriteEndElement();
worksheetWriter.WriteEndElement();
worksheetWriter.Close();
}
var logsfile = File.CreateText(record);
var datafile = (new DirectoryInfo(temp)).GetFiles().FirstOrDefault(a => a.Name == name);
datafile.MoveTo(destination);
logsfile.WriteLine("succeeded");
logsfile.Close();
}
catch (Exception ex)
{
try { File.Delete(staging); } catch (Exception) { }
var logsfile = File.CreateText(record);
logsfile.WriteLine("failed");
logsfile.WriteLine(ex.ToString());
logsfile.Close();
}
})
);
thread.Start();
timer = new Timer(Expire, null, timeout, Timeout.Infinite);
return new ExportInfo() { File = destination, Log = record };
}
void Expire(object state)
{
try { File.Delete(staging); } catch (Exception) { }
var logsfile = File.CreateText(record);
logsfile.WriteLine("timeout");
logsfile.Close();
thread.Abort();
}
private class WorksheetData
{
public String Id { get; set; }
public UInt32 Key { get; set; }
public String Name { get; set; }
}
}
After making those adjustments, the export works beautifully.
Also, Open XML solved a lot of problems that I was having with memory management.
Using the above approach allowed me to export 3 files, each with 1.5 million rows (40 columns) in about 10 minutes.
During the export process, CPU utilization never exceeded 35% and it never used more than 1/10 of a gig of memory. Bravo...

opcua session was closed by client

I have written the attached OpcUaConnector class for OPC UA connection related activities.
But it is not handling the session. For example:
In the OPC UA configuration I disabled the endpoint
In the KEPServer configuration I did Runtime > Reinitialize
The Windows service is throwing:
Source : System.Reactive.Core
InnerException : The session was closed by client
and stopping the Windows service, as this error goes unhandled.
Can someone suggest how to handle the session in OPC UA?
public class OpcUaConnector
{
private static SimplerAES simplerAES = new SimplerAES();
private DataContainer dataContainer = null;
private UaTcpSessionChannel channel;
private string opcServerName = string.Empty;
private string opcUserId = string.Empty;
private string opcPassword = string.Empty;
private static ILog LogOpcStore;
private static System.IDisposable token;
private static uint id;
public OpcUaConnector(ILog Log)
{
InitializeLogOpcStore(Log);
}
private static void InitializeLogOpcStore(ILog Log)
{
LogOpcStore = Log;
}
public async Task OpenOpcConnection()
{
try
{
if ((!string.IsNullOrEmpty(this.opcServerName) & (this.opcServerName != AppMain.MyAppSettings.OpcServer)) ||
(!string.IsNullOrEmpty(this.opcUserId) & (this.opcUserId != AppMain.MyAppSettings.OpcUserId)) ||
(!string.IsNullOrEmpty(this.opcPassword) & (this.opcPassword != AppMain.MyAppSettings.OpcPassword)))
{
await channel.CloseAsync();
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
if (channel==null || (channel != null && (channel.State == CommunicationState.Closed || channel.State == CommunicationState.Faulted)))
{
var appDescription = new ApplicationDescription()
{
ApplicationName = "MyAppName",
ApplicationUri = $"urn:{System.Net.Dns.GetHostName()}:MyAppName",
ApplicationType = ApplicationType.Client,
};
//application data won't be deleted when uninstall
var certificateStore = new DirectoryStore(
Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "MyAppName", "pki"),
true, true
);
//if the Ethernet cable unplugs or the Wifi drops out,
//you have some timeouts that can keep the session open for a while.
//There is a SessionTimeout (default of 2 min).
this.channel = new UaTcpSessionChannel(
appDescription,
certificateStore,
SignInOpc,
AppMain.MyAppSettings.OpcServer,
null,
options: new UaTcpSessionChannelOptions { SessionTimeout = 120000 });
await channel.OpenAsync();
//LogOpcStore.Info(String.Format("Opc connection sucessful"));
}
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.HResult + " " + ex.Message, "C052");
throw serviceException;
}
}
private static async Task RecursivelyFindNode(UaTcpSessionChannel channel, NodeId nodeid)
{
BrowseRequest browseRequest = new BrowseRequest
{
NodesToBrowse = new BrowseDescription[] { new BrowseDescription { NodeId = nodeid, BrowseDirection = BrowseDirection.Forward, ReferenceTypeId = NodeId.Parse(ReferenceTypeIds.HierarchicalReferences), NodeClassMask = (uint)NodeClass.Variable | (uint)NodeClass.Object, IncludeSubtypes = true, ResultMask = (uint)BrowseResultMask.All } },
};
BrowseResponse browseResponse = await channel.BrowseAsync(browseRequest);
foreach (var rd1 in browseResponse.Results[0].References ?? new ReferenceDescription[0])
{
uint chid = AppMain.MyTagDatabase.GetClientHandleByTag(rd1.DisplayName.ToString());
if (chid > 0)
{
AppMain.MyTagDatabase.UpdateNodeByClientHandle(chid, rd1.NodeId.ToString());
}
await RecursivelyFindNode(channel, ExpandedNodeId.ToNodeId(rd1.NodeId, channel.NamespaceUris));
}
}
public async Task CreateSubscription(DataContainer dc)
{
double curReadingValue;
try
{
dataContainer = dc;
await RecursivelyFindNode(channel, NodeId.Parse(ObjectIds.RootFolder));
if (AppMain.MyTagDatabase.GetCntTagsNotInOpcServer() == AppMain.MyTagDatabase.GetTagCount())
{
//no need to create subscription
return;
}
//subscription timeout that is the product of PublishingInterval * LifetimeCount:
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest);
id = subscriptionResponse.SubscriptionId;
var itemsToCreate = new MonitoredItemCreateRequest[AppMain.MyTagDatabase.GetTagHavingNodeCount()];
int i = 0;
foreach (var item in AppMain.MyTagDatabase.GetMyTagDatabase())
{
var itemKey = item.Key;
var itemValue = item.Value;
itemsToCreate[i] = new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse(itemValue.NodeId), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = itemKey, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } };
i++;
}
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = itemsToCreate,
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
}
catch (Exception ex)
{
//If the interruption lasts longer than these timeouts then the SessionChannel and Subscriptions will need to be recreated.
channel = null;
FatalServiceException fatalserviceException = new FatalServiceException(ex.Message, "C052");
throw fatalserviceException;
}
}
public async Task DeleteSubscription()
{
try
{
var request = new DeleteSubscriptionsRequest
{
SubscriptionIds = new uint[] { id }
};
await channel.DeleteSubscriptionsAsync(request);
token.Dispose();
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.Message, "C052");
throw serviceException;
}
}
private static async Task<IUserIdentity> SignInOpc(EndpointDescription endpoint)
{
IUserIdentity userIdentity = null;
if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.Anonymous))
{
userIdentity = new AnonymousIdentity();
}
else if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.UserName))
{
var userName = AppMain.MyAppSettings.OpcUserId;
var password = simplerAES.Decrypt(AppMain.MyAppSettings.OpcPassword);
userIdentity = new UserNameIdentity(userName, password);
}
return userIdentity;
}
private void AddDataPointToContainer(int dataType, string source, DateTime SourceTimestampUTC, double value)
{
ConditionValue conditionValue = new ConditionValue();
long timestamp = AppMain.ServerSyncTimeStore.ConvertDateTimeToTimeStampUTC(SourceTimestampUTC);
conditionValue.dataType = dataType;
conditionValue.source = source;
conditionValue.timestamp = timestamp;
conditionValue.SourceTimestampUTC = SourceTimestampUTC;
conditionValue.LocalTime = SourceTimestampUTC.ToLocalTime();
conditionValue.value = value;
//LogOpcStore.Info(String.Format("TagName : {0} SourceTimestampUTC : {1} timestamp : {2} LocalTime : {3} curReadingValue : {4}", source, SourceTimestampUTC, timestamp, SourceTimestampUTC.ToLocalTime(), value));
dataContainer.AddDataPoint(conditionValue);
}
}
I see you are using the project https://github.com/convertersystems/opc-ua-client.
When a server closes the session and socket (as happens when you reinitialize Kepware) the client receives immediate notification that causes the client channel to fault. A faulted channel cannot be reopened, it should be aborted and a new channel should be created.
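In code that recovery looks roughly like the sketch below. AbortAsync is assumed here from the library's WCF-style communication-object state machine; verify the exact member name in your version:
// On fault: discard the channel instead of closing it, then reconnect.
if (channel != null && channel.State == CommunicationState.Faulted)
{
    await channel.AbortAsync();  // release the faulted channel's resources
    channel = null;              // force OpenOpcConnection to build a new channel
    await OpenOpcConnection();   // reopen and re-create the subscription
}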
I made this standalone test, to show that you may have to catch an exception and recreate the channel and subscription. The point of this test is to subscribe to the CurrentTime node and collect 60 datachanges. The test should last a minute. If you re-init the Kepware server in the middle of the test, the code catches the exception and recreates the channel and subscription.
[TestMethod]
public async Task OpcConnectorTest()
{
var count = 0;
UaTcpSessionChannel channel = null;
while (count < 60)
{
try
{
channel = new UaTcpSessionChannel(
this.localDescription,
this.certificateStore,
new AnonymousIdentity(),
EndpointUrl,
SecurityPolicyUris.None,
loggerFactory: this.loggerFactory);
await channel.OpenAsync();
// create the keep alive subscription.
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest).ConfigureAwait(false);
var id = subscriptionResponse.SubscriptionId;
var token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
Console.WriteLine($"sub: {pr.SubscriptionId}; handle: {min.ClientHandle}; value: {min.Value}");
count++;
}
}
});
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = new MonitoredItemCreateRequest[]
{
new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse("i=2258"), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = 12345, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } }
},
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
while (channel.State == CommunicationState.Opened && count < 60)
{
await Task.Delay(1000);
}
}
catch (Exception ex)
{
Console.WriteLine($"Exception: {ex.GetType()}. {ex.Message}");
}
}
if (channel != null)
{
Console.WriteLine($"Closing session '{channel.SessionId}'.");
await channel.CloseAsync();
}
}
I know this is an old post, but I stumbled upon this problem as well. For those interested:
The problem is related to the subscription(s).
When the following code is run:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
Observable.Subscribe() has overloads that take multiple arguments. You should include what to do in case of an error. For example:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(
pr => { code to run normally... },
ex => { Log.Info(ex.Message); },
() => { }
);
See http://reactivex.io/documentation/operators/subscribe.html for more information.

inserting into SQL via sqlbulk

Hello, I have this snippet of code:
public static void Put_CSVtoSQL_Adhesion()
{
bool IsFirst = true;
DataTable dt = new DataTable();
string line = null;
int i = 0;
try
{
string fileName = Path.Combine(HttpContext.Current.Server.MapPath(UploadDirectory), TheFileName);
using (StreamReader sr = File.OpenText(fileName))
{
while ((line = sr.ReadLine()) != null)
{
string[] data = line.Split(';');
if (data.Length > 0)
{
if (i == 0)
{
foreach (var item in data)
{
dt.Columns.Add(new DataColumn());
}
i++;
}
DataRow row = dt.NewRow();
row.ItemArray = data;
// To skip the header row
if (!IsFirst) dt.Rows.Add(row);
IsFirst = false;
}
}
}
using (var connectionWrapper = new Connexion())
{
var connectedConnection = connectionWrapper.GetConnected();
using (SqlBulkCopy copy = new SqlBulkCopy(connectionWrapper.conn))
{
int CountColl = dt.Columns.Count;
copy.ColumnMappings.Add(0, 1);
copy.ColumnMappings.Add(1, 2);
copy.ColumnMappings.Add(2, 3);
copy.ColumnMappings.Add(3, 4);
copy.ColumnMappings.Add(4, 5);
copy.DestinationTableName = "cotisation";
copy.WriteToServer(dt);
}
}
}
catch (Exception excThrown)
{
throw new Exception(excThrown.Message);
}
}
This code works well, but now I have more than 60 columns. Should I type the mappings manually from 1 to 60, or is there another method?
copy.ColumnMappings.Add(0, 1);
copy.ColumnMappings.Add(1, 2);
copy.ColumnMappings.Add(2, 3);
copy.ColumnMappings.Add(3, 4);
copy.ColumnMappings.Add(4, 5);
... until column 60?
The columns are all the same, just shifted by one, because the first one is an auto-incremented ID column.
Write a loop?
for (int i = 0; i < dt.Columns.Count; i++)
{
copy.ColumnMappings.Add(i, i + 1);
}
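If you'd rather not depend on ordinals at all, you can give the DataTable columns the destination column names when you build them from the CSV header, and then map name to name (the names here are whatever your cotisation table uses):
// Map source column name -> destination column name.
foreach (DataColumn col in dt.Columns)
{
    copy.ColumnMappings.Add(col.ColumnName, col.ColumnName);
}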

Change label's text with using property

I am working on my C# ADO.NET app. I have connected my SQL Server database to the C# app, and I can perform simple CRUD operations. I want my app to open my reminder form when someone in my database has a birthday, so I made a query that returns all persons whose birthday is today, and using a property on my reminder form I change a label's text to the name and surname of the person whose birthday it is. Now I just don't know how to change the next label's text when more than one person in my query has a birthday... I don't know how to get the next element in my foreach loop...
Here is my code:
Form2 forma = new Form2();
TBirthDayEntities today_born = new TBirthDayEntities();
public Form1()
{
InitializeComponent();
timer1.Tick += new EventHandler(timer1_Tick);
timer1.Interval = 1000;
timer1.Enabled = true;
timer1.Start();
}
private Boolean provjera_rodj()
{
Boolean flag = false;
int cnt = 0;
IQueryable<TBD> query;
using (var data = new TBirthDayEntities())
{
query = (from x in data.TBD
where x.BirthDay.Day == System.DateTime.Now.Day && x.BirthDay.Month == System.DateTime.Now.Month
select x);
foreach (var x in query)
{
today_born.TBD.Add(x);
cnt += 1;
flag = true;
}
}
switch (cnt)
{
case 1:
{
foreach (var x in today_born.TBD)
{
forma.p_label2 = x.FName + " " + x.LName;
}
break;
}
case 2:
{
foreach (var x in today_born.TBD)
{
forma.p_label2 = x.FName + x.LName;
forma.p_label3 = x.FName + x.LName; //wrong
}
break;
}
}
return flag;
}
private void timer1_Tick(object sender, EventArgs e)
{
Boolean flag = provjera_rodj();
if (flag == true)
{
forma.Show();
timer1.Stop();
}
}
switch (cnt)
{
case 1:
case 2:
{
var lstLabel = new List<Label>()
{
forma.p_label2
, forma.p_label3
};
for(int i = 0; i < today_born.TBD.Count; i++)
{
var x = today_born.TBD[i];
lstLabel[i].Text = x.FName + x.LName;
}
break;
}
}
EDIT:
switch (cnt)
{
case 1:
case 2:
{
var lstLabel = new List<Action<string>>()
{
new Action<string>(s =>forma.p_label2 = s)
, new Action<string>(s =>forma.p_label3 = s)
};
for(int i = 0; i < today_born.TBD.Count; i++)
{
var x = today_born.TBD[i];
lstLabel[i](x.FName + x.LName);
}
break;
}
}

Stochastic universal sampling

I need a SUS (stochastic universal sampling) implementation in C# for finding candidate individuals in a population. This is what I have so far, but I'm not sure if it is correct.
public void sus(IEnumerable<TimeTable> population)
{
var ag = population.Sum(i => normalize((double) i.Fitness, true));
var mark = rnMutate.NextDouble();
var index = 0;
foreach (var candidate in population)
{
var cu = population.Sum(i => normalize((double)i.Fitness, false)) / ag * 5;
while (cu > mark + index)
{
Survivors.Add(candidate);
index++;
}
}
}
public double normalize(double fitness, bool natural)
{
if (natural)
return fitness;
return fitness == (double)FitnessLBound ? double.PositiveInfinity : 1 / fitness;
}
private IEnumerable<TimeTable> StochasticSample(IEnumerable<TimeTable> population, int size)
{
var t = population.Sum(it => it.Fitness);
var temp = new List<TimeTable>();
var ptr = rnMutate.NextDouble();
var sum = 0M;
for (int i = 0; i < size; i++)
{
for (sum += ExpValue(i, t); sum > (decimal) ptr; ptr++)
{
temp.Add(population.ElementAt(i));
--size;
}
}
return temp;
}
private decimal ExpValue(decimal fitness, decimal sum)
{
return decimal.Divide(fitness, sum);
}
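For reference, a textbook SUS pass in C# looks like this: one random offset, then n evenly spaced pointers swept across the cumulative fitness. This sketch assumes maximization with non-negative fitnesses and a positive total; rnMutate is the Random instance from the question:
// Stochastic universal sampling: pointers are total/n apart, starting at a
// single random offset in [0, total/n), so selection has no extra variance
// from repeated roulette-wheel spins.
private List<TimeTable> StochasticUniversalSample(IList<TimeTable> population, int n)
{
    double total = population.Sum(t => (double)t.Fitness);
    double step = total / n;
    double start = rnMutate.NextDouble() * step;
    var selected = new List<TimeTable>(n);
    double cumulative = (double)population[0].Fitness;
    int index = 0;
    for (int i = 0; i < n; i++)
    {
        double pointer = start + i * step;
        // Advance until this pointer falls inside the current individual's
        // cumulative-fitness segment.
        while (cumulative <= pointer)
        {
            index++;
            cumulative += (double)population[index].Fitness;
        }
        selected.Add(population[index]);
    }
    return selected;
}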