How to make MongoDB let only one consumer process a record at a time? - mongodb

I am using MongoDB in .NET Core.
Many applications query the same collection at the same time, and my problem is that a single record ends up being processed by several applications.
I tried an optimistic-concurrency approach using a version field in Mongo, but it did not work well.
Here is my code:
try
{
    TopupRequest topupReturn = null;
    do
    {
        var topupRequest = await _paygateMongoRepository.GetOneAsync<TopupRequest>(p =>
            p.Status == TopupStatus.Init && categoryCodes.Contains(p.CategoryCode) &&
            p.ServiceCode == serviceCode);
        if (topupRequest == null)
            break;
        var version = topupRequest.Version;
        var versionPlus = topupRequest.Version + 1;
        Expression<Func<TopupRequest, bool>> filter = p => p.Id == topupRequest.Id && p.Version == version;
        var options = new FindOneAndUpdateOptions<TopupRequest, TopupRequest>
        {
            IsUpsert = false,
            ReturnDocument = ReturnDocument.After
        };
        _logger.Info("Version: " + version + " version plus: " + versionPlus);
        topupReturn = await _paygateMongoRepository.GetAndUpdateOne<TopupRequest, Guid>(filter,
            Builders<TopupRequest>.Update.Set(m => m.Status, TopupStatus.InProcessing)
                .Set(m => m.Version, versionPlus)
                .Set(m => m.WorkerApp, workerApp), options);
        _logger.Info("Version: " + version + " version plus: " + versionPlus);
    } while (topupReturn == null);
    if (topupReturn == null) return null;
    if (timeout > 0 && topupReturn.CreatedTime.AddSeconds(timeout) <= DateTime.UtcNow) return null;
    _logger.Info($"GetTopupPriorityAvailableVersionAsync success: {topupReturn.ToJson()}");
    return topupReturn.ConvertTo<TopupRequestDto>();
}
catch (Exception e)
{
    _logger.Info($"GetTopupPriorityAvailableVersionAsync error: {e}");
    return null;
}
I also tried a transaction-based solution, but it does not work for me either:
using (var session = MongoDbContext.Client.StartSession())
{
    var topups = MongoDbContext.GetCollection<TopupRequest>();
    try
    {
        session.StartTransaction(new TransactionOptions(
            readConcern: ReadConcern.Local,
            readPreference: ReadPreference.Primary,
            writeConcern: WriteConcern.WMajority));
        var sort = Builders<TopupRequest>.Sort.Descending("CreatedTime");
        var filter = Builders<TopupRequest>.Filter.Eq(p => p.Status, TopupStatus.Init)
                     & Builders<TopupRequest>.Filter.In("CategoryCode", categoryCodes)
                     & Builders<TopupRequest>.Filter.Eq(p => p.ServiceCode, serviceCode);
        var options = new FindOneAndUpdateOptions<TopupRequest, TopupRequest>
        {
            IsUpsert = false,
            ReturnDocument = ReturnDocument.After,
            Sort = sort
        };
        var update = Builders<TopupRequest>.Update.Set("Status", TopupStatus.InProcessing);
        var result = await topups.FindOneAndUpdateAsync(session, filter, update, options);
        if (result == null)
            session.AbortTransaction();
        session.CommitTransaction();
        return result;
    }
    catch (Exception e)
    {
        session.AbortTransaction();
        Console.WriteLine(e);
        return null;
    }
}
I still have the same problem: at the same time, one record is picked up more than once.
Please help me.
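For reference, the usual way to avoid this race is to let a single atomic FindOneAndUpdate both select and claim the document, instead of reading it first and updating it afterwards; because the filter requires Status == Init, only one worker can win each document. A minimal sketch with the official MongoDB .NET driver, assuming direct access to the IMongoCollection<TopupRequest> named topups from the code above (the repository wrapper is bypassed here):
// Atomically claim one pending request: the filter and the update are applied
// as a single server-side operation, so two workers can never both see
// Status == Init for the same document.
var claimFilter = Builders<TopupRequest>.Filter.Eq(p => p.Status, TopupStatus.Init)
                  & Builders<TopupRequest>.Filter.In(p => p.CategoryCode, categoryCodes)
                  & Builders<TopupRequest>.Filter.Eq(p => p.ServiceCode, serviceCode);
var claimUpdate = Builders<TopupRequest>.Update
    .Set(p => p.Status, TopupStatus.InProcessing)
    .Set(p => p.WorkerApp, workerApp)
    .Inc(p => p.Version, 1);
var claimOptions = new FindOneAndUpdateOptions<TopupRequest>
{
    IsUpsert = false,
    ReturnDocument = ReturnDocument.After,
    Sort = Builders<TopupRequest>.Sort.Ascending(p => p.CreatedTime)
};
// Returns null when nothing is left in the Init state; otherwise the document
// that this worker, and only this worker, has just claimed.
var claimed = await topups.FindOneAndUpdateAsync(claimFilter, claimUpdate, claimOptions);
You can still keep the Version check in the filter if you want the optimistic lock, but the important part is not doing a separate GetOneAsync before the update.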

Related

Insert/Update 50 records from JSON file to SQL table taking too much time in ASP.NET MVC EF Core

I am doing insert/update of records from a JSON file to a SQL table in a single action method. I am inserting only 50 records, but it takes too much time. I am new to ASP.NET Core and don't know whether my many foreach loops cause the issue or my database logic. Can anybody please help me correct this issue?
I am reading the JSON and, based on a custom mapping, inserting or updating table columns from the JSON object values.
Please look at my code below:
[HttpPost]
public IActionResult InsertProductDetails()
{
int getcountProductName = (from gcPN in _context.K360MappingMasters
where gcPN.K360Catalog == "Name" && gcPN.ApiContent == baseurl
select gcPN).Count();
if (getcountProductName == 0)
{
return Json(new { Status = "error", Message = "Required ProductName catalog not yet mapped , unable to insert productdetails" });
}
else if (getcountProductName == 1)
{
try
{
using WebClient wc = new WebClient();
string contentString = wc.DownloadString(baseurl);
List<Dictionary<string, string>> ListJsonProductContent = new List<Dictionary<string, string>>();
List<string> ListProductsCheck = new List<string>();
var token = JToken.Parse(contentString);
if (token.Type == JTokenType.Array) // "["
{
ListJsonProductContent = JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(contentString);
}
else if (token.Type == JTokenType.Object) // "{"
{
var ObjectResponse = JsonConvert.DeserializeObject<Dictionary<string, object>>(contentString);
foreach (var x in ObjectResponse)
{
string key = x.Key.ToString();
string val = x.Value.ToString();
foreach (var dicItemML in JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(val))
{
ListJsonProductContent.Add(dicItemML);
}
}
}
List<K360MappingMaster> ListMappedDataDb = new List<K360MappingMaster>();
var VLinqQuery = from KMM in _context.K360MappingMasters
where KMM.ApiContent != null && KMM.ApiContent == baseurl
select KMM;
ListMappedDataDb = VLinqQuery.ToList();
foreach (var dicItemML in ListJsonProductContent)
{
Dictionary<string, string> updItem = new Dictionary<string, string>();
foreach (var itemMl in dicItemML)
{
var catalogProductsCheck = _context.CatalogProducts.ToList();
var result = catalogProductsCheck.Select(s => s.Name).ToList().Contains(itemMl.Value);
if (result == true)
ListProductsCheck.Add(itemMl.Value.ToString());
if (ListMappedDataDb.Select(s => s.ApiCatalog).ToList().Contains(itemMl.Key))
{
foreach (K360MappingMaster data in ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key))
{
if (updItem.ContainsKey(data.K360Catalog))
{
if (data.K360Catalog == "Specification")
{
updItem[data.K360Catalog] += "<p>" + itemMl.Key + " :" + itemMl.Value + "<p>";
}
else
{
updItem[data.K360Catalog] += " " + itemMl.Value;
}
}
else
{
if (data.K360Catalog == "Specification")
{
updItem.Add(data.K360Catalog, "<p>" + itemMl.Key + " :" + itemMl.Value + "<p>");
}
else
{
updItem.Add(data.K360Catalog, itemMl.Value);
}
}
}
}
dicItemML.Remove(itemMl.Key);
}
foreach (var itemM2 in updItem)
{
dicItemML.Add(itemM2.Key, itemM2.Value);
}
}
List<CatalogProduct> ListKp = new List<CatalogProduct>();
foreach (var dicItem in ListJsonProductContent)
{
CatalogProduct Ctgkp = new CatalogProduct
{
Name = dicItem.ContainsKey("Name") ? dicItem["Name"] : "No Product",
Slug = dicItem.ContainsKey("Name") ? string.Concat(dicItem["Name"].Where(c => !char.IsWhiteSpace(c))).ToLower(CultureInfo.CurrentCulture) : "No Slug",
Price = dicItem.ContainsKey("Price") ? decimal.Parse(dicItem["Price"], CultureInfo.InvariantCulture) : default,
ShortDescription = dicItem.ContainsKey("ShortDescription") ? dicItem["ShortDescription"] : null,
Description = dicItem.ContainsKey("Description") ? dicItem["Description"] : null,
Specification = dicItem.ContainsKey("Specification") ? dicItem["Specification"] : null,
RatingAverage = dicItem.ContainsKey("RatingAverage") ? double.Parse(dicItem["RatingAverage"], CultureInfo.InvariantCulture) : null,
MetaTitle = dicItem.ContainsKey("MetaTitle") ? dicItem["MetaTitle"] : null,
MetaKeywords = dicItem.ContainsKey("MetaKeywords") ? dicItem["MetaKeywords"] : null,
MetaDescription = dicItem.ContainsKey("MetaDescription") ? dicItem["MetaDescription"] : null,
Sku = dicItem.ContainsKey("Sku") ? dicItem["Sku"] : null,
Gtin = dicItem.ContainsKey("Gtin") ? dicItem["Gtin"] : null,
NormalizedName = dicItem.ContainsKey("NormalizedName") ? dicItem["NormalizedName"] : null,
StockQuantity = dicItem.ContainsKey("StockQuantity") ? int.Parse(dicItem["StockQuantity"], CultureInfo.InvariantCulture) : 50,
ReviewsCount = dicItem.ContainsKey("ReviewsCount") ? int.Parse(dicItem["ReviewsCount"], CultureInfo.InvariantCulture) : default,
DisplayOrder = dicItem.ContainsKey("DisplayOrder") ? int.Parse(dicItem["DisplayOrder"], CultureInfo.InvariantCulture) : 1,
OldPrice = dicItem.ContainsKey("OldPrice") ? decimal.Parse(dicItem["OldPrice"], CultureInfo.InvariantCulture) : null,
SpecialPrice = dicItem.ContainsKey("SpecialPrice") ? decimal.Parse(dicItem["SpecialPrice"], CultureInfo.InvariantCulture) : null,
SpecialPriceStart = dicItem.ContainsKey("SpecialPriceStart") ? DateTimeOffset.Parse(dicItem["SpecialPriceStart"], CultureInfo.InvariantCulture) : null,
SpecialPriceEnd = dicItem.ContainsKey("SpecialPriceEnd") ? DateTimeOffset.Parse(dicItem["SpecialPriceEnd"], CultureInfo.InvariantCulture) : null,
IsPublished = true,
PublishedOn = DateTimeOffset.Now,
CreatedById = 10,
IsDeleted = false,
CreatedOn = DateTimeOffset.UtcNow,
LatestUpdatedOn = DateTimeOffset.UtcNow,
LatestUpdatedById = 10,
HasOptions = false,
IsVisibleIndividually = true,
IsFeatured = true,
IsCallForPricing = false,
IsAllowToOrder = true,
StockTrackingIsEnabled = true
};
ListKp.Add(Ctgkp);
}
using (var transaction = _context.Database.BeginTransaction())
{
try
{
int numCpTotal = 0;
var catalogProducts = _context.CatalogProducts.ToList();
var config = new MapperConfiguration(cfg => cfg.CreateMap<CatalogProduct, CatalogProduct>()
.ForMember(c => c.Id, opt => opt.Ignore()));
var mapper = config.CreateMapper();
_context.ChangeTracker.AutoDetectChangesEnabled = false;
foreach (var kp in ListKp)
{
var existingRootProduct = _context.CatalogProducts.SingleOrDefault(x => x.Name == kp.Name);
if (existingRootProduct == null)
{
if (ListProductsCheck.Count > 0)
{
var firstItem = ListProductsCheck.ToList();
foreach (var item in firstItem)
{
_context.CatalogProducts.RemoveRange(_context.CatalogProducts.Where(c => c.Name == item));
_context.CoreEntities.RemoveRange(_context.CoreEntities.Where(c => c.Name == item));
_context.SaveChanges();
}
}
_context.CatalogProducts.Add(kp);
_context.SaveChanges();
CoreEntity ce = new CoreEntity
{
Name = kp.Name,
Slug = string.Concat(kp.Name.Where(c => !char.IsWhiteSpace(c))).ToLower(CultureInfo.CurrentCulture),
EntityId = kp.Id,
EntityTypeId = "Product",
};
_context.CoreEntities.Add(ce);
}
else
{
mapper.Map<CatalogProduct, CatalogProduct>(kp, existingRootProduct);
}
}
(from q in _context.K360MappingMasters
where q.ApiContent == baseurl
select q).ToList().ForEach(x => x.InsertStatusFlag = true);
_context.ChangeTracker.DetectChanges();
numCpTotal = _context.SaveChanges();
_context.ChangeTracker.AutoDetectChangesEnabled = true;
transaction.Commit();
return Json(new { Status = "success", Message = "No conflicts. " + numCpTotal + " product details saved." });
}
catch (Exception ex)
{
transaction.Rollback();
return Json(new { Status = "warning", Message = ex.Message });
throw new Exception();
}
}
}
catch (Exception ex)
{
return Json(new { Status = "warning", Message = ex.Message });
throw new Exception();
}
}
else
{
return RedirectToAction("Index");
}
}
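A likely reason the 50 rows take so long is that the loops above hit the database far more often than needed: _context.CatalogProducts.ToList() runs inside the innermost foreach (once per JSON field), SingleOrDefault issues another query per product, and SaveChanges is called per row. A rough sketch of the idea only, not the full action method:
// Load the existing products once, before the loops, and reuse them in memory.
var existingProducts = _context.CatalogProducts.ToList();
var existingNames = new HashSet<string>(existingProducts.Select(p => p.Name));

foreach (var dicItemML in ListJsonProductContent)
{
    // Copy the entries so the dictionary can be modified while iterating.
    foreach (var itemMl in dicItemML.ToList())
    {
        // Membership check against the in-memory set: no database query per field.
        if (existingNames.Contains(itemMl.Value))
            ListProductsCheck.Add(itemMl.Value);
        // ... the mapping logic stays the same ...
    }
}

// Later, resolve existing rows from memory instead of SingleOrDefault per product:
// var existingRootProduct = existingProducts.FirstOrDefault(x => x.Name == kp.Name);
Batching the writes also helps: call SaveChanges once per transaction instead of once per product.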

How to do both Add and Update in Entity Framework ASP.NET Core?

I want to do both update and add in the same action method in ASP.NET Core with Entity Framework Core, but after adding a list of records to the database, I cannot update those same records in the table; it always creates a new set of records.
What is my mistake? Can anybody please help me?
[HttpPost]
public IActionResult InsertProductDetails()
{
using WebClient wc = new WebClient();
string contentString = wc.DownloadString(baseurl);
List<Dictionary<string, string>> ListJsonProductContent = new List<Dictionary<string, string>>();
var token = JToken.Parse(contentString);
if (token.Type == JTokenType.Array) // "["
{
ListJsonProductContent = JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(contentString);
}
else if (token.Type == JTokenType.Object) // "{"
{
var ObjectResponse = JsonConvert.DeserializeObject<Dictionary<string, object>>(contentString);
foreach (var x in ObjectResponse)
{
string key = x.Key.ToString();
string val = x.Value.ToString();
foreach (var dicItemML in JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(val))
{
ListJsonProductContent.Add(dicItemML);
}
}
}
List<K360MappingMaster> ListMappedDataDb = new List<K360MappingMaster>();
var VLinqQuery = from KMM in _context.K360MappingMasters
where KMM.ApiUrl != null && KMM.ApiUrl == baseurl
select KMM;
ListMappedDataDb = VLinqQuery.ToList();
foreach (var dicItemML in ListJsonProductContent)
{
Dictionary<string, string> updItem = new Dictionary<string, string>();
foreach (var itemMl in dicItemML)
{
if (ListMappedDataDb.Select(s => s.ApiCatalog).ToList().Contains(itemMl.Key))
{
if (updItem.ContainsKey(ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key).Select(s => s.K360Catalog).FirstOrDefault()))
{
if (ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key).Select(s => s.K360Catalog).FirstOrDefault() == "Specification")
{
updItem[ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key).Select(s => s.K360Catalog).FirstOrDefault()] += "<p>" + itemMl.Key + " :" + itemMl.Value + "<p>";
}
else
{
updItem[ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key).Select(s => s.K360Catalog).FirstOrDefault()] += " " + itemMl.Value;
}
}
else
{
if (ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key).Select(s => s.K360Catalog).FirstOrDefault() == "Specification")
{
updItem.Add(ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key).Select(s => s.K360Catalog).FirstOrDefault(), "<p>" + itemMl.Key + " :" + itemMl.Value + "<p>");
}
else
{
updItem.Add(ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key).Select(s => s.K360Catalog).FirstOrDefault(), itemMl.Value);
}
}
}
dicItemML.Remove(itemMl.Key);
}
foreach (var itemM2 in updItem)
{
dicItemML.Add(itemM2.Key, itemM2.Value);
}
}
List<CatalogProduct> ListKp = new List<CatalogProduct>();
foreach (var dicItem in ListJsonProductContent)
{
try
{
CatalogProduct Ctgkp = new CatalogProduct
{
Name = dicItem.ContainsKey("Name") ? dicItem["Name"] : "No Product",
Slug = dicItem.ContainsKey("Name") ? string.Concat(dicItem["Name"].Where(c => !char.IsWhiteSpace(c))).ToLower() : "No Slug",
Price = dicItem.ContainsKey("Price") ? decimal.Parse(dicItem["Price"], CultureInfo.InvariantCulture) : default,
ShortDescription = dicItem.ContainsKey("ShortDescription") ? dicItem["ShortDescription"] : null,
Description = dicItem.ContainsKey("Description") ? dicItem["Description"] : null,
Specification = dicItem.ContainsKey("Specification") ? dicItem["Specification"] : null,
RatingAverage = dicItem.ContainsKey("RatingAverage") ? double.Parse(dicItem["RatingAverage"], CultureInfo.InvariantCulture) : null,
MetaTitle = dicItem.ContainsKey("MetaTitle") ? dicItem["MetaTitle"] : null,
MetaKeywords = dicItem.ContainsKey("MetaKeywords") ? dicItem["MetaKeywords"] : null,
MetaDescription = dicItem.ContainsKey("MetaDescription") ? dicItem["MetaDescription"] : null,
Sku = dicItem.ContainsKey("Sku") ? dicItem["Sku"] : null,
Gtin = dicItem.ContainsKey("Gtin") ? dicItem["Gtin"] : null,
NormalizedName = dicItem.ContainsKey("NormalizedName") ? dicItem["NormalizedName"] : null,
StockQuantity = dicItem.ContainsKey("StockQuantity") ? int.Parse(dicItem["StockQuantity"], CultureInfo.InvariantCulture) : 50,
ReviewsCount = dicItem.ContainsKey("ReviewsCount") ? int.Parse(dicItem["ReviewsCount"], CultureInfo.InvariantCulture) : default,
DisplayOrder = dicItem.ContainsKey("DisplayOrder") ? int.Parse(dicItem["DisplayOrder"], CultureInfo.InvariantCulture) : 1,
OldPrice = dicItem.ContainsKey("OldPrice") ? decimal.Parse(dicItem["OldPrice"], CultureInfo.InvariantCulture) : null,
SpecialPrice = dicItem.ContainsKey("SpecialPrice") ? decimal.Parse(dicItem["SpecialPrice"], CultureInfo.InvariantCulture) : null,
SpecialPriceStart = dicItem.ContainsKey("SpecialPriceStart") ? DateTimeOffset.Parse(dicItem["SpecialPriceStart"], CultureInfo.InvariantCulture) : null,
SpecialPriceEnd = dicItem.ContainsKey("SpecialPriceEnd") ? DateTimeOffset.Parse(dicItem["SpecialPriceEnd"], CultureInfo.InvariantCulture) : null,
IsPublished = true,
PublishedOn = DateTimeOffset.Now,
CreatedById = 10,
IsDeleted = false,
CreatedOn = DateTimeOffset.UtcNow,
LatestUpdatedOn = DateTimeOffset.UtcNow,
LatestUpdatedById = 10,
HasOptions = false,
IsVisibleIndividually = true,
IsFeatured = true,
IsCallForPricing = false,
IsAllowToOrder = true,
StockTrackingIsEnabled = true
};
ListKp.Add(Ctgkp);
}
catch (Exception ex)
{
TempData["Exceptionmsg"] = ex;
return RedirectToAction("Index");
throw;
}
}
int numP = 0;
try
{
using (var transaction = _context.Database.BeginTransaction())
{
int getcount = (from gcM in _context.CatalogProducts
select gcM).Count();
if (getcount == 0)
{
_context.CatalogProducts.AddRange(ListKp);
_context.SaveChanges();
}
else
{
var catalogProducts = (from gcM in _context.CatalogProducts select gcM).ToList();
for (int i = 0; i < catalogProducts.Count; i++)
{
catalogProducts[i].Name = ListKp[i].Name;
catalogProducts[i].Price = ListKp[i].Price;
catalogProducts[i].Description = ListKp[i].Description;
catalogProducts[i].Specification = ListKp[i].Specification;
//...
}
_context.CatalogProducts.UpdateRange(catalogProducts);
_context.SaveChanges();
}
transaction.Commit();
}
if (numP > 0)
{
TempData["Sucessmsg"] = "No conflicts. " + numP + " product details saved.";
(from p in _context.K360MappingMasters
where p.ApiUrl == baseurl
select p).ToList()
.ForEach(x => x.InsertStatusFlag = true);
_context.SaveChanges();
return RedirectToAction("Index");
}
}
catch (Exception ex)
{
TempData["Exceptionmsg"] = ex;
return RedirectToAction("Index");
throw;
}
return RedirectToAction("Index");
}
I have accepted your answer, thanks. But I need a few more clarifications on the same question:
1. The CatalogProduct table has only an auto-increment primary key Id field. I am inserting data from a JSON URL, and the unique field should be the product name. How do I check whether the product name already exists in the table before adding/updating records?
2. If I need to add data from another JSON URL, how do I differentiate the product details of the two URLs?
There are no entities from the database being tracked when you call Update, so EF will simply add them.
You can try the code below.
else
{
var catalogProducts= (from gcM in _context.CatalogProducts select gcM).ToList();
for (int i = 0; i < catalogProducts.Count; i++)
{
catalogProducts[i].Name = ListKp[i].Name;
catalogProducts[i].Price = ListKp[i].Price;
//...
}
_context.CatalogProducts.UpdateRange(catalogProducts);
numP = _context.SaveChanges();
}
Update:
using (var transaction = _context.Database.BeginTransaction())
{
var catalogProducts = _context.CatalogProducts.ToList();
foreach(var kp in ListKp)
{
if(!catalogProducts.Any(x => x.Name == kp.Name)){
_context.CatalogProducts.Add(kp);
_context.SaveChanges();
}
else
{
//Use AutoMapper to do the mapping automatically
var config = new MapperConfiguration(cfg => cfg.CreateMap<CatalogProduct, CatalogProduct>().ForMember(c => c.Id, opt => opt.Ignore()));
var oldone = catalogProducts.FirstOrDefault(c => c.Name == kp.Name);
var mapper = config.CreateMapper();
oldone = mapper.Map<CatalogProduct,CatalogProduct>(kp, oldone);
_context.CatalogProducts.Update(oldone);
_context.SaveChanges();
}
}
transaction.Commit();
}
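Regarding the second follow-up (data from another JSON URL): one option is to record the feed each product came from, so different URLs never overwrite each other's rows. A hypothetical sketch; the SourceUrl property does not exist in the original CatalogProduct entity and would have to be added (plus a migration), and mapper is the AutoMapper instance configured above:
// Hypothetical SourceUrl column on CatalogProduct identifying the feed.
foreach (var kp in ListKp)
{
    kp.SourceUrl = baseurl; // hypothetical property, not in the original model

    // Existence check per (Name, SourceUrl) so two feeds can share product names.
    var existing = _context.CatalogProducts
        .FirstOrDefault(x => x.Name == kp.Name && x.SourceUrl == baseurl);

    if (existing == null)
        _context.CatalogProducts.Add(kp);
    else
        mapper.Map<CatalogProduct, CatalogProduct>(kp, existing); // update in place
}
_context.SaveChanges();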

Dynamic Search Using Entity Framework

I have a search screen with optional fields using Entity Framework, and I want to build a dynamic query without selecting the whole object set and filtering on it.
I want to optimize the existing search below. I don't want to start with "var query = from p in context.APLCN_TRCKR select p;" at this stage, because the application will be used by more than 100 people at once:
using (var context = new VASTEntities())
{
var query = from p in context.APLCN_TRCKR select p;
if (!string.IsNullOrEmpty(searchObj.CUST_NAME_X))
query = query.Where(p => p.CUST_NAME_X == searchObj.CUST_NAME_X.Trim());
if (!string.IsNullOrEmpty(searchObj.SURNM_X))
query = query.Where(p => p.CUST_SURNM_X == searchObj.SURNM_X.Trim());
if (!string.IsNullOrEmpty(searchObj.QUEUE_ID))
query = query.Where(p => p.QUEUE_ID == searchObj.QUEUE_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.BP_ID))
query = query.Where(p => p.BPID == searchObj.BP_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.UserID))
query = query.Where(p => p.CURR_OWNR_USER_ID == searchObj.UserID.Trim());
if (!string.IsNullOrEmpty(searchObj.APLCN_TRCKR_ID))
query = query.Where(p => p.APLCN_TRCKR_ID == searchObj.APLCN_TRCKR_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.APLCN_STTS_ID))
query = query.Where(p => p.APLCN_STTS_ID == searchObj.APLCN_STTS_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.CUST_ID))
query = query.Where(p => p.CUST_ID == searchObj.CUST_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.CELL_ID))
query = query.Where(p => p.CELL_ID == searchObj.CELL_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.ORIGN_ID))
query = query.Where(p => p.ORIGN_ID == searchObj.ORIGN_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.ORGTN_CHANL_ID))
query = query.Where(p => p.ORGTN_CHANL_ID == searchObj.ORGTN_CHANL_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.CR_DCSN_ID))
query = query.Where(p => p.CR_DCSN_ID == searchObj.CR_DCSN_ID.Trim());
if (!string.IsNullOrEmpty(searchObj.SBSA_CUST_I))
query = query.Where(p => p.SBSA_CUST_I == searchObj.SBSA_CUST_I.Trim());
if (!string.IsNullOrEmpty(searchObj.USER_ID_APP_CRTD))
query = query.Where(p => p.USER_ID_APP_CRTD == searchObj.USER_ID_APP_CRTD.Trim());
if (!string.IsNullOrEmpty(searchObj.RGION_ID))
{
int r = int.Parse(searchObj.RGION_ID.Trim());
query = query.Where(p => p.RGION_ID == r);
}
if (!string.IsNullOrEmpty(searchObj.CR_REGION))
{
int x = int.Parse(searchObj.CR_REGION);
if (x == 0)
{
// check 0 - not applicable or null
query = query.Where(p => p.CR_REGION_ID == 0 || p.CR_REGION_ID == null);
}
else
{
query = query.Where(p => p.CR_REGION_ID == x);
}
}
if (!string.IsNullOrEmpty(searchObj.Process_Type))
query = query.Where(p => p.PRCES_TYPE_ID == searchObj.Process_Type.Trim());
query.ToList();
foreach (var a in query)
{
searchAppsObj.Add(Translator.TranslateReqObjToBO.TranslateDTOToSearchApp(a));
}
if (query.Count() == 0)
{
throw new Exception("No Applications Found.");
}
context.Connection.Close();
return searchAppsObj;
}
I want to do something like this, but it is not working properly:
string cust_name_x = "", surname_x = "", queue_id = "", bp_id = "", user_id = "", aplcn_trckr_id = "",
aplcn_stts_id = "", cust_id = "", process_type = "", cr_region = "", cell_id = "", Origin = "", region = "", channel = "", credit_verdict = "", sbsa_cust_id = "", app_creator_id = "";
if (!string.IsNullOrEmpty(searchObj.CUST_NAME_X))
cust_name_x = searchObj.CUST_NAME_X.Trim();
if (!string.IsNullOrEmpty(searchObj.SURNM_X))
surname_x = searchObj.SURNM_X.Trim();
if (!string.IsNullOrEmpty(searchObj.QUEUE_ID))
queue_id = searchObj.QUEUE_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.BP_ID))
bp_id = searchObj.BP_ID;
if (!string.IsNullOrEmpty(searchObj.UserID))
user_id = searchObj.UserID.Trim();
if (!string.IsNullOrEmpty(searchObj.APLCN_TRCKR_ID))
aplcn_trckr_id = searchObj.APLCN_TRCKR_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.APLCN_STTS_ID))
aplcn_stts_id = searchObj.APLCN_STTS_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.CUST_ID))
cust_id = searchObj.CUST_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.Process_Type))
process_type = searchObj.Process_Type.Trim();
if (!string.IsNullOrEmpty(searchObj.CR_REGION))
cr_region = searchObj.CR_REGION.Trim();
if (!string.IsNullOrEmpty(searchObj.CELL_ID))
cell_id = searchObj.CELL_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.ORIGN_ID))
Origin = searchObj.ORIGN_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.RGION_ID))
region = searchObj.RGION_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.ORGTN_CHANL_ID))
channel = searchObj.ORGTN_CHANL_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.CR_DCSN_ID))
credit_verdict = searchObj.CR_DCSN_ID.Trim();
if (!string.IsNullOrEmpty(searchObj.SBSA_CUST_I))
sbsa_cust_id = searchObj.SBSA_CUST_I.Trim();
if (!string.IsNullOrEmpty(searchObj.USER_ID_APP_CRTD))
app_creator_id = searchObj.USER_ID_APP_CRTD.Trim();
using (var context = new VASTEntities())
{
var query = from p in context.APLCN_TRCKR
where
p.CUST_NAME_X.Contains(cust_name_x) &&
p.CUST_SURNM_X.Contains(surname_x) &&
p.QUEUE_ID.Contains(queue_id) &&
p.BPID.Contains(bp_id) &&
p.CURR_OWNR_USER_ID.Contains(user_id) &&
p.APLCN_TRCKR_ID.Contains(aplcn_trckr_id) &&
p.APLCN_STTS_ID.Contains(aplcn_stts_id) &&
p.CUST_ID.Contains(cust_id) &&
p.PRCES_TYPE_ID.Contains(process_type) &&
p.CELL_ID.Contains(cell_id) &&
SqlFunctions.StringConvert((double)p.CR_REGION_ID).Contains(cr_region) &&
p.ORIGN_ID.Contains(Origin) &&
SqlFunctions.StringConvert((double)p.RGION_ID).Contains(region) &&
p.ORGTN_CHANL_ID.Contains(channel) &&
p.CR_DCSN_ID.Contains(credit_verdict) &&
p.SBSA_CUST_I.Contains(sbsa_cust_id)
select p;
query.ToList();
if (query.Count() == 0)
{
throw new Exception("No Applications Found.");
}
foreach (var a in query)
{
searchAppsObj.Add(Translator.TranslateReqObjToBO.TranslateDTOToSearchApp(a));
}
context.Connection.Close();
return searchAppsObj;
}
You can just create a collection of lambda expressions like below:
var filters = new List<Expression<Func<Application, bool>>>();
if (!string.IsNullOrWhiteSpace(searchObj.CUST_NAME_X))
    filters.Add(application => application.CUST_NAME_X.Contains(searchObj.CUST_NAME_X.Trim()));
if (!string.IsNullOrEmpty(searchObj.SURNM_X))
    filters.Add(application => application.CUST_SURNM_X.Contains(searchObj.SURNM_X.Trim()));
// And so on for all criteria
After that you can loop over the filters like below:
using (var context = new VASTEntities())
{
var query = context.APLCN_TRCKR.AsQueryable();
foreach(var filter in filters)
{
query = query.Where(filter);
}
var result = query.ToList();
if (result.Count() == 0)
{
throw new Exception("No Applications Found.");
}
foreach (var a in result)
{
searchAppsObj.Add(Translator.TranslateReqObjToBO.TranslateDTOToSearchApp(a));
}
context.Connection.Close();
return searchAppsObj;
}
change
var query = from p in context.APLCN_TRCKR select p;
to
var query = context.APLCN_TRCKR.AsQueryable();
And when the filtering work is done:
await query.ToListAsync() // this one goes to the database
Also have a look at this: Linq query filtering
In addition: don't call context.Connection.Close();, as the connection is released anyway because using behaves like try { ... } finally { /* dispose work */ }
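Putting the two suggestions together, a condensed sketch of the whole pattern could look like this (entity set and property names are taken from the question; ToListAsync assumes the EF async extensions and an async method):
using (var context = new VASTEntities())
{
    // Start from an IQueryable so each Where composes into the same SQL query.
    var query = context.APLCN_TRCKR.AsQueryable();

    if (!string.IsNullOrEmpty(searchObj.CUST_NAME_X))
        query = query.Where(p => p.CUST_NAME_X == searchObj.CUST_NAME_X.Trim());
    if (!string.IsNullOrEmpty(searchObj.SURNM_X))
        query = query.Where(p => p.CUST_SURNM_X == searchObj.SURNM_X.Trim());
    // ... the remaining optional criteria follow the same shape ...

    // One round trip to the database, after all filters are attached.
    var results = await query.ToListAsync();
    if (results.Count == 0)
        throw new Exception("No Applications Found.");

    foreach (var a in results)
        searchAppsObj.Add(Translator.TranslateReqObjToBO.TranslateDTOToSearchApp(a));

    return searchAppsObj;
}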

opcua session was closed by client

I have written the attached OpcUaConnector class for OPC UA connection-related activities, but it does not handle the session. For example, after I:
disabled the endpoint in the OPC UA configuration, or
did runtime > reinitialize in the KEPServer configuration,
the Windows service throws:
Source : System.Reactive.Core
InnerException : The session was closed by client
and the Windows service stops, as this error goes unhandled.
Can someone suggest how to handle the session in OPC UA?
public class OpcUaConnector
{
private static SimplerAES simplerAES = new SimplerAES();
private DataContainer dataCointainer = null;
private UaTcpSessionChannel channel;
private string opcServerName = string.Empty;
private string opcUserId = string.Empty;
private string opcPassword = string.Empty;
private static ILog LogOpcStore;
private static System.IDisposable token;
private static uint id;
public OpcUaConnector(ILog Log)
{
IntializeLogOpcStore(Log);
}
private static void IntializeLogOpcStore(ILog Log)
{
LogOpcStore = Log;
}
public async Task OpenOpcConnection()
{
try
{
if ((!string.IsNullOrEmpty(this.opcServerName) & (this.opcServerName != AppMain.MyAppSettings.OpcServer)) ||
(!string.IsNullOrEmpty(this.opcUserId) & (this.opcUserId != AppMain.MyAppSettings.OpcUserId)) ||
(!string.IsNullOrEmpty(this.opcPassword) & (this.opcPassword != AppMain.MyAppSettings.OpcPassword)))
{
await channel.CloseAsync();
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
if (channel==null || (channel != null && (channel.State == CommunicationState.Closed || channel.State == CommunicationState.Faulted)))
{
var appDescription = new ApplicationDescription()
{
ApplicationName = "MyAppName",
ApplicationUri = $"urn:{System.Net.Dns.GetHostName()}:MyAppName",
ApplicationType = ApplicationType.Client,
};
//application data won't be deleted when uninstall
var certificateStore = new DirectoryStore(
Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "MyAppName", "pki"),
true, true
);
//if the Ethernet cable unplugs or the Wifi drops out,
//you have some timeouts that can keep the session open for a while.
//There is a SessionTimeout (default of 2 min).
this.channel = new UaTcpSessionChannel(
appDescription,
certificateStore,
SignInOpc,
AppMain.MyAppSettings.OpcServer,
null,
options: new UaTcpSessionChannelOptions { SessionTimeout = 120000 });
await channel.OpenAsync();
//LogOpcStore.Info(String.Format("Opc connection sucessful"));
}
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.HResult + " " + ex.Message, "C052");
throw serviceException;
}
}
private static async Task RecursivelyFindNode(UaTcpSessionChannel channel, NodeId nodeid)
{
BrowseRequest browseRequest = new BrowseRequest
{
NodesToBrowse = new BrowseDescription[] { new BrowseDescription { NodeId = nodeid, BrowseDirection = BrowseDirection.Forward, ReferenceTypeId = NodeId.Parse(ReferenceTypeIds.HierarchicalReferences), NodeClassMask = (uint)NodeClass.Variable | (uint)NodeClass.Object, IncludeSubtypes = true, ResultMask = (uint)BrowseResultMask.All } },
};
BrowseResponse browseResponse = await channel.BrowseAsync(browseRequest);
foreach (var rd1 in browseResponse.Results[0].References ?? new ReferenceDescription[0])
{
uint chid = AppMain.MyTagDatabase.GetClientHandleByTag(rd1.DisplayName.ToString());
if (chid > 0)
{
AppMain.MyTagDatabase.UpdateNodeByClientHandle(chid, rd1.NodeId.ToString());
}
await RecursivelyFindNode(channel, ExpandedNodeId.ToNodeId(rd1.NodeId, channel.NamespaceUris));
}
}
public async Task CreateSubscription(DataContainer dc)
{
double curReadingValue;
try
{
dataCointainer = dc;
await RecursivelyFindNode(channel, NodeId.Parse(ObjectIds.RootFolder));
if (AppMain.MyTagDatabase.GetCntTagsNotInOpcServer() == AppMain.MyTagDatabase.GetTagCount())
{
//no need to create subscription
return;
}
//subscription timeout that is the product of PublishingInterval * LifetimeCount:
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest);
id = subscriptionResponse.SubscriptionId;
var itemsToCreate = new MonitoredItemCreateRequest[AppMain.MyTagDatabase.GetTagHavingNodeCount()];
int i = 0;
foreach (var item in AppMain.MyTagDatabase.GetMyTagDatabase())
{
var itemKey = item.Key;
var itemValue = item.Value;
itemsToCreate[i] = new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse(itemValue.NodeId), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = itemKey, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } };
i++;
}
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = itemsToCreate,
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
}
catch (Exception ex)
{
//If the interruption lasts longer than these timeouts then the SessionChannel and Subscriptions will need to be recreated.
channel = null;
FatalServiceException fatalserviceException = new FatalServiceException(ex.Message, "C052");
throw fatalserviceException;
}
}
public async Task DeleteSubscription()
{
try
{
var request = new DeleteSubscriptionsRequest
{
SubscriptionIds = new uint[] { id }
};
await channel.DeleteSubscriptionsAsync(request);
token.Dispose();
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.Message, "C052");
throw serviceException;
}
}
private static async Task<IUserIdentity> SignInOpc(EndpointDescription endpoint)
{
IUserIdentity userIdentity = null;
if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.Anonymous))
{
userIdentity = new AnonymousIdentity();
}
else if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.UserName))
{
var userName = AppMain.MyAppSettings.OpcUserId;
var password = simplerAES.Decrypt(AppMain.MyAppSettings.OpcPassword);
userIdentity = new UserNameIdentity(userName, password);
}
return userIdentity;
}
private void AddDataPointToContainer(int dataType, string source, DateTime SourceTimestampUTC, double value)
{
ConditionValue conditionValue = new ConditionValue();
long timestamp = AppMain.ServerSyncTimeStore.ConvertDateTimeToTimeStampUTC(SourceTimestampUTC);
conditionValue.dataType = dataType;
conditionValue.source = source;
conditionValue.timestamp = timestamp;
conditionValue.SourceTimestampUTC = SourceTimestampUTC;
conditionValue.LocalTime = SourceTimestampUTC.ToLocalTime();
conditionValue.value = value;
//LogOpcStore.Info(String.Format("TagName : {0} SourceTimestampUTC : {1} timestamp : {2} LocalTime : {3} curReadingValue : {4}", source, SourceTimestampUTC, timestamp, SourceTimestampUTC.ToLocalTime(), value));
dataCointainer.AddDataPoint(conditionValue);
}
}
I see you are using the project https://github.com/convertersystems/opc-ua-client.
When a server closes the session and socket (as happens when you reinitialize Kepware) the client receives immediate notification that causes the client channel to fault. A faulted channel cannot be reopened, it should be aborted and a new channel should be created.
I made this standalone test, to show that you may have to catch an exception and recreate the channel and subscription. The point of this test is to subscribe to the CurrentTime node and collect 60 datachanges. The test should last a minute. If you re-init the Kepware server in the middle of the test, the code catches the exception and recreates the channel and subscription.
[TestMethod]
public async Task OpcConnectorTest()
{
var count = 0;
UaTcpSessionChannel channel = null;
while (count < 60)
{
try
{
channel = new UaTcpSessionChannel(
this.localDescription,
this.certificateStore,
new AnonymousIdentity(),
EndpointUrl,
SecurityPolicyUris.None,
loggerFactory: this.loggerFactory);
await channel.OpenAsync();
// create the keep alive subscription.
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest).ConfigureAwait(false);
var id = subscriptionResponse.SubscriptionId;
var token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
Console.WriteLine($"sub: {pr.SubscriptionId}; handle: {min.ClientHandle}; value: {min.Value}");
count++;
}
}
});
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = new MonitoredItemCreateRequest[]
{
new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse("i=2258"), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = 12345, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } }
},
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
while (channel.State == CommunicationState.Opened && count < 60)
{
await Task.Delay(1000);
}
}
catch (Exception ex)
{
Console.WriteLine($"Exception: {ex.GetType()}. {ex.Message}");
}
}
if (channel != null)
{
Console.WriteLine($"Closing session '{channel.SessionId}'.");
await channel.CloseAsync();
}
}
I know this is an old post, but I stumbled upon this problem as well. For those interested:
The problem is related to the subscription(s).
When the following code is run:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
Observable.Subscribe() takes multiple arguments; you should include what to do in case of an error. For example:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(
pr => { code to run normally... },
ex => { Log.Info(ex.Message); },
() => { }
);
See http://reactivex.io/documentation/operators/subscribe.html for more information.

Importing DBF file in SQL Server 2012

I have over 200 .dbf files with different schemas. I have used SSIS to import .dbf files, but in order to automate the task I want to use OPENROWSET. OPENROWSET works fine for Excel files, but not for .dbf.
I have written
SELECT [LRSNum],[AppUpdD],[AppStat],[PIN] FROM OPENROWSET('MICROSOFT.ACE.OLEDB.12.0','dBASE 5.0;Database=D:\Sales- data\SalesSourcefile\2016\December\Shape\Martin\real_land\', 'SELECT [LRSNum],[AppUpdD],[AppStat],[PIN] FROM real_land.dbf');
Any help will be appreciated.
I am using SQL Server 2012 on Windows 8.1.
I installed the FoxPro driver, but when I select FoxPro using DTS, it fails.
FYI - You can do automated uploads of DBFs to SQL Server using SQL Server Upsizing Wizard for FoxPro.
With this tool, you have to do the uploading from FoxPro, and the DBFs must be attached to a FoxPro DBC.
http://www.codemag.com/article/0703052
The latest version is available from the VFPx site.
Finally, this is working
SELECT * FROM OPENROWSET ('MICROSOFT.ACE.OLEDB.12.0','dBase 5.0;HDR=YES;IMEX=2;DATABASE=\Dbf Directory\',
'SELECT * FROM dbf_filename.dbf')
I once had to do this too and wrote some C# code for the import.
Just as a starting point: this is the line I use to open the connection from C#:
var con = new OdbcConnection("Driver={Microsoft dBASE Driver (*.dbf)};Dbq=C:\\SomePath;DriverID=277;");
Maybe you can get something out of this.
Some C# code
The following code is taken from one of my C# projects. I modified it to be neutral, so I cannot guarantee that it works as-is:
public List<string> Ambus = new List<string>();
public List<string> Tbls = new List<string>();
public List<string> errList = new List<string>();
public List<string> SQLs = new List<string>();
private void btnImport_Click(object sender, EventArgs e) {
SqlConnection sqlcon;
SqlCommand sqlcmd;
SQLs.Clear();
Tbls.Clear();
var con = new OdbcConnection("Driver={Microsoft dBASE Driver (*.dbf)};Dbq=C:\\SomePath;DriverID=277;");
con.Open();
var tbls = con.GetSchema(OdbcMetaDataCollectionNames.Tables);
foreach (System.Data.DataRow r in tbls.Rows) {
Tbls.Add(r["TABLE_NAME"].ToString());
}
DataTable cols = null;
var sb = new StringBuilder();
int i = 0;
foreach (var tblnm in Tbls) {
i++;
sb.Clear();
try {
cols = con.GetSchema(OdbcMetaDataCollectionNames.Columns, new string[] { null, null, tblnm, null });
sb.AppendFormat(" CREATE TABLE dbo.[{0}](TableName VARCHAR(100) NOT NULL ", tblnm);
int count = 0;
foreach (DataRow colrow in cols.Rows) {
var colInf = string.Format(" ,{0} {1} NULL", colrow["COLUMN_NAME"].ToString(), this.createDataType(colrow["TYPE_NAME"].ToString(), colrow["COLUMN_SIZE"].ToString(), colrow["DECIMAL_DIGITS"].ToString(), colrow["NUM_PREC_RADIX"].ToString()));
sb.Append(colInf);
count++;
}
sb.Append("); ");
SQLs.Add(sb.ToString());
sb.Clear();
var cmd = new OdbcCommand("SELECT * FROM [" + tblnm + "]", con);
var reader = cmd.ExecuteReader();
while (reader.Read()) {
var vals = createVals(cols, reader, tblnm);
string insStat = string.Format(" INSERT INTO dbo.[{0}] VALUES ('{0}',{1});", tblnm, vals);
SQLs.Add(insStat);
}
}
catch (Exception exo) {
errList.Add(string.Format("{0}:{1}", tblnm, exo.Message));
}
con.Close();
sqlcon = new SqlConnection("Data Source=SomeSQLServer;Initial Catalog=master;User ID=sa;pwd=SomePwd");
sqlcon.Open();
sqlcmd = new SqlCommand("USE SomeTargetDB;", sqlcon);
sqlcmd.ExecuteNonQuery();
i = 0;
foreach (string s in SQLs) {
i++;
//Progress-output: this.Text = string.Format("{0} von {1}", i, SQLs.Count);
sqlcmd = new SqlCommand(s, sqlcon);
sqlcmd.ExecuteNonQuery();
}
sqlcon.Close();
}
}
private string createDataType(string typ, string size, string dec, string prec) {
switch (typ.ToLower()) {
case "char":
return "NVARCHAR(" + size + ")";
case "logical":
return "BIT";
case "numeric":
dec = dec == string.Empty ? null : dec;
prec = prec == string.Empty ? null : prec;
int d = int.Parse(dec ?? "0");
int p = int.Parse(prec ?? "0");
if (d == 0 && p == 0)
return "INT";
else if (d > 0 && p > 0)
return string.Format("DECIMAL({0},{1})", d, p);
else if (d == 0 && p > 0)
return "FLOAT";
else
return null;
case "date":
return "DATETIME";
default:
return null;
}
}
private string createVals(DataTable cols, OdbcDataReader reader, string tblnm) {
var sb = new StringBuilder();
sb.Append("'" + tblnm + "'");
foreach (DataRow colrow in cols.Rows) {
var val = string.Empty;
try {
val = reader[colrow["COLUMN_NAME"].ToString()].ToString();
}
catch { }
if (val.Trim().Length == 0)
val = "NULL";
else {
if (colrow["TYPE_NAME"].ToString().ToLower() == "char")
val = val.Replace("'", "''");
if (colrow["TYPE_NAME"].ToString().ToLower() == "numeric")
val = val.Replace(".", "").Replace(",", ".");
if (colrow["TYPE_NAME"].ToString().ToLower() == "date") {
var d = DateTime.Parse(val, System.Globalization.CultureInfo.CurrentCulture);
if (d.Year < 1900 || d.Year > 2020)
d = new DateTime(1900, d.Month, d.Day, d.Hour, d.Minute, d.Second);
val = d.ToString("dd.MM.yyyy HH:mm:ss");
}
val = "'" + val + "'";
}
sb.AppendFormat(",{0}", val);
}
return sb.ToString();
}