How to log in to Messenger with HTTP requests - Facebook

I tried using the Ktor framework for Kotlin with the skrape{it} library to log in to Facebook Messenger, with no success. All the libraries I can find on GitHub are deprecated, and the past Stack Overflow questions about this don't seem to work anymore. Does anyone have any idea what I'm doing wrong? This is what I have tried; the output is always: "userID: null"
const val baseUrl = "https://www.messenger.com"
val client = HttpClient(CIO) {
expectSuccess = false
install(HttpCookies)
install(DefaultRequest) {
header("Origin", baseUrl)
header("Referer", "$baseUrl/")
header("Accept-Language", "en-US,en;q=0.9")
header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36")
port = 443
}
followRedirects = true
}
fun main(args: Array<String>) {
runBlocking {
var jazoest: String = ""
var lsd: String = ""
var initialRequestId: String = ""
val timezone: String = "-120"
val lgndim: String = "eyJ3IjoxNDQwLCJoIjo5MDAsImF3IjoxNDQwLCJhaCI6OTAwLCJjIjoyNH0%3D"
var lgnrnd: String = ""
var lgnjs: String = ""
val login: String = "1"
val persistent: String = "1"
skrape(BrowserFetcher) {
request {
url = baseUrl
followRedirects = true
headers = mapOf("Origin" to baseUrl, "Referer" to "$baseUrl/", "Accept-Language" to "en-US,en;q=0.9")
userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.84 Safari/537.36 Edg/99.0.100.0"
println("user-agent: $userAgent")
}
response {
htmlDocument {
jazoest = input {
withAttribute = "name" to "jazoest"
findFirst {
attributes["value"]
} ?: ""
}
lsd = input {
withAttribute = "name" to "lsd"
findFirst {
attributes["value"]
} ?: ""
}
initialRequestId = input {
withAttribute = "name" to "initial_request_id"
findFirst {
attributes["value"]
} ?: ""
}
lgnrnd = input {
withAttribute = "name" to "lgnrnd"
findFirst {
attributes["value"]
} ?: ""
}
lgnjs = input {
withAttribute = "name" to "lgnjs"
findFirst {
attributes["value"]
} ?: ""
}
}
}
}
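// note (assumption): setBody with a plain String goes out as text/plain unless a
// ContentType is set; a form login endpoint would normally expect
// application/x-www-form-urlencoded, e.g. contentType(ContentType.Application.FormUrlEncoded)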
val loginPostRequest = client.post("$baseUrl/login/password/") {
setBody(
"jazoest=$jazoest" +
"&lsd=$lsd" +
"&initial_request_id=$initialRequestId" +
"&timezone=-$timezone" +
"&lgndim=$lgndim" +
"&lgnrnd=$lgnrnd" +
"&lgnjs=$lgnjs" +
"&email=$email" +
"&pass=$password" +
"&login=$login" +
"&persistent=$persistent" +
"&default_persistent="
)
}
val messengerLoggedInGetRequest = client.get(baseUrl)
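// note (assumption): the requests above target www.messenger.com; asking the cookie
// storage for "https://messenger.com" may return nothing unless the cookie was set
// with a Domain attribute covering both hosts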
val userID = client.cookies("https://messenger.com")["c_user"]
println("userID: $userID")
}
client.close()
}

Unhandled Exception: Connection closed while receiving data

I am trying to integrate a Python-based API into my Flutter app to fetch web data (scraping). The data comes through in Postman, but when I run the POST method in the Flutter app it returns an id, and then the GET method runs to fetch the data for that id. When the data arrives, it throws the error Unhandled Exception: Connection closed while receiving data.
I have the following rest API written with flask (python)
from flask import Flask, request, jsonify
from flask_cors import CORS, cross_origin
from Crawler import Crawler
app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
scans = dict()
count = 1
def setScan(result, res):
scans[result] = res
@app.route('/crawl', methods=['POST'])
@cross_origin()
def post():
url = request.form.get('link')
start_page = request.form.get('start_page')
num_pages = request.form.get('num_pages')
if start_page is None:
start_page = 1
if num_pages is None:
num_pages = 1
crawler = Crawler(url)
global count
global scans
result = count
count += 1
crawler.run_crawler(lambda res: setScan(result, res), start_page, num_pages)
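# run_crawler starts a background thread, so the id is returned immediately;
# GET /crawl/<id> reports "Loading" until setScan stores the result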
return {"id": result}
@app.route('/crawl/<int:id>', methods=['GET'])
@cross_origin()
def getResult(id):
if id in scans.keys():
return jsonify(scans[id])
else:
return {"status": "Loading"}
if __name__ == '__main__':
app.run()
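
For reference, the flow this API implies is: POST /crawl with the form fields to receive an id, then poll GET /crawl/<id> until the "Loading" status is replaced by the stored result. A minimal client sketch under those assumptions (local Flask default port; the target URL is just an illustration):
import time

import requests

resp = requests.post(
    "http://127.0.0.1:5000/crawl",
    data={"link": "https://www.zameen.com", "start_page": "1", "num_pages": "2"},
)
scan_id = resp.json()["id"]

while True:
    data = requests.get(f"http://127.0.0.1:5000/crawl/{scan_id}").json()
    if data != {"status": "Loading"}:  # the crawler thread has stored a result
        break
    time.sleep(5)  # poll while the background crawl runs
print(data)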
# Crawler.py (imported above)
import json  # used by the graana crawlers below
import threading
from urllib.parse import urlparse # parsing urls
import requests # making requests to website
from bs4 import BeautifulSoup
class Crawler:
# Constructor
def __init__(self, url):
self.url = url # url of the website
self.pages = {self.url} # set of pages
self.locations = {'Islamabad', 'Karachi', 'Lahore'}
print(url)
if "http" in url:
self.domain = urlparse(url).netloc # domain name of the website
else:
self.domain = url[url.index(".") + 1:]
self.postings = list()
def crawl(self, num_pages=1, start_page=1):
if self.domain == "zameen.com":
self.crawl_zameen(num_pages, start_page)
elif self.domain == "graana.com":
self.crawl_granna_v2(num_pages, start_page)
elif self.domain == "ilaan.com":
self.crawl_ilaan(num_pages, start_page)
else:
print(f"{self.domain} Webpage not supported")
def crawl_ilaan(self, num_pages=1, start_page=1):
cities = ['Lahore', 'Islamabad', 'Karachi']
for city in cities:
for i in range(int(start_page), int(num_pages) + 1):
print(f"Crawling Ilaan page number: {i}\n")
url = "https://www.ilaan.com/_SearchListingAjax"
payload = "{\"ic\":\"&ic=Lahore\",\"q\":\""+city+"\",\"ptid\":\"2\",\"tl\":\"1\",\"propertyCategory\":\" Houses \",\"sr\":\"\",\"city\":\"\",\"propertyType\":\"rent\",\"CurrentUrl\":\"house-for-rent?q=&ptid=2&tl=1\",\"pgno\":\"" + \
str(i) + " \"}"
headers = {
'authority': 'www.ilaan.com',
'accept': '*/*',
'accept-language': 'en-US,en;q=0.9',
'cache-control': 'no-cache',
'content-type': 'application/json; charset=UTF-8',
'cookie': 'ASP.NET_SessionId=glvcpa0wa1hkmdbscd5b5yus; _gcl_au=1.1.1521133608.1672521617; _ga=GA1.1.1234506406.1672521618; twk_idm_key=1pCESA-j-i3RBozWQIGuP; _ga_YPP49Z23L5=GS1.1.1672521617.1.1.1672521786.0.0.0; TawkConnectionTime=0; twk_uuid_6373b548daff0e1306d78a3b=%7B%22uuid%22%3A%221.PUjxyVfs9Mcjd5sEod7kopr5BrQot8cCvpTQJSVy7xw9DQha4TpdDvJg1DgGwiiHjcpK6f1J2TvsNdHrciKTgGWsj6fq6dz8iK0DJ49EKrfUvi9gB%22%2C%22version%22%3A3%2C%22domain%22%3A%22ilaan.com%22%2C%22ts%22%3A1672521792461%7D',
'origin': 'https://www.ilaan.com',
'pragma': 'no-cache',
'referer': 'https://www.ilaan.com/house-for-rent?tl=1&ptid=2&pgno=3&ic=%26ic%3dLahore',
'sec-ch-ua': '"Not?A_Brand";v="8", "Chromium";v="108", "Google Chrome";v="108"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Windows"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
'x-requested-with': 'XMLHttpRequest'
}
req = requests.request("POST", url, headers=headers, data=payload)
soup = BeautifulSoup(req.text, "lxml")
items = soup.findAll('div', attrs={'class': 'float-start col-8'})
for item in items:
link = item.findAll('a')[0]
page = f"https://www.ilaan.com{link['href']}"
print(f"Crawling Page: {page}\n")
res = requests.get(page)
bsf = BeautifulSoup(res.text, "lxml")
titleBox = bsf.findAll('h1', attrs={'class': 'opacity-90 font-16'})[0]
posting = dict({
'url': page,
'title': titleBox.text,
'location': titleBox.nextSibling.nextSibling.text,
})
# details
details = bsf.find("div", attrs={'class': 'row mb-0 mt3 text-center'})
fields = ["id", "area", "purpose", "date", "bedrooms", "bathrooms", "price", "location"]
fieldsDetails = []
for child in details:
detail = child.text.strip()
if len(detail) > 0:
fieldsDetails.append(detail)
posting['details'] = dict(zip(fields, fieldsDetails))
imagesBox = bsf.find('div', attrs={'class': 'splide__list'})
images = []
imgBox = imagesBox.findAll('img', attrs={'class': 'img-fluid'})
for img in imgBox:
image = img['src']
if image[:4] == 'http':
images.append(image)
posting['media'] = images
self.postings.append(posting)
pass
def crawl_granna(self, num_pages=1, start_page=1):
cities = ['islamabad', 'rawalpindi', 'lahore']
for city in cities:
for i in range(int(start_page), int(num_pages) + 1):
print(f"Crawling granna page number: {i}\n")
url = f"https://www.graana.com/_next/data/0Qm8AcPOlM2s6IO6V9RNo/residential/for_rent/{city}/all/1.json?offset=30&page={i}&subtype=residential&purpose=for_rent&city=islamabad&areas=all&ids=1"
print(url)
payload = {}
headers = {
'authority': 'www.graana.com',
'accept': '*/*',
'accept-language': 'en-US,en;q=0.9',
'baggage': 'sentry-environment=Staging,sentry-release=0Qm8AcPOlM2s6IO6V9RNo,sentry-transaction=%2F%5Bsubtype%5D%2F%5Bpurpose%5D%2F%5Bcity%5D%2F%5Bareas%5D%2F%5Bids%5D,sentry-public_key=c255164d1e7144b5a93adf2553c49a82,sentry-trace_id=4d8470d72fef46289c24a46b72ef999b,sentry-sample_rate=0.5',
'cache-control': 'no-cache',
'cookie': '_gcl_au=1.1.984580374.1671639286; _ga=GA1.1.1940448186.1671639287; lhc_per=vid|46e86cb4d2faad1a962c|hnh|1672162939; _ga_4CNKWK86H3=GS1.1.1672512375.4.1.1672512959.0.0.0; __cf_bm=Z5b01rElnJVtNnqNgKQ8gl1BhLBUQoX6aU_PwObp3No-1672513333-0-AROQpdSKrXQBvbdyoZbQ2DDp/CMFF/kS/2CE6fHzGgD5JBi/3bJbxg5tNS/rNx9TaS1MvJjOXeDTYDMM9O2cN2z4JahQ1liaV7/Vhmo6VZM+KfzUwk3T3AP7okwyuWKo/CPPRTxFU05nY+JWSR8MooBFbIHCWuJHjwI2xMN6eop+fBaIrP7vou9Kd6ek5vvLww==',
'pragma': 'no-cache',
'referer': 'https://www.graana.com/residential/for_rent/Islamabad/all/1',
'sec-ch-ua': '"Not?A_Brand";v="8", "Chromium";v="108", "Google Chrome";v="108"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Windows"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'sentry-trace': '4d8470d72fef46289c24a46b72ef999b-bc25d6041c1c672f-1',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
'x-nextjs-data': '1'
}
response = requests.request("GET", url, headers=headers, data=payload)
print(response)
data = json.loads(response.text)
listings = data["pageProps"]['propertyServer']['listings']['items']
self.postings.append(listings)
pass
def crawl_granna_v2(self, num_pages=1, start_page=1):
cities = ['Islamabad', 'Rawalpindi', 'Lahore']
for city in cities:
for i in range(int(start_page), int(num_pages) + 1):
try:
print(f"Crawling granna page number: {i}\n")
url = f"https://www.graana.com/residential/for_rent/{city}/all/{i}"
print(url)
headers = {
'authority': 'www.graana.com',
'accept': '*/*',
'accept-language': 'en-US,en;q=0.9',
'baggage': 'sentry-environment=Staging,sentry-release=0Qm8AcPOlM2s6IO6V9RNo,sentry-transaction=%2F%5Bsubtype%5D%2F%5Bpurpose%5D%2F%5Bcity%5D%2F%5Bareas%5D%2F%5Bids%5D,sentry-public_key=c255164d1e7144b5a93adf2553c49a82,sentry-trace_id=4d8470d72fef46289c24a46b72ef999b,sentry-sample_rate=0.5',
'cache-control': 'no-cache',
'cookie': '_gcl_au=1.1.984580374.1671639286; _ga=GA1.1.1940448186.1671639287; lhc_per=vid|46e86cb4d2faad1a962c|hnh|1672162939; _ga_4CNKWK86H3=GS1.1.1672512375.4.1.1672512959.0.0.0; __cf_bm=Z5b01rElnJVtNnqNgKQ8gl1BhLBUQoX6aU_PwObp3No-1672513333-0-AROQpdSKrXQBvbdyoZbQ2DDp/CMFF/kS/2CE6fHzGgD5JBi/3bJbxg5tNS/rNx9TaS1MvJjOXeDTYDMM9O2cN2z4JahQ1liaV7/Vhmo6VZM+KfzUwk3T3AP7okwyuWKo/CPPRTxFU05nY+JWSR8MooBFbIHCWuJHjwI2xMN6eop+fBaIrP7vou9Kd6ek5vvLww==',
'pragma': 'no-cache',
'referer': 'https://www.graana.com/residential/for_rent/Islamabad/all/1',
'sec-ch-ua': '"Not?A_Brand";v="8", "Chromium";v="108", "Google Chrome";v="108"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Windows"',
'sec-fetch-dest': 'empty',
'sec-fetch-mode': 'cors',
'sec-fetch-site': 'same-origin',
'sentry-trace': '4d8470d72fef46289c24a46b72ef999b-bc25d6041c1c672f-1',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36',
'x-nextjs-data': '1'
}
response = requests.request("GET", url, headers=headers, data={})
soup = BeautifulSoup(response.text, "lxml")
script = soup.find('script', attrs={'id': '__NEXT_DATA__', 'type': 'application/json'})
data = json.loads(script.text)
listings = data["props"]["pageProps"]['propertyServer']['listings']['items']
self.postings.append(listings)
except:
pass
pass
def crawl_zameen(self, num_pages=1, start_page=1):
cities = [1, 2, 3]
for city in cities:
for i in range(int(start_page), int(num_pages) + 1):
print(f"Crawling Zameen page number: {i}\n")
url = f"https://www.zameen.com/Homes/Islamabad-{city}-{i}.html"
req = requests.get(url)
soup = BeautifulSoup(req.text, "lxml")
items = soup.findAll('a', attrs={'class': '_7ac32433', 'aria-label': "Listing link"})
for link in items:
page = f"https://www.zameen.com{link['href']}"
print(f"Crawling Page: {page}\n")
res = requests.get(page)
bsf = BeautifulSoup(res.text, "lxml")
titleBox = bsf.findAll('div', attrs={'class': 'b72558b0'})[0]
posting = dict({
'url': page,
'title': titleBox.findChildren()[0].text,
'location': titleBox.findChildren()[1].text,
})
# details
detailsBox = bsf.find("ul", attrs={'class': '_033281ab', 'aria-label': 'Property details'})
for li in detailsBox.children:
pair = li.findChildren("span")
posting[pair[0].text] = pair[1].text
imagesBox = bsf.findAll('div', attrs={'class': 'image-gallery-thumbnails'})
images = []
for imgBox in imagesBox:
images.append(imgBox.find('img', attrs={'role': 'presentation'})['src'])
posting['media'] = images
self.postings.append(posting)
pass
def run(self, cb, start_page=1, num_pages=1):
self.crawl(num_pages, start_page)
try:
cb(self.postings)
except:
cb({"status": "Failed"})
def run_crawler(self, cb, start_page=1, num_pages=1):
thread = threading.Thread(target=self.run, args=(cb, start_page, num_pages))
thread.start()
This is the Flutter provider code:
import 'dart:convert'; // for jsonDecode
import 'package:cloud_firestore/cloud_firestore.dart';
import 'package:flutter/material.dart';
import 'package:http/http.dart' as http;
import 'package:http/retry.dart';
import '../model/PropertyApiModel.dart';
class PropertyApiProvider extends ChangeNotifier {
bool isLoading = false;
http.Response? result;
var id;
final FirebaseFirestore _firestore = FirebaseFirestore.instance;
List<PropertyApiModel> propertyModel = [];
Future deletedocs() async {
final instance = FirebaseFirestore.instance;
final batch = instance.batch();
var collection = instance.collection('propertydata');
var snapshots = await collection.get();
for (var doc in snapshots.docs) {
batch.delete(doc.reference);
}
await batch.commit();
print("deleted");
}
int i = 0;
var data;
Future<dynamic> getData(String id) async {
try {
print("getting data...");
var client = RetryClient(http.Client(), retries: 2);
result = await http.get(Uri.parse('http://10.0.2.2:5000/crawl/$id'),
headers: {"Keep-Alive": "timeout=50, max=2"}).timeout(
const Duration(seconds: 50),
);
if (result!.statusCode != 200) {
result = await client.get(Uri.parse('http://10.0.2.2:5000/crawl/$id'),
headers: {"Keep-Alive": "timeout=50, max=2"});
}
print("response status code: ${result!.statusCode}");
if (result!.statusCode == 200) {
var body = jsonDecode(result!.body);
print("data: ${body.toString()}");
print("data length: ${body.toString().length}");
if (body.toString().length == 17) {
await Future.delayed(const Duration(minutes: 2));
return await getData(id.toString());
} else {
await deletedocs();
for (Map pp in body) {
print("firebase running");
propertyModel.add(PropertyApiModel.fromJson(pp));
PropertyApiModel propertyApiModel = PropertyApiModel(
added: propertyModel[i].added,
Location: propertyModel[i].citylocation,
area: propertyModel[i].area,
baths: propertyModel[i].baths,
bedrooms: propertyModel[i].bedrooms,
location: propertyModel[i].location,
media: propertyModel[i].media!,
price: propertyModel[i].price,
purpose: propertyModel[i].purpose,
title: propertyModel[i].title,
type: propertyModel[i].type,
url: propertyModel[i].url,
);
await _firestore
.collection("propertydata")
.add(propertyApiModel.toJson())
.then((value) {
print("idddd ${value.id}");
});
if (i < result!.body.length) {
print("adddddddddddd");
i++;
print(propertyApiModel);
} else {
FirebaseFirestore firebaseFirestore = FirebaseFirestore.instance;
readdata() async {
final data = firebaseFirestore.collection('propertydata');
final snapshot = await data.get();
if (snapshot.docs.isNotEmpty) {
print(snapshot.toString());
}
}
}
}
isLoading = false;
notifyListeners();
return body;
}
} else {
throw Exception(
"Failed to fetch data, status code: ${result!.statusCode}");
}
} catch (e) {
print("error: ${e.toString()}");
rethrow;
}
}
getId(String url) async {
try {
print("making post request...");
http.Response response = await http.post(
Uri.parse("http://10.0.2.2:5000/crawl"),
body: {'link': url, 'start_page': '1', 'num_pages': '2'});
print("response status code: ${response.statusCode}");
if (response.statusCode == 200) {
var body = jsonDecode(response.body.toString());
id = body['id'];
print("iddd $id");
getData(id.toString());
notifyListeners();
return id.toString();
} else {
throw Exception(
"Failed to get id, status code: ${response.statusCode}");
}
} catch (e) {
print("error: ${e.toString()}");
}
}
}
It's because you are using the wrong data for jsonDecode.
The response data is actually in response.bodyBytes.
And since you cannot directly convert bytes to JSON in Dart, convert the bytes to a String first, then decode that String with jsonDecode.
So... don't use response.body, use response.bodyBytes.
I have modified your code so that it uses response.bodyBytes:
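// note: jsonDecode and utf8 both come from dart:convert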
if (response.statusCode == 200) {
// Get body bytes from response
final bytes = response.bodyBytes;
// Convert bytes to String then decode
final body = jsonDecode(utf8.decode(bytes));
id = body['id'];
print("iddd $id");
getData(id.toString());
notifyListeners();
return id.toString();
} else {
throw Exception("Failed to get id, status code: ${response.statusCode}");
}

Wrong PUT method gets triggered in Akka Http using scala

In my APIendpoint class, I have two PUT methods, say updateA and updateB, but when I try to hit updateB using Swagger it resolves to updateA every time. I don't know what I'm doing wrong, because the code looks OK to me. Any help is appreciated.
@Api(value = "/connectroutes", produces = "application/json", consumes = "application/json",
authorizations = Array(new Authorization(value = "")))
@Produces(Array(MediaType.APPLICATION_JSON))
def routes: Route = {
pathPrefix("sampleroute") {
authenticateBasic(realm = "sample realm", authenticator) { authenticationResult =>
updateA ~
updateB
}
}
}
@PUT
@Path("{name}")
@Produces(Array(MediaType.APPLICATION_JSON))
@Operation(summary = "sample", description = "UPDATE A",
parameters = Array(new Parameter(name = "name", in = ParameterIn.PATH, description = "updateA name", required = true)),
requestBody = new RequestBody(content = Array(new Content(schema = new Schema(implementation = classOf[A]), mediaType = MediaType.APPLICATION_JSON))),
)
def updateA: Route = {
path(Segment) { name =>
put {
entity(as[A]) { a: A => {
complete(updateAMethod(name, a))
}
}
}
}
}
@PUT
@Path("updateb")
@Produces(Array(MediaType.APPLICATION_JSON))
@Authorization("basicAuth")
@Operation(summary = "Sample", description = "Update B",
requestBody = new RequestBody(content = Array(new Content(schema = new Schema(implementation = classOf[String]), mediaType = MediaType.APPLICATION_JSON))),
)
def updateB: Route = {
path("updateb" / Segment) { namespace =>
put {
entity(as[String]) { updatedval: String => {
complete(updatedval)
}
}
}
}
}
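For context on the matching (a hedged sketch, not the asker's code): Akka HTTP's ~ tries alternatives left to right, and the generic path(Segment) in updateA also matches the literal segment "updateb", so a PUT to /sampleroute/updateb lands in updateA. Note also that @Path("updateb") advertises a one-segment path while the route itself needs two segments ("updateb" / Segment), so Swagger presumably issues exactly that one-segment request. A minimal sketch with the literal route tried first and the Swagger path kept consistent with the route shape:
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route

object RouteSketch {
  // Sketch only, with hypothetical handlers: try the literal "updateb" route
  // before the generic Segment matcher, and advertise it to Swagger as
  // @Path("updateb/{namespace}") so the generated request has both segments.
  def routes: Route =
    pathPrefix("sampleroute") {
      concat(
        path("updateb" / Segment) { namespace =>
          put { complete(s"updated B for $namespace") }
        },
        path(Segment) { name => // generic "{name}" route, tried last
          put { complete(s"updated A for $name") }
        }
      )
    }
}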

Insert/Update 50 records from JSON file to SQL table taking too much time in ASP.NET MVC EF Core

I am inserting/updating records from a JSON file into a SQL table in a single action method. I am inserting only 50 records, but it takes too much time. I am new to ASP.NET Core; I don't know whether my many foreach loops create the issue or my database logic does. Can anybody help me correct this issue?
I am reading the JSON and, based on a custom mapping, inserting or updating table columns from the JSON object values.
Please look at my code below:
[HttpPost]
public IActionResult InsertProductDetails()
{
int getcountProductName = (from gcPN in _context.K360MappingMasters
where gcPN.K360Catalog == "Name" && gcPN.ApiContent == baseurl
select gcPN).Count();
if (getcountProductName == 0)
{
return Json(new { Status = "error", Message = "Required ProductName catalog not yet mapped , unable to insert productdetails" });
}
else if (getcountProductName == 1)
{
try
{
using WebClient wc = new WebClient();
string contentString = wc.DownloadString(baseurl);
List<Dictionary<string, string>> ListJsonProductContent = new List<Dictionary<string, string>>();
List<string> ListProductsCheck = new List<string>();
var token = JToken.Parse(contentString);
if (token.Type == JTokenType.Array) // "["
{
ListJsonProductContent = JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(contentString);
}
else if (token.Type == JTokenType.Object) // "{"
{
var ObjectResponse = JsonConvert.DeserializeObject<Dictionary<string, object>>(contentString);
foreach (var x in ObjectResponse)
{
string key = x.Key.ToString();
string val = x.Value.ToString();
foreach (var dicItemML in JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(val))
{
ListJsonProductContent.Add(dicItemML);
}
}
}
List<K360MappingMaster> ListMappedDataDb = new List<K360MappingMaster>();
var VLinqQuery = from KMM in _context.K360MappingMasters
where KMM.ApiContent != null && KMM.ApiContent == baseurl
select KMM;
ListMappedDataDb = VLinqQuery.ToList();
foreach (var dicItemML in ListJsonProductContent)
{
Dictionary<string, string> updItem = new Dictionary<string, string>();
foreach (var itemMl in dicItemML)
{
var catalogProductsCheck = _context.CatalogProducts.ToList();
var result = catalogProductsCheck.Select(s => s.Name).ToList().Contains(itemMl.Value);
if (result == true)
ListProductsCheck.Add(itemMl.Value.ToString());
if (ListMappedDataDb.Select(s => s.ApiCatalog).ToList().Contains(itemMl.Key))
{
foreach (K360MappingMaster data in ListMappedDataDb.Where(s => s.ApiCatalog == itemMl.Key))
{
if (updItem.ContainsKey(data.K360Catalog))
{
if (data.K360Catalog == "Specification")
{
updItem[data.K360Catalog] += "<p>" + itemMl.Key + " :" + itemMl.Value + "<p>";
}
else
{
updItem[data.K360Catalog] += " " + itemMl.Value;
}
}
else
{
if (data.K360Catalog == "Specification")
{
updItem.Add(data.K360Catalog, "<p>" + itemMl.Key + " :" + itemMl.Value + "<p>");
}
else
{
updItem.Add(data.K360Catalog, itemMl.Value);
}
}
}
}
dicItemML.Remove(itemMl.Key);
}
foreach (var itemM2 in updItem)
{
dicItemML.Add(itemM2.Key, itemM2.Value);
}
}
List<CatalogProduct> ListKp = new List<CatalogProduct>();
foreach (var dicItem in ListJsonProductContent)
{
CatalogProduct Ctgkp = new CatalogProduct
{
Name = dicItem.ContainsKey("Name") ? dicItem["Name"] : "No Product",
Slug = dicItem.ContainsKey("Name") ? string.Concat(dicItem["Name"].Where(c => !char.IsWhiteSpace(c))).ToLower(CultureInfo.CurrentCulture) : "No Slug",
Price = dicItem.ContainsKey("Price") ? decimal.Parse(dicItem["Price"], CultureInfo.InvariantCulture) : default,
ShortDescription = dicItem.ContainsKey("ShortDescription") ? dicItem["ShortDescription"] : null,
Description = dicItem.ContainsKey("Description") ? dicItem["Description"] : null,
Specification = dicItem.ContainsKey("Specification") ? dicItem["Specification"] : null,
RatingAverage = dicItem.ContainsKey("RatingAverage") ? double.Parse(dicItem["RatingAverage"], CultureInfo.InvariantCulture) : null,
MetaTitle = dicItem.ContainsKey("MetaTitle") ? dicItem["MetaTitle"] : null,
MetaKeywords = dicItem.ContainsKey("MetaKeywords") ? dicItem["MetaKeywords"] : null,
MetaDescription = dicItem.ContainsKey("MetaDescription") ? dicItem["MetaDescription"] : null,
Sku = dicItem.ContainsKey("Sku") ? dicItem["Sku"] : null,
Gtin = dicItem.ContainsKey("Gtin") ? dicItem["Gtin"] : null,
NormalizedName = dicItem.ContainsKey("NormalizedName") ? dicItem["NormalizedName"] : null,
StockQuantity = dicItem.ContainsKey("StockQuantity") ? int.Parse(dicItem["StockQuantity"], CultureInfo.InvariantCulture) : 50,
ReviewsCount = dicItem.ContainsKey("ReviewsCount") ? int.Parse(dicItem["ReviewsCount"], CultureInfo.InvariantCulture) : default,
DisplayOrder = dicItem.ContainsKey("DisplayOrder") ? int.Parse(dicItem["DisplayOrder"], CultureInfo.InvariantCulture) : 1,
OldPrice = dicItem.ContainsKey("OldPrice") ? decimal.Parse(dicItem["OldPrice"], CultureInfo.InvariantCulture) : null,
SpecialPrice = dicItem.ContainsKey("SpecialPrice") ? decimal.Parse(dicItem["SpecialPrice"], CultureInfo.InvariantCulture) : null,
SpecialPriceStart = dicItem.ContainsKey("SpecialPriceStart") ? DateTimeOffset.Parse(dicItem["SpecialPriceStart"], CultureInfo.InvariantCulture) : null,
SpecialPriceEnd = dicItem.ContainsKey("SpecialPriceEnd") ? DateTimeOffset.Parse(dicItem["SpecialPriceEnd"], CultureInfo.InvariantCulture) : null,
IsPublished = true,
PublishedOn = DateTimeOffset.Now,
CreatedById = 10,
IsDeleted = false,
CreatedOn = DateTimeOffset.UtcNow,
LatestUpdatedOn = DateTimeOffset.UtcNow,
LatestUpdatedById = 10,
HasOptions = false,
IsVisibleIndividually = true,
IsFeatured = true,
IsCallForPricing = false,
IsAllowToOrder = true,
StockTrackingIsEnabled = true
};
ListKp.Add(Ctgkp);
}
using (var transaction = _context.Database.BeginTransaction())
{
try
{
int numCpTotal = 0;
var catalogProducts = _context.CatalogProducts.ToList();
var config = new MapperConfiguration(cfg => cfg.CreateMap<CatalogProduct, CatalogProduct>()
.ForMember(c => c.Id, opt => opt.Ignore()));
var mapper = config.CreateMapper();
_context.ChangeTracker.AutoDetectChangesEnabled = false;
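// note: SaveChanges is called inside the loop below for every new product
// (and once per removed duplicate), forcing a database round trip each time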
foreach (var kp in ListKp)
{
var existingRootProduct = _context.CatalogProducts.SingleOrDefault(x => x.Name == kp.Name);
if (existingRootProduct == null)
{
if (ListProductsCheck.Count > 0)
{
var firstItem = ListProductsCheck.ToList();
foreach (var item in firstItem)
{
_context.CatalogProducts.RemoveRange(_context.CatalogProducts.Where(c => c.Name == item));
_context.CoreEntities.RemoveRange(_context.CoreEntities.Where(c => c.Name == item));
_context.SaveChanges();
}
}
_context.CatalogProducts.Add(kp);
_context.SaveChanges();
CoreEntity ce = new CoreEntity
{
Name = kp.Name,
Slug = string.Concat(kp.Name.Where(c => !char.IsWhiteSpace(c))).ToLower(CultureInfo.CurrentCulture),
EntityId = kp.Id,
EntityTypeId = "Product",
};
_context.CoreEntities.Add(ce);
}
else
{
mapper.Map<CatalogProduct, CatalogProduct>(kp, existingRootProduct);
}
}
(from q in _context.K360MappingMasters
where q.ApiContent == baseurl
select q).ToList().ForEach(x => x.InsertStatusFlag = true);
_context.ChangeTracker.DetectChanges();
numCpTotal = _context.SaveChanges();
_context.ChangeTracker.AutoDetectChangesEnabled = true;
transaction.Commit();
return Json(new { Status = "success", Message = "No conflicts. " + numCpTotal + " product details saved." });
}
catch (Exception ex)
{
transaction.Rollback();
return Json(new { Status = "warning", Message = ex.Message });
throw new Exception();
}
}
}
catch (Exception ex)
{
return Json(new { Status = "warning", Message = ex.Message });
throw new Exception();
}
}
else
{
return RedirectToAction("Index");
}
}
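
A hedged observation on the loops above (a sketch, not a verified fix): _context.CatalogProducts.ToList() executes inside the innermost per-field loop, so the whole products table is pulled from the database once per JSON field, and SaveChanges runs once per product. Hoisting the name lookup into a HashSet removes the repeated query; the identifiers are the ones from the question:
// Sketch only: query the product names once, then test membership in memory.
var existingNames = new HashSet<string>(
    _context.CatalogProducts.Select(p => p.Name));

foreach (var dicItemML in ListJsonProductContent)
{
    foreach (var itemMl in dicItemML)
    {
        if (existingNames.Contains(itemMl.Value))
        {
            ListProductsCheck.Add(itemMl.Value);
        }
    }
}
// Similarly, collecting the new CatalogProducts and calling SaveChanges once
// after the loop lets EF Core batch the inserts instead of one round trip each.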

Ordering properties in PowerShell classes with inheritance

When using PowerShell classes with inheritance, is there any way to specify the order of properties?
In the inheritance example below, there are two classes (HttpResponseOK and HttpResponseBadRequest), both of which inherit from another class (HttpResponse).
In this case, creating an instance of either HttpResponseOK or HttpResponseBadRequest results in the properties of those objects being listed before the properties of HttpResponse. For example:
Code
$HttpResponseOK = [HttpResponse]::OK("Everything is OK")
$HttpResponseOK | ConvertTo-Json
$HttpResponseBadRequest = [HttpResponse]::BadRequest("Something bad happened")
$HttpResponseBadRequest | ConvertTo-Json
Response
{
"notification": "Everything is OK",
"statusCode": 200
}
{
"exception": "Something bad happened",
"statusCode": 400
}
As you can see, when the response is sent to a user, the statusCode property appears after the notification/exception property. Given that notification/exception will in reality be objects, this means that the statusCode is not initially visible in the response.
Is there a way to specify that statusCode should go before notification/exception?
Inheritance Example
Class HttpResponse
{
[int]$statusCode = [System.Net.HttpStatusCode]::OK
[void]SetStatus(
[int]$statusCode
)
{
$this.statusCode = $statusCode
}
static [HttpResponseOK]OK(
[object]$notification
)
{
$response = [HttpResponseOK]::new($notification)
return $response
}
static [HttpResponseBadRequest]BadRequest(
[object]$exception
)
{
$response = [HttpResponseBadRequest]::new($exception)
return $response
}
}
Class HttpResponseOK : HttpResponse
{
[object]$notification
HttpResponseOK(
[object]$notification
)
{
$this.SetStatus([System.Net.HttpStatusCode]::OK)
$this.notification = $notification
}
}
Class HttpResponseBadRequest : HttpResponse
{
[object]$exception
HttpResponseBadRequest(
[object]$exception
)
{
$this.SetStatus([System.Net.HttpStatusCode]::BadRequest)
$this.exception = $exception
}
}
I've never done much with classes in PowerShell, but if I move the declarations for both [object]$notification and [object]$exception to the base class HttpResponse, the order seems to be kept:
Class HttpResponse
{
[int]$statusCode = [System.Net.HttpStatusCode]::OK
[object]$notification
[object]$exception
[void]SetStatus(
[int]$statusCode
)
{
$this.statusCode = $statusCode
}
static [HttpResponseOK]OK(
[object]$notification
)
{
$response = [HttpResponseOK]::new($notification)
return $response
}
static [HttpResponseBadRequest]BadRequest(
[object]$exception
)
{
$response = [HttpResponseBadRequest]::new($exception)
return $response
}
}
Class HttpResponseOK : HttpResponse
{
HttpResponseOK(
[object]$notification
)
{
$this.SetStatus([System.Net.HttpStatusCode]::OK)
$this.notification = $notification
}
}
Class HttpResponseBadRequest : HttpResponse
{
HttpResponseBadRequest(
[object]$exception
)
{
$this.SetStatus([System.Net.HttpStatusCode]::BadRequest)
$this.exception = $exception
}
}
$HttpResponseOK = [HttpResponse]::OK("Everything is OK")
$HttpResponseOK | ConvertTo-Json
$HttpResponseBadRequest = [HttpResponse]::BadRequest("Something bad happened")
$HttpResponseBadRequest | ConvertTo-Json
Output:
{
"statusCode": 200,
"notification": "Everything is OK",
"exception": null
}
{
"statusCode": 400,
"notification": null,
"exception": "Something bad happened"
}
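
An alternative worth noting (a sketch against the original classes, not part of the answer above): keep the properties on the subclasses and force the order at serialization time instead. Select-Object emits properties in the order they are named, and ConvertTo-Json preserves that order, which also avoids the null notification/exception fields shown above:
# Sketch only: reorder at serialization time rather than in the class hierarchy.
$HttpResponseOK = [HttpResponse]::OK("Everything is OK")
$HttpResponseOK | Select-Object statusCode, notification | ConvertTo-Json

$HttpResponseBadRequest = [HttpResponse]::BadRequest("Something bad happened")
$HttpResponseBadRequest | Select-Object statusCode, exception | ConvertTo-Json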

opcua session was closed by client

I have written the attached OpcUaConnector class for OPC UA connection-related activities.
But it is not handling the session. For example, when I:
disabled the endpoint in the OPC UA configuration
did runtime > reinitialize in the KEPServer configuration
the Windows service threw:
Source : System.Reactive.Core
InnerException : The session was closed by client
and stopped, as this error goes unhandled.
Can someone suggest how to handle the session in OPC UA?
public class OpcUaConnector
{
private static SimplerAES simplerAES = new SimplerAES();
private DataContainer dataCointainer = null;
private UaTcpSessionChannel channel;
private string opcServerName = string.Empty;
private string opcUserId = string.Empty;
private string opcPassword = string.Empty;
private static ILog LogOpcStore;
private static System.IDisposable token;
private static uint id;
public OpcUaConnector(ILog Log)
{
IntializeLogOpcStore(Log);
}
private static void IntializeLogOpcStore(ILog Log)
{
LogOpcStore = Log;
}
public async Task OpenOpcConnection()
{
try
{
if ((!string.IsNullOrEmpty(this.opcServerName) & (this.opcServerName != AppMain.MyAppSettings.OpcServer)) ||
(!string.IsNullOrEmpty(this.opcUserId) & (this.opcUserId != AppMain.MyAppSettings.OpcUserId)) ||
(!string.IsNullOrEmpty(this.opcPassword) & (this.opcPassword != AppMain.MyAppSettings.OpcPassword)))
{
await channel.CloseAsync();
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
if (channel==null || (channel != null && (channel.State == CommunicationState.Closed || channel.State == CommunicationState.Faulted)))
{
var appDescription = new ApplicationDescription()
{
ApplicationName = "MyAppName",
ApplicationUri = $"urn:{System.Net.Dns.GetHostName()}:MyAppName",
ApplicationType = ApplicationType.Client,
};
//application data won't be deleted when uninstall
var certificateStore = new DirectoryStore(
Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), "MyAppName", "pki"),
true, true
);
//if the Ethernet cable unplugs or the Wifi drops out,
//you have some timeouts that can keep the session open for a while.
//There is a SessionTimeout (default of 2 min).
this.channel = new UaTcpSessionChannel(
appDescription,
certificateStore,
SignInOpc,
AppMain.MyAppSettings.OpcServer,
null,
options: new UaTcpSessionChannelOptions { SessionTimeout = 120000 });
await channel.OpenAsync();
//LogOpcStore.Info(String.Format("Opc connection sucessful"));
}
this.opcServerName = AppMain.MyAppSettings.OpcServer;
this.opcUserId = AppMain.MyAppSettings.OpcUserId;
this.opcPassword = AppMain.MyAppSettings.OpcPassword;
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.HResult + " " + ex.Message, "C052");
throw serviceException;
}
}
private static async Task RecursivelyFindNode(UaTcpSessionChannel channel, NodeId nodeid)
{
BrowseRequest browseRequest = new BrowseRequest
{
NodesToBrowse = new BrowseDescription[] { new BrowseDescription { NodeId = nodeid, BrowseDirection = BrowseDirection.Forward, ReferenceTypeId = NodeId.Parse(ReferenceTypeIds.HierarchicalReferences), NodeClassMask = (uint)NodeClass.Variable | (uint)NodeClass.Object, IncludeSubtypes = true, ResultMask = (uint)BrowseResultMask.All } },
};
BrowseResponse browseResponse = await channel.BrowseAsync(browseRequest);
foreach (var rd1 in browseResponse.Results[0].References ?? new ReferenceDescription[0])
{
uint chid = AppMain.MyTagDatabase.GetClientHandleByTag(rd1.DisplayName.ToString());
if (chid > 0)
{
AppMain.MyTagDatabase.UpdateNodeByClientHandle(chid, rd1.NodeId.ToString());
}
await RecursivelyFindNode(channel, ExpandedNodeId.ToNodeId(rd1.NodeId, channel.NamespaceUris));
}
}
public async Task CreateSubscription(DataContainer dc)
{
double curReadingValue;
try
{
dataCointainer = dc;
await RecursivelyFindNode(channel, NodeId.Parse(ObjectIds.RootFolder));
if (AppMain.MyTagDatabase.GetCntTagsNotInOpcServer() == AppMain.MyTagDatabase.GetTagCount())
{
//no need to create subscription
return;
}
//subscription timeout that is the product of PublishingInterval * LifetimeCount:
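//here: 1000 ms * (30 * 3) = 90 seconds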
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest);
id = subscriptionResponse.SubscriptionId;
var itemsToCreate = new MonitoredItemCreateRequest[AppMain.MyTagDatabase.GetTagHavingNodeCount()];
int i = 0;
foreach (var item in AppMain.MyTagDatabase.GetMyTagDatabase())
{
var itemKey = item.Key;
var itemValue = item.Value;
itemsToCreate[i] = new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse(itemValue.NodeId), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = itemKey, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } };
i++;
}
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = itemsToCreate,
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
}
catch (Exception ex)
{
//If the interruption lasts longer than these timeouts then the SessionChannel and Subscriptions will need to be recreated.
channel = null;
FatalServiceException fatalserviceException = new FatalServiceException(ex.Message, "C052");
throw fatalserviceException;
}
}
public async Task DeleteSubscription()
{
try
{
var request = new DeleteSubscriptionsRequest
{
SubscriptionIds = new uint[] { id }
};
await channel.DeleteSubscriptionsAsync(request);
token.Dispose();
}
catch (Exception ex)
{
ServiceException serviceException = new ServiceException(ex.Message, "C052");
throw serviceException;
}
}
private static async Task<IUserIdentity> SignInOpc(EndpointDescription endpoint)
{
IUserIdentity userIdentity = null;
if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.Anonymous))
{
userIdentity = new AnonymousIdentity();
}
else if (endpoint.UserIdentityTokens.Any(p => p.TokenType == UserTokenType.UserName))
{
var userName = AppMain.MyAppSettings.OpcUserId;
var password = simplerAES.Decrypt(AppMain.MyAppSettings.OpcPassword);
userIdentity = new UserNameIdentity(userName, password);
}
return userIdentity;
}
private void AddDataPointToContainer(int dataType, string source, DateTime SourceTimestampUTC, double value)
{
ConditionValue conditionValue = new ConditionValue();
long timestamp = AppMain.ServerSyncTimeStore.ConvertDateTimeToTimeStampUTC(SourceTimestampUTC);
conditionValue.dataType = dataType;
conditionValue.source = source;
conditionValue.timestamp = timestamp;
conditionValue.SourceTimestampUTC = SourceTimestampUTC;
conditionValue.LocalTime = SourceTimestampUTC.ToLocalTime();
conditionValue.value = value;
//LogOpcStore.Info(String.Format("TagName : {0} SourceTimestampUTC : {1} timestamp : {2} LocalTime : {3} curReadingValue : {4}", source, SourceTimestampUTC, timestamp, SourceTimestampUTC.ToLocalTime(), value));
dataCointainer.AddDataPoint(conditionValue);
}
}
I see you are using the project https://github.com/convertersystems/opc-ua-client.
When a server closes the session and socket (as happens when you reinitialize Kepware) the client receives immediate notification that causes the client channel to fault. A faulted channel cannot be reopened, it should be aborted and a new channel should be created.
I made this standalone test to show that you may have to catch the exception and recreate the channel and subscription. The point of the test is to subscribe to the CurrentTime node and collect 60 data changes, so the test should last about a minute. If you re-init the Kepware server in the middle of the test, the code catches the exception and recreates the channel and subscription.
[TestMethod]
public async Task OpcConnectorTest()
{
var count = 0;
UaTcpSessionChannel channel = null;
while (count < 60)
{
try
{
channel = new UaTcpSessionChannel(
this.localDescription,
this.certificateStore,
new AnonymousIdentity(),
EndpointUrl,
SecurityPolicyUris.None,
loggerFactory: this.loggerFactory);
await channel.OpenAsync();
// create the keep alive subscription.
var subscriptionRequest = new CreateSubscriptionRequest
{
RequestedPublishingInterval = 1000f,
RequestedMaxKeepAliveCount = 30,
RequestedLifetimeCount = 30 * 3,
PublishingEnabled = true,
};
var subscriptionResponse = await channel.CreateSubscriptionAsync(subscriptionRequest).ConfigureAwait(false);
var id = subscriptionResponse.SubscriptionId;
var token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
Console.WriteLine($"sub: {pr.SubscriptionId}; handle: {min.ClientHandle}; value: {min.Value}");
count++;
}
}
});
var itemsRequest = new CreateMonitoredItemsRequest
{
SubscriptionId = id,
ItemsToCreate = new MonitoredItemCreateRequest[]
{
new MonitoredItemCreateRequest { ItemToMonitor = new ReadValueId { NodeId = NodeId.Parse("i=2258"), AttributeId = AttributeIds.Value }, MonitoringMode = MonitoringMode.Reporting, RequestedParameters = new MonitoringParameters { ClientHandle = 12345, SamplingInterval = -1, QueueSize = 0, DiscardOldest = true } }
},
};
var itemsResponse = await channel.CreateMonitoredItemsAsync(itemsRequest);
while (channel.State == CommunicationState.Opened && count < 60)
{
await Task.Delay(1000);
}
}
catch (Exception ex)
{
Console.WriteLine($"Exception: {ex.GetType()}. {ex.Message}");
}
}
if (channel != null)
{
Console.WriteLine($"Closing session '{channel.SessionId}'.");
await channel.CloseAsync();
}
}
I know this is an old post, but I stumbled upon this problem as well. For those interested:
The problem is related to the subscription(s).
When the following code is run:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(pr =>
{
// loop thru all the data change notifications
// receiving data change notifications here
var dcns = pr.NotificationMessage.NotificationData.OfType<DataChangeNotification>();
foreach (var dcn in dcns)
{
foreach (var min in dcn.MonitoredItems)
{
MyTag MyTag = new MyTag();
bool hasValue = AppMain.MyTagDatabase.GetMyTag(min.ClientHandle, out MyTag);
if (hasValue)
{
if (double.TryParse(min.Value.Value.ToString(), out curReadingValue))
{
//LogOpcStore.Info(String.Format("ClientHandle : {0} TagName : {1} SourceTimestamp : {2} ServerTimeStamp : {3} curReadingValue : {4}", min.ClientHandle, MyTag.TagName, min.Value.SourceTimestamp, min.Value.ServerTimestamp, curReadingValue));
AddDataPointToContainer(1, MyTag.TagName, min.Value.SourceTimestamp, curReadingValue);
}
}
}
}
});
Observable.Subscribe() takes multiple arguments. You should include what to do in case of an error. For example:
token = channel.Where(pr => pr.SubscriptionId == id).Subscribe(
pr => { code to run normally... },
ex => { Log.Info(ex.Message); },
() => { }
);
See http://reactivex.io/documentation/operators/subscribe.html for more information.