Parameterize the find method in python using mongo - mongodb

Files to upload will be like WFSIV0101202001.318.tar.gz,WFSIV0101202001.2624.tar.gz etc.
# Upload-file names look like WFSIV0101202001.318.tar.gz: characters 5-13
# of the first dot-part hold the date, and the second dot-part is the
# third-party site id used to query MongoDB.
INPUT_FILE_PATH = r'C:\Files to upload'   # raw string keeps the backslash literal

try:
    import os
    import sys
    from google.cloud import storage
    import pymongo
    import pymongo.errors
    from pymongo import MongoClient
    from pymongo.errors import ConnectionFailure
except ImportError as err:
    # BUG FIX: the original printed "missing modules" and fell through,
    # which later surfaced as a confusing NameError when the names were
    # used.  Stop immediately instead.
    raise SystemExit("missing modules: {}".format(err))

try:
    mongo_client = MongoClient(host="xyz.com", port=27017)
    Db = mongo_client['abcd']
    coll = Db['shopper_journey_sitedata']
except ConnectionFailure:
    raise SystemExit("Connection failed")

date = []
site_ids = []
for input_file in os.listdir(INPUT_FILE_PATH):
    parts = input_file.split(".")
    date.append(parts[0][5:13])    # e.g. "01012020"
    site_ids.append(parts[1])      # e.g. "318" -- note: a string, not an int

# Parameterised find(): one query per collected site id.  A single query
# for every id at once would be:
#     coll.find({"third_party_site_id": {"$in": site_ids}})
for tp_site_id in site_ids:
    for doc in coll.find({"third_party_site_id": tp_site_id}):
        print(doc)
Now I want to parameterize the find() method for every id, so that each iteration fetches the documents for that third_party_site_id.
I tried the code above, but it fails with a NameError (reported as "Datas: name error").

You can do one thing
coll.find({"third_party_site_id": {"$in":
["318", "2624", "2621", "2622", "102", "078"]}})
Note: the ids parsed from the file names are strings, so quote them in the $in list (an unquoted 078 is also a syntax error in Python 3).
If Tid is an array, then you could replace 318 in your query to Tid[I]

Related

Trigger in pymongo

My Requirement is as follows:
In the code below, whenever records get inserted into the "col" collection I need to apply some manipulation to the "col2" collection. How do I achieve this, and is my code correct? I am trying to see what insert_change prints, but it prints nothing.
import pymongo
import logging
import Parameters,constants
class Stream:
    """Watch a MongoDB collection's change stream and react to inserts."""

    def get_connection(self):
        """Connect to MongoDB and return (col, col2, db)."""
        client = pymongo.MongoClient(
            "mongodb://" + constants.USER_NAME + ":" + constants.PWD
            + constants.server + constants.CA_CERTIFICATES_PATH)
        logging.info("Mongo DB Connection Established Successfully")
        db = client[Parameters.STG_QC_Hub_Files]
        col = db[Parameters.col7]
        col2 = db[Parameters.col8]
        return col, col2, db

    def insert(self, col, col2, db):
        """Print every document inserted into *col* (blocks forever).

        BUG FIX: the original called ``db.col.watch(...)``, which watches a
        collection literally named "col" -- inserts into the real collection
        therefore never appeared.  Watch the collection object that was
        passed in instead.  *col2* and *db* stay in the signature for
        callers that post-process the inserts.
        """
        try:
            with col.watch([{'$match': {'operationType': 'insert'}}]) as stream:
                print(stream)
                for insert_change in stream:
                    print("hi")
                    print(insert_change)
        except pymongo.errors.PyMongoError:
            # The ChangeStream encountered an unrecoverable error or the
            # resume attempt failed to recreate the cursor.
            logging.error('...')


c = Stream()
col, col2, db = c.get_connection()
c.insert(col, col2, db)
The below code will print what you have inserted into the table:
import pymongo
import logging
import Parameters,constants,mongotriggers
from bson.json_util import dumps
class Stream:
    """Tail a MongoDB change stream and dump every inserted document."""

    def get_connection(self):
        """Connect to MongoDB and return the two collections plus the db handle."""
        client = pymongo.MongoClient(
            "mongodb://" + constants.USER_NAME + ":" + constants.PWD
            + constants.server + constants.CA_CERTIFICATES_PATH)
        logging.info("Mongo DB Connection Established Successfully")
        db = client[Parameters.QC_Quality_Hub]
        col = db[Parameters.col7]
        col2 = db[Parameters.col8]
        return col, col2, db

    def insert(self, col, col2, db):
        """Print (as extended JSON) every document inserted into *col*.

        Blocks forever on the change stream.  *col2* and *db* are kept in
        the signature for callers that post-process the inserts.
        """
        try:
            pipeline = [{"$match": {"operationType": "insert"}}]
            # Dead code removed from the original: an unused resume_token
            # and a full col.find({}) materialised into a throwaway list.
            with col.watch(pipeline) as stream:
                for insert_change in stream:
                    print(dumps(insert_change))
        except pymongo.errors.PyMongoError:
            logging.error("Oops")


c = Stream()
col, col2, db = c.get_connection()
c.insert(col, col2, db)

Reminder Command using MongoDB discord.py

I am new to databases and datetime and stuff.
I actually want to create a reminder command which uses MongoDB as a Database. I am using Motor as I want to use asyncio along with it. Please tell me if I am on the right path or not and if I am not, then what should I do?
I have setup the basic connection with MongoDB using motor.
Here is my code.
import discord
from discord.ext import commands
import pymongo
from pymongo import MongoClient
import os
import asyncio
import motor
import motor.motor_asyncio
class Reminder(commands.Cog):
    """Cog that stores reminder requests in MongoDB via Motor (asyncio)."""

    def __init__(self, bot):
        self.bot = bot

    # NOTE: the leading '#' on the decorators in the original paste was a
    # mangled '@' -- without real decorators neither the listener nor the
    # command ever registers with discord.py.
    @commands.Cog.listener()
    async def on_ready(self):
        print('Reminder is Ready.')

    @commands.command()
    async def remind(self, ctx, time, *, msg):
        """Store a reminder: *time* like '10m' / '2h', then the message text."""
        ### MongoDB Variables ###
        mongo_url = os.environ['Mongodb_url']
        cluster = motor.motor_asyncio.AsyncIOMotorClient(str(mongo_url))
        db = cluster['Database']
        collection = db['reminder']
        ### Discord Variables ###
        author_id = ctx.author.id
        guild_id = ctx.guild.id
        ### Time Variables ###
        time_conversion = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        # BUG FIX: int(time[0]) read only the first digit, so '90m' became
        # 9 minutes; use every character except the trailing unit letter.
        remindertime = int(time[:-1]) * time_conversion[time[-1]]
        if ctx.author.bot:
            return
        if await collection.count_documents({}) == 0:
            rem_info = {"_id": author_id, "GuildID": guild_id,
                        "time": remindertime, "msg": msg}
            await collection.insert_one(rem_info)
            await ctx.send('Logged In')


def setup(bot):
    bot.add_cog(Reminder(bot))
What is the Reminder command, and what do I want it to do?
Basically, the command will take the amount of time to be reminded and the topic to be reminded about as arguments.
After the certain amount of time specified in the command, it will DM the user that "You asked me to remind you about {topic}".
I hope that is all the needed information.
attachment to the comments below the question:
to check if it is time to remind a user you can use the datetime module
import discord
from discord.ext import commands, tasks
import pymongo
from pymongo import MongoClient
import os
import asyncio
import motor
import motor.motor_asyncio
import datetime
from datetime import datetime, timedelta
### MongoDB Variables ###
mongo_url = os.environ['Mongodb_url']
cluster = motor.motor_asyncio.AsyncIOMotorClient(str(mongo_url))
db = cluster['Database']
collection = db['reminder']


class Reminder(commands.Cog):
    """Reminder cog: stores due-times in MongoDB and polls them once a minute."""

    def __init__(self, bot):
        self.bot = bot
        self.reminder_task.start()

    def cog_unload(self):
        self.reminder_task.cancel()

    # The '#' before the decorators in the original paste was a mangled '@'.
    @tasks.loop(minutes=1.0)
    async def reminder_task(self):
        # Motor returns an *async* cursor, so iterate with ``async for``.
        async for reminder in collection.find({}):
            # reminder looks like:
            # {"_id": 1234, "GuildID": 1234, "time": datetime_object, "msg": "some text"}
            now = datetime.now()
            # BUG FIX: compare against the stored timestamp, not the whole
            # document dict.
            if now >= reminder['time']:
                # BUG FIX: __init__ sets self.bot, not self.client.
                guild = self.bot.get_guild(reminder['GuildID'])
                member = guild.get_member(reminder['_id'])
                await member.send(f"reminder for {reminder['msg']}")

    @commands.Cog.listener()
    async def on_ready(self):
        print('Reminder is Ready.')

    @commands.command()
    async def remind(self, ctx, time, *, msg):
        """Reminder Command"""
        # like above -- just change this
        ### Time Variables ###
        time_conversion = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        # BUG FIX: int(time[0]) dropped every digit after the first.
        remindseconds = int(time[:-1]) * time_conversion[time[-1]]
        remindertime = datetime.now() + timedelta(seconds=remindseconds)


def setup(bot):
    bot.add_cog(Reminder(bot))

Why does the Excel process stay open after printing?

i use these packages:
openpyxl - copy excel templates, opened copied file, save data from db, print it and then delete;
pywin32 - send for printing to remote network printer by network name;
After one of these steps — I don't know which — an Excel process window is still open (see attached screenshot).
i attach the most using in my project functions.
this program it's like a web service, which listening 5000 port and print in needed template.
i delete all created files, because of no need to store all of them.
from openpyxl import load_workbook
import os
import app_config as config
import printers.printers as p
from datetime import datetime
import shutil
import time
class EditExcelTemplate:
    """Copy an Excel template, fill it with data, then print and delete it.

    One instance corresponds to one generated report file under
    ``config.EXCEL_REPORT_PATH``.
    """

    def __init__(self, template_name):
        # Zero-padded timestamp via strftime.  The original unpadded
        # "{}{}{}" formatting produced ambiguous names (Jan 11 and Nov 1
        # both render as "111") and so risked collisions.
        report_name = datetime.now().strftime("_%Y%m%d_%H%M%S_%f")
        self.report_path = config.EXCEL_REPORT_PATH.format(template_name +
                                                           report_name)
        shutil.copy(src=config.EXCEL_TEMPLATE_PATH.format(template_name),
                    dst=self.report_path)
        start_load = time.time()
        self.wb = load_workbook(filename=self.report_path,
                                keep_links=False,
                                keep_vba=False,
                                data_only=True)
        end_load = time.time()
        print('LOAD WORKBOOK|{}'.format(str(end_load - start_load)))
        self.ws = self.wb.active
        self.answer = {'file_name': template_name.upper()}

    def write_workbook(self, row_dest, column_dest, value):
        """Write *value* into the cell at (row_dest, column_dest)."""
        c = self.ws.cell(row=row_dest, column=column_dest)
        c.value = value

    def save_excel(self):
        """Persist the in-memory workbook back to the report file."""
        self.wb.save(self.report_path)

    def print_excel(self, printer_no):
        """Send the report to the printer registered under *printer_no*."""
        p.print_excel(printer_no=printer_no, path_to_file=self.report_path)

    def print_excel_file(self, printer_name):
        """Send the report to the printer identified by network name."""
        p.print_excel_file(printer_name=printer_name, path_to_file=self.report_path)
import win32api
import app_config
import os, time
def print_excel(printer_no, path_to_file):
    """Send *path_to_file* to the printer registered under *printer_no*.

    Uses the Windows shell 'printto' verb, which delegates printing to the
    application associated with the file type (Excel for .xlsx).
    """
    target_printer = app_config.PRINTER_NAMES[printer_no]
    win32api.ShellExecute(
        1,                              # hwnd
        'printto',                      # shell verb
        path_to_file,                   # document to print
        '{}'.format(target_printer),    # printer passed as the verb parameter
        '.',                            # working directory
        0,                              # show-window flag
    )
def delete_file(path_to_file, try_count=1):
    """Delete *path_to_file*, retrying once a second while it is locked.

    The file may still be held open by the printing application
    (PermissionError), so up to ``60 - try_count`` attempts are made.
    A missing file is silently ignored; if the file is still locked after
    the last attempt the function gives up without raising.
    """
    if not os.path.exists(path=path_to_file):
        return
    # os.path.basename instead of the hand-rolled split('\\')[-1] -- same
    # result on Windows, and portable to forward-slash paths.
    file_name = os.path.basename(path_to_file)
    while try_count < 60:
        try:
            os.remove(path_to_file)
            print('File {} deleted!'.format(file_name))
            break
        except PermissionError:
            print('Can not delete file {}. Hold {} sec.'.format(file_name, try_count))
            time.sleep(1.0)
            try_count += 1

How to create a mongoengine connection with ssh?

I'm trying to create a connection and add a document with mongoengine through an SSH tunnel.
A successful attempt with pymongo can be seen below, I simply want something similar with mongoengine. :-)
from auth import *
import pymongo
from sshtunnel import SSHTunnelForwarder
# Open an SSH tunnel to the Mongo host, then connect through the local
# forwarded port.  Every client -- pymongo AND mongoengine -- must be
# created and used while the tunnel is still up.
server = SSHTunnelForwarder(
    (HOST_IP, HOST_PORT),
    ssh_username=SSH_USER,
    ssh_password=SSH_PASS,
    remote_bind_address=('localhost', 27017)
)
server.start()

# pymongo through the tunnel (NOTE(review): db.authenticate and
# Collection.insert are legacy pymongo 2.x/3.x APIs -- confirm the
# installed version before reusing).
client = pymongo.MongoClient('127.0.0.1', server.local_bind_port)
db = client[MONGO_DB]
db.authenticate(MONGO_USER, MONGO_PASS)
coll = db.queue_db
coll.insert({"testFile42": 43})

# BUG FIX: the original called mongoengine.connect() *after* server.stop(),
# so the tunnel was already closed and every operation would fail.
# Connect while the tunnel is open (requires: import mongoengine).
mongoengine.connect(
    db=DB_NAME,
    host="127.0.0.1",
    port=server.local_bind_port
)

server.stop()

flask/MongoDB error on local server using raspberry pi3 - raspbian os

i've made a local server using flask and mongoDB which works great on windows, but when i moved my code to the raspberry pi, i've got an error which i couldn't figure out why it occurs.
the code im using:
1) for the flask server
from flask import Flask
from flask import jsonify
from flask import request
import pymongo
import time
import datetime
import json
app = Flask(__name__)

client = pymongo.MongoClient("localhost", 27017)
db = client['mqtt-db']
obs_collection = db['mqtt-collection']


# NOTE: the '#app.route' lines in the original paste were mangled
# '@app.route' decorators; without them Flask never registers the routes.
@app.route("/obs")
def obs():
    """Store the JSON document passed in the 'data' query parameter."""
    data_str = request.args.get("data")
    # print() calls -- the original Python 2 'print x' statements are a
    # SyntaxError on Python 3 (the Pi's default).
    print(data_str)
    data = json.loads(data_str)
    print(data)
    data["date"] = datetime.datetime.now()
    # insert_one replaces the deprecated Collection.save (removed in pymongo 4).
    obs_collection.insert_one(data)
    return "success"


@app.route("/get_obs")
def get_obs():
    """Return every stored observation, stripped of the non-JSON _id."""
    res = []
    for row in obs_collection.find():
        del row['_id']
        res.append(row)
    # jsonify on a list needs Flask >= 0.11 -- the reported ValueError came
    # from an older Flask on the Pi (see the accepted fix below).
    return jsonify(res)


@app.route("/delete_all")
def delete_all():
    """Drop all observations and report how many were removed."""
    res = obs_collection.delete_many({})
    return jsonify({"deleted": res.deleted_count})


if __name__ == "__main__":
    app.run(host="0.0.0.0", debug=True)
2) script for inserting messages into db , using mqtt protocol:
import paho.mqtt.client as mqtt
import pymongo
import json
import datetime
topic = "sensor"
host = "10.0.0.6"

# Separate name for the Mongo client: the original bound pymongo's client
# to ``client`` and then shadowed it with the MQTT client below.
mongo_client = pymongo.MongoClient("localhost", 27017)
db = mongo_client['mqtt-db']
mqtt_collection = db['mqtt-collection']


# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
    print("Connected with result code " + str(rc))
    # Subscribing in on_connect() means that if we lose the connection and
    # reconnect then subscriptions will be renewed.
    client.subscribe(topic)


# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
    # BUG FIX: on Python 3, str(msg.payload) yields "b'...'", which breaks
    # json.loads -- decode the bytes payload instead.
    data_str = msg.payload.decode('utf-8')
    data = json.loads(data_str)
    print(data_str)
    print(data)
    data["date"] = datetime.datetime.now()
    # insert_one replaces the deprecated Collection.save.
    mqtt_collection.insert_one(data)
    print(msg.topic + " " + str(msg.payload))


client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect(host, 1883, 60)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
client.loop_forever()
the error occurs when i try to retrieve data from the server using "get_obs" function.
the error is: "Value Error: dictionary update sequence element #0 has length 4; 2 is required"
appreciate your help.
As @davidism suggested, the solution was to update to the latest version of Flask.