I am using pyorient and want to parameterize queries. I am assuming that command() allows placeholders, but I am not able to find any documentation. In particular, I'd like to use dict() args as per postgres' %(name)s construct, but could make tuples/lists work as well.
I tried your case with my python_test database:
Dataset:
I used two parameters:
name ---> string;
surname ---> string;
and I passed them into the command() function.
PyOrient Code:
import pyorient

db_name = 'python_test'
user = 'root'
pwd = 'root'

print("Connecting...")
client = pyorient.OrientDB("localhost", 2424)
session_id = client.connect(user, pwd)
print("OK - sessionID: ", session_id, "\n")

if client.db_exists(db_name, pyorient.STORAGE_TYPE_PLOCAL):
    print("DB " + db_name + " already exists\n")
    client.db_open(db_name, user, pwd, pyorient.DB_TYPE_GRAPH, pyorient.STORAGE_TYPE_PLOCAL)
    name = 'test name'
    surname = 'test surname'
    vertexes = client.command("SELECT FROM TestClass WHERE name = '" + name + "' AND surname = '" + surname + "'")
    for vertex in vertexes:
        print(vertex)
else:
    print("Creating DB " + db_name + "...")
    client.db_create(db_name, pyorient.DB_TYPE_GRAPH, pyorient.STORAGE_TYPE_PLOCAL)
    print("DB " + db_name + " created\n")

client.db_close()
Output:
Connecting...
OK - sessionID: 40
DB python_test already exists
{'#TestClass':{'surname': 'test surname', 'name': 'test name'},'version':1,'rid':'#12:0'}
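As far as I can tell, command() just takes a fully built query string; I could not find documented placeholder support in pyorient. A dict-based substitution in the style of postgres' %(name)s can be approximated client-side with Python string formatting (a sketch only; it does not escape quotes, so sanitize the values first):

params = {'name': 'test name', 'surname': 'test surname'}
query = "SELECT FROM TestClass WHERE name = '%(name)s' AND surname = '%(surname)s'" % params
vertexes = client.command(query)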
Hope it helps
I have the following class that makes a connection to an MSSQL Server instance in my project:
"""MSSql Connection class."""
import logging
import logging.config
import sqlalchemy
class MSSQLConnection:
    """Wraps a SQLAlchemy engine/connection to an MSSQL Server instance."""

    _connection = None
    _engine = None

    def __init__(self, host, database, username, password):
        connection_string = self.build_connection_string(
            host, database, username, password)
        self._engine = sqlalchemy.create_engine(
            connection_string, fast_executemany=True,
            isolation_level="READ COMMITTED")

    def connect(self):
        self._connection = self._engine.connect()

    def get_result_tuple(self, table):
        """Return the current transaction id and a result proxy over the table."""
        logger = logging.getLogger()
        metadata = sqlalchemy.MetaData()
        db_table = sqlalchemy.Table(
            table, metadata, autoload=True, autoload_with=self._engine)
        query = sqlalchemy.select([db_table])
        transaction_id = self.get_transaction_id()
        result_proxy = self._connection.execute(query)
        return transaction_id, result_proxy

    def get_transaction_id(self):
        """Get the transaction id."""
        connection = self._connection
        sql_query = sqlalchemy.text("SELECT CURRENT_TRANSACTION_ID()")
        result = connection.execute(sql_query)
        row = result.fetchone()
        return row[0]

    def build_connection_string(self, host, database, username, password):
        connection_string = ('mssql+pyodbc://' + username + ':'
                             + password + '@' + host + '/' + database
                             + '?driver=ODBC+Driver+17+for+SQL+Server')
        return connection_string
I would like to mock the method get_result_tuple, which returns transaction_id and an instance of sqlalchemy.engine.cursor.LegacyCursorResult. How can I mock sqlalchemy.engine.cursor.LegacyCursorResult and return some dummy data on the ResultProxy object?
The caller has the following code,
mssql_connection = MSSQLConnection(
    host, database, username, password)
mssql_connection.connect()
result_tuple = mssql_connection.get_result_tuple(table)
transaction_id, result_proxy = result_tuple
logger.info(f'Transaction id = {transaction_id}')
current_date = date.today().strftime("%Y_%m_%d")
while True:
    partial_results = result_proxy.fetchmany(rows_fetch_limit)
    results_count = len(partial_results)
    if (partial_results == [] or results_count == 0):
        return
    else:
        # other logic
Please advise.
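One way to approach this is to stub get_result_tuple itself rather than build a real LegacyCursorResult. Below is a minimal sketch using the standard library's unittest.mock, assuming the class is importable in the test module and the caller only needs fetchmany() on the result proxy; the dummy rows, transaction id, and table name are made up for illustration:

from unittest import mock

fake_rows = [('a', 1), ('b', 2)]

# Stand-in for the LegacyCursorResult: only fetchmany() is faked.
fake_result_proxy = mock.Mock()
# First call returns one batch of dummy rows, the next returns [] so the
# caller's while-loop terminates.
fake_result_proxy.fetchmany.side_effect = [fake_rows, []]

with mock.patch('sqlalchemy.create_engine'), \
     mock.patch.object(MSSQLConnection, 'connect'), \
     mock.patch.object(MSSQLConnection, 'get_result_tuple',
                       return_value=(42, fake_result_proxy)):
    mssql_connection = MSSQLConnection('host', 'db', 'user', 'pwd')
    mssql_connection.connect()
    transaction_id, result_proxy = mssql_connection.get_result_tuple('my_table')
    assert transaction_id == 42
    assert result_proxy.fetchmany(100) == fake_rows
    assert result_proxy.fetchmany(100) == []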
I want to execute a query on a Flink SQL table backed by a Kafka topic on a secured Kafka cluster. I'm able to execute the query programmatically but unable to do the same through the Flink SQL client. I'm not sure how to pass the JAAS config (java.security.auth.login.config) and other system properties through the Flink SQL client.
Flink SQL query programmatically
private static void simpleExec_auth() {
    // Create the execution environment.
    final EnvironmentSettings settings = EnvironmentSettings.newInstance()
            .inStreamingMode()
            .withBuiltInCatalogName("default_catalog")
            .withBuiltInDatabaseName("default_database")
            .build();

    // System properties for the secured (Kerberos/SASL) Kafka cluster.
    System.setProperty("java.security.auth.login.config", "client_jaas.conf");
    System.setProperty("sun.security.jgss.native", "true");
    System.setProperty("sun.security.jgss.lib", "/usr/libexec/libgsswrap.so");
    System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");

    TableEnvironment tableEnvironment = TableEnvironment.create(settings);
    String createQuery = "CREATE TABLE test_flink11 ( " + "`keyid` STRING, " + "`id` STRING, "
            + "`name` STRING, " + "`age` INT, " + "`color` STRING, " + "`rowtime` TIMESTAMP(3) METADATA FROM 'timestamp', " + "`proctime` AS PROCTIME(), " + "`address` STRING) " + "WITH ( "
            + "'connector' = 'kafka', "
            + "'topic' = 'test_flink10', "
            + "'scan.startup.mode' = 'latest-offset', "
            + "'properties.bootstrap.servers' = 'kafka01.nyc.com:9092', "
            + "'value.format' = 'avro-confluent', "
            + "'key.format' = 'avro-confluent', "
            + "'key.fields' = 'keyid', "
            + "'value.fields-include' = 'EXCEPT_KEY', "
            + "'properties.security.protocol' = 'SASL_PLAINTEXT', 'properties.sasl.kerberos.service.name' = 'kafka', 'properties.sasl.kerberos.kinit.cmd' = '/usr/local/bin/skinit --quiet', 'properties.sasl.mechanism' = 'GSSAPI', "
            + "'key.avro-confluent.schema-registry.url' = 'http://kafka-schema-registry:5037', "
            + "'key.avro-confluent.schema-registry.subject' = 'test_flink6', "
            + "'value.avro-confluent.schema-registry.url' = 'http://kafka-schema-registry:5037', "
            + "'value.avro-confluent.schema-registry.subject' = 'test_flink4')";
    System.out.println(createQuery);
    tableEnvironment.executeSql(createQuery);

    TableResult result = tableEnvironment
            .executeSql("SELECT name,rowtime FROM test_flink11");
    result.print();
}
This is working fine.
Flink SQL query through SQL client
Running this gives the following error.
Flink SQL> CREATE TABLE test_flink11 (`keyid` STRING,`id` STRING,`name` STRING,`address` STRING,`age` INT,`color` STRING) WITH('connector' = 'kafka', 'topic' = 'test_flink10','scan.startup.mode' = 'earliest-offset','properties.bootstrap.servers' = 'kafka01.nyc.com:9092','value.format' = 'avro-confluent','key.format' = 'avro-confluent','key.fields' = 'keyid', 'value.avro-confluent.schema-registry.url' = 'http://kafka-schema-registry:5037', 'value.avro-confluent.schema-registry.subject' = 'test_flink4', 'value.fields-include' = 'EXCEPT_KEY', 'key.avro-confluent.schema-registry.url' = 'http://kafka-schema-registry:5037', 'key.avro-confluent.schema-registry.subject' = 'test_flink6', 'properties.security.protocol' = 'SASL_PLAINTEXT', 'properties.sasl.kerberos.service.name' = 'kafka', 'properties.sasl.kerberos.kinit.cmd' = '/usr/local/bin/skinit --quiet', 'properties.sasl.mechanism' = 'GSSAPI');
Flink SQL> select * from test_flink11;
[ERROR] Could not execute SQL statement. Reason:
java.lang.IllegalArgumentException: Could not find a 'KafkaClient' entry in the JAAS configuration. System property 'java.security.auth.login.config' is /tmp/jaas-6309821891889949793.conf
There is nothing in /tmp/jaas-6309821891889949793.conf except the following comment
# We are using this file as an workaround for the Kafka and ZK SASL implementation
# since they explicitly look for java.security.auth.login.config property
# Please do not edit/delete this file - See FLINK-3929
SQL client run command
bin/sql-client.sh embedded --jar flink-sql-connector-kafka_2.11-1.12.0.jar --jar flink-sql-avro-confluent-registry-1.12.0.jar
Flink cluster command
bin/start-cluster.sh
How do I pass java.security.auth.login.config and the other system properties (that I'm setting in the Java snippet above) to the SQL client?
flink-conf.yaml
# Either authenticate with the Kerberos ticket cache:
security.kerberos.login.use-ticket-cache: true
security.kerberos.login.principal: XXXXX@HADOOP.COM

# Or with a keytab instead of the ticket cache:
# security.kerberos.login.use-ticket-cache: false
# security.kerberos.login.keytab: /path/to/kafka.keytab
# security.kerberos.login.principal: XXXX@HADOOP.COM

# Make the generated JAAS entries available to the ZooKeeper and Kafka clients:
security.kerberos.login.contexts: Client,KafkaClient
I haven't actually tested whether this solution works, but you can try it out; hope it helps.
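The Kerberos settings above only cover the JAAS entries. The remaining system properties from the Java snippet (sun.security.jgss.native, sun.security.jgss.lib, javax.security.auth.useSubjectCredsOnly) are not covered by them; one possibility, untested here, is to hand them to the Flink JVMs through env.java.opts in flink-conf.yaml:

env.java.opts: -Dsun.security.jgss.native=true -Dsun.security.jgss.lib=/usr/libexec/libgsswrap.so -Djavax.security.auth.useSubjectCredsOnly=false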
I made a specific port of the db_backup module from OpenERP 7 to run on Odoo 8.
It is properly installed and it does run the backup;
the problem is that the resulting SQL file size is zero KB.
This is the code of backup_scheduler.py:
import xmlrpclib
import socket
import os
import time
import base64

from openerp.osv import fields, osv, orm
from openerp import tools, netsvc
from openerp.tools.translate import _

import logging
_logger = logging.getLogger(__name__)


def execute(connector, method, *args):
    res = False
    try:
        res = getattr(connector, method)(*args)
    except socket.error, e:
        raise e
    return res

addons_path = tools.config['addons_path'] + '/auto_backup/DBbackups'


class db_backup(osv.Model):
    _name = 'db.backup'

    def get_db_list(self, cr, user, ids, host='localhost', port='8069', context={}):
        uri = 'http://' + host + ':' + port
        conn = xmlrpclib.ServerProxy(uri + '/xmlrpc/db')
        db_list = execute(conn, 'list')
        return db_list

    _columns = {
        'host': fields.char('Host', size=100, required='True'),
        'port': fields.char('Port', size=10, required='True'),
        'name': fields.char('Database', size=100, required='True', help='Database you want to schedule backups for'),
        'bkp_dir': fields.char('Backup Directory', size=100, help='Absolute path for storing the backups', required='True')
    }

    _defaults = {
        'bkp_dir': lambda *a: addons_path,
        'host': lambda *a: 'localhost',
        'port': lambda *a: '8069'
    }

    def _check_db_exist(self, cr, user, ids):
        for rec in self.browse(cr, user, ids):
            db_list = self.get_db_list(cr, user, ids, rec.host, rec.port)
            if rec.name in db_list:
                return True
        return False

    _constraints = [
        (_check_db_exist, _('Error ! No such database exists!'), [])
    ]

    def schedule_backup(self, cr, user, context={}):
        conf_ids = self.search(cr, user, [])
        confs = self.browse(cr, user, conf_ids)
        for rec in confs:
            db_list = self.get_db_list(cr, user, [], rec.host, rec.port)
            if rec.name in db_list:
                try:
                    if not os.path.isdir(rec.bkp_dir):
                        os.makedirs(rec.bkp_dir)
                except:
                    raise
                bkp_file = '%s_%s.sql' % (rec.name, time.strftime('%Y%m%d_%H_%M_%S'))
                file_path = os.path.join(rec.bkp_dir, bkp_file)
                fp = open(file_path, 'wb')
                uri = 'http://' + rec.host + ':' + rec.port
                conn = xmlrpclib.ServerProxy(uri + '/xmlrpc/db')
                bkp = ''
                try:
                    bkp = execute(conn, 'dump', tools.config['admin_passwd'], rec.name)
                except:
                    _logger.info("Couldn't backup database %s. Bad database administrator password for server running at http://%s:%s" % (rec.name, rec.host, rec.port))
                    continue
                bkp = base64.decodestring(bkp)
                fp.write(bkp)
                fp.close()
            else:
                _logger.info("database %s doesn't exist on http://%s:%s" % (rec.name, rec.host, rec.port))

db_backup()
This is probably because an error occurs on the backend that does not get propagated back to the client. If you check the server logs at the time you take a backup you will probably see the issue.
Note: if you need a script to get a backup from, or restore a database to, a v7, v8 or v9 server, take a look at https://github.com/daramousk/odoo_remote_backup
I have developed a script for this specific reason which you can use or change to resolve your issue.
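To narrow it down, you can also try the dump call on its own, outside of Odoo; if the file is still empty, the problem is on the server side. A minimal sketch, mirroring what schedule_backup does (host, port, master password and database name are assumptions to adapt):

import base64
import xmlrpclib

conn = xmlrpclib.ServerProxy('http://localhost:8069/xmlrpc/db')
# 'dump' takes the server's master (admin) password and the database name
data = conn.dump('admin_passwd', 'mydb')
with open('/tmp/mydb.sql', 'wb') as fp:
    fp.write(base64.decodestring(data))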
When I query the Rally REST API (.NET) I am able to get all the user values except the following fields:
Role
CostCenter
LastLoginDate
OfficeLocation
c_RoleType
CreationDate
When I open the URL "https://rally1.rallydev.com/slm/webservice/v2.0/user/xxxxxxxxxx" in a browser I am able to access all of the above fields.
However, when I use the REST API the result set doesn't include any of them.
Not sure if I need to do anything differently.
request = new Request("user");
request.Fetch = fetchList;
request.Workspace = RallyInterface.workspaceRef;
QueryResult queryResult = restApi.Query(request);
PS: In the above example the Fetch list is empty, and the aim is to fetch all the possible user fields. When I debug I am only able to get the following 18 fields:
[0] "_rallyAPIMajor" string
[1] "_rallyAPIMinor" string
[2] "_ref" string
[3] "_refObjectUUID" string
[4] "_objectVersion" string
[5] "_refObjectName" string
[6] "ObjectID" string
[7] "Department" string
[8] "Disabled" string
[9] "DisplayName" string
[10] "EmailAddress" string
[11] "FirstName" string
[12] "LastName" string
[13] "MiddleName" string
[14] "Phone" string
[15] "Role" string
[16] "UserName" string
[17] "_type" string
Also, when I use GetByReference(), I am able to get the value for "Role" but not for any of the following fields:
CostCenter
LastLoginDate
OfficeLocation
c_RoleType
CreationDate
The GetByReference() response returns a "field not found" message.
The fetchList should include those fields, or it should be omitted entirely; the code example below returns those fields in both cases. That is, if I comment out userRequest.Fetch in the code below, those fields will still be returned. If a fetch is present but does not include those fields, they will not be returned.
userRequest.Fetch = new List<string>()
{
    "Role",
    "CostCenter",
    "LastLoginDate",
    "OfficeLocation",
    "CreationDate"
};
userRequest.Query = new Query("UserName", Query.Operator.Equals, "user@co.com");
QueryResult userResults = restApi.Query(userRequest);
foreach (var u in userResults.Results)
{
    Console.WriteLine("Role: " + u["Role"] + " CostCenter: " + u["CostCenter"] + " LastLoginDate: " + u["LastLoginDate"] + " OfficeLocation: " + u["OfficeLocation"] + " CreationDate: " + u["CreationDate"]);
}
Here is another variation of this code when GetByReference is used:
userRequest.Query = new Query("UserName", Query.Operator.Equals, "user@co.com");
QueryResult userResults = restApi.Query(userRequest);
String userRef = userResults.Results.First()._ref;
DynamicJsonObject user = restApi.GetByReference(userRef, "Name", "Role", "CostCenter", "LastLoginDate", "OfficeLocation", "CreationDate");
String role = user["Role"];
String costCenter = user["CostCenter"];
String lastLoginDate = user["LastLoginDate"];
String officeLocation = user["OfficeLocation"];
String creationDate = user["CreationDate"];
Console.WriteLine("Role: " + role + " CostCenter: " + costCenter + " LastLoginDate: " + lastLoginDate + " OfficeLocation: " + officeLocation + " CreationDate: " + creationDate);
The full code is available in this github repo.
In my sample app the following snippet works fine.
SELECT_CQL = "SELECT * FROM " + STREAM_NAME_IN_CASSANDRA + " WHERE '" + CONTEXT_ID_COLUMN + "'=?";
Connection connection = getConnection();
statement = connection.prepareStatement(SELECT_CQL);
statement.setString(1, "123");
resultSet = statement.executeQuery();
But when I try to add another parameter to the WHERE clause, the query returns nothing:
SELECT_CQL = "SELECT * FROM " + STREAM_NAME_IN_CASSANDRA + " WHERE '" + CONTEXT_ID_COLUMN + "'=? AND '"+TIMESTAMP_COLUMN+"'=?";
Connection connection = getConnection();
statement = connection.prepareStatement(SELECT_CQL);
statement.setString(1, "123");
statement.setString(2, "1390996577514");
resultSet = statement.executeQuery();
When I try the exact query within cqlsh terminal, it works fine.
statement.setString(2, "1390996577514");
Double-check the datatype of your TIMESTAMP_COLUMN and make sure that it's a string. Otherwise you'll need to use the appropriate "set" method. Ex:
statement.setLong(2, 1390996577514L);