"Could not lookup Ecto repo" error during tests - postgresql

I've been getting the following exception when trying to set up a new Ecto repo. Currently I am only using it for testing (since I am just setting this up).
** (RuntimeError) could not lookup Ecto repo Lipwig.AnnotatedUnit.Repo because it was not started or it does not exist
This happens when my tests use Lipwig.AnnotatedUnit.DataCase. However, when I use Lipwig.DataCase (which references Lipwig.Repo), the tests run fine.
I have the following setup in the project:
defmodule Lipwig.Repo do
  use Ecto.Repo,
    otp_app: :lipwig,
    adapter: Ecto.Adapters.Postgres
end

defmodule Lipwig.AnnotatedUnit.Repo do
  use Ecto.Repo,
    otp_app: :lipwig,
    adapter: Ecto.Adapters.Postgres
end
Here is my test setup:
defmodule Lipwig.DataCase do
  use ExUnit.CaseTemplate

  using do
    quote do
      alias Lipwig.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Lipwig.DataCase
    end
  end

  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Lipwig.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Lipwig.Repo, {:shared, self()})
    end

    :ok
  end

  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      Regex.replace(~r"%{(\w+)}", message, fn _, key ->
        opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
      end)
    end)
  end
end
defmodule Lipwig.AnnotatedUnit.DataCase do
  use ExUnit.CaseTemplate

  using do
    quote do
      alias Lipwig.AnnotatedUnit.Repo

      import Ecto
      import Ecto.Changeset
      import Ecto.Query
      import Lipwig.AnnotatedUnit.DataCase
    end
  end

  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Lipwig.AnnotatedUnit.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Lipwig.AnnotatedUnit.Repo, {:shared, self()})
    end

    :ok
  end

  def errors_on(changeset) do
    Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
      Regex.replace(~r"%{(\w+)}", message, fn _, key ->
        opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
      end)
    end)
  end
end
As you can see, the two DataCase modules are almost identical; they differ only in which repo they reference.
My config.exs defines:
config :lipwig,
  ecto_repos: [Lipwig.Repo, Lipwig.AnnotatedUnit.Repo]
My test.exs config is:
config :lipwig, Lipwig.Repo,
  username: "postgres",
  password: "postgres",
  database: "lipwig_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox

config :lipwig, Lipwig.AnnotatedUnit.Repo,
  username: "postgres",
  password: "postgres",
  database: "lipwig_test",
  hostname: "localhost",
  pool: Ecto.Adapters.SQL.Sandbox

As far as I can tell, the configurations look fine. As the error suggests, the issue is most likely that you are not starting Lipwig.AnnotatedUnit.Repo with your application: listing a repo under ecto_repos only tells the mix ecto.* tasks about it; it does not start the repo.
Look for the application.ex file (typically lib/lipwig/application.ex) and make sure that Lipwig.AnnotatedUnit.Repo is added to the children list, as in the sketch below.
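For example, with a standard generated application module (the module layout here is illustrative; adapt it to your own supervision tree):
defmodule Lipwig.Application do
  use Application

  @impl true
  def start(_type, _args) do
    children = [
      Lipwig.Repo,
      # The second repo must be supervised as well, otherwise the
      # sandbox cannot check it out and raises "could not lookup Ecto repo"
      Lipwig.AnnotatedUnit.Repo
    ]

    Supervisor.start_link(children, strategy: :one_for_one, name: Lipwig.Supervisor)
  end
end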
Hope this resolves it 🥂

Related

Flask REST API SQLAlchemy connection to Cloud SQL Postgres

I have a connection problem with Cloud SQL Postgres from my Flask REST API app.
I have a db.py file:
import os

from flask_sqlalchemy import SQLAlchemy
import sqlalchemy

db = SQLAlchemy()

def connect_unix_socket() -> sqlalchemy.engine.base.Engine:
    """Initializes a Unix socket connection pool for a Cloud SQL instance of Postgres."""
    # Note: Saving credentials in environment variables is convenient, but not
    # secure - consider a more secure solution such as
    # Cloud Secret Manager (https://cloud.google.com/secret-manager) to help
    # keep secrets safe.
    db_user = os.environ["DB_USER"]  # e.g. 'my-database-user'
    db_pass = os.environ["DB_PASS"]  # e.g. 'my-database-password'
    db_name = os.environ["DB_NAME"]  # e.g. 'my-database'
    unix_socket_path = os.environ["INSTANCE_UNIX_SOCKET"]  # e.g. '/cloudsql/project:region:instance'

    pool = sqlalchemy.create_engine(
        # Equivalent URL:
        # postgresql+pg8000://<db_user>:<db_pass>@/<db_name>
        #     ?unix_sock=<INSTANCE_UNIX_SOCKET>/.s.PGSQL.5432
        # Note: Some drivers require the `unix_sock` query parameter to use a different key.
        # For example, 'psycopg2' uses the path set to `host` in order to connect successfully.
        sqlalchemy.engine.url.URL.create(
            drivername="postgresql+pg8000",
            username=db_user,
            password=db_pass,
            database=db_name,
            query={"unix_sock": "{}/.s.PGSQL.5432".format(unix_socket_path)},
        ),
        # Pool size is the maximum number of permanent connections to keep.
        pool_size=5,
        # Temporarily exceeds the set pool_size if no connections are available.
        max_overflow=2,
        # The total number of concurrent connections for your application will be
        # a total of pool_size and max_overflow.
        # 'pool_timeout' is the maximum number of seconds to wait when retrieving a
        # new connection from the pool. After the specified amount of time, an
        # exception will be thrown.
        pool_timeout=30,  # 30 seconds
        # 'pool_recycle' is the maximum number of seconds a connection can persist.
        # Connections that live longer than the specified amount of time will be
        # re-established.
        pool_recycle=1800,  # 30 minutes
    )
    return pool
I import the db.py file in my app.py file:
import os

import sqlalchemy
from flask import Flask
from flask_smorest import Api
from flask_sqlalchemy import SQLAlchemy

from db import db, connect_unix_socket
import models
from resources.user import blp as UserBlueprint

# pylint: disable=C0103
app = Flask(__name__)

def init_connection_pool() -> sqlalchemy.engine.base.Engine:
    # use a Unix socket when INSTANCE_UNIX_SOCKET (e.g. /cloudsql/project:region:instance) is defined
    unix_socket_path = os.environ.get("INSTANCE_UNIX_SOCKET")
    if unix_socket_path:
        return connect_unix_socket()

    raise ValueError(
        "Missing database connection type. Please define one of INSTANCE_HOST, "
        "INSTANCE_UNIX_SOCKET, or INSTANCE_CONNECTION_NAME"
    )

db = None

@app.before_first_request
def init_db() -> sqlalchemy.engine.base.Engine:
    global db
    db = init_connection_pool()

api = Api(app)

@app.route("/api")
def user_route():
    return "Welcome user API!"

api.register_blueprint(UserBlueprint)

if __name__ == '__main__':
    server_port = os.environ.get('PORT', '8080')
    app.run(debug=True, port=server_port, host='0.0.0.0')
The app runs correctly, but when I call the endpoint to GET or POST users, the app crashes and gives me this error:
"The current Flask app is not registered with this 'SQLAlchemy'"
RuntimeError: The current Flask app is not registered with this 'SQLAlchemy' instance. Did you forget to call 'init_app', or did you create multiple 'SQLAlchemy' instances?
This is my User.py class:
from flask.views import MethodView
from flask_smorest import Blueprint
from sqlalchemy.exc import SQLAlchemyError, IntegrityError

from db import db
from models import UserModel
from schemas import UserSchema

blp = Blueprint("Users", "users", description="Operations on users")

@blp.route("/user/<string:user_id>")
class User(MethodView):
    @blp.response(200, UserSchema)
    def get(self, user_id):
        user = UserModel.query.get_or_404(user_id)
        return user

    def delete(self, user_id):
        user = UserModel.query.get_or_404(user_id)
        db.session.delete(user)
        db.session.commit()
        return {"message": "User deleted"}, 200

@blp.route("/user")
class UserList(MethodView):
    @blp.response(200, UserSchema(many=True))
    def get(self):
        return UserModel.query.all()
How can I fix this issue?
@dev_ Your issue is that you are trying to intermingle SQLAlchemy Core with the SQLAlchemy ORM as if they were the same thing. Connection pools created with sqlalchemy.create_engine use the Core API, while Flask-SQLAlchemy uses the SQLAlchemy ORM model. This is the core reason for your issue. It is easier to use one or the other.
I would recommend using Flask-SQLAlchemy exclusively, together with the cloud-sql-python-connector library, for your use case. It will make your life much easier.
For simplicity, I am getting rid of your db.py, leaving your app.py file as follows:
import os

from flask import Flask
from flask_smorest import Api
from flask_sqlalchemy import SQLAlchemy
from google.cloud.sql.connector import Connector, IPTypes

from resources.user import blp as UserBlueprint

# load env vars
db_user = os.environ["DB_USER"]  # e.g. 'my-database-user'
db_pass = os.environ["DB_PASS"]  # e.g. 'my-database-password'
db_name = os.environ["DB_NAME"]  # e.g. 'my-database'
instance_connection_name = os.environ["INSTANCE_CONNECTION_NAME"]  # e.g. 'project:region:instance'

# Python Connector database connection function
def getconn():
    with Connector() as connector:
        conn = connector.connect(
            instance_connection_name,  # Cloud SQL Instance Connection Name
            "pg8000",
            user=db_user,
            password=db_pass,
            db=db_name,
            ip_type=IPTypes.PUBLIC  # IPTypes.PRIVATE for private IP
        )
        return conn

app = Flask(__name__)

# configure Flask-SQLAlchemy to use Python Connector
app.config['SQLALCHEMY_DATABASE_URI'] = "postgresql+pg8000://"
app.config['SQLALCHEMY_ENGINE_OPTIONS'] = {
    "creator": getconn
}

# initialize db (using app!)
db = SQLAlchemy(app)

# rest of your code
api = Api(app)
# ...
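As a side note, the RuntimeError above can also be fixed while keeping a shared db = SQLAlchemy() instance in db.py, by registering the app with it via init_app. A minimal sketch (reusing the getconn defined above):
# db.py
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()

# app.py
from flask import Flask

from db import db

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = "postgresql+pg8000://"
app.config['SQLALCHEMY_ENGINE_OPTIONS'] = {"creator": getconn}

# Registering the app with the shared instance is exactly what the
# "Did you forget to call 'init_app'?" message asks for.
db.init_app(app)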
Hope this helps resolve your issue!

Wasm-rust and postgres

Is there a way to get data from a Postgres database using WASM? I tried to do it with a Rust library, but I get some errors when I build the package using "wasm-pack build --target web". The idea is to have a function in the lib.rs file that returns data from a DB. I have the code below inside lib.rs:
use postgres::{Client, Error, NoTls};
use wasm_bindgen::prelude::*;

...

struct Author {
    _id: i32,
    name: String,
}

#[wasm_bindgen]
pub fn select_name(name: &String) -> Result<(), Error> {
    let mut client = Client::connect("postgresql://user:1234@localhost:5432/db", NoTls)?;

    for row in client.query(
        "SELECT id, name FROM author WHERE name = $1",
        &[&name],
    )? {
        let author = Author {
            _id: row.get(0),
            name: row.get(1),
        };
        println!(
            "Select_Name => Author {} :",
            author.name
        );
    }

    Ok(())
}
but I get some errors:
error[E0432]: unresolved import `crate::sys::IoSourceState`
error[E0432]: unresolved import `crate::sys`
...
It is not possible directly (as Njuguna Mureithi rightly wrote), but it can be worked around.
We can use the PostgREST project, https://github.com/PostgREST/postgrest, to expose a REST API over our SQL server.
Download the latest version of PostgREST:
https://github.com/PostgREST/postgrest/releases/tag/v9.0.0
On Linux, unpack it:
tar -xf postgrest-v9.0.0-linux-static-x64.tar.xz
then check the help:
./postgrest -h
Create a configuration file for postgrest:
./postgrest -e > cfg.psqlrest
Change the database user and password in the configuration file, e.g. from
db-uri = "postgres://user:password@localhost:5432/dbname"
to the access credentials for your dbname database:
db-uri = "postgres://postgres:zaqwsxc@localhost:5432/dbname"
and run the server, which will expose the API to our Postgres:
./postgrest cfg.psqlrest
The server at http://localhost:3000 will provide access to the database dbname, which must already exist on the database server.
Here is a description of the libraries needed to call the query using the API:
https://rustwasm.github.io/wasm-bindgen/examples/fetch.html
and examples of the PostgREST API:
https://postgrest.org/en/stable/api.html
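For instance, here is a minimal wasm-bindgen fetch sketch that queries the PostgREST endpoint above. It assumes an author table exposed at http://localhost:3000/author and the web-sys crate with the Headers, Request, RequestInit, RequestMode, Response, and Window features enabled (newer web-sys versions may prefer set_method/set_mode over the builder-style calls):
use wasm_bindgen::prelude::*;
use wasm_bindgen::JsCast;
use wasm_bindgen_futures::JsFuture;
use web_sys::{Request, RequestInit, RequestMode, Response};

// Query the `author` table through the PostgREST API instead of
// connecting to Postgres directly from wasm.
#[wasm_bindgen]
pub async fn fetch_authors() -> Result<JsValue, JsValue> {
    let mut opts = RequestInit::new();
    opts.method("GET");
    opts.mode(RequestMode::Cors);

    let request = Request::new_with_str_and_init("http://localhost:3000/author", &opts)?;
    request.headers().set("Accept", "application/json")?;

    let window = web_sys::window().unwrap();
    let resp_value = JsFuture::from(window.fetch_with_request(&request)).await?;
    let resp: Response = resp_value.dyn_into()?;

    // PostgREST returns the matching rows as a JSON array.
    let json = JsFuture::from(resp.json()?).await?;
    Ok(json)
}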

Karate + Gradle : Is there a way to get environment variable value in a .java file and define Postgres DB configuration based on env variable value? [duplicate]

I have a DB utils Java file in which I need to load the DB username and password depending on the environment I am running the code on, and these values need to come from karate-config.js. How can I achieve this?
Just use embedded expressions! So if you have dbusername and dbpassword set in karate-config.js:
* def config = { username: '#(dbusername)', password: '#(dbpassword)', url: 'jdbc:h2:mem:testdb', driverClassName: 'org.h2.Driver' }
* def DbUtils = Java.type('com.mycompany.DbUtils')
* def db = new DbUtils(config)
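And karate-config.js can supply those values per environment; a sketch along these lines, where the credential values are placeholders:
function fn() {
  var env = karate.env; // value of the 'karate.env' system property
  if (!env) {
    env = 'dev';
  }
  var config = {
    dbusername: 'dev_user',
    dbpassword: 'dev_pass'
  };
  if (env === 'qa') {
    config.dbusername = 'qa_user';
    config.dbpassword = 'qa_pass';
  }
  return config;
}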

Creating postgres schemas using psycopg cur.execute

My Python application allows users to create schemas with names of their own choosing. I need a way to protect the application from SQL injection.
The SQL to be executed reads
CREATE SCHEMA schema_name AUTHORIZATION user_name;
The psycopg documentation (generally) recommends passing parameters to execute like so
conn = psycopg2.connect("dbname=test user=postgres")
cur = conn.cursor()
query = 'CREATE SCHEMA IF NOT EXISTS %s AUTHORIZATION %s;'
params = ('schema_name', 'user_name')
cur.execute(query, params)
But this results in a query with single-quoted identifiers, which fails:
CREATE SCHEMA 'schema_name' AUTHORIZATION 'user_name';
> fail
Is there a way to remove the quotes, or should I just settle for stripping non-alphanumeric characters from the schema name and call it a day? The latter seems kind of ugly, but should still work.
To pass identifiers, use AsIs. But that exposes you to SQL injection:
import psycopg2
from psycopg2.extensions import AsIs

conn = psycopg2.connect(database='cpn')
cursor = conn.cursor()
query = """CREATE SCHEMA %s AUTHORIZATION %s;"""
param = (AsIs('u1'), AsIs('u1; select * from user_table'))
print(cursor.mogrify(query, param))
Output:
CREATE SCHEMA u1 AUTHORIZATION u1; select * from user_table;
Here's some boilerplate that might help. I've used environment variables, but you can use a .conf file or whatever you like.
Store your connection variables in a .env file:
db_host = "localhost"
db_port = "5432"
db_database = "postgres"
db_user = "postgres"
db_password = "postgres"
db_schema = "schema2"
Load the params in your app.py and assign them to variables, then use the variables where required:
import os

import psycopg2
from dotenv import load_dotenv

# Load your environment variables here:
load_dotenv()
db_host = os.environ["db_host"]
db_port = os.environ["db_port"]
db_database = os.environ["db_database"]
db_user = os.environ["db_user"]
db_password = os.environ["db_password"]
db_schema = os.environ["db_schema"]

# Build Connection:
connection = psycopg2.connect(host=db_host,
                              port=db_port,
                              database=db_database,
                              user=db_user,
                              password=db_password)

# Build Query Strings:
CREATE_SCHEMA = f"CREATE SCHEMA IF NOT EXISTS {db_schema};"
CREATE_TABLE1 = f"CREATE TABLE IF NOT EXISTS {db_schema}.table1 (...);"
CREATE_TABLE2 = f"CREATE TABLE IF NOT EXISTS {db_schema}.table2 (...);"

# Create Schema and Tables:
with connection:
    with connection.cursor() as cursor:
        cursor.execute(CREATE_SCHEMA)
        cursor.execute(CREATE_TABLE1)
        cursor.execute(CREATE_TABLE2)
As of psycopg2 >= 2.7, the psycopg2.sql module can be used to compose dynamic statements, which also guards against SQL injection.
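For example, a minimal sketch reusing the connection from the boilerplate above (the schema and role names are illustrative placeholders):
from psycopg2 import sql

# sql.Identifier quotes the names safely, so user-supplied values cannot
# break out of the statement the way AsIs allows.
query = sql.SQL("CREATE SCHEMA IF NOT EXISTS {} AUTHORIZATION {}").format(
    sql.Identifier("schema_name"),
    sql.Identifier("user_name"),
)

with connection:
    with connection.cursor() as cursor:
        cursor.execute(query)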

Mongodb authentication using elixir-mongo

I have just started using Elixir, so I figure I have some basic misunderstanding going on here. Here is the code...
defmodule Mdb do
  def connect(collection, this_db \\ "db-test") do
    {:ok, mongo} = Mongo.connect("db-test.some-mongo-server.com", 12345)
    db = mongo |> Mongo.db(this_db)
    db |> Mongo.auth("user", "secretpassword")
    db
  end
end
I start with iex -S mix, and when I try db = Mdb.connect("users") I get:
** (UndefinedFunctionError) undefined function: Mongo.auth/3
Mongo.auth(%Mongo.Db{auth: nil, mongo: %Mongo.Server{host: 'db-test.some-mongo-server.com', id_prefix: 12641, mode: :passive, opts: %{}, port: 12345, socket: #Port<0.5732>, timeout: 6000}, name: "db-stage", opts: %{mode: :passive, timeout: 6000}}, "user", "secretpassword")
(mdb_play) lib/mdb.ex:7: Mdb.connect/2
It looks like Mongo.auth/3 is undefined, but that makes no sense to me. Can anyone point me towards my error?
Thanks for the help.
I just played around with it and hit the same error. As the error message says, Mongo.auth seems not to be defined; it should probably be Mongo.Db.auth instead. However, I hit another error (ArgumentError) on Mongo.Db.auth too. It may be an issue in the library.
** (ArgumentError) argument error
:erlang.byte_size
...
(mongo) lib/mongo_request.ex:43: Mongo.Request.cmd/3
(mongo) lib/mongo_db.ex:44: Mongo.Db.auth/1
I'm not familiar with the library, but after a small change in Mongo.Db.auth, the normal call sequence started working.
I tried the following sequence:
mongo = Mongo.connect!(server, port)
db = mongo |> Mongo.db(db_name)
db |> Mongo.Db.auth(user_name, password)
collection = db |> Mongo.Db.collection(collection_name)
collection |> Mongo.Collection.count()
The change I tried is in the following fork repo:
https://github.com/parroty/elixir-mongo
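To try that fork, you can point your mix dependency at it (the dependency name here is assumed from the library; adjust it to match your mix.exs):
# mix.exs
defp deps do
  [
    # use the patched fork instead of the published package
    {:mongo, github: "parroty/elixir-mongo"}
  ]
end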