EclipseLink entity mappings cache - JPA

I am using EclipseLink for my project.
I extend XMLMetadataSource (to provide a custom class loader) because the entities I persist are created at runtime, and that works OK.
I get "unknown entity type" when I do the following:
Create the entity.
Create the mapping.
Create the entity manager factory, providing the custom class loader.
Create the entity manager and persist -- this works OK.
Now drop the entity and remove it from the class loader.
Create the same entity again.
Create the mapping again (of course it looks the same).
Try to refresh the entity manager factory with new properties (new class loader, mapping file).
Try to persist -- it complains "unknown type".
Any idea whether EclipseLink caches the XML mappings?
I tried re-creating the factory, but I get the same error.
I tried MySQL and Derby, with both 'drop-and-create-tables' and 'create-or-extend-tables'.
Same result.

I filed a bug with EclipseLink:
https://bugs.eclipse.org/bugs/show_bug.cgi?id=426310

It's not a bug per se in EclipseLink. The problem is that EclipseLink does not rebuild its 'class --> class_descriptor' map (an internal map that holds the Class object of each entity together with the entity's descriptor). I found this accidentally. For those interested, here is sample code that might help.
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.eclipse.persistence.config.PersistenceUnitProperties;
import org.eclipse.persistence.jpa.JpaHelper;

public class Test1 {

    private final String pu_name;
    private final Map<String, Object> properties;
    private final MyClassLoader loader;   // custom class loader holding the runtime-created classes
    private WAMetadataSource mms;         // custom XMLMetadataSource subclass
    private EntityManagerFactory emf;
    private EntityManager em;

    public Test1(String pu, Map<String, Object> props) {
        pu_name = pu;
        properties = new HashMap<String, Object>();
        properties.putAll(props);
        loader = new MyClassLoader();
    }

    public void initialization() {
        mms = new WAMetadataSource();
        properties.put(PersistenceUnitProperties.METADATA_SOURCE, mms);
        properties.put(PersistenceUnitProperties.CLASSLOADER, loader);
        if (emf == null || !emf.isOpen()) {
            synchronized (Test1.class) {
                if (emf == null || !emf.isOpen()) {
                    emf = Persistence.createEntityManagerFactory(pu_name, properties);
                }
            }
        } else {
            // ask EclipseLink to re-read the metadata with the new class loader / mapping file
            JpaHelper.getEntityManagerFactory(emf).refreshMetadata(properties);
        }
        System.out.println("======> refreshed. emf.hashCode : " + emf.hashCode() + ", loader.hashCode : " + loader.hashCode());
    }

    public EntityManager getEntityManager(Map<String, Object> props) {
        if (em == null) {
            em = emf.createEntityManager(props);
        }
        return em;
    }

    public void persist(Object obj) {
        getEntityManager(properties);
        System.out.println("===> em.hashCode = " + em.hashCode() + ", "
                + JpaHelper.getEntityManager(em).getProperties().get(PersistenceUnitProperties.CLASSLOADER).hashCode());
        em.clear();
        em.getTransaction().begin();
        em.persist(obj);
        em.getTransaction().commit();
    }

    public void close() {
        if (em != null) { em.close(); em = null; }
        if (emf != null && emf.isOpen()) { emf.close(); }
    }

    public Object getRuntimeEntityObject(int ii) throws Exception {
        Class<?> clazz = loader.loadClass("com.xxx.sample.entity.runtime.User");
        Object obj;
        if (ii == 1) {
            obj = clazz.getConstructor(String.class).newInstance("Jai Ramjiki-1");
        } else {
            obj = clazz.getConstructor(String.class).newInstance("Jai Ramjiki-2");
        }
        return clazz.cast(obj);
    }

    public static void main(String[] args) throws Exception {
        Map<String, Object> props = new HashMap<String, Object>();
        props.put(PersistenceUnitProperties.JDBC_DRIVER, "com.mysql.jdbc.Driver");
        props.put(PersistenceUnitProperties.JDBC_URL, "jdbc:mysql://localhost:3306/test");
        props.put(PersistenceUnitProperties.JDBC_USER, "root");
        props.put(PersistenceUnitProperties.JDBC_PASSWORD, "root");
        props.put(PersistenceUnitProperties.DDL_GENERATION, "create-or-extend-tables");

        Test1 t1 = new Test1("mysql", props);
        Object obj1 = t1.getRuntimeEntityObject(1);
        System.out.println(" ****> obj1 = " + obj1 + ", classloader hashCode : " + obj1.getClass().getClassLoader().hashCode());
        t1.initialization();
        t1.persist(obj1);
        System.out.println("Class 1 : " + obj1.getClass().hashCode() + ", obj1 : " + obj1);
        t1.close();

        // now drop the previous class loader and rerun the same steps
        Test1 t2 = new Test1("mysql", props);
        Object obj2 = t2.getRuntimeEntityObject(2);
        System.out.println(" ****> obj2 = " + obj2 + ", classloader hashCode : " + obj2.getClass().getClassLoader().hashCode());
        t2.initialization();
        t2.persist(obj2);
        t2.close();

        Object obj3 = t1.getRuntimeEntityObject(1);
        System.out.println(" ****> obj3 = " + obj3 + ", classloader hashCode : " + obj3.getClass().getClassLoader().hashCode());
        t1.persist(obj3);
    }
}
And extend XMLMetadataSource:
@Override
public XMLEntityMappings getEntityMappings(Map<String, Object> properties, ClassLoader classLoader, SessionLog log) {
    properties.put(PersistenceUnitProperties.METADATA_SOURCE_XML_FILE, "eclipselink-orm-user.xml");
    properties.put(PersistenceUnitProperties.VALIDATOR_FACTORY, null);
    return super.getEntityMappings(properties, classLoader, log);
}
And create a runtime class using Javassist in your custom class loader (which extends ClassLoader):
// 'pool' is a javassist.ClassPool field on the class loader;
// 'loadedClasses' and 'loadClassBytes' are maps kept by the loader.
public void createRuntimeClass(String className) throws Exception {
    CtClass bclass = pool.makeClass(className);
    bclass.addConstructor(CtNewConstructor.defaultConstructor(bclass));
    Map<String, String> fields = new HashMap<String, String>();
    addFields(fields);
    for (Entry<String, String> field : fields.entrySet()) {
        String fieldName = field.getKey();
        String fieldType = field.getValue();
        // .. code to build the CtField 'bfield' for fieldName/fieldType
        bclass.addField(bfield);
        // .. add getter and setter methods
    }
    CtConstructor userConstructor = CtNewConstructor.make(constructorSource, bclass);
    bclass.addConstructor(userConstructor);
    byte[] bytes = bclass.toBytecode();
    Class<?> cls = bclass.toClass(this, null);
    loadedClasses.put(className, cls);
    loadClassBytes.put(className, bytes);
}
And override the loadClass and getResourceAsStream methods:
public Class<?> loadClass(String name) throws ClassNotFoundException { Class<?> c = loadedClasses.get(name); return c != null ? c : super.loadClass(name); }
public InputStream getResourceAsStream(String name) { byte[] b = loadClassBytes.get(name); return b != null ? new ByteArrayInputStream(b) : super.getResourceAsStream(name); }
Hope this helps.
EclipseLink does provide ways to clear the cache of the current project and to set the descriptor maps, but none of them worked for me. I am not sure whether that is intended behavior or whether those APIs were exposed by mistake.
Gopi
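For reference, the kind of EclipseLink calls the post above alludes to when it mentions the project cache and the descriptor maps might look like the sketch below (these APIs exist, but as noted they did not solve the metadata problem; the helper class is purely illustrative):
import java.util.Map;
import javax.persistence.EntityManagerFactory;
import org.eclipse.persistence.jpa.JpaHelper;
import org.eclipse.persistence.sessions.server.ServerSession;

public class DescriptorInspection {
    // Dumps the internal class -> descriptor map and clears the shared object caches.
    public static void inspect(EntityManagerFactory emf) {
        ServerSession session = JpaHelper.getServerSession(emf);
        // the internal Class -> ClassDescriptor map mentioned above
        Map descriptors = session.getProject().getDescriptors();
        System.out.println("known entity classes: " + descriptors.keySet());
        // clears the shared object cache (not the metadata / descriptor map)
        session.getIdentityMapAccessor().initializeAllIdentityMaps();
    }
}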

Yes, persistence unit loading is only done once. If you are using an XMLMetadataSource to change mappings, you must tell the factory to refresh its mappings using refreshMetadata() on the EMF as described here:
http://wiki.eclipse.org/EclipseLink/DesignDocs/340192#Refresh
After that, the next EntityManagers obtained will be using the new mappings, while existing EMs will still use the old mappings.
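For illustration, a minimal sketch of that refresh flow (the helper class and parameter names are made up for the example; refreshMetadata() and the property keys are the EclipseLink APIs referenced above):
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import org.eclipse.persistence.config.PersistenceUnitProperties;
import org.eclipse.persistence.jpa.JpaHelper;

public class RefreshExample {
    public static EntityManager refreshAndGetEm(EntityManagerFactory emf,
                                                ClassLoader newLoader,
                                                Map<String, Object> baseProps) {
        Map<String, Object> props = new HashMap<String, Object>(baseProps);
        // hand EclipseLink the new class loader (the metadata source supplies the new mapping file)
        props.put(PersistenceUnitProperties.CLASSLOADER, newLoader);
        // tell the existing factory to re-read its metadata
        JpaHelper.getEntityManagerFactory(emf).refreshMetadata(props);
        // EntityManagers created from now on use the new mappings;
        // EntityManagers created earlier keep the old metadata.
        return emf.createEntityManager(props);
    }
}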


Using a pre-populated database in Room by copying the database file from assets

I want to use a pre-populated database in Android Room. I found a way to do it using the callback, by copying over the database file.
But something is wrong: I am sure the database file is copied normally, but it remains empty in the Device Monitor and the Android emulator. Can you please help me?
public abstract class AppDatabase extends RoomDatabase {
    private static AppDatabase INSTANCE;
    private static final String DB_NAME = "base.db";
    static Context ctx;

    public abstract Dao dao();

    public static AppDatabase getDatabase(Context context) {
        if (INSTANCE == null) {
            ctx = context;
            synchronized (AppDatabase.class) {
                if (INSTANCE == null) {
                    INSTANCE = Room.databaseBuilder(context,
                            AppDatabase.class, DB_NAME)
                            .allowMainThreadQueries()
                            .addCallback(rdc)
                            .build();
                }
            }
        }
        return INSTANCE;
    }

    private static RoomDatabase.Callback rdc = new RoomDatabase.Callback() {
        @Override
        public void onCreate(@NonNull SupportSQLiteDatabase db) {
            new PopulateDbAsync(INSTANCE, ctx).execute();
            Log.d("db create ", "table created when db created first time in onCreate");
        }

        @Override
        public void onOpen(@NonNull SupportSQLiteDatabase db) {
            ContentValues contentValues = new ContentValues();
        }
    };

    private static class PopulateDbAsync extends AsyncTask<Void, Void, Void> {
        private Dao dao;
        AssetManager assetManager = ctx.getAssets();

        PopulateDbAsync(AppDatabase db, Context context) {
            dao = db.dao();
            ctx = context;
        }

        @Override
        protected Void doInBackground(final Void... params) {
            String DB_PATH = "/data/data/mypackage/databases/";
            String DB_NAME = "base.db";
            try {
                Log.d("AppDatabase", "Trying to copy database file");
                InputStream myInput = ctx.getAssets().open("base.db");
                OutputStream myOutput = new FileOutputStream(DB_PATH + DB_NAME);
                byte[] buffer = new byte[1024];
                int length;
                while ((length = myInput.read(buffer)) > 0) {
                    myOutput.write(buffer, 0, length);
                }
                myInput.close();
                myOutput.flush();
                myOutput.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
            return null;
        }
    }
}
I solved it after spending six hours on research and R&D.
Context: I want to put an already existing finaldb.db (which is present inside the assets folder) into the Room database.
Step 1:
Copy the framework files from here: link
Step 2:
You need to migrate; don't worry, I have the code :)
@Database(entities = {Status.class}, version = 1, exportSchema = false)
public abstract class AppDatabase extends RoomDatabase {
    public abstract DataDao StatusDao();

    private static AppDatabase INSTANCE;

    public static AppDatabase getDatabase(Context context) {
        if (INSTANCE == null) {
            INSTANCE = createDatabase(context);
        }
        return (INSTANCE);
    }

    private static final Migration MIGRATION_2_3 = new Migration(1, 2) {
        @Override
        public void migrate(@NonNull SupportSQLiteDatabase database) {
            Log.d("kkkk", "bc");
            String SQL_CREATE_TABLE = "CREATE TABLE IF NOT EXISTS 'Status' " +
                    "( 'id' INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT," +
                    " 'category' TEXT NOT NULL," +
                    " 'sub_category' TEXT NOT NULL," +
                    " 'content' TEXT NOT NULL," +
                    " 'favourite' INTEGER DEFAULT(0))";
            database.execSQL(SQL_CREATE_TABLE);
        }
    };

    private static AppDatabase createDatabase(Context context) {
        RoomDatabase.Builder<AppDatabase> builder =
                Room.databaseBuilder(context.getApplicationContext(), AppDatabase.class,
                        context.getString(R.string.dbase_name));
        return (builder.openHelperFactory(new AssetSQLiteOpenHelperFactory())
                .allowMainThreadQueries()
                .addMigrations(MIGRATION_2_3)
                .build());
    }
}
In MIGRATION_2_3 you have to create the table exactly the same as in the current database (the one present in the assets folder).
Want to learn about migrations?
Step 3:
Now the table is created successfully in the Room database!
In case of a crash, check your Logcat; the error is written there in an understandable form.
You cannot properly copy the database in the onCreate method.
When onCreate is called, the database has already been created (the created database is passed to the method). You have to do the copy before onCreate and before the database is opened.
You could override RoomDatabase's init method and do the copy from there, or do the copy before invoking databaseBuilder.
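A minimal sketch of the "copy before invoking databaseBuilder" approach (the helper class and the base.db asset name are assumptions for illustration, not from the answer above):
import android.content.Context;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public final class PrepopulateHelper {
    // Copies assets/<dbName> to the app's database path once, before Room opens it.
    public static void copyDatabaseIfMissing(Context context, String dbName) throws IOException {
        File dbFile = context.getDatabasePath(dbName);
        if (dbFile.exists()) {
            return; // already copied on a previous run
        }
        dbFile.getParentFile().mkdirs();
        try (InputStream in = context.getAssets().open(dbName);
             OutputStream out = new FileOutputStream(dbFile)) {
            byte[] buffer = new byte[8192];
            int length;
            while ((length = in.read(buffer)) > 0) {
                out.write(buffer, 0, length);
            }
        }
    }
}
Call PrepopulateHelper.copyDatabaseIfMissing(context, "base.db") before Room.databaseBuilder(...).build(), so Room opens the already-copied file.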
I solved it.
Database class:
@Database(entities = {Entity1.class, Entity2.class, Entity3.class}, version = 1, exportSchema = false)
public abstract class AppDatabase extends RoomDatabase {
    private static AppDatabase INSTANCE;

    public abstract Entity1Dao entity1Dao();
    public abstract Entity2Dao entity2Dao();
    public abstract Entity3Dao entity3Dao();

    public static AppDatabase getDatabase(Context context) {
        if (INSTANCE == null) {
            INSTANCE = createDatabase(context);
        }
        return (INSTANCE);
    }

    private static AppDatabase createDatabase(Context context) {
        RoomDatabase.Builder<AppDatabase> builder =
                Room.databaseBuilder(context.getApplicationContext(), AppDatabase.class,
                        context.getString(R.string.dbase_name));
        return (builder.openHelperFactory(new AssetSQLiteOpenHelperFactory())
                .allowMainThreadQueries()
                .build());
    }
}
Also, you should get the SQL helpers: link
My issue is a little bit different from the OP's.
I was copying the database file from storage (where I had downloaded it from the internet), not from assets. java.lang.RuntimeException: Unable to copy database file was raised because I hadn't granted READ_EXTERNAL_STORAGE first -- or, in general, WRITE_EXTERNAL_STORAGE, since READ_EXTERNAL_STORAGE is included in WRITE_EXTERNAL_STORAGE and the write permission is already needed for downloading the file.
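A hedged sketch of checking for that permission before attempting the copy (standard Android APIs; the request code is an arbitrary placeholder, and pre-AndroidX projects would use the support-library equivalents):
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

public final class StoragePermission {
    private static final int REQUEST_CODE = 42; // arbitrary placeholder

    // Returns true if the copy can proceed immediately; otherwise requests the permission.
    public static boolean ensureReadExternalStorage(Activity activity) {
        if (ContextCompat.checkSelfPermission(activity, Manifest.permission.READ_EXTERNAL_STORAGE)
                == PackageManager.PERMISSION_GRANTED) {
            return true;
        }
        ActivityCompat.requestPermissions(activity,
                new String[] { Manifest.permission.READ_EXTERNAL_STORAGE }, REQUEST_CODE);
        return false;
    }
}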

Camel mongodb - MongoDbProducer multiple inserts

I am trying to do a multiple insert using the Camel MongoDB component.
My POJO representation is:
class Person {
    String firstName;
    String lastName;
}
I have a processor which constructs a valid List of Person POJOs, and it marshals to a valid JSON structure.
When this list of Person objects is sent to the MongoDB producer, the type conversion to BasicDBObject fails when createDoInsert is invoked. The piece of code below looks to be the problem: should it have more fallbacks/checks in place to attempt the list conversion further down, since it fails on the very first cast itself? Debugging the MongoDbProducer, the exchange body being received is a DBList, which extends DBObject. This causes the singleInsert flag to remain set to true, which makes the insertion below fail because we get a DBList instead of a BasicDBObject:
if (singleInsert) {
    BasicDBObject insertObjects = (BasicDBObject) insert;
    dbCol.insertOne(insertObjects);
    exchange1.getIn().setHeader("CamelMongoOid", insertObjects.get("_id"));
}
The Camel MongoDbProducer code fragment
private Function<Exchange, Object> createDoInsert() {
    return (exchange1) -> {
        MongoCollection dbCol = this.calculateCollection(exchange1);
        boolean singleInsert = true;
        Object insert = exchange1.getIn().getBody(DBObject.class);
        if (insert == null) {
            insert = exchange1.getIn().getBody(List.class);
            if (insert == null) {
                throw new CamelMongoDbException("MongoDB operation = insert, Body is not conversible to type DBObject nor List<DBObject>");
            }
            singleInsert = false;
            insert = this.attemptConvertToList((List) insert, exchange1);
        }
        if (singleInsert) {
            BasicDBObject insertObjects = (BasicDBObject) insert;
            dbCol.insertOne(insertObjects);
            exchange1.getIn().setHeader("CamelMongoOid", insertObjects.get("_id"));
        } else {
            List insertObjects1 = (List) insert;
            dbCol.insertMany(insertObjects1);
            ArrayList objectIdentification = new ArrayList(insertObjects1.size());
            objectIdentification.addAll((Collection) insertObjects1.stream().map((insertObject) -> {
                return insertObject.get("_id");
            }).collect(Collectors.toList()));
            exchange1.getIn().setHeader("CamelMongoOid", objectIdentification);
        }
        return insert;
    };
}
My route is as below:
<route id="uploadFile">
    <from uri="jetty://http://0.0.0.0:9886/test"/>
    <process ref="fileProcessor"/>
    <unmarshal>
        <csv>
            <header>fname</header>
            <header>lname</header>
        </csv>
    </unmarshal>
    <process ref="mongodbProcessor"/>
    <to uri="mongodb:mongoBean?database=axs175&amp;collection=insurance&amp;operation=insert"/>
</route>
And the MongodbProcessor constructing the List of Person POJOs:
@Component
public class MongodbProcessor implements Processor {
    @Override
    public void process(Exchange exchange) throws Exception {
        ArrayList<List<String>> personlist = (ArrayList) exchange.getIn().getBody();
        ArrayList<Person> persons = new ArrayList<>();
        for (List<String> records : personlist) {
            Person person = new Person();
            person.setFname(records.get(0));
            person.setLname(records.get(1));
            persons.add(person);
        }
        exchange.getIn().setBody(persons);
    }
}
Also requested information here - http://camel.465427.n5.nabble.com/Problems-with-MongoDbProducer-multiple-inserts-tc5792644.html
This issue is now fixed via - https://issues.apache.org/jira/browse/CAMEL-10728
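Until you are on a Camel release that contains that fix, one possible workaround (a sketch, not official guidance) is to split the list in the route so that each exchange carries a single Person, which the producer then handles through its single-document branch; this assumes Person exposes standard getters so the component's converters can map a single object to a DBObject:
<route id="uploadFile">
    <from uri="jetty://http://0.0.0.0:9886/test"/>
    <process ref="fileProcessor"/>
    <unmarshal>
        <csv>
            <header>fname</header>
            <header>lname</header>
        </csv>
    </unmarshal>
    <process ref="mongodbProcessor"/>
    <!-- split the List<Person> so each exchange carries a single document -->
    <split>
        <simple>${body}</simple>
        <to uri="mongodb:mongoBean?database=axs175&amp;collection=insurance&amp;operation=insert"/>
    </split>
</route>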

Can MyBatis set a value with reflection?

Can MyBatis set a value with reflection?
I have a class with a property whose setter is protected. Do I have to use reflection to set this value? Can MyBatis work with it?
Yes, MyBatis uses reflection to set values.
In Reflector.java (MyBatis 3.3.0), MyBatis configures the set methods:
private void addSetMethods(Class<?> cls) {
    Map<String, List<Method>> conflictingSetters = new HashMap<String, List<Method>>();
    Method[] methods = getClassMethods(cls);
    for (Method method : methods) {
        String name = method.getName();
        if (name.startsWith("set") && name.length() > 3) {
            if (method.getParameterTypes().length == 1) {
                name = PropertyNamer.methodToProperty(name);
                addMethodConflict(conflictingSetters, name, method);
            }
        }
    }
    resolveSetterConflicts(conflictingSetters);
}
If your class does not have a set method, addSetFields will add a new SetFieldInvoker for the field:
private void addSetField(Field field) {
    if (isValidPropertyName(field.getName())) {
        setMethods.put(field.getName(), new SetFieldInvoker(field));
        setTypes.put(field.getName(), field.getType());
    }
}
And the SetFieldInvoker looks like this:
/**
 * @author Clinton Begin
 */
public class SetFieldInvoker implements Invoker {
    private Field field;

    public SetFieldInvoker(Field field) {
        this.field = field;
    }

    @Override
    public Object invoke(Object target, Object[] args) throws IllegalAccessException, InvocationTargetException {
        field.set(target, args[0]);
        return null;
    }

    @Override
    public Class<?> getType() {
        return field.getType();
    }
}
The DefaultResultSetHandler calls BeanWrapper's setBeanProperty method, which in turn calls getSetInvoker:
private void setBeanProperty(PropertyTokenizer prop, Object object, Object value) {
    try {
        Invoker method = metaClass.getSetInvoker(prop.getName());
        Object[] params = {value};
        try {
            method.invoke(object, params);
        } catch (Throwable t) {
            throw ExceptionUtil.unwrapThrowable(t);
        }
    } catch (Throwable t) {
        throw new ReflectionException("Could not set property '" + prop.getName() + "' of '" + object.getClass() + "' with value '" + value + "' Cause: " + t.toString(), t);
    }
}
The whole call chain looks roughly like this:
DefaultSqlSession#selectList --> SimpleExecutor#doQuery --> SimpleStatementHandler#query --> DefaultResultSetHandler#handleResultSets
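So, for the original question: a property with a protected setter (or no setter at all) can still be populated, because Reflector registers setters via reflection and, when no usable setter is available, falls back to direct field access through SetFieldInvoker. A minimal illustration (the class and field names are made up for the example):
// Hypothetical result type: MyBatis can still populate 'code' even though its setter is protected.
public class Account {
    private Long id;
    private String code;

    public Long getId() { return id; }
    public void setId(Long id) { this.id = id; }

    public String getCode() { return code; }
    protected void setCode(String code) { this.code = code; }
}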

Spring AOP @AspectJ: how do I give aspects access to other classes

I am quite new to Java and Spring. I would like to find out whether it is possible, and if so how, to get my aspects to apply to more than one class without having to call the method from the class where the aspects "work".
This is my main class. Aspects work on any methods I call directly from this class, but will not work on any of the other methods called by other classes (even if they are not internal).
public class AopMain {
    public static void main(String[] args) {
        String selection = "on";
        ApplicationContext ctx = new ClassPathXmlApplicationContext("spring.xml");
        do {
            try {
                System.out.println("Enter 'length' for a length conversion and 'temperature' for a temperature conversion and 'quit' to quit");
                BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
                selection = br.readLine();
                if (selection.contentEquals("length")) {
                    LengthService lengthService = ctx.getBean("lengthService", LengthService.class);
                    lengthService.runLengthService();
                    lengthService.display();
                } else if (selection.contentEquals("temperature")) {
                    TemperatureService temperatureService = new TemperatureService();
                    temperatureService.runTempertureService();
                    temperatureService.display();
                }
            } catch (Exception e) {
                System.out.println("Input error");
            }
        } while (!selection.contentEquals("quit"));
    }
}
This is one of the conversion service classes:
public class TemperatureService {
    String fromUnit = null;
    String toUnit = null;
    double val = 0;
    double converted = 0;

    public void runTempertureService() {
        Scanner in = new Scanner(System.in);
        System.out.println("Convert from (enter C, K, F): ");
        fromUnit = in.nextLine();
        System.out.println("Convert to (enter C, K, F): ");
        toUnit = in.nextLine();
        TemperatureConverter from = new TemperatureConverter(fromUnit);
        TemperatureConverter to = new TemperatureConverter(toUnit);
        System.out.println("Value:");
        val = in.nextDouble();
        double celcius = from.toCelcius(val);
        converted = to.fromCelcius(celcius);
        from.display(val, fromUnit, converted, toUnit);
        System.out.println(val + " " + fromUnit + " = " + converted + " " + toUnit);
    }

    public String[] display() {
        String[] displayString = {Double.toString(val), fromUnit, Double.toString(converted), toUnit};
        return displayString;
    }
}
And this is one of the conversion classes:
public class TemperatureConverter {
    final double C_TO_F = 33.8;
    final double C_TO_C = 1;
    final double C_TO_KELVIN = 274.15;
    private double factor;

    public TemperatureConverter(String unit) {
        if (unit.contentEquals("F"))
            factor = C_TO_F;
        else if (unit.contentEquals("C"))
            factor = C_TO_C;
        else if (unit.contentEquals("K"))
            factor = C_TO_KELVIN;
    }

    public double toCelcius(double measurement) {
        return measurement * factor;
    }

    public double fromCelcius(double measurement) {
        return measurement / factor;
    }

    public TemperatureConverter() {}

    public void display(double val, String fromUnit, double converted, String toUnit) {}
}
This is my configuration file:
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
       xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
       xmlns:aop="http://www.springframework.org/schema/aop"
       xmlns:context="http://www.springframework.org/schema/context"
       xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-2.0.xsd
                           http://www.springframework.org/schema/aop http://www.springframework.org/schema/aop/spring-aop-2.0.xsd
                           http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-2.5.xsd">

    <aop:aspectj-autoproxy/>

    <bean name="lengthConverter" class="converter.method.LengthConverter"/>
    <bean name="temperatureConverter" class="converter.method.TemperatureConverter"/>
    <bean name="lengthService" class="converter.service.LengthService" autowire="byName"/>
    <bean name="temperatureService" class="converter.service.TemperatureService"/>
    <bean name="ValidationAspect" class="converter.aspect.ValidationAspect"/>
    <bean name="DisplayAspect" class="converter.aspect.DisplayAspect"/>
</beans>
I want to be able to apply an aspect to functions of the converter class called by the service class, but like I have mentioned, it doesn't work unless the method is called from the main class directly. (The display function was originally part of the converter class, but I moved it so that the aspect would work.) Also, why will an aspect not pick up the newline() method call?
Edit:
This is one of my aspects:
@Aspect
public class DisplayAspect {
    @AfterReturning(pointcut = "execution(* display(..))", returning = "retVal")
    public void fileSetUp(Object retVal) {
        System.out.println("So we found the display things");
        Writer writer = null;
        String[] returnArray = (String[]) retVal;
        try {
            System.out.println("inside try");
            String text = "The operation performed was: " + returnArray[0] + " in " + returnArray[1] + " is " + returnArray[2] + " " + returnArray[3] + "\n";
            File file = new File("Log.txt");
            writer = new BufferedWriter(new FileWriter(file, true));
            writer.write(text);
        } catch (FileNotFoundException e1) {
            e1.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (writer != null) {
                    writer.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
I want to be able to apply an aspect to functions of the converter class
Well, then change your pointcut so as to intercept the methods (not functions; in Java they are called methods) you want to handle in your advice. At the moment the pointcut is
execution(* display(..))
i.e. it will intercept all methods named display with any number of parameters and any return type. If you want to intercept all converter methods instead, change it to
execution(* converter.method.TemperatureConverter.*(..))
like I have mentioned, it doesn't work unless the method is called from the main class directly.
I need to guess because this description is unclear, but what you are probably describing is that the advice is only applied if TemperatureService.display() is called from outside the class, not from a method within TemperatureService itself. This is a known and well-described limitation of Spring AOP; see the Spring manual, chapter 9.6, "Proxying mechanisms". Due to the proxy-based "AOP lite" approach of Spring AOP, this cannot work, because internal calls to methods of this are not routed through the dynamic proxy created by the Spring container. Thus, Spring AOP only works for inter-bean calls, not intra-bean ones. If you need to intercept internal calls, you need to switch to full-blown AspectJ, which can easily be integrated into Spring applications via LTW (load-time weaving), as described in chapter 9.8, "Using AspectJ with Spring applications".
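A minimal sketch of an aspect using the broader pointcut suggested above (the aspect class name and advice body are illustrative only):
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;

@Aspect
public class ConverterTraceAspect {
    // Matches any method of TemperatureConverter, regardless of name,
    // parameters, or return type -- but only when the call goes through
    // a Spring-managed bean (proxy), not through 'new TemperatureConverter(...)'.
    @Before("execution(* converter.method.TemperatureConverter.*(..))")
    public void traceConverterCall(JoinPoint jp) {
        System.out.println("Intercepted: " + jp.getSignature());
    }
}
For the advice to fire at all, the intercepted object must be obtained from the Spring container as a proxied bean (for example via ctx.getBean(...)), not instantiated directly with new.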

Proper way to profile a DbContext using MiniProfiler and EF 5 and Autofac

The MiniProfiler site gives the following code for generating an Entity Framework ObjectContext:
public static MyModel Get()
{
    var conn = new StackExchange.Profiling.Data.EFProfiledDbConnection(GetConnection(), MiniProfiler.Current);
    return ObjectContextUtils.CreateObjectContext<MyModel>(conn); // resides in the MiniProfiler.EF nuget pack
}
However, using Entity Framework 5, I am not using an ObjectContext - rather I am using a DbContext. I cannot plug the model name in here, since the CreateObjectContext<T>() method expects T to be of type ObjectContext. (For the same reason, the code given in this answer also doesn't work).
Additionally, I am using Autofac to initialize my DB connections. This is being registered with the following (MyData is the name of my EF DbContext):
Builder.RegisterType<MyData>().As<DbContext>().InstancePerHttpRequest();
So, combining the two parts: how can I use Autofac to initialize my DbContext tied into MiniProfiler.EF? And if that is not possible, at least how can I do the first part (create a factory method for MiniProfiler.EF to return a DbContext)?
I just got this working:
public static class DbContextUtils
{
    private const BindingFlags PrivateInstance = BindingFlags.NonPublic | BindingFlags.Instance;

    public static T CreateDbContext<T>() where T : DbContext
    {
        return CreateDbContext<T>(GetProfiledConnection<T>());
    }

    public static T CreateDbContext<T>(this DbConnection connection) where T : DbContext
    {
        var workspace = new MetadataWorkspace(new[] { "res://*/" }, new[] { typeof(T).Assembly });
        var factory = DbProviderServices.GetProviderFactory(connection);
        var itemCollection = workspace.GetItemCollection(DataSpace.SSpace);
        var providerFactoryField = itemCollection.GetType().GetField("_providerFactory", PrivateInstance);
        if (providerFactoryField != null) providerFactoryField.SetValue(itemCollection, factory);
        var ec = new EntityConnection(workspace, connection);
        return CtorCache<T, DbConnection>.Ctor(ec);
    }

    public static DbConnection GetProfiledConnection<T>() where T : DbContext
    {
        var dbConnection = ObjectContextUtils.GetStoreConnection("name=" + typeof(T).Name);
        return new EFProfiledDbConnection(dbConnection, MiniProfiler.Current);
    }

    internal static class CtorCache<TType, TArg> where TType : class
    {
        public static readonly Func<TArg, TType> Ctor;

        static CtorCache()
        {
            var argTypes = new[] { typeof(TArg) };
            var ctor = typeof(TType).GetConstructor(argTypes);
            if (ctor == null)
            {
                Ctor = x => { throw new InvalidOperationException("No suitable constructor defined"); };
            }
            else
            {
                var dm = new DynamicMethod("ctor", typeof(TType), argTypes);
                var il = dm.GetILGenerator();
                il.Emit(OpCodes.Ldarg_0);
                il.Emit(OpCodes.Newobj, ctor);
                il.Emit(OpCodes.Ret);
                Ctor = (Func<TArg, TType>)dm.CreateDelegate(typeof(Func<TArg, TType>));
            }
        }
    }
}
It is based on the code in MiniProfiler's ObjectContextUtils.
You use it like this:
builder.Register(c => DbContextUtils.CreateDbContext<MyData>()).As<DbContext>().InstancePerHttpRequest();
This solution REQUIRES your DbContext to have a constructor which takes a DbConnection and passes it to base, like this:
public MyData(DbConnection connection)
    : base(connection, true)
{
}
There is a constructor of the DbContext class which takes an existing DbConnection. So you need a new constructor on your MyData which just calls the base:
public class MyData : DbContext
{
    public MyData(DbConnection existingConnection, bool contextOwnsConnection)
        : base(existingConnection, contextOwnsConnection)
    {
    }
    //..
}
Then you register your MyData with Register:
builder.Register(c =>
{
    var conn = new EFProfiledDbConnection(GetConnection(), MiniProfiler.Current);
    return new MyData(conn, true);
}).As<DbContext>().InstancePerHttpRequest();