How to create Entity Framework ObjectContext?

I have many databases on one SQL Server.
I placed the connection string as a template (note the Initial Catalog={0}) in web.config:
<add name="ent" connectionString="metadata=res://*/ent.csdl|res://*/ent.ssdl|res://*/ent.msl;provider=System.Data.SqlClient;provider connection string="Data Source=1.1.1.1;Initial Catalog={0};Persist Security Info=True;User ID=user;Password=pass;MultipleActiveResultSets=True"" providerName="System.Data.EntityClient" />
I want to create the ObjectContext with the correct connection string. I thought to call CreatObjectContext<SiteEntities>("MySite"), but I get the error "Unable to determine the provider name for connection of type 'System.Data.EntityClient.EntityConnection'."
public T CreatObjectContext<T>(string dbName) where T : ObjectContext, new()
{
var conStr = ConfigurationManager.ConnectionStrings["ent"].ConnectionString;
var entityBuilder = new EntityConnectionStringBuilder(conStr);
entityBuilder.Provider = "System.Data.SqlClient";
// Build correct conString to the db
entityBuilder.ProviderConnectionString = string.Format(entityBuilder.ProviderConnectionString, dbName);
var connection = new EntityConnection(entityBuilder.ConnectionString);
var builder = new ContextBuilder<T>();
return builder.Create(connection);
}
What am I doing wrong? How can I create the context?

If you are using EntityConnectionStringBuilder, you only need to store the plain SQL Server connection strings in your web.config. EntityConnectionStringBuilder can then convert those to EF4 connection strings.
Example web.config
<connectionStrings>
<add name="db1" connectionString="data source=localhost\SQLEXPRESS;initial catalog=db1;integrated security=True;multipleactiveresultsets=True;App=EntityFramework" />
<add name="db2" connectionString="data source=localhost\SQLEXPRESS;initial catalog=db2;integrated security=True;multipleactiveresultsets=True;App=EntityFramework" />
</connectionStrings>
And we can change your method to something like:
public ObjectContext CreatObjectContext(string dbName)
{
var conStr = ConfigurationManager.ConnectionStrings[dbName].ConnectionString;
var entityBuilder = new EntityConnectionStringBuilder();
entityBuilder.Provider = "System.Data.SqlClient";
entityBuilder.ProviderConnectionString = conStr;
entityBuilder.Metadata = @"res://*/ent.csdl|res://*/ent.ssdl|res://*/ent.msl";
return new ObjectContext(entityBuilder.ToString());
}
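A minimal usage sketch against the web.config above, keeping the method name from the question:
// resolves the "db1" SQL connection string and wraps it in an EF connection string
using (var context = CreatObjectContext("db1"))
{
// query the model here
}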

I just wanted to share a small class that creates an Entity Framework connection from the entity class (as the type parameter T), a SQL connection string, and the entity model metadata name.
public static class EFConnection<T> where T : ObjectContext
{
public static T GetDatabase(string connectionString,string entityModelMetadataName)
{
var entityBuilder = new EntityConnectionStringBuilder();
entityBuilder.Provider = "System.Data.SqlClient";
entityBuilder.ProviderConnectionString = connectionString;
entityBuilder.Metadata = @"res://*/" + entityModelMetadataName + ".csdl|res://*/" + entityModelMetadataName + ".ssdl|res://*/" + entityModelMetadataName + ".msl";
var _db = (T)Activator.CreateInstance(typeof(T), new object[] { entityBuilder.ToString() });
return _db;
}
}
Usage example:
var _db = EFConnection<Model1Container>.GetDatabase(Settings.General.Default.DatabaseConnectionString, "Model1");
I also used this post to put everything together:

Related

ADO.NET add SQLParameters

I am trying to execute a stored procedure with parameters using ADO.NET in .NET Core.
When I try to pass in a SqlParameter object I get the error below.
I have a using statement at the top for System.Data.SqlClient.
Is there another or newer way of passing SQL parameters to a stored procedure?
public async Task<List<SynchStockDto>> GetStockForSync(int locationBinId, DateTime lastSyncDate)
{
await EnsureConnectionOpenAsync();
var command = GetConnection().CreateCommand();
command.CommandText = "sp_GetStockForSync #LocationBinId, #LastSyncDate";
command.CommandType = CommandType.StoredProcedure;
command.Transaction = GetActiveTransaction();
command.Parameters.Add(new SqlParameter("LocationBinId",locationBinId)); //not finding SQLParameter method
using (var dataReader = await command.ExecuteReaderAsync())
{
var result = new List<SynchStockDto>();
while (dataReader.Read())
{
var stockItem = new SynchStockDto
{
};
result.Add(stockItem);
}
return result;
}
}
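For reference, the missing SqlParameter type usually just means the System.Data.SqlClient (or Microsoft.Data.SqlClient) NuGet package is not referenced in the .NET Core project. Once it is, a minimal sketch for adding the parameters could look like this (assuming the procedure's parameters are @LocationBinId and @LastSyncDate, as in the question):
// with CommandType.StoredProcedure the CommandText should be just the procedure name
command.CommandText = "sp_GetStockForSync";
command.CommandType = CommandType.StoredProcedure;
command.Parameters.Add(new SqlParameter("@LocationBinId", SqlDbType.Int) { Value = locationBinId });
command.Parameters.Add(new SqlParameter("@LastSyncDate", SqlDbType.DateTime) { Value = lastSyncDate });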

Upgrade to CSLA 6: ConnectionManager problem

We are trying to upgrade to CSLA 6.
Now we are getting this message:
"ConnectionManager is obsolete, use dependency injection ... use ApplicationContext.LocalContext"
for this code:
using (var ctx = ConnectionManager<OracleConnection>.GetManager("dbEndpoint", true))
We've tried this code snippet, but all the connections are null.
Could you please help us get the connection correctly?
var services = new ServiceCollection();
services.AddCsla();
var provider = services.BuildServiceProvider();
DataPortalFactory = provider.GetRequiredService<IDataPortalFactory>();
var appContext = provider.GetRequiredService<Csla.ApplicationContext>();
var conn1 = appContext.LocalContext.GetValueOrNull("dbEndpoint");
var conn2 = appContext.LocalContext.GetValueOrNull("__db:default-dbEndpoint");
var conn3 = appContext.LocalContext["dbEndpoint"];
var conn4 = appContext.LocalContext["__db:default-dbEndpoint"];
Another experiment:
....
var CONNECTION_ORACLE = new OracleConnection(ConfigurationManager.ConnectionStrings["dbEndpoint"].ConnectionString);
services.AddScoped<IDbConnection>(o => CONNECTION_ORACLE);
....
var provider = services.BuildServiceProvider();
...
var connectionResolved = provider.GetRequiredService<IDbConnection>();
appContext.LocalContext.Add("dbEndpoint", connectionResolved);
Then the connection is not null, and inside the factory it is successfully resolved by DI:
public DocFactory(ApplicationContext appContext, IDbConnection connection) : base(
appContext)
{
_connection = connection;
}
Then:
[Fetch]
public Doc Fetch(DocCriteria criteria)
{
bool cancel = false;
OnFetching(criteria, ref cancel);
if (cancel) return null;
Doc item = null;
OracleConnection connection = _connection as OracleConnection;
The connection is Closed (but NOT null!). It's possible to open it, but if I then close it, anyone else consuming it will run into a problem, and child objects will also have a problem with a closed connection.
So marking ConnectionManager as obsolete may not be such an obvious way to go; ConnectionManager was very useful for counting open connections, supporting transactions, etc.
Could you please provide a workaround for it?
More attempts:
var connectionString =
ConfigurationManager.ConnectionStrings["dbEndpoint"].ConnectionString;
..
appContext.ClientContext.Add("DBConnectionString", connectionString );
...
Factory
using (var connection = new OracleConnection(ApplicationContext.ClientContext["DBConnectionString"].ToString()))
{
connection.Open();

Your DAL should require that a database connection be injected.
public class MyDal : IDisposable
{
public MyDal(OracleConnection connection)
{
Connection = connection;
}
private OracleConnection Connection { get; set; }
public MyData GetData()
{
// use Connection to get the data
return data;
}
public void Dispose()
{
Connection.Dispose();
}
}
Then in the app server startup code, register your DAL type(s) and also register your connection type.
services.AddScoped(typeof(OracleConnection), provider =>
{
// initialize the connection here
return connection;
});
services.AddScoped<MyDal>();
Then, in your data portal operation method (such as create, fetch, etc.), inject your DAL:
[Fetch]
private void Fetch([Inject] MyDal dal)
{
var data = dal.GetData();
}
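Putting the pieces together for the question's "dbEndpoint" connection string, a minimal registration sketch (MyDal is the example class above; the exact wiring is an assumption, not a CSLA-prescribed API):
var services = new ServiceCollection();
services.AddCsla();
// register the connection type so [Inject] can supply it to the DAL
services.AddScoped(sp => new OracleConnection(ConfigurationManager.ConnectionStrings["dbEndpoint"].ConnectionString));
services.AddScoped<MyDal>();
var provider = services.BuildServiceProvider();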

How do I use Entity Framework instead of Ado.net for stored procedure?

I am using a stored procedure in my SQL Server database to take the input data through a DataTable. As I am now using ASP.NET MVC, I want to use Entity Framework instead of ADO.NET.
public void BulkUpload(DataTable dt)
{
dt.TableName = "MainTable";
DataSet dataset = new DataSet();
DataTable dataTable = new DataTable();
try
{
using (SqlConnection conn = new SqlConnection(ConfigurationManager.ConnectionStrings["conn"].ConnectionString))
{
conn.Open();
{
SqlCommand cmd = new SqlCommand("DatatableToDataBase", conn);
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("#mode", SqlDbType.VarChar).Value = "MainTB";
cmd.Parameters.AddWithValue("#Details", SqlDbType.VarChar).Value = dt;
cmd.ExecuteNonQuery();
conn.Close();
}
}
}
catch (Exception)
{ }
}
It's very easy: just add an entity data model to your project and connect to your model:
YourEntityModel context = this.CurrentDataSource;
Then pass the stored procedure name wherever you like. That's everything.
Example:
[WebGet]
public List<callersW> GetCaller()
{
testCDREntities1 context = this.CurrentDataSource;
//sql parameters here
List<callersW> result = context.Database.SqlQuery<callersW>("StoredProcedure").ToList();
return result;
}
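If the procedure takes parameters, SqlQuery accepts them as trailing arguments; a small sketch using the @mode parameter from the question (a DataTable argument would additionally need SqlDbType.Structured and a TypeName):
var modeParam = new SqlParameter("@mode", "MainTB");
List<callersW> result = context.Database.SqlQuery<callersW>("EXEC DatatableToDataBase @mode", modeParam).ToList();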

Not seeing roles on Principal in ASP.NET MVC 2 Application

I am writing an ASP.NET MVC 2 application and don't want to use ASP.NET Membership. I do want to use the Authorize attribute on the Controllers. What I have done so far is ...
Web.config
<roleManager enabled="true" />
<authentication mode="Forms">
<forms loginUrl="~/Authentication/Login" timeout="2880"/>
</authentication>
<authorization>
<allow users="*" /> <!-- This is for testing -->
</authorization>
In my Global.asax
protected void Application_AuthenticateRequest(Object sender, EventArgs e)
{
var cookie = Context.Request.Cookies[FormsAuthentication.FormsCookieName];
if (cookie == null) return;
var decryptedCookie = FormsAuthentication.Decrypt(cookie.Value);
var roles = decryptedCookie.UserData.Split('|');
var tcmIdentity = new TcmIdentity(decryptedCookie.Name);
var tcmPrincipal = new GenericPrincipal(tcmIdentity, roles);
Context.User = tcmPrincipal;
}
I am using a custom IIdentity so that I can add some custom properties in the future. To test this in my Controller action I did this ...
var testPrincipal = User;
I can see the custom identity with all of the user information, but there are no roles on the principal object. Any help with what I have missed would be great. Thanks.
I believe you need a role provider. Much like a Membership provider handles the membership of users (create, delete, validate, edit), in order to use roles you need to use a RoleProvider (ASP.NET: Implementing a Role Provider).
Using roles also requires enabling them in web.config, for example:
<roleManager enabled="enabled" defaultProvider="AspNetSqlRoleProvider">
<providers>
<clear/>
<add name="AspNetSqlRoleProvider"
type="System.Web.Security.SqlRoleProvider"
connectionStringName="ApplicationServices"
applicationName="/" />
<add name="AspNetWindowsTokenRoleProvider"
type="System.Web.Security.WindowsTokenRoleProvider"
applicationName="/" />
</providers>
</roleManager>
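If you would rather not use the SQL-backed providers, a minimal custom RoleProvider sketch could read the roles straight back out of the forms-auth ticket (the class name and approach here are illustrative assumptions; it needs System.Web and System.Web.Security):
public class CookieRoleProvider : RoleProvider
{
public override string ApplicationName { get; set; }
public override string[] GetRolesForUser(string username)
{
// roles were written into the ticket's UserData at login
var cookie = HttpContext.Current.Request.Cookies[FormsAuthentication.FormsCookieName];
if (cookie == null) return new string[0];
return FormsAuthentication.Decrypt(cookie.Value).UserData.Split('|');
}
public override bool IsUserInRole(string username, string roleName)
{
return Array.IndexOf(GetRolesForUser(username), roleName) >= 0;
}
// the remaining members are not needed for [Authorize] checks
public override void AddUsersToRoles(string[] usernames, string[] roleNames) { throw new NotSupportedException(); }
public override void CreateRole(string roleName) { throw new NotSupportedException(); }
public override bool DeleteRole(string roleName, bool throwOnPopulatedRole) { throw new NotSupportedException(); }
public override string[] FindUsersInRole(string roleName, string usernameToMatch) { throw new NotSupportedException(); }
public override string[] GetAllRoles() { throw new NotSupportedException(); }
public override string[] GetUsersInRole(string roleName) { throw new NotSupportedException(); }
public override void RemoveUsersFromRoles(string[] usernames, string[] roleNames) { throw new NotSupportedException(); }
public override bool RoleExists(string roleName) { throw new NotSupportedException(); }
}
You would then register it as the defaultProvider in the <roleManager> element instead of AspNetSqlRoleProvider.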
This might be useful:
SO asp-net-mvc-roles-without-database-and-without-role-provider
As might be:
ASP.NET 2.0, Custom Role assignment without a 'Role Provider'
UPDATE:
In the end I got this working by changing
protected void Application_AuthenticateRequest(Object sender, EventArgs e)
{
var cookie = Context.Request.Cookies[FormsAuthentication.FormsCookieName];
if (cookie == null) return;
var decryptedCookie = FormsAuthentication.Decrypt(cookie.Value);
var roles = decryptedCookie.UserData.Split('|');
var tcmIdentity = new TcmIdentity(decryptedCookie.Name);
var tcmPrincipal = new GenericPrincipal(tcmIdentity, roles);
Context.User = tcmPrincipal;
}
to
protected void Application_AuthenticateRequest(Object sender, EventArgs e)
{
var cookie = Context.Request.Cookies[FormsAuthentication.FormsCookieName];
if (cookie == null) return;
var decryptedCookie = FormsAuthentication.Decrypt(cookie.Value);
var roles = decryptedCookie.UserData.Split('|');
var tcmIdentity = new TcmIdentity(decryptedCookie.Name);
var tcmPrincipal = new GenericPrincipal(tcmIdentity, roles);
Thread.CurrentPrincipal = Context.User = tcmPrincipal;
}

SMO: restoring to a different DB

I've read a dozen different blogs, as well as the MSDN examples, and they just aren't working for me.
Ultimately what I'm trying to do is automate moving a DB from our production instance to our dev instance, or the other direction.
The approach I've taken is this:
1. backup/restore to a temp DB
2. detach the temp DB
3. copy the mdf and ldf files to the other instance
4. reattach
I'm stuck on step 1 and I cannot understand why. Everything I've read claims this should be working.
NOTE: I've set dbName to the db I want to restore to. I have also set restore.Database = dbName, where restore is an instance of the Restore class in the SMO namespace.
mdf.LogicalFileName = dbName;
mdf.PhysicalFileName = String.Format(@"{0}\{1}.mdf", server.Information.MasterDBPath, dbName);
ldf.LogicalFileName = dbName + "_log";
ldf.PhysicalFileName = String.Format(@"{0}\{1}.ldf", server.Information.MasterDBPath, dbName);
restore.RelocateFiles.Add(mdf);
restore.RelocateFiles.Add(ldf);
restore.SqlRestore(server);
This is the exception I'm getting:
The file 'D:\MSSQL.MIQ_Dev\MSSQL.2\MSSQL\Data\MIQDesign2Detach.mdf' cannot be overwritten. It is being used by database 'MIQDesignTest2'.
File 'MIQDesign' cannot be restored to 'D:\MSSQL.MIQ_Dev\MSSQL.2\MSSQL\Data\MIQDesign2Detach.mdf'. Use WITH MOVE to identify a valid location for the file.
The file 'D:\MSSQL.MIQ_Dev\MSSQL.2\MSSQL\Data\MIQDesign2Detach.ldf' cannot be overwritten. It is being used by database 'MIQDesignTest2'.
File 'MIQDesign_log' cannot be restored to 'D:\MSSQL.MIQ_Dev\MSSQL.2\MSSQL\Data\MIQDesign2Detach.ldf'. Use WITH MOVE to identify a valid location for the file.
Problems were identified while planning for the RESTORE statement. Previous messages provide details.
RESTORE DATABASE is terminating abnormally.
Why is this trying to overwrite the original mdf? Isn't the RelocateFiles collection supposed to specify that you want it saved to a different physical filename?
It works.
public class DatabaseManager
{
public Action<int, string> OnSqlBackupPercentComplete;
public Action<int, string> OnSqlRestorePercentComplete;
public Action<SqlError> OnSqlBackupComplete;
public Action<SqlError> OnSqlRestoreComplete;
public bool IsConnected { get; private set; }
private ServerConnection _connection;
public void Connect(string userName, string password, string serverName, bool useIntegratedLogin)
{
if (useIntegratedLogin)
{
var sqlCon = new SqlConnection(string.Format("Data Source={0}; Integrated Security=True; Connection Timeout=5", serverName));
_connection = new ServerConnection(sqlCon);
_connection.Connect();
IsConnected = true;
}
else
{
_connection = new ServerConnection(serverName, userName, password);
_connection.ConnectTimeout = 5000;
_connection.Connect();
IsConnected = true;
}
}
public void BackupDatabase(string databaseName, string destinationPath)
{
var sqlServer = new Server(_connection);
databaseName = databaseName.Replace("[", "").Replace("]", "");
var sqlBackup = new Backup
{
Action = BackupActionType.Database,
BackupSetDescription = "ArchiveDataBase:" + DateTime.Now.ToShortDateString(),
BackupSetName = "Archive",
Database = databaseName
};
var deviceItem = new BackupDeviceItem(destinationPath, DeviceType.File);
sqlBackup.Initialize = true;
sqlBackup.Checksum = true;
sqlBackup.ContinueAfterError = true;
sqlBackup.Devices.Add(deviceItem);
sqlBackup.Incremental = false;
sqlBackup.ExpirationDate = DateTime.Now.AddDays(3);
sqlBackup.LogTruncation = BackupTruncateLogType.Truncate;
sqlBackup.PercentCompleteNotification = 10;
sqlBackup.PercentComplete += (sender, e) => OnSqlBackupPercentComplete(e.Percent, e.Message);
sqlBackup.Complete += (sender, e) => OnSqlBackupComplete(e.Error);
sqlBackup.FormatMedia = false;
sqlBackup.SqlBackup(sqlServer);
}
public DatabaseCollection GetDatabasesList()
{
if (IsConnected)
{
var sqlServer = new Server(_connection);
return sqlServer.Databases;
}
return null;
}
public void RestoreDatabase(string databaseName, string filePath)
{
var sqlServer = new Server(_connection);
databaseName = databaseName.Replace("[", "").Replace("]", "");
var sqlRestore = new Restore();
sqlRestore.PercentCompleteNotification = 10;
sqlRestore.PercentComplete += (sender, e) => OnSqlRestorePercentComplete(e.Percent, e.Message);
sqlRestore.Complete += (sender, e) => OnSqlRestoreComplete(e.Error);
var deviceItem = new BackupDeviceItem(filePath, DeviceType.File);
sqlRestore.Devices.Add(deviceItem);
sqlRestore.Database = databaseName;
DataTable dtFileList = sqlRestore.ReadFileList(sqlServer);
int lastIndexOf = dtFileList.Rows[1][1].ToString().LastIndexOf(@"\");
string physicalName = dtFileList.Rows[1][1].ToString().Substring(0, lastIndexOf + 1);
string dbLogicalName = dtFileList.Rows[0][0].ToString();
string dbPhysicalName = physicalName + databaseName + ".mdf";
string logLogicalName = dtFileList.Rows[1][0].ToString();
string logPhysicalName = physicalName + databaseName + "_log.ldf";
sqlRestore.RelocateFiles.Add(new RelocateFile(dbLogicalName, dbPhysicalName));
sqlRestore.RelocateFiles.Add(new RelocateFile(logLogicalName, logPhysicalName));
sqlServer.KillAllProcesses(sqlRestore.Database);
Database db = sqlServer.Databases[databaseName];
if (db != null)
{
db.DatabaseOptions.UserAccess = DatabaseUserAccess.Single;
db.Alter(TerminationClause.RollbackTransactionsImmediately);
sqlServer.DetachDatabase(sqlRestore.Database, false);
}
sqlRestore.Action = RestoreActionType.Database;
sqlRestore.ReplaceDatabase = true;
sqlRestore.SqlRestore(sqlServer);
db = sqlServer.Databases[databaseName];
db.SetOnline();
sqlServer.Refresh();
db.DatabaseOptions.UserAccess = DatabaseUserAccess.Multiple;
}
public void Disconnect()
{
if (IsConnected)
_connection.Disconnect();
IsConnected = false;
}
}
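A short usage sketch of the class above (the server name and paths are placeholders):
var manager = new DatabaseManager();
// the class invokes these callbacks directly, so assign them before backing up / restoring
manager.OnSqlBackupPercentComplete = (percent, message) => Console.WriteLine("backup {0}%", percent);
manager.OnSqlBackupComplete = error => Console.WriteLine("backup finished");
manager.OnSqlRestorePercentComplete = (percent, message) => Console.WriteLine("restore {0}%", percent);
manager.OnSqlRestoreComplete = error => Console.WriteLine("restore finished");
manager.Connect(null, null, @".\SQLEXPRESS", true); // integrated security
manager.BackupDatabase("MIQDesign", @"C:\Backups\MIQDesign.bak");
manager.RestoreDatabase("MIQDesignTest2", @"C:\Backups\MIQDesign.bak");
manager.Disconnect();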
I ran into a similar problem and I found this solution to be quite helpful.
Take a look - http://www.eggheadcafe.com/software/aspnet/32188436/smorestore-database-name-change.aspx