I'm trying to get an ETL package going that requires bulk inserting a bunch of .csv files from a drive. Instead of manually scrolling through the code and changing the file path for each insert (it increments each month), I would just like to declare the variable once at the top.
E.g.
DECLARE @Month NVARCHAR(255)
SET @Month = 'November'
DECLARE @Year NVARCHAR(255)
SET @Year = '2019'
BULK INSERT #MonthlyMetrics
FROM '\\SomeServer\SomeFolder\Metrics_@Month_@Year.csv'
WITH
(
FIELDTERMINATOR = ','
, ROWTERMINATOR = '0x0a'
, FIRSTROW = 3
)
GO
There are a dozen inserts like this where the only part of the file name that changes is the month, i.e. Metrics_November_2019.csv is the actual file name. Since the FROM clause takes a string literal, I'm not sure how to get the declared variable in there.
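For reference, BULK INSERT will not accept a variable or expression in its FROM clause, so the usual workaround is to assemble the whole statement as a string and run it with sp_executesql. A minimal sketch using the variables above (the temp table and path are just the placeholders from the question):
DECLARE @Month NVARCHAR(255) = 'November'
DECLARE @Year NVARCHAR(255) = '2019'
DECLARE @sql NVARCHAR(MAX)
-- splice the month and year into the file name, then run the built command
SET @sql = N'BULK INSERT #MonthlyMetrics
FROM ''\\SomeServer\SomeFolder\Metrics_' + @Month + N'_' + @Year + N'.csv''
WITH (FIELDTERMINATOR = '','', ROWTERMINATOR = ''0x0a'', FIRSTROW = 3)'
EXEC sp_executesql @sql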
You can try using dynamic SQL to load the data; you will just need to enable xp_cmdshell first, like this:
This turns on advanced options and is needed to configure xp_cmdshell
sp_configure 'show advanced options', '1';
RECONFIGURE
This enables xp_cmdshell
sp_configure 'xp_cmdshell', '1'
RECONFIGURE
After that you can load the data into your temp table:
DECLARE @dir varchar(300)
DECLARE @files table (FileName varchar(800));
DECLARE @filesCmd varchar(500);
DECLARE @filename varchar(100);
DECLARE @cmd_BulkInsert nvarchar(max);
SET @dir = '\\SomeServer\SomeFolder';
SET @filesCmd = 'DIR /b ' + @dir + ' | findstr csv';
INSERT INTO @files EXECUTE xp_cmdshell @filesCmd;
BEGIN TRY
DECLARE C_TABLE CURSOR LOCAL FORWARD_ONLY READ_ONLY STATIC FOR
SELECT FileName FROM @files WHERE FileName IS NOT NULL;
OPEN C_TABLE;
FETCH NEXT FROM C_TABLE INTO @filename;
WHILE @@FETCH_STATUS = 0
BEGIN
SET @cmd_BulkInsert = N'
BULK INSERT #MonthlyMetrics
FROM '''+@dir+'\'+@filename+'''
WITH
(
FIELDTERMINATOR = '',''
,ROWTERMINATOR = ''0x0a''
,FIRSTROW = 3
)'
EXECUTE sp_executesql @cmd_BulkInsert;
FETCH NEXT FROM C_TABLE INTO @filename;
END
END TRY
BEGIN CATCH
SELECT
GETDATE()
,ERROR_LINE()
,ERROR_MESSAGE()
,ERROR_NUMBER()
,ERROR_STATE()
,ERROR_SEVERITY()
,@cmd_BulkInsert
END CATCH
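If xp_cmdshell was enabled only for this load, you may want to turn it back off again afterwards:
sp_configure 'xp_cmdshell', '0';
RECONFIGURE
sp_configure 'show advanced options', '0';
RECONFIGURE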
I am using SQL Server 2017, and I want to import multiple .csv files into multiple tables in SQL Server.
I found the following script on the net:
--BULK INSERT MULTIPLE FILES From a Folder
--a table to loop thru filenames
--drop table ALLFILENAMES
CREATE TABLE ALLFILENAMES(WHICHPATH VARCHAR(255),WHICHFILE varchar(255))
--some variables
declare @filename varchar(255),
@path varchar(255),
@sql varchar(8000),
@cmd varchar(1000)
--get the list of files to process:
SET @path = 'C:\Dump\'
SET @cmd = 'dir ' + @path + '*.csv /b'
INSERT INTO ALLFILENAMES(WHICHFILE)
EXEC Master..xp_cmdShell @cmd
UPDATE ALLFILENAMES SET WHICHPATH = @path where WHICHPATH is null
--cursor loop
declare c1 cursor for SELECT WHICHPATH,WHICHFILE FROM ALLFILENAMES where WHICHFILE like '%.csv%'
open c1
fetch next from c1 into @path,@filename
While @@fetch_status <> -1
begin
--bulk insert won't take a variable name, so make a sql and execute it instead:
set @sql = 'BULK INSERT Temp FROM ''' + @path + @filename + ''' '
+ ' WITH (
FIELDTERMINATOR = '','',
ROWTERMINATOR = ''\n'',
FIRSTROW = 2
) '
print @sql
exec (@sql)
fetch next from c1 into @path,@filename
end
close c1
deallocate c1
But the problem is that I cannot use the command 'EXEC Master..xp_cmdShell' because it was disabled by the DBAs for security reasons, and they are not permitting me to use it. Is there any alternative command that I can use instead of 'xp_cmdShell' in the same script?
Also, in the BULK INSERT command in this script (set @sql = 'BULK INSERT Temp FROM ''' + @path + @filename + ''' ') I see only one target table name ('Temp'). How can I specify multiple table names in the BULK INSERT command?
Any help please.
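One way to handle both constraints (no xp_cmdshell and several target tables) is to keep the file-to-table mapping in a table that you fill in yourself, and build one BULK INSERT per row. This is only a rough sketch; FileTableMap and its contents are made-up placeholders for your actual file and table names:
-- Hypothetical mapping of csv files to target tables, maintained by hand
CREATE TABLE FileTableMap (FileName varchar(255), TargetTable sysname);
INSERT INTO FileTableMap VALUES ('customers.csv', 'Customers'), ('orders.csv', 'Orders');
DECLARE @path varchar(255) = 'C:\Dump\';
DECLARE @file varchar(255), @table sysname, @sql nvarchar(max);
DECLARE c2 CURSOR LOCAL FAST_FORWARD FOR
SELECT FileName, TargetTable FROM FileTableMap;
OPEN c2;
FETCH NEXT FROM c2 INTO @file, @table;
WHILE @@FETCH_STATUS = 0
BEGIN
-- build one BULK INSERT per mapping row and execute it
SET @sql = N'BULK INSERT ' + QUOTENAME(@table)
+ N' FROM ''' + @path + @file + N''''
+ N' WITH (FIELDTERMINATOR = '','', ROWTERMINATOR = ''\n'', FIRSTROW = 2)';
EXEC sp_executesql @sql;
FETCH NEXT FROM c2 INTO @file, @table;
END
CLOSE c2;
DEALLOCATE c2;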
It's been a long time since I have had to do this, but this is how I used to do these kinds of things.
DECLARE @intFlag INT
SET @intFlag = 1
WHILE (@intFlag <= 100)
BEGIN
PRINT @intFlag
declare @fullpath1 varchar(1000)
select @fullpath1 = '''\\FTP\' + convert(varchar, getdate() - @intFlag, 112) + '_your_file.csv'''
declare @cmd1 nvarchar(1000)
select @cmd1 = 'bulk insert [dbo].[your_table] from ' + @fullpath1 + ' with (FIELDTERMINATOR = ''\t'', FIRSTROW = 5, ROWTERMINATOR=''0x0a'')'
exec (@cmd1)
SET @intFlag = @intFlag + 1
END
GO
As you can tell, this loops through a bunch of files with dates in the file names. The first part of each file name is in the yyyymmdd format produced by convert(varchar, getdate() - @intFlag, 112).
I'm guessing your files have names that match some specific pattern.
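If your names follow a Month_Year pattern instead of a date, the same looping trick works with month offsets; here is a rough sketch along those lines (the path, file name pattern and target table are placeholders):
DECLARE @i int = 0
DECLARE @d date, @fullpath varchar(1000), @cmd nvarchar(1000)
WHILE (@i < 12)
BEGIN
SET @d = DATEADD(month, -@i, GETDATE())
-- builds e.g. '\\FTP\November_2019_your_file.csv'
SET @fullpath = '''\\FTP\' + DATENAME(month, @d) + '_' + DATENAME(year, @d) + '_your_file.csv'''
SET @cmd = 'bulk insert [dbo].[your_table] from ' + @fullpath + ' with (FIELDTERMINATOR = '','', FIRSTROW = 3, ROWTERMINATOR = ''0x0a'')'
EXEC (@cmd)
SET @i = @i + 1
END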
SQL Server has a tool that does this for you. Go to your SQL Server folder and
open the SQL Server Import and Export Wizard.
Choose Microsoft Excel as the data source.
Select the Excel file and follow the remaining steps.
I'm slowly learning more about PostgreSQL, as we are attempting to move to it from MSSQL Server.
In MSSQL I have the following code:
DECLARE ServiceabilityParameters
CURSOR FORWARD_ONLY READ_ONLY STATIC LOCAL FOR
SELECT WorkbookParameterType.ID,
WorkbookParameterType.Name,
WorkbookParameter.DefaultValue,
WorkbookParameter.CommandText
FROM WorkbookParameter
JOIN WorkbookParameterType ON WorkbookParameterType.ID = WorkbookParameter.WorkbookParameterTypeID
JOIN WorkbookParameterDirectionType ON WorkbookParameterDirectionType.ID = WorkbookParameter.WorkbookParameterDirectionTypeID
AND WorkbookParameterDirectionType.Writable = 1
WHERE WorkbookParameter.WorkbookID = @WorkbookID
OPEN ServiceabilityParameters
FETCH NEXT FROM ServiceabilityParameters INTO @WorkbookParameterTypeID, @WorkbookParameterTypeName, @WorkbookDefaultValue, @WorkbookCommandText
WHILE @@FETCH_STATUS = 0
BEGIN
DECLARE @ActualValue NVARCHAR(256) = NULL
IF @WorkbookCommandText IS NOT NULL
BEGIN
EXEC sp_executesql @statement = @WorkbookCommandText,
@params = N'@ApplicationContainerID INT, @Value NVARCHAR(256) OUTPUT',
@ApplicationContainerID = @ApplicationContainerID,
@Value = @ActualValue OUTPUT
END
IF @ActualValue IS NULL AND @WorkbookDefaultValue IS NOT NULL
BEGIN
SET @ActualValue = @WorkbookDefaultValue
END
INSERT #InputParameters (
ID, Name, Value
) VALUES (
@WorkbookParameterTypeID, @WorkbookParameterTypeName, @ActualValue
)
FETCH NEXT FROM ServiceabilityParameters INTO @WorkbookParameterTypeID, @WorkbookParameterTypeName, @WorkbookDefaultValue, @WorkbookCommandText
END
CLOSE ServiceabilityParameters
DEALLOCATE ServiceabilityParameters
I'm trying to work out how to do the sp_executesql part in a PostgreSQL function. I believe that I can do the rest, but most of the examples that I have found show a simple select with maybe a few variables, whereas I need to execute another function, with parameters, where the function name is text in a table.
Many Thanks.
If you want to execute a function with parameters:
EXECUTE 'SELECT Value FROM ' || v_workbookCommandText || '(ApplicationContainerID :=$1)'
INTO v_actualValue
USING v_applicationContainerID;
If you need to select records from a function, you can use an INOUT refcursor variable:
EXECUTE 'SELECT Value FROM ' || v_workbookCommandText || '(ApplicationContainerID :=$1, refcur:= $2)'
INTO v_actualValue
USING v_applicationContainerID, my_cursor;
I think what you want to do is EXECUTE 'some string', like this:
EXECUTE 'SELECT count(*) FROM mytable WHERE inserted_by = $1 AND inserted <= $2'
INTO c
USING checked_user, checked_date;
Another option is to create and use your own PL/PGSQL functions.
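Putting the pieces together, here is a rough PL/pgSQL sketch of the whole loop. The table and column names are lower-cased guesses based on the T-SQL above, WorkbookParameterDirectionType.Writable is assumed to be a boolean, CommandText is assumed to hold a bare function name, and the #InputParameters table variable is replaced with an assumed ordinary table inputparameters:
CREATE OR REPLACE FUNCTION load_serviceability_parameters(p_workbook_id int, p_application_container_id int)
RETURNS void AS $$
DECLARE
    rec record;
    v_actual_value text;
BEGIN
    FOR rec IN
        SELECT wpt.id, wpt.name, wp.defaultvalue, wp.commandtext
        FROM workbookparameter wp
        JOIN workbookparametertype wpt ON wpt.id = wp.workbookparametertypeid
        JOIN workbookparameterdirectiontype wpdt ON wpdt.id = wp.workbookparameterdirectiontypeid
            AND wpdt.writable
        WHERE wp.workbookid = p_workbook_id
    LOOP
        v_actual_value := NULL;
        IF rec.commandtext IS NOT NULL THEN
            -- commandtext is assumed to hold a bare function name; call it dynamically
            EXECUTE 'SELECT ' || quote_ident(rec.commandtext) || '($1)'
                INTO v_actual_value
                USING p_application_container_id;
        END IF;
        IF v_actual_value IS NULL THEN
            v_actual_value := rec.defaultvalue;
        END IF;
        INSERT INTO inputparameters (id, name, value)
        VALUES (rec.id, rec.name, v_actual_value);
    END LOOP;
END;
$$ LANGUAGE plpgsql;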
I have read about a dozen articles here and I am still stumped with this issue.
I am building a dynamic select statement that will update a view on a monthly schedule.
set ansi_nulls on
go
set quoted_identifier on
go
alter procedure [dbo].[Proc_Name_SP]
as
begin
set nocount on
set quoted_identifier off
declare @dbname varchar(10), @schema_id int, @schema_name varchar(10),
@jacro varchar(10), @rec_cnt int, @tot_rec int
declare @SQL_Main nvarchar(max), @SQL_Final nvarchar(max),
@SQL_schema nvarchar(2000), @SQL_Union nvarchar(max)
declare iteration cursor global static for
-- Begin statement for cursor array
select distinct db, code
from linkedserver.db.schema.Directory
where current_stage = 'live'
order by db
-- End statement for cursor array
-- get total number of cursor iterations to know when to stop
-- "union" statements
select @tot_rec = count(*) from (select distinct db, code
from [linkedserver].db.schema.Directory
where current_stage = 'live') as cur
-- begin loop
open iteration
fetch first from iteration into @dbname, @jacro
while @@fetch_status=0
begin
-- the schema used is not consistent. Because of the linked server it was
-- necessary to get the Schema_ID from the sys.tables and then pull the
-- schema name from sys.schema
set @SQL_schema = 'select @sch_id = schema_id from [linkedserver].'+@dbname+'.sys.tables where name = ''Manuscript'''
execute sp_executesql @SQL_schema, N'@sch_id int OUTPUT', @sch_id = @schema_id output
--print @schema_id
set @SQL_schema ='select @sch_name = name from [linkedserver].'+@dbname+'.sys.schemas where schema_id = '+cast(@schema_id as varchar)+''
execute sp_executesql @SQL_schema, N'@sch_name nvarchar(10) OUTPUT', @sch_name = @schema_name output
--print @schema_name
--building Select statement
set @SQL_Main ='
select jcode.Code as BILLING_ACRO
,s.start_dt as BILLING_DATE
,cmpt_ms_nm as MANUSCRIPT
,isnull(jcode.billing_type, ''reviewed'') as Billing_type
from [linkedserver].'+@dbname+'.'+@schema_name+'.Manuscript as m
join [linkedserver].'+@dbname+'.'+@schema_name+'.Step as s on m.ms_id = s.ms_id and m.ms_rev_no = s.ms_rev_no
join (select j_id, Code, billing_type from [linkedserver].db.schema.Directory where db = '''+@dbname+''') as jcode on jcode.j_id = m.j_id
where jcode.Code = '''+@jacro+'''
and m.ms_rev_no = 0
and s.stage_id = 190
and isnull(cmpt_ms_nm, '''') <> ''''
and s.step_id = (select min(s2.step_id)
from [linkedserver].'+@dbname+'.'+@schema_name+'.Step as s2
where s2.stage_id = 190
and s2.ms_id = m.ms_id
and s2.ms_rev_no = m.ms_rev_no)
'
set @rec_cnt = isnull(@rec_cnt, 0) + 1
if @SQL_Union is null
begin
set @SQL_Union = @SQL_Main
end
else if @tot_rec <> @rec_cnt
begin
set @SQL_Union = @SQL_Union + ' union ' + @SQL_Main
end
else
begin
set @SQL_Union = @SQL_Union + @SQL_Main
end
--print @rec_cnt
fetch next from iteration into @dbname, @jacro --next database
end -- while @@FETCH_STATUS=0
close iteration
deallocate iteration
-- build new view
print len(@SQL_Union)
set @SQL_Final = '
ALTER VIEW [dbo].[View_Name_VW]
AS
'+@SQL_Union+'
'
execute sp_executesql @SQL_Final
--grab string variables to table for troubleshooting
insert into Output_SQL(SQL_Final, SQL_Final_Len, SQL_Union, SQL_Union_Len)
select @SQL_Final, LEN(@SQL_Final), @SQL_Union, LEN(@SQL_Union)
set nocount off
end
go
I have read that others have had problems with this type of truncation and I have tried multiple suggestions, but in the end I am getting capped at 68,274 characters in this code even with nvarchar(max). For troubleshooting, I am saving the contents of the variables and their LEN() to a table, to rule out the SSMS cap on displayed string length.
I have tried cast(@variable as nvarchar(max)) on the right-hand side of the = sign. I have changed the data type lengths (the select being built is not that large on its own; it only becomes large after it has been unioned for each unique customer).
I am open to any suggestions, as I have tried many variations of data type declarations for these variables.
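For what it is worth, the classic cause of dynamic SQL getting silently cut off is a concatenation whose operands are all short literals or non-max variables: the expression is evaluated as nvarchar(4000) before it is assigned to the max variable. That may not be what is happening here, since these variables are already nvarchar(max), but the usual defensive pattern is shown below as a minimal sketch:
DECLARE @SQL_Main nvarchar(max) = N'select 1 as placeholder';
DECLARE @SQL_Union nvarchar(max) = N'';
-- Making the first operand nvarchar(max) promotes the whole expression to max,
-- so shorter literals in the middle cannot pull the result down to nvarchar(4000).
SET @SQL_Union = CAST(N'' AS nvarchar(max)) + @SQL_Union + N' union ' + @SQL_Main;
SELECT LEN(@SQL_Union) AS union_length;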
I have the following code in T-SQL that reads table names from a cursor.
But I have a problem with the scope of the table name variable inside the WITH statement.
I can run this code when I explicitly replace dbo.@syn_name with an actual synonym name like dbo.mysynonym, but when I use the variable name dbo.@syn_name it does not work.
-- drop duplicate records from synonyms
DECLARE @syn_name varchar(50)
DECLARE s_cursor CURSOR FOR
SELECT name
FROM sys.synonyms
WHERE base_object_name LIKE 'xyz%'
OPEN s_cursor;
FETCH NEXT FROM s_cursor INTO @syn_name;
WHILE @@FETCH_STATUS = 0
BEGIN
FETCH NEXT FROM s_cursor INTO @syn_name;
WITH dedupTable AS
(
SELECT
sys_id,
row_number() OVER (PARTITION BY sys_id ORDER BY sys_id) AS nr
FROM
dbo.@syn_name
)
DELETE FROM dedupTable
WHERE nr > 1
END;
CLOSE s_cursor
DEALLOCATE s_cursor
As far as I know, you cannot use variables as table names, so dbo.@syn_name will not work in a FROM clause. Instead, you will have to use dynamic SQL.
Something like:
...
FETCH NEXT FROM s_cursor INTO @syn_name;
DECLARE @sql nvarchar(4000)
SET @sql = N'
WITH dedupTable
AS (
SELECT sys_id, row_number()
OVER ( PARTITION BY sys_id ORDER BY sys_id ) AS nr
FROM dbo.' + @syn_name + '
)
DELETE FROM dedupTable
WHERE nr > 1'
EXEC sp_executesql @sql
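Since the synonym name is concatenated straight into the statement, it may also be worth wrapping it with QUOTENAME, e.g. FROM dbo.' + QUOTENAME(@syn_name) + ', so that names containing spaces or other unusual characters still work.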
When I try to run the following SQL snippet inside a cursor loop,
set @cmd = N'exec sp_rename ' + @test + N',' +
RIGHT(@test,LEN(@test)-3) + '_Pct' + N',''COLUMN'''
I get the following message,
Msg 15248, Level 11, State 1, Procedure sp_rename, Line 213
Either the parameter @objname is ambiguous or the claimed @objtype (COLUMN) is wrong.
What is wrong and how do I fix it? I tried wrapping the column name in brackets [] and double quotes "" like some of the search results suggested.
Edit 1 -
Here is the entire script. How do I pass the table name to the rename procedure? I'm not sure how to do that since the column names are spread across many tables.
BEGIN TRANSACTION
declare @cnt int
declare @test nvarchar(128)
declare @cmd nvarchar(500)
declare Tests cursor for
SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE COLUMN_NAME LIKE 'pct%' AND TABLE_NAME LIKE 'TestData%'
open Tests
fetch next from Tests into @test
while @@fetch_status = 0
BEGIN
set @cmd = N'exec sp_rename ' + @test + N',' + RIGHT(@test,LEN(@test)-3) + '_Pct' + N', column'
print @cmd
EXEC sp_executeSQL @cmd
fetch next from Tests into @test
END
close Tests
deallocate Tests
ROLLBACK TRANSACTION
--COMMIT TRANSACTION
Edit 2 -
The script is designed to rename columns whose names match a pattern, in this case with a "pct" prefix. The columns occur in a variety of tables within the database. All table names are prefixed with "TestData".
Here is a slightly modified version. Changes are noted in code comments.
BEGIN TRANSACTION
declare @cnt int
declare @test nvarchar(128)
-- variable to hold table name
declare @tableName nvarchar(255)
declare @cmd nvarchar(500)
-- local means the cursor name is private to this code
-- fast_forward enables some speed optimizations
declare Tests cursor local fast_forward for
SELECT COLUMN_NAME, TABLE_NAME
FROM INFORMATION_SCHEMA.COLUMNS
WHERE COLUMN_NAME LIKE 'pct%'
AND TABLE_NAME LIKE 'TestData%'
open Tests
-- Instead of fetching twice, I rather set up no-exit loop
while 1 = 1
BEGIN
-- And then fetch
fetch next from Tests into @test, @tableName
-- And then, if no row is fetched, exit the loop
if @@fetch_status <> 0
begin
break
end
-- Quotename is needed if you ever use special characters
-- in table/column names. Spaces, reserved words etc.
-- Other changes add apostrophes at right places.
set @cmd = N'exec sp_rename '''
+ quotename(@tableName)
+ '.'
+ quotename(@test)
+ N''','''
+ RIGHT(@test,LEN(@test)-3)
+ '_Pct'''
+ N', ''column'''
print @cmd
EXEC sp_executeSQL @cmd
END
close Tests
deallocate Tests
ROLLBACK TRANSACTION
--COMMIT TRANSACTION