PeopleSoft Subquery not picking up correct leave plans

I'm attempting to build a report in PeopleSoft's Query Manager that lists current and future employees. I want to be able to find the employees who do not have leave plans, specifically plan types 50 and 52. I thought I was on the right track with the following, but after auditing my report it is bringing in people who HAVE 50 and 52. I think it has to do with the subquery (SQL below).
Here is the SQL that Query Manager generates:
SELECT DISTINCT B.COMPANY, A.EMPLID, A.NAME, B.FULL_PART_TIME, B.REG_TEMP, C.DESCR, D.PLAN_TYPE, (CASE WHEN B.REG_TEMP = 'T' THEN '' WHEN C.DESCR = 'Intern' THEN '' WHEN C.DESCR = 'PenFed Intern' THEN '' ELSE 'Check records/give Leave' END), (CONVERT(CHAR(10),SUBSTRING(CONVERT(CHAR,GETDATE(),121), 1, 10),121)), (CONVERT(CHAR(10),SUBSTRING(CONVERT(CHAR,GETDATE(),121), 1, 10),121)), (CONVERT(CHAR(10),SUBSTRING(CONVERT(CHAR,GETDATE(),121), 1, 10),121))
FROM (PS_PERSONAL_DATA A LEFT OUTER JOIN (PS_LEAVE_PLAN D INNER JOIN PS_EMPLMT_SRCH_QRY D1 ON (D.EMPLID = D1.EMPLID AND D.EMPL_RCD = D1.EMPL_RCD AND D1.OPRID = 'XXXXXXXX' )) ON A.EMPLID = D.EMPLID ), PS_PFC_JOB_VW B, PS_JOBCODE_TBL C, PS_SET_CNTRL_REC C2
WHERE ( C.JOBCODE = B.JOBCODE
AND C2.SETCNTRLVALUE = B.BUSINESS_UNIT
AND C2.RECNAME = 'JOBCODE_TBL'
AND C2.SETID = C.SETID
AND ( A.EMPLID = B.EMPLID
AND ( B.EFFDT =
(SELECT MAX(B_ED.EFFDT) FROM PS_PFC_JOB_VW B_ED
WHERE B.EMPLID = B_ED.EMPLID
AND B.EMPL_RCD = B_ED.EMPL_RCD
AND B_ED.EFFDT <= SUBSTRING(CONVERT(CHAR,GETDATE(),121), 1, 10))
AND B.EFFSEQ =
(SELECT MAX(B_ES.EFFSEQ) FROM PS_PFC_JOB_VW B_ES
WHERE B.EMPLID = B_ES.EMPLID
AND B.EMPL_RCD = B_ES.EMPL_RCD
AND B.EFFDT = B_ES.EFFDT)
OR ( B.EFFDT =
(SELECT MIN(B_ED.EFFDT) FROM PS_PFC_JOB_VW B_ED
WHERE B.EMPLID = B_ED.EMPLID
AND B.EMPL_RCD = B_ED.EMPL_RCD
AND B_ED.EFFDT >= SUBSTRING(CONVERT(CHAR,GETDATE(),121), 1, 10))
AND B.EFFSEQ =
(SELECT MAX(B_ES.EFFSEQ) FROM PS_PFC_JOB_VW B_ES
WHERE B.EMPLID = B_ES.EMPLID
AND B.EMPL_RCD = B_ES.EMPL_RCD
AND B.EFFDT = B_ES.EFFDT)
AND B.ACTION IN ('HIR','REH','REI')))
AND B.EMPL_STATUS IN ('A','L','P')
AND B.PER_ORG = 'EMP'
AND C.EFFDT =
(SELECT MAX(C_ED.EFFDT) FROM PS_JOBCODE_TBL C_ED
WHERE C.SETID = C_ED.SETID
AND C.JOBCODE = C_ED.JOBCODE
AND C_ED.EFFDT <= B.EFFDT)
AND A.EMPLID NOT IN (SELECT E.EMPLID
FROM PS_LEAVE_PLAN E, PS_EMPLMT_SRCH_QRY E1
WHERE E.EMPLID = E1.EMPLID
AND E.EMPL_RCD = E1.EMPL_RCD
AND E1.OPRID = 'XXXXXXXX'
AND ( E.EFFDT =
(SELECT MAX(E_ED.EFFDT) FROM PS_LEAVE_PLAN E_ED
WHERE E.EMPLID = E_ED.EMPLID
AND E.EMPL_RCD = E_ED.EMPL_RCD
AND E.PLAN_TYPE = E_ED.PLAN_TYPE
AND E.BENEFIT_NBR = E_ED.BENEFIT_NBR
AND E_ED.EFFDT <= SUBSTRING(CONVERT(CHAR,GETDATE(),121), 1, 10))
AND ( E.PLAN_TYPE = '50'
OR E.PLAN_TYPE = '52') )) ))
Where am I going wrong here?
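Two things are worth checking in the exclusion subquery. First, NOT IN only removes an EMPLID that the subquery actually returns, and the subquery only looks at leave-plan rows effective on or before today; since the outer query deliberately picks up future-dated hires, a future hire whose plan 50/52 enrollment is also future-dated is never returned and therefore never excluded. Second, the subquery joins PS_EMPLMT_SRCH_QRY, so anyone that security view hides for your OPRID cannot be excluded either. As a diagnostic, a correlated NOT EXISTS that ignores effective dating and row-level security states the intent ("no plan 50 or 52 row at all") directly. This is only a sketch, reusing the records and fields already in the query:
AND NOT EXISTS (SELECT 'X' -- sketch only: deliberately ignores effective dating and the security join
FROM PS_LEAVE_PLAN LP
WHERE LP.EMPLID = A.EMPLID
AND LP.PLAN_TYPE IN ('50','52'))
In Query Manager this can be entered as a criterion with the "does not exist" condition type and a subquery. If this version excludes the right people, tighten it back up (current-dated rows only, the security join, the BENEFIT_NBR correlation) one condition at a time to see which piece lets the unwanted rows through.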

Related

Why do I get error "missing 1 required positional argument"?

Why do I keep getting the error:
missing 1 required positional argument: 'category_id'
when the argument is already passed in the query function in the Backend class? I also don't understand the error about unfilled self, or the missing positional argument self:
missing 1 required positional argument: 'self'
I tried passing self to display_account_types() in the Frontend class but it doesn't take effect. Do I need to pass self to the inner nested functions within a class?
import tkinter
from tkinter import *
from tkinter import ttk
import tkinter.messagebox
import sqlite3

root = Tk()
root.title('Simple Application')
root.config(bg='SlateGrey')
root.geometry("")

# BackEnd
class Backend():
    def __init__(self):
        self.conn = sqlite3.connect('accounting.db')
        self.cur = self.conn.cursor()
        self.conn.execute("""CREATE TABLE IF NOT EXISTS account_type(
            id INTEGER PRIMARY KEY,
            category_type INTEGER NOT NULL,
            category_id TEXT NOT NULL
            )"""),
        self.conn.commit()
        self.conn.close()

    def insert_account_type(self, category_type, category_id):
        self.conn = sqlite3.connect('accounting.db')
        self.cur = self.conn.cursor()
        self.cur.execute("""INSERT INTO account_type(category_id, category_type) VALUES(?,?);""",
                         (self, category_type, category_id,))
        self.conn.commit()
        self.conn.close()

    def view_account_type(self):
        self.conn = sqlite3.connect('accounting.db')
        self.cur = self.conn.cursor()
        self.cur.execute("SELECT * FROM account_type")
        self.rows = self.cur.fetchall()
        self.conn.close()
        return self.rows

# calling the class
tb = Backend()

# Front End
class Frontend():
    def __init__(self, master):
        # Frames
        self.top_frame = LabelFrame(master, bg='SlateGrey', relief=SUNKEN)
        self.top_frame.pack()
        self.bottom_frame = LabelFrame(master, bg='SlateGrey', relief=SUNKEN)
        self.bottom_frame.pack()
        self.right_frame = LabelFrame(self.top_frame, bg='SlateGrey', relief=FLAT,
                                      text='Details Entry', fg='maroon')
        self.right_frame.pack(side=RIGHT, anchor=NE)
        self.side_frame = LabelFrame(self.top_frame, bg='SlateGrey', relief=SUNKEN, text='Menu Buttons', fg='maroon')
        self.side_frame.pack(side=LEFT, anchor=NW)
        self.bot_frame = LabelFrame(self.bottom_frame, bg='Grey', relief=SUNKEN, text='Field View', fg='maroon')
        self.bot_frame.pack(side=BOTTOM, anchor=SW)

        # Side Buttons
        self.btn1 = Button(self.side_frame,
                           text='Main Account Types',
                           bg='SteelBlue4',
                           font=('cambria', 11),
                           anchor=W,
                           fg='white',
                           width=18, height=2,
                           command=lambda: [self.main_account()])
        self.btn1.grid(row=0, column=0, pady=0, sticky=W)

    def main_account(self):
        # variables
        self.category_type = StringVar()
        self.category_id = StringVar()

        # functions
        def add_main_accounts():
            if self.category_type.get() == "" or self.category_id.get() == "":
                tkinter.messagebox.showinfo('All fields are required')
            else:
                Backend.insert_account_type(
                    self.category_type.get(),
                    self.category_id.get(),)  # category type unfilled
                tkinter.messagebox.showinfo('Entry successful')

        def display_account_types(self):
            self.trv.delete(*self.trv.get_children())
            for self.rows in Backend.view_account_type(self):
                self.trv.insert("", END, values=self.rows)

        def get_account_type(e):
            self.selected_row = self.trv.focus()
            self.data = self.trv.item(self.selected_row)
            global row
            row = self.data["values"]
            """Grab items and send them to entry fields"""
            self.category_id.set(row[1])
            self.category_type.set(row[2])

        """=================TreeView==============="""
        # Scrollbars
        ttk.Style().configure("Treeview", background="SlateGrey", foreground="white", fieldbackground="grey")
        scroll_x = Scrollbar(self.bot_frame, orient=HORIZONTAL)
        scroll_x.pack(side=BOTTOM, fill=X)
        scroll_y = Scrollbar(self.bot_frame, orient=VERTICAL)
        scroll_y.pack(side=RIGHT, fill=Y)
        # Treeview columns & setting scrollbars
        self.trv = ttk.Treeview(self.bot_frame, height=3,
                                columns=('id', 'category_id', 'category_type'),
                                xscrollcommand=scroll_x.set, yscrollcommand=scroll_y.set)
        # Treeview style configuration
        ttk.Style().configure("Treeview", background="SlateGrey", foreground="white", fieldbackground="grey")
        # Configure vertical and Horizontal scroll
        scroll_x.config(command=self.trv.xview)
        scroll_y.config(command=self.trv.yview)
        # Treeview Headings/columns
        self.trv.heading('id', text="No.")
        self.trv.heading('category_id', text='Category ID')
        self.trv.heading('category_type', text='Category Type')
        self.trv['show'] = 'headings'
        # Treeview columns width
        self.trv.column('id', width=23)
        self.trv.column('category_id', width=70)
        self.trv.column('category_type', width=100)
        self.trv.pack(fill=BOTH, expand=YES)
        # Binding Treeview with data
        self.trv.bind('<ButtonRelease-1>', get_account_type)

        # Account Types Labels
        self.lbl1 = Label(self.right_frame, text='Category ID', anchor=W,
                          width=10, font=('cambria', 11,), bg='SlateGrey')
        self.lbl1.grid(row=0, column=0, pady=5)
        self.lbl2 = Label(self.right_frame, text='Category Type', anchor=W,
                          width=10, font=('cambria', 11,), bg='SlateGrey')
        self.lbl2.grid(row=1, column=0, pady=5, padx=5)
        self.blank_label = Label(self.right_frame, bg='SlateGrey')
        self.blank_label.grid(row=2, columnspan=2, pady=10)
        # Account Type Entries
        self.entry1 = Entry(self.right_frame, textvariable=self.category_id,
                            font=('cambria', 11,), bg='Grey', width=14)
        self.entry1.grid(row=0, column=1, sticky=W, padx=5)
        self.entry2 = Entry(self.right_frame, textvariable=self.category_type,
                            font=('cambria', 11,), bg='Grey', width=14)
        self.entry2.grid(row=1, column=1, sticky=W, pady=5, padx=5)
        # Buttons
        self.btn_1 = Button(self.right_frame, text='Add', font=('cambria', 12, 'bold'), bg='SlateGrey',
                            activebackground='green', fg='white', width=12, height=2, relief=RIDGE,
                            command=lambda: [add_main_accounts()])
        self.btn_1.grid(row=3, column=0, pady=15)
        self.btn_2 = Button(self.right_frame, text='View', font=('cambria', 12, 'bold'),
                            bg='SlateGrey', command=lambda: [display_account_types()],
                            activebackground='green', fg='white', width=12, height=2, relief=RIDGE)
        self.btn_2.grid(row=3, column=1)

# calling the class
app = Frontend(root)
root.mainloop()
I got an answer to this question:
I just passed the 'self' argument to the inner nested functions of the class, as below, and it worked.
# functions
def add_main_accounts(self):
    if self.category_id.get() == "" or self.category_type.get() == "":
        tkinter.messagebox.showinfo('All fields are required')
    else:
        Backend.insert_account_type(self,
                                    self.category_id.get(),
                                    self.category_type.get())  # category type unfilled
        tkinter.messagebox.showinfo('Entry successful')

def display_account_types(self):
    self.trv.delete(*self.trv.get_children())
    for rows in Backend.view_account_type(self):
        self.trv.insert("", END, values=rows)
    return

def get_account_type(e):
    self.selected_row = self.trv.focus()
    self.data = self.trv.item(self.selected_row)
    global row
    row = self.data["values"]
    """Grab items and send them to entry fields"""
    self.category_id.set(row[1])
    self.category_type.set(row[2])
I think you should remove the self in the display_account_types function, as you did in the previous one.

Capturing Time Difference Between Statuses via DAX

I am working in Power BI and I have a dataset showing candidate movement within hiring requisitions. Via DAX, I would like to add columns that show the time difference between certain statuses, e.g. the result would show "3" down the column for "New to Hire", and so on.
You can do this using three measures. You will need to adjust Table3 to reflect your actual table name. I also assumed that there is a column that designates a unique ID for each employee.
New to Hire:
New to Hire :=
VAR CurID =
MAX ( Table3[ID] )
VAR NewDate =
CALCULATE (
FIRSTDATE ( Table3[Date] ),
FILTER ( ALL ( Table3 ), Table3[ID] = CurID && Table3[Status] = "New" )
)
VAR HireDate =
CALCULATE (
FIRSTDATE ( Table3[Date] ),
FILTER ( ALL ( Table3 ), Table3[ID] = CurID && Table3[Status] = "Hired" )
)
RETURN
DATEDIFF ( NewDate, HireDate, DAY )
Offer to Accept:
Offer to Accept :=
VAR CurID =
MAX ( Table3[ID] )
VAR OfferDate =
CALCULATE (
FIRSTDATE ( Table3[Date] ),
FILTER ( ALL ( Table3 ), Table3[ID] = CurID && Table3[Status] = "Offer Sent" )
)
VAR AcceptDate =
CALCULATE (
FIRSTDATE ( Table3[Date] ),
FILTER ( ALL ( Table3 ), Table3[ID] = CurID && Table3[Status] = "Offer Accepted" )
)
RETURN
DATEDIFF ( OfferDate, AcceptDate, DAY )
Offer to Hire:
Offer to Hire :=
VAR CurID =
MAX ( Table3[ID] )
VAR OfferDate =
CALCULATE (
FIRSTDATE ( Table3[Date] ),
FILTER ( ALL ( Table3 ), Table3[ID] = CurID && Table3[Status] = "Offer Sent" )
)
VAR HireDate =
CALCULATE (
LASTDATE ( Table3[Date] ),
FILTER ( ALL ( Table3 ), Table3[ID] = CurID && Table3[Status] = "Hired" )
)
RETURN
DATEDIFF ( OfferDate, HireDate, DAY )

Why is wpdb->prepare() with a passed variable not working

The following code, which uses $wpdb->prepare(), returns empty results, while the code below it that does not use prepare() returns the correct results. What is wrong?
global $wpdb;
$query = $wpdb->prepare("select subid, firstname, lastname from wpks_members where member_id = %d,".$mid);
$row= $wpdb->get_row($query);
$query = $wpdb->prepare("select t.subid, t.testid, t.test_date, t.puzzle_score,t.clock_score,t.match_score,t.oddone_score,(t.puzzle_score+t.clock_score+t.match_score+t.oddone_score) as tot,
if((t.puzzle_score+t.clock_score+t.match_score+t.oddone_score) < 90, 'Yes', 'No') as refer, u.ufname, u.ulname
from wpks_results t
join wpks_hasi_users u on t.userid = u.userid
where t.member_id = %d,".$mid);
$rows = $wpdb->get_results($query);
//this below works
$row= $wpdb->get_row("select subid, firstname, lastname from wpks_members where member_id = ".$mid);
$query = $wpdb->prepare();
$rows = $wpdb->get_results("select t.subid, t.testid, t.test_date, t.puzzle_score,t.clock_score,t.match_score,t.oddone_score,(t.puzzle_score+t.clock_score+t.match_score+t.oddone_score) as tot,
if((t.puzzle_score+t.clock_score+t.match_score+t.oddone_score) < 90, 'Yes', 'No') as refer, u.ufname, u.ulname
from wpks_results t
join wpks_hasi_users u on t.userid = u.userid
where t.member_id = ".$mid);
It should be:
$query = $wpdb->prepare("select subid, firstname, lastname from wpks_members where member_id = %d", $mid);
The value has to be passed to prepare() as a separate argument rather than concatenated onto the SQL string, and the stray comma after %d has to go. The second query needs the same change: end the string right after %d and pass $mid as the second argument.

Create multiple buttons from a table in Lua (Corona SDK)

I have a table which looks like this:
table =
{
    {
        id = 1,
        name = 'john',
        png = 'john.png',
        descr = "..."
    },
    {
        id = 2,
        name = 'sam',
        png = "sam.png",
        descr = "..."
    }
    ...
}
What function could I use to display each picture like this and make them buttons,
so that when I click on an image I can open that person's info?
This is where I am stuck:
local buttons = display.newGroup()
local xpos = -20
local ypos = 0
local e = -1

function addpicture ()
    for i = 1, #table do
        xpos = (xpos + 100) % 300
        e = e + 1
        ypos = math.modf((e)*1/3) * 100 + 100
        local c = display.newImage( table[i].name, system.TemporaryDirectory, xpos, ypos)
        c:scale( 0.4, 0.4 )
        c.name = table[i].tvname
        buttons:insert(c)
    end
end

function buttons:touch( event )
    if event.phase == "began" then
        print(self, event.id)
    end
end

buttons:addEventListener('touch', buttons)
addpicture()
How can I recognize which image is touched in order to go back to that person's info?
I solved my problem by adding the listener inside the loop, like this:
function addpicture ()
    for i = 1, #table do
        xpos = (xpos + 100) % 300
        e = e + 1
        ypos = math.modf((e)*1/3) * 100 + 100
        local c = display.newImage( table[i].name, system.TemporaryDirectory, xpos, ypos)
        c:scale( 0.4, 0.4 )
        c.name = table[i].tvname
        buttons:insert(c)

        function c:touch( event )
            if event.phase == "began" then
                print(self, event.id)
            end
        end
        c:addEventListener('touch', c)
    end
end

addpicture()

Export data from database

I have a few tables in the database that hold a huge amount of data. My needs are:
1: Query data that has existed for more than one year.
2: Export and archive it to some file.
3: At any point in time, be able to insert that data back into the database.
The data may or may not contain commas, so I am not sure whether I should export it to CSV format.
Which file format should I go for?
What should the file size limitation be here?
This script exports rows from the specified tables as INSERT statements, and it works for any table structure. You just need to copy the result and run it in a SQL document in SSMS:
DECLARE
@TableName SYSNAME
, @ObjectID INT
, @IsImportIdentity BIT = 1
DECLARE [tables] CURSOR READ_ONLY FAST_FORWARD LOCAL FOR
SELECT
'[' + s.name + '].[' + t.name + ']'
, t.[object_id]
FROM (
SELECT DISTINCT
t.[schema_id]
, t.[object_id]
, t.name
FROM sys.objects t WITH (NOWAIT)
JOIN sys.partitions p WITH (NOWAIT) ON p.[object_id] = t.[object_id]
WHERE p.[rows] > 0
AND t.[type] = 'U'
) t
JOIN sys.schemas s WITH (NOWAIT) ON t.[schema_id] = s.[schema_id]
WHERE t.name IN ('<your table name>')
OPEN [tables]
FETCH NEXT FROM [tables] INTO
@TableName
, @ObjectID
DECLARE
@SQLInsert NVARCHAR(MAX)
, @SQLColumns NVARCHAR(MAX)
, @SQLTinyColumns NVARCHAR(MAX)
WHILE @@FETCH_STATUS = 0 BEGIN
SELECT
@SQLInsert = ''
, @SQLColumns = ''
, @SQLTinyColumns = ''
;WITH cols AS
(
SELECT
c.name
, datetype = t.name
, c.column_id
FROM sys.columns c WITH (NOWAIT)
JOIN sys.types t WITH (NOWAIT) ON c.system_type_id = t.system_type_id AND c.user_type_id = t.user_type_id
WHERE c.[object_id] = @ObjectID
AND (c.is_identity = 0 OR @IsImportIdentity = 1)
AND c.is_computed = 0
AND t.name NOT IN ('xml', 'geography', 'geometry', 'hierarchyid')
)
SELECT
@SQLInsert = 'INSERT INTO ' + @TableName + ' (' + STUFF((
SELECT ', [' + c.name + ']'
FROM cols c
ORDER BY c.column_id
FOR XML PATH, TYPE, ROOT).value('.', 'NVARCHAR(MAX)'), 1, 2, '') + ')'
, @SQLTinyColumns = STUFF((
SELECT ', ' + c.name
FROM cols c
ORDER BY c.column_id
FOR XML PATH, TYPE, ROOT).value('.', 'NVARCHAR(MAX)'), 1, 2, '')
, @SQLColumns = STUFF((SELECT CHAR(13) +
CASE
WHEN c.datetype = 'uniqueidentifier'
THEN ' + '', '' + ISNULL('''''''' + CAST([' + c.name + '] AS VARCHAR(MAX)) + '''''''', ''NULL'')'
WHEN c.datetype IN ('nvarchar', 'varchar', 'nchar', 'char', 'varbinary', 'binary')
THEN ' + '', '' + ISNULL('''''''' + CAST(REPLACE([' + c.name + '], '''''''', '''''''''''' ) AS NVARCHAR(MAX)) + '''''''', ''NULL'')'
WHEN c.datetype = 'datetime'
THEN ' + '', '' + ISNULL('''''''' + CONVERT(VARCHAR, [' + c.name + '], 120) + '''''''', ''NULL'')'
ELSE
' + '', '' + ISNULL(CAST([' + c.name + '] AS NVARCHAR(MAX)), ''NULL'')'
END
FROM cols c
ORDER BY c.column_id
FOR XML PATH, TYPE, ROOT).value('.', 'NVARCHAR(MAX)'), 1, 10, 'CHAR(13) + '', ('' +')
DECLARE @SQL NVARCHAR(MAX) = '
SET NOCOUNT ON;
DECLARE
@SQL NVARCHAR(MAX) = ''''
, @x INT = 1
, @count INT = (SELECT COUNT(1) FROM ' + @TableName + ')
IF EXISTS(
SELECT 1
FROM tempdb.dbo.sysobjects
WHERE ID = OBJECT_ID(''tempdb..#import'')
)
DROP TABLE #import;
SELECT ' + @SQLTinyColumns + ', ''RowNumber'' = ROW_NUMBER() OVER (ORDER BY ' + @SQLTinyColumns + ')
INTO #import
FROM ' + @TableName + '
WHILE @x < @count BEGIN
SELECT @SQL = ''VALUES '' + STUFF((
SELECT ' + @SQLColumns + ' + '')''' + '
FROM #import
WHERE RowNumber BETWEEN @x AND @x + 9
FOR XML PATH, TYPE, ROOT).value(''.'', ''NVARCHAR(MAX)''), 1, 2, CHAR(13) + '' '') + '';''
PRINT(''' + @SQLInsert + ''')
PRINT(@SQL)
SELECT @x = @x + 10
END'
EXEC sys.sp_executesql @SQL
FETCH NEXT FROM [tables] INTO
@TableName
, @ObjectID
END
CLOSE [tables]
DEALLOCATE [tables]
In the output you get something like this (AdventureWorks.Person.Address):
INSERT INTO [Person].[Address] ([AddressID], [AddressLine1], [AddressLine2], [City], [StateProvinceID], [PostalCode], [rowguid], [ModifiedDate])
VALUES
(1, '1970 Napa Ct.', NULL, 'Bothell', 79, '98011', '9AADCB0D-36CF-483F-84D8-585C2D4EC6E9', '2002-01-04 00:00:00')
, (2, '9833 Mt. Dias Blv.', NULL, 'Bothell', 79, '98011', '32A54B9E-E034-4BFB-B573-A71CDE60D8C0', '2003-01-01 00:00:00')
, (3, '7484 Roundtree Drive', NULL, 'Bothell', 79, '98011', '4C506923-6D1B-452C-A07C-BAA6F5B142A4', '2007-04-08 00:00:00')
, (4, '9539 Glenside Dr', NULL, 'Bothell', 79, '98011', 'E5946C78-4BCC-477F-9FA1-CC09DE16A880', '2003-03-07 00:00:00')
, (5, '1226 Shoe St.', NULL, 'Bothell', 79, '98011', 'FBAFF937-4A97-4AF0-81FD-B849900E9BB0', '2003-01-20 00:00:00')
, (6, '1399 Firestone Drive', NULL, 'Bothell', 79, '98011', 'FEBF8191-9804-44C8-877A-33FDE94F0075', '2003-03-17 00:00:00')
, (7, '5672 Hale Dr.', NULL, 'Bothell', 79, '98011', '0175A174-6C34-4D41-B3C1-4419CD6A0446', '2004-01-12 00:00:00')
, (8, '6387 Scenic Avenue', NULL, 'Bothell', 79, '98011', '3715E813-4DCA-49E0-8F1C-31857D21F269', '2003-01-18 00:00:00')
, (9, '8713 Yosemite Ct.', NULL, 'Bothell', 79, '98011', '268AF621-76D7-4C78-9441-144FD139821A', '2006-07-01 00:00:00')
, (10, '250 Race Court', NULL, 'Bothell', 79, '98011', '0B6B739D-8EB6-4378-8D55-FE196AF34C04', '2003-01-03 00:00:00');
UPDATE:
And this script exports rows from the specified tables in CSV format to the output window, again for any table structure.
DECLARE
@TableName SYSNAME
, @ObjectID INT
DECLARE [tables] CURSOR READ_ONLY FAST_FORWARD LOCAL FOR
SELECT
'[' + s.name + '].[' + t.name + ']'
, t.[object_id]
FROM (
SELECT DISTINCT
t.[schema_id]
, t.[object_id]
, t.name
FROM sys.objects t WITH (NOWAIT)
JOIN sys.partitions p WITH (NOWAIT) ON p.[object_id] = t.[object_id]
WHERE p.[rows] > 0
AND t.[type] = 'U'
) t
JOIN sys.schemas s WITH (NOWAIT) ON t.[schema_id] = s.[schema_id]
WHERE t.name IN ('<your table name>')
OPEN [tables]
FETCH NEXT FROM [tables] INTO
@TableName
, @ObjectID
DECLARE
@SQLInsert NVARCHAR(MAX)
, @SQLColumns NVARCHAR(MAX)
, @SQLTinyColumns NVARCHAR(MAX)
WHILE @@FETCH_STATUS = 0 BEGIN
SELECT
@SQLInsert = ''
, @SQLColumns = ''
, @SQLTinyColumns = ''
;WITH cols AS
(
SELECT
c.name
, datetype = t.name
, c.column_id
FROM sys.columns c WITH (NOWAIT)
JOIN sys.types t WITH (NOWAIT) ON c.system_type_id = t.system_type_id AND c.user_type_id = t.user_type_id
WHERE c.[object_id] = @ObjectID
AND c.is_computed = 0
AND t.name NOT IN ('xml', 'geography', 'geometry', 'hierarchyid')
)
SELECT
@SQLTinyColumns = STUFF((
SELECT ', [' + c.name + ']'
FROM cols c
ORDER BY c.column_id
FOR XML PATH, TYPE, ROOT).value('.', 'NVARCHAR(MAX)'), 1, 2, '')
, @SQLColumns = STUFF((SELECT CHAR(13) +
CASE
WHEN c.datetype = 'uniqueidentifier'
THEN ' + '';'' + ISNULL('''' + CAST([' + c.name + '] AS VARCHAR(MAX)) + '''', ''NULL'')'
WHEN c.datetype IN ('nvarchar', 'varchar', 'nchar', 'char', 'varbinary', 'binary')
THEN ' + '';'' + ISNULL('''' + CAST(REPLACE([' + c.name + '], '''', '''''''') AS NVARCHAR(MAX)) + '''', ''NULL'')'
WHEN c.datetype = 'datetime'
THEN ' + '';'' + ISNULL('''' + CONVERT(VARCHAR, [' + c.name + '], 120) + '''', ''NULL'')'
ELSE
' + '';'' + ISNULL(CAST([' + c.name + '] AS NVARCHAR(MAX)), ''NULL'')'
END
FROM cols c
ORDER BY c.column_id
FOR XML PATH, TYPE, ROOT).value('.', 'NVARCHAR(MAX)'), 1, 10, 'CHAR(13) + '''' +')
DECLARE @SQL NVARCHAR(MAX) = '
SET NOCOUNT ON;
DECLARE
@SQL NVARCHAR(MAX) = ''''
, @x INT = 1
, @count INT = (SELECT COUNT(1) FROM ' + @TableName + ')
IF EXISTS(
SELECT 1
FROM tempdb.dbo.sysobjects
WHERE ID = OBJECT_ID(''tempdb..#import'')
)
DROP TABLE #import;
SELECT ' + @SQLTinyColumns + ', ''RowNumber'' = ROW_NUMBER() OVER (ORDER BY ' + @SQLTinyColumns + ')
INTO #import
FROM ' + @TableName + '
WHILE @x < @count BEGIN
SELECT @SQL = STUFF((
SELECT ' + @SQLColumns + ' + ''''' + '
FROM #import
WHERE RowNumber BETWEEN @x AND @x + 9
FOR XML PATH, TYPE, ROOT).value(''.'', ''NVARCHAR(MAX)''), 1, 1, '''')
PRINT(@SQL)
SELECT @x = @x + 10
END'
EXEC sys.sp_executesql @SQL
FETCH NEXT FROM [tables] INTO
@TableName
, @ObjectID
END
CLOSE [tables]
DEALLOCATE [tables]
In the output you get something like this (AdventureWorks.Person.Person):
1;EM;0;NULL;Ken;J;Sánchez;NULL;0;92C4279F-1207-48A3-8448-4636514EB7E2;2003-02-08 00:00:00
2;EM;0;NULL;Terri;Lee;Duffy;NULL;1;D8763459-8AA8-47CC-AFF7-C9079AF79033;2002-02-24 00:00:00
3;EM;0;NULL;Roberto;NULL;Tamburello;NULL;0;E1A2555E-0828-434B-A33B-6F38136A37DE;2001-12-05 00:00:00
4;EM;0;NULL;Rob;NULL;Walters;NULL;0;F2D7CE06-38B3-4357-805B-F4B6B71C01FF;2001-12-29 00:00:00
5;EM;0;Ms.;Gail;A;Erickson;NULL;0;F3A3F6B4-AE3B-430C-A754-9F2231BA6FEF;2002-01-30 00:00:00
6;EM;0;Mr.;Jossef;H;Goldberg;NULL;0;0DEA28FD-EFFE-482A-AFD3-B7E8F199D56F;2002-02-17 00:00:00
Try using the bcp command line utility, which is very efficient at handling import/export for large data sets:
bcp "select * from [YourTable]" queryout data.dat -n -S YourServer -d "YourDatabase" -T
-T means trusted authentication. -n means native format, so you don't need to worry about data types, commas, etc. However, this does mean you can't view the data in an editor; it's only usable for loading back into SQL Server. You can use -c instead if you want a plain-text character format (add -t, for comma-separated fields).
To import back in:
bcp "[YourTable]" in data.dat -n -S YourServer -d "YourDatabase" -T
