SQL Server CTE Query - Rows to Single Delimited String Field
I have the following data in SQL Server:
Data creation:
-- First we create some test data.
-- Test table E: one row per episode. RTyp will receive the pipe-delimited
-- aggregate built from UJ. A clustered primary key on Epinum makes the
-- later UPDATE's join on Epinum a seek instead of a scan -- the key
-- concern at the ~2.3M production row count described in the question.
CREATE TABLE E
(
    [Epinum] VARCHAR(9) NOT NULL PRIMARY KEY,
    [RTyp]   VARCHAR(120),
    [Date]   DATETIME
);
GO
-- Seed E with 11 episodes. RTyp starts empty and is populated later from UJ.
-- Note: Epinum '6', '10' and '11' have no matching UJ rows, so the INNER JOIN
-- in the update leaves their RTyp empty.
INSERT INTO E ([Epinum], [RTyp], [Date])
VALUES ('1', '', CONVERT(datetime, '2002-11-26 14:18:00', 20)),
('2', '', CONVERT(datetime, '2002-11-24 15:15:00', 20)),
('3', '', CONVERT(datetime, '2002-12-17 11:12:00', 20)),
('4', '', CONVERT(datetime, '2002-12-09 19:57:00', 20)),
('5', '', CONVERT(datetime, '2002-12-11 06:00:00', 20)),
('6', '', CONVERT(datetime, '2002-12-19 12:31:00', 20)),
('7', '', CONVERT(datetime, '2002-12-15 08:39:00', 20)),
('8', '', CONVERT(datetime, '2002-12-20 08:39:00', 20)),
('9', '', CONVERT(datetime, '2002-12-13 08:39:00', 20)),
('10', '', CONVERT(datetime, '2002-12-16 08:39:00', 20)),
('11', '', CONVERT(datetime, '2002-12-14 08:39:00', 20));
GO
-- Test table UJ: many rows per Epinum; each RTyp is one 3-char code to be
-- concatenated into E.RTyp. A covering index on Epinum lets the per-group
-- aggregation (and the join back to E) seek rather than scan -- important
-- at the ~900k production row count described in the question.
CREATE TABLE UJ
(
    [Epinum] VARCHAR(9) NOT NULL,
    [RTyp]   VARCHAR(3) NOT NULL
);
CREATE NONCLUSTERED INDEX UJ_Epinum_idx ON UJ ([Epinum]) INCLUDE ([RTyp]);
GO
-- Seed UJ: between 1 and 8 codes per Epinum; Epinum '6', '10' and '11'
-- deliberately have no rows, exercising the no-match path of the update.
INSERT INTO UJ ([Epinum], [RTyp])
VALUES ('1', '111'), ('1', '222'), ('1', '333'), ('1', '444'),
('2', '111'),
('3', '111'), ('3', '222'), ('3', '333'),
('4', '111'),
('5', '111'), ('5', '222'), ('5', '333'), ('5', '444'),
('5', '555'), ('5', '666'), ('5', '777'), ('5', '888'),
('7', '111'),
('8', '111'),
('9', '111'), ('9', '222');
GO
T-SQL query:
-- Now build a query that will create the data we want.
-- Build one pipe-delimited RTyp string per Epinum.
-- The original version computed ROW_NUMBER over every UJ row, ran a
-- correlated MAX(rownum) scalar subquery per row just to suppress a
-- trailing delimiter, and then needed SELECT DISTINCT to collapse the
-- duplicated outer rows. Instead: GROUP BY yields exactly one row per
-- Epinum, each value is PREFIXED with '|', and STUFF(..., 1, 1, '')
-- strips the single leading delimiter. The ", TYPE / .value(...)" step
-- is kept so XML-special characters (& < >) are not entitized.
SELECT
    Epinum,
    STUFF(
        (SELECT '|' + RTyp
         FROM UJ
         WHERE Epinum = g.Epinum
         FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)'),
        1, 1, '') AS Piped
FROM UJ AS g
GROUP BY Epinum;
GO
-- Great. Now we update the E table, joining on Epinum.
-- Update E.RTyp with the pipe-delimited aggregate, joining on Epinum.
-- Same STUFF/GROUP BY rewrite as the SELECT above: no ROW_NUMBER, no
-- correlated MAX, no DISTINCT -- the expensive XML work runs once per
-- distinct Epinum instead of once per UJ row (the "XML Reader: 96%"
-- operator called out in the comments). Also, the UPDATE now targets
-- the alias ("UPDATE e") rather than the base table name, since E is
-- aliased in the FROM clause -- the T-SQL-safe form.
UPDATE e
SET e.RTyp = q.Piped
FROM E AS e
INNER JOIN
    (SELECT
         Epinum,
         STUFF(
             (SELECT '|' + RTyp
              FROM UJ
              WHERE Epinum = g.Epinum
              FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)'),
             1, 1, '') AS Piped
     FROM UJ AS g
     GROUP BY Epinum) AS q
    ON q.Epinum = e.Epinum;
GO
This gets all of the RTyp columns for a particular Epinum and then pipe-delimits them and updates RTyp in table E. It works great, but the problem is, table E in production is ~2.3M rows and UJ is ~900k (both having lots more columns) and this query is taking far too long to perform the update.
How can I make this query more efficient?
sql sql-server
add a comment |
I have the following data in SQL Server:
Data creation:
-- First we create some test data.
CREATE TABLE E
(
[Epinum] VARCHAR(9),
[RTyp] VARCHAR(120),
[Date] DATETIME
);
GO
INSERT INTO E ([Epinum], [RTyp], [Date])
VALUES ('1', '', CONVERT(datetime, '2002-11-26 14:18:00', 20)),
('2', '', CONVERT(datetime, '2002-11-24 15:15:00', 20)),
('3', '', CONVERT(datetime, '2002-12-17 11:12:00', 20)),
('4', '', CONVERT(datetime, '2002-12-09 19:57:00', 20)),
('5', '', CONVERT(datetime, '2002-12-11 06:00:00', 20)),
('6', '', CONVERT(datetime, '2002-12-19 12:31:00', 20)),
('7', '', CONVERT(datetime, '2002-12-15 08:39:00', 20)),
('8', '', CONVERT(datetime, '2002-12-20 08:39:00', 20)),
('9', '', CONVERT(datetime, '2002-12-13 08:39:00', 20)),
('10', '', CONVERT(datetime, '2002-12-16 08:39:00', 20)),
('11', '', CONVERT(datetime, '2002-12-14 08:39:00', 20));
GO
CREATE TABLE UJ
(
[Epinum] VARCHAR(9),
[RTyp] VARCHAR(3)
);
GO
INSERT INTO UJ ([Epinum], [RTyp])
VALUES ('1', '111'), ('1', '222'), ('1', '333'), ('1', '444'),
('2', '111'),
('3', '111'), ('3', '222'), ('3', '333'),
('4', '111'),
('5', '111'), ('5', '222'), ('5', '333'), ('5', '444'),
('5', '555'), ('5', '666'), ('5', '777'), ('5', '888'),
('7', '111'),
('8', '111'),
('9', '111'), ('9', '222');
GO
T-SQL query:
-- Now build a query that will create the data we want.
;WITH Tmp AS
(
SELECT
*,
ROW_NUMBER() OVER (PARTITION BY Epinum ORDER BY Epinum) AS rownum
FROM
UJ
)
SELECT DISTINCT
Epinum,
(SELECT
RTyp + CASE
WHEN t.rownum = (SELECT MAX(rownum) FROM Tmp WHERE Epinum = s.Epinum)
THEN ''
ELSE '|'
END
FROM Tmp AS t
WHERE t.Epinum = s.Epinum
FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)') AS Piped
FROM
Tmp AS s;
GO
-- Great. Now we update the E table, joining on Epinum.
;WITH Tmp AS
(
SELECT
*, ROW_NUMBER() OVER (PARTITION BY Epinum ORDER BY Epinum) AS rownum
FROM
UJ
)
UPDATE E
SET e.RTyp = q.Piped
FROM
(SELECT DISTINCT
Epinum,
(SELECT RTyp + CASE
WHEN t.rownum = (SELECT MAX(rownum) FROM Tmp WHERE Epinum = s.Epinum)
THEN ''
ELSE '|'
END
FROM Tmp AS t
WHERE t.Epinum = s.Epinum
FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)') AS Piped
FROM
Tmp AS s) AS q
INNER JOIN
E AS e ON q.Epinum = e.Epinum;
GO
This gets all of the RTyp columns for a particular Epinum and then pipe-delimits them and updates RTyp in table E. It works great, but the problem is, table E in production is ~2.3M rows and UJ is ~900k (both having lots more columns) and this query is taking far too long to perform the update.
How can I make this query more efficient?
sql sql-server
Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF. And are you really saying you are trying to make some of these delimited strings with more than 900k entries? That is a recipe for a performance black hole. Not just shoving the data in there but parsing it later.
– Sean Lange
Nov 15 '18 at 14:42
"are you really saying you are trying to make some of these delimited strings with more than 900k entries?" No, I am just saying that there are 900k rows. The delimited strings have a maximum size of 120 characters (40 × 3 chars).
– MoonKnight
Nov 15 '18 at 14:44
"Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF." I am aware of that, but we have to ready this data to conform to a standard - this field requires pipe-delimited values. Out of my hands.
– MoonKnight
Nov 15 '18 at 14:45
Understand about being handcuffed. Can you post an execution plan? What about the actual table definition and indexes? Without those kinds of details we are guessing.
– Sean Lange
Nov 15 '18 at 14:47
It says that it is the "Table Value Function [XML Reader] Cost: 96%", so I suppose the question becomes: can we create the piped string without doing FOR XML...?
– MoonKnight
Nov 15 '18 at 15:41
add a comment |
I have the following data in SQL Server:
Data creation:
-- First we create some test data.
CREATE TABLE E
(
[Epinum] VARCHAR(9),
[RTyp] VARCHAR(120),
[Date] DATETIME
);
GO
INSERT INTO E ([Epinum], [RTyp], [Date])
VALUES ('1', '', CONVERT(datetime, '2002-11-26 14:18:00', 20)),
('2', '', CONVERT(datetime, '2002-11-24 15:15:00', 20)),
('3', '', CONVERT(datetime, '2002-12-17 11:12:00', 20)),
('4', '', CONVERT(datetime, '2002-12-09 19:57:00', 20)),
('5', '', CONVERT(datetime, '2002-12-11 06:00:00', 20)),
('6', '', CONVERT(datetime, '2002-12-19 12:31:00', 20)),
('7', '', CONVERT(datetime, '2002-12-15 08:39:00', 20)),
('8', '', CONVERT(datetime, '2002-12-20 08:39:00', 20)),
('9', '', CONVERT(datetime, '2002-12-13 08:39:00', 20)),
('10', '', CONVERT(datetime, '2002-12-16 08:39:00', 20)),
('11', '', CONVERT(datetime, '2002-12-14 08:39:00', 20));
GO
CREATE TABLE UJ
(
[Epinum] VARCHAR(9),
[RTyp] VARCHAR(3)
);
GO
INSERT INTO UJ ([Epinum], [RTyp])
VALUES ('1', '111'), ('1', '222'), ('1', '333'), ('1', '444'),
('2', '111'),
('3', '111'), ('3', '222'), ('3', '333'),
('4', '111'),
('5', '111'), ('5', '222'), ('5', '333'), ('5', '444'),
('5', '555'), ('5', '666'), ('5', '777'), ('5', '888'),
('7', '111'),
('8', '111'),
('9', '111'), ('9', '222');
GO
T-SQL query:
-- Now build a query that will create the data we want.
;WITH Tmp AS
(
SELECT
*,
ROW_NUMBER() OVER (PARTITION BY Epinum ORDER BY Epinum) AS rownum
FROM
UJ
)
SELECT DISTINCT
Epinum,
(SELECT
RTyp + CASE
WHEN t.rownum = (SELECT MAX(rownum) FROM Tmp WHERE Epinum = s.Epinum)
THEN ''
ELSE '|'
END
FROM Tmp AS t
WHERE t.Epinum = s.Epinum
FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)') AS Piped
FROM
Tmp AS s;
GO
-- Great. Now we update the E table, joining on Epinum.
;WITH Tmp AS
(
SELECT
*, ROW_NUMBER() OVER (PARTITION BY Epinum ORDER BY Epinum) AS rownum
FROM
UJ
)
UPDATE E
SET e.RTyp = q.Piped
FROM
(SELECT DISTINCT
Epinum,
(SELECT RTyp + CASE
WHEN t.rownum = (SELECT MAX(rownum) FROM Tmp WHERE Epinum = s.Epinum)
THEN ''
ELSE '|'
END
FROM Tmp AS t
WHERE t.Epinum = s.Epinum
FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)') AS Piped
FROM
Tmp AS s) AS q
INNER JOIN
E AS e ON q.Epinum = e.Epinum;
GO
This gets all of the RTyp columns for a particular Epinum and then pipe-delimits them and updates RTyp in table E. It works great, but the problem is, table E in production is ~2.3M rows and UJ is ~900k (both having lots more columns) and this query is taking far too long to perform the update.
How can I make this query more efficient?
sql sql-server
I have the following data in SQL Server:
Data creation:
-- First we create some test data.
CREATE TABLE E
(
[Epinum] VARCHAR(9),
[RTyp] VARCHAR(120),
[Date] DATETIME
);
GO
INSERT INTO E ([Epinum], [RTyp], [Date])
VALUES ('1', '', CONVERT(datetime, '2002-11-26 14:18:00', 20)),
('2', '', CONVERT(datetime, '2002-11-24 15:15:00', 20)),
('3', '', CONVERT(datetime, '2002-12-17 11:12:00', 20)),
('4', '', CONVERT(datetime, '2002-12-09 19:57:00', 20)),
('5', '', CONVERT(datetime, '2002-12-11 06:00:00', 20)),
('6', '', CONVERT(datetime, '2002-12-19 12:31:00', 20)),
('7', '', CONVERT(datetime, '2002-12-15 08:39:00', 20)),
('8', '', CONVERT(datetime, '2002-12-20 08:39:00', 20)),
('9', '', CONVERT(datetime, '2002-12-13 08:39:00', 20)),
('10', '', CONVERT(datetime, '2002-12-16 08:39:00', 20)),
('11', '', CONVERT(datetime, '2002-12-14 08:39:00', 20));
GO
CREATE TABLE UJ
(
[Epinum] VARCHAR(9),
[RTyp] VARCHAR(3)
);
GO
INSERT INTO UJ ([Epinum], [RTyp])
VALUES ('1', '111'), ('1', '222'), ('1', '333'), ('1', '444'),
('2', '111'),
('3', '111'), ('3', '222'), ('3', '333'),
('4', '111'),
('5', '111'), ('5', '222'), ('5', '333'), ('5', '444'),
('5', '555'), ('5', '666'), ('5', '777'), ('5', '888'),
('7', '111'),
('8', '111'),
('9', '111'), ('9', '222');
GO
T-SQL query:
-- Now build a query that will create the data we want.
;WITH Tmp AS
(
SELECT
*,
ROW_NUMBER() OVER (PARTITION BY Epinum ORDER BY Epinum) AS rownum
FROM
UJ
)
SELECT DISTINCT
Epinum,
(SELECT
RTyp + CASE
WHEN t.rownum = (SELECT MAX(rownum) FROM Tmp WHERE Epinum = s.Epinum)
THEN ''
ELSE '|'
END
FROM Tmp AS t
WHERE t.Epinum = s.Epinum
FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)') AS Piped
FROM
Tmp AS s;
GO
-- Great. Now we update the E table, joining on Epinum.
;WITH Tmp AS
(
SELECT
*, ROW_NUMBER() OVER (PARTITION BY Epinum ORDER BY Epinum) AS rownum
FROM
UJ
)
UPDATE E
SET e.RTyp = q.Piped
FROM
(SELECT DISTINCT
Epinum,
(SELECT RTyp + CASE
WHEN t.rownum = (SELECT MAX(rownum) FROM Tmp WHERE Epinum = s.Epinum)
THEN ''
ELSE '|'
END
FROM Tmp AS t
WHERE t.Epinum = s.Epinum
FOR XML PATH(''), TYPE).value('(.)[1]', 'VARCHAR(MAX)') AS Piped
FROM
Tmp AS s) AS q
INNER JOIN
E AS e ON q.Epinum = e.Epinum;
GO
This gets all of the RTyp columns for a particular Epinum and then pipe-delimits them and updates RTyp in table E. It works great, but the problem is, table E in production is ~2.3M rows and UJ is ~900k (both having lots more columns) and this query is taking far too long to perform the update.
How can I make this query more efficient?
sql sql-server
sql sql-server
edited Nov 15 '18 at 16:58
marc_s
581k13011211268
581k13011211268
asked Nov 15 '18 at 14:33
MoonKnightMoonKnight
16.6k26112211
16.6k26112211
Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF. And are you really saying you are trying to make some of these delimited strings with more than 900k entries? That is a recipe for a performance black hole. Not just shoving the data in there but parsing it later.
– Sean Lange
Nov 15 '18 at 14:42
"are you really saying you are trying to make some of these delimited strings with more than 900k entries?" No, I am just saying that there are 900k rows. The delimited strings have a maximum size of 120 characters 40 x 3 chars.
– MoonKnight
Nov 15 '18 at 14:44
"Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF." I am aware of that, but we have ready this data to conform to a standard - this field requires pipe delimited values. Out of my hands.
– MoonKnight
Nov 15 '18 at 14:45
Understand about being handcuffed. Can you post an execution plan? What about the actual table definition and indexes? Without those kinds of details we are guessing.
– Sean Lange
Nov 15 '18 at 14:47
It says that it is the "Table Value Function [XML Reader] Cost: 96%", so I suppose the question becomes, can we create the piped string without doingFOR XML...
?
– MoonKnight
Nov 15 '18 at 15:41
add a comment |
Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF. And are you really saying you are trying to make some of these delimited strings with more than 900k entries? That is a recipe for a performance black hole. Not just shoving the data in there but parsing it later.
– Sean Lange
Nov 15 '18 at 14:42
"are you really saying you are trying to make some of these delimited strings with more than 900k entries?" No, I am just saying that there are 900k rows. The delimited strings have a maximum size of 120 characters 40 x 3 chars.
– MoonKnight
Nov 15 '18 at 14:44
"Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF." I am aware of that, but we have ready this data to conform to a standard - this field requires pipe delimited values. Out of my hands.
– MoonKnight
Nov 15 '18 at 14:45
Understand about being handcuffed. Can you post an execution plan? What about the actual table definition and indexes? Without those kinds of details we are guessing.
– Sean Lange
Nov 15 '18 at 14:47
It says that it is the "Table Value Function [XML Reader] Cost: 96%", so I suppose the question becomes, can we create the piped string without doingFOR XML...
?
– MoonKnight
Nov 15 '18 at 15:41
Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF. And are you really saying you are trying to make some of these delimited strings with more than 900k entries? That is a recipe for a performance black hole. Not just shoving the data in there but parsing it later.
– Sean Lange
Nov 15 '18 at 14:42
Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF. And are you really saying you are trying to make some of these delimited strings with more than 900k entries? That is a recipe for a performance black hole. Not just shoving the data in there but parsing it later.
– Sean Lange
Nov 15 '18 at 14:42
"are you really saying you are trying to make some of these delimited strings with more than 900k entries?" No, I am just saying that there are 900k rows. The delimited strings have a maximum size of 120 characters 40 x 3 chars.
– MoonKnight
Nov 15 '18 at 14:44
"are you really saying you are trying to make some of these delimited strings with more than 900k entries?" No, I am just saying that there are 900k rows. The delimited strings have a maximum size of 120 characters 40 x 3 chars.
– MoonKnight
Nov 15 '18 at 14:44
"Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF." I am aware of that, but we have ready this data to conform to a standard - this field requires pipe delimited values. Out of my hands.
– MoonKnight
Nov 15 '18 at 14:45
"Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF." I am aware of that, but we have ready this data to conform to a standard - this field requires pipe delimited values. Out of my hands.
– MoonKnight
Nov 15 '18 at 14:45
Understand about being handcuffed. Can you post an execution plan? What about the actual table definition and indexes? Without those kinds of details we are guessing.
– Sean Lange
Nov 15 '18 at 14:47
Understand about being handcuffed. Can you post an execution plan? What about the actual table definition and indexes? Without those kinds of details we are guessing.
– Sean Lange
Nov 15 '18 at 14:47
It says that it is the "Table Value Function [XML Reader] Cost: 96%", so I suppose the question becomes, can we create the piped string without doing
FOR XML...
?– MoonKnight
Nov 15 '18 at 15:41
It says that it is the "Table Value Function [XML Reader] Cost: 96%", so I suppose the question becomes, can we create the piped string without doing
FOR XML...
?– MoonKnight
Nov 15 '18 at 15:41
add a comment |
2 Answers
2
active
oldest
votes
-- Accepted-answer rewrite: aggregate once per Epinum via GROUP BY,
-- prefix each RTyp with '|', and STUFF(..., 1, 1, '') strips the
-- leading delimiter -- no ROW_NUMBER or correlated MAX needed.
UPDATE E
SET e.RTyp = q.Piped
FROM (
SELECT Epinum, Piped = STUFF(
(SELECT '|' + RTyp
FROM UJ
WHERE Epinum = t.Epinum
-- NOTE(review): plain FOR XML PATH('') entitizes XML-special characters
-- (& < >). Harmless for these 3-digit codes, but for general text add
-- ", TYPE" and .value('(.)[1]', 'VARCHAR(MAX)') as in the question.
FOR XML PATH ('')), 1, 1, '')
FROM UJ AS t
GROUP BY Epinum) AS q INNER JOIN E AS e
ON q.Epinum = e.Epinum;
GO
add a comment |
If you are on the latest version of SQL Server you could try STRING_AGG
-- SQL Server 2017+ only: STRING_AGG replaces the FOR XML PATH idiom
-- entirely (the OP is on 2016 at best, per the comments below).
SELECT Epinum, STRING_AGG(Rtyp, '|') delimited
FROM uj
GROUP BY Epinum;
This does not run, care to update? Msg 10753, Level 15, State 3, Line 93 The function 'ROW_NUMBER' must have an OVER clause. Thanks for your time.
– MoonKnight
Nov 15 '18 at 17:05
I can't use SQL Server 2017. Sorry, 2016 at best.
– MoonKnight
Nov 15 '18 at 17:10
Corrected the query. Sorry you can't use it.
– PhilS
Nov 16 '18 at 10:19
add a comment |
Your Answer
StackExchange.ifUsing("editor", function () {
StackExchange.using("externalEditor", function () {
StackExchange.using("snippets", function () {
StackExchange.snippets.init();
});
});
}, "code-snippets");
StackExchange.ready(function() {
var channelOptions = {
tags: "".split(" "),
id: "1"
};
initTagRenderer("".split(" "), "".split(" "), channelOptions);
StackExchange.using("externalEditor", function() {
// Have to fire editor after snippets, if snippets enabled
if (StackExchange.settings.snippets.snippetsEnabled) {
StackExchange.using("snippets", function() {
createEditor();
});
}
else {
createEditor();
}
});
function createEditor() {
StackExchange.prepareEditor({
heartbeatType: 'answer',
autoActivateHeartbeat: false,
convertImagesToLinks: true,
noModals: true,
showLowRepImageUploadWarning: true,
reputationToPostImages: 10,
bindNavPrevention: true,
postfix: "",
imageUploader: {
brandingHtml: "Powered by u003ca class="icon-imgur-white" href="https://imgur.com/"u003eu003c/au003e",
contentPolicyHtml: "User contributions licensed under u003ca href="https://creativecommons.org/licenses/by-sa/3.0/"u003ecc by-sa 3.0 with attribution requiredu003c/au003e u003ca href="https://stackoverflow.com/legal/content-policy"u003e(content policy)u003c/au003e",
allowUrls: true
},
onDemand: true,
discardSelector: ".discard-answer"
,immediatelyShowMarkdownHelp:true
});
}
});
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
StackExchange.ready(
function () {
StackExchange.openid.initPostLogin('.new-post-login', 'https%3a%2f%2fstackoverflow.com%2fquestions%2f53321743%2fsql-server-cte-query-rows-to-single-delimited-string-field%23new-answer', 'question_page');
}
);
Post as a guest
Required, but never shown
2 Answers
2
active
oldest
votes
2 Answers
2
active
oldest
votes
active
oldest
votes
active
oldest
votes
UPDATE E
SET e.RTyp = q.Piped
FROM (
SELECT Epinum, Piped = STUFF(
(SELECT '|' + RTyp
FROM UJ
WHERE Epinum = t.Epinum
FOR XML PATH ('')), 1, 1, '')
FROM UJ AS t
GROUP BY Epinum) AS q INNER JOIN E AS e
ON q.Epinum = e.Epinum;
GO
add a comment |
UPDATE E
SET e.RTyp = q.Piped
FROM (
SELECT Epinum, Piped = STUFF(
(SELECT '|' + RTyp
FROM UJ
WHERE Epinum = t.Epinum
FOR XML PATH ('')), 1, 1, '')
FROM UJ AS t
GROUP BY Epinum) AS q INNER JOIN E AS e
ON q.Epinum = e.Epinum;
GO
add a comment |
UPDATE E
SET e.RTyp = q.Piped
FROM (
SELECT Epinum, Piped = STUFF(
(SELECT '|' + RTyp
FROM UJ
WHERE Epinum = t.Epinum
FOR XML PATH ('')), 1, 1, '')
FROM UJ AS t
GROUP BY Epinum) AS q INNER JOIN E AS e
ON q.Epinum = e.Epinum;
GO
UPDATE E
SET e.RTyp = q.Piped
FROM (
SELECT Epinum, Piped = STUFF(
(SELECT '|' + RTyp
FROM UJ
WHERE Epinum = t.Epinum
FOR XML PATH ('')), 1, 1, '')
FROM UJ AS t
GROUP BY Epinum) AS q INNER JOIN E AS e
ON q.Epinum = e.Epinum;
GO
answered Nov 15 '18 at 18:01
MoonKnightMoonKnight
16.6k26112211
16.6k26112211
add a comment |
add a comment |
If you are on the latest version of SQL Server you could try STRING_AGG
SELECT Epinum, STRING_AGG(Rtyp, '|') delimited
FROM uj
GROUP BY Epinum;
This does not run, care to update? Msg 10753, Level 15, State 3, Line 93 The function 'ROW_NUMBER' must have an OVER clause. Thanks for your time.
– MoonKnight
Nov 15 '18 at 17:05
I can't use SQL Server 2017. Sorry, 2016 at best.
– MoonKnight
Nov 15 '18 at 17:10
Corrected the query. Sorry you can't use it.
– PhilS
Nov 16 '18 at 10:19
add a comment |
If you are on the latest version of SQL Server you could try STRING_AGG
SELECT Epinum, STRING_AGG(Rtyp, '|') delimited
FROM uj
GROUP BY Epinum;
This does not run, care to update? Msg 10753, Level 15, State 3, Line 93 The function 'ROW_NUMBER' must have an OVER clause. Thanks for your time.
– MoonKnight
Nov 15 '18 at 17:05
I can't use SQL Server 2017. Sorry, 2016 at best.
– MoonKnight
Nov 15 '18 at 17:10
Corrected the query. Sorry you can't use it.
– PhilS
Nov 16 '18 at 10:19
add a comment |
If you are on the latest version of SQL Server you could try STRING_AGG
SELECT Epinum, STRING_AGG(Rtyp, '|') delimited
FROM uj
GROUP BY Epinum;
If you are on the latest version of SQL Server you could try STRING_AGG
SELECT Epinum, STRING_AGG(Rtyp, '|') delimited
FROM uj
GROUP BY Epinum;
edited Nov 16 '18 at 10:18
answered Nov 15 '18 at 15:55
PhilSPhilS
56425
56425
This does not run, care to update? Msg 10753, Level 15, State 3, Line 93 The function 'ROW_NUMBER' must have an OVER clause. Thanks for your time.
– MoonKnight
Nov 15 '18 at 17:05
I can't use SQL Server 2017. Sorry, 2016 at best.
– MoonKnight
Nov 15 '18 at 17:10
Corrected the query. Sorry you can't use it.
– PhilS
Nov 16 '18 at 10:19
add a comment |
This does not run, care to update? Msg 10753, Level 15, State 3, Line 93 The function 'ROW_NUMBER' must have an OVER clause. Thanks for your time.
– MoonKnight
Nov 15 '18 at 17:05
I can't use SQL Server 2017. Sorry, 2016 at best.
– MoonKnight
Nov 15 '18 at 17:10
Corrected the query. Sorry you can't use it.
– PhilS
Nov 16 '18 at 10:19
This does not run, care to update? Msg 10753, Level 15, State 3, Line 93 The function 'ROW_NUMBER' must have an OVER clause. Thanks for your time.
– MoonKnight
Nov 15 '18 at 17:05
This does not run, care to update? Msg 10753, Level 15, State 3, Line 93 The function 'ROW_NUMBER' must have an OVER clause. Thanks for your time.
– MoonKnight
Nov 15 '18 at 17:05
I can't use SQL Server 2017. Sorry, 2016 at best.
– MoonKnight
Nov 15 '18 at 17:10
I can't use SQL Server 2017. Sorry, 2016 at best.
– MoonKnight
Nov 15 '18 at 17:10
Corrected the query. Sorry you can't use it.
– PhilS
Nov 16 '18 at 10:19
Corrected the query. Sorry you can't use it.
– PhilS
Nov 16 '18 at 10:19
add a comment |
Thanks for contributing an answer to Stack Overflow!
- Please be sure to answer the question. Provide details and share your research!
But avoid …
- Asking for help, clarification, or responding to other answers.
- Making statements based on opinion; back them up with references or personal experience.
To learn more, see our tips on writing great answers.
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
StackExchange.ready(
function () {
StackExchange.openid.initPostLogin('.new-post-login', 'https%3a%2f%2fstackoverflow.com%2fquestions%2f53321743%2fsql-server-cte-query-rows-to-single-delimited-string-field%23new-answer', 'question_page');
}
);
Post as a guest
Required, but never shown
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
Sign up or log in
StackExchange.ready(function () {
StackExchange.helpers.onClickDraftSave('#login-link');
});
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Sign up using Google
Sign up using Facebook
Sign up using Email and Password
Post as a guest
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Required, but never shown
Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF. And are you really saying you are trying to make some of these delimited strings with more than 900k entries? That is a recipe for a performance black hole. Not just shoving the data in there but parsing it later.
– Sean Lange
Nov 15 '18 at 14:42
"are you really saying you are trying to make some of these delimited strings with more than 900k entries?" No, I am just saying that there are 900k rows. The delimited strings have a maximum size of 120 characters 40 x 3 chars.
– MoonKnight
Nov 15 '18 at 14:44
"Why are you trying to stuff delimited data into a production system in the first place? It violates 1NF." I am aware of that, but we have ready this data to conform to a standard - this field requires pipe delimited values. Out of my hands.
– MoonKnight
Nov 15 '18 at 14:45
Understand about being handcuffed. Can you post an execution plan? What about the actual table definition and indexes? Without those kinds of details we are guessing.
– Sean Lange
Nov 15 '18 at 14:47
It says that it is the "Table Value Function [XML Reader] Cost: 96%", so I suppose the question becomes, can we create the piped string without doing
FOR XML...
?– MoonKnight
Nov 15 '18 at 15:41