question

maurizioverdirame-1554 asked:

PivotTable with column dates

Good morning everyone. I need help pulling the data out of a join of tables so that it gives the result shown in the example below.

drop table if exists #Test
CREATE TABLE #Test(
[id] [int] NULL,
[data] [date] NULL,
[field1] [float] NULL,
[field2] [float] NULL,
[field3] [float] NULL,
[field4] [float] NULL
) ON [PRIMARY]
GO


INSERT INTO #Test
(id, data, field1, field2, field3, field4)
VALUES (1, CONVERT(DATETIME, '1955-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(1, CONVERT(DATETIME, '2022-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(2, CONVERT(DATETIME, '2022-02-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(3, CONVERT(DATETIME, '2022-03-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(4, CONVERT(DATETIME, '2022-04-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(5, CONVERT(DATETIME, '2022-05-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(6, CONVERT(DATETIME, '2022-06-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(7, CONVERT(DATETIME, '2022-07-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
,(8, CONVERT(DATETIME, '2022-08-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)


typical result:
select * from #test

id  data        field1  field2  field3  field4
1   2022-01-16  2,3     3,2     4,5     6,8
2   2022-02-16  2,3     3,2     4,5     6,8
3   2022-03-16  2,3     3,2     4,5     6,8
4   2022-04-16  2,3     3,2     4,5     6,8
5   2022-05-16  2,3     3,2     4,5     6,8
6   2022-06-16  2,3     3,2     4,5     6,8
7   2022-07-16  2,3     3,2     4,5     6,8
8   2022-08-16  2,3     3,2     4,5     6,8



I would like (I desperately need it!) a table with the field names in the first column and the dates as the headers of the other columns, as in the following result.

field   2022-01-16  2022-02-16  2022-03-16  2022-04-16  2022-05-16  2022-06-16  2022-07-16  2022-08-16
field1  2,3         2,3         2,3         2,3         2,3         2,3         2,3         2,3
field2  3,2         3,2         3,2         3,2         3,2         3,2         3,2         3,2
field3  4,5         4,5         4,5         4,5         4,5         4,5         4,5         4,5
field4  6,8         6,8         6,8         6,8         6,8         6,8         6,8         6,8
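
To make the target shape concrete: with a fixed, known list of dates, the reshape can be written by hand with CROSS APPLY plus conditional aggregation (a sketch with the dates hardcoded; the answers below build the date list dynamically):

 Select col as field,
        Max(CASE WHEN data = '2022-01-16' THEN val END) as [2022-01-16],
        Max(CASE WHEN data = '2022-02-16' THEN val END) as [2022-02-16]
        -- ... one Max(CASE ...) per remaining date ...
 FROM #Test
 CROSS APPLY (Values ('field1', field1), ('field2', field2),
                     ('field3', field3), ('field4', field4)) d(col, val)
 GROUP BY col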

Thanks to anyone who can help.

sql-server-transact-sql

JingyangLi answered:
 drop table if exists  Test
 CREATE TABLE  Test(
 [id] [int] NULL,
 [data] [date] NULL,
 [field1] [float] NULL,
 [field2] [float] NULL,
 [field3] [float] NULL,
 [field4] [float] NULL
 )  
 GO
    
    
 INSERT INTO Test
 (id, data, field1, field2, field3, field4)
 VALUES (1, CONVERT(DATETIME, '1955-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(1, CONVERT(DATETIME, '2022-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(2, CONVERT(DATETIME, '2022-02-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(3, CONVERT(DATETIME, '2022-03-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(4, CONVERT(DATETIME, '2022-04-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(5, CONVERT(DATETIME, '2022-05-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(6, CONVERT(DATETIME, '2022-06-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(7, CONVERT(DATETIME, '2022-07-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(8, CONVERT(DATETIME, '2022-08-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
    
    
    
 -- Variables for the two dynamic batches and the generated column lists
 Declare @sql1 as NVarchar(4000)
 Declare @sql2 as NVarchar(4000)
 Declare @Cols1 as NVarchar(4000)
 Declare @Cols2 as NVarchar(4000)

 Set @Cols1=null

 -- Build the VALUES list for CROSS APPLY: ('field1',[field1]), ('field2',[field2]), ...
 Select @Cols1 = COALESCE(@Cols1 + ', ', '') + '('+QUOTENAME(COLUMN_NAME,'''')+',' + QUOTENAME(COLUMN_NAME)+')'
 FROM [INFORMATION_SCHEMA].[COLUMNS]
 WHERE TABLE_NAME='Test' and COLUMN_NAME<>'data' and COLUMN_NAME<>'id'

 -- First batch: unpivot the field columns, then concatenate one
 -- Max(CASE WHEN data=... THEN val END) expression per distinct date into @Cols2.
 -- Note that id<>1 skips both id = 1 rows (1955-01-16 and 2022-01-16).
 Select @sql1=';with mycte as (Select
 data, col,val
 FROM Test
 CROSS APPLY (Values ' + @Cols1 + ' )  d(col,val)
 where id<>1
 )
 SELECT @Cols2 = STUFF( (SELECT '','' + ''Max(CASE WHEN data='' + quotename(data,'''''''') + '' THEN val else null end ) as '' + quotename(data) + char(13)+char(10)
                              FROM mycte
                              Group by data
                              order by data
   FOR XML PATH(''''), TYPE).value(''.'', ''varchar(max)''), 1, 1, ''''); '

 EXEC sp_executesql @sql1, N'@Cols2 NVARCHAR(4000) output', @Cols2 output

 -- Second batch: unpivot again and aggregate with the generated expressions,
 -- giving one row per field name and one column per date.
 Set @sql2 = N';with mycte as (Select data, col,val
 FROM Test t
 CROSS APPLY (Values ' + @Cols1 + ' )  d(col,val) )
 Select col, ' + @Cols2 + ' from mycte Group by col';

 EXEC sp_executesql @sql2

 drop table Test
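
To make the two-step pattern concrete, here is roughly what the generated fragments contain for the sample table (an abbreviated sketch of the intermediate strings):

 -- @Cols1, built from INFORMATION_SCHEMA.COLUMNS:
 --   ('field1',[field1]), ('field2',[field2]), ('field3',[field3]), ('field4',[field4])
 -- @Cols2, returned by the first sp_executesql call (dates 2022-02-16 .. 2022-08-16,
 -- because id<>1 filtered out the two id = 1 rows):
 --   Max(CASE WHEN data='2022-02-16' THEN val else null end ) as [2022-02-16]
 --   ,Max(CASE WHEN data='2022-03-16' THEN val else null end ) as [2022-03-16]
 --   ...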

maurizioverdirame-1554 commented: Dear JingyangLi, I tried your solution on the actual data; thank you, it solves my question. The only thing I do not understand is the behavior of the date: in the example I posted the date field is date, but in production it is datetime, and in the results it comes out as "May 2022 05 12:00AM" instead of "2022-05-05". Can you suggest a solution? Thanks for the answer.
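
A likely cause: when [data] is datetime, the string concatenation inside the dynamic SQL converts it with SQL Server's default style ('Mon dd yyyy hh:miAM'). Converting to a yyyy-MM-dd string first keeps the headers stable (a sketch; the poster's actual fix may differ):

 -- Style 23 renders a datetime as yyyy-MM-dd; using it for both the header
 -- and the CASE comparison keeps the two in sync:
 SELECT CONVERT(char(10), CAST('2022-05-05 00:00:00' AS datetime), 23);  -- 2022-05-05
 -- In the dynamic script, replace the raw references to data with
 -- CONVERT(char(10), data, 23).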


maurizioverdirame-1554 commented: I solved it for now by copying the date field into the test table with CONVERT, but let me ask: how can I get a result suited to my locale (Italian), where the date format is dd/MM/yyyy? Thanks for the attention.
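
Two common ways to render a date as dd/MM/yyyy in T-SQL (a generic sketch; the answers below take similar approaches):

 SELECT CONVERT(char(10), CAST('2022-05-05' AS date), 103);  -- 05/05/2022 (British/French style)
 SELECT FORMAT(CAST('2022-05-05' AS date), 'dd/MM/yyyy');    -- 05/05/2022 (SQL Server 2012+)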


JingyangLi commented: The query above reads the column names from the permanent table:

Select @Cols1 = COALESCE(@Cols1 + ', ', '') + '('+QUOTENAME(COLUMN_NAME,'''')+',' + QUOTENAME(COLUMN_NAME)+')'
FROM [INFORMATION_SCHEMA].[COLUMNS]
WHERE TABLE_NAME='Test' and COLUMN_NAME<>'data' and COLUMN_NAME<>'id'

For a temp table, use the following instead (and replace the table name inside the code with the temp table name):

Select @Cols1 = COALESCE(@Cols1 + ', ', '') + '('+QUOTENAME(COLUMN_NAME,'''')+',' + QUOTENAME(COLUMN_NAME)+')'
FROM tempdb.INFORMATION_SCHEMA.COLUMNS c
WHERE TABLE_NAME LIKE '#Test%'
and COLUMN_NAME<>'data' and COLUMN_NAME<>'id'
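
Note that TABLE_NAME LIKE '#Test%' can also match a same-named temp table from another session, because SQL Server suffixes temp-table names in tempdb. A more precise variant (a sketch) resolves the current session's object id first:

 Select @Cols1 = COALESCE(@Cols1 + ', ', '') + '('+QUOTENAME(c.name,'''')+',' + QUOTENAME(c.name)+')'
 FROM tempdb.sys.columns c
 WHERE c.object_id = OBJECT_ID('tempdb..#Test')
 and c.name<>'data' and c.name<>'id'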

NaomiNNN answered:

Try:

 drop table IF EXISTS #test
 CREATE TABLE #Test(
 [id] [int] NULL,
 [data] [date] NULL,
 [field1] [float] NULL,
 [field2] [float] NULL,
 [field3] [float] NULL,
 [field4] [float] NULL
 ) ON [PRIMARY]
 GO
    
    
 INSERT INTO #Test
 (id, data, field1, field2, field3, field4)
 VALUES (1, CONVERT(DATETIME, '1955-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(1, CONVERT(DATETIME, '2022-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(2, CONVERT(DATETIME, '2022-02-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(3, CONVERT(DATETIME, '2022-03-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(4, CONVERT(DATETIME, '2022-04-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(5, CONVERT(DATETIME, '2022-05-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(6, CONVERT(DATETIME, '2022-06-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(7, CONVERT(DATETIME, '2022-07-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(8, CONVERT(DATETIME, '2022-08-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
    
 DECLARE @SQL NVARCHAR(MAX), @cols NVARCHAR(MAX)

 -- @cols collects the dates as quoted column names: [1955-01-16],[2022-01-16],...
 SELECT @cols = STRING_AGG(QUOTENAME(CONVERT(VARCHAR(10), data, 23)), ',') FROM #Test;

 -- Unpivot the four field columns with CROSS APPLY, then PIVOT on the date list
 SET @sql = ';WITH cte AS (SELECT Id, [data], FieldName, fieldValue
 FROM #Test t CROSS APPLY (VALUES (''field1'', field1), (''field2'', field2), (''field3'', field3), (''field4'', field4)) F (fieldName, fieldValue))

 SELECT * FROM (select [Data], FieldName, FieldValue from cte) X PIVOT (max(fieldValue) for [data] in (' + @cols + ')) pvt'

 -- The generated statement, expanded for the sample data:
 --;WITH cte AS (SELECT Id, [data], FieldName, fieldValue
 --FROM #Test t CROSS APPLY (VALUES ('field1', field1), ('field2', field2), ('field3', field3), ('field4', field4)) F (fieldName, fieldValue))

 --SELECT * FROM (select [Data], FieldName, FieldValue from cte) X PIVOT (max(fieldValue) for [data] in ([1955-01-16],[2022-01-16],[2022-02-16],[2022-03-16],[2022-04-16],[2022-05-16],[2022-06-16],[2022-07-16],[2022-08-16])) pvt

 execute (@SQL);
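
One general debugging tip for this kind of dynamic SQL (not part of the original answer): print the statement before executing it, so quoting or truncation problems become visible, e.g. immediately before the execute:

 PRINT @SQL;   -- inspect the generated pivot statement before running it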

maurizioverdirame-1554 commented: Thanks, the information was very useful; it solved my other problem of sorting dates.

JingyangLi answered:
 drop table if exists  #Test
 CREATE TABLE  #Test(
 [id] [int] NULL,
 [data] varchar(10) NULL,
 [field1] [float] NULL,
 [field2] [float] NULL,
 [field3] [float] NULL,
 [field4] [float] NULL
 )  
 GO
    
    
 INSERT INTO #Test
 (id, data, field1, field2, field3, field4)
 VALUES
  (1, '05-11-2016', 2.3, 3.2, 4.5, 6.8)
 ,(2, '07-05-2016', 2.3, 3.2, 4.5, 6.8)
 ,(3, '10-09-2016', 2.3, 3.2, 4.5, 6.8)
 ,(4, '14-01-2017', 2.3, 3.2, 4.5, 6.8)
 ,(5,'16-07-2016', 2.3, 3.2, 4.5, 6.8)
 ,(6, '03-12-2016', 2.3, 3.2, 4.5, 6.8)
     
  --SELECT * FROM tempdb.INFORMATION_SCHEMA.COLUMNS  WHERE  TABLE_NAME LIKE '#Test%'
    
    
 Declare @sql1 as NVarchar(4000)
 Declare @sql2 as NVarchar(4000)
 Declare @Cols1 as NVarchar(4000)
 Declare @Cols2 as NVarchar(4000)

 Set @Cols1=null

 -- Build the CROSS APPLY list from the temp table's metadata in tempdb
 Select @Cols1 = COALESCE(@Cols1 + ', ', '') + '('+QUOTENAME(COLUMN_NAME,'''')+',' + QUOTENAME(COLUMN_NAME)+')'
 FROM tempdb.INFORMATION_SCHEMA.COLUMNS c
 WHERE TABLE_NAME LIKE '#Test%'
 and COLUMN_NAME<>'data' and COLUMN_NAME<>'id'

 -- First batch: build one Max(CASE ...) expression per date, ordered
 -- chronologically by parsing the dd-MM-yyyy strings with style 103.
 -- (id<>1 drops the id = 1 row, so '05-11-2016' gets no column.)
 Select @sql1=';with mycte as (Select
 data, col,val
 FROM #Test
 CROSS APPLY (Values ' + @Cols1 + ' )  d(col,val)
 where id<>1
 )
 SELECT @Cols2 = STUFF( (SELECT '','' + ''Max(CASE WHEN data='' + quotename(data,'''''''') + '' THEN val else null end ) as '' + quotename(data) + char(13)+char(10)
                              FROM mycte
                              Group by data
                              Order by try_convert(date, data, 103)
   FOR XML PATH(''''), TYPE).value(''.'', ''varchar(max)''), 1, 1, ''''); '

 EXEC sp_executesql @sql1, N'@Cols2 NVARCHAR(4000) output', @Cols2 output

 -- Second batch: unpivot and aggregate into one row per field name
 Set @sql2 = N';with mycte as (Select data, col,val
 FROM #Test t
 CROSS APPLY (Values ' + @Cols1 + ' )  d(col,val) )
 Select col, ' + @Cols2 + ' from mycte Group by col';

 EXEC sp_executesql @sql2

 drop table #Test
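
For reference, style 103 is the dd/mm/yyyy (British/French) style, which is why it parses these day-first strings correctly (a quick check):

 SELECT TRY_CONVERT(date, '14-01-2017', 103);  -- 2017-01-14: day first, so January 14th
 SELECT TRY_CONVERT(date, '14-01-2017', 101);  -- NULL: US style 101 expects mm-dd-yyyy, month 14 is invalid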



maurizioverdirame-1554 commented: Thanks, the information was very useful; it solved my problem of sorting dates.

NaomiNNN answered:

Try this solution to sort dates correctly:

 drop table IF EXISTS #test
 CREATE TABLE #Test(
 [id] [int] NULL,
 [data] [date] NULL,
 [field1] [float] NULL,
 [field2] [float] NULL,
 [field3] [float] NULL,
 [field4] [float] NULL
 ) ON [PRIMARY]
 GO
    
    
 INSERT INTO #Test
 (id, data, field1, field2, field3, field4)
 VALUES (1, CONVERT(DATETIME, '1955-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(1, CONVERT(DATETIME, '2022-01-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(2, CONVERT(DATETIME, '2022-02-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(3, CONVERT(DATETIME, '2022-03-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(4, CONVERT(DATETIME, '2022-04-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(5, CONVERT(DATETIME, '2022-05-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(6, CONVERT(DATETIME, '2022-06-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(7, CONVERT(DATETIME, '2022-07-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
 ,(8, CONVERT(DATETIME, '2022-08-16 00:00:00', 102), 2.3, 3.2, 4.5, 6.8)
    
 DECLARE @SQL NVARCHAR(MAX), @cols NVARCHAR(MAX)

 -- WITHIN GROUP (ORDER BY data) keeps the generated column list in date order,
 -- even though the headers themselves are dd-MM-yyyy strings
 SELECT @cols = STRING_AGG(QUOTENAME(FORMAT(data, 'dd-MM-yyyy')), ',') WITHIN GROUP (ORDER BY data) FROM #Test;
 --SELECT @cols = STRING_AGG(QUOTENAME(FORMAT(data, 'yyyy-MM-dd')), ',') WITHIN GROUP (ORDER BY data) FROM #Test;

 -- Format the pivot source the same way as the headers so PIVOT can match them
 SET @sql = ';WITH cte AS (SELECT Id, [data], FieldName, fieldValue
 FROM #Test t CROSS APPLY (VALUES (''field1'', field1), (''field2'', field2), (''field3'', field3), (''field4'', field4)) F (fieldName, fieldValue))

 SELECT * FROM (select FORMAT(data, ''dd-MM-yyyy'') as [Data], FieldName, FieldValue from cte) X PIVOT (max(fieldValue) for [data] in (' + @cols + ')) pvt'

 execute (@SQL);
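
The WITHIN GROUP (ORDER BY data) clause is what keeps the dd-MM-yyyy headers chronological; sorted as plain text they would compare day first, then month, then year. A quick illustration (a sketch, values chosen for contrast):

 -- As text, '16-01-2023' sorts before '16-02-2022' even though it is a year later;
 -- ordering by the underlying date column avoids this.
 SELECT FORMAT(CAST('2023-01-16' AS date), 'dd-MM-yyyy');  -- 16-01-2023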