How can I get a JSON object from a SQL Server table? - javascript
I have a view that I want to be converted into JSON. What is the SQL that I can use to produce on the server the JSON string needed to be returned?
--
-- Author: Thiago R. Santos --
-- Create date: Aug 3rd 2008 --
-- Description: Returns the contents of a given table --
-- in JavaScript Object Notation. --
-- Params: --
-- @table_name: the table to execute the query --
-- @registries_per_request: equivalent to "select top N * from table"
--
-- replacing N by the actual number
-- Influenced by Thomas Frank's post MySQL to JSON # January 23, 2007 --
-- Post Url: http://www.thomasfrank.se/mysql_to_json.html --
create procedure [dbo].[GetJSON]
(
    @table_name varchar(50),
    @registries_per_request smallint = null
)
as
begin
    -- Only run when the requested table actually exists; this also means an
    -- arbitrary string passed as @table_name cannot reach the dynamic SQL below.
    if exists (select 1 from information_schema.tables where table_name = @table_name)
    begin
        declare @json varchar(max),           -- final JSON array returned to the caller
                @line varchar(max),           -- one serialized row fetched from tmpJsonTable
                @columns varchar(max),        -- per-row JSON template built from the column list
                @sql nvarchar(max),           -- dynamic SELECT ... INTO statement
                @columnNavigator varchar(50), -- current column name while cursoring the schema
                @counter int,                 -- int (not tinyint): tables can have > 255 columns/rows
                @size varchar(10)             -- optional "top N" clause

        -- Translate @registries_per_request into a "top N" prefix (empty = all rows).
        if (@registries_per_request is null)
            set @size = ''
        else
            set @size = 'top ' + convert(varchar, @registries_per_request)

        -- Build the row template: {''col'':''<value>'', ...}. The doubled quotes
        -- survive one level of unescaping when embedded in the dynamic SQL below.
        set @columns = '{'
        declare schemaCursor cursor
        for select column_name from information_schema.columns where table_name = @table_name
        open schemaCursor
        fetch next from schemaCursor into @columnNavigator
        select @counter = count(*) from information_schema.columns where table_name = @table_name
        while @@fetch_status = 0
        begin
            -- quotename() brackets the column so unusual names cannot break the dynamic SQL.
            set @columns = @columns + '''''' + @columnNavigator + ''''':'''''' + convert(varchar, ' + quotename(@columnNavigator) + ') + '''''''
            set @counter = @counter - 1
            -- Comma after every column except the last one.
            if (0 != @counter)
                set @columns = @columns + ','
            fetch next from schemaCursor into @columnNavigator
        end
        set @columns = @columns + '}'
        close schemaCursor
        deallocate schemaCursor

        -- Serialize every row into tmpJsonTable, then join the rows with commas.
        -- NOTE(review): tmpJsonTable is a permanent table, so concurrent calls
        -- will collide — consider a #temp table instead.
        set @json = '['
        set @sql = 'select ' + @size + ' ''' + @columns + ''' as json into tmpJsonTable from ' + quotename(@table_name)
        -- sp_executesql is the documented replacement for the undocumented sp_sqlexec.
        exec sp_executesql @sql
        select @counter = count(*) from tmpJsonTable
        declare tmpCur cursor
        for select * from tmpJsonTable
        open tmpCur
        fetch next from tmpCur into @line
        while @@fetch_status = 0
        begin
            set @counter = @counter - 1
            set @json = @json + @line
            -- Comma after every element except the last one.
            if (0 != @counter)
                set @json = @json + ','
            fetch next from tmpCur into @line
        end
        set @json = @json + ']'
        close tmpCur
        deallocate tmpCur
        drop table tmpJsonTable
        select @json as json
    end
end
I imagine this can be done, but it seems like an extremely long-winded and error-prone way of achieving the desired result.
If I were you I'd break down the problem: look at the ORM technology of your middle-tier framework (ASP.NET I assume?) and then serialise to JSON from the framework. Failing framework support (i.e. you aren't in .NET 3+) I'd still favour serialising the database to XML and then XSLT-transforming the XML to JSON, since XML is much much easier to work with on the server.
The name of the game is separation of concerns.
Below version is a total re-design of this concept. If I've missed something please add a note and I'll edit to adjust.
--
-- Author: Matthew D. Erwin (Snaptech, LLC)
-- Create date: May 9, 2013
-- Description: Returns the contents of a given table
-- in JavaScript Object Notation (JSON)
--
-- Very notably useful for generating MOCK .json files
-- for testing or before RESTful services are completed.
--
-- This implementation:
-- *removed cursor (using FOR XML PATH(''))
-- *properly supports NULL vs quoted values
-- *supports dates in ISO 8601 - presuming UTC
-- *uses Data_Type and Is_Nullable info
-- *escapes '\'
-- *formats output with tabs/newlines
-- *can return final results as XML to bypass
-- truncation in SSMS
-- *supports schema (e.g. [dbo].[TableName])
-- *includes "recordCount" field
-- Options:
-- @table_name: the table to execute the query
-- @limit: equivalent to "select top N * from table"
-- @ssms: flag to use if executing in Sql Server Management Studio
-- to bypass result truncation limits.
--
-- Inspired primarily by the 2008 work of Thiago R. Santos which was influenced by Thomas Frank.
-- Usage: [dbo].[GetJSON] @table_name = 'MySchema.MyTable', @limit = 50, @ssms = 0
create procedure [dbo].[GetJSON] (
    @table_name varchar(max),
    @limit int = null,
    @ssms bit = 0
)
as
begin
    declare @json varchar(max), @query varchar(max), @table_schema varchar(max) = null

    -- Split an optional "schema.table" argument; strip [] wrappers from both parts.
    if( charindex('.', @table_name) > 0 )
    begin
        set @table_schema = replace(replace( substring(@table_name, 0, charindex('.',@table_name)), '[', ''), ']', '')
        set @table_name = replace(replace( substring(@table_name, charindex('.',@table_name) + 1,len(@table_name)), '[', ''), ']', '')
    end

    -- Build one SELECT that serializes each row to a JSON object string. The
    -- per-column expressions are generated from INFORMATION_SCHEMA metadata and
    -- folded into a single string via FOR XML PATH(''); the REVERSE(STUFF(REVERSE
    -- pattern trims the trailing comma.
    set @query =
        'select ' + case when @limit is not null then 'top ' + cast(@limit as varchar(32)) + ' ' else '' end + '''{ '' + REVERSE(STUFF(REVERSE(''' +
        CAST((SELECT ' "' + column_name + '" : ' +
        -- Nullable columns emit the unquoted literal null when the value is NULL.
        case when is_nullable = 'YES'
            then ''' + case when [' + column_name + '] is null then ''null'' else ' +
                case when data_type like '%char%' or data_type like '%text%' then '''"'' + ' else '' end +
                case when data_type like '%date%' then 'convert(varchar(23),[' + column_name + '], 126) + ''Z''' else
                'replace(replace(replace(replace(cast([' + column_name + '] as varchar(max)),''\'',''\\''),''"'',''\"''),char(10),''\n''),char(13),''\n'') ' end +
                case when data_type like '%char%' or data_type like '%text%' then '+ ''"''' else '' end + ' end + '''
            else
                case when data_type like '%char%' or data_type like '%text%' then '"' else '' end +
                ''' + ' +
                case when data_type like '%date%' then 'convert(varchar(23),[' + column_name + '], 126) + ''Z' else
                'replace(replace(replace(replace(cast([' + column_name + '] as varchar(max)),''\'',''\\''),''"'',''\"''),char(10),''\n''),char(13),''\n'') + ''' end +
                case when data_type like '%char%' or data_type like '%text%' then '"' else '' end end + ',' AS [text()]
        from information_schema.columns where table_name = @table_name and (@table_schema is null or table_schema = @table_schema) FOR XML PATH('') ) as varchar(max)) +
        '''),1,1,'''')) + '' }'' as json into tmpJsonTable from ' + @table_name + ' with(nolock) '

    -- exec() of a character string is the documented replacement for the
    -- undocumented sp_sqlexec. NOTE(review): tmpJsonTable is a permanent table,
    -- so concurrent calls will collide — consider a #temp table.
    exec (@query)

    -- Wrap the rows in a { "recordCount" : N, "records" : [ ... ] } envelope,
    -- again using FOR XML PATH('') + REVERSE/STUFF to drop the trailing ",\n".
    set @json =
        '{' + char(10) + char(9) +
        '"recordCount" : ' + Cast((select count(*) from tmpJsonTable) as varchar(32)) + ',' + char(10) + char(9) +
        '"records" : ' + char(10) + char(9) + char(9) + '[' + char(10)
        + REVERSE(STUFF(REVERSE(CAST((SELECT char(9) + char(9) + json + ',' + char(10) AS [text()] FROM tmpJsonTable FOR XML PATH('')) AS varchar(max))),1,2,''))
        + char(10) + char(9) + char(9) + ']' + char(10) + '}'
    drop table tmpJsonTable

    if( @ssms = 1 and len(@json) > 65535 ) --deal with Sql Server Management Studio text/grid truncation
        select cast('<json><![CDATA[' + @json + ']]></json>' as xml) as jsonString
    else
        select @json as jsonString
end
jlech's answer is OK, but I don't see why you cannot generate directly off a VIEW's metadata using a technique similar to the one in this UNPIVOT answer, avoiding CURSORs and a SELECT INTO temporary table.
Not to derail the OP's question, but I am wondering if doing this in SQL is the best / most appropriate route to take? It seems to me that this might be more easily / effectively done in code.
I was initially wondering the same thing (which is how I found this post), but after chewing on the idea for a few minutes, it seems like this might be better accomplished using a utility / extension method that takes a dataset & returns the resulting JSON string.
Granted, the OP may have good reasons for needing to go this route. I'm just thinking (typing) out loud here...
Related
How to write a JavaScript stored procedure in Azure Cosmo DB to return data from SQL API
I'm trying to create a Cosmo DB stored procedure to return the results of a relatively simple SQL statement. If it were purely SQL I would be fine but since I know nothing about JavaScript I'm struggling mightily. Any help would be very much appreciated. Here is the SQL query: SELECT distinct cx.ID, cxa.FieldValue as PartNo, cx.TransactionDate, cx.TransactionStatus FROM c JOIN cx in c.File.Transactions JOIN cxa in cx.AppDetails JOIN ( SELECT cx2.ID, cxa2.FieldValue as PartNo, max(cx2.TransactionDate) as TransactionDate FROM c JOIN cx2 in c.File.Transactions JOIN cxa2 in cx2.AppDetails WHERE c.File.Category= 'BatchParts' and cxa2.FieldName ='PartNo' GROUP BY cx2.ID,cxa2.FieldValue ) B WHERE c.File.Category= 'BatchParts' and cxa.FieldName ='PartNo'
You can try something like this: function getItems(category,fieldName) { var collection = getContext().getCollection(); var query = 'SELECT distinct cx.ID, cxa.FieldValue as PartNo, cx.TransactionDate, cx.TransactionStatus ' + 'FROM c ' + 'JOIN cx in c.File.Transactions ' + 'JOIN cxa in cx.AppDetails ' + 'JOIN ' + '( ' + 'SELECT cx2.ID, cxa2.FieldValue as PartNo, max(cx2.TransactionDate) as TransactionDate ' + 'FROM c ' + 'JOIN cx2 in c.File.Transactions ' + 'JOIN cxa2 in cx2.AppDetails ' + 'WHERE c.File.Category= #Category and cxa2.FieldName = #FieldName ' + 'GROUP BY cx2.ID,cxa2.FieldValue ' + ') B ' + 'WHERE c.File.Category= #Category and cxa.FieldName = #FieldName'; var filterQuery = { 'query' : query, 'parameters' : [{'name':'#Category', 'value':category},{'name':'#FieldName', 'value':fieldName}] }; var isAccepted = collection.queryDocuments( collection.getSelfLink(), filterQuery, function (err, feed, options) { if (err) throw err; if (!feed || !feed.length) { var response = getContext().getResponse(); response.setBody('no docs found'); } else { var response = getContext().getResponse(); var body = feed; response.setBody(JSON.stringify(body)); } }); if (!isAccepted) throw new Error('The query was not accepted by the server.'); } By the way, when you invoke stored procedure, you need to pass partition key value. And you can only get the data from this partition. You can refer to this doc and this.
This type of query, if timing out in a stored procedure or via SDK, is probably best handled using Synapse Link. Stored procedures are bad candidates for queries because they only operate on the master replica (there are 4 of them). Because throughput is allocated equally across all four replicas, stored procedures only get 1/4 of the provisioned throughput. Synapse Link is designed to be used in this sort of scenario where you have large, complex, analytical type queries and want to visualize your data using Power BI. To learn more about Cosmos DB and Synapse see, What is Azure Synapse Link for Azure Cosmos DB (Preview)?
How to pass a question mark express router api to update sql
I want to update a varchar field (String) using an End-Point API (Express NodeJS) but I have a problem when I pass invalid inputs like a question mark. Express End-Point: router.get("/updateField/:table/:field/:value/:num/:postid/", function(req, res) { connection.query( 'UPDATE '+ req.params.table +' SET ' + req.params.field +' = '+JSON.stringify(req.params.value) +' where language ='+ req.params.num +' and post_id ='+req.params.postid This code works fine: http://localhost:3001/api/updateField/posts/TITLE/When/1/1 But this does NOT WORK: http://localhost:3001/api/updateField/posts/TITLE/When?/1/1 I send the request from react like this: fetch( "http://localhost:3001/api/updateField/" + table + "/" + field + "/" + value + "/" + lenguage + "/" + post_id );
Use javascript function encodeURIComponent() to escape special characters in URL parameters. For example try this on your browser console and you'll get an idea: console.log( "http://localhost:3001/api/updateField/" + table + "/" + field + "/" + encodeURIComponent(value) + "/" + lenguage + "/" + post_id ); console.log(encodeURIComponent("When?")); You will see that "When?" is replaced with "When%3F" in URL. In Node.Js, you'll receive parameter value as string "When?". To know more about encodeURIComponent(), refer to this
Why is the following way to assign a value to a JSON array not working?
I have this code: compareList[productName] = productID + ',' + productHref; console.log(productName + ' ' + productID + ' ' + productHref + ' ' + compareList.length); Which logs into this (I have removed the link): Acer Iconia B1-790 [NT.LDFEE.002] 112576 link removed for confidentiality 0 As you can see, all three variables are valid strings, but the json object still fails to assign (compareList.length logs as 0). I've been thinking and thinking but I simply can't figure it out. Any help is appreciated.
Maybe this version of adding and checking array length can be useful to you? var compareList=[] var productName = {productID:'saban',productHref:'http://saulic.com'}; compareList.push(productName); console.log(compareList.length);
Sql dynamic query syntax error
This is my sql query "select * from applicant where email =ABC.Com" I want table name, colum name and value after where clause dynamic in form of variables there. Can you tell me the right syntax? I have tried this so far "select * from " tableNameVariable+"where "columnNameVariable+"= "inputedEmailVariable now this query is giving an error near =inputedEmailVariable. Note : I want to use this string in nodejs function. Please tell me right syntax?
Try: "select * from " + tableNameVariable + " where " + columnNameVariable + " = '" + inputedEmailVariable + "'"
You're missing some + operators, and also missing a space before where. var sql = "select * from " + tableNameVariable + " where " + columnNameVariable + " = " + inputedEmailVariable You should be very careful if the variables are coming from user input, since this can result in SQL injection. Make sure the variables contain table and column values that they're allowed to access. And if you're compareing the column with a string, you'll need quotes around the string. But it would be better to use a placeholder at that point, rather than concatenating the variable.
Saving CR/LF in json data
I am saving table data to a json object. The table data is coming from txt inputs and textareas in the table cells. I'm running into a problem with CR/LF characters in the JSON elements holding the textarea data. The JSON data gets saved to the database fine, but when I pass it back to the jQuery function that populates the table using that data, I get this: SyntaxError: JSON.parse: bad control character in string literal at line 1 column 67 of the JSON data var array = JSON.parse(notes), in the console. I put the JSON data in Notepad++ with Show All Characters on and the CR/LF was at column 67. Here's a sample of JSON data that I'm working with: [["","","",""],["","9/23/14","",""],["","30789 detail, x_vendor_no**CR/LF HERE** 20597 header","",""],["","99 del invalid x_vendor_no","",""],["","30780","",""],["","","",""],["","","",""],["","","",""]] Is there a way to allow CR/LF in the data? UPDATE 11684's suggestion to use replace to remove the \r part of the CRLF won't work. Here's why: Here's the complete function that uses the JSON data: (Updated to work with Update #2 code below) function PopulateTaskTableWithNotes(tableID,notesArray) { // JSON parse removed per answer suggestion var r, c, note; for (r = 0; r < notesArray.length; ++r) { for (c = 0; c < notesArray[r].length; ++c) { note = notesArray[r][c]; $('#' + tableID + ' tr:nth-child(' + (r + 1) + ') td:nth-child(' + (c + 1) + ')').children(':first-child').val(note); } } } I still get the error on the line that tries to parse the JSON data. The replace function apparently can't "find" characters within an array element. 
UPDATE #2 Here's how I am creating the array: var siteID = $('#ddlUserSites option:selected').val(), numRows = $('#' + tableID + ' tr').length, numCols = $('#' + tableID).find('tr:first th').length, notesArray = new Array(numRows), rowNum = 1, note = '', colNum; while (rowNum <= numRows) { notesArray[rowNum] = new Array(numCols); // Reset colNum for next row iteration colNum = 1; while (colNum <= numCols) { note = ''; if ($('#' + tableID + ' tr:nth-child(' + rowNum + ') td:nth-child(' + colNum + ')').children(':first-child').is('input,textarea')) { note = $('#' + tableID + ' tr:nth-child(' + rowNum + ') td:nth-child(' + colNum + ')').children(':first-child').val(); } notesArray[rowNum][colNum] = note; //console.log('Note for rowNum ' + rowNum + ', colNum ' + colNum + ': ' + note); colNum++; } // Remove first element in current row array notesArray[rowNum].shift(); rowNum++; } // Remove first element in array notesArray.shift(); JSON.stringify(notesArray); // Added per an answer here console.log('Final notesArray: ' + $.toJSON(notesArray)); $.ajax({ data: {saveTaskNotes: 'true', userID:userID, siteID:siteID, taskTable:tableID, notes:notesArray}, success: function(data) { console.log('Save task notes data: ' + data); } }); The "Final notesArray" console output looks fine, but now, with stringify added, the function above (PopulateTaskTableWithNotes) console output shows that it's reading through every character in the array as a separate element! Maybe this will help too, as far as what's happening to the data between the creating and reading functions: the array is being saved to a single MySQL database field and then retrieved for the PopulateTable function via $.ajax() (on both ends). Having said that, do I need to look at what I'm doing with/to the array in the PHP code? 
UPDATE #3 Here's the PHP function that takes the data in and writes to the MySQL db: function SaveTaskNotes($userID,$siteID,$taskTable,$notes) { $notes = json_encode($notes); $insertUpdateTaskNotesResult = ''; $insertTaskNotes = "INSERT INTO userProgress (userProgressUserID,userProgressSiteID,userProgressNotesTable,userProgressNotes) values ($userID,$siteID,'" . $taskTable . "','" . $notes . "')"; $log->lwrite('$insertTaskNotes: ' . $insertTaskNotes); $resultInsertTaskNotes = #mysqli_query($dbc,$insertTaskNotes); if ($resultInsertTaskNotes) { $insertUpdateTaskNotesResult = 'insertTaskNotesSuccess'; } else { if (mysqli_error($dbc) != '') { $log->lwrite('INSERT TASK NOTES: An error occurred while attempting to add the task notes. Query: ' . $insertTaskNotes . ', mysqli_error: ' . mysqli_error($dbc)); } $insertUpdateTaskNotesResult = 'insertTaskNotesFail'; } echo $insertUpdateTaskNotesResult; } And here's the function that gets the data from the db and sends it to the above $.ajax function: function GetUserTaskNotes($userID,$siteID,$taskTableID) { $queryGetUserTaskNotes = "SELECT userProgressNotes FROM userProgress WHERE userProgressUserID = $userID AND userProgressSiteID = $siteID AND userProgressNotesTable = '" . $taskTableID . "'"; $log->lwrite('$queryGetUserTaskNotes: ' . $queryGetUserTaskNotes); $resultGetUserTaskNotes = #mysqli_query($dbc,$queryGetUserTaskNotes); if ($resultGetUserTaskNotes) { $taskNotes = mysqli_fetch_assoc($resultGetUserTaskNotes); $log->lwrite('Retrieved $taskNotes[\'userProgressNotes\']: ' . $taskNotes['userProgressNotes']); echo $taskNotes['userProgressNotes']; } else { if (mysqli_error($dbc) != '') { $log->lwrite('GET TASK NOTES: An error occurred while attempting to retrieve the task notes. Query: ' . $queryGetUserTaskNotes . ', mysqli_error: ' . 
mysqli_error($dbc)); } echo 'getTaskNotesFail'; } } In both the save and get functions the $log output shows that the array never changes (with the above js/php code) and pasting the array in to notepad++ shows that the CR/LF is still there throughout.
Don't use JSON.parse, the data is already JSON and Javascript can work with it. You only need it when passing a string; imagine JSON.parse() being like string2json(). I think this might already be a solution to your problem, I've never had issues with new line characters. As Luis said, the problem is not your client (Javascript, jQuery), besides the JSON.parse, but the providing side is wrong. Example for PHP: <?php echo json_encode(array("test" => " x")); PHP properly escapes the characters: {"test":"\r\n\r\n\r\nx"} But the source of your data is providing malformed JSON. To fix the JSON issue, either use prepared statements or use: $notes = str_replace('\\', '\\\\', json_encode($notes)); // in SaveTaskNotes
Well, the error is in the input data (shown in the question). You can't have a CR or LF inside a literal in a JSON string. What you can have is those chars escaped as \r \n. The problem is on the other side, where escaped codes are replaced by actual chars and therefore the full JSON string becomes invalid.