Hello

2014-08-22 12:21 GMT+02:00 Jeevan Chalke <jeevan.cha...@enterprisedb.com>:

> Hi Pavel,
>
> You have said that XMLFOREST has something which ignores nulls; what's
> that? Will you please provide an example?
>
>

I was partially wrong - XMLFOREST always ignores NULLs:

postgres=# select xmlforest(10 as a,20 as b,null as c);
     xmlforest
--------------------
 <a>10</a><b>20</b>
(1 row)

so if you want empty elements, you have to use xmlelement and
xmlconcat:

postgres=# select xmlconcat(xmlforest(10 as a,20 as b), xmlelement(name c,
null));
       xmlconcat
------------------------
 <a>10</a><b>20</b><c/>
(1 row)



>
> I am NOT sure, but here you are trying to omit an entire field from the
> output when its value is NULL. That will add extra effort at the other end
> which consumes this output: that application needs to know all fields and
> then fill in NULL values for the missing ones. However, we have an
> optional argument for ignoring nulls and thus we are safe; the application
> will use it as per its choice.
>

with my patch the decision is yours - lots of REST services don't
distinguish between an empty tag and a missing tag, and some developers
prefer to remove empty tags to reduce the message size.
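
On the JSON side the patch gives the same choice; a quick sketch of the
expected behaviour (it matches the regression tests in the attached patch):

postgres=# select row_to_json(x, false, false) as keep_nulls, row_to_json(x, false, true) as drop_nulls
postgres-#   from (values (10, null::int, null::int)) x(a,b,c);
         keep_nulls         | drop_nulls
----------------------------+------------
 {"a":10,"b":null,"c":null} | {"a":10}
(1 row)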


>
> Well, apart from that, I tried reviewing the patch. The patch applied, but
> make failed with the following error.
>
> make[3]: Entering directory `/home/jeevan/pg_master/src/backend/catalog'
> cd ../../../src/include/catalog && '/usr/bin/perl' ./duplicate_oids
> 3255
> make[3]: *** [postgres.bki] Error 1
>
> Please run the unused_oids script to find an unused OID.
>

It needed remastering - the previously used OID (3255) has been taken in
the meantime.

An updated patch is attached.
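
As a quick check after applying it, the new three-argument variant should
show up in pg_proc next to the existing ones (a sketch - OIDs as used in
the patch):

postgres=# select oid, proname, proargtypes from pg_proc where proname = 'row_to_json' order by oid;
 oid  |   proname   | proargtypes
------+-------------+-------------
 3155 | row_to_json | 2249
 3156 | row_to_json | 2249 16
 3256 | row_to_json | 2249 16 16
(3 rows)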



>
> However, I had a quick look over the code changes. At first glance it looks
> good, but I still need to check it at the SQL level and then do a code
> walk-through.
>
> Waiting for updated patch.
>

Thank you for the review.

Regards

Pavel


>
> Thanks
>
> --
> Jeevan B Chalke
> Principal Software Engineer, Product Development
> EnterpriseDB Corporation
> The Enterprise PostgreSQL Company
>
>
commit e1dd47ca1f881a2ce41117526a536555c23c2d81
Author: Pavel Stehule <pavel.steh...@gooddata.com>
Date:   Sat Aug 23 07:19:46 2014 +0200

    remastering, change used oid in pg_proc

diff --git a/doc/src/sgml/func.sgml b/doc/src/sgml/func.sgml
index c715ca2..a27aff4 100644
--- a/doc/src/sgml/func.sgml
+++ b/doc/src/sgml/func.sgml
@@ -10294,11 +10294,12 @@ table2-mapping
       </row>
       <row>
        <entry>
-         <literal>row_to_json(record [, pretty_bool])</literal>
+         <literal>row_to_json(record [, pretty_bool [, ignore_nulls] ])</literal>
        </entry>
        <entry>
          Returns the row as a JSON object. Line feeds will be added between
-         level-1 elements if <parameter>pretty_bool</parameter> is true.
+         level-1 elements if <parameter>pretty_bool</parameter> is true. NULL
+         fields are omitted when <parameter>ignore_nulls</parameter> is true.
        </entry>
        <entry><literal>row_to_json(row(1,'foo'))</literal></entry>
        <entry><literal>{"f1":1,"f2":"foo"}</literal></entry>
diff --git a/src/backend/utils/adt/json.c b/src/backend/utils/adt/json.c
index 494a028..f35db04 100644
--- a/src/backend/utils/adt/json.c
+++ b/src/backend/utils/adt/json.c
@@ -79,7 +79,8 @@ static void report_invalid_token(JsonLexContext *lex);
 static int	report_json_context(JsonLexContext *lex);
 static char *extract_mb_char(char *s);
 static void composite_to_json(Datum composite, StringInfo result,
-				  bool use_line_feeds);
+				  bool use_line_feeds,
+				  bool ignore_nulls);
 static void array_dim_to_json(StringInfo result, int dim, int ndims, int *dims,
 				  Datum *vals, bool *nulls, int *valcount,
 				  JsonTypeCategory tcategory, Oid outfuncoid,
@@ -1362,7 +1363,7 @@ datum_to_json(Datum val, bool is_null, StringInfo result,
 			array_to_json_internal(val, result, false);
 			break;
 		case JSONTYPE_COMPOSITE:
-			composite_to_json(val, result, false);
+			composite_to_json(val, result, false, false);
 			break;
 		case JSONTYPE_BOOL:
 			outputstr = DatumGetBool(val) ? "true" : "false";
@@ -1591,7 +1592,8 @@ array_to_json_internal(Datum array, StringInfo result, bool use_line_feeds)
  * Turn a composite / record into JSON.
  */
 static void
-composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
+composite_to_json(Datum composite, StringInfo result, bool use_line_feeds,
+								    bool ignore_nulls)
 {
 	HeapTupleHeader td;
 	Oid			tupType;
@@ -1630,6 +1632,12 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
 		if (tupdesc->attrs[i]->attisdropped)
 			continue;
 
+		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
+
+		/* Don't serialize NULL field when we don't want it */
+		if (isnull && ignore_nulls)
+			continue;
+
 		if (needsep)
 			appendStringInfoString(result, sep);
 		needsep = true;
@@ -1638,8 +1646,6 @@ composite_to_json(Datum composite, StringInfo result, bool use_line_feeds)
 		escape_json(result, attname);
 		appendStringInfoChar(result, ':');
 
-		val = heap_getattr(tuple, i + 1, tupdesc, &isnull);
-
 		if (isnull)
 		{
 			tcategory = JSONTYPE_NULL;
@@ -1731,7 +1737,7 @@ row_to_json(PG_FUNCTION_ARGS)
 
 	result = makeStringInfo();
 
-	composite_to_json(array, result, false);
+	composite_to_json(array, result, false, false);
 
 	PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
 }
@@ -1748,7 +1754,25 @@ row_to_json_pretty(PG_FUNCTION_ARGS)
 
 	result = makeStringInfo();
 
-	composite_to_json(array, result, use_line_feeds);
+	composite_to_json(array, result, use_line_feeds, false);
+
+	PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
+}
+
+/*
+ * SQL function row_to_json(row, prettybool, ignore_nulls)
+ */
+extern Datum
+row_to_json_pretty_choosy(PG_FUNCTION_ARGS)
+{
+	Datum		array = PG_GETARG_DATUM(0);
+	bool		use_line_feeds = PG_GETARG_BOOL(1);
+	bool		ignore_nulls = PG_GETARG_BOOL(2);
+	StringInfo	result;
+
+	result = makeStringInfo();
+
+	composite_to_json(array, result, use_line_feeds, ignore_nulls);
 
 	PG_RETURN_TEXT_P(cstring_to_text_with_len(result->data, result->len));
 }
diff --git a/src/include/catalog/pg_proc.h b/src/include/catalog/pg_proc.h
index a84595e..0c7bd0d 100644
--- a/src/include/catalog/pg_proc.h
+++ b/src/include/catalog/pg_proc.h
@@ -4199,6 +4199,8 @@ DATA(insert OID = 3155 (  row_to_json	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 1
 DESCR("map row to json");
 DATA(insert OID = 3156 (  row_to_json	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 2 0 114 "2249 16" _null_ _null_ _null_ _null_ row_to_json_pretty _null_ _null_ _null_ ));
 DESCR("map row to json with optional pretty printing");
+DATA(insert OID = 3256 (  row_to_json	   PGNSP PGUID 12 1 0 0 0 f f f f t f s 3 0 114 "2249 16 16" _null_ _null_ _null_ _null_ row_to_json_pretty_choosy _null_ _null_ _null_ ));
+DESCR("map row to json with optional pretty printing and skipping NULLs");
 DATA(insert OID = 3173 (  json_agg_transfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 2 0 2281 "2281 2283" _null_ _null_ _null_ _null_ json_agg_transfn _null_ _null_ _null_ ));
 DESCR("json aggregate transition function");
 DATA(insert OID = 3174 (  json_agg_finalfn	 PGNSP PGUID 12 1 0 0 0 f f f f f f i 1 0 114 "2281" _null_ _null_ _null_ _null_ json_agg_finalfn _null_ _null_ _null_ ));
diff --git a/src/include/utils/json.h b/src/include/utils/json.h
index 82cc48b..6e43dec 100644
--- a/src/include/utils/json.h
+++ b/src/include/utils/json.h
@@ -26,6 +26,7 @@ extern Datum array_to_json(PG_FUNCTION_ARGS);
 extern Datum array_to_json_pretty(PG_FUNCTION_ARGS);
 extern Datum row_to_json(PG_FUNCTION_ARGS);
 extern Datum row_to_json_pretty(PG_FUNCTION_ARGS);
+extern Datum row_to_json_pretty_choosy(PG_FUNCTION_ARGS);
 extern Datum to_json(PG_FUNCTION_ARGS);
 
 extern Datum json_agg_transfn(PG_FUNCTION_ARGS);
diff --git a/src/test/regress/expected/json.out b/src/test/regress/expected/json.out
index bb4d9ed..fd40117 100644
--- a/src/test/regress/expected/json.out
+++ b/src/test/regress/expected/json.out
@@ -403,6 +403,28 @@ SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),
  {"f1":[5,6,7,8,9,10]}
 (1 row)
 
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+                             (10,NULL, NULL), 
+                             (NULL, NULL, NULL)) g(a,b,c))  
+ SELECT row_to_json(x, false, false) FROM x;
+         row_to_json          
+------------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10,"b":null,"c":null}
+ {"a":null,"b":null,"c":null}
+(3 rows)
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+                             (10,NULL, NULL), 
+                             (NULL, NULL, NULL)) g(a,b,c))  
+ SELECT row_to_json(x, false, true) FROM x;
+      row_to_json       
+------------------------
+ {"a":10,"b":20,"c":30}
+ {"a":10}
+ {}
+(3 rows)
+
 -- to_json, timestamps
 select to_json(timestamp '2014-05-28 12:22:35.614298');
            to_json            
diff --git a/src/test/regress/sql/json.sql b/src/test/regress/sql/json.sql
index c980132..48c3e37 100644
--- a/src/test/regress/sql/json.sql
+++ b/src/test/regress/sql/json.sql
@@ -100,6 +100,16 @@ FROM rows q;
 
 SELECT row_to_json(row((select array_agg(x) as d from generate_series(5,10) x)),false);
 
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+                             (10,NULL, NULL), 
+                             (NULL, NULL, NULL)) g(a,b,c))  
+ SELECT row_to_json(x, false, false) FROM x;
+
+WITH x AS (SELECT a,b,c FROM (VALUES(10,20,30),
+                             (10,NULL, NULL), 
+                             (NULL, NULL, NULL)) g(a,b,c))  
+ SELECT row_to_json(x, false, true) FROM x;
+
 -- to_json, timestamps
 
 select to_json(timestamp '2014-05-28 12:22:35.614298');