Completely removes jsx
williamthome committed Nov 7, 2023
1 parent 6be384c commit 051cff7
Showing 8 changed files with 128 additions and 572 deletions.
15 changes: 8 additions & 7 deletions README.md
@@ -2,7 +2,8 @@

# JSON encoding with records and 'null'/'undefined' mapping

This is a wrapper around `jsx` to handle encoding and decoding of Erlang records.
Originally, this was a wrapper around `jsx` to handle encoding and decoding of Erlang records, but it now uses [euneus](https://github.com/williamthome/euneus), which gives jsxrecord better performance.
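
For example, a minimal round trip might look like this (a sketch, assuming the `#test{ a = 1, b = 2, c }` record used in the examples below, and that its module is listed in the `record_modules` configuration):

    JSON = jsxrecord:encode(#test{ a = 1 }),
    %% JSON is a binary JSON object carrying "_record":"test" next to the record fields
    #test{ a = 1, b = 2, c = undefined } = jsxrecord:decode(JSON)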

## JSON null handling
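
A minimal sketch of the mapping, matching the encoder and decoder clauses in `src/jsxrecord.erl` below: Erlang `undefined` and `null` both encode to JSON `null`, and JSON `null` decodes back to `undefined`:

    <<"null">> = jsxrecord:encode(undefined),
    undefined = jsxrecord:decode(<<"null">>)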

@@ -56,25 +57,25 @@ Decoding returns the `#test{}`:
Defaults are automatically added for fields missing in the JSON:

#test{ a = 1, b = 2, c = undefined } = jsxrecord:decode(<<"{\"_record\":\"test\"}">>).

### Encoding and decoding datetime and timestamp tuples

Datetime tuples are assumed to be in UTC, and are converted into an ISO8601 string:

<<"\"2008-12-10T13:30:00Z\"">> = jsxrecord:encode({{2008, 12, 10}, {13, 30, 0}})

They are converted back into a datetime tuple:

{{2008, 12, 10}, {13, 30, 0}} = jsxrecord:decode(<<"\"2008-12-10T13:30:00Z\"">>)

Erlang timestamp tuples are also converted into an ISO8601 string, but with added precision:

<<"\"2020-06-12T14:00:11.571Z\"">> = jsxrecord:encode({1591,970411,571321})

A little bit of precision is lost when converting it back to a timestamp tuple:

{1591,970411,571000} = jsxrecord:decode(<<"\"2020-06-12T14:00:11.571Z\"">>)


## Configuration

1 change: 0 additions & 1 deletion rebar.config
@@ -1,7 +1,6 @@
{require_min_otp_vsn, "21"}.

{deps, [
{jsx, "3.1.0"},
{euneus, "0.6.0"}
]}.

1 change: 0 additions & 1 deletion rebar.test.config
@@ -10,6 +10,5 @@
{deps, [
{lager, "3.8.0"},
{proper, "1.3.0"},
{jsx, "3.1.0"},
{euneus, "0.6.0"}
]}.
2 changes: 1 addition & 1 deletion src/jsxrecord.app.src
@@ -2,7 +2,7 @@
{description, "JSX wrapper to handle records and 'undefined'"},
{vsn, "git"},
{registered, []},
{applications, [ kernel, stdlib, syntax_tools, compiler, jsx ]},
{applications, [ kernel, stdlib, syntax_tools, compiler ]},
{env, [
{record_modules, [ ]}
]},
219 changes: 77 additions & 142 deletions src/jsxrecord.erl
@@ -1,6 +1,6 @@
%% @author Marc Worrell <[email protected]>
%% @copyright 2018-2023 Marc Worrell
%% @doc JSON with records and 'undefined'/'null' mapping. Wrapper around jsx.
%% @doc JSON with records and 'undefined'/'null' mapping.
%% @end

%% Copyright 2018-2023 Marc Worrell
@@ -31,9 +31,6 @@

-define(RECORD_TYPE, <<"_type">>).

-define(IS_NUMBER(C), C >= $0, C =< $9).


-include_lib("kernel/include/logger.hrl").

%%====================================================================
Expand Down Expand Up @@ -91,93 +88,91 @@ do_load_records(Modules, CurrRecordDefs) ->
Records),
compile_module(New).


encode_json(undefined) -> <<"null">>;
encode_json(null) -> <<"null">>;
encode_json(true) -> <<"true">>;
encode_json(false) -> <<"false">>;
encode_json({struct, _} = MochiJSON) ->
encode_json( mochijson_to_map(MochiJSON) );
encode_json(Term) ->
Options = [
{error_handler, fun jsx_error/3}
],
jsx:encode(expand_records(Term), Options).
Options = #{
nulls => [undefined, null],
list_encoder => fun
([{K, _} | _] = Proplist, Opts)
when is_binary(K); is_atom(K); is_integer(K) ->
Map = proplists:to_map(Proplist),
euneus_encoder:encode_map(Map, Opts);
(List, Opts) ->
euneus_encoder:encode_list(List, Opts)
end,
unhandled_encoder => fun
({struct, MochiJSON}, Opts) ->
Map = mochijson_to_map(MochiJSON),
euneus_encoder:encode_map(Map, Opts);
(R, _Opts) when is_tuple(R), is_atom(element(1, R)) ->
T = atom_to_binary(element(1, R), utf8),
case maps:find(T, record_defs()) of
{ok, Def} ->
encode_json(expand_record_1(
Def, 2, R, #{ ?RECORD_TYPE => T }
));
error ->
euneus_encoder:throw_unsupported_type_error(R)
end;
(T, _Opts) ->
euneus_encoder:throw_unsupported_type_error(T)
end,
error_handler => fun jsx_error/3
},
case euneus:encode_to_binary(Term, Options) of
{ok, JSON} ->
JSON;
{error, Reason} ->
error(Reason)
end.

decode_json(<<>>) -> undefined;
decode_json(<<"null">>) -> undefined;
decode_json(<<"true">>) -> true;
decode_json(<<"false">>) -> false;
decode_json(B) -> reconstitute_records( jsx:decode(B, [return_maps]) ).
decode_json(B) ->
Options = #{
objects => fun(M1, _Opts) ->
case maps:find(?RECORD_TYPE, M1) of
{ok, Type} ->
case maps:find(Type, record_defs_int()) of
{ok, Def} ->
Rec = lists:foldl(
fun({F, Default}, Acc) ->
V1 = case maps:get(F, M1, Default) of
V when is_map(V), is_list(Default) ->
make_proplist(V);
V ->
V
end,
[ V1 | Acc ]
end,
[ binary_to_atom(Type, utf8) ],
Def),
list_to_tuple( lists:reverse(Rec) );
error ->
M1
end;
error ->
M1
end
end
},
case euneus:decode(B, Options) of
{ok, Term} ->
Term;
{error, Reason} ->
error(Reason)
end.

jsx_error([T|Terms], {parser, State, Handler, Stack}, Config) ->
jsx_error(throw, {{token, Token}, Rest, Opts, Input, Pos, Buffer}, _Stacktrace) ->
?LOG_ERROR(#{
in => jsxrecord,
text => <<"Error mapping value to JSON">>,
result => error,
reason => json_token,
token => T
token => Token
}),
Config1 = jsx_config:parse_config(Config),
jsx_parser:resume([null|Terms], State, Handler, Stack, Config1);
jsx_error(_Terms, _Error, _Config) ->
erlang:error(badarg).


reconstitute_records( M ) when is_map(M) ->
M1 = maps:map( fun(_K, V) -> reconstitute_records(V) end, M ),
case maps:find(?RECORD_TYPE, M1) of
{ok, Type} ->
case maps:find(Type, record_defs_int()) of
{ok, Def} ->
Rec = lists:foldl(
fun({F, Default}, Acc) ->
V1 = case maps:get(F, M1, Default) of
V when is_map(V), is_list(Default) ->
make_proplist(V);
V ->
V
end,
[ V1 | Acc ]
end,
[ binary_to_atom(Type, utf8) ],
Def),
list_to_tuple( lists:reverse(Rec) );
error ->
M1
end;
error ->
M1
end;
reconstitute_records( L ) when is_list(L) ->
[ reconstitute_records(X) || X <- L ];
reconstitute_records( null ) ->
undefined;
reconstitute_records( <<Y4, Y3, Y2, Y1, $-, M2, M1, $-, D2, D1, $T, H2, H1, $:, Min2, Min1, $:, S2, S1, $., Mil3, Mil2, Mil1, $Z>> )
when ?IS_NUMBER(Y4), ?IS_NUMBER(Y3), ?IS_NUMBER(Y2), ?IS_NUMBER(Y1),
?IS_NUMBER(M2), ?IS_NUMBER(M1),
?IS_NUMBER(D2), ?IS_NUMBER(D1),
?IS_NUMBER(H2), ?IS_NUMBER(H1),
?IS_NUMBER(Min2), ?IS_NUMBER(Min1),
?IS_NUMBER(S2), ?IS_NUMBER(S1),
?IS_NUMBER(Mil3), ?IS_NUMBER(Mil2), ?IS_NUMBER(Mil1) ->
DateTime = {{chars_to_integer(Y4, Y3, Y2, Y1), chars_to_integer(M2, M1), chars_to_integer(D2, D1)},
{chars_to_integer(H2, H1), chars_to_integer(Min2, Min1), chars_to_integer(S2, S1)}},
MilliSeconds = chars_to_integer(Mil3, Mil2, Mil1),
Seconds = calendar:datetime_to_gregorian_seconds(DateTime) - 62167219200,
%% 62167219200 == calendar:datetime_to_gregorian_seconds({{1970, 1, 1}, {0, 0, 0}})
{Seconds div 1000000, Seconds rem 1000000, MilliSeconds * 1000};
reconstitute_records( <<Y4, Y3, Y2, Y1, $-, M2, M1, $-, D2, D1, $T, H2, H1, $:, Min2, Min1, $:, S2, S1, $Z>> )
when ?IS_NUMBER(Y4), ?IS_NUMBER(Y3), ?IS_NUMBER(Y2), ?IS_NUMBER(Y1),
?IS_NUMBER(M2), ?IS_NUMBER(M1),
?IS_NUMBER(D2), ?IS_NUMBER(D1),
?IS_NUMBER(H2), ?IS_NUMBER(H1),
?IS_NUMBER(Min2), ?IS_NUMBER(Min1),
?IS_NUMBER(S2), ?IS_NUMBER(S1) ->
{{chars_to_integer(Y4, Y3, Y2, Y1), chars_to_integer(M2, M1), chars_to_integer(D2, D1)},
{chars_to_integer(H2, H1), chars_to_integer(Min2, Min1), chars_to_integer(S2, S1)}};
reconstitute_records( T ) ->
T.
Replacement = null,
euneus_decoder:resume(Token, Replacement, Rest, Opts, Input, Pos, Buffer);
jsx_error(Class, Reason, Stacktrace) ->
euneus_decoder:handle_error(Class, Reason, Stacktrace).

make_proplist(Map) ->
L = maps:to_list(Map),
@@ -194,69 +189,19 @@ make_proplist(Map) ->
end,
L).

expand_records(R) when is_tuple(R), is_atom(element(1, R)) ->
T = atom_to_binary(element(1, R), utf8),
case maps:find(T, record_defs()) of
{ok, Def} ->
expand_record_1(Def, 2, R, #{ ?RECORD_TYPE => T });
error ->
R
end;
expand_records({MegaSecs, Secs, MicroSecs}=Timestamp) when is_integer(MegaSecs) andalso is_integer(Secs) andalso is_integer(MicroSecs) ->
% Timestamp, map to date in UTC
MilliSecs = MicroSecs div 1000,
{{Year, Month, Day}, {Hour, Min, Sec}} = calendar:now_to_datetime(Timestamp),
unicode:characters_to_binary(io_lib:format("~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0B.~3.10.0BZ",
[Year, Month, Day, Hour, Min, Sec, MilliSecs]));

expand_records({{Year,Month,Day},{Hour,Minute,Second}}) when is_integer(Year) andalso is_integer(Month) andalso is_integer(Second) andalso
is_integer(Hour) andalso is_integer(Minute) andalso is_integer(Second) ->
% Date tuple, assume it to be in UTC
unicode:characters_to_binary(io_lib:format(
"~4.10.0B-~2.10.0B-~2.10.0BT~2.10.0B:~2.10.0B:~2.10.0BZ",
[Year, Month, Day, Hour, Minute, Second]));

expand_records({A, B, Params} = Mime) when is_binary(A), is_binary(B), is_list(Params) ->
% Assume to be a MIME content type
format_content_type(Mime);
expand_records({K, V}) when is_number(K) ->
[ K, V ];
expand_records({K, V}) ->
{expand_records(K), expand_records(V)};
expand_records(L) when is_list(L) ->
lists:map(
fun
({K, V}) when is_binary(K); is_atom(K); is_number(K) -> {K, expand_records(V)};
(V) -> expand_records(V)
end,
L);
expand_records(M) when is_map(M) ->
maps:map( fun(_K, V) -> expand_records(V) end, M );
expand_records(undefined) ->
null;
expand_records(X) ->
X.

expand_record_1([ {F, _} | Fs ], N, R, Acc) ->
Acc1 = Acc#{ F => expand_records( element(N, R) ) },
Acc1 = Acc#{ F => element(N, R) },
expand_record_1(Fs, N+1, R, Acc1);
expand_record_1([], _N, _R, Acc) ->
Acc.


mochijson_to_map({struct, L}) ->
maps:from_list([ mochijson_to_map(V) || V <- L ]);
mochijson_to_map({K, V}) ->
{K, mochijson_to_map(V)};
mochijson_to_map(V) ->
V.

format_content_type({T1, T2, []}) ->
<<T1/binary, $/, T2/binary>>;
format_content_type({T1, T2, Params}) ->
ParamsBin = [ [$;, Param, $=, Value] || {Param,Value} <- Params ],
iolist_to_binary([T1, $/, T2, ParamsBin]).

%% @doc Compile the record defs to a module, for efficient caching of all definitions
-spec compile_module( map() ) -> ok.
compile_module( Defs ) ->
@@ -316,13 +261,3 @@ to_field_name({record_field, _Line, {atom, _, FieldName}}) ->
{FieldName, undefined};
to_field_name({record_field, _Line, {atom, _, FieldName}, InitExpr}) ->
{FieldName, erl_syntax:concrete(InitExpr)}.

chars_to_integer(N2, N1) ->
((N2 - $0) * 10) + (N1 - $0).

chars_to_integer(N3, N2, N1) ->
((N3 - $0) * 100) + ((N2 - $0) * 10) + (N1 - $0).

chars_to_integer(N4, N3, N2, N1) ->
((N4 - $0) * 1000) + ((N3 - $0) * 100) + ((N2 - $0) * 10) + (N1 - $0).
