diff --git a/.gitignore b/.gitignore
index c55e9ac..40d22e2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,16 @@
-*~
-deps/
-*.log
-*.dump
-ebin/
+.rebar
+_build
+.eunit
+deps
+priv
+*.o
+*.beam
+*.plt
+.erlang.cookie
+ebin
+.depsolver_plt
+_rel
+erl_crash.dump
+logs
+log
+.rebar3
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..1f47c35
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,23 @@
+sudo: false
+install: true
+language: erlang
+otp_release:
+ - 22.0.7
+ - 21.1.2
+ - 20.3
+ - 19.3
+ - 18.3
+
+before_install:
+ - docker-compose -f docker-compose.yml up -d
+ - until curl --silent -XGET --fail http://localhost:9200; do printf '.'; sleep 1; done
+
+script:
+ - wget -c https://github.com/erlang/rebar3/releases/download/3.6.2/rebar3
+ - chmod +x rebar3
+ - ./rebar3 update
+ - ./rebar3 do ct, dialyzer
+
+cache:
+ directories:
+ - $HOME/.cache/rebar3/
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..65c5ca8
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,165 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc.
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+the terms and conditions of version 3 of the GNU General Public
+License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+General Public License, and the "GNU GPL" refers to version 3 of the GNU
+General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+by the Library, but which is not otherwise based on the Library.
+Defining a subclass of a class defined by the Library is deemed a mode
+of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+Application with the Library. The particular version of the Library
+with which the Combined Work was made is also called the "Linked
+Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+Corresponding Source for the Combined Work, excluding any source code
+for portions of the Combined Work that, considered in isolation, are
+based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+object code and/or source code for the Application, including any data
+and utility programs needed for reproducing the Combined Work from the
+Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+facility refers to a function or data to be supplied by an Application
+that uses the facility (other than as an argument passed when the
+facility is invoked), then you may convey a copy of the modified
+version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+a header file that is part of the Library. You may convey such object
+code under terms of your choice, provided that, if the incorporated
+material is not limited to numerical parameters, data structure
+layouts and accessors, or small macros, inline functions and templates
+(ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+taken together, effectively do not restrict modification of the
+portions of the Library contained in the Combined Work and reverse
+engineering for debugging such modifications, if you also do each of
+the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+Library side by side in a single library together with other library
+facilities that are not Applications and are not covered by this
+License, and convey such a combined library under terms of your
+choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+of the GNU Lesser General Public License from time to time. Such new
+versions will be similar in spirit to the present version, but may
+differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Library as you received it specifies that a certain numbered version
+of the GNU Lesser General Public License "or any later version"
+applies to it, you have the option of following the terms and
+conditions either of that published version or of any later version
+published by the Free Software Foundation. If the Library as you
+received it does not specify a version number of the GNU Lesser
+General Public License, you may choose any version of the GNU Lesser
+General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+whether future versions of the GNU Lesser General Public License shall
+apply, that proxy's public statement of acceptance of any version is
+permanent authorization for you to choose that version for the
+Library.
diff --git a/README b/README
deleted file mode 100644
index 04f6b37..0000000
--- a/README
+++ /dev/null
@@ -1,13 +0,0 @@
-A client for Elastic Search in Erlang.
-======================================
-
-* rebar
-* uses lhttpc http library
-
-QUICKSTART
-----------
-
-$ rebar get-deps
-$ rebar compile
-$ ./start-dev.sh
-
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..496695b
--- /dev/null
+++ b/README.md
@@ -0,0 +1,144 @@
+ErlasticSearch
+======================================
+
+An Erlang client for [Elasticsearch](https://www.elastic.co/products/elasticsearch).
+
+Build and Run
+-------------
+
+Start a rebar3 shell
+
+```shell
+rebar3 shell
+```
+
+Create an index :
+
+```erlang
+erlastic_search:create_index(<<"index_name">>).
+```
+```
+{ok, [{<<"ok">>,true},{<<"acknowledged">>,true}]}
+```
+
+Index a document :
+
+```erlang
+erlastic_search:index_doc(<<"index_name">>, <<"type">>, [{<<"key1">>, <<"value1">>}]).
+```
+```
+{ok,[{<<"ok">>,true},
+ {<<"_index">>,<<"index_name">>},
+ {<<"_type">>,<<"type">>},
+ {<<"_id">>,<<"T-EzM_yeTkOEHPL9cN5B2g">>},
+ {<<"_version">>,1}]}
+```
+
+Index a document (providing a document id) :
+
+```erlang
+erlastic_search:index_doc_with_id(<<"index_name">>, <<"type">>, <<"id1">>, [{<<"key1">>, <<"value1">>}]).
+```
+```
+{ok,[{<<"ok">>,true},
+ {<<"_index">>,<<"index_name">>},
+ {<<"_type">>,<<"type">>},
+ {<<"_id">>,<<"id1">>},
+ {<<"_version">>,2}]}
+```
+
+Search for a document :
+
+```erlang
+erlastic_search:search(<<"index_name">>, <<"type">>, <<"key1:value1">>).
+```
+```
+{ok,[{<<"took">>,6},
+ {<<"timed_out">>,false},
+ {<<"_shards">>,
+ [{<<"total">>,5},{<<"successful">>,5},{<<"failed">>,0}]},
+ {<<"hits">>,
+ [{<<"total">>,3},
+ {<<"max_score">>,0.30685282},
+ {<<"hits">>,
+ [[{<<"_index">>,<<"index_name">>},
+ {<<"_type">>,<<"type">>},
+ {<<"_id">>,<<"T-EzM_yeTkOEHPL9cN5B2g">>},
+ {<<"_score">>,0.30685282},
+ {<<"_source">>,[{<<"key1">>,<<"value1">>}]}],
+ [{<<"_index">>,<<"index_name">>},
+ {<<"_type">>,<<"type">>},
+ {<<"_id">>,<<"id1">>},
+ {<<"_score">>,0.30685282},
+ {<<"_source">>,[{<<"key1">>,<<"value1">>}]}],
+ [{<<"_index">>,<<"index_name">>},
+ {<<"_type">>,<<"type">>},
+ {<<"_id">>,<<"MMNcfNHUQyeizDkniZD2bg">>},
+ {<<"_score">>,0.30685282},
+ {<<"_source">>,[{<<"key1">>,<<"value1">>}]}]]}]}]}
+```
+
+Testing
+-------
+
+In another terminal use docker-compose to start an Elasticsearch instance :
+
+```bash
+docker-compose up
+```
+
+For convenience, you can also start a Kibana instance for analysis/visualization :
+
+```bash
+docker-compose -f docker-compose.yml -f docker-compose-kibana.yml up
+```
+
+Run Common Test:
+
+```bash
+rebar3 ct
+```
+
+Using another JSON library than `jsx`
+-------------------------------------
+
+By default, we assume all the JSON erlang objects passed to us are in
+[`jsx`](https://github.com/talentdeficit/jsx)'s representation.
+And similarly, all of Elasticsearch's replies will be decoded with `jsx`.
+
+However, you might already be using another JSON library in your project, which
+might encode and decode JSONs from and to a different erlang representation.
+For example, [`jiffy`](https://github.com/davisp/jiffy):
+
+```
+1> SimpleJson = <<"{\"key\":\"value\"}">>.
+<<"{\"key\":\"value\"}">>
+```
+
+```
+2> jiffy:decode(SimpleJson).
+{[{<<"key">>,<<"value">>}]}
+```
+
+```
+3> jsx:decode(SimpleJson).
+[{<<"key">>,<<"value">>}]
+```
+In that case, you probably want `erlastic_search` to use your JSON
+representation of choice instead of `jsx`'s.
+
+You can do so by defining the `ERLASTIC_SEARCH_JSON_MODULE` environment
+variable when compiling `erlastic_search`, for example:
+```shell
+export ERLASTIC_SEARCH_JSON_MODULE=jiffy
+rebar3 compile
+```
+
+The only constraint is that `ERLASTIC_SEARCH_JSON_MODULE` should be the name
+of a module, in your path, that defines the two following callbacks:
+
+```erlang
+-callback encode(erlastic_json()) -> binary().
+-callback decode(binary()) -> erlastic_json().
+```
+where `erlastic_json()` is a type mapping to your JSON representation of choice.
diff --git a/docker-compose-kibana.yml b/docker-compose-kibana.yml
new file mode 100644
index 0000000..448ff28
--- /dev/null
+++ b/docker-compose-kibana.yml
@@ -0,0 +1,8 @@
+version: '3'
+services:
+ kibana:
+ image: kibana:5.0
+ links:
+ - elasticsearch
+ ports:
+ - 5601:5601
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..9fb7e4e
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,13 @@
+version: '3'
+services:
+ elasticsearch:
+ image: elasticsearch:5.3.2-alpine
+ ports:
+ - 9200:9200
+ volumes:
+ - elasticsearch-data:/usr/share/elasticsearch/data
+ environment:
+ ES_JAVA_OPTS: "-Xms500m -Xmx500m"
+volumes:
+ elasticsearch-data: {}
+
diff --git a/include/erlastic_search.hrl b/include/erlastic_search.hrl
index a0bd386..0b40f4a 100644
--- a/include/erlastic_search.hrl
+++ b/include/erlastic_search.hrl
@@ -1,11 +1,18 @@
--type header() :: {string() | atom(), string()}.
+-type header() :: {binary(), binary()}.
-type headers() :: [header()].
+-type erlastic_json() :: tuple() | list() | map().
+%% Hackney async references actually are just that, references... but it seems
+%% to be an undocumented implementation detail; doc (and specs) only says `any()'
+-type erlastic_success_result() :: erlastic_json() | {async, HackneyRef :: any()}.
-record(erls_params, {
- host = "127.0.0.1" :: string(),
- port = 9200 :: integer(),
- ssl = false :: boolean(),
- prefix = "/" :: string(),
- name = default :: term(),
- timeout = infinity :: integer() | infinity
-}).
+ host = erls_config:get_host() :: binary(),
+ port = erls_config:get_port() :: integer(),
+
+ % These are passed verbatim to the underlying http client in use.
+ http_client_options = []:: [term()],
+
+ % Keeping the following two options for backwards compatibility.
+ timeout = infinity :: integer() | infinity,
+ ctimeout = infinity :: integer() | infinity
+ }).
diff --git a/rebar.config b/rebar.config
index 04ae74c..e04806d 100644
--- a/rebar.config
+++ b/rebar.config
@@ -1,8 +1,3 @@
-
{erl_opts, [debug_info]}.
-{deps_dir, ["deps"]}.
-
-{deps, [
- {lhttpc, ".*", {git, "https://github.com/esl/lhttpc.git", "master"}}
-]}.
+{deps, [hackney, jsx]}.
diff --git a/rebar.config.script b/rebar.config.script
new file mode 100644
index 0000000..bc69849
--- /dev/null
+++ b/rebar.config.script
@@ -0,0 +1,13 @@
+ErlOpts = proplists:get_value(erl_opts, CONFIG),
+
+JsonModuleStr = case os:getenv("ERLASTIC_SEARCH_JSON_MODULE") of
+ Value when erlang:is_list(Value), erlang:length(Value) > 0 ->
+ Value;
+ _ ->
+ "jsx"
+end,
+
+JsonModule = erlang:list_to_atom(JsonModuleStr),
+
+NewErlOpts = [ {d, 'ERLASTIC_SEARCH_JSON_MODULE', JsonModule} | ErlOpts],
+lists:keystore(erl_opts, 1, CONFIG, {erl_opts, NewErlOpts}).
diff --git a/rebar.lock b/rebar.lock
new file mode 100644
index 0000000..a4b9d22
--- /dev/null
+++ b/rebar.lock
@@ -0,0 +1,22 @@
+{"1.1.0",
+[{<<"certifi">>,{pkg,<<"certifi">>,<<"2.5.1">>},1},
+ {<<"hackney">>,{pkg,<<"hackney">>,<<"1.15.2">>},0},
+ {<<"idna">>,{pkg,<<"idna">>,<<"6.0.0">>},1},
+ {<<"jsx">>,{pkg,<<"jsx">>,<<"2.10.0">>},0},
+ {<<"metrics">>,{pkg,<<"metrics">>,<<"1.0.1">>},1},
+ {<<"mimerl">>,{pkg,<<"mimerl">>,<<"1.2.0">>},1},
+ {<<"parse_trans">>,{pkg,<<"parse_trans">>,<<"3.3.0">>},2},
+ {<<"ssl_verify_fun">>,{pkg,<<"ssl_verify_fun">>,<<"1.1.5">>},1},
+ {<<"unicode_util_compat">>,{pkg,<<"unicode_util_compat">>,<<"0.4.1">>},2}]}.
+[
+{pkg_hash,[
+ {<<"certifi">>, <<"867CE347F7C7D78563450A18A6A28A8090331E77FA02380B4A21962A65D36EE5">>},
+ {<<"hackney">>, <<"07E33C794F8F8964EE86CEBEC1A8ED88DB5070E52E904B8F12209773C1036085">>},
+ {<<"idna">>, <<"689C46CBCDF3524C44D5F3DDE8001F364CD7608A99556D8FBD8239A5798D4C10">>},
+ {<<"jsx">>, <<"77760560D6AC2B8C51FD4C980E9E19B784016AA70BE354CE746472C33BEB0B1C">>},
+ {<<"metrics">>, <<"25F094DEA2CDA98213CECC3AEFF09E940299D950904393B2A29D191C346A8486">>},
+ {<<"mimerl">>, <<"67E2D3F571088D5CFD3E550C383094B47159F3EEE8FFA08E64106CDF5E981BE3">>},
+ {<<"parse_trans">>, <<"09765507A3C7590A784615CFD421D101AEC25098D50B89D7AA1D66646BC571C1">>},
+ {<<"ssl_verify_fun">>, <<"6EAF7AD16CB568BB01753DBBD7A95FF8B91C7979482B95F38443FE2C8852A79B">>},
+ {<<"unicode_util_compat">>, <<"D869E4C68901DD9531385BB0C8C40444EBF624E60B6962D95952775CAC5E90CD">>}]}
+].
diff --git a/src/erlastic_search.app.src b/src/erlastic_search.app.src
index 9722125..47800b6 100644
--- a/src/erlastic_search.app.src
+++ b/src/erlastic_search.app.src
@@ -1,10 +1,11 @@
-%% This is the application resource file (.app file) for the erlastic_search,
-%% application.
-{application, erlastic_search,
- [{description, "An Erlang app for communicating with Elastic Search's rest interface."},
- {vsn, "0.2.0"},
- {modules, []},
- {registered,[]},
- {applications, [kernel, stdlib, sasl, gas, ssl, crypto, lhttpc]},
- {start_phases, []}]}.
-
+{application,erlastic_search,
+ [{description,"An Erlang app for communicating with Elastic Search's rest interface."},
+ {vsn,"git"},
+ {modules,[]},
+ {registered,[]},
+ {applications,[kernel,stdlib,ssl,hackney]},
+ {start_phases,[]},
+ {maintainers,["Tristan Sloughter"]},
+ {licenses,["LGPL"]},
+ {links,[{"Github",
+ "https://github.com/tsloughter/erlastic_search"}]}]}.
diff --git a/src/erlastic_search.erl b/src/erlastic_search.erl
index 05a64be..9885513 100644
--- a/src/erlastic_search.erl
+++ b/src/erlastic_search.erl
@@ -1,220 +1,758 @@
%%%-------------------------------------------------------------------
%%% @author Tristan Sloughter <>
-%%% @copyright (C) 2010, Tristan Sloughter
+%%% @copyright (C) 2010, 2012, Tristan Sloughter
%%% @doc
%%%
%%% @end
%%% Created : 14 Feb 2010 by Tristan Sloughter <>
%%%-------------------------------------------------------------------
-module(erlastic_search).
--compile([export_all]).
+
+-export([create_index/1
+ ,create_index/2
+ ,create_index/3
+ ,create_index_template/2
+ ,create_index_template/3
+ ,stats_index/0
+ ,stats_index/1
+ ,stats_index/2
+ ,nodes_info/0
+ ,nodes_info/1
+ ,nodes_info/2
+ ,put_mapping/3
+ ,put_mapping/4
+ ,get_mapping/0
+ ,get_mapping/1
+ ,get_mapping/2
+ ,get_mapping/3
+ ,get_settings/0
+ ,get_settings/1
+ ,get_settings/2
+ ,get_index_templates/0
+ ,get_index_templates/1
+ ,get_index_templates/2
+ ,index_doc/3
+ ,index_doc/4
+ ,index_doc_with_opts/5
+ ,index_doc_with_id/4
+ ,index_doc_with_id/5
+ ,index_doc_with_id_opts/6
+ ,update_doc/4
+ ,update_doc/5
+ ,update_doc_opts/6
+ ,upsert_doc/4
+ ,upsert_doc/5
+ ,upsert_doc_opts/6
+ ,bulk_index_docs/2
+ ,bulk_index_docs/1
+ ,search/2
+ ,search/3
+ ,search/5
+ ,count/2
+ ,count/3
+ ,count/5
+ ,search_limit/4
+ ,search_scroll/4
+ ,search_scroll/1
+ ,multi_search/2
+ ,get_doc/3
+ ,get_doc/4
+ ,get_multi_doc/3
+ ,get_doc_opts/5
+ ,flush_index/1
+ ,flush_index/2
+ ,flush_all/0
+ ,flush_all/1
+ ,refresh_all/0
+ ,refresh_all/1
+ ,refresh_index/1
+ ,refresh_index/2
+ ,delete_doc/3
+ ,delete_doc/4
+ ,delete_doc_by_query/3
+ ,delete_doc_by_query/4
+ ,delete_doc_by_query_doc/3
+ ,delete_doc_by_query_doc/4
+ ,delete_index/1
+ ,delete_index/2
+ ,delete_index_template/1
+ ,delete_index_template/2
+ ,index_exists/1
+ ,index_exists/2
+ ,index_template_exists/1
+ ,index_template_exists/2
+ ,optimize_index/1
+ ,optimize_index/2
+ ,percolator_add/3
+ ,percolator_add/4
+ ,percolator_del/2
+ ,percolator_del/3
+ ,percolate/3
+ ,percolate/4
+ ,reindex/1
+ ,reindex/2
+ ,aliases/1
+ ,aliases/2
+ ,bulk_operation/1
+ ,bulk_operation/2
+ ,put_setting/2
+ ,put_setting/3
+]).
+
-include("erlastic_search.hrl").
%%--------------------------------------------------------------------
%% @doc
-%% Takes the name of an index to create and sends the request to
-%% Elastic Search, the default settings on localhost.
-%%
-%% @spec create_index(Index) -> {ok, Data} | {error, Error}
+%% Takes the name of an index to create and sends the request to
+%% Elasticsearch, the default settings on localhost.
%% @end
%%--------------------------------------------------------------------
+-spec create_index(binary()) -> {ok, erlastic_success_result()} | {error, any()}.
create_index(Index) ->
- create_index(#erls_params{}, Index).
+ create_index(#erls_params{}, Index, <<>>).
%%--------------------------------------------------------------------
%% @doc
%% Takes the name of an index and the record describing the servers
-%% details to create and sends the request to Elastic Search.
-%%
-%% @spec create_index(Params, Index) -> {ok, Data} | {error, Error}
+%% details to create and sends the request to Elasticsearch; or else
+%% takes the name of the index to create and a body to use in the request; and
+%% creates that index using the default settings on localhost.
+%% (see the doc at https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html)
%% @end
%%--------------------------------------------------------------------
-create_index(Params, Index) ->
- erls_resource:put(Params, Index, [], [], [], []).
+-spec create_index(#erls_params{}, binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+create_index(#erls_params{} = Params, Index) when is_binary(Index) ->
+ create_index(Params, Index, <<>>);
+create_index(Index, Doc) when is_binary(Index), (is_binary(Doc) orelse is_list(Doc) orelse is_tuple(Doc) orelse is_map(Doc)) ->
+ create_index(#erls_params{}, Index, Doc).
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a Json document described in
-%% Erlang terms, converts the document to a string and passes to the
-%% default server. Elastic Search provides the doc with an id.
-%%
-%% @spec index(Index, Type, Doc) -> {ok, Data} | {error, Error}
+%% Takes a record describing the servers details, an index name, and a request body, and creates that index
+%% (see the doc at https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html)
%% @end
%%--------------------------------------------------------------------
-index_doc(Index, Type, Doc) when is_tuple(Doc) ->
- index_doc(#erls_params{}, Index, Type, Doc).
-
+-spec create_index(#erls_params{}, binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+create_index(Params, Index, Doc) when is_binary(Index), (is_binary(Doc) orelse is_list(Doc) orelse is_tuple(Doc) orelse is_map(Doc)) ->
+ erls_resource:put(Params, Index, [], [], maybe_encode_doc(Doc), Params#erls_params.http_client_options).
+
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a Json document described in
-%% Erlang terms, converts the document to a string and passes to the
-%% server. Elastic Search provides the doc with an id.
-%%
-%% @spec index(Params Index, Type, Doc) -> {ok, Data} | {error, Error}
+%% Takes the name of an index and a body to use in the request; and
+%% creates an index template using the default settings on localhost.
+%% (see the doc at https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#indices-templates)
%% @end
%%--------------------------------------------------------------------
-index_doc(Params, Index, Type, Doc) when is_tuple(Doc) ->
- Json = erls_mochijson2:encode(Doc),
- erls_resource:post(Params, filename:join(Index, Type), [], [], Json, []).
+-spec create_index_template(Index :: binary(), Doc :: erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+create_index_template(Index, Doc) when is_binary(Index), (is_binary(Doc) orelse is_list(Doc) orelse is_tuple(Doc) orelse is_map(Doc)) ->
+ create_index_template(#erls_params{}, Index, Doc).
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a Json document described in
-%% Erlang terms, converts the document to a string after adding the _id field
-%% and passes to the default server.
-%%
-%% @spec index(Index, Type, Id, Doc) -> {ok, Data} | {error, Error}
+%% Takes a record describing the servers details, an index name, and a request body, and creates an index template
+%% (see the doc at https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#indices-templates)
+%% @end
+%%--------------------------------------------------------------------
+-spec create_index_template(#erls_params{}, binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+create_index_template(Params, Index, Doc) when is_binary(Index), (is_binary(Doc) orelse is_list(Doc) orelse is_tuple(Doc) orelse is_map(Doc)) ->
+ erls_resource:put(Params, <<"_template/", Index/binary>>, [], [], maybe_encode_doc(Doc), Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Takes an optional list of index names and the record describing the servers
+%% details to read the stats for these index.
+%% If no index in supplied then stats for all indices are returned.
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-stats.html
+%% @end
+%%--------------------------------------------------------------------
+
+stats_index() ->
+ stats_index(#erls_params{}).
+
+stats_index(Params) ->
+ stats_index(Params, []).
+
+stats_index(Params, Index) ->
+ erls_resource:get(Params, filename:join(commas(Index),"_stats"), [], [],
+ Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Takes an optional list of node names and the record describing the server's
+%% details, and reads the info for these nodes.
+%% If no node is supplied then info for all nodes is returned.
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-info.html
+%% @end
+%%--------------------------------------------------------------------
+
+-spec nodes_info() -> {ok, erlastic_success_result()} | {error, any()}.
+nodes_info() ->
+ nodes_info(#erls_params{}).
+
+-spec nodes_info(#erls_params{}) -> {ok, erlastic_success_result()} | {error, any()}.
+nodes_info(#erls_params{} = Params) ->
+ nodes_info(Params, []).
+
+-spec nodes_info(#erls_params{}, [binary()]) -> {ok, erlastic_success_result()} | {error, any()}.
+nodes_info(#erls_params{} = Params, Nodes) when erlang:is_list(Nodes) ->
+ erls_resource:get(Params, filename:join("_nodes", commas(Nodes)), [], [],
+ Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Insert a mapping into an Elasticsearch index
+%% @end
+%%--------------------------------------------------------------------
+-spec put_mapping(binary(), binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+put_mapping(Index, Type, Doc) ->
+ put_mapping(#erls_params{}, Index, Type, Doc).
+
+-spec put_mapping(#erls_params{}, binary(), binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+put_mapping(Params, Index, Type, Doc) ->
+ erls_resource:put(Params, filename:join([Index, Type, "_mapping"]), [], [], maybe_encode_doc(Doc), Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the mapping for all indices, using default server parameters
+%% For all flavours of `get_mapping/*' functions, see the doc at
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html
+%% @end
+%%--------------------------------------------------------------------
+-spec get_mapping() -> {ok, erlastic_success_result()} | {error, any()}.
+get_mapping() ->
+ get_mapping(#erls_params{}, <<"_all">>, <<"_all">>).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% If passed server parameters, retrieves the mapping for all indices on that
+%% server; if passed an index name, retrieves the mapping for that index using
+%% default server parameters
+%% @end
+%%--------------------------------------------------------------------
+-spec get_mapping(#erls_params{} | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+get_mapping(#erls_params{} = Params) ->
+ get_mapping(Params, <<"_all">>, <<"_all">>);
+get_mapping(Index) when is_binary(Index) ->
+ get_mapping(#erls_params{}, Index, <<"_all">>).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% If passed server parameters and an index name, retrieves the mapping for
+%% that index on that server; if passed an index name and a type name,
+%% retrieves the mapping for that specific type
%% @end
%%--------------------------------------------------------------------
-index_doc_with_id(Index, Type, Id, Doc) when is_tuple(Doc) ->
- index_doc_with_id(#erls_params{}, Index, Type, Id, Doc).
-
+-spec get_mapping(#erls_params{} | binary(), binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+get_mapping(#erls_params{} = Params, Index) when is_binary(Index) ->
+ get_mapping(Params, Index, <<"_all">>);
+get_mapping(Index, Type) when is_binary(Index), is_binary(Type) ->
+ get_mapping(#erls_params{}, Index, Type).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the mapping for the given index and type, using the provided
+%% server parameters
+%% @end
+%%--------------------------------------------------------------------
+-spec get_mapping(#erls_params{}, binary(), binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+get_mapping(#erls_params{} = Params, Index, Type) when is_binary(Index), is_binary(Type) ->
+ erls_resource:get(Params, filename:join([Index, <<"_mapping">>, Type]), [], [], [], Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the settings for all indices, using default server parameters
+%% For all flavours of `get_settings/*' functions, see the doc at
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-settings.html
+%% @end
+%%--------------------------------------------------------------------
+-spec get_settings() -> {ok, erlastic_success_result()} | {error, any()}.
+get_settings() ->
+ get_settings(#erls_params{}, <<"_all">>).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% If passed server parameters, retrieves the settings for all indices on that
+%% server; if passed an index name, retrieves the settings for that index using
+%% default server parameters
+%% @end
+%%--------------------------------------------------------------------
+-spec get_settings(#erls_params{} | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+get_settings(#erls_params{} = Params) ->
+ get_settings(Params, <<"_all">>);
+get_settings(Index) when is_binary(Index) ->
+ get_settings(#erls_params{}, Index).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the settings for the given index, using the provided server
+%% parameters
+%% @end
+%%--------------------------------------------------------------------
+-spec get_settings(#erls_params{}, binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+get_settings(#erls_params{} = Params, Index) when is_binary(Index) ->
+ erls_resource:get(Params, filename:join([Index, <<"_settings">>]), [], [], [], Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves all index templates, using the default server parameters. See docs at:
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#getting
+%% @end
+%%--------------------------------------------------------------------
+-spec get_index_templates() -> {ok, erlastic_success_result()} | {error, any()}.
+get_index_templates() ->
+ get_index_templates(#erls_params{}, <<>>).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the index templates that match the index template string, using the default server parameters
+%% or retrieves all index templates with provided server parameters. See docs at:
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#getting
+%% @end
+%%--------------------------------------------------------------------
+-spec get_index_templates(binary() | #erls_params{}) -> {ok, erlastic_success_result()} | {error, any()}.
+get_index_templates(IndexTemplate) when is_binary(IndexTemplate) ->
+ get_index_templates(#erls_params{}, IndexTemplate);
+get_index_templates(#erls_params{} = Params) ->
+ get_index_templates(Params, <<>>).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the index templates that match the index template string, using the provided server
+%% parameters. See docs at:
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#getting
+%% @end
+%%--------------------------------------------------------------------
+-spec get_index_templates(#erls_params{}, binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+get_index_templates(#erls_params{http_client_options = HttpClientOptions} = Params, IndexTemplate) ->
+ erls_resource:get(Params, filename:join([<<"_template">>, IndexTemplate]), [], [], [], HttpClientOptions).
+
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a Json document described in
+%% Takes the index and type name and a Json document described in
+%% Erlang terms, converts the document to a string and passes to the
+%% default server. Elasticsearch provides the doc with an id.
+%% @end
+%%--------------------------------------------------------------------
+-spec index_doc(binary(), binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+index_doc(Index, Type, Doc) ->
+ index_doc(#erls_params{}, Index, Type, Doc).
+
+-spec index_doc(#erls_params{}, binary(), binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+index_doc(Params, Index, Type, Doc) ->
+ index_doc_with_opts(Params, Index, Type, Doc, []).
+
+-spec index_doc_with_opts(#erls_params{}, binary(), binary(), erlastic_json() | binary(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+index_doc_with_opts(Params, Index, Type, Doc, Opts) when is_list(Opts) ->
+ erls_resource:post(Params, filename:join(Index, Type), [], Opts, maybe_encode_doc(Doc), Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Takes the index and type name and a Json document described in
%% Erlang terms, converts the document to a string after adding the _id field
-%% and passes to the server.
-%%
-%% @spec index(Params, Index, Type, Id, Doc) -> {ok, Data} | {error, Error}
+%% and passes to the default server.
%% @end
%%--------------------------------------------------------------------
-index_doc_with_id(Params, Index, Type, Id, Doc) when is_tuple(Doc) ->
- Json = iolist_to_binary(erls_mochijson2:encode(Doc)),
- index_doc_with_id(Params, Index, Type, Id, Json);
+-spec index_doc_with_id(binary(), binary(), binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+index_doc_with_id(Index, Type, Id, Doc) ->
+ index_doc_with_id_opts(#erls_params{}, Index, Type, Id, Doc, []).
+
+-spec index_doc_with_id(#erls_params{}, binary(), binary(), binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+index_doc_with_id(Params, Index, Type, Id, Doc) ->
+ index_doc_with_id_opts(Params, Index, Type, Id, Doc, []).
+
+-spec index_doc_with_id_opts(#erls_params{}, binary(), binary(), binary(), erlastic_json() | binary(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+index_doc_with_id_opts(Params, Index, Type, undefined, Doc, Opts) ->
+ index_doc_with_opts(Params, Index, Type, Doc, Opts);
+index_doc_with_id_opts(Params, Index, Type, Id, Doc, Opts) when is_list(Opts) ->
+ erls_resource:post(Params, filename:join([Index, Type, Id]), [], Opts, maybe_encode_doc(Doc), Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc Partially update the document. The Doc Id must already exist.
+%% (https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html#_updates_with_a_partial_document)
+%% --------------------------------------------------------------------
+
+-spec update_doc(binary(), binary(), binary(), erlastic_json()) -> {ok, erlastic_success_result()} | {error, any()}.
+update_doc(Index, Type, Id, Doc) ->
+ update_doc_opts(#erls_params{}, Index, Type, Id, Doc, []).
+
+-spec update_doc(#erls_params{}, binary(), binary(), binary(), erlastic_json()) -> {ok, erlastic_success_result()} | {error, any()}.
+update_doc(Params, Index, Type, Id, Doc) ->
+ update_doc_opts(Params, Index, Type, Id, Doc, []).
+
+-spec update_doc_opts(#erls_params{}, binary(), binary(), binary(), erlastic_json(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+update_doc_opts(Params, Index, Type, Id, Doc, Opts) when is_list(Opts), (is_list(Doc) orelse is_tuple(Doc) orelse is_map(Doc)) ->
+ DocBin = erls_json:encode(Doc),
+ %% we cannot use erls_json to generate this, see the doc string for `erls_json:encode/1'
+ Body = <<"{\"doc\":", DocBin/binary, "}">>,
+ erls_resource:post(Params, filename:join([Index, Type, Id, "_update"]), [], Opts,
+ Body,
+ Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc Insert the document, or replace it when it already exists (upsert)
+%% (https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html)
+%% --------------------------------------------------------------------
+
+-spec upsert_doc(binary(), binary(), binary(), erlastic_json()) -> {ok, erlastic_success_result()} | {error, any()}.
+upsert_doc(Index, Type, Id, Doc) ->
+ upsert_doc_opts(#erls_params{}, Index, Type, Id, Doc, []).
-index_doc_with_id(Params, Index, Type, Id, Json) when is_binary(Json) ->
- index_doc_with_id_opts(Params, Index, Type, Id, Json, []).
+-spec upsert_doc(#erls_params{}, binary(), binary(), binary(), erlastic_json()) -> {ok, erlastic_success_result()} | {error, any()}.
+upsert_doc(Params, Index, Type, Id, Doc) ->
+ upsert_doc_opts(Params, Index, Type, Id, Doc, []).
-index_doc_with_id_opts(Params, Index, Type, Id, Json, Opts) when is_binary(Json), is_list(Opts) ->
- erls_resource:post(Params, filename:join([Index, Type, Id]), [], Opts, Json, []).
+-spec upsert_doc_opts(#erls_params{}, binary(), binary(), binary(), erlastic_json(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+upsert_doc_opts(Params, Index, Type, Id, Doc, Opts) when is_list(Opts), (is_list(Doc) orelse is_tuple(Doc) orelse is_map(Doc)) ->
+ DocBin = erls_json:encode(Doc),
+ %% we cannot use erls_json to generate this, see the doc string for `erls_json:encode/1'
+ Body = <<"{\"doc_as_upsert\":true,\"doc\":", DocBin/binary, "}">>,
+ erls_resource:post(Params, filename:join([Index, Type, Id, "_update"]), [], Opts,
+ Body,
+ Params#erls_params.http_client_options).
-to_bin(A) when is_atom(A) -> to_bin(atom_to_list(A));
-to_bin(L) when is_list(L) -> list_to_binary(L);
-to_bin(B) when is_binary(B) -> B.
+%% Bulk index docs with default params
+-spec bulk_index_docs(list()) -> {ok, list} | {error, any()}.
+bulk_index_docs(IndexTypeIdJsonTuples) ->
+ bulk_index_docs(#erls_params{}, IndexTypeIdJsonTuples).
-%% Documents is [ {Index, Type, Id, Json}, ... ]
+%% Documents is [ {Index, Type, Id, Json}, {Index, Type, Id, HeaderInformation, Json}... ]
+-spec bulk_index_docs(#erls_params{}, list()) -> {ok, list()} | {error, any()}.
bulk_index_docs(Params, IndexTypeIdJsonTuples) ->
- Body = lists:map(fun({Index, Type, Id, Json}) ->
- Header = erls_mochijson2:encode({struct, [
- {<<"index">>, [ {struct, [
- {<<"_index">>, to_bin(Index)},
- {<<"_type">>, to_bin(Type)},
- {<<"_id">>, to_bin(Id)}
- ]}]}]}),
- [
- Header,
- <<"\n">>,
- Json,
- <<"\n">>
- ]
- end, IndexTypeIdJsonTuples),
- erls_resource:post(Params, "/_bulk", [], [], Body, []).
-
+ bulk_operation(Params, [{index, IndexTypeIdJsonTuple} || IndexTypeIdJsonTuple <- IndexTypeIdJsonTuples]).
+%%--------------------------------------------------------------------
+%% @doc
+%% Takes the index and type name and a query as "key:value" and sends
+%% it to the Elasticsearch server specified in Params.
+%% @end
+%%--------------------------------------------------------------------
+-spec search(binary() | list(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
search(Index, Query) ->
- search(#erls_params{}, Index, "", Query, []).
+ search(#erls_params{}, Index, <<>>, Query, []).
+-spec search(binary() | list() | #erls_params{}, binary() | list(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
search(Params, Index, Query) when is_record(Params, erls_params) ->
- search(Params, Index, "", Query, []);
+ search(Params, Index, <<>>, Query, []);
+search(Index, Type, Query) ->
+ search(#erls_params{}, Index, Type, Query, []).
+
+-spec search_limit(binary() | list(), binary(), erlastic_json() | binary(), integer()) -> {ok, erlastic_success_result()} | {error, any()}.
+search_limit(Index, Type, Query, Limit) when is_integer(Limit) ->
+ search(#erls_params{}, Index, Type, Query, [{<<"size">>, integer_to_list(Limit)}]).
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a query as "key:value" and sends
-%% it to the default Elastic Search server on localhost:9100
-%%
-%% @spec search(Index, Type, Query) -> {ok, Data} | {error, Error}
+%% Uses the count API to execute a query and get the number of matches
+%% for that query. See `search/*' for more details regarding
+%% query types and the different input parameters.
%% @end
%%--------------------------------------------------------------------
-search(Index, Type, Query) ->
- search(#erls_params{}, Index, Type, Query, []).
+-spec count(binary() | list(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+count(Index, Query) ->
+ count(#erls_params{}, Index, <<>>, Query, []).
+
+-spec count(binary() | list() | #erls_params{}, binary() | list(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+count(Params, Index, Query) when is_record(Params, erls_params) ->
+ count(Params, Index, <<>>, Query, []);
+count(Index, Type, Query) ->
+ count(#erls_params{}, Index, Type, Query, []).
+
+-spec count(#erls_params{}, list() | binary(), list() | binary(), erlastic_json() | binary(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+count(Params, Index, Type, Query, Opts) ->
+ search_helper(<<"_count">>, Params, Index, Type, Query, Opts).
-search_limit(Index, Type, Query, Limit) when is_integer(Limit) ->
- search(#erls_params{}, Index, Type, Query, [{"size", lists:flatten(io_lib:format("~B",[Limit]))}]).
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a query as "key:value" and sends
-%% it to the Elastic Search server specified in Params.
-%%
-%% @spec search(Params, Index, Type, Query) -> {ok, Data} | {error, Error}
+%% search_scroll/4 -- Takes the index, type name and search query
+%% sends it to the Elasticsearch server specified in Params.
+%% Returns search results along with scroll id which can be passed
+%% to search_scroll/1 to get next set of search results
%% @end
%%--------------------------------------------------------------------
-search(Params, Index=[H|_T], Type=[H2|_T2], Query, Opts) when not is_list(H), is_list(H2) ->
- search(Params, [Index], Type, Query, Opts);
-search(Params, Index=[H|_T], Type=[H2|_T2], Query, Opts) when is_list(H), not is_list(H2) ->
- search(Params, Index, [Type], Query, Opts);
-search(Params, Index=[H|_T], Type=[H2|_T2], Query, Opts) when not is_list(H), not is_list(H2) ->
- search(Params, [Index], [Type], Query, Opts);
+-spec search_scroll(binary() | list(), binary(), erlastic_json() | binary(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+search_scroll(Index, Type, Query, Timeout) ->
+ search(#erls_params{}, Index, Type, Query, [{<<"scroll">>, list_to_binary(Timeout)}]).
+
+-spec search_scroll(erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+search_scroll(Query) ->
+ Params = #erls_params{},
+ erls_resource:post(Params, filename:join([<<"_search">>, <<"scroll">>]), [], [], erls_json:encode(Query), Params#erls_params.http_client_options).
+
+-spec search(#erls_params{}, list() | binary(), list() | binary(), erlastic_json() | binary(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
search(Params, Index, Type, Query, Opts) ->
- erls_resource:get(Params, filename:join([erls_utils:comma_separate(Index), Type, "_search"]), [], [{"q", Query}]++Opts, []).
+ search_helper(<<"_search">>, Params, Index, Type, Query, Opts).
+
+-spec multi_search(#erls_params{}, list({HeaderInformation :: headers(), SearchRequest :: erlastic_json() | binary()})) -> {ok, ResultJson :: erlastic_success_result()} | {error, Reason :: any()}.
+multi_search(Params, HeaderJsonTuples) ->
+ Body = lists:map(fun({HeaderInformation, SearchRequest}) ->
+ [ erls_json:encode(HeaderInformation), <<"\n">>, maybe_encode_doc(SearchRequest), <<"\n">> ]
+ end, HeaderJsonTuples),
+ erls_resource:get(Params, <<"/_msearch">>, [], [], iolist_to_binary(Body), Params#erls_params.http_client_options).
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a doc id and sends
-%% it to the default Elastic Search server on localhost:9100
-%%
-%% @spec index(Index, Type, Id, Doc) -> {ok, Data} | {error, Error}
+%% Takes the index and type name and a doc id and sends
+%% it to the default Elasticsearch server on localhost:9200
%% @end
%%--------------------------------------------------------------------
+-spec get_doc(binary(), binary(), binary()) -> {ok, erlastic_success_result()} | {error, any()}.
get_doc(Index, Type, Id) ->
get_doc(#erls_params{}, Index, Type, Id).
%%--------------------------------------------------------------------
%% @doc
-%% Takes the index and type name and a doc id and sends
-%% it to the Elastic Search server specified in Params.
-%%
-%% @spec index(Params, Index, Type, Id, Doc) -> {ok, Data} | {error, Error}
+%% Takes the index and type name and a doc id and sends
+%% it to the Elasticsearch server specified in Params.
%% @end
%%--------------------------------------------------------------------
+-spec get_doc(#erls_params{}, binary(), binary(), binary()) -> {ok, erlastic_success_result()} | {error, any()}.
get_doc(Params, Index, Type, Id) ->
- erls_resource:get(Params, filename:join([Index, Type, Id]), [], [], []).
+ get_doc_opts(Params, Index, Type, Id, []).
+
+-spec get_doc_opts(#erls_params{}, binary(), binary(), binary(), list()) -> {ok, erlastic_success_result()}
+ | {error, any()}.
+get_doc_opts(Params, Index, Type, Id, Opts) ->
+ erls_resource:get(Params, filename:join([Index, Type, Id]), [], Opts, Params#erls_params.http_client_options).
+
+-spec get_multi_doc(binary(), binary(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+get_multi_doc(Index, Type, Data) ->
+ Params = #erls_params{},
+ erls_resource:post(Params, filename:join([Index, Type, <<"_mget">>]), [], [], erls_json:encode(Data),
+ Params#erls_params.http_client_options).
flush_index(Index) ->
flush_index(#erls_params{}, Index).
-flush_index(Params, Index=[H|_T]) when not is_list(H) ->
- flush_index(Params, [Index]);
flush_index(Params, Index) ->
- erls_resource:post(Params, filename:join([erls_utils:comma_separate(Index), "_flush"]), [], [], [], []).
+ erls_resource:post(Params, filename:join([commas(Index), <<"_flush">>]), [], [], [], Params#erls_params.http_client_options).
flush_all() ->
refresh_all(#erls_params{}).
flush_all(Params) ->
- erls_resource:post(Params, "_flush", [], [], [], []).
+ erls_resource:post(Params, <<"_flush">>, [], [], [], Params#erls_params.http_client_options).
refresh_index(Index) ->
refresh_index(#erls_params{}, Index).
-refresh_index(Params, Index=[H|_T]) when not is_list(H) ->
- refresh_index(Params, [Index]);
refresh_index(Params, Index) ->
- erls_resource:post(Params, filename:join([erls_utils:comma_separate(Index), "_refresh"]), [], [], [], []).
+ erls_resource:post(Params, filename:join([commas(Index), <<"_refresh">>]), [], [], [], Params#erls_params.http_client_options).
refresh_all() ->
refresh_all(#erls_params{}).
refresh_all(Params) ->
- erls_resource:post(Params, "_refresh", [], [], [], []).
+ erls_resource:post(Params, <<"_refresh">>, [], [], [], Params#erls_params.http_client_options).
delete_doc(Index, Type, Id) ->
delete_doc(#erls_params{}, Index, Type, Id).
delete_doc(Params, Index, Type, Id) ->
- erls_resource:delete(Params, filename:join([Index, Type, Id]), [], [], []).
+ erls_resource:delete(Params, filename:join([Index, Type, Id]), [], [], Params#erls_params.http_client_options).
delete_doc_by_query(Index, Type, Query) ->
delete_doc_by_query(#erls_params{}, Index, Type, Query).
delete_doc_by_query(Params, Index, Type, Query) ->
- erls_resource:delete(Params, filename:join([Index, Type]), [], [{"q", Query}], []).
+ erls_resource:delete(Params, filename:join([Index, Type]), [], [{<<"q">>, Query}], Params#erls_params.http_client_options).
+
+delete_doc_by_query_doc(Index, Type, Doc) ->
+ delete_doc_by_query_doc(#erls_params{}, Index, Type, Doc).
+
+delete_doc_by_query_doc(Params, Index, any, Doc) ->
+ erls_resource:post(Params, filename:join([Index, <<"_delete_by_query">>]), [], [], erls_json:encode(Doc), Params#erls_params.http_client_options);
+
+delete_doc_by_query_doc(Params, Index, Type, Doc) ->
+ erls_resource:post(Params, filename:join([Index, Type, <<"_delete_by_query">>]), [], [], erls_json:encode(Doc), Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Delete existing index
+%% @end
+%%--------------------------------------------------------------------
+delete_index(Index) ->
+ delete_index(#erls_params{}, Index).
+
+delete_index(Params, Index) ->
+ erls_resource:delete(Params, Index, [], [], [],
+ Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Delete existing index template
+%% See docs at: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#delete
+%% @end
+%%--------------------------------------------------------------------
+-spec delete_index_template(binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+delete_index_template(Index) ->
+ delete_index_template(#erls_params{}, Index).
+
+-spec delete_index_template(#erls_params{}, binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+delete_index_template(Params, Index) ->
+ erls_resource:delete(Params, <<"_template/", Index/binary>>, [], [], [],
+ Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Tests if a given index exists
+%% See https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-exists.html
+%% @end
+%%--------------------------------------------------------------------
+-spec index_exists(binary()) -> {ok, boolean()} | {error, any()}.
+index_exists(Index) ->
+ index_exists(#erls_params{}, Index).
+
+-spec index_exists(#erls_params{}, binary()) -> {ok, boolean()} | {error, any()}.
+index_exists(Params, Index) ->
+ exists(erls_resource:head(Params, Index, [], [], Params#erls_params.http_client_options)).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Tests if a given index template exists
+%% See https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html
+%% @end
+%%--------------------------------------------------------------------
+-spec index_template_exists(binary()) -> {ok, boolean()} | {error, any()}.
+index_template_exists(IndexTemplate) ->
+ index_template_exists(#erls_params{}, IndexTemplate).
+
+-spec index_template_exists(#erls_params{}, binary()) -> {ok, boolean()} | {error, any()}.
+index_template_exists(Params, IndexTemplate) ->
+ exists(erls_resource:head(Params, filename:join([<<"_template">>, IndexTemplate]), [], [], Params#erls_params.http_client_options)).
+
+%% @private
+exists(ok) -> {ok, true};
+exists({error, 404}) -> {ok, false};
+exists({error, _Else} = Error) -> Error.
optimize_index(Index) ->
optimize_index(#erls_params{}, Index).
-optimize_index(Params, Index=[H|_T]) when not is_list(H)->
- optimize_index(Params, [Index]);
optimize_index(Params, Index) ->
- erls_resource:post(Params, filename:join([erls_utils:comma_separate(Index), "_optimize"]), [], [], [], []).
+ erls_resource:post(Params, filename:join([commas(Index), <<"_optimize">>]), [], [], [], Params#erls_params.http_client_options).
+
+percolator_add(Index, Name, Query) ->
+ percolator_add(#erls_params{}, Index, Name, Query).
+
+percolator_add(Params, Index, Name, Query) ->
+ erls_resource:put(Params, filename:join([<<"_percolator">>, commas(Index), Name]), [], [], erls_json:encode(Query), Params#erls_params.http_client_options).
+
+percolator_del(Index, Name) ->
+ percolator_del(#erls_params{}, Index, Name).
+
+percolator_del(Params, Index, Name) ->
+ erls_resource:delete(Params, filename:join([<<"_percolator">>, commas(Index), Name]), [], [], [], Params#erls_params.http_client_options).
+
+percolate(Index, Type, Doc) ->
+ percolate(#erls_params{}, Index, Type, Doc).
+
+percolate(Params, Index, Type, Doc) ->
+ erls_resource:get(Params, filename:join([commas(Index), Type, <<"_percolate">>]), [], [], erls_json:encode(Doc), Params#erls_params.http_client_options).
+
+reindex(Body) ->
+ reindex(#erls_params{}, Body).
+
+reindex(Params, Body) ->
+ erls_resource:post(Params, filename:join([<<"_reindex">>]), [], [], erls_json:encode(Body), Params#erls_params.http_client_options).
+
+aliases(Body) ->
+ aliases(#erls_params{}, Body).
+
+aliases(Params, Body) ->
+ erls_resource:post(Params, filename:join([<<"_aliases">>]), [], [], erls_json:encode(Body), Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Takes a list of operations of the type {Operation, Index, Type, Id, Json}
+%% Performs these operations using _bulk endpoint.
+%% The possible operations are index, create, delete and update.
+%% (see the doc at https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html)
+%% @end
+%%--------------------------------------------------------------------
+-type index() :: binary().
+-type type() :: binary().
+-type id() :: binary() | undefined.
+-type metadata_tuple() :: {index(), type(), id()} |
+ {index(), type(), id(), headers()} |
+ {index(), type(), id(), erlastic_json()} |
+ {index(), type(), id(), erlastic_json(), headers()}.
+-type operation() :: {index | create | delete | update, metadata_tuple()}.
+
+-spec bulk_operation([operation()]) -> {ok, list()} | {error, any()}.
+bulk_operation(OperationIndexTypeIdJsonTuples) ->
+ bulk_operation(#erls_params{}, OperationIndexTypeIdJsonTuples).
+
+-spec bulk_operation(#erls_params{}, [operation()]) -> {ok, list()} | {error, any()}.
+bulk_operation(Params, OperationIndexTypeIdJsonTuples) ->
+ Body = lists:map(fun
+ Build({delete, {Index, Type, Id}}) ->
+ Build({delete, {Index, Type, Id, [], no_body}});
+ Build({delete, {Index, Type, Id, HeaderInformation}}) ->
+ Build({delete, {Index, Type, Id, HeaderInformation, no_body}});
+ Build({Operation, {Index, Type, Id, Doc}}) ->
+ Build({Operation, {Index, Type, Id, [], Doc}});
+ Build({Operation, {Index, Type, Id, HeaderInformation, Doc}}) ->
+ Header = build_header(Operation, Index, Type, Id, HeaderInformation),
+ Header ++ build_body(Operation, Doc)
+ end, OperationIndexTypeIdJsonTuples),
+
+ erls_resource:post(Params, <<"/_bulk">>, [], [], iolist_to_binary(Body), Params#erls_params.http_client_options).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Insert a setting into an Elasticsearch index
+%% @end
+%%--------------------------------------------------------------------
+-spec put_setting(binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+put_setting(Index, Doc) ->
+ put_setting(#erls_params{}, Index, Doc).
+
+-spec put_setting(#erls_params{}, binary(), erlastic_json() | binary()) -> {ok, erlastic_success_result()} | {error, any()}.
+put_setting(Params, Index, Doc) ->
+ erls_resource:put(Params, filename:join([Index, "_settings"]), [], [], maybe_encode_doc(Doc), Params#erls_params.http_client_options).
+
+%%% Internal functions
+
+-spec search_helper(binary(), #erls_params{}, list() | binary(), list() | binary(), erlastic_json() | binary(), list()) -> {ok, erlastic_success_result()} | {error, any()}.
+search_helper(Endpoint, Params, Index, Type, Query, Opts) when is_binary(Query) ->
+ erls_resource:get(Params, filename:join([commas(Index), Type, Endpoint]), [], [{<<"q">>, Query}]++Opts, Params#erls_params.http_client_options);
+search_helper(Endpoint, Params, Index, Type, Query, Opts) ->
+ erls_resource:post(Params, filename:join([commas(Index), Type, Endpoint]), [], Opts, erls_json:encode(Query), Params#erls_params.http_client_options).
+
+-spec commas(list(binary()) | binary()) -> binary().
+commas(Bin) when is_binary(Bin) ->
+ Bin;
+commas([]) ->
+ <<>>;
+commas([H | T]) ->
+ << H/binary, << <<",", B/binary>> || B <- T >>/binary >>.
+
+build_header(Operation, Index, Type, Id, HeaderInformation) ->
+ Header1 = [
+ {<<"_index">>, Index},
+ {<<"_type">>, Type}
+ | HeaderInformation
+ ],
+
+ Header2 = case Id =:= undefined of
+ true -> Header1;
+ false -> [{<<"_id">>, Id} | Header1]
+ end,
+
+ [erls_json:encode([{erlang:atom_to_binary(Operation, utf8), Header2}])].
+
+build_body(delete, no_body) ->
+ [<<"\n">>];
+build_body(update, Doc) ->
+ DocBin = maybe_encode_doc(Doc),
+ Json = <<"{\"doc\":", DocBin/binary, "}">>,
+ [<<"\n">>, Json, <<"\n">>];
+build_body(_Operation, Doc) ->
+ Json = maybe_encode_doc(Doc),
+ [<<"\n">>, Json, <<"\n">>].
+
+-spec maybe_encode_doc(binary() | erlastic_json()) -> binary().
+maybe_encode_doc(Bin) when is_binary(Bin) -> Bin;
+maybe_encode_doc(Doc) when is_list(Doc); is_tuple(Doc); is_map(Doc) -> erls_json:encode(Doc).
diff --git a/src/erlastic_search_app.erl b/src/erlastic_search_app.erl
new file mode 100644
index 0000000..e308408
--- /dev/null
+++ b/src/erlastic_search_app.erl
@@ -0,0 +1,24 @@
+%%%-------------------------------------------------------------------
+%%% @author Tristan Sloughter
+%%% @copyright (C) 2012, Tristan Sloughter
+%%% @doc
+%%%
+%%% @end
+%%% Created : 31 Aug 2012 by Tristan Sloughter
+%%%-------------------------------------------------------------------
+-module(erlastic_search_app).
+
+-export([start_deps/0]).
+
+start_deps() ->
+ start_deps(erlastic_search, permanent).
+
+start_deps(App, Type) ->
+ case application:start(App, Type) of
+ ok ->
+ ok;
+ {error, {not_started, Dep}} ->
+ start_deps(Dep, Type),
+ start_deps(App, Type)
+ end.
+
diff --git a/src/erls_config.erl b/src/erls_config.erl
new file mode 100644
index 0000000..055188e
--- /dev/null
+++ b/src/erls_config.erl
@@ -0,0 +1,39 @@
+%%%-------------------------------------------------------------------
+%%% @author Brujo Benavides <>
+%%% @copyright (C) 2010, 2012, Tristan Sloughter
+%%% @doc
+%%%
+%%% @end
+%%% Created : 29 Mar 2016 by Brujo Benavides <>
+%%%-------------------------------------------------------------------
+-module(erls_config).
+-export([get_host/0, get_port/0]).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the default host.
+%% If nothing is defined in the app env for the key 'host', it's
+%% 127.0.0.1
+%% @end
+%%--------------------------------------------------------------------
+get_host() ->
+ case application:get_env(erlastic_search, host) of
+ undefined ->
+ <<"127.0.0.1">>;
+ {ok, Host}->
+ Host
+ end.
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Retrieves the default port.
+%% If nothing is defined in the app env for the key 'port', it's 9200.
+%% @end
+%%--------------------------------------------------------------------
+get_port() ->
+ case application:get_env(erlastic_search, port) of
+ undefined ->
+ 9200;
+ {ok, Port}->
+ Port
+ end.
diff --git a/src/erls_json.erl b/src/erls_json.erl
new file mode 100644
index 0000000..fbb3ff1
--- /dev/null
+++ b/src/erls_json.erl
@@ -0,0 +1,30 @@
+-module(erls_json).
+
+-export([encode/1
+ ,decode/1]).
+
+-include("erlastic_search.hrl").
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Encodes the user-supplied `Json' with the user's defined JSON
+%% module (defaults to `jsx')
+%% In particular, this function cannot be used to encode any JSON
+%% built internally to `erlastic_search` as we do not know how
+%% the user's JSON module encodes JSONs in Erlang
+%% @end
+%%--------------------------------------------------------------------
+-spec encode(erlastic_json()) -> binary().
+encode(Json) ->
+ ?ERLASTIC_SEARCH_JSON_MODULE:encode(Json).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Decodes the given `BinaryJson' with the user's defined JSON
+%% module (defaults to `jsx')
+%% The same caveat as for `encode/1' above applies
+%% @end
+%%--------------------------------------------------------------------
+-spec decode(binary()) -> erlastic_json().
+decode(BinaryJson) ->
+ ?ERLASTIC_SEARCH_JSON_MODULE:decode(BinaryJson).
diff --git a/src/erls_mochijson2.erl b/src/erls_mochijson2.erl
deleted file mode 100644
index cfc60d0..0000000
--- a/src/erls_mochijson2.erl
+++ /dev/null
@@ -1,782 +0,0 @@
-%% @author Bob Ippolito
-%% @copyright 2007 Mochi Media, Inc.
-
-%% @doc Yet another JSON (RFC 4627) library for Erlang. mochijson2 works
-%% with binaries as strings, arrays as lists (without an {array, _})
-%% wrapper and it only knows how to decode UTF-8 (and ASCII).
-
--module(erls_mochijson2).
--author('bob@mochimedia.com').
--export([encoder/1, encode/1]).
--export([decoder/1, decode/1]).
-
-% This is a macro to placate syntax highlighters..
--define(Q, $\").
--define(ADV_COL(S, N), S#decoder{offset=N+S#decoder.offset,
- column=N+S#decoder.column}).
--define(INC_COL(S), S#decoder{offset=1+S#decoder.offset,
- column=1+S#decoder.column}).
--define(INC_LINE(S), S#decoder{offset=1+S#decoder.offset,
- column=1,
- line=1+S#decoder.line}).
--define(INC_CHAR(S, C),
- case C of
- $\n ->
- S#decoder{column=1,
- line=1+S#decoder.line,
- offset=1+S#decoder.offset};
- _ ->
- S#decoder{column=1+S#decoder.column,
- offset=1+S#decoder.offset}
- end).
--define(IS_WHITESPACE(C),
- (C =:= $\s orelse C =:= $\t orelse C =:= $\r orelse C =:= $\n)).
-
-%% @type iolist() = [char() | binary() | iolist()]
-%% @type iodata() = iolist() | binary()
-%% @type json_string() = atom | binary()
-%% @type json_number() = integer() | float()
-%% @type json_array() = [json_term()]
-%% @type json_object() = {struct, [{json_string(), json_term()}]}
-%% @type json_iolist() = {json, iolist()}
-%% @type json_term() = json_string() | json_number() | json_array() |
-%% json_object() | json_iolist()
-
--record(encoder, {handler=null,
- utf8=false}).
-
--record(decoder, {object_hook=null,
- offset=0,
- line=1,
- column=1,
- state=null}).
-
-%% @spec encoder([encoder_option()]) -> function()
-%% @doc Create an encoder/1 with the given options.
-%% @type encoder_option() = handler_option() | utf8_option()
-%% @type utf8_option() = boolean(). Emit unicode as utf8 (default - false)
-encoder(Options) ->
- State = parse_encoder_options(Options, #encoder{}),
- fun (O) -> json_encode(O, State) end.
-
-%% @spec encode(json_term()) -> iolist()
-%% @doc Encode the given as JSON to an iolist.
-encode(Any) ->
- json_encode(Any, #encoder{}).
-
-%% @spec decoder([decoder_option()]) -> function()
-%% @doc Create a decoder/1 with the given options.
-decoder(Options) ->
- State = parse_decoder_options(Options, #decoder{}),
- fun (O) -> json_decode(O, State) end.
-
-%% @spec decode(iolist()) -> json_term()
-%% @doc Decode the given iolist to Erlang terms.
-decode(S) ->
- json_decode(S, #decoder{}).
-
-%% Internal API
-
-parse_encoder_options([], State) ->
- State;
-parse_encoder_options([{handler, Handler} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{handler=Handler});
-parse_encoder_options([{utf8, Switch} | Rest], State) ->
- parse_encoder_options(Rest, State#encoder{utf8=Switch}).
-
-parse_decoder_options([], State) ->
- State;
-parse_decoder_options([{object_hook, Hook} | Rest], State) ->
- parse_decoder_options(Rest, State#decoder{object_hook=Hook}).
-
-json_encode(true, _State) ->
- <<"true">>;
-json_encode(false, _State) ->
- <<"false">>;
-json_encode(null, _State) ->
- <<"null">>;
-json_encode(I, _State) when is_integer(I) andalso I >= -2147483648 andalso I =< 2147483647 ->
- %% Anything outside of 32-bit integers should be encoded as a float
- integer_to_list(I);
-json_encode(I, _State) when is_integer(I) ->
- mochinum:digits(float(I));
-json_encode(F, _State) when is_float(F) ->
- mochinum:digits(F);
-json_encode(S, State) when is_binary(S); is_atom(S) ->
- json_encode_string(S, State);
-json_encode(Array, State) when is_list(Array) ->
- json_encode_array(Array, State);
-json_encode({struct, Props}, State) when is_list(Props) ->
- json_encode_proplist(Props, State);
-json_encode({json, IoList}, _State) ->
- IoList;
-json_encode(Bad, #encoder{handler=null}) ->
- exit({json_encode, {bad_term, Bad}});
-json_encode(Bad, State=#encoder{handler=Handler}) ->
- json_encode(Handler(Bad), State).
-
-json_encode_array([], _State) ->
- <<"[]">>;
-json_encode_array(L, State) ->
- F = fun (O, Acc) ->
- [$,, json_encode(O, State) | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "[", L),
- lists:reverse([$\] | Acc1]).
-
-json_encode_proplist([], _State) ->
- <<"{}">>;
-json_encode_proplist(Props, State) ->
- F = fun ({K, V}, Acc) ->
- KS = json_encode_string(K, State),
- VS = json_encode(V, State),
- [$,, VS, $:, KS | Acc]
- end,
- [$, | Acc1] = lists:foldl(F, "{", Props),
- lists:reverse([$\} | Acc1]).
-
-json_encode_string(A, State) when is_atom(A) ->
- L = atom_to_list(A),
- case json_string_is_safe(L) of
- true ->
- [?Q, L, ?Q];
- false ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(L), State, [?Q])
- end;
-json_encode_string(B, State) when is_binary(B) ->
- case json_bin_is_safe(B) of
- true ->
- [?Q, B, ?Q];
- false ->
- json_encode_string_unicode(xmerl_ucs:from_utf8(B), State, [?Q])
- end;
-json_encode_string(I, _State) when is_integer(I) ->
- [?Q, integer_to_list(I), ?Q];
-json_encode_string(L, State) when is_list(L) ->
- case json_string_is_safe(L) of
- true ->
- [?Q, L, ?Q];
- false ->
- json_encode_string_unicode(L, State, [?Q])
- end.
-
-json_string_is_safe([]) ->
- true;
-json_string_is_safe([C | Rest]) ->
- case C of
- ?Q ->
- false;
- $\\ ->
- false;
- $\b ->
- false;
- $\f ->
- false;
- $\n ->
- false;
- $\r ->
- false;
- $\t ->
- false;
- C when C >= 0, C < $\s; C >= 16#7f, C =< 16#10FFFF ->
- false;
- C when C < 16#7f ->
- json_string_is_safe(Rest);
- _ ->
- false
- end.
-
-json_bin_is_safe(<<>>) ->
- true;
-json_bin_is_safe(<>) ->
- case C of
- ?Q ->
- false;
- $\\ ->
- false;
- $\b ->
- false;
- $\f ->
- false;
- $\n ->
- false;
- $\r ->
- false;
- $\t ->
- false;
- C when C >= 0, C < $\s; C >= 16#7f ->
- false;
- C when C < 16#7f ->
- json_bin_is_safe(Rest)
- end.
-
-json_encode_string_unicode([], _State, Acc) ->
- lists:reverse([$\" | Acc]);
-json_encode_string_unicode([C | Cs], State, Acc) ->
- Acc1 = case C of
- ?Q ->
- [?Q, $\\ | Acc];
- %% Escaping solidus is only useful when trying to protect
- %% against "" injection attacks which are only
- %% possible when JSON is inserted into a HTML document
- %% in-line. mochijson2 does not protect you from this, so
- %% if you do insert directly into HTML then you need to
- %% uncomment the following case or escape the output of encode.
- %%
- %% $/ ->
- %% [$/, $\\ | Acc];
- %%
- $\\ ->
- [$\\, $\\ | Acc];
- $\b ->
- [$b, $\\ | Acc];
- $\f ->
- [$f, $\\ | Acc];
- $\n ->
- [$n, $\\ | Acc];
- $\r ->
- [$r, $\\ | Acc];
- $\t ->
- [$t, $\\ | Acc];
- C when C >= 0, C < $\s ->
- [unihex(C) | Acc];
- C when C >= 16#7f, C =< 16#10FFFF, State#encoder.utf8 ->
- [xmerl_ucs:to_utf8(C) | Acc];
- C when C >= 16#7f, C =< 16#10FFFF, not State#encoder.utf8 ->
- [unihex(C) | Acc];
- C when C < 16#7f ->
- [C | Acc];
- _ ->
- exit({json_encode, {bad_char, C}})
- end,
- json_encode_string_unicode(Cs, State, Acc1).
-
-hexdigit(C) when C >= 0, C =< 9 ->
- C + $0;
-hexdigit(C) when C =< 15 ->
- C + $a - 10.
-
-unihex(C) when C < 16#10000 ->
- <> = <>,
- Digits = [hexdigit(D) || D <- [D3, D2, D1, D0]],
- [$\\, $u | Digits];
-unihex(C) when C =< 16#10FFFF ->
- N = C - 16#10000,
- S1 = 16#d800 bor ((N bsr 10) band 16#3ff),
- S2 = 16#dc00 bor (N band 16#3ff),
- [unihex(S1), unihex(S2)].
-
-json_decode(L, S) when is_list(L) ->
- json_decode(iolist_to_binary(L), S);
-json_decode(B, S) ->
- {Res, S1} = decode1(B, S),
- {eof, _} = tokenize(B, S1#decoder{state=trim}),
- Res.
-
-decode1(B, S=#decoder{state=null}) ->
- case tokenize(B, S#decoder{state=any}) of
- {{const, C}, S1} ->
- {C, S1};
- {start_array, S1} ->
- decode_array(B, S1);
- {start_object, S1} ->
- decode_object(B, S1)
- end.
-
-make_object(V, #decoder{object_hook=null}) ->
- V;
-make_object(V, #decoder{object_hook=Hook}) ->
- Hook(V).
-
-decode_object(B, S) ->
- decode_object(B, S#decoder{state=key}, []).
-
-decode_object(B, S=#decoder{state=key}, Acc) ->
- case tokenize(B, S) of
- {end_object, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
- {V, S1#decoder{state=null}};
- {{const, K}, S1} ->
- {colon, S2} = tokenize(B, S1),
- {V, S3} = decode1(B, S2#decoder{state=null}),
- decode_object(B, S3#decoder{state=comma}, [{K, V} | Acc])
- end;
-decode_object(B, S=#decoder{state=comma}, Acc) ->
- case tokenize(B, S) of
- {end_object, S1} ->
- V = make_object({struct, lists:reverse(Acc)}, S1),
- {V, S1#decoder{state=null}};
- {comma, S1} ->
- decode_object(B, S1#decoder{state=key}, Acc)
- end.
-
-decode_array(B, S) ->
- decode_array(B, S#decoder{state=any}, []).
-
-decode_array(B, S=#decoder{state=any}, Acc) ->
- case tokenize(B, S) of
- {end_array, S1} ->
- {lists:reverse(Acc), S1#decoder{state=null}};
- {start_array, S1} ->
- {Array, S2} = decode_array(B, S1),
- decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
- {start_object, S1} ->
- {Array, S2} = decode_object(B, S1),
- decode_array(B, S2#decoder{state=comma}, [Array | Acc]);
- {{const, Const}, S1} ->
- decode_array(B, S1#decoder{state=comma}, [Const | Acc])
- end;
-decode_array(B, S=#decoder{state=comma}, Acc) ->
- case tokenize(B, S) of
- {end_array, S1} ->
- {lists:reverse(Acc), S1#decoder{state=null}};
- {comma, S1} ->
- decode_array(B, S1#decoder{state=any}, Acc)
- end.
-
-tokenize_string(B, S=#decoder{offset=O}) ->
- case tokenize_string_fast(B, O) of
- {escape, O1} ->
- Length = O1 - O,
- S1 = ?ADV_COL(S, Length),
- <<_:O/binary, Head:Length/binary, _/binary>> = B,
- tokenize_string(B, S1, lists:reverse(binary_to_list(Head)));
- O1 ->
- Length = O1 - O,
- <<_:O/binary, String:Length/binary, ?Q, _/binary>> = B,
- {{const, String}, ?ADV_COL(S, Length + 1)}
- end.
-
-tokenize_string_fast(B, O) ->
- case B of
- <<_:O/binary, ?Q, _/binary>> ->
- O;
- <<_:O/binary, $\\, _/binary>> ->
- {escape, O};
- <<_:O/binary, C1, _/binary>> when C1 < 128 ->
- tokenize_string_fast(B, 1 + O);
- <<_:O/binary, C1, C2, _/binary>> when C1 >= 194, C1 =< 223,
- C2 >= 128, C2 =< 191 ->
- tokenize_string_fast(B, 2 + O);
- <<_:O/binary, C1, C2, C3, _/binary>> when C1 >= 224, C1 =< 239,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191 ->
- tokenize_string_fast(B, 3 + O);
- <<_:O/binary, C1, C2, C3, C4, _/binary>> when C1 >= 240, C1 =< 244,
- C2 >= 128, C2 =< 191,
- C3 >= 128, C3 =< 191,
- C4 >= 128, C4 =< 191 ->
- tokenize_string_fast(B, 4 + O);
- _ ->
- throw(invalid_utf8)
- end.
-
-tokenize_string(B, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, ?Q, _/binary>> ->
- {{const, iolist_to_binary(lists:reverse(Acc))}, ?INC_COL(S)};
- <<_:O/binary, "\\\"", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\" | Acc]);
- <<_:O/binary, "\\\\", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\\ | Acc]);
- <<_:O/binary, "\\/", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$/ | Acc]);
- <<_:O/binary, "\\b", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\b | Acc]);
- <<_:O/binary, "\\f", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\f | Acc]);
- <<_:O/binary, "\\n", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\n | Acc]);
- <<_:O/binary, "\\r", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\r | Acc]);
- <<_:O/binary, "\\t", _/binary>> ->
- tokenize_string(B, ?ADV_COL(S, 2), [$\t | Acc]);
- <<_:O/binary, "\\u", C3, C2, C1, C0, Rest/binary>> ->
- C = erlang:list_to_integer([C3, C2, C1, C0], 16),
- if C > 16#D7FF, C < 16#DC00 ->
- %% coalesce UTF-16 surrogate pair
- <<"\\u", D3, D2, D1, D0, _/binary>> = Rest,
- D = erlang:list_to_integer([D3,D2,D1,D0], 16),
- [CodePoint] = xmerl_ucs:from_utf16be(<>),
- Acc1 = lists:reverse(xmerl_ucs:to_utf8(CodePoint), Acc),
- tokenize_string(B, ?ADV_COL(S, 12), Acc1);
- true ->
- Acc1 = lists:reverse(xmerl_ucs:to_utf8(C), Acc),
- tokenize_string(B, ?ADV_COL(S, 6), Acc1)
- end;
- <<_:O/binary, C, _/binary>> ->
- tokenize_string(B, ?INC_CHAR(S, C), [C | Acc])
- end.
-
-tokenize_number(B, S) ->
- case tokenize_number(B, sign, S, []) of
- {{int, Int}, S1} ->
- {{const, list_to_integer(Int)}, S1};
- {{float, Float}, S1} ->
- {{const, list_to_float(Float)}, S1}
- end.
-
-tokenize_number(B, sign, S=#decoder{offset=O}, []) ->
- case B of
- <<_:O/binary, $-, _/binary>> ->
- tokenize_number(B, int, ?INC_COL(S), [$-]);
- _ ->
- tokenize_number(B, int, S, [])
- end;
-tokenize_number(B, int, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, $0, _/binary>> ->
- tokenize_number(B, frac, ?INC_COL(S), [$0 | Acc]);
- <<_:O/binary, C, _/binary>> when C >= $1 andalso C =< $9 ->
- tokenize_number(B, int1, ?INC_COL(S), [C | Acc])
- end;
-tokenize_number(B, int1, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, int1, ?INC_COL(S), [C | Acc]);
- _ ->
- tokenize_number(B, frac, S, Acc)
- end;
-tokenize_number(B, frac, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, $., C, _/binary>> when C >= $0, C =< $9 ->
- tokenize_number(B, frac1, ?ADV_COL(S, 2), [C, $. | Acc]);
- <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
- tokenize_number(B, esign, ?INC_COL(S), [$e, $0, $. | Acc]);
- _ ->
- {{int, lists:reverse(Acc)}, S}
- end;
-tokenize_number(B, frac1, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, frac1, ?INC_COL(S), [C | Acc]);
- <<_:O/binary, E, _/binary>> when E =:= $e orelse E =:= $E ->
- tokenize_number(B, esign, ?INC_COL(S), [$e | Acc]);
- _ ->
- {{float, lists:reverse(Acc)}, S}
- end;
-tokenize_number(B, esign, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C =:= $- orelse C=:= $+ ->
- tokenize_number(B, eint, ?INC_COL(S), [C | Acc]);
- _ ->
- tokenize_number(B, eint, S, Acc)
- end;
-tokenize_number(B, eint, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, eint1, ?INC_COL(S), [C | Acc])
- end;
-tokenize_number(B, eint1, S=#decoder{offset=O}, Acc) ->
- case B of
- <<_:O/binary, C, _/binary>> when C >= $0 andalso C =< $9 ->
- tokenize_number(B, eint1, ?INC_COL(S), [C | Acc]);
- _ ->
- {{float, lists:reverse(Acc)}, S}
- end.
-
-tokenize(B, S=#decoder{offset=O}) ->
- case B of
- <<_:O/binary, C, _/binary>> when ?IS_WHITESPACE(C) ->
- tokenize(B, ?INC_CHAR(S, C));
- <<_:O/binary, "{", _/binary>> ->
- {start_object, ?INC_COL(S)};
- <<_:O/binary, "}", _/binary>> ->
- {end_object, ?INC_COL(S)};
- <<_:O/binary, "[", _/binary>> ->
- {start_array, ?INC_COL(S)};
- <<_:O/binary, "]", _/binary>> ->
- {end_array, ?INC_COL(S)};
- <<_:O/binary, ",", _/binary>> ->
- {comma, ?INC_COL(S)};
- <<_:O/binary, ":", _/binary>> ->
- {colon, ?INC_COL(S)};
- <<_:O/binary, "null", _/binary>> ->
- {{const, null}, ?ADV_COL(S, 4)};
- <<_:O/binary, "true", _/binary>> ->
- {{const, true}, ?ADV_COL(S, 4)};
- <<_:O/binary, "false", _/binary>> ->
- {{const, false}, ?ADV_COL(S, 5)};
- <<_:O/binary, "\"", _/binary>> ->
- tokenize_string(B, ?INC_COL(S));
- <<_:O/binary, C, _/binary>> when (C >= $0 andalso C =< $9)
- orelse C =:= $- ->
- tokenize_number(B, S);
- <<_:O/binary>> ->
- trim = S#decoder.state,
- {eof, S}
- end.
-%%
-%% Tests
-%%
--include_lib("eunit/include/eunit.hrl").
--ifdef(TEST).
-
-
-%% testing constructs borrowed from the Yaws JSON implementation.
-
-%% Create an object from a list of Key/Value pairs.
-
-obj_new() ->
- {struct, []}.
-
-is_obj({struct, Props}) ->
- F = fun ({K, _}) when is_binary(K) -> true end,
- lists:all(F, Props).
-
-obj_from_list(Props) ->
- Obj = {struct, Props},
- ?assert(is_obj(Obj)),
- Obj.
-
-%% Test for equivalence of Erlang terms.
-%% Due to arbitrary order of construction, equivalent objects might
-%% compare unequal as erlang terms, so we need to carefully recurse
-%% through aggregates (tuples and objects).
-
-equiv({struct, Props1}, {struct, Props2}) ->
- equiv_object(Props1, Props2);
-equiv(L1, L2) when is_list(L1), is_list(L2) ->
- equiv_list(L1, L2);
-equiv(N1, N2) when is_number(N1), is_number(N2) -> N1 == N2;
-equiv(B1, B2) when is_binary(B1), is_binary(B2) -> B1 == B2;
-equiv(A, A) when A =:= true orelse A =:= false orelse A =:= null -> true.
-
-%% Object representation and traversal order is unknown.
-%% Use the sledgehammer and sort property lists.
-
-equiv_object(Props1, Props2) ->
- L1 = lists:keysort(1, Props1),
- L2 = lists:keysort(1, Props2),
- Pairs = lists:zip(L1, L2),
- true = lists:all(fun({{K1, V1}, {K2, V2}}) ->
- equiv(K1, K2) and equiv(V1, V2)
- end, Pairs).
-
-%% Recursively compare tuple elements for equivalence.
-
-equiv_list([], []) ->
- true;
-equiv_list([V1 | L1], [V2 | L2]) ->
- equiv(V1, V2) andalso equiv_list(L1, L2).
-
-decode_test() ->
- [1199344435545.0, 1] = decode(<<"[1199344435545.0,1]">>),
- <<16#F0,16#9D,16#9C,16#95>> = decode([34,"\\ud835","\\udf15",34]).
-
-e2j_vec_test() ->
- test_one(e2j_test_vec(utf8), 1).
-
-test_one([], _N) ->
- %% io:format("~p tests passed~n", [N-1]),
- ok;
-test_one([{E, J} | Rest], N) ->
- %% io:format("[~p] ~p ~p~n", [N, E, J]),
- true = equiv(E, decode(J)),
- true = equiv(E, decode(encode(E))),
- test_one(Rest, 1+N).
-
-e2j_test_vec(utf8) ->
- [
- {1, "1"},
- {3.1416, "3.14160"}, %% text representation may truncate, trail zeroes
- {-1, "-1"},
- {-3.1416, "-3.14160"},
- {12.0e10, "1.20000e+11"},
- {1.234E+10, "1.23400e+10"},
- {-1.234E-10, "-1.23400e-10"},
- {10.0, "1.0e+01"},
- {123.456, "1.23456E+2"},
- {10.0, "1e1"},
- {<<"foo">>, "\"foo\""},
- {<<"foo", 5, "bar">>, "\"foo\\u0005bar\""},
- {<<"">>, "\"\""},
- {<<"\n\n\n">>, "\"\\n\\n\\n\""},
- {<<"\" \b\f\r\n\t\"">>, "\"\\\" \\b\\f\\r\\n\\t\\\"\""},
- {obj_new(), "{}"},
- {obj_from_list([{<<"foo">>, <<"bar">>}]), "{\"foo\":\"bar\"}"},
- {obj_from_list([{<<"foo">>, <<"bar">>}, {<<"baz">>, 123}]),
- "{\"foo\":\"bar\",\"baz\":123}"},
- {[], "[]"},
- {[[]], "[[]]"},
- {[1, <<"foo">>], "[1,\"foo\"]"},
-
- %% json array in a json object
- {obj_from_list([{<<"foo">>, [123]}]),
- "{\"foo\":[123]}"},
-
- %% json object in a json object
- {obj_from_list([{<<"foo">>, obj_from_list([{<<"bar">>, true}])}]),
- "{\"foo\":{\"bar\":true}}"},
-
- %% fold evaluation order
- {obj_from_list([{<<"foo">>, []},
- {<<"bar">>, obj_from_list([{<<"baz">>, true}])},
- {<<"alice">>, <<"bob">>}]),
- "{\"foo\":[],\"bar\":{\"baz\":true},\"alice\":\"bob\"}"},
-
- %% json object in a json array
- {[-123, <<"foo">>, obj_from_list([{<<"bar">>, []}]), null],
- "[-123,\"foo\",{\"bar\":[]},null]"}
- ].
-
-%% test utf8 encoding
-encoder_utf8_test() ->
- %% safe conversion case (default)
- [34,"\\u0001","\\u0442","\\u0435","\\u0441","\\u0442",34] =
- encode(<<1,"\321\202\320\265\321\201\321\202">>),
-
- %% raw utf8 output (optional)
- Enc = mochijson2:encoder([{utf8, true}]),
- [34,"\\u0001",[209,130],[208,181],[209,129],[209,130],34] =
- Enc(<<1,"\321\202\320\265\321\201\321\202">>).
-
-input_validation_test() ->
- Good = [
- {16#00A3, <>}, %% pound
- {16#20AC, <>}, %% euro
- {16#10196, <>} %% denarius
- ],
- lists:foreach(fun({CodePoint, UTF8}) ->
- Expect = list_to_binary(xmerl_ucs:to_utf8(CodePoint)),
- Expect = decode(UTF8)
- end, Good),
-
- Bad = [
- %% 2nd, 3rd, or 4th byte of a multi-byte sequence w/o leading byte
- <>,
- %% missing continuations, last byte in each should be 80-BF
- <>,
- <>,
- <>,
- %% we don't support code points > 10FFFF per RFC 3629
- <>
- ],
- lists:foreach(
- fun(X) ->
- ok = try decode(X) catch invalid_utf8 -> ok end,
- %% could be {ucs,{bad_utf8_character_code}} or
- %% {json_encode,{bad_char,_}}
- {'EXIT', _} = (catch encode(X))
- end, Bad).
-
-inline_json_test() ->
- ?assertEqual(<<"\"iodata iodata\"">>,
- iolist_to_binary(
- encode({json, [<<"\"iodata">>, " iodata\""]}))),
- ?assertEqual({struct, [{<<"key">>, <<"iodata iodata">>}]},
- decode(
- encode({struct,
- [{key, {json, [<<"\"iodata">>, " iodata\""]}}]}))),
- ok.
-
-big_unicode_test() ->
- UTF8Seq = list_to_binary(xmerl_ucs:to_utf8(16#0001d120)),
- ?assertEqual(
- <<"\"\\ud834\\udd20\"">>,
- iolist_to_binary(encode(UTF8Seq))),
- ?assertEqual(
- UTF8Seq,
- decode(iolist_to_binary(encode(UTF8Seq)))),
- ok.
-
-custom_decoder_test() ->
- ?assertEqual(
- {struct, [{<<"key">>, <<"value">>}]},
- (decoder([]))("{\"key\": \"value\"}")),
- F = fun ({struct, [{<<"key">>, <<"value">>}]}) -> win end,
- ?assertEqual(
- win,
- (decoder([{object_hook, F}]))("{\"key\": \"value\"}")),
- ok.
-
-atom_test() ->
- %% JSON native atoms
- [begin
- ?assertEqual(A, decode(atom_to_list(A))),
- ?assertEqual(iolist_to_binary(atom_to_list(A)),
- iolist_to_binary(encode(A)))
- end || A <- [true, false, null]],
- %% Atom to string
- ?assertEqual(
- <<"\"foo\"">>,
- iolist_to_binary(encode(foo))),
- ?assertEqual(
- <<"\"\\ud834\\udd20\"">>,
- iolist_to_binary(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))),
- ok.
-
-key_encode_test() ->
- %% Some forms are accepted as keys that would not be strings in other
- %% cases
- ?assertEqual(
- <<"{\"foo\":1}">>,
- iolist_to_binary(encode({struct, [{foo, 1}]}))),
- ?assertEqual(
- <<"{\"foo\":1}">>,
- iolist_to_binary(encode({struct, [{<<"foo">>, 1}]}))),
- ?assertEqual(
- <<"{\"foo\":1}">>,
- iolist_to_binary(encode({struct, [{"foo", 1}]}))),
- ?assertEqual(
- <<"{\"\\ud834\\udd20\":1}">>,
- iolist_to_binary(
- encode({struct, [{[16#0001d120], 1}]}))),
- ?assertEqual(
- <<"{\"1\":1}">>,
- iolist_to_binary(encode({struct, [{1, 1}]}))),
- ok.
-
-unsafe_chars_test() ->
- Chars = "\"\\\b\f\n\r\t",
- [begin
- ?assertEqual(false, json_string_is_safe([C])),
- ?assertEqual(false, json_bin_is_safe(<>)),
- ?assertEqual(<>, decode(encode(<>)))
- end || C <- Chars],
- ?assertEqual(
- false,
- json_string_is_safe([16#0001d120])),
- ?assertEqual(
- false,
- json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8(16#0001d120)))),
- ?assertEqual(
- [16#0001d120],
- xmerl_ucs:from_utf8(
- binary_to_list(
- decode(encode(list_to_atom(xmerl_ucs:to_utf8(16#0001d120))))))),
- ?assertEqual(
- false,
- json_string_is_safe([16#110000])),
- ?assertEqual(
- false,
- json_bin_is_safe(list_to_binary(xmerl_ucs:to_utf8([16#110000])))),
- %% solidus can be escaped but isn't unsafe by default
- ?assertEqual(
- <<"/">>,
- decode(<<"\"\\/\"">>)),
- ok.
-
-int_test() ->
- ?assertEqual(0, decode("0")),
- ?assertEqual(1, decode("1")),
- ?assertEqual(11, decode("11")),
- ok.
-
-float_fallback_test() ->
- ?assertEqual(<<"-2147483649.0">>, iolist_to_binary(encode(-2147483649))),
- ?assertEqual(<<"2147483648.0">>, iolist_to_binary(encode(2147483648))),
- ok.
-
-handler_test() ->
- ?assertEqual(
- {'EXIT',{json_encode,{bad_term,{}}}},
- catch encode({})),
- F = fun ({}) -> [] end,
- ?assertEqual(
- <<"[]">>,
- iolist_to_binary((encoder([{handler, F}]))({}))),
- ok.
-
--endif.
diff --git a/src/erls_query_constructor.erl b/src/erls_query_constructor.erl
deleted file mode 100644
index 28cada7..0000000
--- a/src/erls_query_constructor.erl
+++ /dev/null
@@ -1,66 +0,0 @@
-%%%-------------------------------------------------------------------
-%%% @author Tristan Sloughter <>
-%%% @copyright (C) 2010, Tristan Sloughter
-%%% @doc
-%%%
-%%% @end
-%%% Created : 14 Feb 2010 by Tristan Sloughter <>
-%%%-------------------------------------------------------------------
--module(erls_query_constructor).
--compile([export_all]).
-
--include("erlastic_search.hrl").
-
-term_query() ->
- ok.
-
-range_query() ->
- ok.
-
-prefix_query() ->
- ok.
-
-wildcard_query() ->
- ok.
-
-match_all_query() ->
- ok.
-
-query_string_query() ->
- ok.
-
-field_query() ->
- ok.
-
-boolean_query() ->
- ok.
-
-disjunction_max_query() ->
- ok.
-
-constant_score_query() ->
- ok.
-
-filtered_query() ->
- ok.
-
-more_like_this_query() ->
- ok.
-
-more_like_this_field_query() ->
- ok.
-
-term_filter() ->
- ok.
-
-range_filter() ->
- ok.
-
-prefix_filter() ->
- ok.
-
-query_filter() ->
- ok.
-
-boolean_filter() ->
- ok.
diff --git a/src/erls_resource.erl b/src/erls_resource.erl
index e33997f..74cb3ab 100644
--- a/src/erls_resource.erl
+++ b/src/erls_resource.erl
@@ -10,186 +10,104 @@
%%%-------------------------------------------------------------------
-module(erls_resource).
--export([get/5, get/6, head/5, delete/5, post/6, put/6]).
+-export([get/5
+ ,get/6
+ ,head/5
+ ,delete/5
+ ,delete/6
+ ,post/6
+ ,put/6]).
-include("erlastic_search.hrl").
--record(response, {
- method,
- status,
- reason,
- headers,
- body
- }).
-
get(State, Path, Headers, Params, Opts) ->
- request(State, "GET", Path, Headers, Params, [], Opts).
+ request(State, get, Path, Headers, Params, [], Opts).
get(State, Path, Headers, Params, Body, Opts) ->
- request(State, "GET", Path, Headers, Params, Body, Opts).
+ request(State, get, Path, Headers, Params, Body, Opts).
head(State, Path, Headers, Params, Opts) ->
- request(State, "HEAD", Path, Headers, Params, [], Opts).
-
+ request(State, head, Path, Headers, Params, [], Opts).
+
delete(State, Path, Headers, Params, Opts) ->
- request(State, "DELETE", Path, Headers, Params, [], Opts).
+ request(State, delete, Path, Headers, Params, [], Opts).
+
+delete(State, Path, Headers, Params, Body, Opts) ->
+ request(State, delete, Path, Headers, Params, Body, Opts).
post(State, Path, Headers, Params, Body, Opts) ->
- request(State, "POST", Path, Headers, Params, Body, Opts).
+ request(State, post, Path, Headers, Params, Body, Opts).
put(State, Path, Headers, Params, Body, Opts) ->
- request(State, "PUT", Path, Headers, Params, Body, Opts).
-
-
-request(State, Method, Path, Headers, Params, Body, Options) ->
- Path1 = lists:append([Path,
- case Params of
- [] -> [];
- Props -> "?" ++ encode_query(Props)
- end]),
- %Headers1 = make_auth(State,
- % default_header("Content-Type", "application/json", Headers)),
- Headers1 = Headers,
- case has_body(Method) of
- true ->
- case make_body(Body, Headers1, Options) of
- {Headers2, Options1, InitialBody, BodyFun} ->
- do_request(State, Method, Path1, Headers2, {BodyFun, InitialBody}, Options1);
- Error ->
- Error
- end;
- false ->
- do_request(State, Method, Path1, Headers1, {nil, <<>>}, Options)
- end.
-
+ request(State, put, Path, Headers, Params, Body, Opts).
-do_request(#erls_params{host=Host, port=Port, ssl=Ssl, timeout=Timeout},
- Method, Path, Headers, {BodyFun, InitialBody}, Options) ->
- case lhttpc:request(Host, Port, Ssl, Path, Method, Headers, InitialBody, Timeout, Options) of
- {ok, {{StatusCode, ReasonPhrase}, ResponseHeaders, ResponseBody}} ->
- State = #response{method = Method,
- status = StatusCode,
- reason = ReasonPhrase,
- headers = ResponseHeaders,
- body = ResponseBody},
-
- make_response(State);
- {ok, UploadState} -> %% we stream
- case stream_body(BodyFun, UploadState) of
- {ok, {{StatusCode, ReasonPhrase}, ResponseHeaders, ResponseBody}} ->
- State = #response{method = Method,
- status = StatusCode,
- reason = ReasonPhrase,
- headers = ResponseHeaders,
- body = ResponseBody},
-
- make_response(State);
- Error -> Error
- end;
- Error -> Error
- end.
+request(State, Method, Path, Headers, Params, Body, Options) ->
+ Path1 = < <<>>;
+ Props -> <<"?", (encode_query(Props))/binary>>
+ end)/binary>>,
+ {Headers2, Options1, Body} = make_body(Body, Headers, Options),
+ Headers3 = default_header(<<"Content-Type">>, <<"application/json">>, Headers2),
+ do_request(State, Method, Path1, Headers3, Body, Options1).
-make_response(#response{method=Method, status=Status, reason=Reason, body=Body}) ->
- if
- Status >= 400, Status == 404 ->
- {error, not_found};
- Status >= 400, Status == 409 ->
- {error, conflict};
- Status >= 400, Status == 412 ->
- {error, precondition_failed};
- Status >= 400 ->
- {error, {unknown_error, Status}};
- true ->
- if
- Method == "HEAD" ->
- {ok, {Status, Reason}};
- true ->
- case is_pid(Body) of
- true ->
- {ok, Body};
- false ->
- try mochijson2:decode(binary_to_list(Body)) of
- Resp1 ->
- case Resp1 of
- {[{<<"ok">>, true}]} -> ok;
- {[{<<"ok">>, true}|Res]} -> {ok, {Res}};
- Obj -> {ok, Obj}
- end
- catch
- _:_ -> {ok, Body}
- end
- end
+do_request(#erls_params{host=Host, port=Port, timeout=Timeout, ctimeout=CTimeout},
+ Method, Path, Headers, Body, Options) ->
+ % Ugly, but to keep backwards compatibility: add recv_timeout and
+ % connect_timeout when *not* present in Options.
+ NewOptions = lists:foldl(
+ fun({BCOpt, Value}, Acc) ->
+ case proplists:get_value(BCOpt, Acc) of
+ undefined -> [{BCOpt, Value}|Acc];
+ _ -> Acc
end
+ end,
+ Options,
+ [{recv_timeout, Timeout}, {connect_timeout, CTimeout}]
+ ),
+ case hackney:request(Method, <>, Headers, Body,
+ NewOptions) of
+ {ok, Status, _Headers, Client} when Status =:= 200
+ ; Status =:= 201 ->
+ case hackney:body(Client) of
+ {ok, RespBody} ->
+ {ok, erls_json:decode(RespBody)};
+ {error, _Reason} = Error ->
+ Error
+ end;
+ {ok, Status, _Headers, Client} ->
+ case hackney:body(Client) of
+ {ok, RespBody} -> {error, {Status, erls_json:decode(RespBody)}};
+ {error, _Reason} -> {error, Status}
+ end;
+ {ok, 200, _Headers} ->
+ %% we hit this case for HEAD requests, or more generally when
+ %% there's no response body
+ ok;
+ {ok, Not200, _Headers} ->
+ {error, Not200};
+ {ok, ClientRef} ->
+ %% that's when the options passed to hackney included `async'
+ %% this reference can then be used to match the messages from
+ %% hackney when ES replies; see the hackney doc for more information
+ {ok, {async, ClientRef}};
+ {error, R} ->
+ {error, R}
end.
encode_query(Props) ->
- P = fun({A,B}, AccIn) -> io_lib:format("~s=~s&", [A,B]) ++ AccIn end,
- lists:flatten(lists:foldr(P, [], Props)).
+ P = fun({A,B}, AccIn) -> io_lib:format("~s=~s&", [A,B]) ++ AccIn end,
+ iolist_to_binary((lists:foldr(P, [], Props))).
default_header(K, V, H) ->
case proplists:is_defined(K, H) of
true -> H;
false -> [{K, V}|H]
end.
-
-has_body("HEAD") ->
- false;
-has_body("GET") ->
- false;
-has_body("DELETE") ->
- false;
-has_body(_) ->
- true.
-
+
default_content_length(B, H) ->
- default_header("Content-Length", integer_to_list(erlang:iolist_size(B)), H).
+ default_header(<<"Content-Length">>, list_to_binary(integer_to_list(erlang:iolist_size(B))), H).
-body_length(H) ->
- case proplists:get_value("Content-Length", H) of
- undefined -> false;
- _ -> true
- end.
-
-make_body(Body, Headers, Options) when is_list(Body) ->
- {default_content_length(Body, Headers), Options, Body, nil};
-make_body(Body, Headers, Options) when is_binary(Body) ->
- {default_content_length(Body, Headers), Options, Body, nil};
-make_body(Fun, Headers, Options) when is_function(Fun) ->
- case body_length(Headers) of
- true ->
- {ok, InitialState} = Fun(),
- Options1 = [{partial_upload, infinity}|Options],
- {Headers, Options1, InitialState, Fun};
- false ->
- {error, "Content-Length undefined"}
- end;
-make_body({Fun, State}, Headers, Options) when is_function(Fun) ->
- case body_length(Headers) of
- true ->
- Options1 = [{partial_upload, infinity}|Options],
- {ok, InitialState, NextState} = Fun(State),
-
- {Headers, Options1, InitialState, {Fun, NextState}};
- false ->
- {error, "Content-Length undefined"}
- end;
-make_body(_, _, _) ->
- {error, "body invalid"}.
-
-stream_body({Source, State}, CurrentState) ->
- do_stream_body(Source, Source(State), CurrentState);
-stream_body(Source, CurrentState) ->
- do_stream_body(Source, Source(), CurrentState).
-
-do_stream_body(Source, Resp, CurrentState) ->
- case Resp of
- {ok, Data} ->
- {ok, NextState} = lhttpc:send_body_part(CurrentState, Data),
- stream_body(Source, NextState);
- {ok, Data, NewSourceState} ->
- {ok, NextState} = lhttpc:send_body_part(CurrentState, Data),
- stream_body({Source, NewSourceState}, NextState);
- eof ->
- lhttpc:send_body_part(CurrentState, http_eob)
- end.
-
+make_body(Body, Headers, Options) ->
+ {default_content_length(Body, Headers), Options, Body}.
diff --git a/src/erls_utils.erl b/src/erls_utils.erl
deleted file mode 100644
index ee95a3c..0000000
--- a/src/erls_utils.erl
+++ /dev/null
@@ -1,17 +0,0 @@
-%%%-------------------------------------------------------------------
-%%% @author Tristan Sloughter <>
-%%% @copyright (C) 2010, Tristan Sloughter
-%%% @doc
-%%%
-%%% @end
-%%% Created : 14 Feb 2010 by Tristan Sloughter <>
-%%%-------------------------------------------------------------------
--module(erls_utils).
--compile([export_all]).
-
-comma_separate([H | []]) ->
- H;
-comma_separate(List) ->
- lists:foldl(fun(String, Acc) ->
- io_lib:format("~s,~s", [String, Acc])
- end, "", List).
diff --git a/start-dev.sh b/start-dev.sh
deleted file mode 100755
index ffd5ef4..0000000
--- a/start-dev.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-cd `dirname $0`
-exec erl -name erlastic@127.0.0.1 -pa $PWD/ebin $PWD/deps/*/ebin -boot start_sasl -config sys.config -s erlastic_search
-
diff --git a/sys.config b/sys.config
deleted file mode 100644
index 52c7c51..0000000
--- a/sys.config
+++ /dev/null
@@ -1,34 +0,0 @@
-%%% -*- mode:erlang -*-
-%%% Warning - this config file *must* end with
-
-[
- {sasl,
- [
- {sasl_error_logger, {file, "/tmp/erlastic_search.sasl_log"}}
- ]
- },
-
- %% (G)eneric (A)pplication (S)services config below here. This default config provides
- %% the release with log rotaion and trunctation.
- {gas,
- [
- {mod_specs, [{elwrap, {ewl_elwrap_h, start_link}}]},
-
- {wiring, [
- {elwrap, [
- {wire, sasl, sasl_error_logger, "V:{sasl_error_logger, V}"},
- {err_log, "/tmp/erlastic_search.err_log"},
- {err_log_wrap_info, {{err,5000000,10},{sasl,5000000,10}}},
- {transform_previous, "V:[V]"}
- ]}
- ]
- },
-
- {err_log_tty, true} % Log to the screen
-
- ]
- },
-
-
- {erlastic_search, []}
-].
diff --git a/test/basic_SUITE.erl b/test/basic_SUITE.erl
new file mode 100644
index 0000000..8c81e40
--- /dev/null
+++ b/test/basic_SUITE.erl
@@ -0,0 +1,136 @@
+-module(basic_SUITE).
+
+-export([all/0
+ ,groups/0
+ ,init_per_group/2
+ ,end_per_group/2]).
+-export([index_id/1
+ ,index_encoded_id/1
+ ,index_no_id/1
+ ,bulk_index_id/1
+ ,search/1
+ ,index_templates/1]).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("eunit/include/eunit.hrl").
+-include("erlastic_search.hrl").
+
+all() ->
+ [{group, index_access}].
+
+groups() ->
+ [{index_access, [], [index_id
+ ,index_encoded_id
+ ,index_no_id
+ ,bulk_index_id
+ ,search
+ ,index_templates]}].
+
+init_per_group(index_access, Config) ->
+ erlastic_search_app:start_deps(),
+
+ IndexName = create_random_name(<<"es_index_name_">>),
+ {ok, _} = erlastic_search:create_index(IndexName),
+
+ [{index_name, IndexName} | Config].
+
+
+end_per_group(index_access, _Config) ->
+ ok.
+
+index_id(Config) ->
+ IndexName = ?config(index_name, Config),
+ Id = create_random_name(<<"es_id_">>),
+ {ok, _} = erlastic_search:index_doc_with_id(IndexName, <<"type_1">>, Id, [{<<"hello">>, <<"there">>}]).
+
+index_encoded_id(Config) ->
+ IndexName = ?config(index_name, Config),
+ Id = create_random_name(<<"es_id_">>),
+ {ok, _} = erlastic_search:index_doc_with_id(IndexName, <<"type_1">>, Id, erls_json:encode([{<<"hello">>, <<"there">>}])).
+
+index_no_id(Config) ->
+ IndexName = ?config(index_name, Config),
+ {ok, _} = erlastic_search:index_doc(IndexName, <<"type_1">>, [{<<"hello">>, <<"there">>}]).
+
+bulk_index_id(Config) ->
+ IndexName = ?config(index_name, Config),
+ Id = create_random_name(<<"es_id_">>),
+ Doc = {<<"how">>, <<"you_doing">>}, %% in Joey Tribbiani voice
+ Items = [{IndexName, <<"type_1">>, Id, [Doc]}],
+ {ok, _} = erlastic_search:bulk_index_docs(#erls_params{}, Items),
+ {ok, _} = erlastic_search:flush_index(IndexName),
+ {ok, Resp} = erlastic_search:search(IndexName, <<"how:you_doing">>),
+ {<<"hits">>, Hits} = lists:keyfind(<<"hits">>, 1, Resp),
+ {<<"hits">>, Hits1} = lists:keyfind(<<"hits">>, 1, Hits),
+ F = fun(Item) ->
+ {<<"_id">>, AId} = lists:keyfind(<<"_id">>, 1, Item),
+ AId == Id
+ end,
+ [_] = lists:filter(F, Hits1).
+
+search(Config) ->
+ IndexName = ?config(index_name, Config),
+ {ok, _} = erlastic_search:search(IndexName, <<"hello:there">>).
+
+%% @doc Creates an index template, and tests that it exists with the correct settings and mapping
+index_templates(_Config) ->
+ %% First we create the index template
+ IndexTemplateName = create_random_name(<<"test_template_">>),
+ {ok, _} = erlastic_search:create_index_template(IndexTemplateName, template_mapping_json()),
+
+ %% When searching for an index template, we should only need the name, not the full path
+ %% The get_templates/1 fun should handle creating the correct path
+ {ok, [{IndexTemplateName, ActualTemplateSettingsAndMapping1}]} = erlastic_search:get_index_templates(IndexTemplateName),
+
+ %% Also make sure that the get_templates/0 fun returns the same thing as get_templates/1 with the current state
+ {ok, [{IndexTemplateName, ActualTemplateSettingsAndMapping1}]} = erlastic_search:get_index_templates(),
+
+ %% The order and aliases are generated automatically, both of which will be default, we will not compare
+ ActualTemplateSettingsAndMapping2 = proplists:delete(<<"order">>, ActualTemplateSettingsAndMapping1),
+ ActualTemplateSettingsAndMapping3 = proplists:delete(<<"aliases">>, ActualTemplateSettingsAndMapping2),
+
+ ExpectedTemplateMappingAndSettings = lists:sort(erls_json:decode(erls_json:encode(template_mapping_json()))),
+ ActualTemplateSettingsAndMapping = lists:sort(ActualTemplateSettingsAndMapping3),
+
+ ExpectedTemplateMappingAndSettings = ActualTemplateSettingsAndMapping,
+
+ %% Remove this index template
+ erlastic_search:delete_index_template(IndexTemplateName),
+ %% And we confirm that it is removed
+ {ok,[{}]} = erlastic_search:get_index_templates().
+
+%%%===================================================================
+%%% Helper Functions
+%%%===================================================================
+
+create_random_name(Name) ->
+ random:seed(os:timestamp()),
+ <<Name/binary, (list_to_binary(integer_to_list(random:uniform(1000000))))/binary>>.
+
+%% @doc Uses the example template settings from
+%% https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html#indices-templates
+template_mapping_json() ->
+ #{
+ <<"mappings">> => #{
+ <<"test_type">> => #{
+ <<"_source">> => #{
+ <<"enabled">> => false
+ },
+ <<"properties">> => #{
+ <<"host_name">> => #{
+ <<"type">> => <<"string">>
+ },
+ <<"created_at">> => #{
+ <<"type">> => <<"date">>,
+ <<"format">> => <<"EEE MMM dd HH:mm:ss Z YYYY">>
+ }
+ }
+ }
+ },
+ <<"settings">> => #{
+ <<"index">> => #{
+ <<"number_of_shards">> => <<"1">>
+ }
+ },
+ <<"template">> => <<"test_template-*">>
+ }.