upgrade elasticsearch to 5.0, upgrade ember
author ymh <ymh.work@gmail.com>
date Wed, 09 Nov 2016 15:05:41 +0100
changeset 406 cf0f23803a53
parent 405 f239c8c5bb94
child 407 2dba812c7ef2
upgrade elasticsearch to 5.0, upgrade ember
cms/app-client/.gitignore
cms/app-client/bower.json
cms/app-client/package.json
cms/app-client/tests/index.html
common/corpus-common-addon/.gitignore
common/corpus-common-addon/.jshintrc
common/corpus-common-addon/.travis.yml
common/corpus-common-addon/README.md
common/corpus-common-addon/bower.json
common/corpus-common-addon/config/ember-try.js
common/corpus-common-addon/package.json
common/corpus-common-addon/tests/.jshintrc
common/corpus-common-addon/tests/dummy/app/index.html
common/corpus-common-addon/tests/dummy/app/router.js
common/corpus-common-addon/tests/dummy/config/environment.js
common/corpus-common-addon/tests/helpers/module-for-acceptance.js
common/corpus-common-addon/tests/index.html
dev/Vagrantfile
dev/provisioning/modules/elasticsearch/CHANGELOG.md
dev/provisioning/modules/elasticsearch/CONTRIBUTING.md
dev/provisioning/modules/elasticsearch/CONTRIBUTORS
dev/provisioning/modules/elasticsearch/Gemfile
dev/provisioning/modules/elasticsearch/LICENSE
dev/provisioning/modules/elasticsearch/Makefile
dev/provisioning/modules/elasticsearch/README.md
dev/provisioning/modules/elasticsearch/Rakefile
dev/provisioning/modules/elasticsearch/lib/facter/es_facts.rb
dev/provisioning/modules/elasticsearch/lib/puppet/feature/elasticsearch_shield_users_native.rb
dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/array_suffix.rb
dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/concat_merge.rb
dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/deep_implode.rb
dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/es_plugin_name.rb
dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/plugin_dir.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elastic_plugin.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elastic_yaml.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_plugin/elasticsearch_plugin.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_plugin/plugin.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_role/parsed.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_role_mapping/parsed.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_user/esusers.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_user/parsed.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_user_roles/parsed.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_template/ruby.rb
dev/provisioning/modules/elasticsearch/lib/puppet/provider/es_instance_conn_validator/tcp_port.rb
dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_plugin.rb
dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_role.rb
dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_role_mapping.rb
dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_user.rb
dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_user_roles.rb
dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_template.rb
dev/provisioning/modules/elasticsearch/lib/puppet/type/es_instance_conn_validator.rb
dev/provisioning/modules/elasticsearch/lib/puppet/util/es_instance_validator.rb
dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/deep_implode.rb
dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/deep_to_i.rb
dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/hash.rb
dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/plugin_name.rb
dev/provisioning/modules/elasticsearch/manifests/config.pp
dev/provisioning/modules/elasticsearch/manifests/init.pp
dev/provisioning/modules/elasticsearch/manifests/instance.pp
dev/provisioning/modules/elasticsearch/manifests/package.pp
dev/provisioning/modules/elasticsearch/manifests/package/pin.pp
dev/provisioning/modules/elasticsearch/manifests/params.pp
dev/provisioning/modules/elasticsearch/manifests/plugin.pp
dev/provisioning/modules/elasticsearch/manifests/python.pp
dev/provisioning/modules/elasticsearch/manifests/repo.pp
dev/provisioning/modules/elasticsearch/manifests/ruby.pp
dev/provisioning/modules/elasticsearch/manifests/script.pp
dev/provisioning/modules/elasticsearch/manifests/service.pp
dev/provisioning/modules/elasticsearch/manifests/service/init.pp
dev/provisioning/modules/elasticsearch/manifests/service/openbsd.pp
dev/provisioning/modules/elasticsearch/manifests/service/openrc.pp
dev/provisioning/modules/elasticsearch/manifests/service/systemd.pp
dev/provisioning/modules/elasticsearch/manifests/shield/role.pp
dev/provisioning/modules/elasticsearch/manifests/shield/user.pp
dev/provisioning/modules/elasticsearch/manifests/template.pp
dev/provisioning/modules/elasticsearch/metadata.json
dev/provisioning/modules/elasticsearch/spec/acceptance/.beaker-foss.cfg
dev/provisioning/modules/elasticsearch/spec/acceptance/001_basic_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/002_class_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/003_template_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/004_plugin_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/009_datapath_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/010_pkg_url_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/011_service_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/012_instances_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/013_config_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/014_hiera_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/015_staged_removal.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/016_package_pin_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/017_restart_on_change_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/018_shield_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/020_usergroup_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/021_es2x_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/basic_spec.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/integration001.rb
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/centos-6-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/centos-7-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-6-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-7-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-8-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/opensuse-121-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/opensuse-13-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/opensuse-131-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/oracle-6-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/oracle-7-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/sles-11-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/sles-11sp3-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/sles-12-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1204-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1210-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1304-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1310-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1404-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1604-x64.yml
dev/provisioning/modules/elasticsearch/spec/acceptance/xplugins001.rb
dev/provisioning/modules/elasticsearch/spec/classes/000_elasticsearch_init_spec.rb
dev/provisioning/modules/elasticsearch/spec/classes/001_hiera_spec.rb
dev/provisioning/modules/elasticsearch/spec/classes/005_elasticsearch_repo_spec.rb
dev/provisioning/modules/elasticsearch/spec/classes/010_elasticsearch_init_unkown_spec.rb
dev/provisioning/modules/elasticsearch/spec/classes/099_coverage_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/001_elasticsearch_python_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/002_elasticsearch_ruby_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/003_elasticsearch_template_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/004_elasticsearch_plugin_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/005_elasticsearch_instance_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/006_elasticsearch_script_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/007_elasticsearch_shield_user_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/008_elasticsearch_shield_role_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/010_elasticsearch_service_init_spec.rb
dev/provisioning/modules/elasticsearch/spec/defines/011_elasticsearch_service_system_spec.rb
dev/provisioning/modules/elasticsearch/spec/fixtures/artifacts/.gitkeep
dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Warlock-nodes.json
dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Warlock-root.json
dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Zom-nodes.json
dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Zom-root.json
dev/provisioning/modules/elasticsearch/spec/fixtures/facts/facts_url1.json
dev/provisioning/modules/elasticsearch/spec/fixtures/facts/facts_url2.json
dev/provisioning/modules/elasticsearch/spec/fixtures/hiera/hiera.yaml
dev/provisioning/modules/elasticsearch/spec/fixtures/hiera/hieradata/envs/defaultinstance.yaml
dev/provisioning/modules/elasticsearch/spec/fixtures/hiera/hieradata/multipleinstances.yaml
dev/provisioning/modules/elasticsearch/spec/fixtures/hiera/hieradata/singleinstance.yaml
dev/provisioning/modules/elasticsearch/spec/fixtures/hiera/hieradata/singleplugin.yaml
dev/provisioning/modules/elasticsearch/spec/functions/concat_merge_spec.rb
dev/provisioning/modules/elasticsearch/spec/functions/deep_implode_spec.rb
dev/provisioning/modules/elasticsearch/spec/functions/es_facts_spec.rb
dev/provisioning/modules/elasticsearch/spec/functions/es_plugin_name_spec.rb
dev/provisioning/modules/elasticsearch/spec/functions/plugin_dir_spec.rb
dev/provisioning/modules/elasticsearch/spec/integration/integration001.rb
dev/provisioning/modules/elasticsearch/spec/integration/xplugins001.rb
dev/provisioning/modules/elasticsearch/spec/spec_acceptance_common.rb
dev/provisioning/modules/elasticsearch/spec/spec_acceptance_integration.rb
dev/provisioning/modules/elasticsearch/spec/spec_helper.rb
dev/provisioning/modules/elasticsearch/spec/spec_helper_acceptance.rb
dev/provisioning/modules/elasticsearch/spec/spec_helper_faraday.rb
dev/provisioning/modules/elasticsearch/spec/spec_helper_tls.rb
dev/provisioning/modules/elasticsearch/spec/templates/001_elasticsearch.yml.erb_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elastic_yaml_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_plugin/elasticsearch_plugin_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_plugin/plugin_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_plugin/shared_examples.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_role/parsed_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_role_mapping/parsed_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_user/esusers_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_user/parsed_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_user_roles/parsed_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_template/ruby.rb
dev/provisioning/modules/elasticsearch/spec/unit/provider/plugin_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_plugin_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_role_mapping_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_role_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_user_roles_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_user_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_template_spec.rb
dev/provisioning/modules/elasticsearch/spec/unit/type/plugin_spec.rb
dev/provisioning/modules/elasticsearch/templates/etc/elasticsearch/elasticsearch.yml.erb
dev/provisioning/modules/elasticsearch/templates/etc/elasticsearch/logging.yml.erb
dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.Debian.erb
dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.OpenBSD.erb
dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.RedHat.erb
dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.SLES.erb
dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.openrc.erb
dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.systemd.erb
dev/provisioning/modules/elasticsearch/templates/etc/sysconfig/defaults.erb
dev/provisioning/modules/elasticsearch/templates/usr/lib/tmpfiles.d/elasticsearch.conf.erb
dev/provisioning/modules/sysconfig/files/elasticsearch/elasticsearch-es_01
dev/provisioning/modules/sysconfig/files/elasticsearch/elasticsearch-es_01.service
dev/provisioning/modules/sysconfig/files/elasticsearch/jvm.options
dev/provisioning/modules/sysconfig/files/elasticsearch/log4j2.properties
dev/provisioning/modules/sysconfig/manifests/elasticsearch.pp
dev/provisioning/modules/sysconfig/templates/corpus/corpus_env.conf.erb
dev/provisioning/modules/sysconfig/templates/corpus/local.env.erb
server/bo_client/.gitignore
server/bo_client/app/app.js
server/bo_client/bower.json
server/bo_client/package.json
server/bo_client/tests/index.html
server/src/.env.example
server/src/app/Console/Commands/IndexDocuments.php
server/src/app/Console/Commands/ManageHandles.php
server/src/app/Http/Controllers/Api/DateStatsController.php
server/src/app/Http/Controllers/Api/DiscourseController.php
server/src/app/Http/Controllers/Api/GeoStatsController.php
server/src/app/Http/Controllers/Api/LanguageController.php
server/src/app/Http/Controllers/Api/ThemeController.php
server/src/composer.json
server/src/composer.lock
server/src/config/app.php
server/src/package.json
--- a/cms/app-client/.gitignore	Tue Nov 08 18:23:01 2016 +0100
+++ b/cms/app-client/.gitignore	Wed Nov 09 15:05:41 2016 +0100
@@ -13,5 +13,5 @@
 /connect.lock
 /coverage/*
 /libpeerconnection.log
-npm-debug.log
+npm-debug.log*
 testem.log
--- a/cms/app-client/bower.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/cms/app-client/bower.json	Wed Nov 09 15:05:41 2016 +0100
@@ -1,9 +1,8 @@
 {
   "name": "app-client",
   "dependencies": {
-    "ember": "~2.8.0",
-    "ember-cli-shims": "0.1.1",
-    "ember-qunit-notifications": "0.1.0",
+    "ember": "~2.9.0",
+    "ember-cli-shims": "0.1.3",
     "jquery": "^2.2",
     "qunit": "~1.18.0",
     "bootstrap-sass": "bootstrap-sass-official#~3.3.6",
--- a/cms/app-client/package.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/cms/app-client/package.json	Wed Nov 09 15:05:41 2016 +0100
@@ -20,35 +20,35 @@
   },
   "repository": "",
   "engines": {
-    "node": ">= 0.10.0"
+    "node": ">= 0.12.0"
   },
   "author": "",
   "license": "MIT",
   "devDependencies": {
-    "bower": "^1.7.9",
-    "broccoli-asset-rev": "^2.4.2",
+    "bower": "^1.8.0",
+    "broccoli-asset-rev": "^2.4.5",
     "broccoli-funnel": "^1.0.1",
     "broccoli-merge-trees": "^1.1.1",
     "broccoli-static-compiler": "^0.2.2",
     "corpus-common-addon": "file:../../common/corpus-common-addon",
     "ember-ajax": "^2.4.1",
-    "ember-cli": "2.8.0",
-    "ember-cli-app-version": "^1.0.0",
-    "ember-cli-babel": "^5.1.6",
+    "ember-cli": "2.9.1",
+    "ember-cli-app-version": "^2.0.0",
+    "ember-cli-babel": "^5.1.7",
     "ember-cli-d3": "1.1.6",
-    "ember-cli-dependency-checker": "^1.2.0",
-    "ember-cli-htmlbars": "^1.0.3",
-    "ember-cli-htmlbars-inline-precompile": "^0.3.1",
-    "ember-cli-inject-live-reload": "^1.4.0",
-    "ember-cli-jshint": "^1.0.0",
+    "ember-cli-dependency-checker": "^1.3.0",
+    "ember-cli-htmlbars": "^1.0.10",
+    "ember-cli-htmlbars-inline-precompile": "^0.3.3",
+    "ember-cli-inject-live-reload": "^1.4.1",
+    "ember-cli-jshint": "^1.0.4",
     "ember-cli-mirage": "^0.2.2",
-    "ember-cli-qunit": "^2.1.0",
+    "ember-cli-qunit": "^3.0.1",
     "ember-cli-release": "^0.2.9",
     "ember-cli-sass": "5.3.1",
     "ember-cli-sri": "^2.1.0",
     "ember-cli-test-loader": "^1.1.0",
     "ember-cli-uglify": "^1.2.0",
-    "ember-data": "^2.8.0",
+    "ember-data": "^2.9.0",
     "ember-data-fixture-adapter": "1.13.0",
     "ember-disable-proxy-controllers": "^1.0.1",
     "ember-export-application-global": "^1.0.5",
@@ -56,9 +56,9 @@
     "ember-load-initializers": "^0.5.1",
     "ember-lodash": "0.0.10",
     "ember-resolver": "^2.0.3",
-    "ember-welcome-page": "^1.0.1",
+    "ember-welcome-page": "^1.0.3",
     "ember-wormhole": "0.4.0",
     "eslint-config-ember": "^0.3.0",
-    "loader.js": "^4.0.1"
+    "loader.js": "^4.0.10"
   }
 }
--- a/cms/app-client/tests/index.html	Tue Nov 08 18:23:01 2016 +0100
+++ b/cms/app-client/tests/index.html	Wed Nov 09 15:05:41 2016 +0100
@@ -21,7 +21,7 @@
     {{content-for "body"}}
     {{content-for "test-body"}}
 
-    <script src="{{rootURL}}testem.js" integrity=""></script>
+    <script src="/testem.js" integrity=""></script>
     <script src="{{rootURL}}assets/vendor.js"></script>
     <script src="{{rootURL}}assets/test-support.js"></script>
     <script src="{{rootURL}}assets/app-client.js"></script>
--- a/common/corpus-common-addon/.gitignore	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/.gitignore	Wed Nov 09 15:05:41 2016 +0100
@@ -13,5 +13,5 @@
 /connect.lock
 /coverage/*
 /libpeerconnection.log
-npm-debug.log
+npm-debug.log*
 testem.log
--- a/common/corpus-common-addon/.jshintrc	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/.jshintrc	Wed Nov 09 15:05:41 2016 +0100
@@ -27,6 +27,6 @@
   "strict": false,
   "white": false,
   "eqnull": true,
-  "esnext": true,
+  "esversion": 6,
   "unused": true
 }
--- a/common/corpus-common-addon/.travis.yml	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/.travis.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -1,7 +1,7 @@
 ---
 language: node_js
 node_js:
-  - "0.12"
+  - "4"
 
 sudo: false
 
@@ -10,8 +10,9 @@
     - node_modules
 
 env:
-  - EMBER_TRY_SCENARIO=default
-  - EMBER_TRY_SCENARIO=ember-1-13
+  # we recommend testing LTS's and latest stable release (bonus points to beta/canary)
+  - EMBER_TRY_SCENARIO=ember-1.13
+  - EMBER_TRY_SCENARIO=ember-lts-2.4
   - EMBER_TRY_SCENARIO=ember-release
   - EMBER_TRY_SCENARIO=ember-beta
   - EMBER_TRY_SCENARIO=ember-canary
@@ -22,14 +23,17 @@
     - env: EMBER_TRY_SCENARIO=ember-canary
 
 before_install:
-  - export PATH=/usr/local/phantomjs-2.0.0/bin:$PATH
-  - "npm config set spin false"
-  - "npm install -g npm@^2"
+  - npm config set spin false
+  - npm install -g bower
+  - bower --version
+  - npm install phantomjs-prebuilt
+  - node_modules/phantomjs-prebuilt/bin/phantomjs --version
 
 install:
-  - npm install -g bower
   - npm install
   - bower install
 
 script:
-  - ember try $EMBER_TRY_SCENARIO test
+  # Usually, it's ok to finish the test scenario without reverting
+  #  to the addon's original dependency state, skipping "cleanup".
+  - ember try:one $EMBER_TRY_SCENARIO test --skip-cleanup
--- a/common/corpus-common-addon/README.md	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/README.md	Wed Nov 09 15:05:41 2016 +0100
@@ -4,18 +4,19 @@
 
 ## Installation
 
-* `git clone` this repository
+* `git clone <repository-url>` this repository
+* `cd corpus-common-addon`
 * `npm install`
 * `bower install`
 
 ## Running
 
-* `ember server`
-* Visit your app at http://localhost:4200.
+* `ember serve`
+* Visit your app at [http://localhost:4200](http://localhost:4200).
 
 ## Running Tests
 
-* `npm test` (Runs `ember try:testall` to test your addon against multiple Ember versions)
+* `npm test` (Runs `ember try:each` to test your addon against multiple Ember versions)
 * `ember test`
 * `ember test --server`
 
@@ -23,4 +24,4 @@
 
 * `ember build`
 
-For more information on using ember-cli, visit [http://www.ember-cli.com/](http://www.ember-cli.com/).
+For more information on using ember-cli, visit [http://ember-cli.com/](http://ember-cli.com/).
--- a/common/corpus-common-addon/bower.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/bower.json	Wed Nov 09 15:05:41 2016 +0100
@@ -1,10 +1,8 @@
 {
   "name": "corpus-common-addon",
   "dependencies": {
-    "ember": "~2.4.1",
-    "ember-cli-shims": "0.1.0",
-    "ember-cli-test-loader": "0.2.2",
-    "ember-qunit-notifications": "0.1.0"
+    "ember": "~2.9.0",
+    "ember-cli-shims": "0.1.3"
   },
   "devDependencies": {
     "store": "https://github.com/marcuswestin/store.js.git#v1.3.20"
--- a/common/corpus-common-addon/config/ember-try.js	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/config/ember-try.js	Wed Nov 09 15:05:41 2016 +0100
@@ -2,13 +2,7 @@
 module.exports = {
   scenarios: [
     {
-      name: 'default',
-      bower: {
-        dependencies: { }
-      }
-    },
-    {
-      name: 'ember-1-13',
+      name: 'ember-1.13',
       bower: {
         dependencies: {
           'ember': '~1.13.0'
@@ -19,6 +13,17 @@
       }
     },
     {
+      name: 'ember-lts-2.4',
+      bower: {
+        dependencies: {
+          'ember': 'components/ember#lts-2-4'
+        },
+        resolutions: {
+          'ember': 'lts-2-4'
+        }
+      }
+    },
+    {
       name: 'ember-release',
       bower: {
         dependencies: {
--- a/common/corpus-common-addon/package.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/package.json	Wed Nov 09 15:05:41 2016 +0100
@@ -9,42 +9,43 @@
   "scripts": {
     "build": "ember build",
     "start": "ember server",
-    "test": "ember try:testall",
+    "test": "ember try:each",
     "set-version": "bash -c 'sed -i \"\" -e \"s/\\([:space:]*\\\"version\\\"[[:space:]]*\\:[[:space:]]*\\\"\\)[\\.0-9]*\\(\\\".*\\)/\\1${1}\\2/\" package.json' 0"
   },
   "repository": "",
   "engines": {
-    "node": ">= 0.10.0"
+    "node": ">= 0.12.0"
   },
   "author": "",
   "license": "MIT",
   "devDependencies": {
-    "bower": "^1.7.6",
-    "broccoli-asset-rev": "^2.2.0",
-    "ember-ajax": "0.7.1",
-    "ember-cli": "^2.4.0",
-    "ember-cli-app-version": "^1.0.0",
-    "ember-cli-dependency-checker": "^1.2.0",
-    "ember-cli-htmlbars-inline-precompile": "^0.3.1",
-    "ember-cli-inject-live-reload": "^1.3.1",
-    "ember-cli-qunit": "^1.2.1",
-    "ember-cli-release": "0.2.8",
-    "ember-cli-sri": "^2.0.0",
+    "bower": "^1.8.0",
+    "broccoli-asset-rev": "^2.4.5",
+    "ember-ajax": "^2.4.1",
+    "ember-cli": "2.9.1",
+    "ember-cli-app-version": "^2.0.0",
+    "ember-cli-dependency-checker": "^1.3.0",
+    "ember-cli-htmlbars-inline-precompile": "^0.3.3",
+    "ember-cli-inject-live-reload": "^1.4.1",
+    "ember-cli-jshint": "^1.0.4",
+    "ember-cli-qunit": "^3.0.1",
+    "ember-cli-release": "^0.2.9",
+    "ember-cli-sri": "^2.1.0",
+    "ember-cli-test-loader": "^1.1.0",
     "ember-cli-uglify": "^1.2.0",
-    "ember-data": "^2.4.0",
+    "ember-data": "^2.9.0",
     "ember-disable-prototype-extensions": "^1.1.0",
-    "ember-disable-proxy-controllers": "^1.0.1",
-    "ember-export-application-global": "^1.0.4",
-    "ember-load-initializers": "^0.5.0",
+    "ember-export-application-global": "^1.0.5",
+    "ember-load-initializers": "^0.5.1",
     "ember-resolver": "^2.0.3",
-    "ember-try": "^0.1.2",
+    "ember-welcome-page": "^1.0.3",
     "eslint": "^3.5.0",
     "eslint-config-ember": "^0.3.0",
-    "eslint-config-standard": "^6.0.1",
-    "eslint-plugin-promise": "^2.0.1",
+    "eslint-config-standard": "^6.2.1",
+    "eslint-plugin-promise": "^3.3.1",
     "eslint-plugin-standard": "^2.0.0",
-    "loader.js": "^4.0.0",
-    "lodash": "^4.5.1",
+    "loader.js": "^4.0.10",
+    "lodash": "^3.10.0",
     "q": "^1.4.1",
     "request": "^2.67.0"
   },
@@ -53,8 +54,8 @@
   ],
   "dependencies": {
     "chalk": "^1.1.3",
-    "ember-cli-babel": "^5.1.5",
-    "ember-cli-htmlbars": "^1.0.1"
+    "ember-cli-babel": "^5.1.10",
+    "ember-cli-htmlbars": "^1.0.10"
   },
   "ember-addon": {
     "configPath": "tests/dummy/config"
--- a/common/corpus-common-addon/tests/.jshintrc	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/tests/.jshintrc	Wed Nov 09 15:05:41 2016 +0100
@@ -47,6 +47,6 @@
   "strict": false,
   "white": false,
   "eqnull": true,
-  "esnext": true,
+  "esversion": 6,
   "unused": true
 }
--- a/common/corpus-common-addon/tests/dummy/app/index.html	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/tests/dummy/app/index.html	Wed Nov 09 15:05:41 2016 +0100
@@ -9,16 +9,16 @@
 
     {{content-for "head"}}
 
-    <link rel="stylesheet" href="assets/vendor.css">
-    <link rel="stylesheet" href="assets/dummy.css">
+    <link rel="stylesheet" href="{{rootURL}}assets/vendor.css">
+    <link rel="stylesheet" href="{{rootURL}}assets/dummy.css">
 
     {{content-for "head-footer"}}
   </head>
   <body>
     {{content-for "body"}}
 
-    <script src="assets/vendor.js"></script>
-    <script src="assets/dummy.js"></script>
+    <script src="{{rootURL}}assets/vendor.js"></script>
+    <script src="{{rootURL}}assets/dummy.js"></script>
 
     {{content-for "body-footer"}}
   </body>
--- a/common/corpus-common-addon/tests/dummy/app/router.js	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/tests/dummy/app/router.js	Wed Nov 09 15:05:41 2016 +0100
@@ -2,8 +2,8 @@
 import config from './config/environment';
 
 const Router = Ember.Router.extend({
-    location: config.locationType,
-    rootURL: config.rootURL || '/'
+  location: config.locationType,
+  rootURL: config.rootURL
 });
 
 Router.map(function() {
--- a/common/corpus-common-addon/tests/dummy/config/environment.js	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/tests/dummy/config/environment.js	Wed Nov 09 15:05:41 2016 +0100
@@ -10,13 +10,16 @@
       FEATURES: {
         // Here you can enable experimental features on an ember canary build
         // e.g. 'with-controller': true
+      },
+      EXTEND_PROTOTYPES: {
+        // Prevent Ember Data from overriding Date.parse.
+        Date: false
       }
     },
 
     APP: {
       // Here you can pass flags/options to your application instance
       // when it is created
-      backRootURL: '/'
     }
   };
 
@@ -30,8 +33,6 @@
 
   if (environment === 'test') {
     // Testem prefers this...
-    ENV.rootURL = '/';
-    ENV.APP.backRootURL = '/';
     ENV.locationType = 'none';
 
     // keep test console output quieter
--- a/common/corpus-common-addon/tests/helpers/module-for-acceptance.js	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/tests/helpers/module-for-acceptance.js	Wed Nov 09 15:05:41 2016 +0100
@@ -1,23 +1,23 @@
 import { module } from 'qunit';
+import Ember from 'ember';
 import startApp from '../helpers/start-app';
 import destroyApp from '../helpers/destroy-app';
 
+const { RSVP: { Promise } } = Ember;
+
 export default function(name, options = {}) {
   module(name, {
     beforeEach() {
       this.application = startApp();
 
       if (options.beforeEach) {
-        options.beforeEach.apply(this, arguments);
+        return options.beforeEach.apply(this, arguments);
       }
     },
 
     afterEach() {
-      if (options.afterEach) {
-        options.afterEach.apply(this, arguments);
-      }
-
-      destroyApp(this.application);
+      let afterEach = options.afterEach && options.afterEach.apply(this, arguments);
+      return Promise.resolve(afterEach).then(() => destroyApp(this.application));
     }
   });
 }
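For context, the rewritten `module-for-acceptance.js` helper above now returns promises from its `beforeEach`/`afterEach` hooks, so asynchronous setup and teardown are awaited before the tests run and before the application is destroyed. A minimal sketch of a hypothetical acceptance test that relies on this behaviour (the module name and the seeding call are illustrative, not part of this changeset):

```javascript
import { test } from 'qunit';
import Ember from 'ember';
import moduleForAcceptance from '../helpers/module-for-acceptance';

moduleForAcceptance('Acceptance | example', {
  beforeEach() {
    // Returning a promise here defers the tests until setup has resolved.
    return Ember.RSVP.resolve(/* e.g. seed fixtures or authenticate */);
  },

  afterEach() {
    // Any promise returned here settles before destroyApp() tears the app down.
    return Ember.RSVP.resolve();
  }
});

test('visiting /', function(assert) {
  visit('/');

  andThen(function() {
    assert.equal(currentURL(), '/');
  });
});
```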
--- a/common/corpus-common-addon/tests/index.html	Tue Nov 08 18:23:01 2016 +0100
+++ b/common/corpus-common-addon/tests/index.html	Wed Nov 09 15:05:41 2016 +0100
@@ -10,9 +10,9 @@
     {{content-for "head"}}
     {{content-for "test-head"}}
 
-    <link rel="stylesheet" href="assets/vendor.css">
-    <link rel="stylesheet" href="assets/dummy.css">
-    <link rel="stylesheet" href="assets/test-support.css">
+    <link rel="stylesheet" href="{{rootURL}}assets/vendor.css">
+    <link rel="stylesheet" href="{{rootURL}}assets/dummy.css">
+    <link rel="stylesheet" href="{{rootURL}}assets/test-support.css">
 
     {{content-for "head-footer"}}
     {{content-for "test-head-footer"}}
@@ -21,12 +21,11 @@
     {{content-for "body"}}
     {{content-for "test-body"}}
 
-    <script src="testem.js" integrity=""></script>
-    <script src="assets/vendor.js"></script>
-    <script src="assets/test-support.js"></script>
-    <script src="assets/dummy.js"></script>
-    <script src="assets/tests.js"></script>
-    <script src="assets/test-loader.js"></script>
+    <script src="/testem.js" integrity=""></script>
+    <script src="{{rootURL}}assets/vendor.js"></script>
+    <script src="{{rootURL}}assets/test-support.js"></script>
+    <script src="{{rootURL}}assets/dummy.js"></script>
+    <script src="{{rootURL}}assets/tests.js"></script>
 
     {{content-for "body-footer"}}
     {{content-for "test-body-footer"}}
--- a/dev/Vagrantfile	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/Vagrantfile	Wed Nov 09 15:05:41 2016 +0100
@@ -75,7 +75,7 @@
   #   vb.memory = "1024"
   # end
   config.vm.provider "virtualbox" do |v|
-    v.memory = 1024
+    v.memory = 2048
     v.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
   end
 
--- a/dev/provisioning/modules/elasticsearch/CHANGELOG.md	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/CHANGELOG.md	Wed Nov 09 15:05:41 2016 +0100
@@ -1,3 +1,210 @@
+## x.x.x (Month Day, Year)
+
+### Summary
+* Support for Ubuntu Xenial (16.04) formally declared.
+
+#### Features
+* Support management of 5.x-style Elastic yum/apt package repositories.
+
+#### Bugfixes
+* Update the apt::source call to not cause deprecation warnings
+* Updated module metadata to correctly require puppet-stdlib with validate_integer()
+
+#### Changes
+
+#### Testing changes
+* Ubuntu Xenial (16.04) added to the test matrix.
+
+## 0.14.0 (October 12, 2016)
+
+### Summary
+Primarily a bugfix release for issues related to plugin proxy functionality, various system service fixes, and directory permissions.
+This release also adds the ability to define logging rolling file settings and a CA file/path for template API access.
+
+#### Features
+* Added 'file_rolling_type' parameter to allow selecting the file logging rotation type, either "dailyRollingFile" or "rollingFile". Also added 'daily_rolling_date_pattern', 'rolling_file_max_backup_index' and 'rolling_file_max_file_size' for file rolling customization.
+
+#### Bugfixes
+* Permissions on the Elasticsearch plugin directory have been fixed to permit world read rights.
+* The service systemd unit now `Wants=` a network target to fix bootup parallelization problems.
+* Recursively create the logdir for elasticsearch when creating multiple instances
+* Files and directories with root ownership now specify UID/GID 0 instead to improve compatibility with *BSDs.
+* Elasticsearch Debian init file changed to avoid throwing errors when DATA_DIR, WORK_DIR and/or LOG_DIR were an empty variable.
+* Fixed a broken File dependency when a plugin was set to absent and ::elasticsearch set to present.
+* Fixed issue when using the `proxy` parameter on plugins in Elasticsearch 2.x.
+
+#### Changes
+* The `api_ca_file` and `api_ca_path` parameters have been added to support custom CA bundles for API access.
+* Numerics in elasticsearch.yml will always be properly unquoted.
+* puppetlabs/java is now listed as a dependency in metadata.json to avoid unexpected installation problems.
+
+#### Testing changes
+
+## 0.13.2 (August 29, 2016)
+
+### Summary
+Primarily a bugfix release to resolve HTTPS use in elasticsearch::template resources, 5.x plugin operations, and plugin file permission enforcement.
+
+#### Features
+* Plugin installation for the 5.x series of Elasticsearch is now properly supported.
+
+#### Bugfixes
+* Recursively enforce correct plugin directory mode to avoid Elasticsearch startup permissions errors.
+* Fixed an edge case where dependency cycles could arise when managing absent resources.
+* Elasticsearch templates now properly use HTTPS when instructed to do so.
+
+#### Changes
+* Updated the elasticsearch_template type to return more helpful error output.
+* Updated the es_instance_conn_validator type to silence deprecation warnings in Puppet >= 4.
+
+#### Testing changes
+
+## 0.13.1 (August 8, 2016)
+
+### Summary
+Lingering bugfixes from elasticsearch::template changes.
+More robust systemd mask handling.
+Updated some upstream module parameters for deprecation warnings.
+Support for the Shield `system_key` file.
+
+#### Features
+* Added `system_key` parameter to the `elasticsearch` class and `elasticsearch::instance` type for placing Shield system keys.
+
+#### Bugfixes
+* Fixed systemd elasticsearch.service unit masking to use systemctl rather than raw symlinking to avoid puppet file backup errors.
+* Fixed a couple of cases that broke compatibility with older versions of puppet (elasticsearch_template types on puppet versions prior to 3.6 and yumrepo parameters on puppet versions prior to 3.5.1)
+* Fixed issues that caused templates to be incorrectly detected as out-of-sync and thus always changed on each puppet run.
+* Resources are now explicitly ordered to ensure behavior such as plugins being installed before instance start, users managed before templates changed, etc.
+
+#### Changes
+* Updated repository gpg fingerprint key to long form to silence module warnings.
+
+#### Testing changes
+
+## 0.13.0 (August 1, 2016)
+
+### Summary
+Rewritten elasticsearch::template using native type and provider.
+Fixed and added additional proxy parameters to elasticsearch::plugin instances.
+Exposed repo priority parameters for apt and yum repos.
+
+#### Features
+* In addition to better consistency, the `elasticsearch::template` type now also accepts various `api_*` parameters to control how access to the Elasticsearch API is configured (there are top-level parameters that are inherited and can be overwritten in `elasticsearch::api_*`).
+* The `elasticsearch::config` parameter now supports deep hiera merging.
+* Added the `elasticsearch::repo_priority` parameter to support apt and yum repository priority configuration.
+* Added `proxy_username` and `proxy_password` parameters to `elasticsearch::plugin`.
+
+#### Bugfixes
+* Content of templates should now properly trigger new API PUT requests when the index template stored in Elasticsearch differs from the template defined in puppet.
+* Installing plugins with proxy parameters now works correctly due to changed Java property flags.
+* The `elasticsearch::plugin::module_dir` parameter has been re-implemented to aid in working around plugins with non-standard plugin directories.
+
+#### Changes
+* The `file` parameter on the `elasticsearch::template` defined type has been deprecated to be consistent with usage of the `source` parameter for other types.
+
+#### Testing changes
+
+## 0.12.0 (July 20, 2016)
+
+IMPORTANT! A bug was fixed that mistakenly added /var/lib to the list of DATA_DIR paths on Debian-based systems.  This release removes that environment variable, which could potentially change path.data directories for instances of Elasticsearch.  Take proper precautions when upgrading to avoid unexpected downtime or data loss (test module upgrades, et cetera).
+
+### Summary
+Rewritten yaml generator, code cleanup, and various bugfixes. Configuration file yaml no longer nested. Service no longer restarts by default, and exposes more granular restart options.
+
+#### Features
+* The additional parameters restart_config_change, restart_package_change, and restart_plugin_change have been added for more granular control over service restarts.
+
+#### Bugfixes
+* Special yaml cases such as arrays of hashes and strings like "::" are properly supported.
+* Previous Debian SysV init scripts mistakenly set the `DATA_DIR` environment variable to a non-default value.
+* Some plugins failed installation due to capitalization munging; the elasticsearch_plugin provider no longer forces downcasing.
+
+#### Changes
+* The `install_options` parameter on the `elasticsearch::plugin` type has been removed. This was an undocumented parameter that often caused problems for users.
+* The `elasticsearch.service` systemd unit is no longer removed but masked by default, effectively hiding it from systemd but retaining the upstream vendor unit on disk for package management consistency.
+* `restart_on_change` now defaults to false to reduce unexpected cluster downtime (can be set to true if desired).
+* Package pinning is now contained within a separate class, so users can opt to manage package repositories manually and still use this module's pinning feature.
+* All configuration hashes are now flattened into dot-notated yaml in the elasticsearch configuration file. This should be fairly transparent in terms of behavior, though the config file formatting will change.
+
+#### Testing changes
+* The acceptance test suite has been dramatically slimmed to cut down on testing time and reduce false positives.
+
+## 0.11.0 ( May 23, 2016 )
+
+### Summary
+Shield support, SLES support, and overhauled testing setup.
+
+#### Features
+* Support for shield
+  * TLS Certificate management
+  * Users (role and password management for file-based realms)
+  * Roles (file-based with mapping support)
+* Support for [repository proxies](https://github.com/elastic/puppet-elasticsearch/pull/615)
+* Support for [SSL auth on API calls](https://github.com/elastic/puppet-elasticsearch/pull/577)
+
+#### Bugfixes
+* [Fix Facter calls](https://github.com/elastic/puppet-elasticsearch/pull/590) in custom providers
+
+#### Changes
+
+#### Testing changes
+* Overhaul testing methodology, see CONTRIBUTING for updates
+* Add SLES 12, Oracle 6, and PE 2016.1.1 to testing matrix
+* Enforce strict variable checking
+
+#### Known bugs
+* This is the first release with Shield support; some untested edge cases may exist
+
+
+##0.10.3 ( Feb 08, 2016 )
+
+###Summary
+Adding support for OpenBSD and minor fixes
+
+####Features
+* Add required changes to work with ES 2.2.x plugins
+* Support for custom log directory
+* Support for OpenBSD
+
+####Bugfixes
+* Add correct relation to file resource and plugin installation
+* Notify service when upgrading the package
+
+####Changes
+* Remove plugin dir when upgrading Elasticsearch
+
+####Testing changes
+
+####Known bugs
+* Possible package conflicts when using ruby/python defines with main package name
+
+
+##0.10.2 ( Jan 19, 2016 )
+
+###Summary
+Bugfix release and adding Gentoo support
+
+####Features
+* Added Gentoo support
+
+####Bugfixes
+* Create init script when set to unmanaged
+* init_template variable was not passed on correctly to other classes / defines
+* Fix issue with plugin type that caused run to stall
+* Export ES_GC_LOG_FILE in init scripts
+
+####Changes
+* Improve documentation about init_defaults
+* Update common files
+* Removed recurse option on data directory management
+* Add retry functionality to plugin type
+
+####Testing changes
+
+####Known bugs
+* Possible package conflicts when using ruby/python defines with main package name
+
+
 ##0.10.1 ( Dec 17, 2015 )
 
 ###Summary
--- a/dev/provisioning/modules/elasticsearch/CONTRIBUTING.md	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/CONTRIBUTING.md	Wed Nov 09 15:05:41 2016 +0100
@@ -1,46 +1,70 @@
-If you have a bugfix or new feature that you would like to contribute to this puppet module, please find or open an issue about it first. Talk about what you would like to do. It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change.
+# Contributing
 
-We enjoy working with contributors to get their code accepted. There are many approaches to fixing a problem and it is important to find the best approach before writing too much code.
+If you have a bugfix or new feature that you would like to contribute to this puppet module, please find or open an issue about it first.
+Talk about what you would like to do.
+It may be that somebody is already working on it, or that there are particular issues that you should know about before implementing the change.
+
+**Note**: If you have support-oriented questions that aren't a bugfix or feature request, please post your questions on the [discussion forums](https://discuss.elastic.co/c/elasticsearch).
+
+We enjoy working with contributors to get their code accepted.
+There are many approaches to fixing a problem and it is important to find the best approach before writing too much code.
 
 The process for contributing to any of the Elastic repositories is similar.
 
-1. Sign the contributor license agreement
-Please make sure you have signed the [Contributor License Agreement](http://www.elastic.co/contributor-agreement/). We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction. We ask this of all contributors in order to assure our users of the origin and continuing existence of the code. You only need to sign the CLA once.
+## The Contributor License Agreement
+
+Please make sure you have signed the [Contributor License Agreement](http://www.elastic.co/contributor-agreement/).
+We are not asking you to assign copyright to us, but to give us the right to distribute your code without restriction.
+We ask this of all contributors in order to assure our users of the origin and continuing existence of the code.
+You only need to sign the CLA once.
+
+## Development Setup
+
+There are a few testing prerequisites to meet:
+
+* Ruby.
+  As long as you have a recent version with `bundler` available, `bundler` will install development dependencies.
 
-2. Run the rspec tests and ensure it completes without errors with your changes.
+You can then install the necessary gems with:
+
+    make
+
+This will install the requisite rubygems for testing into `.vendor`.
+Note that you can purge all testing fixtures/artifacts/gems with `make clean`.
 
-3. Run the acceptance tests
+* Docker.
+  Note that Docker is used to run tests that require a Linux container/VM - if you only need to run simple rspec/doc tests, this shouldn't be necessary.
+  If you are developing on a Linux machine with a working Docker instance, this should be sufficient.
+  On OS X, just use the official [Docker installation method](https://docs.docker.com/engine/installation/mac/) to get a working `docker` setup.
+  Confirm that you can communicate with the Docker hypervisor with `docker version`.
 
-These instructions are for Ubuntu 14.04
+## Testing
 
-* install docker 0.11.1 
- * wget https://get.docker.io/ubuntu/pool/main/l/lxc-docker/lxc-docker_0.11.1_amd64.deb
- * wget https://get.docker.io/ubuntu/pool/main/l/lxc-docker-0.11.1/lxc-docker-0.11.1_0.11.1_amd64.deb
- * sudo dpkg -i lxc-docker_0.11.1_amd64.deb lxc-docker-0.11.1_0.11.1_amd64.deb
- * sudo usermod -a -G docker $USER
-* export RS_SET='ubuntu-server-1404-x64' # see spec/acceptance/nodesets for more
-* export VM_PUPPET_VERSION='3.7.0'
-* wget https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.1.0.deb
-* wget https://forgeapi.puppetlabs.com/v3/files/puppetlabs-stdlib-3.2.0.tar.gz
-* wget https://forgeapi.puppetlabs.com/v3/files/puppetlabs-apt-1.4.2.tar.gz
-* export files_dir=$(pwd)
-* bundle install
-* bundle exec rspec --require ci/reporter/rspec --format CI::Reporter::RSpecFormatter spec/acceptance/*_spec.rb
+Running through the tests on your own machine can get ahead of any problems others (or Jenkins) may run into.
+
+First, run the rspec tests and ensure they complete without errors with your changes. These are lightweight tests.
+
+    make test-rspec
+
+Next, run the more thorough acceptance tests.
+By default, the test will run against a Debian 8 Docker image - other available hosts can be found in `spec/acceptance/nodesets`.
+For example, to run the acceptance tests against CentOS 6, run the following:
+
+    DISTRO=centos-6-x64 make test-acceptance
+
+The final output line will tell you which, if any, tests failed.
 
-```
-    Hypervisor for ubuntu-14-04 is docker
-    Beaker::Hypervisor, found some docker boxes to create
-    Provisioning docker
-    provisioning ubuntu-14-04
-    ...
-    Finished in 18 minutes 6 seconds
-    224 examples, 0 failures, 3 pending
-```
+## Opening Pull Requests
+
+In summary, to open a new PR:
 
-4. Rebase your changes
-Update your local repository with the most recent code from the main this puppet module repository, and rebase your branch on top of the latest master branch. We prefer your changes to be squashed into a single commit.
+* Sign the Contributor License Agreement
+* Run the tests to confirm everything works as expected
+* Rebase your changes.
+  Update your local repository with the most recent code from this puppet module repository, and rebase your branch on top of the latest master branch.
+* Submit a pull request
+  Push your local changes to your forked copy of the repository and submit a pull request.
+  In the pull request, describe what your changes do and mention the number of the issue where discussion has taken place, e.g. "Closes #123".
 
-5. Submit a pull request
-Push your local changes to your forked copy of the repository and submit a pull request. In the pull request, describe what your changes do and mention the number of the issue where discussion has taken place, eg “Closes #123″.
-
-Then sit back and wait. There will probably be discussion about the pull request and, if any changes are needed, we would love to work with you to get your pull request merged into this puppet module.
+Then sit back and wait!
+There will probably be discussion about the pull request and, if any changes are needed, we would love to work with you to get your pull request merged into this puppet module.
--- a/dev/provisioning/modules/elasticsearch/CONTRIBUTORS	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/CONTRIBUTORS	Wed Nov 09 15:05:41 2016 +0100
@@ -2,9 +2,11 @@
 reports, or in general have helped this puppet module along its way.
 
 Project Owner
-* Richard Pijnenburg (electrical)
+* Elastic (elastic)
 
 Contributors:
+Tyler Langlois (tylerjl)
+Richard Pijnenburg (electrical)
 Martin Seener (martinseener)
 Marcus Furlong (furlongm)
 Chris Boulton (chrisboulton)
@@ -18,3 +20,12 @@
 Bruce Morrison (brucem)
 deanmalmgren
 Matteo Sessa (msessa-cotd)
+Sebastian Reitenbach (buzzdeee)
+Toni Schmidbauer (tosmi)
+Dan Sajner (dansajner)
+Leo Antunes (costela)
+Philip Wigg (philipwigg)
+Ian Bissett (bisscuitt)
+Fabian M. Krack (onibox)
+Rhommel Lamas (rhoml)
+Jose Luis Ledesma (sp-joseluis-ledesma)
--- a/dev/provisioning/modules/elasticsearch/Gemfile	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/Gemfile	Wed Nov 09 15:05:41 2016 +0100
@@ -3,10 +3,13 @@
 puppetversion = ENV['PUPPET_VERSION'] || '~> 3.8.0'
 gem 'puppet', puppetversion, :require => false
 
-gem 'beaker'
-gem 'beaker-rspec'
+gem 'beaker', '~> 2.51'
+gem 'beaker-pe', '~> 0.12'
+gem 'beaker-rspec', '~> 5.0'
 gem 'metadata-json-lint'
-gem 'rspec-puppet', '2.2.0'
+gem 'rspec-puppet', '~> 2.4'
+gem 'specinfra', '~> 2.60'
+gem 'infrataster'
 
 gem 'pry'
 gem 'docker-api', '~> 1.0'
@@ -19,6 +22,8 @@
 gem 'puppetlabs_spec_helper'
 gem 'puppet-syntax'
 gem 'rspec-puppet-facts'
+gem 'rspec-puppet-utils'
+gem 'rspec-retry'
 gem 'webmock'
 
 # Extra Puppet-lint gems
--- a/dev/provisioning/modules/elasticsearch/LICENSE	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/LICENSE	Wed Nov 09 15:05:41 2016 +0100
@@ -1,4 +1,4 @@
-Copyright (c) 2012-2015 Elasticsearch <http://www.elastic.co>
+Copyright (c) 2012-2016 Elasticsearch <http://www.elastic.co>
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/Makefile	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,60 @@
+DISTRO ?= ubuntu-server-1404-x64
+PE ?= false
+STRICT_VARIABLES ?= yes
+
+ifeq ($(PE), true)
+	PE_VER ?= 3.8.6
+	BEAKER_PE_VER := $(PE_VER)
+	BEAKER_IS_PE := $(PE)
+	export BEAKER_PE_VER
+	export BEAKER_IS_PE
+endif
+
+.DEFAULT_GOAL := .vendor
+
+.vendor: Gemfile
+	bundle update || true
+	bundle install --path .vendor
+	touch .vendor
+
+.PHONY: clean
+clean:
+	bundle exec rake spec_clean
+	bundle exec rake artifacts:clean
+	rm -rf .bundle .vendor
+
+.PHONY: clean-logs
+clean-logs:
+	rm -rf log
+
+.PHONY: release
+release: clean-logs
+	bundle exec puppet module build
+
+.PHONY: test-intake
+test-intake: test-docs test-rspec
+
+.PHONY: test-acceptance
+test-acceptance: .vendor
+	BEAKER_PE_DIR=spec/fixtures/artifacts \
+		BEAKER_set=$(DISTRO) \
+		bundle exec rake beaker:acceptance
+
+.PHONY: test-integration
+test-integration: .vendor
+	BEAKER_PE_DIR=spec/fixtures/artifacts \
+		BEAKER_PE_VER=$(PE_VER) \
+		BEAKER_IS_PE=$(PE) \
+		BEAKER_set=$(DISTRO) \
+		bundle exec rake beaker:integration
+
+.PHONY: test-docs
+test-docs: .vendor
+	bundle exec rake spec_docs
+
+.PHONY: test-rspec
+test-rspec: .vendor
+	bundle exec rake lint
+	bundle exec rake validate
+	STRICT_VARIABLES=$(STRICT_VARIABLES) \
+		bundle exec rake spec_unit
--- a/dev/provisioning/modules/elasticsearch/README.md	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/README.md	Wed Nov 09 15:05:41 2016 +0100
@@ -1,33 +1,32 @@
-#Elasticsearch Puppet module
-
-####Table of Contents
+# Elasticsearch Puppet Module
 
-1. [Overview](#overview)
-2. [Module description - What the module does and why it is useful](#module-description)
-3. [Setup - The basics of getting started with Elasticsearch](#setup)
+[![Build Status](https://travis-ci.org/elastic/puppet-elasticsearch.svg?branch=master)](https://travis-ci.org/elastic/puppet-elasticsearch)
+[![Puppet Forge endorsed](https://img.shields.io/puppetforge/e/elasticsearch/elasticsearch.svg)](https://forge.puppetlabs.com/elasticsearch/elasticsearch)
+[![Puppet Forge Version](https://img.shields.io/puppetforge/v/elasticsearch/elasticsearch.svg)](https://forge.puppetlabs.com/elasticsearch/elasticsearch)
+[![Puppet Forge Downloads](https://img.shields.io/puppetforge/dt/elasticsearch/elasticsearch.svg)](https://forge.puppetlabs.com/elasticsearch/elasticsearch)
+
+#### Table of Contents
+
+1. [Module description - What the module does and why it is useful](#module-description)
+2. [Setup - The basics of getting started with Elasticsearch](#setup)
   * [The module manages the following](#the-module-manages-the-following)
   * [Requirements](#requirements)
-4. [Usage - Configuration options and additional functionality](#usage)
-5. [Advanced features - Extra information on advanced usage](#advanced-features)
+3. [Usage - Configuration options and additional functionality](#usage)
+4. [Advanced features - Extra information on advanced usage](#advanced-features)
+5. [Reference - An under-the-hood peek at what the module is doing and how](#reference)
 6. [Limitations - OS compatibility, etc.](#limitations)
 7. [Development - Guide for contributing to the module](#development)
 8. [Support - When you need help with this module](#support)
 
-
-
-##Overview
+## Module description
 
-This module manages Elasticsearch (http://www.elasticsearch.org/overview/elasticsearch/)
-
-##Module description
+This module sets up [Elasticsearch](https://www.elastic.co/overview/elasticsearch/) instances with additional resources for plugins, templates, and more.
 
-The elasticsearch module sets up Elasticsearch instances and can manage plugins and templates.
-
-This module has been tested against all versions of ES 1.x and 2.x
+This module has been tested against all versions of ES 1.x and 2.x.
 
-##Setup
+## Setup
 
-###The module manages the following
+### The module manages the following
 
 * Elasticsearch repository files.
 * Elasticsearch package.
@@ -35,25 +34,46 @@
 * Elasticsearch service.
 * Elasticsearch plugins.
 * Elasticsearch templates.
+* Elasticsearch Shield users, roles, and certificates.
 
-###Requirements
+### Requirements
 
 * The [stdlib](https://forge.puppetlabs.com/puppetlabs/stdlib) Puppet library.
 * [ceritsc/yum](https://forge.puppetlabs.com/ceritsc/yum) For yum version lock.
 * [richardc/datacat](https://forge.puppetlabs.com/richardc/datacat)
 * [Augeas](http://augeas.net/)
+* [puppetlabs-java](https://forge.puppetlabs.com/puppetlabs/java) for Java installation (optional).
+* [puppetlabs-java_ks](https://forge.puppetlabs.com/puppetlabs/java_ks) for Shield certificate management (optional).
 
 #### Repository management
-When using the repository management you will need the following dependency modules:
+
+When using the repository management, the following module dependencies are required:
 
 * Debian/Ubuntu: [Puppetlabs/apt](http://forge.puppetlabs.com/puppetlabs/apt)
-* OpenSuSE: [Darin/zypprepo](https://forge.puppetlabs.com/darin/zypprepo)
+* OpenSuSE/SLES: [Darin/zypprepo](https://forge.puppetlabs.com/darin/zypprepo)
 
-##Usage
+### Beginning with Elasticsearch
+
+Declare the top-level `elasticsearch` class (managing repositories) and set up an instance:
 
-###Main class
+```puppet
+class { 'elasticsearch':
+  java_install => true,
+  manage_repo  => true,
+  repo_version => '2.x',
+}
 
-####Install a specific version
+elasticsearch::instance { 'es-01': }
+```
+
+## Usage
+
+### Main class
+
+Most top-level parameters in the `elasticsearch` class are set to reasonable defaults.
+The following are some parameters that may be useful to override:
+
+#### Install a specific version
 
 ```puppet
 class { 'elasticsearch':
@@ -63,72 +83,114 @@
 
 Note: This will only work when using the repository.
 
-####Automatic upgrade of the software ( default set to false )
+#### Automatically restarting the service (default set to false)
+
+By default, the module will not restart Elasticsearch when the configuration file, package, or plugins change.
+This can be overridden globally with the following option:
+
+```puppet
+class { 'elasticsearch':
+  restart_on_change => true
+}
+```
+
+Or controlled with the more granular options: `restart_config_change`, `restart_package_change`, and `restart_plugin_change`.
+
+#### Automatic upgrades (default set to false)
+
 ```puppet
 class { 'elasticsearch':
   autoupgrade => true
 }
 ```
 
-####Removal/decommissioning
+#### Removal/Decommissioning
+
 ```puppet
 class { 'elasticsearch':
   ensure => 'absent'
 }
 ```
 
-####Install everything but disable service(s) afterwards
+#### Install everything but disable service(s) afterwards
+
 ```puppet
 class { 'elasticsearch':
   status => 'disabled'
 }
 ```
 
-###Instances
+#### API Settings
+
+Some resources, such as `elasticsearch::template`, require communicating with the Elasticsearch REST API.
+By default, these API settings are set to:
+
+```puppet
+class { 'elasticsearch':
+  api_protocol            => 'http',
+  api_host                => 'localhost',
+  api_port                => 9200,
+  api_timeout             => 10,
+  api_basic_auth_username => undef,
+  api_basic_auth_password => undef,
+  api_ca_file             => undef,
+  api_ca_path             => undef,
+  validate_tls            => true,
+}
+```
+
+Each of these can be set at the top-level `elasticsearch` class and inherited for each resource or overridden on a per-resource basis.
+
+### Instances
 
 This module works with the concept of instances. For service to start you need to specify at least one instance.
 
-####Quick setup
+#### Quick setup
+
 ```puppet
 elasticsearch::instance { 'es-01': }
 ```
 
 This will set up its own data directory and set the node name to `$hostname-$instance_name`.
 
-####Advanced options
+#### Advanced options
 
 Instance-specific options can be given:
 
 ```puppet
 elasticsearch::instance { 'es-01':
-  config => { },        # Configuration hash
+  config        => { }, # Configuration hash
   init_defaults => { }, # Init defaults hash
-  datadir => [ ],       # Data directory
+  datadir       => [ ], # Data directory
 }
 ```
 
-See [Advanced features](#advanced-features) for more information
+See [Advanced features](#advanced-features) for more information.
 
-###Plug-ins
+### Plugins
 
-Install [a variety of plugins](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-plugins.html#known-plugins). Note that `module_dir` is where the plugin will install itself to and must match that published by the plugin author; it is not where you would like to install it yourself.
+This module can help manage [a variety of plugins](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/modules-plugins.html#known-plugins).
+Note that `module_dir` is where the plugin will install itself to and must match that published by the plugin author; it is not where you would like to install it yourself.
 
-####From official repository
+#### From an official repository
+
 ```puppet
-elasticsearch::plugin{'lmenezes/elasticsearch-kopf':
-  instances  => 'instance_name'
+elasticsearch::plugin { 'lmenezes/elasticsearch-kopf':
+  instances => 'instance_name'
 }
 ```
-####From custom url
+
+#### From a custom url
+
 ```puppet
-elasticsearch::plugin{ 'jetty':
+elasticsearch::plugin { 'jetty':
   url        => 'https://oss-es-plugins.s3.amazonaws.com/elasticsearch-jetty/elasticsearch-jetty-1.2.1.zip',
   instances  => 'instance_name'
 }
 ```
 
+#### Using a proxy
 
-####Using a proxy
 You can also use a proxy if required by setting the `proxy_host` and `proxy_port` options:
 ```puppet
 elasticsearch::plugin { 'lmenezes/elasticsearch-kopf':
@@ -138,36 +200,38 @@
 }
 ```
 
-#####Plugin name could be:
-* `elasticsearch/plugin/version` for official elasticsearch plugins (download from download.elasticsearch.org)
-* `groupId/artifactId/version`   for community plugins (download from maven central or oss sonatype)
-* `username/repository`          for site plugins (download from github master)
+Proxies that require usernames and passwords are similarly supported with the `proxy_username` and `proxy_password` parameters.
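+
+For example, a sketch with placeholder proxy host and credentials:
+
+```puppet
+elasticsearch::plugin { 'lmenezes/elasticsearch-kopf':
+  instances      => 'instance_name',
+  proxy_host     => 'proxy.host',
+  proxy_port     => 3128,
+  proxy_username => 'proxyuser',
+  proxy_password => 'proxypassword',
+}
+```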
+
+Plugin name formats that are supported include:
 
-####Upgrading plugins
+* `elasticsearch/plugin/version` (for official elasticsearch plugins downloaded from download.elastic.co)
+* `groupId/artifactId/version` (for community plugins downloaded from maven central or OSS Sonatype)
+* `username/repository` (for site plugins downloaded from github master)
+
+#### Upgrading plugins
+
 When you specify a certain plugin version, you can upgrade that plugin by specifying the new version.
 
 ```puppet
-elasticsearch::plugin { 'elasticsearch/elasticsearch-cloud-aws/2.1.1':
-}
+elasticsearch::plugin { 'elasticsearch/elasticsearch-cloud-aws/2.1.1': }
 ```
 
 And to upgrade, you would simply change it to
 
 ```puppet
-elasticsearch::plugin { 'elasticsearch/elasticsearch-cloud-aws/2.4.1':
-}
+elasticsearch::plugin { 'elasticsearch/elasticsearch-cloud-aws/2.4.1': }
 ```
 
 Please note that this does not work when you specify 'latest' as a version number.
 
-####ES 2.x official plugins
+#### ES 2.x official plugins
 For the Elasticsearch commercial plugins, you can refer to them by their simple name.
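+
+For example, a sketch installing the Shield plugin on an ES 2.x instance by its simple name:
+
+```puppet
+elasticsearch::plugin { 'shield':
+  instances => 'instance_name',
+}
+```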
 
-See the [Plugin installation](https://www.elastic.co/guide/en/elasticsearch/plugins/current/installation.html) for more details.
+See [Plugin installation](https://www.elastic.co/guide/en/elasticsearch/plugins/current/installation.html) for more details.
 
-###Scripts
+### Scripts
 
-Install [scripts](http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html) to be used by Elasticsearch.
+Installs [scripts](http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html) to be used by Elasticsearch.
 These scripts are shared across all defined instances on the same host.
 
 ```puppet
@@ -177,7 +241,25 @@
 }
 ```
 
-###Templates
+### Templates
+
+By default templates use the top-level `elasticsearch::api_*` settings to communicate with Elasticsearch.
+The following is an example of how to override these settings:
+
+```puppet
+elasticsearch::template { 'templatename':
+  api_protocol            => 'https',
+  api_host                => $::ipaddress,
+  api_port                => 9201,
+  api_timeout             => 60,
+  api_basic_auth_username => 'admin',
+  api_basic_auth_password => 'adminpassword',
+  api_ca_file             => '/etc/ssl/certs',
+  api_ca_path             => '/etc/pki/certs',
+  validate_tls            => false,
+  source                  => 'puppet:///path/to/template.json',
+}
+```
 
 #### Add a new template using a file
 
@@ -185,7 +267,7 @@
 
 ```puppet
 elasticsearch::template { 'templatename':
-  file => 'puppet:///path/to/template.json'
+  source => 'puppet:///path/to/template.json',
 }
 ```
 
@@ -195,6 +277,19 @@
 
 ```puppet
 elasticsearch::template { 'templatename':
+  content => {
+    'template' => "*",
+    'settings' => {
+      'number_of_replicas' => 0
+    }
+  }
+}
+```
+
+Plain JSON strings are also supported.
+
+```puppet
+elasticsearch::template { 'templatename':
   content => '{"template":"*","settings":{"number_of_replicas":0}}'
 }
 ```
@@ -207,33 +302,23 @@
 }
 ```
 
-#### Host
-
-By default it uses localhost:9200 as host. you can change this with the `host` and `port` variables
-
-```puppet
-elasticsearch::template { 'templatename':
-  host => $::ipaddress,
-  port => 9200
-}
-```
-
-###Bindings / Clients
+### Bindings/Clients
 
 Install a variety of [clients/bindings](http://www.elasticsearch.org/guide/en/elasticsearch/client/community/current/clients.html):
 
-####Python
+#### Python
 
 ```puppet
 elasticsearch::python { 'rawes': }
 ```
 
-####Ruby
+#### Ruby
+
 ```puppet
 elasticsearch::ruby { 'elasticsearch': }
 ```
 
-###Connection Validator
+### Connection Validator
 
 This module offers a way to make sure an instance has been started and is up and running before
 proceeding with a dependent action. This is done via the `es_instance_conn_validator` resource.
@@ -252,14 +337,14 @@
 }
 ```
 
-###Package installation
+### Package installation
 
-There are 2 different ways of installing the software
+There are two different ways of installing Elasticsearch:
 
-####Repository
+#### Repository
 
 This option allows you to use an existing repository for package installation.
-The `repo_version` corresponds with the major version of Elasticsearch.
+The `repo_version` corresponds with the `major.minor` version of Elasticsearch for versions before 2.x.
 
 ```puppet
 class { 'elasticsearch':
@@ -268,36 +353,50 @@
 }
 ```
 
-####Remote package source
+For 2.x versions of Elasticsearch, use `repo_version => '2.x'`.
+
+```puppet
+class { 'elasticsearch':
+  manage_repo  => true,
+  repo_version => '2.x',
+}
+```
+
+#### Remote package source
 
 When a repository is not available or preferred, you can install the packages from a remote source:
 
-#####http/https/ftp
+##### http/https/ftp
+
 ```puppet
 class { 'elasticsearch':
-  package_url       => 'https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.2.deb',
-  proxy_url         => 'http://proxy.example.com:8080/',
+  package_url => 'https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.4.2.deb',
+  proxy_url   => 'http://proxy.example.com:8080/',
 }
 ```
-Setting proxy_url to a location will enable download using the provided proxy
-server. This parameter is also used by elasticsearch::plugin. Setting the port
-in the proxy_url is mandatory. proxy_url defaults to undef (proxy disabled). 
 
-#####puppet://
+Setting `proxy_url` to a location will enable download using the provided proxy
+server.
+This parameter is also used by `elasticsearch::plugin`.
+Setting the port in the `proxy_url` is mandatory.
+`proxy_url` defaults to `undef` (proxy disabled).
+
+##### puppet://
 ```puppet
 class { 'elasticsearch':
   package_url => 'puppet:///path/to/elasticsearch-1.4.2.deb'
 }
 ```
 
-#####Local file
+##### Local file
+
 ```puppet
 class { 'elasticsearch':
   package_url => 'file:/path/to/elasticsearch-1.4.2.deb'
 }
 ```
 
-###Java installation
+### Java installation
 
 Most sites will manage Java separately; however, this module can attempt to install Java as well.
 This is done by using the [puppetlabs-java](https://forge.puppetlabs.com/puppetlabs/java) module.
@@ -317,26 +416,27 @@
 }
 ```
 
-###Service management
+### Service management
 
 Currently only the basic SysV-style [init](https://en.wikipedia.org/wiki/Init) and [Systemd](http://en.wikipedia.org/wiki/Systemd) service providers are supported, but other systems could be implemented as necessary (pull requests welcome).
 
-
-####Defaults File
+#### Defaults File
 
-The *defaults* file (`/etc/defaults/elasticsearch` or `/etc/sysconfig/elasticsearch`) for the Elasticsearch service can be populated as necessary. This can either be a static file resource or a simple key value-style  [hash](http://docs.puppetlabs.com/puppet/latest/reference/lang_datatypes.html#hashes) object, the latter being particularly well-suited to pulling out of a data source such as Hiera.
+The *defaults* file (`/etc/defaults/elasticsearch` or `/etc/sysconfig/elasticsearch`) for the Elasticsearch service can be populated as necessary.
+This can either be a static file resource or a simple key-value [hash](http://docs.puppetlabs.com/puppet/latest/reference/lang_datatypes.html#hashes) object, the latter being particularly well-suited to pulling out of a data source such as Hiera.
 
-#####file source
+##### File source
+
 ```puppet
 class { 'elasticsearch':
   init_defaults_file => 'puppet:///path/to/defaults'
 }
 ```
-#####hash representation
+##### Hash representation
+
 ```puppet
 $config_hash = {
-  'ES_USER' => 'elasticsearch',
-  'ES_GROUP' => 'elasticsearch',
+  'ES_HEAP_SIZE' => '30g',
 }
 
 class { 'elasticsearch':
@@ -346,9 +446,169 @@
 
 Note: The `init_defaults` hash can be passed to the main class and to each instance.
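+
+For example, a minimal sketch passing `init_defaults` to a single instance (the heap size shown is illustrative):
+
+```puppet
+elasticsearch::instance { 'es-01':
+  init_defaults => {
+    'ES_HEAP_SIZE' => '4g',
+  },
+}
+```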
 
-##Advanced features
+## Advanced features
+
+### Shield
+
+[Shield](https://www.elastic.co/products/shield) users, roles, and certificates can be managed by this module.
+
+**Note**: If you are planning to use these features, it is *highly recommended* you read the following documentation to understand the caveats and extent of the resources available to you.
+
+#### Getting Started
+
+Although this module can handle several types of Shield resources, you are expected to manage the plugin installation and versions for your deployment.
+For example, the following manifest will install Elasticsearch with a single instance running Shield:
+
+```puppet
+class { 'elasticsearch':
+  java_install => true,
+  manage_repo  => true,
+  repo_version => '1.7',
+}
+
+elasticsearch::instance { 'es-01': }
+
+Elasticsearch::Plugin { instances => ['es-01'], }
+elasticsearch::plugin { 'elasticsearch/license/latest': }
+elasticsearch::plugin { 'elasticsearch/shield/latest': }
+```
+
+The following examples will assume the preceding resources are part of your puppet manifest.
+
+#### Roles
+
+Roles in the `esusers` realm can be managed using the `elasticsearch::shield::role` type.
+For example, to create a role called `myrole`, you could use the following resource:
+
+```puppet
+elasticsearch::shield::role { 'myrole':
+  privileges => {
+    'cluster' => 'monitor',
+    'indices' => {
+      '*' => 'read'
+    }
+  }
+}
+```
+
+This role would grant users access to cluster monitoring and read access to all indices.
+See the [Shield documentation](https://www.elastic.co/guide/en/shield/index.html) for your version to determine what `privileges` to use and how to format them (the Puppet hash representation is simply translated into YAML).
+
+**Note**: The Puppet provider for `esusers` has fine-grained control over the `roles.yml` file and thus will leave the default roles Shield installs in place.
+If you would like to explicitly purge the default roles (leaving only roles managed by puppet), you can do so by including the following in your manifest:
+
+```puppet
+resources { 'elasticsearch_shield_role':
+  purge => true,
+}
+```
+
+##### Mappings
+
+Associating mappings with a role is done by passing an array of strings to the `mappings` parameter of the `elasticsearch::shield::role` type.
+For example, to define a role with mappings using Shield >= 2.3.x style role definitions:
+
+```puppet
+elasticsearch::shield::role { 'logstash':
+  mappings   => [
+    'cn=group,ou=devteam',
+  ],
+  privileges => {
+    'cluster' => 'manage_index_templates',
+    'indices' => [{
+      'names'      => ['logstash-*'],
+      'privileges' => [
+        'write',
+        'delete',
+        'create_index',
+      ],
+    }],
+  },
+}
+```
 
-###Package version pinning
+**Note**: Observe the brackets around `indices` in the preceding role definition, which is an array of hashes per the format used in Shield 2.3.x. Follow the documentation to determine the correct formatting for your version of Shield.
+
+If you'd like to keep the mappings file purged of entries not under Puppet's control, you should use the following `resources` declaration because mappings are a separate low-level type:
+
+```puppet
+resources { 'elasticsearch_shield_role_mapping':
+  purge => true,
+}
+```
+
+#### Users
+
+Users can be managed using the `elasticsearch::shield::user` type.
+For example, to create a user `myuser` with membership in `myrole`:
+
+```puppet
+elasticsearch::shield::user { 'myuser':
+  password => 'mypassword',
+  roles    => ['myrole'],
+}
+```
+
+The `password` parameter will also accept password hashes generated from the `esusers` utility and will ensure the password is kept in sync with the Shield `users` file for all Elasticsearch instances.
+
+```puppet
+elasticsearch::shield::user { 'myuser':
+  password => '$2a$10$IZMnq6DF4DtQ9c4sVovgDubCbdeH62XncmcyD1sZ4WClzFuAdqspy',
+  roles    => ['myrole'],
+}
+```
+
+**Note**: When using the `esusers` provider (the default for plaintext passwords), Puppet has no way to determine whether the given password is in sync with the password hashed by Shield.
+In order to work around this, the `elasticsearch::shield::user` resource has been designed to accept refresh events in order to update password values.
+This is not ideal, but allows you to instruct the resource to change the password when needed.
+For example, to update the aforementioned user's password, you could include the following in your manifest:
+
+```puppet
+notify { 'update password': } ~>
+elasticsearch::shield::user { 'myuser':
+  password => 'mynewpassword',
+  roles    => ['myrole'],
+}
+```
+
+#### Certificates
+
+SSL/TLS can be enabled by providing an `elasticsearch::instance` type with paths to the certificate and private key files, and a password for the keystore.
+
+```puppet
+elasticsearch::instance { 'es-01':
+  ssl                  => true,
+  ca_certificate       => '/path/to/ca.pem',
+  certificate          => '/path/to/cert.pem',
+  private_key          => '/path/to/key.pem',
+  keystore_password    => 'keystorepassword',
+}
+```
+
+**Note**: Setting up a proper CA and certificate infrastructure is outside the scope of this documentation; see the aforementioned Shield guide for more information regarding the generation of these certificate files.
+
+The module will set up a keystore file for the node to use and set the relevant options in `elasticsearch.yml` to enable TLS/SSL using the certificates and key provided.
+
+#### System Keys
+
+Shield system keys can be passed to the module, where they will be placed into individual instance configuration directories.
+This can be set at the `elasticsearch` class and inherited across all instances:
+
+```puppet
+class { 'elasticsearch':
+  system_key => 'puppet:///path/to/key',
+}
+```
+
+Or set on a per-instance basis:
+
+```puppet
+elasticsearch::instance { 'es-01':
+  system_key => '/local/path/to/key',
+}
+```
+
+### Package version pinning
 
 The module supports pinning the package version to avoid accidental upgrades that are not done by Puppet.
 To enable this feature:
@@ -362,32 +622,31 @@
 
 In this example we pin the package version to 1.5.2.
 
-
-###Data directories
+### Data directories
 
 There are 4 different ways of setting data directories for Elasticsearch.
 In every case the required configuration options are placed in the `elasticsearch.yml` file.
 
-####Default
+#### Default
 By default we use:
 
-`/usr/share/elasticsearch/data/$instance_name`
+    /usr/share/elasticsearch/data/$instance_name
 
 This provides a data directory per instance.
 
-
-####Single global data directory
+#### Single global data directory
 
 ```puppet
 class { 'elasticsearch':
   datadir => '/var/lib/elasticsearch-data'
 }
 ```
+
 Creates the following for each instance:
 
-`/var/lib/elasticsearch-data/$instance_name`
+    /var/lib/elasticsearch-data/$instance_name
 
-####Multiple Global data directories
+#### Multiple Global data directories
 
 ```puppet
 class { 'elasticsearch':
@@ -397,10 +656,9 @@
 Creates the following for each instance:
 `/var/lib/es-data1/$instance_name`
 and
-`/var/lib/es-data2/$instance_name`
+`/var/lib/es-data2/$instance_name`.
 
-
-####Single instance data directory
+#### Single instance data directory
 
 ```puppet
 class { 'elasticsearch': }
@@ -409,10 +667,12 @@
   datadir => '/var/lib/es-data-es01'
 }
 ```
+
 Creates the following for this instance:
-`/var/lib/es-data-es01`
 
-####Multiple instance data directories
+    /var/lib/es-data-es01
+
+#### Multiple instance data directories
 
 ```puppet
 class { 'elasticsearch': }
@@ -421,13 +681,14 @@
   datadir => ['/var/lib/es-data1-es01', '/var/lib/es-data2-es01']
 }
 ```
+
 Creates the following for this instance:
 `/var/lib/es-data1-es01`
 and
-`/var/lib/es-data2-es01`
+`/var/lib/es-data2-es01`.
 
 
-###Main and instance configurations
+### Main and instance configurations
 
 The `config` option in both the main class and the instances can be configured to work together.
 
@@ -446,7 +707,6 @@
 elasticsearch::instance { 'es-02':
   config => { 'node.name' => 'nodename2' }
 }
-
 ```
 
 This example merges the `cluster.name` together with the `node.name` option.
@@ -471,13 +731,14 @@
 
 This will set the cluster name to `otherclustername` for the instance `es-01` but will keep it as `clustername` for instance `es-02`.
 
-####Configuration writeup
+#### Configuration writeup
 
 The `config` hash can be written in 2 different ways:
 
 ##### Full hash writeup
 
 Instead of writing the full hash representation:
+
 ```puppet
 class { 'elasticsearch':
   config                 => {
@@ -494,7 +755,9 @@
   }
 }
 ```
+
 ##### Short hash writeup
+
 ```puppet
 class { 'elasticsearch':
   config => {
@@ -506,8 +769,7 @@
 }
 ```
 
-
-##Limitations
+## Limitations
 
 This module has been built on and tested against Puppet 3.2 and higher.
 
@@ -515,20 +777,22 @@
 
 * Debian 6/7/8
 * CentOS 6/7
+* OracleLinux 6/7
 * Ubuntu 12.04, 14.04
 * OpenSuSE 13.x
+* SLES 12
 
 Other distros that have been reported to work:
 
 * RHEL 6
-* OracleLinux 6
 * Scientific 6
 
 Testing on other platforms has been light and cannot be guaranteed.
 
-##Development
+## Development
 
+Please see the [CONTRIBUTING.md](CONTRIBUTING.md) file for instructions regarding development environments and testing.
 
-##Support
+## Support
 
 Need help? Join us in [#elasticsearch](https://webchat.freenode.net?channels=%23elasticsearch) on Freenode IRC or on the [discussion forum](https://discuss.elastic.co/).
--- a/dev/provisioning/modules/elasticsearch/Rakefile	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/Rakefile	Wed Nov 09 15:05:41 2016 +0100
@@ -1,28 +1,176 @@
 require 'rubygems'
 require 'puppetlabs_spec_helper/rake_tasks'
+require 'net/http'
+require 'uri'
+require 'fileutils'
+require 'rspec/core/rake_task'
+require 'puppet-doc-lint/rake_task'
+
+module TempFixForRakeLastComment
+  def last_comment
+    last_description
+  end
+end
+Rake::Application.send :include, TempFixForRakeLastComment
 
 exclude_paths = [
-  "pkg/**/*",
-  "vendor/**/*",
-  "spec/**/*",
+  'pkg/**/*',
+  'vendor/**/*',
+  'spec/**/*'
 ]
 
-require 'puppet-doc-lint/rake_task'
-PuppetDocLint.configuration.ignore_paths = exclude_paths
-
 require 'puppet-lint/tasks/puppet-lint'
 require 'puppet-syntax/tasks/puppet-syntax'
 
 PuppetSyntax.exclude_paths = exclude_paths
 PuppetSyntax.future_parser = true if ENV['FUTURE_PARSER'] == 'true'
 
-disable_checks = [
+[
   '80chars',
   'class_inherits_from_params_class',
   'class_parameter_defaults',
   'documentation',
   'single_quote_string_with_variables'
-].each { |check| PuppetLint.configuration.send("disable_#{check}") }
+].each do |check|
+  PuppetLint.configuration.send("disable_#{check}")
+end
 
 PuppetLint.configuration.ignore_paths = exclude_paths
 PuppetLint.configuration.log_format = "%{path}:%{linenumber}:%{check}:%{KIND}:%{message}"
+
+desc 'Run documentation tests'
+task :spec_docs do
+  results = PuppetDocLint::Runner.new.run(
+    FileList['**/*.pp'].exclude(*exclude_paths)
+  )
+
+  results.each { |result| result.result_report }
+  if results.map(&:percent_documented).any?{|n| n < 100}
+    abort 'Issues found!'
+  end
+end
+
+RSpec::Core::RakeTask.new(:spec_verbose) do |t|
+  t.pattern = 'spec/{classes,defines,unit,functions,templates}/**/*_spec.rb'
+  t.rspec_opts = [
+    '--format documentation',
+    '--require "ci/reporter/rspec"',
+    '--format CI::Reporter::RSpecFormatter',
+    '--color'
+  ]
+end
+task :spec_verbose => :spec_prep
+
+RSpec::Core::RakeTask.new(:spec_unit) do |t|
+  t.pattern = 'spec/{classes,defines,unit,functions,templates}/**/*_spec.rb'
+  t.rspec_opts = ['--color']
+end
+task :spec_unit => :spec_prep
+
+task :beaker => [:spec_prep, 'artifacts:prep']
+
+desc 'Run integration tests'
+RSpec::Core::RakeTask.new('beaker:integration') do |c|
+  c.pattern = 'spec/integration/integration*.rb'
+end
+task 'beaker:integration' => [:spec_prep, 'artifacts:prep']
+
+desc 'Run acceptance tests'
+RSpec::Core::RakeTask.new('beaker:acceptance') do |c|
+  c.pattern = 'spec/acceptance/0*_spec.rb'
+end
+task 'beaker:acceptance' => [:spec_prep, 'artifacts:prep']
+
+
+if not ENV['BEAKER_IS_PE'].nil? and ENV['BEAKER_IS_PE'] == 'true'
+  task :beaker => 'artifacts:pe'
+  task 'beaker:integration' => 'artifacts:pe'
+  task 'beaker:acceptance' => 'artifacts:pe'
+end
+
+
+namespace :artifacts do
+  desc "Fetch artifacts for tests"
+  task :prep do
+    fetch_archives({
+    'https://github.com/lmenezes/elasticsearch-kopf/archive/v2.1.1.zip' => 'elasticsearch-kopf.zip',
+    'https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.3.1.deb' => 'elasticsearch-1.3.1.deb',
+    'https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.1.0.deb' => 'elasticsearch-1.1.0.deb',
+    'https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.3.1.noarch.rpm' => 'elasticsearch-1.3.1.noarch.rpm',
+    'https://github.com/lukas-vlcek/bigdesk/zipball/v2.4.0' => 'elasticsearch-bigdesk.zip',
+  })
+  end
+
+  desc "Retrieve PE archives"
+  task :pe do
+    if not ENV['BEAKER_set'].nil?
+      case ENV['BEAKER_set']
+      when /centos-(\d)/
+        distro = 'el'
+        version = $1
+        arch = "x86_64"
+      when /(debian)-(\d)/
+        distro = $1
+        version = $2
+        arch = "amd64"
+      when /(sles)-(\d+)/
+        distro = $1
+        version = $2
+        arch = "x86_64"
+      when /(ubuntu)-server-(12|14)/
+        distro = $1
+        version = "#{$2}.04"
+        arch = "amd64"
+      else
+        puts "Could not find PE version for #{ENV['BEAKER_set']}"
+        return
+      end
+      pe_version = ENV['BEAKER_PE_VER']
+      file = "puppet-enterprise-#{pe_version}-#{distro}-#{version}-#{arch}.tar.gz"
+      fetch_archives({
+        "https://s3.amazonaws.com/pe-builds/released/#{pe_version}/#{file}" => file
+      })
+    else
+      puts "No nodeset set, skipping PE artifact retrieval"
+    end
+  end
+
+  desc "Purge fetched artifacts"
+  task :clean do
+    FileUtils.rm_rf(Dir.glob('spec/fixtures/artifacts/*'))
+  end
+end
+
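+# Download each archive URL into spec/fixtures/artifacts/, skipping files that are already present and intact.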
+def fetch_archives archives
+  archives.each do |url, fp|
+    fp.replace "spec/fixtures/artifacts/#{fp}"
+    if File.exists? fp
+      if fp.end_with? 'tar.gz' and \
+          not system("tar -tzf #{fp} &>/dev/null")
+        puts "Archive #{fp} corrupt, re-fetching..."
+        File.delete fp
+      else
+        puts "Already retrieved intact archive #{fp}..."
+        next
+      end
+    end
+    get url, fp
+  end
+end
+
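+# Fetch a URL to file_path, following Location redirects until the final resource is reached.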
+def get url, file_path
+  puts "Fetching #{url}..."
+  found = false
+  until found
+    uri = URI::parse(url)
+    conn = Net::HTTP.new(uri.host, uri.port)
+    conn.use_ssl = true
+    res = conn.get(uri.path)
+    if res.header['location']
+      url = res.header['location']
+    else
+      found = true
+    end
+  end
+  File.open(file_path, 'w+') { |fh| fh.write res.body }
+end
--- a/dev/provisioning/modules/elasticsearch/lib/facter/es_facts.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/lib/facter/es_facts.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -19,23 +19,24 @@
     # only when the directory exists we need to process the stuff
     if File.directory?(dir_prefix)
 
-      Dir.foreach(dir_prefix) { |dir| 
+      Dir.foreach(dir_prefix) do |dir|
         next if dir == '.'
+
         if File.exists?("#{dir_prefix}/#{dir}/elasticsearch.yml")
           config_data = YAML.load_file("#{dir_prefix}/#{dir}/elasticsearch.yml")
-          unless config_data['http'].nil?
-            next if config_data['http']['enabled'] == 'false'
-            if config_data['http']['port'].nil?
-              port = "9200"
-            else
-              port = config_data['http']['port']
-            end
+
+          if not config_data['http.enabled'].nil? and \
+              config_data['http.enabled'] == 'false'
+            next
+          elsif not config_data['http.port'].nil?
+            port = config_data['http.port']
           else
-            port = "9200"
+            port = '9200'
           end
+
           ports << port
         end
-      }
+      end
 
       begin
         if ports.count > 0
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/feature/elasticsearch_shield_users_native.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,16 @@
+require 'puppet/util/feature'
+require 'puppet/util/package'
+
+shield_plugin_dir = '/usr/share/elasticsearch/plugins/shield'
+
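+# The native esusers feature is only available when an installed Shield jar is version 2.3 or newer.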
+Puppet.features.add(:elasticsearch_shield_users_native) {
+  File.exists? shield_plugin_dir and
+    Dir[shield_plugin_dir + '/*.jar'].map do |file|
+      File.basename(file, '.jar').split('-')
+    end.select do |parts|
+      parts.include? 'shield'
+    end.any? do |parts|
+      parts.last =~ /^[\d.]+$/ and
+        Puppet::Util::Package.versioncmp(parts.last, '2.3') >= 0
+    end
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/concat_merge.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,47 @@
+module Puppet::Parser::Functions
+  newfunction(
+    :concat_merge,
+    :type => :rvalue,
+    :doc => <<-'ENDHEREDOC') do |args|
+    Merges two or more hashes together concatenating duplicate keys
+    with array values and returns the resulting hash.
+
+    For example:
+
+        $hash1 = {'a' => [1]}
+        $hash2 = {'a' => [2]}
+        concat_merge($hash1, $hash2)
+        # The resulting hash is equivalent to:
+        # { 'a' => [1, 2] }
+
+    When there is a duplicate key that is not an array, the key in
+    the rightmost hash will "win."
+    ENDHEREDOC
+
+    if args.length < 2
+      raise Puppet::ParseError, ("concat_merge(): wrong number of arguments (#{args.length}; must be at least 2)")
+    end
+
+    concat_merge = Proc.new do |hash1,hash2|
+      hash1.merge(hash2) do |key,old_value,new_value|
+        if old_value.is_a?(Array) && new_value.is_a?(Array)
+          old_value + new_value
+        else
+          new_value
+        end
+      end
+    end
+
+    result = Hash.new
+    args.each do |arg|
+      next if arg.is_a? String and arg.empty? # empty string is synonym for puppet's undef
+      # If the argument is not a hash, raise an error.
+      unless arg.is_a?(Hash)
+        raise Puppet::ParseError, "concat_merge: unexpected argument type #{arg.class}, only expects hash arguments"
+      end
+
+      result = concat_merge.call(result, arg)
+    end
+    result
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/deep_implode.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,43 @@
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__),"..","..",".."))
+
+require 'puppet_x/elastic/deep_implode'
+
+module Puppet::Parser::Functions
+  newfunction(
+    :deep_implode,
+    :type => :rvalue,
+    :doc => <<-'ENDHEREDOC') do |args|
+    Recursively flattens all keys of a hash into a dot-notated
+    hash, deeply merging duplicate key values by natively combining
+    them and returns the resulting hash.
+
+    If that is confusing, look at the example below for more clarity.
+
+    For example:
+
+        $hash = {'top' => {'sub' => [1]}, 'top.sub' => [2] }
+        $flattened_hash = deep_implode($hash)
+        # The resulting hash is equivalent to:
+        # { 'top.sub' => [1, 2] }
+
+    When the function encounters array or hash values, they are
+    concatenated or merged, respectively.
+    When duplicate paths for a key are generated, the function will prefer
+    to retain keys with the longest root key.
+    ENDHEREDOC
+
+    if args.length != 1
+      raise Puppet::ParseError, ("deep_implode(): wrong number of arguments (#{args.length}; must be 1)")
+    end
+
+    arg = args[0]
+
+    unless arg.is_a? Hash
+      raise Puppet::ParseError, "deep_implode: unexpected argument type, only expects hashes"
+    end
+
+    return {} if arg.empty?
+
+    Puppet_X::Elastic::deep_implode arg
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/parser/functions/es_plugin_name.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,39 @@
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__),"..","..",".."))
+
+require 'puppet_x/elastic/plugin_name'
+
+module Puppet::Parser::Functions
+  newfunction(
+    :es_plugin_name,
+    :type => :rvalue,
+    :doc => <<-'ENDHEREDOC') do |args|
+    Given a string, return the best guess at what the directory name
+    will be for the given plugin. Any arguments past the first will
+    be fallbacks (using the same logic) should the first fail.
+
+    For example, all the following return values are "plug":
+
+        es_plugin_name('plug')
+        es_plugin_name('foo/plug')
+        es_plugin_name('foo/plug/1.0.0')
+        es_plugin_name('foo/elasticsearch-plug')
+        es_plugin_name('foo/es-plug/1.3.2')
+    ENDHEREDOC
+
+    if args.length < 1
+      raise Puppet::ParseError,
+        'wrong number of arguments, at least one value required'
+    end
+
+    ret = args.select do |arg|
+      arg.is_a? String and not arg.empty?
+    end.first
+
+    if ret
+      Puppet_X::Elastic::plugin_name ret
+    else
+      raise Puppet::Error,
+        'could not determine plugin name'
+    end
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elastic_plugin.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,226 @@
+require 'uri'
+require 'puppet_x/elastic/plugin_name'
+
+class Puppet::Provider::ElasticPlugin < Puppet::Provider
+
+  def homedir
+    case Facter.value('osfamily')
+    when 'OpenBSD'
+      '/usr/local/elasticsearch'
+    else
+      '/usr/share/elasticsearch'
+    end
+  end
+
+  def exists?
+    es_version
+    if !File.exists?(pluginfile)
+      debug "Plugin file #{pluginfile} does not exist"
+      return false
+    elsif File.exists?(pluginfile) && readpluginfile != pluginfile_content
+      debug "Got #{readpluginfile} Expected #{pluginfile_content}. Removing for reinstall"
+      self.destroy
+      return false
+    else
+      debug "Plugin exists"
+      return true
+    end
+  end
+
+  def pluginfile_content
+    return @resource[:name] if is1x?
+
+    if @resource[:name].split("/").count == 1 # Official plugin
+      version = plugin_version(@resource[:name])
+      return "#{@resource[:name]}/#{version}"
+    else
+      return @resource[:name]
+    end
+  end
+
+  def pluginfile
+    if @resource[:plugin_path]
+      File.join(
+        @resource[:plugin_dir],
+        @resource[:plugin_path],
+        '.name'
+      )
+    else
+      File.join(
+        @resource[:plugin_dir],
+        Puppet_X::Elastic::plugin_name(@resource[:name]),
+        '.name'
+      )
+    end
+  end
+
+  def writepluginfile
+    File.open(pluginfile, 'w') do |file|
+      file.write pluginfile_content
+    end
+  end
+
+  def readpluginfile
+    f = File.open(pluginfile)
+    f.readline
+  end
+
+  def install1x
+    if !@resource[:url].nil?
+      [
+        Puppet_X::Elastic::plugin_name(@resource[:name]),
+        '--url',
+        @resource[:url]
+      ]
+    elsif !@resource[:source].nil?
+      [
+        Puppet_X::Elastic::plugin_name(@resource[:name]),
+        '--url',
+        "file://#{@resource[:source]}"
+      ]
+    else
+      [
+        @resource[:name]
+      ]
+    end
+  end
+
+  def install2x
+    if !@resource[:url].nil?
+      [
+        @resource[:url]
+      ]
+    elsif !@resource[:source].nil?
+      [
+        "file://#{@resource[:source]}"
+      ]
+    else
+      [
+        @resource[:name]
+      ]
+    end
+  end
+
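+  # Build the Java proxy system property flags (e.g. -Dhttp.proxyHost) from a proxy URL.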
+  def proxy_args url
+    parsed = URI(url)
+    ['http', 'https'].map do |schema|
+      [:host, :port, :user, :password].map do |param|
+        option = parsed.send(param)
+        if not option.nil?
+          "-D#{schema}.proxy#{param.to_s.capitalize}=#{option}"
+        end
+      end
+    end.flatten.compact
+  end
+
+  def create
+    es_version
+    commands = []
+    if is2x?
+      commands << "-Des.path.conf=#{homedir}"
+      if @resource[:proxy]
+        commands += proxy_args(@resource[:proxy])
+      end
+    end
+    commands << 'install'
+    commands << '--batch' if batch_capable?
+    commands += is1x? ? install1x : install2x
+    debug("Commands: #{commands.inspect}")
+
+    retry_count = 3
+    retry_times = 0
+    begin
+      with_environment do
+        plugin(commands)
+      end
+    rescue Puppet::ExecutionFailure => e
+      retry_times += 1
+      debug("Failed to install plugin. Retrying... #{retry_times} of #{retry_count}")
+      sleep 2
+      retry if retry_times < retry_count
+      raise "Failed to install plugin. Received error: #{e.inspect}"
+    end
+
+    writepluginfile
+  end
+
+  def destroy
+    with_environment do
+      plugin(['remove', @resource[:name]])
+    end
+  end
+
+  def es_version
+    return @es_version if @es_version
+    es_save = ENV['ES_INCLUDE']
+    java_save = ENV['JAVA_HOME']
+
+    os = Facter.value('osfamily')
+    if os == 'OpenBSD'
+      ENV['JAVA_HOME'] = javapathhelper('-h', 'elasticsearch').chomp
+      ENV['ES_INCLUDE'] = '/etc/elasticsearch/elasticsearch.in.sh'
+    end
+    begin
+      version = es('-version')
+    rescue
+      ENV['ES_INCLUDE'] = es_save if es_save
+      ENV['JAVA_HOME'] = java_save if java_save
+      raise "Unknown ES version. Got #{version.inspect}"
+    ensure
+      ENV['ES_INCLUDE'] = es_save if es_save
+      ENV['JAVA_HOME'] = java_save if java_save
+      @es_version = version.scan(/\d+\.\d+\.\d+(?:\-\S+)?/).first
+      debug "Found ES version #{@es_version}"
+    end
+  end
+
+  def is1x?
+    Puppet::Util::Package.versioncmp(@es_version, '2.0.0') < 0
+  end
+
+  def is2x?
+    (Puppet::Util::Package.versioncmp(@es_version, '2.0.0') >= 0) && (Puppet::Util::Package.versioncmp(@es_version, '3.0.0') < 0)
+  end
+
+  def batch_capable?
+    Puppet::Util::Package.versioncmp(@es_version, '2.2.0') >= 0
+  end
+
+  def plugin_version(plugin_name)
+    _vendor, _plugin, version = plugin_name.split('/')
+    return @es_version if is2x? && version.nil?
+    return version.scan(/\d+\.\d+\.\d+(?:\-\S+)?/).first unless version.nil?
+    return false
+  end
+
+  # Run a command wrapped in necessary env vars
+  def with_environment(&block)
+    env_vars = {
+      'ES_JAVA_OPTS' => [],
+    }
+    saved_vars = {}
+
+    if not is2x?
+      env_vars['ES_JAVA_OPTS'] << "-Des.path.conf=#{homedir}"
+      if @resource[:proxy]
+        env_vars['ES_JAVA_OPTS'] += proxy_args(@resource[:proxy])
+      end
+    end
+
+    env_vars['ES_JAVA_OPTS'] = env_vars['ES_JAVA_OPTS'].join(' ')
+
+    env_vars.each do |env_var, value|
+      saved_vars[env_var] = ENV[env_var]
+      ENV[env_var] = value
+    end
+
+    ret = block.call
+
+    saved_vars.each do |env_var, value|
+      ENV[env_var] = value
+    end
+
+    return ret
+  end
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elastic_yaml.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,54 @@
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__),"..","..",".."))
+require 'puppet/provider/parsedfile'
+require 'puppet/util/package'
+require 'puppet_x/elastic/hash'
+
+class Puppet::Provider::ElasticYaml < Puppet::Provider::ParsedFile
+
+  class << self
+    attr_accessor :metadata
+  end
+
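+  # Parse YAML text into provider property hashes (:name, :ensure, and the class-level metadata property).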
+  def self.parse text
+    yaml = YAML.load text
+    if yaml
+      yaml.map do |key, metadata|
+        {
+          :name => key,
+          :ensure => :present,
+          @metadata => metadata
+        }
+      end
+    else
+      []
+    end
+  end
+
+  def self.to_file records
+    yaml = records.map do |record|
+      # Convert top-level symbols to strings
+      Hash[record.map { |k, v| [k.to_s, v] }]
+    end.inject({}) do |hash, record|
+      # Flatten array of hashes into single hash
+      hash.merge({ record['name'] => record.delete(@metadata.to_s) })
+    end.extend(Puppet_X::Elastic::SortedHash).to_yaml
+
+    # Puppet < 4 uses ZAML, which prepends spaces in to_yaml ಠ_ಠ
+    unless Puppet::Util::Package.versioncmp(Puppet.version, '4') >= 0
+      yaml.gsub!(/^\s{2}/, '')
+    end
+
+    yaml << "\n"
+  end
+
+  def self.skip_record? record
+    false
+  end
+
+  # This is ugly, but it's overridden in ParsedFile with abstract functionality
+  # we don't need for our simple provider class.
+  # This has been observed to break in Puppet version 3/4 switches.
+  def self.valid_attr?(klass, attr_name)
+    klass.is_a?(Class) ? klass.parameters.include?(attr_name) : true
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_plugin/elasticsearch_plugin.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,22 @@
+require 'puppet/provider/elastic_plugin'
+
+Puppet::Type.type(:elasticsearch_plugin).provide(
+  :elasticsearch_plugin,
+  :parent => Puppet::Provider::ElasticPlugin
+) do
+  desc <<-END
+    Post-5.x provider for Elasticsearch bin/elasticsearch-plugin
+    command operations.
+  END
+
+  case Facter.value('osfamily')
+  when 'OpenBSD'
+    commands :plugin => '/usr/local/elasticsearch/bin/elasticsearch-plugin'
+    commands :es => '/usr/local/elasticsearch/bin/elasticsearch'
+    commands :javapathhelper => '/usr/local/bin/javaPathHelper'
+  else
+    commands :plugin => '/usr/share/elasticsearch/bin/elasticsearch-plugin'
+    commands :es => '/usr/share/elasticsearch/bin/elasticsearch'
+  end
+
+end
--- a/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_plugin/plugin.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_plugin/plugin.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,131 +1,19 @@
-$LOAD_PATH.unshift(File.join(File.dirname(__FILE__),"..","..",".."))
-
-Puppet::Type.type(:elasticsearch_plugin).provide(:plugin) do
-  desc "A provider for the resource type `elasticsearch_plugin`,
-        which handles plugin installation"
-
-  commands :plugin => '/usr/share/elasticsearch/bin/plugin'
-  commands :es => '/usr/share/elasticsearch/bin/elasticsearch'
-
-  def exists?
-    es_version
-    if !File.exists?(pluginfile)
-      debug "Plugin file #{pluginfile} does not exist"
-      return false
-    elsif File.exists?(pluginfile) && readpluginfile != pluginfile_content
-      debug "Got #{readpluginfile} Expected #{pluginfile_content}. Removing for reinstall"
-      self.destroy
-      return false
-    else
-      debug "Plugin exists"
-      return true
-    end
-  end
-
-  def pluginfile_content
-    return @resource[:name] if is1x?
+require 'puppet/provider/elastic_plugin'
 
-    if @resource[:name].split("/").count == 1 # Official plugin
-      version = plugin_version(@resource[:name])
-      return "#{@resource[:name]}/#{version}"
-    else
-      return @resource[:name]
-    end
-  end
-
-  def pluginfile
-    File.join(@resource[:plugin_dir], plugin_name(@resource[:name]), '.name')
-  end
-
-  def writepluginfile
-    File.open(pluginfile, 'w') do |file|
-      file.write pluginfile_content
-    end
-  end
-
-  def readpluginfile
-    f = File.open(pluginfile)
-    f.readline
-  end
-
-  def install1x
-    if !@resource[:url].nil?
-      commands = [ plugin_name(@resource[:name]), '--url', @resource[:url] ]
-    elsif !@resource[:source].nil?
-      commands = [ plugin_name(@resource[:name]), '--url', "file://#{@resource[:source]}" ]
-    else
-      commands = [ @resource[:name] ]
-    end
-    commands
-  end
+Puppet::Type.type(:elasticsearch_plugin).provide(
+  :plugin,
+  :parent => Puppet::Provider::ElasticPlugin
+) do
+  desc 'Pre-5.x provider for Elasticsearch bin/plugin command operations.'
 
-  def install2x
-    if !@resource[:url].nil?
-      commands = [ @resource[:url] ]
-    elsif !@resource[:source].nil?
-      commands = [ "file://#{@resource[:source]}" ]
-    else
-      commands = [ @resource[:name] ]
-    end
-    commands
-  end
-
-  def create
-    es_version
-    commands = []
-    commands << @resource[:proxy_args].split(' ') if @resource[:proxy_args]
-    commands << 'install'
-    commands << install1x if is1x?
-    commands << install2x if is2x?
-    debug("Commands: #{commands.inspect}")
-    
-    plugin(commands)
-    writepluginfile
-  end
-
-  def destroy
-    plugin(['remove', @resource[:name]])
-  end
-
-  def es_version
-    return @es_version if @es_version
-    begin
-      version = es('-v') # ES 1.x
-    rescue
-      version = es('--version') # ES 2.x
-    rescue
-      raise "Unknown ES version. Got #{version.inspect}"
-    ensure
-      @es_version = version.scan(/\d+\.\d+\.\d+(?:\-\S+)?/).first
-      debug "Found ES version #{@es_version}"
-    end
-  end
-
-  def is1x?
-    Puppet::Util::Package.versioncmp(@es_version, '2.0.0') < 0
-  end
-
-  def is2x?
-    (Puppet::Util::Package.versioncmp(@es_version, '2.0.0') >= 0) && (Puppet::Util::Package.versioncmp(@es_version, '3.0.0') < 0)
-  end
-
-  def plugin_version(plugin_name)
-    vendor, plugin, version = plugin_name.split('/')
-    return @es_version if is2x? && version.nil?
-    return version.scan(/\d+\.\d+\.\d+(?:\-\S+)?/).first unless version.nil?
-    return false
-  end
-
-  def plugin_name(plugin_name)
-
-    vendor, plugin, version = plugin_name.split('/')
-
-    endname = vendor if plugin.nil? # If its a single name plugin like the ES 2.x official plugins
-    endname = plugin.gsub(/(elasticsearch-|es-)/, '') unless plugin.nil?
-
-    return endname.downcase if is2x?
-    return endname
-
+  case Facter.value('osfamily')
+  when 'OpenBSD'
+    commands :plugin => '/usr/local/elasticsearch/bin/plugin'
+    commands :es => '/usr/local/elasticsearch/bin/elasticsearch'
+    commands :javapathhelper => '/usr/local/bin/javaPathHelper'
+  else
+    commands :plugin => '/usr/share/elasticsearch/bin/plugin'
+    commands :es => '/usr/share/elasticsearch/bin/elasticsearch'
   end
 
 end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_role/parsed.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,17 @@
+require 'puppet/provider/elastic_yaml'
+
+case Facter.value('osfamily')
+when 'OpenBSD'
+  roles = '/usr/local/elasticsearch/shield/roles.yml'
+else
+  roles = '/usr/share/elasticsearch/shield/roles.yml'
+end
+
+Puppet::Type.type(:elasticsearch_shield_role).provide(
+  :parsed,
+  :parent => Puppet::Provider::ElasticYaml,
+  :default_target => roles,
+  :metadata => :privileges
+) do
+  desc "Provider for Shield role resources."
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_role_mapping/parsed.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,17 @@
+require 'puppet/provider/elastic_yaml'
+
+case Facter.value('osfamily')
+when 'OpenBSD'
+  mappings = '/usr/local/elasticsearch/shield/role_mapping.yml'
+else
+  mappings = '/usr/share/elasticsearch/shield/role_mapping.yml'
+end
+
+Puppet::Type.type(:elasticsearch_shield_role_mapping).provide(
+  :parsed,
+  :parent => Puppet::Provider::ElasticYaml,
+  :default_target => mappings,
+  :metadata => :mappings
+) do
+  desc "Provider for Shield role mappings."
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_user/esusers.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,100 @@
+Puppet::Type.type(:elasticsearch_shield_user).provide(:esusers) do
+  desc "Provider for Shield file (esusers) user resources."
+
+  mk_resource_methods
+
+  os = Facter.value('osfamily')
+  if os == 'OpenBSD'
+    @homedir = '/usr/local/elasticsearch'
+  else
+    @homedir = '/usr/share/elasticsearch'
+  end
+
+  commands :esusers => "#{@homedir}/bin/shield/esusers"
+  commands :es => "#{@homedir}/bin/elasticsearch"
+
+  def self.esusers_with_path args
+    args = [args] unless args.is_a? Array
+    esusers(["--default.path.conf=#{@homedir}"] + args)
+  end
+
+  def self.users
+    begin
+      output = esusers_with_path('list')
+    rescue Puppet::ExecutionFailure => e
+      debug("#users had an error: #{e.inspect}")
+      return nil
+    end
+
+    debug("Raw `esusers list` output: #{output}")
+    output.split("\n").select { |u|
+      # Keep only expected "user : role1,role2" formatted lines
+      u[/^[^:]+:\s+\S+$/]
+    }.map { |u|
+      # Break into ["user ", " role1,role2"]
+      u.split(':').first.strip
+    }.map do |user|
+      {
+        :name => user,
+        :ensure => :present,
+        :provider => :esusers,
+      }
+    end
+  end
+
+  def self.instances
+    users.map do |user|
+      new user
+    end
+  end
+
+  def self.prefetch(resources)
+    instances.each do |prov|
+      if resource = resources[prov.name]
+        resource.provider = prov
+      end
+    end
+  end
+
+  def initialize(value={})
+    super(value)
+    @property_flush = {}
+  end
+
+  def flush
+    arguments = []
+
+    case @property_flush[:ensure]
+    when :absent
+      arguments << 'userdel'
+      arguments << resource[:name]
+    else
+      arguments << 'useradd'
+      arguments << resource[:name]
+      arguments << '-p' << resource[:password]
+    end
+
+    self.class.esusers_with_path(arguments)
+    @property_hash = self.class.users.detect { |u| u[:name] == resource[:name] }
+  end
+
+  def create
+    @property_flush[:ensure] = :present
+  end
+
+  def exists?
+    @property_hash[:ensure] == :present
+  end
+
+  def destroy
+    @property_flush[:ensure] = :absent
+  end
+
+  def passwd
+    self.class.esusers_with_path([
+      'passwd',
+      resource[:name],
+      '-p', resource[:password]
+    ])
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_user/parsed.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,36 @@
+require 'puppet/provider/parsedfile'
+
+case Facter.value('osfamily')
+when 'OpenBSD'
+  users = '/usr/local/elasticsearch/shield/users'
+else
+  users = '/usr/share/elasticsearch/shield/users'
+end
+
+Puppet::Type.type(:elasticsearch_shield_user).provide(
+  :parsed,
+  :parent => Puppet::Provider::ParsedFile,
+  :default_target => users
+) do
+  desc "Provider for Shield esusers using plain files."
+
+  confine :exists => users
+
+  has_feature :manages_passwords
+
+  text_line :comment,
+            :match => %r{^\s*#}
+
+  record_line :parsed,
+              :fields => %w{name hashed_password},
+              :separator => ':',
+              :joiner => ':'
+
+  def self.valid_attr?(klass, attr_name)
+    if klass.respond_to? :parameters
+      klass.parameters.include?(attr_name)
+    else
+      true
+    end
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_shield_user_roles/parsed.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,59 @@
+require 'puppet/provider/parsedfile'
+
+case Facter.value('osfamily')
+when 'OpenBSD'
+  users_roles = '/usr/local/elasticsearch/shield/users_roles'
+else
+  users_roles = '/usr/share/elasticsearch/shield/users_roles'
+end
+
+Puppet::Type.type(:elasticsearch_shield_user_roles).provide(
+  :parsed,
+  :parent => Puppet::Provider::ParsedFile,
+  :default_target => users_roles
+) do
+  desc "Provider for Shield user roles (parsed file.)"
+
+  confine :exists => users_roles
+
+  def self.parse text
+    text.split("\n").map{|l|l.strip}.select do |line|
+      # Strip comments
+      not line.start_with? '#' and not line.empty?
+    end.map do |line|
+      # Turn array of roles into array of users that have the role
+      role, users = line.split(':')
+      users.split(',').map do |user|
+        { user => [role] }
+      end
+    end.flatten.inject({}) do |hash, user|
+      # Gather up user => role hashes by append-merging role lists
+      hash.merge(user) { |_, o, n| o + n }
+    end.map do |user, roles|
+      # Map those hashes into what the provider expects
+      {
+        :name => user,
+        :roles => roles
+      }
+    end.to_a
+  end
+
+  def self.to_file records
+    debug "Flushing: #{records.inspect}"
+    records.map do |record|
+      record[:roles].map do |r|
+        { [record[:name]] => r }
+      end
+    end.flatten.map(&:invert).inject({}) do |acc, role|
+      acc.merge(role) { |_, o, n| o + n }
+    end.delete_if do |_, users|
+      users.empty?
+    end.map do |role, users|
+      "#{role}:#{users.join(',')}"
+    end.join("\n") + "\n"
+  end
+
+  def self.skip_record? record
+    false
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/provider/elasticsearch_template/ruby.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,205 @@
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__),"..","..","..",".."))
+
+require 'json'
+require 'net/http'
+require 'openssl'
+
+require 'puppet_x/elastic/deep_to_i'
+
+Puppet::Type.type(:elasticsearch_template).provide(:ruby) do
+  desc <<-ENDHEREDOC
+    A REST API based provider to manage Elasticsearch templates.
+  ENDHEREDOC
+
+  mk_resource_methods
+
+  def self.rest http, \
+                req, \
+                validate_tls=true, \
+                timeout=10, \
+                username=nil, \
+                password=nil
+
+    if username and password
+      req.basic_auth username, password
+    elsif username or password
+      Puppet.warning(
+        'username and password must both be defined, skipping basic auth'
+      )
+    end
+
+    http.read_timeout = timeout
+    http.open_timeout = timeout
+    http.verify_mode = OpenSSL::SSL::VERIFY_NONE if not validate_tls
+
+    begin
+      http.request req
+    rescue EOFError => e
+      # Because the provider attempts a best guess at API access, we
+      # only fail when HTTP operations fail for mutating methods.
+      unless ['GET', 'OPTIONS', 'HEAD'].include? req.method
+        raise Puppet::Error,
+          "Received '#{e}' from the Elasticsearch API. Are your API settings correct?"
+      end
+    end
+  end
+
+  def self.templates protocol='http', \
+                     validate_tls=true, \
+                     host='localhost', \
+                     port=9200, \
+                     timeout=10, \
+                     username=nil, \
+                     password=nil, \
+                     ca_file=nil, \
+                     ca_path=nil
+
+    uri = URI("#{protocol}://#{host}:#{port}/_template")
+    http = Net::HTTP.new uri.host, uri.port
+    req = Net::HTTP::Get.new uri.request_uri
+
+    http.use_ssl = uri.scheme == 'https'
+    [[ca_file, :ca_file=], [ca_path, :ca_path=]].each do |arg, method|
+      if arg and http.respond_to? method
+        http.send method, arg
+      end
+    end
+
+    response = rest http, req, validate_tls, timeout, username, password
+
+    if response.respond_to? :code and response.code.to_i == 200
+      JSON.parse(response.body).map do |name, template|
+        {
+          :name => name,
+          :ensure => :present,
+          :content => Puppet_X::Elastic::deep_to_i(template),
+          :provider => :ruby
+        }
+      end
+    else
+      []
+    end
+  end
+
+  def self.instances
+    templates.map { |resource| new resource }
+  end
+
+  # Unlike a typical #prefetch, which just ties discovered #instances to the
+  # correct resources, we need to quantify all the ways the resources in the
+  # catalog know about Elasticsearch API access and use those settings to
+  # fetch any templates we can before associating resources and providers.
+  def self.prefetch(resources)
+    # Get all relevant API access methods from the resources we know about
+    resources.map do |_, resource|
+      p = resource.parameters
+      [
+        p[:protocol].value,
+        p[:validate_tls].value,
+        p[:host].value,
+        p[:port].value,
+        p[:timeout].value,
+        (p.has_key?(:username) ? p[:username].value : nil),
+        (p.has_key?(:password) ? p[:password].value : nil),
+        (p.has_key?(:ca_file) ? p[:ca_file].value : nil),
+        (p.has_key?(:ca_path) ? p[:ca_path].value : nil)
+      ]
+    # Deduplicate identical settings, and fetch templates
+    end.uniq.map do |api|
+      templates(*api)
+    # Flatten and deduplicate the array, instantiate providers, and do the
+    # typical association dance
+    end.flatten.uniq.map{|resource| new resource}.each do |prov|
+      if resource = resources[prov.name]
+        resource.provider = prov
+      end
+    end
+  end
+
+  def initialize(value={})
+    super(value)
+    @property_flush = {}
+  end
+
+  def flush
+    uri = URI(
+      "%s://%s:%d/_template/%s" % [
+      resource[:protocol],
+      resource[:host],
+      resource[:port],
+      resource[:name]
+    ])
+
+    http = Net::HTTP.new uri.host, uri.port
+    http.use_ssl = uri.scheme == 'https'
+    [:ca_file, :ca_path].each do |arg|
+      if not resource[arg].nil? and http.respond_to? arg
+        http.send "#{arg}=".to_sym, resource[arg]
+      end
+    end
+
+    case @property_flush[:ensure]
+    when :absent
+      req = Net::HTTP::Delete.new uri.request_uri
+    else
+      req = Net::HTTP::Put.new uri.request_uri
+      req.body = JSON.generate(resource[:content])
+    end
+
+    response = self.class.rest(
+      http,
+      req,
+      resource[:validate_tls],
+      resource[:timeout],
+      resource[:username],
+      resource[:password]
+    )
+
+    # Attempt to return useful error output
+    unless response.code.to_i == 200
+      json = JSON.parse(response.body)
+
+      if json.has_key? 'error'
+        if json['error'].is_a? Hash and json['error'].has_key? 'root_cause'
+          # Newer versions have useful output
+          err_msg = json['error']['root_cause'].first['reason']
+        else
+          # Otherwise fallback to old-style error messages
+          err_msg = json['error']
+        end
+      else
+        # As a last resort, return the response error code
+        err_msg = "HTTP #{response.code}"
+      end
+
+      raise Puppet::Error, "Elasticsearch API responded with: #{err_msg}"
+    end
+
+    @property_hash = self.class.templates(
+      resource[:protocol],
+      resource[:validate_tls],
+      resource[:host],
+      resource[:port],
+      resource[:timeout],
+      resource[:username],
+      resource[:password],
+      resource[:ca_file],
+      resource[:ca_path]
+    ).detect do |t|
+      t[:name] == resource[:name]
+    end
+  end
+
+  def create
+    @property_flush[:ensure] = :present
+  end
+
+  def exists?
+    @property_hash[:ensure] == :present
+  end
+
+  def destroy
+    @property_flush[:ensure] = :absent
+  end
+
+end # of .provide
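A minimal Ruby sketch (not part of the patch) of the deduplication idea behind self.prefetch above: catalog resources that share identical API settings collapse to a single call to self.templates, so only one HTTP round-trip is made per distinct endpoint. The endpoints and credentials below are hypothetical.

    # Hypothetical API settings gathered from three catalog resources:
    api_settings = [
      ['http',  true, 'localhost',      9200, 10, nil,     nil,      nil, nil],
      ['http',  true, 'localhost',      9200, 10, nil,     nil,      nil, nil],
      ['https', true, 'es.example.com', 9200, 10, 'admin', 'secret', nil, nil],
    ]

    # uniq collapses the two identical endpoints, so templates() would be
    # called twice here rather than three times.
    puts api_settings.uniq.length # => 2
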
--- a/dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_plugin.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_plugin.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,7 +1,7 @@
 Puppet::Type.newtype(:elasticsearch_plugin) do
 
   @doc = "Plugin installation type"
-  
+
   ensurable do
     defaultvalues
     defaultto :present
@@ -19,13 +19,17 @@
     desc 'Source of the package. puppet:// or file:// resource'
   end
 
-  newparam(:proxy_args) do
+  newparam(:proxy) do
     desc 'Proxy Host'
   end
 
   newparam(:plugin_dir) do
-    desc 'Plugin directory'
+    desc 'Path to the Plugins directory'
     defaultto '/usr/share/elasticsearch/plugins'
   end
 
+  newparam(:plugin_path) do
+    desc 'Override name of the directory created for the plugin'
+  end
+
 end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_role.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,18 @@
+Puppet::Type.newtype(:elasticsearch_shield_role) do
+  desc "Type to model Elasticsearch shield roles."
+
+  ensurable do
+    defaultvalues
+    defaultto :present
+  end
+
+  newparam(:name, :namevar => true) do
+    desc 'Role name.'
+
+    newvalues(/^[a-zA-Z_]{1}[-\w@.$]{0,29}$/)
+  end
+
+  newproperty(:privileges) do
+    desc 'Security privileges of the given role.'
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_role_mapping.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,18 @@
+Puppet::Type.newtype(:elasticsearch_shield_role_mapping) do
+  desc "Type to model Elasticsearch shield role mappings."
+
+  ensurable do
+    defaultvalues
+    defaultto :present
+  end
+
+  newparam(:name, :namevar => true) do
+    desc 'Role name.'
+
+    newvalues(/^[a-zA-Z_]{1}[-\w@.$]{0,29}$/)
+  end
+
+  newproperty(:mappings, :array_matching => :all) do
+    desc 'List of role mappings.'
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_user.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,50 @@
+Puppet::Type.newtype(:elasticsearch_shield_user) do
+  desc "Type to model Elasticsearch shield users."
+
+  feature :manages_passwords,
+    'The provider can control the password hash without a need
+    to explicitly refresh.'
+
+  ensurable do
+    defaultvalues
+    defaultto :present
+  end
+
+  newparam(:name, :namevar => true) do
+    desc 'User name.'
+  end
+
+  newparam(:password) do
+    desc 'Plaintext password for user.'
+
+    validate do |value|
+      if value.length < 6
+        raise ArgumentError, 'Password must be at least 6 characters long'
+      end
+    end
+
+    def is_to_s currentvalue
+      return '[old password hash redacted]'
+    end
+    def should_to_s newvalue
+      return '[new password hash redacted]'
+    end
+  end
+
+  newproperty(
+    :hashed_password,
+    :required_features => :manages_passwords
+  ) do
+    desc 'Hashed password for user.'
+
+    newvalues(/^[$]2a[$].{56}$/)
+  end
+
+  def refresh
+    if @parameters[:ensure]
+      provider.passwd
+    else
+      debug 'skipping password set'
+    end
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_shield_user_roles.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,23 @@
+Puppet::Type.newtype(:elasticsearch_shield_user_roles) do
+  desc "Type to model Elasticsearch shield user roles."
+
+  ensurable do
+    defaultvalues
+    defaultto :present
+  end
+
+  newparam(:name, :namevar => true) do
+    desc 'User name.'
+  end
+
+  newproperty(:roles, :array_matching => :all) do
+    desc 'Array of roles that the user should belong to.'
+    def insync? is
+      is.sort == should.sort
+    end
+  end
+
+  autorequire(:elasticsearch_shield_user) do
+    self[:name]
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/type/elasticsearch_template.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,194 @@
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__),"..","..",".."))
+
+require 'puppet/file_serving/content'
+require 'puppet/file_serving/metadata'
+require 'puppet/parameter/boolean'
+
+require 'puppet_x/elastic/deep_implode'
+require 'puppet_x/elastic/deep_to_i'
+
+Puppet::Type.newtype(:elasticsearch_template) do
+  desc 'Manages Elasticsearch index templates.'
+
+  ensurable do
+    defaultvalues
+    defaultto :present
+  end
+
+  newparam(:name, :namevar => true) do
+    desc 'Template name.'
+  end
+
+  newproperty(:content) do
+    desc 'Structured content of template.'
+
+    validate do |value|
+      raise Puppet::Error, 'hash expected' unless value.is_a? Hash
+    end
+
+    munge do |value|
+
+      # The Elasticsearch API will return default empty values for
+      # order, aliases, and mappings if they aren't defined in the
+      # user mapping, so we need to set defaults here to keep the
+      # `is` and `should` states consistent if the user hasn't
+      # provided any.
+      #
+      # We use deep_to_i to ensure any numeric values are properly
+      # parsed, whether from user-defined resources or when reading
+      # from the API.
+      #
+      # We also need to fully qualify index settings, since users
+      # can define those with the index json key absent, but the API
+      # always fully qualifies them.
+      {'order'=>0,'aliases'=>{},'mappings'=>{}}.merge(
+        Puppet_X::Elastic::deep_to_i(
+          value.tap do |val|
+            if val.has_key? 'settings'
+              unless val['settings'].has_key? 'index'
+                val['settings']['index'] = {}
+              end
+              (val['settings'].keys - ['index']).each do |setting|
+                val['settings']['index'][setting] = \
+                  val['settings'].delete(setting)
+              end
+            end
+          end
+      ))
+    end
+
+    def insync?(is)
+      Puppet_X::Elastic::deep_implode(is) == \
+        Puppet_X::Elastic::deep_implode(should)
+    end
+  end
+
+  newparam(:source) do
+    desc 'Puppet source to file containing template contents.'
+
+    validate do |value|
+      raise Puppet::Error, 'string expected' unless value.is_a? String
+    end
+  end
+
+  newparam(:host) do
+    desc 'Optional host where Elasticsearch is listening.'
+    defaultto 'localhost'
+
+    validate do |value|
+      unless value.is_a? String
+        raise Puppet::Error, 'invalid parameter, expected string'
+      end
+    end
+  end
+
+  newparam(:port) do
+    desc 'Port to use for Elasticsearch HTTP API operations.'
+    defaultto 9200
+
+    munge do |value|
+      if value.is_a? String
+        value.to_i
+      elsif value.is_a? Fixnum
+        value
+      else
+        raise Puppet::Error, "unknown port value '#{value}' of type '#{value.class}'"
+      end
+    end
+
+    validate do |value|
+      if value.to_s =~ /^([0-9]+)$/
+        unless (0 < $1.to_i) and ($1.to_i <= 65535)
+          raise Puppet::Error, "invalid port value '#{value}'"
+        end
+      else
+        raise Puppet::Error, "invalid port value '#{value}'"
+      end
+    end
+  end
+
+  newparam(:protocol) do
+    desc 'Protocol to communicate over to Elasticsearch.'
+    defaultto 'http'
+  end
+
+  newparam(
+    :validate_tls,
+    :boolean => true,
+    :parent => Puppet::Parameter::Boolean
+  ) do
+    desc 'Whether to verify TLS/SSL certificates.'
+    defaultto true
+  end
+
+  newparam(:timeout) do
+    desc 'HTTP timeout for reading/writing content to Elasticsearch.'
+    defaultto 10
+
+    munge do |value|
+      if value.is_a? String
+        value.to_i
+      elsif value.is_a? Fixnum
+        value
+      else
+        raise Puppet::Error, "unknown '#{value}' timeout type '#{value.class}'"
+      end
+    end
+
+    validate do |value|
+      if value.to_s !~ /^\d+$/
+        raise Puppet::Error, 'timeout must be a positive integer'
+      end
+    end
+  end
+
+  newparam(:username) do
+    desc 'Optional HTTP basic authentication username for Elasticsearch.'
+  end
+
+  newparam(:password) do
+    desc 'Optional HTTP basic authentication plaintext password for Elasticsearch.'
+  end
+
+  newparam(:ca_file) do
+    desc 'Absolute path to a CA file to authenticate server certificates against.'
+  end
+
+  newparam(:ca_path) do
+    desc 'Absolute path to a directory containing CA files to authenticate server certificates against.'
+  end
+
+  validate do
+
+    # Ensure that at least one source of template content has been provided
+    if self[:ensure] == :present
+      if self[:content].nil? and self[:source].nil?
+        fail Puppet::ParseError, '"content" or "source" required'
+      elsif !self[:content].nil? and !self[:source].nil?
+        fail(Puppet::ParseError,
+             "'content' and 'source' cannot be simultaneously defined")
+      end
+    end
+
+    # If a source was passed, retrieve the source content from Puppet's
+    # FileServing indirection and set the content property
+    if !self[:source].nil?
+      unless Puppet::FileServing::Metadata.indirection.find(self[:source])
+        fail "Could not retrieve source %s" % self[:source]
+      end
+
+      if not self.catalog.nil? and \
+          self.catalog.respond_to?(:environment_instance)
+        tmp = Puppet::FileServing::Content.indirection.find(
+          self[:source],
+          :environment => self.catalog.environment_instance
+        )
+      else
+        tmp = Puppet::FileServing::Content.indirection.find(self[:source])
+      end
+
+      fail "Could not find any content at %s" % self[:source] unless tmp
+      self[:content] = PSON::load(tmp.content)
+    end
+  end
+end # of newtype
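A minimal Ruby sketch (not part of the patch) of what the content property's munge block above does to a hypothetical user-supplied template hash: bare settings keys are nested under 'index' the way the Elasticsearch API reports them, and defaults for order, aliases, and mappings are filled in so later comparisons stay stable (the real munge additionally runs Puppet_X::Elastic::deep_to_i over the result).

    value = {
      'template' => 'logstash-*',
      'settings' => { 'number_of_replicas' => '2', 'index' => {} }
    }

    # Fully qualify bare settings under the 'index' key, as the API does.
    (value['settings'].keys - ['index']).each do |setting|
      value['settings']['index'][setting] = value['settings'].delete(setting)
    end

    normalized = { 'order' => 0, 'aliases' => {}, 'mappings' => {} }.merge(value)
    # => {"order"=>0, "aliases"=>{}, "mappings"=>{}, "template"=>"logstash-*",
    #     "settings"=>{"index"=>{"number_of_replicas"=>"2"}}}
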
--- a/dev/provisioning/modules/elasticsearch/lib/puppet/util/es_instance_validator.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet/util/es_instance_validator.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -10,6 +10,13 @@
       def initialize(instance_server, instance_port)
         @instance_server = instance_server
         @instance_port   = instance_port
+
+        # Avoid deprecation warnings in Puppet versions < 4
+        if Facter.value(:puppetversion).split('.').first.to_i < 4
+          @timeout = Puppet[:configtimeout]
+        else
+          @timeout = Puppet[:http_connect_timeout]
+        end
       end
 
       # Utility method; attempts to make an https connection to the Elasticsearch instance.
@@ -18,7 +25,7 @@
       #
       # @return true if the connection is successful, false otherwise.
       def attempt_connection
-        Timeout::timeout(Puppet[:configtimeout]) do
+        Timeout::timeout(@timeout) do
           begin
             TCPSocket.new(@instance_server, @instance_port).close
             true
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/deep_implode.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,28 @@
+module Puppet_X
+  module Elastic
+    def self.deep_implode(hash)
+      ret = Hash.new
+      implode ret, hash
+      ret
+    end
+
+    def self.implode(new_hash, hash, path=[])
+      hash.sort_by{|k,v| k.length}.reverse.each do |key, value|
+        new_path = path + [key]
+        case value
+        when Hash
+          implode(new_hash, value, new_path)
+        else
+          new_key = new_path.join('.')
+          if value.is_a? Array \
+              and new_hash.has_key? new_key \
+              and new_hash[new_key].is_a? Array
+            new_hash[new_key] += value
+          else
+            new_hash[new_key] ||= value
+          end
+        end
+      end
+    end # of implode
+  end # of Elastic
+end # of Puppet_X
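A minimal sketch (not part of the patch) of what deep_implode produces for a hypothetical nested configuration hash, assuming the file above has been loaded: nested keys are flattened into the dotted form Elasticsearch uses.

    nested = {
      'node' => { 'name' => 'es-01' },
      'path' => { 'data' => '/var/lib/elasticsearch' }
    }
    Puppet_X::Elastic.deep_implode(nested)
    # => { 'node.name' => 'es-01', 'path.data' => '/var/lib/elasticsearch' }
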
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/deep_to_i.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,19 @@
+module Puppet_X
+  module Elastic
+    # This ugly hack is required due to the fact Puppet passes in the
+    # puppet-native hash with stringified numerics, which causes the
+    # decoded JSON from the Elasticsearch API to be seen as out-of-sync
+    # when the parsed template hash is compared against the puppet hash.
+    def self.deep_to_i obj
+      if obj.is_a? String and obj =~ /^[0-9]+$/
+        obj.to_i
+      elsif obj.is_a? Array
+        obj.map { |element| deep_to_i(element) }
+      elsif obj.is_a? Hash
+        obj.merge(obj) { |key, val| deep_to_i(val) }
+      else
+        obj
+      end
+    end
+  end # of Elastic
+end # of Puppet_X
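A minimal sketch (not part of the patch) of deep_to_i applied to a hypothetical template fragment, assuming the file above has been loaded: stringified numerics are converted recursively so the Puppet-side hash compares cleanly against the JSON decoded from the API.

    template = { 'order' => '10', 'settings' => { 'index' => { 'number_of_shards' => '3' } } }
    Puppet_X::Elastic.deep_to_i(template)
    # => { 'order' => 10, 'settings' => { 'index' => { 'number_of_shards' => 3 } } }
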
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/hash.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,68 @@
+module Puppet_X
+  module Elastic
+    module SortedHash
+
+      # Upon extension, modify the hash appropriately to render
+      # sorted yaml dependent upon whichever way is supported for
+      # this version of Puppet/Ruby's yaml implementation.
+      def self.extended(base)
+
+        if RUBY_VERSION >= '1.9'
+          # We can sort the hash in Ruby >= 1.9 by recursively
+          # re-inserting key/values in sorted order. Native to_yaml will
+          # call .each and get sorted pairs back.
+          tmp = base.to_a.sort
+          base.clear
+          tmp.each do |key, val|
+            if val.is_a? base.class
+              val.extend Puppet_X::Elastic::SortedHash
+            elsif val.is_a? Array
+              val.map do |elem|
+                if elem.is_a? base.class
+                  elem.extend(Puppet_X::Elastic::SortedHash)
+                else
+                  elem
+                end
+              end
+            end
+            base[key] = val
+          end
+        else
+          # Otherwise, recurse into the hash to extend all nested
+          # hashes with the sorted each_pair method.
+          #
+          # Ruby < 1.9 doesn't support any notion of sorted hashes,
+          # so we have to expressly monkey patch each_pair, which is
+          # called by ZAML (the yaml library used in Puppet < 4; Puppet
+          # >= 4 deprecates Ruby 1.8)
+          #
+          # Note that respond_to? is used here as there were weird
+          # problems with .class/.is_a?
+          base.merge! base do |_, ov, nv|
+            if ov.respond_to? :each_pair
+              ov.extend Puppet_X::Elastic::SortedHash
+            elsif ov.is_a? Array
+              ov.map do |elem|
+                if elem.respond_to? :each_pair
+                  elem.extend Puppet_X::Elastic::SortedHash
+                else
+                  elem
+                end
+              end
+            else
+              ov
+            end
+          end
+        end
+      end
+
+      # Override each_pair with a method that yields key/values in
+      # sorted order.
+      def each_pair
+        keys.sort.each do |key|
+          yield key, self[key]
+        end
+      end
+    end
+  end
+end
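A minimal sketch (not part of the patch) of how the SortedHash mix-in above is intended to be used, assuming Ruby >= 1.9 and that the file has been loaded: extending a hash re-inserts its pairs in sorted order, so YAML rendered from it comes out with deterministic, alphabetized keys.

    require 'yaml'

    settings = {
      'node.name'          => 'es-01',
      'cluster.name'       => 'logging',
      'bootstrap.mlockall' => true
    }
    settings.extend Puppet_X::Elastic::SortedHash

    settings.keys         # => ["bootstrap.mlockall", "cluster.name", "node.name"]
    puts settings.to_yaml # keys appear alphabetically in the rendered YAML
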
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/lib/puppet_x/elastic/plugin_name.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,16 @@
+module Puppet_X
+  module Elastic
+    # Attempt to guess at the plugin's final directory name
+    def self.plugin_name(original_string)
+      vendor, plugin, _version = original_string.split('/')
+
+      if plugin.nil?
+        # Not delineated by slashes; single plugin name in the style of
+        # commercial plugins post-2.x
+        vendor
+      else # strip off potential es prefixes and return the plugin name
+        plugin.gsub(/(elasticsearch-|es-)/, '')
+      end
+    end
+  end # of Elastic
+end # of Puppet_X
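A minimal sketch (not part of the patch) of plugin_name on a few illustrative install strings: vendor prefixes and elasticsearch-/es- prefixes are stripped to guess the directory the plugin will unpack into.

    Puppet_X::Elastic.plugin_name('mobz/elasticsearch-head')           # => 'head'
    Puppet_X::Elastic.plugin_name('lmenezes/elasticsearch-kopf/2.1.1') # => 'kopf'
    Puppet_X::Elastic.plugin_name('x-pack')                            # => 'x-pack'
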
--- a/dev/provisioning/modules/elasticsearch/manifests/config.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/config.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -38,40 +38,42 @@
 
   if ( $elasticsearch::ensure == 'present' ) {
 
-    $notify_service = $elasticsearch::restart_on_change ? {
-      true  => Class['elasticsearch::service'],
-      false => undef,
-    }
-
-    file { $elasticsearch::configdir:
-      ensure => directory,
-      mode   => '0644',
-    }
-
-    file { $elasticsearch::params::logdir:
-      ensure  => 'directory',
-      group   => undef,
-      mode    => '0644',
-      recurse => true,
-    }
-
-    file { $elasticsearch::params::homedir:
-      ensure  => 'directory',
-    }
-
-    file { "${elasticsearch::params::homedir}/bin":
-      ensure  => 'directory',
-      recurse => true,
-      mode    => '0755',
-    }
-
-    file { $elasticsearch::datadir:
-      ensure  => 'directory',
-    }
-
-    file { "${elasticsearch::homedir}/lib":
-      ensure  => 'directory',
-      recurse => true,
+    file {
+      $elasticsearch::configdir:
+        ensure => 'directory',
+        mode   => '0644';
+      $elasticsearch::datadir:
+        ensure => 'directory';
+      $elasticsearch::logdir:
+        ensure  => 'directory',
+        group   => undef,
+        mode    => '0644',
+        recurse => true;
+      $elasticsearch::plugindir:
+        ensure => 'directory',
+        mode   => 'o+Xr';
+      "${elasticsearch::homedir}/lib":
+        ensure  => 'directory',
+        recurse => true;
+      $elasticsearch::params::homedir:
+        ensure => 'directory';
+      "${elasticsearch::params::homedir}/templates_import":
+        ensure => 'directory',
+        mode   => '0644';
+      "${elasticsearch::params::homedir}/scripts":
+        ensure => 'directory',
+        mode   => '0644';
+      "${elasticsearch::params::homedir}/shield":
+        ensure => 'directory',
+        mode   => '0644',
+        owner  => 'root',
+        group  => '0';
+      '/etc/elasticsearch/elasticsearch.yml':
+        ensure => 'absent';
+      '/etc/elasticsearch/logging.yml':
+        ensure => 'absent';
+      '/etc/init.d/elasticsearch':
+        ensure => 'absent';
     }
 
     if $elasticsearch::params::pid_dir {
@@ -90,42 +92,25 @@
           ensure  => 'file',
           content => template("${module_name}/usr/lib/tmpfiles.d/elasticsearch.conf.erb"),
           owner   => 'root',
-          group   => 'root',
+          group   => '0',
         }
       }
     }
 
-
-    file { "${elasticsearch::params::homedir}/templates_import":
-      ensure => 'directory',
-      mode   => '0644',
-    }
-
-    file { "${elasticsearch::params::homedir}/scripts":
-      ensure => 'directory',
-      mode   => '0644',
-    }
-
-    # Removal of files that are provided with the package which we don't use
-    file { '/etc/init.d/elasticsearch':
-      ensure => 'absent',
-    }
-    file { '/lib/systemd/system/elasticsearch.service':
-      ensure => 'absent',
+    if ($elasticsearch::service_providers == 'systemd') {
+      # Mask default unit (from package)
+      exec { 'systemctl mask elasticsearch.service':
+        unless => 'test `systemctl is-enabled elasticsearch.service` = masked',
+      }
     }
 
     $new_init_defaults = { 'CONF_DIR' => $elasticsearch::configdir }
-    augeas { "${elasticsearch::params::defaults_location}/elasticsearch":
-      incl    => "${elasticsearch::params::defaults_location}/elasticsearch",
-      lens    => 'Shellvars.lns',
-      changes => template("${module_name}/etc/sysconfig/defaults.erb"),
-    }
-
-    file { '/etc/elasticsearch/elasticsearch.yml':
-      ensure => 'absent',
-    }
-    file { '/etc/elasticsearch/logging.yml':
-      ensure => 'absent',
+    if $elasticsearch::params::defaults_location {
+      augeas { "${elasticsearch::params::defaults_location}/elasticsearch":
+        incl    => "${elasticsearch::params::defaults_location}/elasticsearch",
+        lens    => 'Shellvars.lns',
+        changes => template("${module_name}/etc/sysconfig/defaults.erb"),
+      }
     }
 
   } elsif ( $elasticsearch::ensure == 'absent' ) {
--- a/dev/provisioning/modules/elasticsearch/manifests/init.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/init.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -51,12 +51,49 @@
 #
 # [*restart_on_change*]
 #   Boolean that determines if the application should be automatically restarted
-#   whenever the configuration changes. Disabling automatic restarts on config
-#   changes may be desired in an environment where you need to ensure restarts
-#   occur in a controlled/rolling manner rather than during a Puppet run.
+#   whenever the configuration, package, or plugins change. Enabling this
+#   setting will cause Elasticsearch to restart whenever there is cause to
+#   re-read configuration files, load new plugins, or start the service using an
+#   updated/changed executable. This may be undesirable in highly available
+#   environments.
+#
+#   If all other restart_* parameters are left unset, the value of
+#   restart_on_change is used for all other restart_*_change defaults.
+#
+#   Defaults to <tt>false</tt>, which disables automatic restarts. Setting to
+#   <tt>true</tt> will restart the application on any config, plugin, or
+#   package change.
+#
+# [*restart_config_change*]
+#   Boolean that determines if the application should be automatically restarted
+#   whenever the configuration changes. This includes the Elasticsearch
+#   configuration file, any service files, and defaults files.
+#   Disabling automatic restarts on config changes may be desired in an
+#   environment where you need to ensure restarts occur in a controlled/rolling
+#   manner rather than during a Puppet run.
 #
-#   Defaults to <tt>true</tt>, which will restart the application on any config
-#   change. Setting to <tt>false</tt> disables the automatic restart.
+#   Defaults to <tt>undef</tt>, in which case the default value of
+#   restart_on_change will be used (defaults to false).
+#
+# [*restart_package_change*]
+#   Boolean that determines if the application should be automatically restarted
+#   whenever the package (or package version) for Elasticsearch changes.
+#   Disabling automatic restarts on package changes may be desired in an
+#   environment where you need to ensure restarts occur in a controlled/rolling
+#   manner rather than during a Puppet run.
+#
+#   Defaults to <tt>undef</tt>, in which case the default value of
+#   restart_on_change will be used (defaults to false).
+#
+# [*restart_plugin_change*]
+#   Boolean that determines if the application should be automatically restarted
+#   whenever plugins are installed or removed.
+#   Disabling automatic restarts on plugin changes may be desired in an
+#   environment where you need to ensure restarts occur in a controlled/rolling
+#   manner rather than during a Puppet run.
+#
+#   Defaults to <tt>undef</tt>, in which case the default value of
+#   restart_on_change will be used (defaults to false).
 #
 # [*configdir*]
 #   Path to directory containing the elasticsearch configuration.
@@ -66,10 +103,6 @@
 #   Path to directory containing the elasticsearch plugins
 #   Use this setting if your packages deviate from the norm (/usr/share/elasticsearch/plugins)
 #
-# [*plugintool*]
-#   Path to directory containing the elasticsearch plugin installation script
-#   Use this setting if your packages deviate from the norm (/usr/share/elasticsearch/bin/plugin)
-#
 # [*package_url*]
 #   Url to the package to download.
 #   This can be a http,https or ftp resource for remote packages
@@ -120,9 +153,16 @@
 # [*config*]
 #   Elasticsearch configuration hash
 #
+# [*config_hiera_merge*]
+#   Enable Hiera merging for the config hash
+#   Defaults to: false
+#
 # [*datadir*]
 #   Allows you to set the data directory of Elasticsearch
 #
+# [*logdir*]
+#   Use different directory for logging
+#
 # [*java_install*]
 #  Install java which is required for Elasticsearch.
 #  Defaults to: false
@@ -136,14 +176,22 @@
 # [*repo_version*]
 #   Our repositories are versioned per major version (0.90, 1.0) select here which version you want
 #
+# [*repo_priority*]
+#   Repository priority. yum and apt supported.
+#   Default: undef
+#
 # [*repo_key_id*]
 #   String.  The apt GPG key id
-#   Default: D88E42B4
+#   Default: 46095ACC8548582C1A2699A9D27D666CD88E42B4
 #
 # [*repo_key_source*]
 #   String.  URL of the apt GPG key
 #   Default: http://packages.elastic.co/GPG-KEY-elasticsearch
 #
+# [*repo_proxy*]
+#   String.  URL for repository proxy
+#   Default: undef
+#
 # [*logging_config*]
 #   Hash representation of information you want in the logging.yml file
 #
@@ -182,6 +230,95 @@
 #   package upgrades.
 #   Defaults to: true
 #
+# [*use_ssl*]
+#   Enable auth on api calls. This parameter is deprecated in favor of setting
+#   the `api_protocol` parameter to "https".
+#   Defaults to: false
+#   This variable is deprecated
+#
+# [*validate_ssl*]
+#   Enable ssl validation on api calls. This parameter is deprecated in favor
+#   of the `validate_tls` parameter.
+#   Defaults to: true
+#   This variable is deprecated
+#
+# [*ssl_user*]
+#   Defines the username for authentication. This parameter is deprecated in
+#   favor of the `api_basic_auth_username` parameter.
+#   Defaults to: undef
+#   This variable is deprecated
+#
+# [*ssl_password*]
+#   Defines the password for authentication. This parameter is deprecated in
+#   favor of the `api_basic_auth_password` parameter.
+#   Defaults to: undef
+#   This variable is deprecated
+#
+# [*api_protocol*]
+#   Default protocol to use when accessing Elasticsearch APIs.
+#   Defaults to: http
+#
+# [*api_host*]
+#   Default host to use when accessing Elasticsearch APIs.
+#   Defaults to: localhost
+#
+# [*api_port*]
+#   Default port to use when accessing Elasticsearch APIs.
+#   Defaults to: 9200
+#
+# [*api_timeout*]
+#   Default timeout (in seconds) to use when accessing Elasticsearch APIs.
+#   Defaults to: 10
+#
+# [*validate_tls*]
+#   Enable TLS/SSL validation on API calls.
+#   Defaults to: true
+#
+# [*api_basic_auth_username*]
+#   Defines the default REST basic auth username for API authentication.
+#   Defaults to: undef
+#
+# [*api_basic_auth_password*]
+#   Defines the default REST basic auth password for API authentication.
+#   Defaults to: undef
+#
+# [*api_ca_file*]
+#   Path to a CA file which will be used to validate server certs when
+#   communicating with the Elasticsearch API over HTTPS.
+#   Defaults to: undef
+#
+# [*api_ca_path*]
+#   Path to a directory with CA files which will be used to validate server
+#   certs when communicating with the Elasticsearch API over HTTPS.
+#   Defaults to: undef
+#
+# [*system_key*]
+#   Source for the Shield system key. Valid values are any that are
+#   supported for the file resource `source` parameter.
+#   Value type is string
+#   Default value: undef
+#
+# [*file_rolling_type*]
+#   Configuration for the file appender rotation. It can be 'dailyRollingFile'
+#   or 'rollingFile'. The first rotates by date, and the second one by size.
+#   Value type is string
+#   Default value: dailyRollingFile
+#
+# [*daily_rolling_date_pattern*]
+#   File pattern for the file appender log when file_rolling_type is 'dailyRollingFile'
+#   Value type is string
+#   Default value: "'.'yyyy-MM-dd"
+#
+# [*rolling_file_max_backup_index*]
+#   Max number of logs to store when file_rolling_type is 'rollingFile'
+#   Value type is integer
+#   Default value: 1
+#
+# [*rolling_file_max_file_size*]
+#   Max log file size when file_rolling_type is 'rollingFile'
+#   Value type is string
+#   Default value: 10MB
+#
 # The default values for the parameters are set in elasticsearch::params. Have
 # a look at the corresponding <tt>params.pp</tt> manifest file if you need more
 # technical information about them.
@@ -207,46 +344,70 @@
 # * Richard Pijnenburg <mailto:richard.pijnenburg@elasticsearch.com>
 #
 class elasticsearch(
-  $ensure                = $elasticsearch::params::ensure,
-  $status                = $elasticsearch::params::status,
-  $restart_on_change     = $elasticsearch::params::restart_on_change,
-  $autoupgrade           = $elasticsearch::params::autoupgrade,
-  $version               = false,
-  $package_provider      = 'package',
-  $package_url           = undef,
-  $package_dir           = $elasticsearch::params::package_dir,
-  $package_name          = $elasticsearch::params::package,
-  $package_pin           = true,
-  $purge_package_dir     = $elasticsearch::params::purge_package_dir,
-  $package_dl_timeout    = $elasticsearch::params::package_dl_timeout,
-  $proxy_url             = undef,
-  $elasticsearch_user    = $elasticsearch::params::elasticsearch_user,
-  $elasticsearch_group   = $elasticsearch::params::elasticsearch_group,
-  $configdir             = $elasticsearch::params::configdir,
-  $purge_configdir       = $elasticsearch::params::purge_configdir,
-  $service_provider      = 'init',
-  $init_defaults         = undef,
-  $init_defaults_file    = undef,
-  $init_template         = undef,
-  $config                = undef,
-  $datadir               = $elasticsearch::params::datadir,
-  $plugindir             = $elasticsearch::params::plugindir,
-  $plugintool            = $elasticsearch::params::plugintool,
-  $java_install          = false,
-  $java_package          = undef,
-  $manage_repo           = false,
-  $repo_version          = undef,
-  $repo_key_id           = 'D88E42B4',
-  $repo_key_source       = 'http://packages.elastic.co/GPG-KEY-elasticsearch',
-  $logging_file          = undef,
-  $logging_config        = undef,
-  $logging_template      = undef,
-  $default_logging_level = $elasticsearch::params::default_logging_level,
-  $repo_stage            = false,
-  $instances             = undef,
-  $instances_hiera_merge = false,
-  $plugins               = undef,
-  $plugins_hiera_merge   = false
+  $ensure                         = $elasticsearch::params::ensure,
+  $status                         = $elasticsearch::params::status,
+  $restart_on_change              = $elasticsearch::params::restart_on_change,
+  $restart_config_change          = $elasticsearch::restart_on_change,
+  $restart_package_change         = $elasticsearch::restart_on_change,
+  $restart_plugin_change          = $elasticsearch::restart_on_change,
+  $autoupgrade                    = $elasticsearch::params::autoupgrade,
+  $version                        = false,
+  $package_provider               = 'package',
+  $package_url                    = undef,
+  $package_dir                    = $elasticsearch::params::package_dir,
+  $package_name                   = $elasticsearch::params::package,
+  $package_pin                    = true,
+  $purge_package_dir              = $elasticsearch::params::purge_package_dir,
+  $package_dl_timeout             = $elasticsearch::params::package_dl_timeout,
+  $proxy_url                      = undef,
+  $elasticsearch_user             = $elasticsearch::params::elasticsearch_user,
+  $elasticsearch_group            = $elasticsearch::params::elasticsearch_group,
+  $configdir                      = $elasticsearch::params::configdir,
+  $purge_configdir                = $elasticsearch::params::purge_configdir,
+  $service_provider               = 'init',
+  $init_defaults                  = undef,
+  $init_defaults_file             = undef,
+  $init_template                  = "${module_name}/etc/init.d/${elasticsearch::params::init_template}",
+  $config                         = undef,
+  $config_hiera_merge             = false,
+  $datadir                        = $elasticsearch::params::datadir,
+  $logdir                         = $elasticsearch::params::logdir,
+  $plugindir                      = $elasticsearch::params::plugindir,
+  $java_install                   = false,
+  $java_package                   = undef,
+  $manage_repo                    = false,
+  $repo_version                   = undef,
+  $repo_priority                  = undef,
+  $repo_key_id                    = '46095ACC8548582C1A2699A9D27D666CD88E42B4',
+  $repo_key_source                = 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
+  $repo_proxy                     = undef,
+  $logging_file                   = undef,
+  $logging_config                 = undef,
+  $logging_template               = undef,
+  $default_logging_level          = $elasticsearch::params::default_logging_level,
+  $repo_stage                     = false,
+  $instances                      = undef,
+  $instances_hiera_merge          = false,
+  $plugins                        = undef,
+  $plugins_hiera_merge            = false,
+  $use_ssl                        = undef,
+  $validate_ssl                   = undef,
+  $ssl_user                       = undef,
+  $ssl_password                   = undef,
+  $api_protocol                   = 'http',
+  $api_host                       = 'localhost',
+  $api_port                       = 9200,
+  $api_timeout                    = 10,
+  $api_basic_auth_username        = undef,
+  $api_basic_auth_password        = undef,
+  $api_ca_file                    = undef,
+  $api_ca_path                    = undef,
+  $validate_tls                   = true,
+  $system_key                     = undef,
+  $file_rolling_type              = $elasticsearch::params::file_rolling_type,
+  $daily_rolling_date_pattern     = $elasticsearch::params::daily_rolling_date_pattern,
+  $rolling_file_max_backup_index  = $elasticsearch::params::rolling_file_max_backup_index,
+  $rolling_file_max_file_size     = $elasticsearch::params::rolling_file_max_file_size,
 ) inherits elasticsearch::params {
 
   anchor {'elasticsearch::begin': }
@@ -267,8 +428,22 @@
     fail("\"${status}\" is not a valid status parameter value")
   }
 
+  if ! ($file_rolling_type in [ 'dailyRollingFile', 'rollingFile']) {
+    file("\"${file_rolling_type}\" is not a valid type")
+  }
+
+  validate_integer($rolling_file_max_backup_index)
+  validate_string($daily_rolling_date_pattern)
+  validate_string($rolling_file_max_file_size)
+
+
   # restart on change
-  validate_bool($restart_on_change)
+  validate_bool(
+    $restart_on_change,
+    $restart_config_change,
+    $restart_package_change,
+    $restart_plugin_change
+  )
 
   # purge conf dir
   validate_bool($purge_configdir)
@@ -293,14 +468,21 @@
     if ($config != undef) {
       validate_hash($config)
     }
+
+    if ($logging_config != undef) {
+      validate_hash($logging_config)
+    }
   }
 
   # java install validation
   validate_bool($java_install)
 
-  validate_bool($manage_repo)
+  validate_bool(
+    $manage_repo,
+    $package_pin
+  )
 
-  if ($manage_repo == true) {
+  if ($manage_repo == true and $ensure == 'present') {
     if $repo_version == undef {
       fail('Please fill in a repository version at $repo_version')
     } else {
@@ -323,6 +505,54 @@
     }
   }
 
+  # Various parameters governing API access to Elasticsearch, handling
+  # deprecated params.
+  validate_string($api_protocol, $api_host)
+  if $use_ssl != undef {
+    validate_bool($use_ssl)
+    warning('"use_ssl" parameter is deprecated; set $api_protocol to "https" instead')
+    $_api_protocol = 'https'
+  } else {
+    $_api_protocol = $api_protocol
+  }
+
+  validate_bool($validate_tls)
+  if $validate_ssl != undef {
+    validate_bool($validate_ssl)
+    warning('"validate_ssl" parameter is deprecated; use $validate_tls instead')
+    $_validate_tls = $validate_ssl
+  } else {
+    $_validate_tls = $validate_tls
+  }
+
+  if $api_basic_auth_username { validate_string($api_basic_auth_username) }
+  if $ssl_user != undef {
+    validate_string($ssl_user)
+    warning('"ssl_user" parameter is deprecated; use $api_basic_auth_username instead')
+    $_api_basic_auth_username = $ssl_user
+  } else {
+    $_api_basic_auth_username = $api_basic_auth_username
+  }
+
+  if $api_basic_auth_password { validate_string($api_basic_auth_password) }
+  if $ssl_password != undef {
+    validate_string($ssl_password)
+    warning('"ssl_password" parameter is deprecated; use $api_basic_auth_password instead')
+    $_api_basic_auth_password = $ssl_password
+  } else {
+    $_api_basic_auth_password = $api_basic_auth_password
+  }
+
+  if ! is_integer($api_timeout) {
+    fail("'${api_timeout}' is not an integer")
+  }
+
+  if ! is_integer($api_port) {
+    fail("'${api_port}' is not an integer")
+  }
+
+  if $system_key != undef { validate_string($system_key) }
+
   #### Manage actions
 
   # package(s)
@@ -331,6 +561,15 @@
   # configuration
   class { 'elasticsearch::config': }
 
+  # Hiera support for configuration hash
+  validate_bool($config_hiera_merge)
+
+  if $config_hiera_merge == true {
+    $x_config = hiera_hash('elasticsearch::config', $config)
+  } else {
+    $x_config = $config
+  }
+
   # Hiera support for instances
   validate_bool($instances_hiera_merge)
 
@@ -373,6 +612,12 @@
     -> Class['elasticsearch::package']
   }
 
+  if $package_pin {
+    class { 'elasticsearch::package::pin':
+      before => Class['elasticsearch::package'],
+    }
+  }
+
   if ($manage_repo == true) {
 
     if ($repo_stage == false) {
@@ -398,28 +643,107 @@
         stage => $repo_stage,
       }
     }
+
+    if defined(Class['elasticsearch::package::pin']) {
+      Class['elasticsearch::package::pin']
+      -> Class['elasticsearch::repo']
+    }
+
   }
 
   #### Manage relationships
+  #
+  # Note that many of these overly verbose declarations work around
+  # https://tickets.puppetlabs.com/browse/PUP-1410
+  # which means clean arrow order chaining won't work if someone, say,
+  # doesn't declare any plugins.
+  #
+  # forgive me for what you're about to see
 
   if $ensure == 'present' {
 
-    # we need the software before configuring it
+    # Anchor, installation, and configuration
     Anchor['elasticsearch::begin']
     -> Class['elasticsearch::package']
     -> Class['elasticsearch::config']
-    -> Elasticsearch::Plugin <| |>
+
+    # Top-level ordering bindings for resources.
+    Class['elasticsearch::config']
+    -> Elasticsearch::Plugin <| ensure == 'present' or ensure == 'installed' |>
+    Elasticsearch::Plugin <| ensure == 'absent' |>
+    -> Class['elasticsearch::config']
+    Class['elasticsearch::config']
     -> Elasticsearch::Instance <| |>
+    Class['elasticsearch::config']
+    -> Elasticsearch::Shield::User <| |>
+    Class['elasticsearch::config']
+    -> Elasticsearch::Shield::Role <| |>
+    Class['elasticsearch::config']
     -> Elasticsearch::Template <| |>
 
   } else {
 
-    # make sure all services are getting stopped before software removal
+    # Main anchor and included classes
+    Anchor['elasticsearch::begin']
+    -> Class['elasticsearch::config']
+    -> Class['elasticsearch::package']
+
+    # Top-level ordering bindings for resources.
+    Anchor['elasticsearch::begin']
+    -> Elasticsearch::Plugin <| |>
+    -> Class['elasticsearch::config']
     Anchor['elasticsearch::begin']
     -> Elasticsearch::Instance <| |>
     -> Class['elasticsearch::config']
-    -> Class['elasticsearch::package']
+    Anchor['elasticsearch::begin']
+    -> Elasticsearch::Shield::User <| |>
+    -> Class['elasticsearch::config']
+    Anchor['elasticsearch::begin']
+    -> Elasticsearch::Shield::Role <| |>
+    -> Class['elasticsearch::config']
+    Anchor['elasticsearch::begin']
+    -> Elasticsearch::Template <| |>
+    -> Class['elasticsearch::config']
 
   }
 
+  # Install plugins before managing instances or shield users/roles
+  Elasticsearch::Plugin <| ensure == 'present' or ensure == 'installed' |>
+  -> Elasticsearch::Instance <| |>
+  Elasticsearch::Plugin <| ensure == 'present' or ensure == 'installed' |>
+  -> Elasticsearch::Shield::User <| |>
+  Elasticsearch::Plugin <| ensure == 'present' or ensure == 'installed' |>
+  -> Elasticsearch::Shield::Role <| |>
+
+  # Remove plugins after managing shield users/roles
+  Elasticsearch::Shield::User <| |>
+  -> Elasticsearch::Plugin <| ensure == 'absent' |>
+  Elasticsearch::Shield::Role <| |>
+  -> Elasticsearch::Plugin <| ensure == 'absent' |>
+
+  # Ensure roles are defined before managing users that reference roles
+  Elasticsearch::Shield::Role <| |>
+  -> Elasticsearch::Shield::User <| ensure == 'present' |>
+  # Ensure users are removed before referenced roles are managed
+  Elasticsearch::Shield::User <| ensure == 'absent' |>
+  -> Elasticsearch::Shield::Role <| |>
+
+  # Ensure users and roles are managed before calling out to templates
+  Elasticsearch::Shield::Role <| |>
+  -> Elasticsearch::Template <| |>
+  Elasticsearch::Shield::User <| |>
+  -> Elasticsearch::Template <| |>
+
+  # Manage users/roles before instances (req'd to keep shield dir in sync)
+  Elasticsearch::Shield::Role <| |>
+  -> Elasticsearch::Instance <| |>
+  Elasticsearch::Shield::User <| |>
+  -> Elasticsearch::Instance <| |>
+
+  # Ensure instances are started before managing templates
+  Elasticsearch::Instance <| ensure == 'present' |>
+  -> Elasticsearch::Template <| |>
+  # Ensure instances are stopped after managing templates
+  Elasticsearch::Template <| |>
+  -> Elasticsearch::Instance <| ensure == 'absent' |>
 }
--- a/dev/provisioning/modules/elasticsearch/manifests/instance.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/instance.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -64,22 +64,105 @@
 # [*init_defaults_file*]
 #   Defaults file as puppet resource
 #
+# [*service_flags*]
+#   Service flags used for the OpenBSD service configuration, defaults to undef.
+#
+# [*init_template*]
+#   Service file as a template
+#
+# [*logdir*]
+#   Log directory for this instance.
+#
+# [*ssl*]
+#   Whether to manage TLS certificates for Shield. Requires the ca_certificate,
+#   certificate, private_key and keystore_password parameters to be set.
+#   Value type is boolean
+#   Default value: false
+#
+# [*ca_certificate*]
+#   Path to the trusted CA certificate to add to this node's java keystore.
+#   Value type is string
+#   Default value: undef
+#
+# [*certificate*]
+#   Path to the certificate for this node signed by the CA listed in
+#   ca_certificate.
+#   Value type is string
+#   Default value: undef
+#
+# [*private_key*]
+#   Path to the key associated with this node's certificate.
+#   Value type is string
+#   Default value: undef
+#
+# [*keystore_password*]
+#   Password to encrypt this node's Java keystore.
+#   Value type is string
+#   Default value: undef
+#
+# [*keystore_path*]
+#   Custom path to the java keystore file. This parameter is optional.
+#   Value type is string
+#   Default value: undef
+#
+# [*system_key*]
+#   Source for the Shield system key. Valid values are any that are
+#   supported for the file resource `source` parameter.
+#   Value type is string
+#   Default value: undef
+#
+# [*file_rolling_type*]
+#   Configuration for the file appender rotation. It can be 'dailyRollingFile'
+#   or 'rollingFile'. The first rotates by date, and the second one by size.
+#   Value type is string
+#   Default value: dailyRollingFile
+#
+# [*daily_rolling_date_pattern*]
+#   File pattern for the file appender log when file_rolling_type is 'dailyRollingFile'
+#   Value type is string
+#   Default value: "'.'yyyy-MM-dd"
+#
+# [*rolling_file_max_backup_index*]
+#   Max number of logs to store when file_rolling_type is 'rollingFile'
+#   Value type is integer
+#   Default value: 1
+#
+# [*rolling_file_max_file_size*]
+#   Max log file size when file_rolling_type is 'rollingFile'
+#   Value type is string
+#   Default value: 10MB
+#
 # === Authors
 #
+# * Tyler Langlois <mailto:tyler@elastic.co>
 # * Richard Pijnenburg <mailto:richard.pijnenburg@elasticsearch.com>
 #
 define elasticsearch::instance(
-  $ensure             = $elasticsearch::ensure,
-  $status             = $elasticsearch::status,
-  $config             = undef,
-  $configdir          = undef,
-  $datadir            = undef,
-  $logging_file       = undef,
-  $logging_config     = undef,
-  $logging_template   = undef,
-  $logging_level      = $elasticsearch::default_logging_level,
-  $init_defaults      = undef,
-  $init_defaults_file = undef
+  $ensure                        = $elasticsearch::ensure,
+  $status                        = $elasticsearch::status,
+  $config                        = undef,
+  $configdir                     = undef,
+  $datadir                       = undef,
+  $logdir                        = undef,
+  $logging_file                  = undef,
+  $logging_config                = undef,
+  $logging_template              = undef,
+  $logging_level                 = $elasticsearch::default_logging_level,
+  $service_flags                 = undef,
+  $init_defaults                 = undef,
+  $init_defaults_file            = undef,
+  $init_template                 = $elasticsearch::init_template,
+  $ssl                           = false,
+  $ca_certificate                = undef,
+  $certificate                   = undef,
+  $private_key                   = undef,
+  $keystore_password             = undef,
+  $keystore_path                 = undef,
+  $system_key                    = $elasticsearch::system_key,
+  $file_rolling_type             = $elasticsearch::file_rolling_type,
+  $daily_rolling_date_pattern    = $elasticsearch::daily_rolling_date_pattern,
+  $rolling_file_max_backup_index = $elasticsearch::rolling_file_max_backup_index,
+  $rolling_file_max_file_size    = $elasticsearch::rolling_file_max_file_size,
 ) {
 
   require elasticsearch::params
@@ -99,7 +182,7 @@
     fail("\"${ensure}\" is not a valid ensure parameter value")
   }
 
-  $notify_service = $elasticsearch::restart_on_change ? {
+  $notify_service = $elasticsearch::restart_config_change ? {
     true  => Elasticsearch::Service[$name],
     false => undef,
   }
@@ -118,17 +201,11 @@
       $instance_config = {}
     } else {
       validate_hash($config)
-      $instance_config = $config
+      $instance_config = deep_implode($config)
     }
 
     if(has_key($instance_config, 'node.name')) {
       $instance_node_name = {}
-    } elsif(has_key($instance_config,'node')) {
-      if(has_key($instance_config['node'], 'name')) {
-        $instance_node_name = {}
-      } else {
-        $instance_node_name = { 'node.name' => "${::hostname}-${name}" }
-      }
     } else {
       $instance_node_name = { 'node.name' => "${::hostname}-${name}" }
     }
@@ -154,13 +231,13 @@
     } else {
 
       if(is_hash($elasticsearch::logging_config)) {
-        $main_logging_config = $elasticsearch::logging_config
+        $main_logging_config = deep_implode($elasticsearch::logging_config)
       } else {
         $main_logging_config = { }
       }
 
       if(is_hash($logging_config)) {
-        $instance_logging_config = $logging_config
+        $instance_logging_config = deep_implode($logging_config)
       } else {
         $instance_logging_config = { }
       }
@@ -175,23 +252,13 @@
       $logging_source = undef
     }
 
-    if ($elasticsearch::config != undef) {
-      $main_config = $elasticsearch::config
+    if ($elasticsearch::x_config != undef) {
+      $main_config = deep_implode($elasticsearch::x_config)
     } else {
       $main_config = { }
     }
 
-    if(has_key($instance_config, 'path.data')) {
-      $instance_datadir_config = { 'path.data' => $instance_datadir }
-    } elsif(has_key($instance_config, 'path')) {
-      if(has_key($instance_config['path'], 'data')) {
-        $instance_datadir_config = { 'path' => { 'data' => $instance_datadir } }
-      } else {
-        $instance_datadir_config = { 'path.data' => $instance_datadir }
-      }
-    } else {
-      $instance_datadir_config = { 'path.data' => $instance_datadir }
-    }
+    $instance_datadir_config = { 'path.data' => $instance_datadir }
 
     if(is_array($instance_datadir)) {
       $dirs = join($instance_datadir, ' ')
@@ -199,6 +266,73 @@
       $dirs = $instance_datadir
     }
 
+    # Manage instance log directory
+    if ($logdir == undef) {
+      $instance_logdir = "${elasticsearch::logdir}/${name}"
+    } else {
+      $instance_logdir = $logdir
+    }
+
+    $instance_logdir_config = { 'path.logs' => $instance_logdir }
+
+    validate_bool($ssl)
+    if $ssl {
+      validate_absolute_path($ca_certificate, $certificate, $private_key)
+      validate_string($keystore_password)
+
+      if ($keystore_path == undef) {
+        $_keystore_path = "${instance_configdir}/shield/${name}.ks"
+      } else {
+        validate_absolute_path($keystore_path)
+        $_keystore_path = $keystore_path
+      }
+
+      $tls_config = {
+        'shield.ssl.keystore.path'     => $_keystore_path,
+        'shield.ssl.keystore.password' => $keystore_password,
+        'shield.transport.ssl'         => true,
+        'shield.http.ssl'              => true,
+      }
+
+      # Trust CA Certificate
+      java_ks { "elasticsearch_instance_${name}_keystore_ca":
+        ensure       => 'latest',
+        certificate  => $ca_certificate,
+        target       => $_keystore_path,
+        password     => $keystore_password,
+        trustcacerts => true,
+      }
+
+      # Load node certificate and private key
+      java_ks { "elasticsearch_instance_${name}_keystore_node":
+        ensure      => 'latest',
+        certificate => $certificate,
+        private_key => $private_key,
+        target      => $_keystore_path,
+        password    => $keystore_password,
+      }
+    } else { $tls_config = {} }
+
+    if $system_key != undef {
+      validate_string($system_key)
+    }
+
+    exec { "mkdir_logdir_elasticsearch_${name}":
+      command => "mkdir -p ${instance_logdir}",
+      creates => $instance_logdir,
+      require => Class['elasticsearch::package'],
+      before  => File[$instance_logdir],
+    }
+
+    file { $instance_logdir:
+      ensure  => 'directory',
+      owner   => $elasticsearch::elasticsearch_user,
+      group   => undef,
+      mode    => '0644',
+      require => Class['elasticsearch::package'],
+      before  => Elasticsearch::Service[$name],
+    }
+
     exec { "mkdir_datadir_elasticsearch_${name}":
       command => "mkdir -p ${dirs}",
       creates => $instance_datadir,
@@ -211,7 +345,6 @@
       owner   => $elasticsearch::elasticsearch_user,
       group   => undef,
       mode    => '0644',
-      recurse => true,
       require => [ Exec["mkdir_datadir_elasticsearch_${name}"], Class['elasticsearch::package'] ],
       before  => Elasticsearch::Service[$name],
     }
@@ -247,8 +380,28 @@
       target => "${elasticsearch::params::homedir}/scripts",
     }
 
+    file { "${instance_configdir}/shield":
+      ensure  => 'directory',
+      mode    => '0644',
+      source  => "${elasticsearch::params::homedir}/shield",
+      recurse => 'remote',
+      owner   => 'root',
+      group   => '0',
+      before  => Elasticsearch::Service[$name],
+    }
+
+    if $system_key != undef {
+      file { "${instance_configdir}/shield/system_key":
+        ensure  => 'file',
+        source  => $system_key,
+        mode    => '0400',
+        before  => Elasticsearch::Service[$name],
+        require => File["${instance_configdir}/shield"],
+      }
+    }
+
     # build up new config
-    $instance_conf = merge($main_config, $instance_node_name, $instance_config, $instance_datadir_config)
+    $instance_conf = merge($main_config, $instance_node_name, $instance_config, $instance_datadir_config, $instance_logdir_config, $tls_config)
 
     # defaults file content
     # ensure user did not provide both init_defaults and init_defaults_file
@@ -262,14 +415,23 @@
       $global_init_defaults = { }
     }
 
-    $instance_init_defaults_main = { 'CONF_DIR' => $instance_configdir, 'CONF_FILE' => "${instance_configdir}/elasticsearch.yml", 'LOG_DIR' => "/var/log/elasticsearch/${name}", 'ES_HOME' => '/usr/share/elasticsearch' }
+    $instance_init_defaults_main = {
+      'CONF_DIR'  => $instance_configdir,
+      'CONF_FILE' => "${instance_configdir}/elasticsearch.yml",
+      'LOG_DIR'   => $instance_logdir,
+      'ES_HOME'   => '/usr/share/elasticsearch',
+    }
 
     if (is_hash($init_defaults)) {
       $instance_init_defaults = $init_defaults
     } else {
       $instance_init_defaults = { }
     }
-    $init_defaults_new = merge($global_init_defaults, $instance_init_defaults_main, $instance_init_defaults )
+    $init_defaults_new = merge(
+      $global_init_defaults,
+      $instance_init_defaults_main,
+      $instance_init_defaults
+    )
 
     $user = $elasticsearch::elasticsearch_user
     $group = $elasticsearch::elasticsearch_group
@@ -307,9 +469,10 @@
   elasticsearch::service { $name:
     ensure             => $ensure,
     status             => $status,
+    service_flags      => $service_flags,
     init_defaults      => $init_defaults_new,
     init_defaults_file => $init_defaults_file,
-    init_template      => "${module_name}/etc/init.d/${elasticsearch::params::init_template}",
+    init_template      => $init_template,
     require            => $require_service,
     before             => $before_service,
   }
--- a/dev/provisioning/modules/elasticsearch/manifests/package.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/package.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -37,15 +37,21 @@
   # set params: in operation
   if $elasticsearch::ensure == 'present' {
 
+    if $elasticsearch::restart_package_change {
+      Package[$elasticsearch::package_name] ~> Elasticsearch::Service <| |>
+    }
+    Package[$elasticsearch::package_name] ~> Exec['remove_plugin_dir']
+
     # Create directory to place the package file
+    $package_dir = $elasticsearch::package_dir
     exec { 'create_package_dir_elasticsearch':
       cwd     => '/',
       path    => ['/usr/bin', '/bin'],
-      command => "mkdir -p ${elasticsearch::package_dir}",
-      creates => $elasticsearch::package_dir,
+      command => "mkdir -p ${package_dir}",
+      creates => $package_dir,
     }
 
-    file { $elasticsearch::package_dir:
+    file { $package_dir:
       ensure  => 'directory',
       purge   => $elasticsearch::purge_package_dir,
       force   => $elasticsearch::purge_package_dir,
@@ -53,7 +59,6 @@
       require => Exec['create_package_dir_elasticsearch'],
     }
 
-
     # Check if we want to install a specific version or not
     if $elasticsearch::version == false {
 
@@ -77,7 +82,6 @@
         default:   { fail("software provider \"${elasticsearch::package_provider}\".") }
       }
 
-      $package_dir = $elasticsearch::package_dir
 
       $filenameArray = split($elasticsearch::package_url, '/')
       $basefilename = $filenameArray[-1]
@@ -111,6 +115,8 @@
               "http_proxy=${elasticsearch::proxy_url}",
               "https_proxy=${elasticsearch::proxy_url}",
             ]
+          } else {
+            $exec_environment = []
           }
 
           exec { 'download_package_elasticsearch':
@@ -155,21 +161,29 @@
   # Package removal
   } else {
 
-    if ($::operatingsystem == 'OpenSuSE') {
+    if ($::osfamily == 'Suse') {
       Package {
         provider  => 'rpm',
       }
+      $package_ensure = 'absent'
+    } else {
+      $package_ensure = 'purged'
     }
-    $package_ensure = 'purged'
 
   }
 
   if ($elasticsearch::package_provider == 'package') {
 
     package { $elasticsearch::package_name:
-      ensure   => $package_ensure,
+      ensure => $package_ensure,
     }
 
+    exec { 'remove_plugin_dir':
+      refreshonly => true,
+      command     => "rm -rf ${elasticsearch::plugindir}",
+    }
+
+
   } else {
     fail("\"${elasticsearch::package_provider}\" is not supported")
   }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/manifests/package/pin.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,70 @@
+# == Class: elasticsearch::package::pin
+#
+# Controls package pinning for the Elasticsearch package.
+#
+# === Parameters
+#
+# This class does not provide any parameters.
+#
+# === Examples
+#
+# This class may be imported by other classes to use its functionality:
+#   class { 'elasticsearch::package::pin': }
+#
+# It is not intended to be used directly by external resources like node
+# definitions or other modules.
+#
+# === Authors
+#
+# * Tyler Langlois <mailto:tyler@elastic.co>
+#
+class elasticsearch::package::pin {
+
+  Exec {
+    path => [ '/bin', '/usr/bin', '/usr/local/bin' ],
+    cwd  => '/',
+  }
+
+  case $::osfamily {
+    'Debian': {
+      include ::apt
+
+      if ($elasticsearch::ensure == 'absent') {
+        apt::pin { $elasticsearch::package_name:
+          ensure => $elasticsearch::ensure,
+        }
+      } elsif ($elasticsearch::version != false) {
+        apt::pin { $elasticsearch::package_name:
+          ensure   => $elasticsearch::ensure,
+          packages => $elasticsearch::package_name,
+          version  => $elasticsearch::version,
+          priority => 1000,
+        }
+      }
+
+    }
+    'RedHat', 'Linux': {
+
+      if ($elasticsearch::ensure == 'absent') {
+        $_versionlock = '/etc/yum/pluginconf.d/versionlock.list'
+        $_lock_line = '0:elasticsearch-'
+        exec { 'elasticsearch_purge_versionlock.list':
+          command => "sed -i '/${_lock_line}/d' ${_versionlock}",
+          onlyif  => [
+            "test -f ${_versionlock}",
+            "grep -F '${_lock_line}' ${_versionlock}",
+          ],
+        }
+      } elsif ($elasticsearch::version != false) {
+        yum::versionlock {
+          "0:elasticsearch-${elasticsearch::pkg_version}.noarch":
+            ensure => $elasticsearch::ensure,
+        }
+      }
+
+    }
+    default: {
+      warning("Unable to pin package for OSfamily \"${::osfamily}\".")
+    }
+  }
+}
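The new `elasticsearch::package::pin` class only acts when the top-level class requests a concrete version. A usage sketch follows; the `package_pin` and `version` parameter names are assumptions based on the variables this class reads, and the version string is illustrative.

# Sketch: pin a fixed package version so the apt pin / yum versionlock is created.
class { 'elasticsearch':
  version     => '5.0.0',  # illustrative version string
  package_pin => true,     # assumed switch that pulls in package::pin
}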
--- a/dev/provisioning/modules/elasticsearch/manifests/params.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/params.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -41,7 +41,7 @@
   $status = 'enabled'
 
   # restart on configuration change?
-  $restart_on_change = true
+  $restart_on_change = false
 
   # Purge configuration directory
   $purge_configdir = false
@@ -60,6 +60,14 @@
     'index.indexing.slowlog' => 'TRACE, index_indexing_slow_log_file',
   }
 
+  $file_rolling_type = 'dailyRollingFile'
+
+  $daily_rolling_date_pattern = '"\'.\'yyyy-MM-dd"'
+
+  $rolling_file_max_backup_index = 1
+
+  $rolling_file_max_file_size = '10MB'
+
   #### Internal module values
 
   # User and Group for the files and user to run the service as.
@@ -72,6 +80,10 @@
       $elasticsearch_user  = 'elasticsearch'
       $elasticsearch_group = 'elasticsearch'
     }
+    'OpenBSD': {
+      $elasticsearch_user  = '_elasticsearch'
+      $elasticsearch_group = '_elasticsearch'
+    }
     default: {
       fail("\"${module_name}\" provides no user/group default value
            for \"${::kernel}\"")
@@ -87,6 +99,9 @@
     'Darwin': {
       $download_tool = 'curl --insecure -o'
     }
+    'OpenBSD': {
+      $download_tool = 'ftp -o'
+    }
     default: {
       fail("\"${module_name}\" provides no download tool default value
            for \"${::kernel}\"")
@@ -102,9 +117,17 @@
       $installpath = '/opt/elasticsearch'
       $homedir     = '/usr/share/elasticsearch'
       $plugindir   = "${homedir}/plugins"
-      $plugintool  = "${homedir}/bin/plugin"
       $datadir     = '/usr/share/elasticsearch/data'
     }
+    'OpenBSD': {
+      $configdir   = '/etc/elasticsearch'
+      $logdir      = '/var/log/elasticsearch'
+      $package_dir = '/var/cache/elasticsearch'
+      $installpath = undef
+      $homedir     = '/usr/local/elasticsearch'
+      $plugindir   = "${homedir}/plugins"
+      $datadir     = '/var/elasticsearch/data'
+    }
     default: {
       fail("\"${module_name}\" provides no config directory default value
            for \"${::kernel}\"")
@@ -121,7 +144,13 @@
       # main application
       $package = [ 'elasticsearch' ]
     }
-    'OpenSuSE': {
+    'OpenSuSE', 'SLES': {
+      $package = [ 'elasticsearch' ]
+    }
+    'Gentoo': {
+      $package = [ 'app-misc/elasticsearch' ]
+    }
+    'OpenBSD': {
       $package = [ 'elasticsearch' ]
     }
     default: {
@@ -141,23 +170,26 @@
       $pid_dir            = '/var/run/elasticsearch'
 
       if versioncmp($::operatingsystemmajrelease, '7') >= 0 {
-        $init_template     = 'elasticsearch.systemd.erb'
-        $service_providers = 'systemd'
+        $init_template        = 'elasticsearch.systemd.erb'
+        $service_providers    = 'systemd'
+        $systemd_service_path = '/lib/systemd/system'
       } else {
-        $init_template     = 'elasticsearch.RedHat.erb'
-        $service_providers = 'init'
+        $init_template        = 'elasticsearch.RedHat.erb'
+        $service_providers    = 'init'
+        $systemd_service_path = undef
       }
 
     }
     'Amazon': {
-      $service_name       = 'elasticsearch'
-      $service_hasrestart = true
-      $service_hasstatus  = true
-      $service_pattern    = $service_name
-      $defaults_location  = '/etc/sysconfig'
-      $pid_dir            = '/var/run/elasticsearch'
-      $init_template      = 'elasticsearch.RedHat.erb'
-      $service_providers  = 'init'
+      $service_name         = 'elasticsearch'
+      $service_hasrestart   = true
+      $service_hasstatus    = true
+      $service_pattern      = $service_name
+      $defaults_location    = '/etc/sysconfig'
+      $pid_dir              = '/var/run/elasticsearch'
+      $init_template        = 'elasticsearch.RedHat.erb'
+      $service_providers    = 'init'
+      $systemd_service_path = undef
     }
     'Debian': {
       $service_name       = 'elasticsearch'
@@ -166,13 +198,15 @@
       $service_pattern    = $service_name
       $defaults_location  = '/etc/default'
       if versioncmp($::operatingsystemmajrelease, '8') >= 0 {
-        $init_template     = 'elasticsearch.systemd.erb'
-        $service_providers = 'systemd'
-        $pid_dir           = '/var/run/elasticsearch'
+        $init_template        = 'elasticsearch.systemd.erb'
+        $service_providers    = 'systemd'
+        $systemd_service_path = '/lib/systemd/system'
+        $pid_dir              = '/var/run/elasticsearch'
       } else {
-        $init_template     = 'elasticsearch.Debian.erb'
-        $service_providers = [ 'init' ]
-        $pid_dir           = false
+        $init_template        = 'elasticsearch.Debian.erb'
+        $pid_dir              = false
+        $service_providers    = [ 'init' ]
+        $systemd_service_path = undef
       }
     }
     'Ubuntu': {
@@ -183,38 +217,86 @@
       $defaults_location  = '/etc/default'
 
       if versioncmp($::operatingsystemmajrelease, '15') >= 0 {
-        $init_template     = 'elasticsearch.systemd.erb'
-        $service_providers = 'systemd'
-        $pid_dir           = '/var/run/elasticsearch'
+        $init_template        = 'elasticsearch.systemd.erb'
+        $service_providers    = 'systemd'
+        $systemd_service_path = '/lib/systemd/system'
+        $pid_dir              = '/var/run/elasticsearch'
       } else {
-        $init_template     = 'elasticsearch.Debian.erb'
-        $service_providers = [ 'init' ]
-        $pid_dir           = false
+        $init_template        = 'elasticsearch.Debian.erb'
+        $pid_dir              = false
+        $service_providers    = [ 'init' ]
+        $systemd_service_path = undef
       }
     }
     'Darwin': {
-      $service_name       = 'FIXME/TODO'
-      $service_hasrestart = true
-      $service_hasstatus  = true
-      $service_pattern    = $service_name
-      $service_providers  = 'launchd'
-      $defaults_location  = false
-      $pid_dir            = false
+      $service_name         = 'FIXME/TODO'
+      $service_hasrestart   = true
+      $service_hasstatus    = true
+      $service_pattern      = $service_name
+      $service_providers    = 'launchd'
+      $systemd_service_path = undef
+      $defaults_location    = false
+      $pid_dir              = false
     }
     'OpenSuSE': {
+      $service_name          = 'elasticsearch'
+      $service_hasrestart    = true
+      $service_hasstatus     = true
+      $service_pattern       = $service_name
+      $service_providers     = 'systemd'
+      $defaults_location     = '/etc/sysconfig'
+      $init_template         = 'elasticsearch.systemd.erb'
+      $pid_dir               = '/var/run/elasticsearch'
+      if versioncmp($::operatingsystemmajrelease, '12') <= 0 {
+        $systemd_service_path = '/lib/systemd/system'
+      } else {
+        $systemd_service_path = '/usr/lib/systemd/system'
+      }
+    }
+    'SLES': {
       $service_name       = 'elasticsearch'
       $service_hasrestart = true
       $service_hasstatus  = true
       $service_pattern    = $service_name
-      $service_providers  = 'systemd'
       $defaults_location  = '/etc/sysconfig'
-      $init_template      = 'elasticsearch.systemd.erb'
-      $pid_dir            = '/var/run/elasticsearch'
+
+      if versioncmp($::operatingsystemmajrelease, '12') >= 0 {
+        $init_template        = 'elasticsearch.systemd.erb'
+        $service_providers    = 'systemd'
+        $systemd_service_path = '/usr/lib/systemd/system'
+        $pid_dir              = '/var/run/elasticsearch'
+      } else {
+        $init_template        = 'elasticsearch.SLES.erb'
+        $service_providers    = [ 'init' ]
+        $systemd_service_path = undef
+        $pid_dir              = false
+      }
+    }
+    'Gentoo': {
+      $service_name         = 'elasticsearch'
+      $service_hasrestart   = true
+      $service_hasstatus    = true
+      $service_pattern      = $service_name
+      $service_providers    = 'openrc'
+      $systemd_service_path = undef
+      $defaults_location    = '/etc/conf.d'
+      $init_template        = 'elasticsearch.openrc.erb'
+      $pid_dir              = '/run/elasticsearch'
+    }
+    'OpenBSD': {
+      $service_name         = 'elasticsearch'
+      $service_hasrestart   = true
+      $service_hasstatus    = true
+      $service_pattern      = undef
+      $service_providers    = 'openbsd'
+      $systemd_service_path = undef
+      $defaults_location    = undef
+      $init_template        = 'elasticsearch.OpenBSD.erb'
+      $pid_dir              = '/var/run/elasticsearch'
     }
     default: {
       fail("\"${module_name}\" provides no service parameters
             for \"${::operatingsystem}\"")
     }
   }
-
 }
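params.pp above adds defaults for file-based log rolling and flips `restart_on_change` to false. A sketch of overriding the new logging defaults, assuming they are surfaced as class parameters with the same names:

# Sketch only: tune the new rolling-file logging defaults.
class { 'elasticsearch':
  file_rolling_type             => 'rollingFile',
  rolling_file_max_backup_index => 5,       # keep five rotated files
  rolling_file_max_file_size    => '100MB',
}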
--- a/dev/provisioning/modules/elasticsearch/manifests/plugin.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/plugin.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -9,10 +9,12 @@
 # === Parameters
 #
 # [*module_dir*]
-#   Directory name where the module will be installed
+#   Directory name where the module has been installed.
+#   This is automatically generated based on the module name.
+#   Specify a value here to override the auto-generated value.
 #   Value type is string
 #   Default value: None
-#   This variable is deprecated
+#   This variable is optional
 #
 # [*ensure*]
 #   Whether the plugin will be installed or removed.
@@ -46,6 +48,18 @@
 #   Default value: None
 #   This variable is optional
 #
+# [*proxy_username*]
+#   Proxy auth username to use when installing the plugin
+#   Value type is string
+#   Default value: None
+#   This variable is optional
+#
+# [*proxy_password*]
+#   Proxy auth password to use when installing the plugin
+#   Value type is string
+#   Default value: None
+#   This variable is optional
+#
 # [*instances*]
 #   Specify all the instances related
 #   value type is string or array
@@ -68,43 +82,57 @@
 # * Richard Pijnenburg <mailto:richard.pijnenburg@elasticsearch.com>
 #
 define elasticsearch::plugin(
-    $instances,
-    $module_dir  = undef,
-    $ensure      = 'present',
-    $url         = undef,
-    $source      = undef,
-    $proxy_host  = undef,
-    $proxy_port  = undef,
+  $instances      = undef,
+  $module_dir     = undef,
+  $ensure         = 'present',
+  $url            = undef,
+  $source         = undef,
+  $proxy_host     = undef,
+  $proxy_port     = undef,
+  $proxy_username = undef,
+  $proxy_password = undef,
 ) {
 
   include elasticsearch
 
-  $notify_service = $elasticsearch::restart_on_change ? {
-    false   => undef,
-    default => Elasticsearch::Service[$instances],
+  case $ensure {
+    'installed', 'present': {
+      if empty($instances) {
+        fail('no $instances defined')
+      }
+
+      $_file_ensure = 'directory'
+      $_file_before = []
+    }
+    'absent': {
+      $_file_ensure = $ensure
+      $_file_before = File[$elasticsearch::plugindir]
+    }
+    default: {
+      fail("'${ensure}' is not a valid ensure parameter value")
+    }
   }
 
-  if ($module_dir != undef) {
-    warning("module_dir settings is deprecated for plugin ${name}. The directory is now auto detected.")
+  if ! empty($instances) and $elasticsearch::restart_plugin_change {
+    Elasticsearch_plugin[$name] {
+      notify +> Elasticsearch::Instance[$instances],
+    }
   }
 
   # set proxy by override or parse and use proxy_url from
   # elasticsearch::proxy_url or use no proxy at all
-  
+
   if ($proxy_host != undef and $proxy_port != undef) {
-    $proxy = "-DproxyPort=${proxy_port} -DproxyHost=${proxy_host}"
-  }
-  elsif ($elasticsearch::proxy_url != undef) {
-    $proxy_host_from_url = regsubst($elasticsearch::proxy_url, '(http|https)://([^:]+)(|:\d+).+', '\2')
-    $proxy_port_from_url = regsubst($elasticsearch::proxy_url, '(?:http|https)://[^:/]+(?::([0-9]+))?(?:/.*)?', '\1')
-    
-    # validate parsed values before using them
-    if (is_string($proxy_host_from_url) and is_integer($proxy_port_from_url)) {
-      $proxy = "-DproxyPort=${proxy_port_from_url} -DproxyHost=${proxy_host_from_url}"
+    if ($proxy_username != undef and $proxy_password != undef) {
+      $_proxy_auth = "${proxy_username}:${proxy_password}@"
+    } else {
+      $_proxy_auth = undef
     }
-  }
-  else {
-    $proxy = undef
+    $_proxy = "http://${_proxy_auth}${proxy_host}:${proxy_port}"
+  } elsif ($elasticsearch::proxy_url != undef) {
+    $_proxy = $elasticsearch::proxy_url
+  } else {
+    $_proxy = undef
   }
 
   if ($source != undef) {
@@ -117,31 +145,31 @@
     file { $file_source:
       ensure => 'file',
       source => $source,
+      before => Elasticsearch_plugin[$name],
     }
 
-  } elsif ($url != undef) {
+  } else {
+    $file_source = undef
+  }
+
+  if ($url != undef) {
     validate_string($url)
   }
 
-  case $ensure {
-    'installed', 'present': {
+  $_module_dir = es_plugin_name($module_dir, $name)
 
-      elasticsearch_plugin { $name:
-        ensure     => 'present',
-        source     => $file_source,
-        url        => $url,
-        proxy_args => $proxy,
-        notify     => $notify_service,
-      }
-
-    }
-    'absent': {
-      elasticsearch_plugin { $name:
-        ensure => absent,
-      }
-    }
-    default: {
-      fail("${ensure} is not a valid ensure command.")
-    }
+  elasticsearch_plugin { $name:
+    ensure      => $ensure,
+    source      => $file_source,
+    url         => $url,
+    proxy       => $_proxy,
+    plugin_dir  => $::elasticsearch::plugindir,
+    plugin_path => $module_dir,
+  } ->
+  file { "${elasticsearch::plugindir}/${_module_dir}":
+    ensure  => $_file_ensure,
+    mode    => 'o+Xr',
+    recurse => true,
+    before  => $_file_before,
   }
 }
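With the reworked `elasticsearch::plugin` define, the proxy settings collapse into a single URL that can carry basic-auth credentials. A hedged usage sketch; plugin name, instance name and proxy values are illustrative:

# Sketch: install a plugin for instance 'es-01' through an authenticating proxy.
elasticsearch::plugin { 'x-pack':
  instances      => ['es-01'],
  proxy_host     => 'proxy.example.com',
  proxy_port     => 3128,
  proxy_username => 'proxyuser',
  proxy_password => 'proxypass',
}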
--- a/dev/provisioning/modules/elasticsearch/manifests/repo.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/repo.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -26,8 +26,43 @@
 class elasticsearch::repo {
 
   Exec {
-    path      => [ '/bin', '/usr/bin', '/usr/local/bin' ],
-    cwd       => '/',
+    path => [ '/bin', '/usr/bin', '/usr/local/bin' ],
+    cwd  => '/',
+  }
+
+  if $elasticsearch::ensure == 'present' {
+    if versioncmp($elasticsearch::repo_version, '5.0') >= 0 {
+      $_repo_url = 'https://artifacts.elastic.co/packages'
+      case $::osfamily {
+        'Debian': {
+          $_repo_path = 'apt'
+        }
+        default: {
+          $_repo_path = 'yum'
+        }
+      }
+    } else {
+      $_repo_url = 'http://packages.elastic.co/elasticsearch'
+      case $::osfamily {
+        'Debian': {
+          $_repo_path = 'debian'
+        }
+        default: {
+          $_repo_path = 'centos'
+        }
+      }
+    }
+
+    $_baseurl = "${_repo_url}/${elasticsearch::repo_version}/${_repo_path}"
+  } else {
+    case $::osfamily {
+      'Debian': {
+        $_baseurl = undef
+      }
+      default: {
+        $_baseurl = 'absent'
+      }
+    }
   }
 
   case $::osfamily {
@@ -36,38 +71,71 @@
       Class['apt::update'] -> Package[$elasticsearch::package_name]
 
       apt::source { 'elasticsearch':
-        location    => "http://packages.elastic.co/elasticsearch/${elasticsearch::repo_version}/debian",
-        release     => 'stable',
-        repos       => 'main',
-        key         => $::elasticsearch::repo_key_id,
-        key_source  => $::elasticsearch::repo_key_source,
-        include_src => false,
+        ensure   => $elasticsearch::ensure,
+        location => $_baseurl,
+        release  => 'stable',
+        repos    => 'main',
+        key      => {
+          'id'     => $::elasticsearch::repo_key_id,
+          'source' => $::elasticsearch::repo_key_source,
+        },
+        include  => {
+          'src' => false,
+          'deb' => true,
+        },
+        pin      => $elasticsearch::repo_priority,
       }
     }
     'RedHat', 'Linux': {
+      # Versions prior to 3.5.1 have issues with this param
+      # See: https://tickets.puppetlabs.com/browse/PUP-2163
+      if versioncmp($::puppetversion, '3.5.1') >= 0 {
+        Yumrepo['elasticsearch'] {
+          ensure => $elasticsearch::ensure,
+        }
+      }
       yumrepo { 'elasticsearch':
         descr    => 'elasticsearch repo',
-        baseurl  => "http://packages.elastic.co/elasticsearch/${elasticsearch::repo_version}/centos",
+        baseurl  => $_baseurl,
         gpgcheck => 1,
         gpgkey   => $::elasticsearch::repo_key_source,
         enabled  => 1,
+        proxy    => $::elasticsearch::repo_proxy,
+        priority => $elasticsearch::repo_priority,
+      } ~>
+      exec { 'elasticsearch_yumrepo_yum_clean':
+        command     => 'yum clean metadata expire-cache --disablerepo="*" --enablerepo="elasticsearch"',
+        refreshonly => true,
+        returns     => [0, 1],
       }
     }
     'Suse': {
+      if $::operatingsystem == 'SLES' and versioncmp($::operatingsystemmajrelease, '11') <= 0 {
+        # Older versions of SLES do not ship with rpmkeys
+        $_import_cmd = "rpm --import ${::elasticsearch::repo_key_source}"
+      } else {
+        $_import_cmd = "rpmkeys --import ${::elasticsearch::repo_key_source}"
+      }
+
       exec { 'elasticsearch_suse_import_gpg':
-        command => "rpmkeys --import ${::elasticsearch::repo_key_source}",
-        unless  => "test $(rpm -qa gpg-pubkey | grep -i '${::elasticsearch::repo_key_id}' | wc -l) -eq 1 ",
-        notify  => [ Zypprepo['elasticsearch'] ],
+        command => $_import_cmd,
+        unless  =>
+          "test $(rpm -qa gpg-pubkey | grep -i 'D88E42B4' | wc -l) -eq 1",
+        notify  => Zypprepo['elasticsearch'],
       }
 
       zypprepo { 'elasticsearch':
-        baseurl     => "http://packages.elastic.co/elasticsearch/${elasticsearch::repo_version}/centos",
+        baseurl     => $_baseurl,
         enabled     => 1,
         autorefresh => 1,
         name        => 'elasticsearch',
         gpgcheck    => 1,
         gpgkey      => $::elasticsearch::repo_key_source,
         type        => 'yum',
+      } ~>
+      exec { 'elasticsearch_zypper_refresh_elasticsearch':
+        command     => 'zypper refresh elasticsearch',
+        refreshonly => true,
       }
     }
     default: {
@@ -75,32 +143,4 @@
     }
   }
 
-  # Package pinning
-
-    case $::osfamily {
-      'Debian': {
-        include ::apt
-
-        if ($elasticsearch::package_pin == true and $elasticsearch::version != false) {
-          apt::pin { $elasticsearch::package_name:
-            ensure   => 'present',
-            packages => $elasticsearch::package_name,
-            version  => $elasticsearch::version,
-            priority => 1000,
-          }
-        }
-
-      }
-      'RedHat', 'Linux': {
-
-        if ($elasticsearch::package_pin == true and $elasticsearch::version != false) {
-          yum::versionlock { "0:elasticsearch-${elasticsearch::pkg_version}.noarch":
-            ensure => 'present',
-          }
-        }
-      }
-      default: {
-        warning("Unable to pin package for OSfamily \"${::osfamily}\".")
-      }
-    }
 }
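The repository class now derives its base URL from `repo_version`: values of 5.0 or later select the artifacts.elastic.co layout, older values keep packages.elastic.co. A sketch of opting into the 5.x repository; the '5.x' label is an assumption about the conventional value, not taken from this changeset:

# Sketch: manage the package repository for the 5.x series.
class { 'elasticsearch':
  manage_repo  => true,
  repo_version => '5.x',  # illustrative; anything that compares >= '5.0' picks the new URL
}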
--- a/dev/provisioning/modules/elasticsearch/manifests/service.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/service.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -50,6 +50,9 @@
 # [*init_template*]
 #   Service file as a template
 #
+# [*service_flags*]
+#   Service flags, used on OpenBSD for service configuration
+#
 # === Authors
 #
 # * Richard Pijnenburg <mailto:richard.pijnenburg@elasticsearch.com>
@@ -60,6 +63,7 @@
   $init_defaults_file = undef,
   $init_defaults      = undef,
   $init_template      = undef,
+  $service_flags      = undef,
 ) {
 
   case $elasticsearch::real_service_provider {
@@ -73,6 +77,14 @@
         init_template      => $init_template,
       }
     }
+    'openbsd': {
+      elasticsearch::service::openbsd { $name:
+        ensure        => $ensure,
+        status        => $status,
+        init_template => $init_template,
+        service_flags => $service_flags,
+      }
+    }
     'systemd': {
       elasticsearch::service::systemd { $name:
         ensure             => $ensure,
@@ -82,6 +94,15 @@
         init_template      => $init_template,
       }
     }
+    'openrc': {
+      elasticsearch::service::openrc { $name:
+        ensure             => $ensure,
+        status             => $status,
+        init_defaults_file => $init_defaults_file,
+        init_defaults      => $init_defaults,
+        init_template      => $init_template,
+      }
+    }
     default: {
       fail("Unknown service provider ${elasticsearch::real_service_provider}")
     }
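elasticsearch::service now dispatches to `openbsd` and `openrc` providers in addition to init and systemd, and forwards the new `service_flags` parameter. The define is normally reached through `elasticsearch::instance`; a direct declaration is sketched here only to show the new parameter, and the flag value is illustrative.

# Sketch: OpenBSD service with explicit rc flags.
elasticsearch::service { 'es-01':
  ensure        => 'present',
  status        => 'enabled',
  service_flags => '-d',  # illustrative flags passed through to rc.d
}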
--- a/dev/provisioning/modules/elasticsearch/manifests/service/init.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/service/init.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -107,13 +107,13 @@
 
   }
 
-  $notify_service = $elasticsearch::restart_on_change ? {
+  $notify_service = $elasticsearch::restart_config_change ? {
     true  => Service["elasticsearch-instance-${name}"],
     false => undef,
   }
 
 
-  if ( $status != 'unmanaged' and $ensure == 'present' ) {
+  if ( $ensure == 'present' ) {
 
     # defaults file content. Either from a hash or file
     if ($init_defaults_file != undef) {
@@ -121,17 +121,19 @@
         ensure => $ensure,
         source => $init_defaults_file,
         owner  => 'root',
-        group  => 'root',
+        group  => '0',
         mode   => '0644',
         before => Service["elasticsearch-instance-${name}"],
         notify => $notify_service,
       }
 
-    } elsif ($init_defaults != undef and is_hash($init_defaults) ) {
+    } else {
 
-      if(has_key($init_defaults, 'ES_USER')) {
-        if($init_defaults['ES_USER'] != $elasticsearch::elasticsearch_user) {
-          fail('Found ES_USER setting for init_defaults but is not same as elasticsearch_user setting. Please use elasticsearch_user setting.')
+      if ($init_defaults != undef and is_hash($init_defaults) ) {
+        if(has_key($init_defaults, 'ES_USER')) {
+          if($init_defaults['ES_USER'] != $elasticsearch::elasticsearch_user) {
+            fail('Found ES_USER setting for init_defaults but is not same as elasticsearch_user setting. Please use elasticsearch_user setting.')
+          }
         }
       }
 
@@ -155,7 +157,7 @@
         ensure  => $ensure,
         content => template($init_template),
         owner   => 'root',
-        group   => 'root',
+        group   => '0',
         mode    => '0755',
         before  => Service["elasticsearch-instance-${name}"],
         notify  => $notify_service,
@@ -163,7 +165,7 @@
 
     }
 
-  } elsif ($status != 'unmanaged') {
+  } else {
 
     file { "/etc/init.d/elasticsearch-${name}":
       ensure    => 'absent',
@@ -177,19 +179,15 @@
 
   }
 
-
-  if ( $status != 'unmanaged') {
-
-    # action
-    service { "elasticsearch-instance-${name}":
-      ensure     => $service_ensure,
-      enable     => $service_enable,
-      name       => "elasticsearch-${name}",
-      hasstatus  => $elasticsearch::params::service_hasstatus,
-      hasrestart => $elasticsearch::params::service_hasrestart,
-      pattern    => $elasticsearch::params::service_pattern,
-    }
-
+  # action
+  service { "elasticsearch-instance-${name}":
+    ensure     => $service_ensure,
+    enable     => $service_enable,
+    name       => "elasticsearch-${name}",
+    hasstatus  => $elasticsearch::params::service_hasstatus,
+    hasrestart => $elasticsearch::params::service_hasrestart,
+    pattern    => $elasticsearch::params::service_pattern,
   }
 
+
 }
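The init provider (and the systemd provider further below) now keys restarts off `restart_config_change` instead of the old `restart_on_change`. A minimal opt-in sketch, assuming the flag is exposed as a top-level class parameter of the same name:

# Sketch: re-enable restarts when instance configuration changes.
class { 'elasticsearch':
  restart_config_change => true,
}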
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/manifests/service/openbsd.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,156 @@
+# == Define: elasticsearch::service::openbsd
+#
+# This class exists to coordinate all service management related actions,
+# functionality and logical units in a central place.
+#
+# <b>Note:</b> "service" is the Puppet term and type for background processes
+# in general and is used in a platform-independent way. E.g. "service" means
+# "daemon" in relation to Unix-like systems.
+#
+#
+# === Parameters
+#
+# [*ensure*]
+#   String. Controls if the managed resources shall be <tt>present</tt> or
+#   <tt>absent</tt>. If set to <tt>absent</tt>:
+#   * The managed software packages are being uninstalled.
+#   * Any traces of the packages will be purged as well as possible. This may
+#     include existing configuration files. The exact behavior is provider
+#     dependent. Q.v.:
+#     * Puppet type reference: {package, "purgeable"}[http://j.mp/xbxmNP]
+#     * {Puppet's package provider source code}[http://j.mp/wtVCaL]
+#   * System modifications (if any) will be reverted as well as possible
+#     (e.g. removal of created users, services, changed log settings, ...).
+#   * This is thus destructive and should be used with care.
+#   Defaults to <tt>present</tt>.
+#
+# [*status*]
+#   String to define the status of the service. Possible values:
+#   * <tt>enabled</tt>: Service is running and will be started at boot time.
+#   * <tt>disabled</tt>: Service is stopped and will not be started at boot
+#     time.
+#   * <tt>running</tt>: Service is running but will not be started at boot time.
+#     You can use this to start a service on the first Puppet run instead of
+#     the system startup.
+#   * <tt>unmanaged</tt>: Service will not be started at boot time and Puppet
+#     does not care whether the service is running or not. For example, this may
+#     be useful if a cluster management software is used to decide when to start
+#     the service plus assuring it is running on the desired node.
+#   Defaults to <tt>enabled</tt>. The singular form ("service") is used for the
+#   sake of convenience. Of course, the defined status affects all services if
+#   more than one is managed (see <tt>service.pp</tt> to check if this is the
+#   case).
+#
+# [*pid_dir*]
+#   String, directory where to store the service pid file
+#
+# [*init_template*]
+#   Service file as a template
+#
+# [*service_flags*]
+#   String, flags to pass to the service
+#
+# === Authors
+#
+# * Richard Pijnenburg <mailto:richard.pijnenburg@elasticsearch.com>
+#
+define elasticsearch::service::openbsd(
+  $ensure             = $elasticsearch::ensure,
+  $status             = $elasticsearch::status,
+  $pid_dir            = $elasticsearch::pid_dir,
+  $init_template      = $elasticsearch::init_template,
+  $service_flags      = undef,
+) {
+
+  #### Service management
+
+  # set params: in operation
+  if $ensure == 'present' {
+
+    case $status {
+      # make sure service is currently running, start it on boot
+      'enabled': {
+        $service_ensure = 'running'
+        $service_enable = true
+      }
+      # make sure service is currently stopped, do not start it on boot
+      'disabled': {
+        $service_ensure = 'stopped'
+        $service_enable = false
+      }
+      # make sure service is currently running, do not start it on boot
+      'running': {
+        $service_ensure = 'running'
+        $service_enable = false
+      }
+      # do not start service on boot, do not care whether currently running
+      # or not
+      'unmanaged': {
+        $service_ensure = undef
+        $service_enable = false
+      }
+      # unknown status
+      # note: don't forget to update the parameter check in init.pp if you
+      #       add a new or change an existing status.
+      default: {
+        fail("\"${status}\" is an unknown service status value")
+      }
+    }
+
+  # set params: removal
+  } else {
+
+    # make sure the service is stopped and disabled (the removal itself will be
+    # done by package.pp)
+    $service_ensure = 'stopped'
+    $service_enable = false
+
+  }
+
+  $notify_service = $elasticsearch::restart_config_change ? {
+    true  => Service["elasticsearch-instance-${name}"],
+    false => undef,
+  }
+
+  if ( $status != 'unmanaged' and $ensure == 'present' ) {
+
+    # init file from template
+    if ($init_template != undef) {
+
+      file { "/etc/rc.d/elasticsearch_${name}":
+        ensure  => $ensure,
+        content => template($init_template),
+        owner   => 'root',
+        group   => '0',
+        mode    => '0555',
+        before  => Service["elasticsearch-instance-${name}"],
+        notify  => $notify_service,
+      }
+
+    }
+
+  } elsif ($status != 'unmanaged') {
+
+    file { "/etc/rc.d/elasticsearch_${name}":
+      ensure    => 'absent',
+      subscribe => Service["elasticsearch-instance-${name}"],
+    }
+
+  }
+
+  if ( $status != 'unmanaged') {
+
+    # action
+    service { "elasticsearch-instance-${name}":
+      ensure     => $service_ensure,
+      enable     => $service_enable,
+      name       => "elasticsearch_${name}",
+      flags      => $service_flags,
+      hasstatus  => $elasticsearch::params::service_hasstatus,
+      hasrestart => $elasticsearch::params::service_hasrestart,
+      pattern    => $elasticsearch::params::service_pattern,
+    }
+
+  }
+
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/manifests/service/openrc.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,195 @@
+# == Define: elasticsearch::service::openrc
+#
+# This class exists to coordinate all service management related actions,
+# functionality and logical units in a central place.
+#
+# <b>Note:</b> "service" is the Puppet term and type for background processes
+# in general and is used in a platform-independent way. E.g. "service" means
+# "daemon" in relation to Unix-like systems.
+#
+#
+# === Parameters
+#
+# [*ensure*]
+#   String. Controls if the managed resources shall be <tt>present</tt> or
+#   <tt>absent</tt>. If set to <tt>absent</tt>:
+#   * The managed software packages are being uninstalled.
+#   * Any traces of the packages will be purged as well as possible. This may
+#     include existing configuration files. The exact behavior is provider
+#     dependent. Q.v.:
+#     * Puppet type reference: {package, "purgeable"}[http://j.mp/xbxmNP]
+#     * {Puppet's package provider source code}[http://j.mp/wtVCaL]
+#   * System modifications (if any) will be reverted as well as possible
+#     (e.g. removal of created users, services, changed log settings, ...).
+#   * This is thus destructive and should be used with care.
+#   Defaults to <tt>present</tt>.
+#
+# [*status*]
+#   String to define the status of the service. Possible values:
+#   * <tt>enabled</tt>: Service is running and will be started at boot time.
+#   * <tt>disabled</tt>: Service is stopped and will not be started at boot
+#     time.
+#   * <tt>running</tt>: Service is running but will not be started at boot time.
+#     You can use this to start a service on the first Puppet run instead of
+#     the system startup.
+#   * <tt>unmanaged</tt>: Service will not be started at boot time and Puppet
+#     does not care whether the service is running or not. For example, this may
+#     be useful if a cluster management software is used to decide when to start
+#     the service plus assuring it is running on the desired node.
+#   Defaults to <tt>enabled</tt>. The singular form ("service") is used for the
+#   sake of convenience. Of course, the defined status affects all services if
+#   more than one is managed (see <tt>service.pp</tt> to check if this is the
+#   case).
+#
+# [*init_defaults*]
+#   Defaults file content in hash representation
+#
+# [*init_defaults_file*]
+#   Defaults file as puppet resource
+#
+# [*init_template*]
+#   Service file as a template
+#
+# === Authors
+#
+# * Richard Pijnenburg <mailto:richard.pijnenburg@elasticsearch.com>
+#
+define elasticsearch::service::openrc(
+  $ensure             = $elasticsearch::ensure,
+  $status             = $elasticsearch::status,
+  $init_defaults_file = undef,
+  $init_defaults      = undef,
+  $init_template      = undef,
+) {
+
+  #### Service management
+
+  # set params: in operation
+  if $ensure == 'present' {
+
+    case $status {
+      # make sure service is currently running, start it on boot
+      'enabled': {
+        $service_ensure = 'running'
+        $service_enable = true
+      }
+      # make sure service is currently stopped, do not start it on boot
+      'disabled': {
+        $service_ensure = 'stopped'
+        $service_enable = false
+      }
+      # make sure service is currently running, do not start it on boot
+      'running': {
+        $service_ensure = 'running'
+        $service_enable = false
+      }
+      # do not start service on boot, do not care whether currently running
+      # or not
+      'unmanaged': {
+        $service_ensure = undef
+        $service_enable = false
+      }
+      # unknown status
+      # note: don't forget to update the parameter check in init.pp if you
+      #       add a new or change an existing status.
+      default: {
+        fail("\"${status}\" is an unknown service status value")
+      }
+    }
+
+  # set params: removal
+  } else {
+
+    # make sure the service is stopped and disabled (the removal itself will be
+    # done by package.pp)
+    $service_ensure = 'stopped'
+    $service_enable = false
+
+  }
+
+  $notify_service = $elasticsearch::restart_config_change ? {
+    true  => Service["elasticsearch-instance-${name}"],
+    false => undef,
+  }
+
+
+  if ( $status != 'unmanaged' and $ensure == 'present' ) {
+
+    # defaults file content. Either from a hash or file
+    if ($init_defaults_file != undef) {
+      file { "${elasticsearch::params::defaults_location}/elasticsearch.${name}":
+        ensure => $ensure,
+        source => $init_defaults_file,
+        owner  => 'root',
+        group  => '0',
+        mode   => '0644',
+        before => Service["elasticsearch-instance-${name}"],
+        notify => $notify_service,
+      }
+
+    } elsif ($init_defaults != undef and is_hash($init_defaults) ) {
+
+      if(has_key($init_defaults, 'ES_USER')) {
+        if($init_defaults['ES_USER'] != $elasticsearch::elasticsearch_user) {
+          fail('Found ES_USER setting for init_defaults but is not same as elasticsearch_user setting. Please use elasticsearch_user setting.')
+        }
+      }
+
+      $init_defaults_pre_hash = { 'ES_USER' => $elasticsearch::elasticsearch_user, 'ES_GROUP' => $elasticsearch::elasticsearch_group, 'MAX_OPEN_FILES' => '65535' }
+      $new_init_defaults = merge($init_defaults_pre_hash, $init_defaults)
+
+      augeas { "defaults_${name}":
+        incl    => "${elasticsearch::params::defaults_location}/elasticsearch.${name}",
+        lens    => 'Shellvars.lns',
+        changes => template("${module_name}/etc/sysconfig/defaults.erb"),
+        before  => Service["elasticsearch-instance-${name}"],
+        notify  => $notify_service,
+      }
+
+    }
+
+    # init file from template
+    if ($init_template != undef) {
+
+      file { "/etc/init.d/elasticsearch.${name}":
+        ensure  => $ensure,
+        content => template($init_template),
+        owner   => 'root',
+        group   => '0',
+        mode    => '0755',
+        before  => Service["elasticsearch-instance-${name}"],
+        notify  => $notify_service,
+      }
+
+    }
+
+  } elsif ($status != 'unmanaged') {
+
+    file { "/etc/init.d/elasticsearch.${name}":
+      ensure    => 'absent',
+      subscribe => Service["elasticsearch-instance-${name}"],
+    }
+
+    file { "${elasticsearch::params::defaults_location}/elasticsearch.${name}":
+      ensure    => 'absent',
+      subscribe => Service["elasticsearch.${name}"],
+    }
+
+  }
+
+
+  if ( $status != 'unmanaged') {
+
+    # action
+    service { "elasticsearch-instance-${name}":
+      ensure     => $service_ensure,
+      enable     => $service_enable,
+      name       => "elasticsearch.${name}",
+      hasstatus  => $elasticsearch::params::service_hasstatus,
+      hasrestart => $elasticsearch::params::service_hasrestart,
+      pattern    => $elasticsearch::params::service_pattern,
+    }
+
+  }
+
+}
--- a/dev/provisioning/modules/elasticsearch/manifests/service/systemd.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/service/systemd.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -103,12 +103,12 @@
     $service_enable = false
   }
 
-  $notify_service = $elasticsearch::restart_on_change ? {
+  $notify_service = $elasticsearch::restart_config_change ? {
     true  => [ Exec["systemd_reload_${name}"], Service["elasticsearch-instance-${name}"] ],
     false => Exec["systemd_reload_${name}"]
   }
 
-  if ( $status != 'unmanaged' and $ensure == 'present' ) {
+  if ( $ensure == 'present' ) {
 
     # defaults file content. Either from a hash or file
     if ($init_defaults_file != undef) {
@@ -116,7 +116,7 @@
         ensure => $ensure,
         source => $init_defaults_file,
         owner  => 'root',
-        group  => 'root',
+        group  => '0',
         mode   => '0644',
         before => Service["elasticsearch-instance-${name}"],
         notify => $notify_service,
@@ -163,7 +163,7 @@
         $memlock = undef
       }
 
-      file { "/lib/systemd/system/elasticsearch-${name}.service":
+      file { "${elasticsearch::params::systemd_service_path}/elasticsearch-${name}.service":
         ensure  => $ensure,
         content => template($init_template),
         before  => Service["elasticsearch-instance-${name}"],
@@ -174,9 +174,9 @@
 
   $service_require = Exec["systemd_reload_${name}"]
 
-  } elsif($status != 'unmanaged') {
+  } else {
 
-    file { "/lib/systemd/system/elasticsearch-${name}.service":
+    file { "${elasticsearch::params::systemd_service_path}/elasticsearch-${name}.service":
       ensure    => 'absent',
       subscribe => Service["elasticsearch-instance-${name}"],
       notify    => Exec["systemd_reload_${name}"],
@@ -197,20 +197,16 @@
     refreshonly => true,
   }
 
-  if ($status != 'unmanaged') {
-
-    # action
-    service { "elasticsearch-instance-${name}":
-      ensure     => $service_ensure,
-      enable     => $service_enable,
-      name       => "elasticsearch-${name}.service",
-      hasstatus  => $elasticsearch::params::service_hasstatus,
-      hasrestart => $elasticsearch::params::service_hasrestart,
-      pattern    => $elasticsearch::params::service_pattern,
-      provider   => 'systemd',
-      require    => $service_require,
-    }
-
+  # action
+  service { "elasticsearch-instance-${name}":
+    ensure     => $service_ensure,
+    enable     => $service_enable,
+    name       => "elasticsearch-${name}.service",
+    hasstatus  => $elasticsearch::params::service_hasstatus,
+    hasrestart => $elasticsearch::params::service_hasrestart,
+    pattern    => $elasticsearch::params::service_pattern,
+    provider   => 'systemd',
+    require    => $service_require,
   }
 
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/manifests/shield/role.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,76 @@
+# == Define: elasticsearch::shield::role
+#
+# Manage shield roles.
+#
+# === Parameters
+#
+# [*ensure*]
+#   Whether the role should be present or not.
+#   Set to 'absent' to ensure a role is not present.
+#   Value type is string
+#   Default value: present
+#   This variable is optional
+#
+# [*privileges*]
+#   A hash of permissions defined for the role. Valid privilege settings can
+#   be found in the Shield documentation:
+#   https://www.elastic.co/guide/en/shield/current/index.html
+#   Value type is hash
+#   Default value: {}
+#
+# [*mappings*]
+#   A list of optional mappings defined for this role.
+#   Value type is array
+#   Default value: []
+#
+# === Examples
+#
+# # Creates and manages the role 'power_user' mapped to an LDAP group.
+# elasticsearch::shield::role { 'power_user':
+#   privileges => {
+#     'cluster' => 'monitor',
+#     'indices' => {
+#       '*' => 'all',
+#     },
+#   },
+#   mappings => [
+#     "cn=users,dc=example,dc=com",
+#   ],
+# }
+#
+# === Authors
+#
+# * Tyler Langlois <mailto:tyler@elastic.co>
+#
+define elasticsearch::shield::role (
+  $ensure     = 'present',
+  $privileges = {},
+  $mappings   = [],
+) {
+  validate_string($ensure)
+  validate_hash($privileges)
+  validate_array($mappings)
+  validate_slength($name, 30, 1)
+
+  if empty($privileges) or $ensure == 'absent' {
+    $_role_ensure = 'absent'
+  } else {
+    $_role_ensure = $ensure
+  }
+
+  if empty($mappings) or $ensure == 'absent' {
+    $_mapping_ensure = 'absent'
+  } else {
+    $_mapping_ensure = $ensure
+  }
+
+  elasticsearch_shield_role { $name :
+    ensure     => $_role_ensure,
+    privileges => $privileges,
+  }
+
+  elasticsearch_shield_role_mapping { $name :
+    ensure   => $_mapping_ensure,
+    mappings => $mappings,
+  }
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/manifests/shield/user.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,65 @@
+# == Define: elasticsearch::shield::user
+#
+# Manages shield users.
+#
+# === Parameters
+#
+# [*ensure*]
+#   Whether the user should be present or not.
+#   Set to 'absent' to ensure a user is not installed
+#   Value type is string
+#   Default value: present
+#   This variable is optional
+#
+# [*password*]
+#   Password for the given user. A plaintext password will be managed
+#   with the esusers utility and requires a refresh to update, while
+#   a hashed password from the esusers utility will be managed manually
+#   in the users file.
+#   Value type is string
+#   Default value: undef
+#
+# [*roles*]
+#   A list of roles to which the user should belong.
+#   Value type is array
+#   Default value: []
+#
+# === Examples
+#
+# # Creates and manages a user with membership in the 'logstash'
+# # and 'kibana4' roles.
+# elasticsearch::shield::user { 'bob':
+#   password => 'foobar',
+#   roles    => ['logstash', 'kibana4'],
+# }
+#
+# === Authors
+#
+# * Tyler Langlois <mailto:tyler@elastic.co>
+#
+define elasticsearch::shield::user (
+  $password,
+  $ensure = 'present',
+  $roles  = [],
+) {
+  validate_string($ensure, $password)
+  validate_array($roles)
+
+  if $password =~ /^\$2a\$/ {
+    elasticsearch_shield_user { $name:
+      ensure          => $ensure,
+      hashed_password => $password,
+    }
+  } else {
+    elasticsearch_shield_user { $name:
+      ensure   => $ensure,
+      password => $password,
+      provider => 'esusers',
+    }
+  }
+
+  elasticsearch_shield_user_roles { $name:
+    ensure => $ensure,
+    roles  => $roles,
+  }
+}
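Taken together, the two new shield defines let a role and a member user be managed side by side. A combined sketch; role name, user name and password are illustrative:

# Sketch: a role plus a user that belongs to it.
elasticsearch::shield::role { 'logwriter':
  privileges => {
    'cluster' => 'monitor',
    'indices' => { 'logstash-*' => 'all' },
  },
}

elasticsearch::shield::user { 'logstash_agent':
  password => 'changeme',   # illustrative plaintext password
  roles    => ['logwriter'],
}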
--- a/dev/provisioning/modules/elasticsearch/manifests/template.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/manifests/template.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -1,145 +1,175 @@
 # == Define: elasticsearch::template
 #
-#  This define allows you to insert, update or delete templates that are used within Elasticsearch for the indexes
+#  This define allows you to insert, update or delete Elasticsearch index
+#  templates.
+#
+#  Template content should be defined through either the `content` parameter
+#  (when passing a hash or json string) or the `source` parameter (when passing
+#  the puppet file URI to a template json file).
 #
 # === Parameters
 #
 # [*ensure*]
-#   String. Controls if the managed resources shall be <tt>present</tt> or
-#   <tt>absent</tt>. If set to <tt>absent</tt>:
-#   * The managed software packages are being uninstalled.
-#   * Any traces of the packages will be purged as good as possible. This may
-#     include existing configuration files. The exact behavior is provider
-#     dependent. Q.v.:
-#     * Puppet type reference: {package, "purgeable"}[http://j.mp/xbxmNP]
-#     * {Puppet's package provider source code}[http://j.mp/wtVCaL]
-#   * System modifications (if any) will be reverted as good as possible
-#     (e.g. removal of created users, services, changed log settings, ...).
-#   * This is thus destructive and should be used with care.
-#   Defaults to <tt>present</tt>.
+#   Controls whether the named index template should be present or absent in
+#   the cluster.
+#   Value type is string
+#   Default value: present
 #
 # [*file*]
-#   File path of the template ( json file )
+#   File path of the template (json file). This parameter is deprecated;
+#   use `source` instead.
+#   Value type is string
+#   Default value: undef
+#   This variable is deprecated
+#
+# [*source*]
+#   Source path for the template file. Can be any value similar to `source`
+#   values for `file` resources.
 #   Value type is string
 #   Default value: undef
 #   This variable is optional
 #
 # [*content*]
-#   Contents of the template ( json )
-#   Value type is string
+#   Contents of the template. Can be either a puppet hash or a string
+#   containing JSON.
+#   Value type is string or hash.
 #   Default value: undef
 #   This variable is optional
 #
-# [*host*]
+# [*api_protocol*]
+#   Protocol that should be used to connect to the Elasticsearch API.
+#   Value type is string
+#   Default value inherited from elasticsearch::api_protocol: http
+#   This variable is optional
+#
+# [*api_host*]
 #   Host name or IP address of the ES instance to connect to
 #   Value type is string
-#   Default value: localhost
+#   Default value inherited from $elasticsearch::api_host: localhost
+#   This variable is optional
+#
+# [*api_port*]
+#   Port number of the ES instance to connect to
+#   Value type is number
+#   Default value inherited from $elasticsearch::api_port: 9200
+#   This variable is optional
+#
+# [*api_timeout*]
+#   Timeout period (in seconds) for the Elasticsearch API.
+#   Value type is int
+#   Default value inherited from elasticsearch::api_timeout: 10
+#   This variable is optional
+#
+# [*api_basic_auth_username*]
+#   HTTP basic auth username to use when communicating over the Elasticsearch
+#   API.
+#   Value type is String
+#   Default value inherited from elasticsearch::api_basic_auth_username: undef
 #   This variable is optional
 #
-# [*port*]
-#   Port number of the ES instance to connect to
-#   Value type is number
-#   Default value: 9200
+# [*api_basic_auth_password*]
+#   HTTP basic auth password to use when communicating over the Elasticsearch
+#   API.
+#   Value type is String
+#   Default value inherited from elasticsearch::api_basic_auth_password: undef
+#   This variable is optional
+#
+# [*api_ca_file*]
+#   Path to a CA file which will be used to validate server certs when
+#   communicating with the Elasticsearch API over HTTPS.
+#   Value type is String
+#   Default value inherited from elasticsearch::api_ca_file: undef
+#   This variable is optional
+#
+# [*api_ca_path*]
+#   Path to a directory with CA files which will be used to validate server
+#   certs when communicating with the Elasticsearch API over HTTPS.
+#   Value type is String
+#   Default value inherited from elasticsearch::api_ca_path: undef
+#   This variable is optional
+#
+# [*validate_tls*]
+#   Determines whether the validity of SSL/TLS certificates received from the
+#   Elasticsearch API should be verified or ignored.
+#   Value type is boolean
+#   Default value inherited from elasticsearch::validate_tls: true
 #   This variable is optional
 #
 # === Authors
 #
 # * Richard Pijnenburg <mailto:richard.pijnenburg@elasticsearch.com>
+# * Tyler Langlois <mailto:tyler@elastic.co>
 #
-define elasticsearch::template(
-  $ensure  = 'present',
-  $file    = undef,
-  $content = undef,
-  $host    = 'localhost',
-  $port    = 9200
+define elasticsearch::template (
+  $ensure                  = 'present',
+  $file                    = undef,
+  $source                  = undef,
+  $content                 = undef,
+  $api_protocol            = $elasticsearch::_api_protocol,
+  $api_host                = $elasticsearch::api_host,
+  $api_port                = $elasticsearch::api_port,
+  $api_timeout             = $elasticsearch::api_timeout,
+  $api_basic_auth_username = $elasticsearch::_api_basic_auth_username,
+  $api_basic_auth_password = $elasticsearch::_api_basic_auth_password,
+  $api_ca_file             = $elasticsearch::api_ca_file,
+  $api_ca_path             = $elasticsearch::api_ca_path,
+  $validate_tls            = $elasticsearch::_validate_tls,
 ) {
+  validate_string(
+    $api_protocol,
+    $api_host,
+    $api_basic_auth_username,
+    $api_basic_auth_password
+  )
+  validate_bool($validate_tls)
+
+  if ! ($ensure in ['present', 'absent']) {
+    fail("'${ensure}' is not a valid 'ensure' parameter value")
+  }
+  if ! is_integer($api_port)    { fail('"api_port" is not an integer') }
+  if ! is_integer($api_timeout) { fail('"api_timeout" is not an integer') }
+  if ($api_ca_file != undef) { validate_absolute_path($api_ca_file) }
+  if ($api_ca_path != undef) { validate_absolute_path($api_ca_path) }
+
+  if ($file != undef) {
+    warning('"file" parameter is deprecated; use $source instead')
+    $_source = $file
+  } else {
+    $_source = $source
+  }
+
+  if $_source != undef { validate_string($_source) }
+
+  if $content != undef and is_string($content) {
+    $_content = parsejson($content)
+  } else {
+    $_content = $content
+  }
+
+  if $ensure == 'present' and $_source == undef and $_content == undef {
+    fail('one of "file" or "content" required.')
+  } elsif $_source != undef and $_content != undef {
+    fail('"file" and "content" cannot be simultaneously defined.')
+  }
 
   require elasticsearch
 
-  # ensure
-  if ! ($ensure in [ 'present', 'absent' ]) {
-    fail("\"${ensure}\" is not a valid ensure parameter value")
-  }
-
-  if ! is_integer($port) {
-    fail("\"${port}\" is not an integer")
-  }
-
-  Exec {
-    path      => [ '/bin', '/usr/bin', '/usr/local/bin' ],
-    cwd       => '/',
-    tries     => 6,
-    try_sleep => 10,
-  }
-
-  # Build up the url
-  $es_url = "http://${host}:${port}/_template/${name}"
-
-  # Can't do a replace and delete at the same time
-
-  if ($ensure == 'present') {
-
-    # Fail when no file or content is supplied
-    if $file == undef and $content == undef {
-      fail('The variables "file" and "content" cannot be empty when inserting or updating a template.')
-    } elsif $file != undef and $content != undef {
-      fail('The variables "file" and "content" cannot be used together when inserting or updating a template.')
-    } else { # we are good to go. notify to insert in case we deleted
-      $insert_notify = Exec[ "insert_template_${name}" ]
-    }
-
-  } else {
-
-    $insert_notify = undef
-
+  es_instance_conn_validator { "${name}-template":
+    server => $api_host,
+    port   => $api_port,
+  } ->
+  elasticsearch_template { $name:
+    ensure       => $ensure,
+    content      => $_content,
+    source       => $_source,
+    protocol     => $api_protocol,
+    host         => $api_host,
+    port         => $api_port,
+    timeout      => $api_timeout,
+    username     => $api_basic_auth_username,
+    password     => $api_basic_auth_password,
+    ca_file      => $api_ca_file,
+    ca_path      => $api_ca_path,
+    validate_tls => $validate_tls,
   }
-
-  # Delete the existing template
-  # First check if it exists of course
-  exec { "delete_template_${name}":
-    command     => "curl -s -XDELETE ${es_url}",
-    onlyif      => "test $(curl -s '${es_url}?pretty=true' | wc -l) -gt 1",
-    notify      => $insert_notify,
-    refreshonly => true,
-  }
-
-  if ($ensure == 'absent') {
-
-    # delete the template file on disk and then on the server
-    file { "${elasticsearch::params::homedir}/templates_import/elasticsearch-template-${name}.json":
-      ensure  => 'absent',
-      notify  => Exec[ "delete_template_${name}" ],
-      require => File[ "${elasticsearch::params::homedir}/templates_import" ],
-    }
-  }
-
-  if ($ensure == 'present') {
-
-    if $content == undef {
-      # place the template file using the file source
-      file { "${elasticsearch::params::homedir}/templates_import/elasticsearch-template-${name}.json":
-        ensure  => file,
-        source  => $file,
-        notify  => Exec[ "delete_template_${name}" ],
-        require => File[ "${elasticsearch::params::homedir}/templates_import" ],
-      }
-    } else {
-      # place the template file using content
-      file { "${elasticsearch::params::homedir}/templates_import/elasticsearch-template-${name}.json":
-        ensure  => file,
-        content => $content,
-        notify  => Exec[ "delete_template_${name}" ],
-        require => File[ "${elasticsearch::params::homedir}/templates_import" ],
-      }
-    }
-
-    exec { "insert_template_${name}":
-      command     => "curl -sL -w \"%{http_code}\\n\" -XPUT ${es_url} -d @${elasticsearch::params::homedir}/templates_import/elasticsearch-template-${name}.json -o /dev/null | egrep \"(200|201)\" > /dev/null",
-      unless      => "test $(curl -s '${es_url}?pretty=true' | wc -l) -gt 1",
-      refreshonly => true,
-      loglevel    => 'debug',
-    }
-
-  }
-
 }
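elasticsearch::template now manages templates through the native `elasticsearch_template` type and the API connection parameters rather than curl execs. A usage sketch; template name, source path and connection values are illustrative:

# Sketch: load an index template over the HTTP API once the node answers.
elasticsearch::template { 'logstash':
  ensure   => 'present',
  source   => 'puppet:///modules/profile/elasticsearch/logstash.json',
  api_host => 'localhost',
  api_port => 9200,
}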
--- a/dev/provisioning/modules/elasticsearch/metadata.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/metadata.json	Wed Nov 09 15:05:41 2016 +0100
@@ -1,6 +1,6 @@
 {
   "name": "elasticsearch-elasticsearch",
-  "version": "0.10.1",
+  "version": "0.14.0",
   "source": "https://github.com/elastic/puppet-elasticsearch",
   "author": "elasticsearch",
   "license": "Apache-2.0",
@@ -10,20 +10,24 @@
   "issues_url": "https://github.com/elastic/puppet-elasticsearch/issues",
   "dependencies": [
     {
-      "name": "puppetlabs/stdlib",
-      "version_requirement": ">= 3.2.0 < 5.0.0"
+      "name": "puppetlabs/apt",
+      "version_requirement": ">= 2.0.0 < 3.0.0"
     },
     {
-      "name": "puppetlabs/apt",
-      "version_requirement": ">= 1.4.0 < 3.0.0"
+      "name": "richardc/datacat",
+      "version_requirement": ">= 0.6.2 < 1.0.0"
+    },
+    {
+      "name": "puppetlabs/java",
+      "version_requirement": ">= 1.0.0 < 2.0.0"
+    },
+    {
+      "name": "puppetlabs/stdlib",
+      "version_requirement": ">= 4.6.0 < 5.0.0"
     },
     {
       "name": "ceritsc/yum",
       "version_requirement": ">= 0.9.6 < 1.0.0"
-    },
-    {
-      "name": "richardc/datacat",
-      "version_requirement": ">= 0.6.2 < 1.0.0"
     }
   ],
   "operatingsystem_support": [
@@ -72,7 +76,8 @@
       "operatingsystemrelease": [
         "10.04",
         "12.04",
-        "14.04"
+        "14.04",
+        "16.04"
       ]
     },
     {
@@ -81,6 +86,13 @@
         "12",
         "13"
       ]
+    },
+    {
+      "operatingsystem": "SLES",
+      "operatingsystemrelease": [
+        "12.0",
+        "12.1"
+      ]
     }
   ],
   "requirements": [
@@ -90,7 +102,7 @@
     },
     {
       "name": "puppet",
-      "version_requirement": ">=3.2.0 <4.3.0"
+      "version_requirement": ">=3.2.0 <5.0.0"
     }
   ]
 }
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/.beaker-foss.cfg	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,5 @@
+{
+   :ssh => {
+       :user_known_hosts_file => '/dev/null'
+   }
+}
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/001_basic_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,8 +0,0 @@
-require 'spec_helper_acceptance'
-
-# Here we put the more basic fundamental tests, ultra obvious stuff.
-describe "basic tests:" do
-  it 'make sure we have copied the module across' do
-    shell("ls #{default['distmoduledir']}/elasticsearch/metadata.json", {:acceptable_exit_codes => 0})
-  end
-end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/002_class_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/002_class_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,20 +1,36 @@
 require 'spec_helper_acceptance'
-
-describe "elasticsearch class:" do
-
-  describe "single instance" do
+require 'spec_helper_faraday'
+require 'json'
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
+describe '::elasticsearch' do
+  describe 'single instance' do
+    describe 'manifest' do
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true
+        }
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          }
+        }
+      EOS
+
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
-
     describe service(test_settings['service_name_a']) do
       it { should be_enabled }
       it { should be_running }
@@ -26,39 +42,88 @@
 
     describe file(test_settings['pid_file_a']) do
       it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
+      its(:content) { should match(/[0-9]+/) }
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
       it { should be_file }
       it { should contain 'name: elasticsearch001' }
+      it { should contain "/usr/share/elasticsearch/data/es-01" }
     end
 
     describe file('/usr/share/elasticsearch/templates_import') do
       it { should be_directory }
     end
 
+    describe file('/usr/share/elasticsearch/data/es-01') do
+      it { should be_directory }
+    end
 
+    describe file('/usr/share/elasticsearch/scripts') do
+      it { should be_directory }
+    end
+
+    describe file('/etc/elasticsearch/es-01/scripts') do
+      it { should be_symlink }
+    end
+
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
+
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_nodes/_local",
+        :faraday_middleware => middleware
+      ) do
+        it 'serves requests', :with_retries do
+          expect(response.status).to eq(200)
+        end
+
+        it 'uses the default data path' do
+          json = JSON.parse(response.body)['nodes'].values.first
+          expect(
+            json['settings']['path']
+          ).to include(
+              'data' => '/usr/share/elasticsearch/data/es-01'
+          )
+        end
+      end
+    end
   end
 
-
-  describe "multiple instances" do
+  describe 'multiple instances' do
 
     it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::instance { 'es-02': config => { 'node.name' => 'elasticsearch002', 'http.port' => '#{test_settings['port_b']}' } }
-           "
+
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true
+        }
+
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          }
+        }
+
+        elasticsearch::instance { 'es-02':
+          config => {
+            'node.name' => 'elasticsearch002',
+            'http.port' => '#{test_settings['port_b']}'
+          }
+        }
+      EOS
 
       # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+      apply_manifest pp, :catch_failures => true
+      apply_manifest pp, :catch_changes => true
     end
 
 
@@ -78,24 +143,43 @@
 
     describe file(test_settings['pid_file_a']) do
       it { should be_file }
-      its(:content) { should match /[0-9]+/ }
+      its(:content) { should match(/[0-9]+/) }
     end
 
     describe file(test_settings['pid_file_b']) do
       it { should be_file }
-      its(:content) { should match /[0-9]+/ }
+      its(:content) { should match(/[0-9]+/) }
+    end
+
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
+    describe port(test_settings['port_b']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe "make sure elasticsearch can serve requests #{test_settings['port_a']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+        :faraday_middleware => middleware
+      ) do
+        describe 'instance a' do
+          it 'serves requests', :with_retries do
+            expect(response.status).to eq(200)
+          end
+        end
+      end
 
-    describe "make sure elasticsearch can serve requests #{test_settings['port_b']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_b']}", default, "http://localhost:#{test_settings['port_b']}/?pretty=true", 0)
-      }
+      describe http(
+        "http://localhost:#{test_settings['port_b']}",
+        :faraday_middleware => middleware
+      ) do
+        describe 'instance b' do
+          it 'serves requests', :with_retries do
+            expect(response.status).to eq(200)
+          end
+        end
+      end
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
@@ -110,16 +194,17 @@
 
   end
 
-
-  describe "module removal" do
+  describe 'module removal' do
 
     it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-            elasticsearch::instance{ 'es-02': ensure => 'absent' }
-           "
 
-      apply_manifest(pp, :catch_failures => true)
+      pp = <<-EOS
+        class { 'elasticsearch': ensure => 'absent' }
+        elasticsearch::instance { 'es-01': ensure => 'absent' }
+        elasticsearch::instance { 'es-02': ensure => 'absent' }
+      EOS
+
+      apply_manifest pp, :catch_failures => true
     end
 
     describe file('/etc/elasticsearch/es-01') do
@@ -143,7 +228,5 @@
       it { should_not be_enabled }
       it { should_not be_running }
     end
-
   end
-
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/003_template_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/003_template_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,70 +1,161 @@
 require 'spec_helper_acceptance'
+require 'spec_helper_faraday'
+require 'json'
+
+describe 'elasticsearch::template', :with_cleanup do
 
-describe "elasticsearch template define:" do
+  before :all do
+    shell "mkdir -p #{default['distmoduledir']}/another/files"
+
+    create_remote_file default,
+      "#{default['distmoduledir']}/another/files/good.json",
+      JSON.dump(test_settings['template'])
 
-  shell("mkdir -p #{default['distmoduledir']}/another/files")
-  shell("echo '#{test_settings['good_json']}' >> #{default['distmoduledir']}/another/files/good.json")
-  shell("echo '#{test_settings['bad_json']}' >> #{default['distmoduledir']}/another/files/bad.json")
+    create_remote_file default,
+      "#{default['distmoduledir']}/another/files/bad.json",
+      JSON.dump(test_settings['template'])[0..-5]
+  end
+
+  describe 'valid json template' do
+
+    context 'from source', :with_cleanup do
+
+      it 'should run successfully' do
+
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}'
+            },
+            manage_repo => true,
+            repo_version => '#{test_settings['repo_version']}',
+            java_install => true
+          }
 
-  describe "Insert a template with valid json content" do
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
+
+          elasticsearch::template { 'foo':
+            ensure => 'present',
+            source => 'puppet:///modules/another/good.json'
+          }
+        EOS
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-          elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-          elasticsearch::template { 'foo': ensure => 'present', file => 'puppet:///modules/another/good.json' }"
+        # Run it twice and test for idempotency
+        apply_manifest pp, :catch_failures => true
+        apply_manifest pp, :catch_changes => true
+      end
+
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
+      end
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_template/foo",
+          :params => {'flat_settings' => 'false'},
+          :faraday_middleware => middleware
+        ) do
+          it 'returns the installed template', :with_retries do
+            expect(JSON.parse(response.body)['foo'])
+              .to include(test_settings['template'])
+          end
+        end
+      end
     end
 
-    it 'should report as existing in Elasticsearch' do
-      curl_with_retries('validate template as installed', default, "http://localhost:#{test_settings['port_a']}/_template/foo | grep logstash", 0)
+    describe 'from content' do
+      it 'should run successfully' do
+
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}'
+            },
+            manage_repo => true,
+            repo_version => '#{test_settings['repo_version']}',
+            java_install => true
+          }
+
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
+
+          elasticsearch::template { 'foo':
+            ensure => 'present',
+            content => '#{JSON.dump(test_settings['template'])}'
+          }
+        EOS
+
+        # Run it twice and test for idempotency
+        apply_manifest pp, :catch_failures => true
+        apply_manifest pp, :catch_changes => true
+      end
+
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
+      end
+
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_template/foo",
+          :params => {'flat_settings' => 'false'},
+          :faraday_middleware => middleware
+        ) do
+          it 'returns the installed template', :with_retries do
+            expect(JSON.parse(response.body)['foo'])
+              .to include(test_settings['template'])
+          end
+        end
+      end
     end
   end
 
   if fact('puppetversion') =~ /3\.[2-9]\./
-    describe "Insert a template with bad json content" do
+
+    describe 'invalid json template' do
+
+      it 'should fail to apply cleanly' do
+
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}'
+            },
+            manage_repo => true,
+            repo_version => '#{test_settings['repo_version']}',
+            java_install => true
+          }
 
-      it 'run should fail' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-             elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-             elasticsearch::template { 'foo': ensure => 'present', file => 'puppet:///modules/another/bad.json' }"
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
 
-        apply_manifest(pp, :expect_failures => true)
-      end
+          elasticsearch::template { 'foo':
+            ensure => 'present',
+            file => 'puppet:///modules/another/bad.json'
+          }
+        EOS
 
+        apply_manifest pp, :expect_failures => true
+      end
     end
-
   else
     # The exit codes have changes since Puppet 3.2x
-    # Since beaker expectations are based on the most recent puppet code All runs on previous versions fails.
+    # Since beaker expectations are based on the most recent Puppet code, all
+    # runs on previous versions fail.
   end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/004_plugin_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/004_plugin_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,299 +1,382 @@
 require 'spec_helper_acceptance'
-
-describe "elasticsearch plugin define:" do
-
-  shell("mkdir -p #{default['distmoduledir']}/another/files")
-  shell("cp /tmp/elasticsearch-bigdesk.zip #{default['distmoduledir']}/another/files/elasticsearch-bigdesk.zip")
-
-  describe "Install a plugin from official repository" do
+require 'spec_helper_faraday'
+require 'json'
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::plugin{'mobz/elasticsearch-head': module_dir => 'head', instances => 'es-01' }
-           "
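+# Shared examples exercising plugin installation, upgrade, offline install via
+# puppet:// and install via url. Parameters: the repository version under test,
+# the user/group Elasticsearch runs as, a hash describing the plugin (prefix,
+# name, old and new versions), the name of a plugin whose zip archive is staged
+# under modules/another/files, and extra elasticsearch class config lines.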
+shared_examples 'plugin behavior' do |version, user, plugin, offline, config|
+  describe "plugin operations on #{version}" do
+    context 'official repo', :with_cleanup do
+      describe 'manifest' do
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}',
+              'network.host' => '0.0.0.0',
+            },
+            manage_repo => true,
+            #{config}
+            java_install => true,
+            restart_on_change => true,
+          }
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
 
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
+          elasticsearch::plugin { 'mobz/elasticsearch-head':
+             instances => 'es-01'
+          }
+        EOS
 
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
+        end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
+        end
+      end
+
+      describe file('/usr/share/elasticsearch/plugins/head/') do
+        it { should be_directory }
+      end
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
+      end
 
-    it 'make sure the directory exists' do
-      shell('ls /usr/share/elasticsearch/plugins/head/', {:acceptable_exit_codes => 0})
-    end
-
-    it 'make sure elasticsearch reports it as existing' do
-      curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep head", 0)
-    end
-
-  end
-  describe "Install a plugin from custom git repo" do
-    it 'should run successfully' do
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        ) do
+          it 'reports the plugin as installed', :with_retries do
+            plugins = JSON.parse(response.body)['nodes']['plugins'].map do |h|
+              h['name']
+            end
+            expect(plugins).to include('head')
+          end
+        end
+      end
     end
 
-    it 'make sure the directory exists' do
-    end
-
-    it 'make sure elasticsearch reports it as existing' do
-    end
-
-  end
-
-  if fact('puppetversion') =~ /3\.[2-9]\./
-
-    describe "Install a non existing plugin" do
-
-      it 'should run successfully' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'elasticsearch/non-existing': module_dir => 'non-existing', instances => 'es-01' }
-        "
-        #  Run it twice and test for idempotency
-        apply_manifest(pp, :expect_failures => true)
-      end
-
-    end
-
-  else
-    # The exit codes have changes since Puppet 3.2x
-    # Since beaker expectations are based on the most recent puppet code All runs on previous versions fails.
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
+    # Pending
+    context 'custom git repo' do
+      describe 'manifest'
+      describe file('/usr/share/elasticsearch/plugins/head/')
+      describe server :container
     end
 
-  end
-
-
-  describe "install plugin while running ES under user 'root'" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, elasticsearch_user => 'root', elasticsearch_group => 'root' }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::plugin{'lmenezes/elasticsearch-kopf': module_dir => 'kopf', instances => 'es-01' }
-      "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
+    if fact('puppetversion') =~ /3\.[2-9]\./
+      context 'invalid plugin', :with_cleanup do
+        describe 'manifest' do
+          pp = <<-EOS
+            class { 'elasticsearch':
+              config => {
+                'node.name' => 'elasticsearch001',
+                'cluster.name' => '#{test_settings['cluster_name']}',
+                'network.host' => '0.0.0.0',
+              },
+              manage_repo => true,
+              #{config}
+              java_install => true
+            }
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    it 'make sure the directory exists' do
-      shell('ls /usr/share/elasticsearch/plugins/kopf/', {:acceptable_exit_codes => 0})
-    end
+            elasticsearch::instance { 'es-01':
+              config => {
+                'node.name' => 'elasticsearch001',
+                'http.port' => '#{test_settings['port_a']}'
+              }
+            }
 
-    it 'make sure elasticsearch reports it as existing' do
-      curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep kopf", 0)
-    end
-
-  end
-
+            elasticsearch::plugin { 'elasticsearch/non-existing':
+              instances => 'es-01'
+            }
+          EOS
 
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+          it 'fails to apply cleanly' do
+            apply_manifest pp, :expect_failures => true
+          end
+        end
+      end
+    else
+      # The exit codes have changed since Puppet 3.2.x.
+      # Since beaker expectations are based on the most recent Puppet code,
+      # all runs on previous versions fail.
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
+    describe "running ES under #{user} user", :with_cleanup do
+      describe 'manifest' do
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}',
+              'network.host' => '0.0.0.0',
+            },
+            manage_repo => true,
+            #{config}
+            java_install => true,
+            elasticsearch_user => '#{user}',
+            elasticsearch_group => '#{user}',
+            restart_on_change => true,
+          }
 
-  end
-
-  describe 'plugin upgrading' do
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
 
-    describe 'Setup first plugin' do
-      it 'should run successful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, elasticsearch_user => 'root', elasticsearch_group => 'root' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'elasticsearch/elasticsearch-cloud-aws/2.1.1': module_dir => 'cloud-aws', instances => 'es-01' }
-        "
+          elasticsearch::plugin { '#{plugin[:prefix]}#{plugin[:name]}/#{plugin[:old]}':
+            instances => 'es-01'
+          }
+        EOS
 
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
+        end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
+        end
       end
 
-      it 'make sure the directory exists' do
-        shell('ls /usr/share/elasticsearch/plugins/cloud-aws/', {:acceptable_exit_codes => 0})
+      describe file("/usr/share/elasticsearch/plugins/#{plugin[:name]}/") do
+        it { should be_directory }
       end
 
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep cloud-aws | grep 2.1.1", 0)
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
       end
 
-    end
-
-    describe "Upgrade plugin" do
-      it 'Should run succesful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, elasticsearch_user => 'root', elasticsearch_group => 'root' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'elasticsearch/elasticsearch-cloud-aws/2.2.0': module_dir => 'cloud-aws', instances => 'es-01' }
-        "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep cloud-aws | grep 2.2.0", 0)
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        ) do
+          it 'reports the plugin as installed', :with_retries do
+            plugins = JSON.parse(response.body)['nodes']['plugins'].map do |h|
+              {
+                name: h['name'],
+                version: h['version']
+              }
+            end
+            expect(plugins).to include({
+              name: plugin[:name],
+              version: plugin[:old]
+            })
+          end
+        end
       end
     end
 
-  end
+    if version =~ /^1/
+      describe 'upgrading', :with_cleanup do
+        describe 'manifest' do
+          pp = <<-EOS
+            class { 'elasticsearch':
+              config => {
+                'node.name' => 'elasticsearch001',
+                'cluster.name' => '#{test_settings['cluster_name']}',
+                'network.host' => '0.0.0.0',
+              },
+              manage_repo => true,
+              #{config}
+              java_install => true,
+              elasticsearch_user => '#{user}',
+              elasticsearch_group => '#{user}',
+              restart_on_change => true,
+            }
 
-  describe "offline install via puppetmaster" do
-      it 'Should run succesful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, elasticsearch_user => 'root', elasticsearch_group => 'root' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'bigdesk': source => 'puppet:///modules/another/elasticsearch-bigdesk.zip', instances => 'es-01' }
-        "
+            elasticsearch::instance { 'es-01':
+              config => {
+                'node.name' => 'elasticsearch001',
+                'http.port' => '#{test_settings['port_a']}'
+              }
+            }
+
+            elasticsearch::plugin { '#{plugin[:prefix]}#{plugin[:name]}/#{plugin[:new]}':
+              instances => 'es-01'
+            }
+          EOS
+
+          it 'applies cleanly' do
+            apply_manifest pp, :catch_failures => true
+          end
+          it 'is idempotent' do
+            apply_manifest pp, :catch_changes => true
+          end
+        end
+
+        describe port(test_settings['port_a']) do
+          it 'open', :with_retries do should be_listening end
+        end
 
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+        describe server :container do
+          describe http(
+            "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+          ) do
+            it 'reports the upgraded plugin version', :with_retries do
+              j = JSON.parse(response.body)['nodes']['plugins'].find do |h|
+                h['name'] == plugin[:name]
+              end
+              expect(j).to include('version' => plugin[:new])
+            end
+          end
+        end
+      end
+    end
 
-      end
+    describe 'offline installation via puppet://', :with_cleanup do
+      describe 'manifest' do
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}',
+              'network.host' => '0.0.0.0',
+            },
+            manage_repo => true,
+            #{config}
+            java_install => true,
+            elasticsearch_user => '#{user}',
+            elasticsearch_group => '#{user}',
+            restart_on_change => true,
+          }
 
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep bigdesk", 0)
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
+
+          elasticsearch::plugin { '#{offline}':
+            source => 'puppet:///modules/another/elasticsearch-#{offline}.zip',
+            instances => 'es-01'
+          }
+        EOS
+
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
+        end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
+        end
       end
 
-  end
-
-  describe "module removal" do
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
+      end
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        ) do
+          it 'reports the plugin as installed', :with_retries do
+            plugins = JSON.parse(response.body)['nodes']['plugins']
+            expect(plugins.first).to include('name' => offline)
+          end
+        end
+      end
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
+    describe 'installation via url', :with_cleanup do
+      describe 'manifest' do
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}',
+              'network.host' => '0.0.0.0',
+            },
+            manage_repo => true,
+            #{config}
+            java_install => true,
+            restart_on_change => true,
+          }
+
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
 
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
+          elasticsearch::plugin { 'hq':
+            url => 'https://github.com/royrusso/elasticsearch-HQ/archive/v2.0.3.zip',
+            instances => 'es-01'
+          }
+        EOS
+
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
+        end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
+        end
+      end
+
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
+      end
+
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        ) do
+          it 'reports the plugin as installed', :with_retries do
+            plugins = JSON.parse(response.body)['nodes']['plugins'].map do |h|
+              h['name']
+            end
+            expect(plugins).to include('hq')
+          end
+        end
+      end
     end
+  end
+end
 
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
+describe 'elasticsearch::plugin' do
+  before :all do
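+    # Stage fixture files: link the pre-staged plugin archives into the
+    # fixture module so the offline (puppet://) install examples can use them.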
+    shell "mkdir -p #{default['distmoduledir']}/another/files"
 
+    shell %W{
+      ln -sf /tmp/elasticsearch-bigdesk.zip
+      #{default['distmoduledir']}/another/files/elasticsearch-bigdesk.zip
+    }.join(' ')
+
+    shell %W{
+      ln -sf /tmp/elasticsearch-kopf.zip
+      #{default['distmoduledir']}/another/files/elasticsearch-kopf.zip
+    }.join(' ')
   end
 
-  describe "install via url" do
-      it 'Should run succesful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'HQ': url => 'https://github.com/royrusso/elasticsearch-HQ/archive/v2.0.3.zip', instances => 'es-01' }
-        "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep HQ", 0)
-      end
-
-  end
-
-  describe "module removal" do
+  include_examples 'plugin behavior',
+    test_settings['repo_version'],
+    'root',
+    {
+      prefix: 'elasticsearch/elasticsearch-',
+      name: 'cloud-aws',
+      old: '2.1.1',
+      new: '2.2.0',
+    },
+    'bigdesk',
+    "repo_version => '#{test_settings['repo_version']}',"
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-
+  include_examples 'plugin behavior',
+    test_settings['repo_version2x'],
+    'elasticsearch',
+    {
+      prefix: 'lmenezes/elasticsearch-',
+      name: 'kopf',
+      old: '2.0.1',
+      new: '2.1.1',
+    },
+    'kopf',
+    <<-EOS
+      repo_version => '#{test_settings['repo_version2x']}',
+      version => '2.0.0',
+    EOS
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/009_datapath_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/009_datapath_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,90 +1,35 @@
 require 'spec_helper_acceptance'
-
-describe "Data dir settings" do
-
-  describe "Default data dir" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain "/usr/share/elasticsearch/data/es-01" }
-    end
+require 'spec_helper_faraday'
+require 'json'
 
-     describe "Elasticsearch config has the data path" do
-      it {
-        curl_with_retries("check data path on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/_nodes?pretty=true | grep /usr/share/elasticsearch/data/es-01", 0)
-      }
-
-    end
-
-    describe file('/usr/share/elasticsearch/data/es-01') do
-      it { should be_directory }
-    end
-
-  end
-
-
-  describe "Single data dir from main class" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, datadir => '/var/lib/elasticsearch-data' }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
+describe 'elasticsearch::datadir' do
+  describe 'single data dir from class', :with_cleanup do
+    describe 'manifest' do
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true,
+          datadir => '/var/lib/elasticsearch-data'
+        }
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          }
+        }
+      EOS
 
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
@@ -92,71 +37,56 @@
       it { should contain '/var/lib/elasticsearch-data/es-01' }
     end
 
-     describe "Elasticsearch config has the data path" do
-      it {
-        curl_with_retries("check data path on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/_nodes?pretty=true | grep /var/lib/elasticsearch-data/es-01", 0)
-      }
-
-    end
-
     describe file('/var/lib/elasticsearch-data/es-01') do
       it { should be_directory }
     end
 
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_nodes/_local",
+        :faraday_middleware => middleware
+      ) do
+        it 'uses a custom data path' do
+          json = JSON.parse(response.body)['nodes'].values.first
+          expect(
+            json['settings']['path']['data']
+          ).to eq('/var/lib/elasticsearch-data/es-01')
+        end
+      end
     end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
   end
 
-  describe "Single data dir from instance config" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}'}, datadir => '#{test_settings['datadir_1']}' }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
+  describe 'single data dir from instance', :with_cleanup do
+    describe 'manifest' do
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true
+        }
 
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          },
+          datadir => '#{test_settings['datadir_1']}'
+        }
+      EOS
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
@@ -164,70 +94,59 @@
       it { should contain "#{test_settings['datadir_1']}" }
     end
 
-     describe "Elasticsearch config has the data path" do
-      it {
-        curl_with_retries("check data path on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/_nodes?pretty=true | grep #{test_settings['datadir_1']}", 0)
-      }
-    end
-
     describe file(test_settings['datadir_1']) do
       it { should be_directory }
     end
 
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_nodes/_local",
+        :faraday_middleware => middleware
+      ) do
+        it 'uses a custom data path' do
+          json = JSON.parse(response.body)['nodes'].values.first
+          expect(
+            json['settings']['path']['data']
+          ).to eq(test_settings['datadir_1'])
+        end
+      end
     end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
   end
 
-  describe "multiple data dir's from main class" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, datadir => [ '/var/lib/elasticsearch/01', '/var/lib/elasticsearch/02'] }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
+  describe 'multiple data dirs from class', :with_cleanup do
+    describe 'manifest' do
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true,
+          datadir => [
+            '/var/lib/elasticsearch/01',
+            '/var/lib/elasticsearch/02'
+          ]
+        }
 
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          }
+        }
+      EOS
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
@@ -236,126 +155,99 @@
       it { should contain '/var/lib/elasticsearch/02/es-01' }
     end
 
-     describe "Elasticsearch config has the data path" do
-      it {
-        curl_with_retries("check data path on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/_nodes?pretty=true | grep /var/lib/elasticsearch/01/es-01", 0)
-      }
-      it {
-        curl_with_retries("check data path on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/_nodes?pretty=true | grep /var/lib/elasticsearch/02/es-01", 0)
-      }
-
-    end
-
-    describe file('/var/lib/elasticsearch/01/es-01') do
+    describe file '/var/lib/elasticsearch/01/es-01' do
       it { should be_directory }
     end
-
-    describe file('/var/lib/elasticsearch/02/es-01') do
+    describe file '/var/lib/elasticsearch/02/es-01' do
       it { should be_directory }
     end
 
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_nodes/_local",
+        :faraday_middleware => middleware
+      ) do
+        it 'uses custom data paths' do
+          json = JSON.parse(response.body)['nodes'].values.first
+          expect(
+            json['settings']['path']['data']
+          ).to contain_exactly(
+            '/var/lib/elasticsearch/01/es-01',
+            '/var/lib/elasticsearch/02/es-01'
+          )
+        end
+      end
     end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
   end
 
-
-  describe "multiple data dir's from instance config" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' }, datadir => [ '#{test_settings['datadir_1']}', '#{test_settings['datadir_2']}'] }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
+  describe 'multiple data dirs from instance', :with_cleanup do
+    describe 'manifest' do
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true
+        }
 
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          },
+          datadir => [
+            '#{test_settings['datadir_1']}',
+            '#{test_settings['datadir_2']}'
+          ]
+        }
+      EOS
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
       it { should be_file }
-      it { should contain "#{test_settings['datadir_1']}" }
-      it { should contain "#{test_settings['datadir_2']}" }
+      it { should contain test_settings['datadir_1'] }
+      it { should contain test_settings['datadir_2'] }
     end
 
-     describe "Elasticsearch config has the data path" do
-      it {
-        curl_with_retries("check data path on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/_nodes?pretty=true | grep #{test_settings['datadir_1']}", 0)
-      }
-      it {
-        curl_with_retries("check data path on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/_nodes?pretty=true | grep #{test_settings['datadir_2']}", 0)
-      }
-
-    end
-
-    describe file(test_settings['datadir_1']) do
+    describe file test_settings['datadir_1'] do
       it { should be_directory }
     end
-
-    describe file(test_settings['datadir_2']) do
+    describe file test_settings['datadir_2'] do
       it { should be_directory }
     end
 
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
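+    # Ask the running node for its settings and verify both directories appear in path.data.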
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_nodes/_local",
+        :faraday_middleware => middleware
+      ) do
+        it 'uses custom data paths' do
+          json = JSON.parse(response.body)['nodes'].values.first
+          expect(
+            json['settings']['path']['data']
+          ).to contain_exactly(
+            test_settings['datadir_1'],
+            test_settings['datadir_2']
+          )
+        end
+      end
     end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
   end
-
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/010_pkg_url_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/010_pkg_url_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,167 +1,175 @@
 require 'spec_helper_acceptance'
+require 'spec_helper_faraday'
+require 'json'
 
-describe "Elasticsearch class:" do
+describe 'elasticsearch::package_url' do
 
-  shell("mkdir -p #{default['distmoduledir']}/another/files")
-  shell("cp #{test_settings['local']} #{default['distmoduledir']}/another/files/#{test_settings['puppet']}")
+  before :all do
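+    # Stage the local package under another/files/ so the 'via puppet' context can install it from a puppet:/// URL.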
+    shell "mkdir -p #{default['distmoduledir']}/another/files"
 
-  context "install via http resource" do
+    shell %W{
+      cp #{test_settings['local']}
+      #{default['distmoduledir']}/another/files/#{test_settings['puppet']}
+    }.join(' ')
+  end
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': package_url => '#{test_settings['url']}', java_install => true, config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' } }
-            elasticsearch::instance{ 'es-01': }
-           "
+  context 'via http', :with_cleanup do
+    describe 'manifest' do
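+      # package_url points at an HTTP URL; the module downloads and installs the package directly.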
+      pp = <<-EOS
+        class { 'elasticsearch':
+          package_url => '#{test_settings['url']}',
+          java_install => true,
+          config => {
+            'node.name' => 'elasticsearch001',
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          }
+        }
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+        elasticsearch::instance { 'es-01': }
+      EOS
 
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
     describe package(test_settings['package_name']) do
       it { should be_installed }
     end
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    it 'make sure elasticsearch can serve requests' do
-      curl_with_retries('check ES', default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-    end
-
     describe service(test_settings['service_name_a']) do
       it { should be_enabled }
       it { should be_running }
     end
 
-  end
-
-  context "Clean" do
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe file(test_settings['pid_file_a']) do
+      it { should be_file }
+      its(:content) { should match(/[0-9]+/) }
     end
 
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+        :faraday_middleware => middleware
+      ) do
+        it 'serves requests' do
+          expect(response.status).to eq(200)
+        end
+      end
     end
-
   end
 
-  context "Install via local file resource" do
+  context 'via local file', :with_cleanup do
+    describe 'manifest' do
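+      # Same manifest, but package_url uses a file: URI pointing at a package already present on the host.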
+      pp = <<-EOS
+        class { 'elasticsearch':
+          package_url => 'file:#{test_settings['local']}',
+          java_install => true,
+          config => {
+            'node.name' => 'elasticsearch001',
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          }
+        }
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': package_url => 'file:#{test_settings['local']}', java_install => true, config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' } }
-            elasticsearch::instance{ 'es-01': }
-           "
+        elasticsearch::instance { 'es-01': }
+      EOS
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
     describe package(test_settings['package_name']) do
       it { should be_installed }
     end
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    it 'make sure elasticsearch can serve requests' do
-      curl_with_retries('check ES', default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-    end
-
     describe service(test_settings['service_name_a']) do
       it { should be_enabled }
       it { should be_running }
     end
 
-  end
-
-  context "Clean" do
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe file(test_settings['pid_file_a']) do
+      it { should be_file }
+      its(:content) { should match(/[0-9]+/) }
     end
 
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+        :faraday_middleware => middleware
+      ) do
+        it 'serves requests' do
+          expect(response.status).to eq(200)
+        end
+      end
     end
-
   end
 
-  context "Install via Puppet resource" do
+  context 'via puppet', :with_cleanup do
+    describe 'manifest' do
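+      # package_url fetches the package from the 'another' module staged in the before :all hook.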
+      pp = <<-EOS
+        class { 'elasticsearch':
+          package_url =>
+            'puppet:///modules/another/#{test_settings['puppet']}',
+          java_install => true,
+          config => {
+            'node.name' => 'elasticsearch001',
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          }
+        }
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': package_url => 'puppet:///modules/another/#{test_settings['puppet']}', java_install => true, config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' } }
-            elasticsearch::instance { 'es-01': }
-           "
+        elasticsearch::instance { 'es-01': }
+      EOS
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
     describe package(test_settings['package_name']) do
       it { should be_installed }
     end
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    it 'make sure elasticsearch can serve requests' do
-      curl_with_retries('check ES', default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-    end
-
     describe service(test_settings['service_name_a']) do
       it { should be_enabled }
       it { should be_running }
     end
 
-  end
+    describe file(test_settings['pid_file_a']) do
+      it { should be_file }
+      its(:content) { should match(/[0-9]+/) }
+    end
 
-  context "Clean" do
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+        :faraday_middleware => middleware
+      ) do
+        it 'serves requests' do
+          expect(response.status).to eq(200)
+        end
+      end
     end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
   end
-
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/011_service_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,77 +0,0 @@
-require 'spec_helper_acceptance'
-
-describe "Service tests:" do
-
-  describe "Make sure we can manage the defaults file" do
-
-    context "Change the defaults file" do
-      it 'should run successfully' do
-        pp = "class { 'elasticsearch': manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, config => { 'cluster.name' => '#{test_settings['cluster_name']}' }, init_defaults => { 'ES_JAVA_OPTS' => '\"-server -XX:+UseTLAB -XX:+CMSClassUnloadingEnabled\"' } }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001' } }
-             "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-      end
-
-      describe service(test_settings['service_name_a']) do
-        it { should be_enabled }
-        it { should be_running }
-      end
-
-      describe package(test_settings['package_name']) do
-        it { should be_installed }
-      end
-
-      describe file(test_settings['pid_file_a']) do
-        it { should be_file }
-        its(:content) { should match /[0-9]+/ }
-      end
-
-      describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-        it { should be_file }
-        it { should contain 'name: elasticsearch001' }
-      end
-
-      describe 'make sure elasticsearch can serve requests' do
-        it {
-          curl_with_retries('check ES', default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-        }
-      end
-
-      context "Make sure we have ES_USER=root" do
-
-        describe file(test_settings['defaults_file_a']) do
-          its(:content) { should match /^ES_JAVA_OPTS="-server -XX:\+UseTLAB -XX:\+CMSClassUnloadingEnabled"/ }
-        end
-
-      end
-
-    end
-
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-
-end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/012_instances_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/012_instances_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,74 +1,43 @@
 require 'spec_helper_acceptance'
-
-describe "elasticsearch class:" do
-
-  describe "Setup single instance" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
+require 'spec_helper_faraday'
+require 'json'
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
+describe 'elasticsearch::instance' do
+  describe 'two instances' do
+    describe 'manifest' do
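+      # Two instances ('es-01', 'es-02') on one host, each with its own node name and HTTP port.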
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true
+        }
 
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: elasticsearch001' }
-    end
-
-    describe file('/usr/share/elasticsearch/templates_import') do
-      it { should be_directory }
-    end
-
-    describe file('/usr/share/elasticsearch/scripts') do
-      it { should be_directory }
-    end
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          }
+        }
 
-    describe file('/etc/elasticsearch/es-01/scripts') do
-      it { should be_symlink }
-    end
-
-  end
-
-
-  describe "Setup second instance" do
+        elasticsearch::instance { 'es-02':
+          config => {
+            'node.name' => 'elasticsearch002',
+            'http.port' => '#{test_settings['port_b']}'
+          }
+        }
+      EOS
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::instance { 'es-02': config => { 'node.name' => 'elasticsearch002', 'http.port' => '#{test_settings['port_b']}' } }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
-
     describe service(test_settings['service_name_a']) do
       it { should be_enabled }
       it { should be_running }
@@ -79,30 +48,14 @@
       it { should be_running }
     end
 
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
     describe file(test_settings['pid_file_a']) do
       it { should be_file }
-      its(:content) { should match /[0-9]+/ }
+      its(:content) { should match(/[0-9]+/) }
     end
 
     describe file(test_settings['pid_file_b']) do
       it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "make sure elasticsearch can serve requests #{test_settings['port_a']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe "make sure elasticsearch can serve requests #{test_settings['port_b']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_b']}", default, "http://localhost:#{test_settings['port_b']}/?pretty=true", 0)
-      }
+      its(:content) { should match(/[0-9]+/) }
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
@@ -115,10 +68,6 @@
       it { should contain 'name: elasticsearch002' }
     end
 
-    describe file('/usr/share/elasticsearch/scripts') do
-      it { should be_directory }
-    end
-
     describe file('/etc/elasticsearch/es-01/scripts') do
       it { should be_symlink }
     end
@@ -127,60 +76,63 @@
       it { should be_symlink }
     end
 
-  end
-
-
-  describe "Remove instance 1" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-            elasticsearch::instance { 'es-02': config => { 'node.name' => 'elasticsearch002', 'http.port' => '#{test_settings['port_b']}' } }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+        :faraday_middleware => middleware
+      ) do
+        it "serves requests on #{test_settings['port_a']}" do
+          expect(response.status).to eq(200)
+        end
+      end
     end
 
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
+    describe port(test_settings['port_b']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe service(test_settings['service_name_b']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe file(test_settings['pid_file_b']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_b']}",
+        :faraday_middleware => middleware
+      ) do
+        it "serves requests on #{test_settings['port_b']}" do
+          expect(response.status).to eq(200)
+        end
+      end
     end
-
-    describe "make sure elasticsearch can serve requests #{test_settings['port_b']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_b']}", default, "http://localhost:#{test_settings['port_b']}/?pretty=true", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-02/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: elasticsearch002' }
-    end
-
   end
 
-  describe "Cleanup" do
+  describe 'removing instance 2', :with_cleanup do
+    describe 'manifest' do
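+      # es-01 keeps running while es-02 is torn down with ensure => 'absent'.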
+      pp = <<-EOS
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true
+        }
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-02': ensure => 'absent' }
-           "
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          }
+        }
 
-      apply_manifest(pp, :catch_failures => true)
+        elasticsearch::instance { 'es-02':
+          ensure => 'absent'
+        }
+      EOS
+
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
     end
 
     describe file('/etc/elasticsearch/es-02') do
@@ -192,6 +144,32 @@
       it { should_not be_running }
     end
 
-  end
+    describe service(test_settings['service_name_a']) do
+      it { should be_enabled }
+      it { should be_running }
+    end
+
+    describe file(test_settings['pid_file_a']) do
+      it { should be_file }
+      its(:content) { should match(/[0-9]+/) }
+    end
+
+    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
+      it { should be_file }
+      it { should contain 'name: elasticsearch001' }
+    end
 
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
+
+    describe server :container do
+      describe http "http://localhost:#{test_settings['port_a']}" do
+        it "serves requests on #{test_settings['port_a']}",
+           :with_generous_retries do
+          expect(response.status).to eq(200)
+        end
+      end
+    end
+  end
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/013_config_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,83 +0,0 @@
-require 'spec_helper_acceptance'
-
-describe "elasticsearch class:" do
-
-  describe "Setup single instance" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => 'foobar' }, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}', 'node.master' => true, 'node.data' => false, 'index' => { 'routing' => { 'allocation' => { 'include' => 'tag1', 'exclude' => [ 'tag2', 'tag3' ] } } }, 'node' => { 'rack' => 46 }, 'boostrap.mlockall' => true, 'cluster.name' => '#{test_settings['cluster_name']}' } }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: elasticsearch001' }
-      it { should contain 'master: true' }
-      it { should contain 'data: false' }
-      it { should contain "cluster:\n  name: #{test_settings['cluster_name']}" }
-      it { should contain 'rack: 46' }
-      it { should contain "index: \n  routing: \n    allocation: \n      exclude: \n             - tag2\n             - tag3\n      include: tag1" }
-    end
-
-    describe file('/usr/share/elasticsearch/templates_import') do
-      it { should be_directory }
-    end
-
-    describe file('/usr/share/elasticsearch/scripts') do
-      it { should be_directory }
-    end
-
-    describe file('/etc/elasticsearch/es-01/scripts') do
-      it { should be_symlink }
-    end
-
-  end
-
-  describe "Cleanup" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/014_hiera_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/014_hiera_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,18 +1,28 @@
 require 'spec_helper_acceptance'
-
-# Here we put the more basic fundamental tests, ultra obvious stuff.
-
-describe "Hiera tests" do
-
-  describe "single instance" do
+require 'spec_helper_faraday'
+require 'json'
 
-    it 'should run successfully' do
-      write_hiera_config(['singleinstance'])
-      pp = "class { 'elasticsearch': manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }"
+describe 'hiera' do
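+  # Instance definitions come from the Hiera fixtures; the manifest below only declares the base class.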
+  let :base_manifest do
+    <<-EOS
+      class { 'elasticsearch':
+        manage_repo => true,
+        repo_version => '#{test_settings['repo_version']}',
+        java_install => true
+      }
+    EOS
+  end
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+  describe 'single instance' do
+    describe 'manifest' do
+      before :all do write_hiera_config(['singleinstance']) end
+
+      it 'applies cleanly' do
+        apply_manifest base_manifest, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest base_manifest, :catch_changes => true
+      end
     end
 
     describe service(test_settings['service_name_a']) do
@@ -20,19 +30,9 @@
       it { should be_running }
     end
 
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
     describe file(test_settings['pid_file_a']) do
       it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
+      its(:content) { should match(/[0-9]+/) }
     end
 
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
@@ -40,72 +40,67 @@
       it { should contain 'name: es-01' }
     end
 
-    describe file('/usr/share/elasticsearch/templates_import') do
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
+
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+        :faraday_middleware => middleware
+      ) do
+        it 'serves requests' do
+          expect(response.status).to eq(200)
+        end
+      end
+    end
+  end
+
+  describe 'single instance with plugin' do
+    describe 'manifest' do
+      before :all do write_hiera_config(['singleplugin']) end
+
+      it 'applies cleanly' do
+        apply_manifest base_manifest, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest base_manifest, :catch_changes => true
+      end
+    end
+
+    describe file('/usr/share/elasticsearch/plugins/head/') do
       it { should be_directory }
     end
 
-  end
-
-  describe "single instance with plugin" do
-
-    it 'should run successfully' do
-      write_hiera_config(['singleplugin'])
-      pp = "class { 'elasticsearch': manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }"
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: es-01' }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        :faraday_middleware => middleware
+      ) do
+        it 'reports the plugin as installed', :with_retries do
+          plugins = JSON.parse(response.body)['nodes']['plugins'].map do |h|
+            h['name']
+          end
+          expect(plugins).to include('head')
+        end
+      end
     end
-
-    describe file('/usr/share/elasticsearch/templates_import') do
-      it { should be_directory }
-    end
-
-    it 'make sure the directory exists' do
-      shell('ls /usr/share/elasticsearch/plugins/head/', {:acceptable_exit_codes => 0})
-    end
-
-    it 'make sure elasticsearch reports it as existing' do
-      curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep head", 0)
-    end
-
   end
 
-  describe "multiple instances" do
-
+  describe 'multiple instances' do
+    describe 'manifest' do
+      before :all do write_hiera_config(['multipleinstances']) end
 
-    it 'should run successfully' do
-      write_hiera_config(['multipleinstances'])
-      pp = "class { 'elasticsearch': manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }"
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+      it 'applies cleanly' do
+        apply_manifest base_manifest, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest base_manifest, :catch_changes => true
+      end
     end
 
     describe service(test_settings['service_name_a']) do
@@ -118,32 +113,6 @@
       it { should be_running }
     end
 
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe file(test_settings['pid_file_b']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "make sure elasticsearch can serve requests #{test_settings['port_a']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe "make sure elasticsearch can serve requests #{test_settings['port_b']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_b']}", default, "http://localhost:#{test_settings['port_b']}/?pretty=true", 0)
-      }
-    end
-
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
       it { should be_file }
       it { should contain 'name: es-01' }
@@ -154,28 +123,44 @@
       it { should contain 'name: es-02' }
     end
 
-  end
-
-
-  describe "Cleanup" do
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
 
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-           "
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+        :faraday_middleware => middleware
+      ) do
+        it 'serves requests' do
+          expect(response.status).to eq(200)
+        end
+      end
+    end
 
-      apply_manifest(pp, :catch_failures => true)
+    describe port(test_settings['port_b']) do
+      it 'open', :with_retries do should be_listening end
     end
 
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_b']}",
+        :faraday_middleware => middleware
+      ) do
+        it 'serves requests' do
+          expect(response.status).to eq(200)
+        end
+      end
     end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
   end
 
+  after :all do
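+    # Reset the Hiera data and remove everything this suite installed.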
+    write_hiera_config([])
 
+    apply_manifest <<-EOS
+      class { 'elasticsearch': ensure => 'absent' }
+      elasticsearch::instance { 'es-01': ensure => 'absent' }
+      elasticsearch::instance { 'es-02': ensure => 'absent' }
+    EOS
+  end
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/015_staged_removal.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-require 'spec_helper_acceptance'
-
-describe "elasticsearch class:" do
-
-  describe "Setup" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::instance { 'es-02': config => { 'node.name' => 'elasticsearch002', 'http.port' => '#{test_settings['port_b']}' } }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-  end
-
-  describe "First removal of instance 1" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-            elasticsearch::instance { 'es-02': config => { 'node.name' => 'elasticsearch002', 'http.port' => '#{test_settings['port_b']}' } }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-  end
-
-  describe "Second removal of instance 1" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-            elasticsearch::instance { 'es-02': config => { 'node.name' => 'elasticsearch002', 'http.port' => '#{test_settings['port_b']}' } }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-  end
-
-  describe "First removal of the rest" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-02': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-  end
-
-  describe "Second removal of the rest" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-02': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-  end
-
-end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/016_package_pin_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/016_package_pin_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,107 +1,111 @@
 require 'spec_helper_acceptance'
 
-describe "Package pinning:" do
-
-  if fact('osfamily') != 'Suse'
-
-    describe "Pinning enabled" do
-
-      describe "Setup" do
+if fact('osfamily') != 'Suse'
+  describe 'elasticsearch::package_pin', :with_cleanup do
+    describe 'initial installation' do
+      describe 'manifest' do
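+        # Install a specific version; the module should pin it against package-manager upgrades.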
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'cluster.name' => '#{test_settings['cluster_name']}'
+            },
+            manage_repo => true,
+            repo_version => '#{test_settings['repo_version']}',
+            version => '#{test_settings['install_package_version']}',
+            java_install => true
+          }
 
-        it 'should run successful' do
-          write_hiera_config('')
-          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', version => '#{test_settings['install_package_version']}', java_install => true }
-                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-               "
-          # Run it twice and test for idempotency
-          apply_manifest(pp, :catch_failures => true)
-          expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-        end
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
+        EOS
 
-        describe package(test_settings['package_name']) do
-          it { should be_installed.with_version(test_settings['install_version']) }
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
         end
-
-      end # end setup
-
-      describe "Run upgrade" do
-        it 'should run fine' do
-          case fact('osfamily')
-          when 'Debian'
-            shell('apt-get update && apt-get -y install elasticsearch')
-          when 'RedHat'
-            shell('yum -y update elasticsearch')
-          end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
         end
       end
 
-      describe "check installed package" do
-
-        describe package(test_settings['package_name']) do
-          it { should be_installed.with_version(test_settings['install_version']) }
+      describe package(test_settings['package_name']) do
+        it do
+          should be_installed.with_version(test_settings['install_version'])
         end
-
       end
+    end
 
-      describe "Upgrade" do
+    describe 'package manager upgrade' do
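+      # Run a raw apt/yum upgrade; the pinned version must survive it.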
+      it 'should run successfully' do
+        case fact('osfamily')
+        when 'Debian'
+          shell 'apt-get update && apt-get -y install elasticsearch'
+        when 'RedHat'
+          shell 'yum -y update elasticsearch'
+        end
+      end
+    end
+
+    describe package(test_settings['package_name']) do
+      it do
+        should be_installed.with_version(test_settings['install_version'])
+      end
+    end
 
-        it 'should run successful' do
-          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', version => '#{test_settings['upgrade_package_version']}', java_install => true }
-                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-               "
-          # Run it twice and test for idempotency
-          apply_manifest(pp, :catch_failures => true)
-          expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-        end
+    describe 'puppet upgrade' do
+      describe 'manifest' do
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'cluster.name' => '#{test_settings['cluster_name']}'
+            },
+            manage_repo => true,
+            repo_version => '#{test_settings['repo_version']}',
+            version => '#{test_settings['upgrade_package_version']}',
+            java_install => true
+          }
 
-        describe package(test_settings['package_name']) do
-          it { should be_installed.with_version(test_settings['upgrade_version']) }
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
+        EOS
+
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
         end
-
-      end # end setup
-
-      describe "Run upgrade" do
-        it 'should run fine' do
-          case fact('osfamily')
-          when 'Debian'
-            shell('apt-get update && apt-get -y install elasticsearch')
-          when 'RedHat'
-            shell('yum -y update elasticsearch')
-          end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
         end
       end
 
-      describe "check installed package" do
-
-        describe package(test_settings['package_name']) do
-          it { should be_installed.with_version(test_settings['upgrade_version']) }
+      describe package(test_settings['package_name']) do
+        it do
+          should be_installed.with_version(test_settings['upgrade_version'])
         end
-
       end
-
     end
 
-    describe "Cleanup" do
-
+    describe 'package manager second upgrade' do
       it 'should run successfully' do
-        pp = "class { 'elasticsearch': ensure => 'absent' }
-              elasticsearch::instance{ 'es-01': ensure => 'absent' }
-             "
-
-        apply_manifest(pp, :catch_failures => true)
+        case fact('osfamily')
+        when 'Debian'
+          shell 'apt-get update && apt-get -y install elasticsearch'
+        when 'RedHat'
+          shell 'yum -y update elasticsearch'
+        end
       end
-
-      describe file('/etc/elasticsearch/es-01') do
-        it { should_not be_directory }
-      end
-
-      describe service(test_settings['service_name_a']) do
-        it { should_not be_enabled }
-        it { should_not be_running }
-      end
-
     end
 
+    describe package(test_settings['package_name']) do
+      it do
+        should be_installed.with_version(test_settings['upgrade_version'])
+      end
+    end
   end
-
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/017_restart_on_change_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,144 +0,0 @@
-require 'spec_helper_acceptance'
-
-describe "elasticsearch class:" do
-
-  describe "Setup" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, restart_on_change => false }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::plugin{'lmenezes/elasticsearch-kopf': instances => 'es-01' }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe "Returns correct node name" do
-      it {
-        curl_with_retries("check hostname on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true | grep elasticsearch001", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: elasticsearch001' }
-    end
-
-    describe file('/usr/share/elasticsearch/templates_import') do
-      it { should be_directory }
-    end
-
-
-  end
-
-  describe "Change config" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, restart_on_change => false }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch002', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::plugin{'lmenezes/elasticsearch-kopf': instances => 'es-01' }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe "Returns correct node name" do
-      it {
-        curl_with_retries("check hostname on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true | grep elasticsearch001", 0)
-      }
-    end
-
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: elasticsearch002' }
-    end
-
-    describe file('/usr/share/elasticsearch/templates_import') do
-      it { should be_directory }
-    end
-
-
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe file('/etc/elasticsearch/es-02') do
-      it { should_not be_directory }
-    end
-
-    describe file('/etc/elasticsearch/es-03') do
-      it { should_not be_directory }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-    describe service(test_settings['service_name_b']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/018_shield_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,393 @@
+require 'spec_helper_acceptance'
+require 'spec_helper_faraday'
+require 'json'
+
+describe "elasticsearch shield" do
+
+  # Template manifest
+  let :base_manifest do <<-EOF
+    class { 'elasticsearch' :
+      java_install => true,
+      manage_repo  => true,
+      repo_version => '#{test_settings['repo_version']}',
+      config => {
+        'cluster.name' => '#{test_settings['cluster_name']}',
+        'http.port' => #{test_settings['port_a']},
+      },
+      restart_on_change => true,
+    }
+
+    elasticsearch::plugin { 'elasticsearch/license/latest' :  }
+    elasticsearch::plugin { 'elasticsearch/shield/latest' : }
+    EOF
+  end
+
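+  # Basic-auth checks: anonymous requests are rejected, configured Shield users are accepted.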
+  describe 'user authentication' do
+
+    describe 'single instance manifest' do
+
+      let :single_manifest do
+        base_manifest + <<-EOF
+          elasticsearch::instance { ['es-01'] :  }
+
+          Elasticsearch::Plugin { instances => ['es-01'],  }
+
+          elasticsearch::shield::user { '#{test_settings['shield_user']}':
+            password => '#{test_settings['shield_password']}',
+            roles    => ['admin'],
+          }
+          elasticsearch::shield::user { '#{test_settings['shield_user']}pwchange':
+            password => '#{test_settings['shield_hashed_password']}',
+            roles    => ['admin'],
+          }
+        EOF
+      end
+
+      it 'should apply cleanly' do
+        apply_manifest single_manifest, :catch_failures => true
+      end
+
+      it 'should be idempotent' do
+        apply_manifest(
+          single_manifest,
+          :catch_changes => true
+        )
+      end
+    end
+
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
+
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_cluster/health",
+        :faraday_middleware => middleware
+      ) do
+        it 'denies unauthorized access', :with_retries do
+          expect(response.status).to eq(401)
+        end
+      end
+
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_cluster/health",
+        {
+          :faraday_middleware => middleware,
+          :basic_auth => [
+            test_settings['shield_user'],
+            test_settings['shield_password']
+          ]
+        }
+      ) do
+        it 'permits authorized access', :with_retries do
+          expect(response.status).to eq(200)
+        end
+      end
+
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_cluster/health",
+        {
+          :faraday_middleware => middleware,
+          :basic_auth => [
+            "#{test_settings['shield_user']}pwchange",
+            test_settings['shield_hashed_plaintext']
+          ]
+        }
+      ) do
+        it 'permits authorized access using pre-hashed creds',
+           :with_retries do
+          expect(response.status).to eq(200)
+        end
+      end
+    end
+  end
+
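+  # The notify ~> relationship forces the user resource to apply the new password on refresh.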
+  describe 'changing passwords' do
+    describe 'password change manifest' do
+
+      let :passwd_manifest do
+        base_manifest + <<-EOF
+          elasticsearch::instance { ['es-01'] :  }
+
+          Elasticsearch::Plugin { instances => ['es-01'],  }
+
+          notify { 'change password' : } ~>
+          elasticsearch::shield::user { '#{test_settings['shield_user']}pwchange':
+            password => '#{test_settings['shield_password'][0..5]}',
+            roles    => ['admin'],
+          }
+        EOF
+      end
+
+      it 'should apply cleanly' do
+        apply_manifest passwd_manifest, :catch_failures => true
+      end
+    end
+
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
+
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_cluster/health",
+        {
+          :faraday_middleware => middleware,
+          :basic_auth => [
+            "#{test_settings['shield_user']}pwchange",
+            test_settings['shield_password'][0..5]
+          ]
+        }
+      ) do
+        it 'authorizes changed passwords', :with_retries do
+          expect(response.status).to eq(200)
+        end
+      end
+    end
+  end
+
+  describe 'role permission control' do
+
+    describe 'single instance manifest' do
+
+      let :single_manifest do
+        base_manifest + <<-EOF
+          elasticsearch::instance { ['es-01'] :  }
+
+          Elasticsearch::Plugin { instances => ['es-01'],  }
+
+
+          elasticsearch::shield::role { '#{@role}':
+            privileges => {
+              'cluster' => [
+                'cluster:monitor/health',
+              ]
+            }
+          }
+
+          elasticsearch::shield::user { '#{test_settings['shield_user']}':
+            password => '#{test_settings['shield_password']}',
+            roles    => ['#{@role}'],
+          }
+        EOF
+      end
+
+      it 'should apply cleanly' do
+        apply_manifest single_manifest, :catch_failures => true
+      end
+
+      it 'should be idempotent' do
+        apply_manifest(
+          single_manifest,
+          :catch_changes => true
+        )
+      end
+    end
+
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
+
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        {
+          :faraday_middleware => middleware,
+          :basic_auth => [
+            test_settings['shield_user'],
+            test_settings['shield_password']
+          ]
+        }
+      ) do
+        it 'denies stats API access', :with_retries do
+          expect(response.status).to eq(403)
+        end
+      end
+
+      describe http(
+        "http://localhost:#{test_settings['port_a']}/_cluster/health",
+        {
+          :faraday_middleware => middleware,
+          :basic_auth => [
+            test_settings['shield_user'],
+            test_settings['shield_password']
+          ]
+        }
+      ) do
+        it 'permits health API access', :with_retries do
+          expect(response.status).to eq(200)
+        end
+      end
+    end
+  end
+
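+  # TLS: each instance gets a CA-signed certificate and the API is exercised over HTTPS.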
+  describe 'tls' do
+
+    describe 'single instance' do
+
+      describe 'manifest' do
+
+        let :single_manifest do
+          base_manifest + <<-EOF
+            elasticsearch::instance { 'es-01':
+              ssl                  => true,
+              ca_certificate       => '#{@tls[:ca][:cert][:path]}',
+              certificate          => '#{@tls[:clients].first[:cert][:path]}',
+              private_key          => '#{@tls[:clients].first[:key][:path]}',
+              keystore_password    => '#{@keystore_password}',
+            }
+
+            Elasticsearch::Plugin { instances => ['es-01'],  }
+
+            elasticsearch::shield::user { '#{test_settings['shield_user']}':
+              password => '#{test_settings['shield_password']}',
+              roles => ['admin'],
+            }
+          EOF
+        end
+
+        it 'should apply cleanly' do
+          apply_manifest single_manifest, :catch_failures => true
+        end
+
+        it 'should be idempotent' do
+          apply_manifest(
+            single_manifest,
+            :catch_changes => true
+          )
+        end
+      end
+
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
+      end
+
+      describe server :container do
+        describe http(
+          "https://localhost:#{test_settings['port_a']}/_cluster/health",
+          {
+            :faraday_middleware => middleware,
+            :basic_auth => [
+              test_settings['shield_user'],
+              test_settings['shield_password']
+            ],
+            :ssl => {:verify => false}
+          }
+        ) do
+          it 'permits TLS health API access', :with_retries do
+            expect(response.status).to eq(200)
+          end
+        end
+      end
+    end
+
+    describe 'multi-instance' do
+
+      describe 'manifest' do
+
+        let :multi_manifest do
+          base_manifest + %Q{
+            elasticsearch::shield::user { '#{test_settings['shield_user']}':
+              password => '#{test_settings['shield_password']}',
+              roles => ['admin'],
+            }
+          } + @tls[:clients].each_with_index.map do |cert, i|
+            %Q{
+              elasticsearch::instance { 'es-%02d':
+                ssl                  => true,
+                ca_certificate       => '#{@tls[:ca][:cert][:path]}',
+                certificate          => '#{cert[:cert][:path]}',
+                private_key          => '#{cert[:key][:path]}',
+                keystore_password    => '#{@keystore_password}',
+                config => {
+                  'discovery.zen.minimum_master_nodes' => %s,
+                  'shield.ssl.hostname_verification' => false,
+                  'http.port' => '92%02d',
+                }
+              }
+            } % [i+1, @tls[:clients].length, i]
+          end.join("\n") + %Q{
+            Elasticsearch::Plugin { instances => %s, }
+          } % @tls[:clients].each_with_index.map { |_, i| "es-%02d" % (i+1)}.to_s
+        end
+
+        it 'should apply cleanly' do
+          apply_manifest multi_manifest, :catch_failures => true
+        end
+
+        it 'should be idempotent' do
+          apply_manifest(
+            multi_manifest,
+            :catch_changes => true
+          )
+        end
+      end
+
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
+      end
+
+      describe port(test_settings['port_b']) do
+        it 'open', :with_retries do should be_listening end
+      end
+
+      describe server :container do
+        describe http(
+          "https://localhost:#{test_settings['port_a']}/_nodes",
+          {
+            :faraday_middleware => middleware,
+            :basic_auth => [
+              test_settings['shield_user'],
+              test_settings['shield_password']
+            ],
+            :ssl => {:verify => false}
+          }
+        ) do
+          it 'clusters over TLS', :with_generous_retries do
+            expect(
+              JSON.parse(response.body)['nodes'].size
+            ).to eq(2)
+          end
+        end
+      end
+    end
+  end
+
+  describe 'module removal' do
+
+    describe 'manifest' do
+
+      let :removal_manifest do
+        %Q{
+          class { 'elasticsearch' : ensure => absent, }
+
+          Elasticsearch::Instance { ensure => absent, }
+          elasticsearch::instance { %s : }
+        } % @tls[:clients].each_with_index.map do |_, i|
+          "es-%02d" % (i+1)
+        end.to_s
+      end
+
+      it 'should apply cleanly' do
+        apply_manifest removal_manifest, :catch_failures => true
+      end
+    end
+  end
+
+  # Boilerplate for shield setup
+  before :all do
+
+    @keystore_password = SecureRandom.hex
+    @role = [*('a'..'z')].sample(8).join
+
+    # Setup TLS cert placement
+    @tls = gen_certs(2, '/tmp')
+
+    create_remote_file hosts, @tls[:ca][:cert][:path], @tls[:ca][:cert][:pem]
+    @tls[:clients].each do |node|
+      node.each do |type, params|
+        create_remote_file hosts, params[:path], params[:pem]
+      end
+    end
+  end
+end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/020_usergroup_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/020_usergroup_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,47 +1,52 @@
 require 'spec_helper_acceptance'
 
-describe "elasticsearch class:" do
+describe 'elasticsearch::elasticsearch_user' do
+  describe 'changing service user', :with_cleanup do
+    describe 'manifest' do
+      before :all do
+        shell 'rm -rf /usr/share/elasticsearch'
+      end
 
-  describe "Run as a different user" do
-
-    it 'should run successfully' do
+      pp = <<-EOS
+        user { 'esuser':
+          ensure => 'present',
+          groups => ['esgroup', 'esuser']
+        }
+        group { 'esuser': ensure => 'present' }
+        group { 'esgroup': ensure => 'present' }
 
-      write_hiera_config('')
-      shell("rm -rf /usr/share/elasticsearch")
-      pp = "user { 'esuser': ensure => 'present', groups => ['esgroup', 'esuser'] }
-            group { 'esuser': ensure => 'present' }
-            group { 'esgroup': ensure => 'present' }
-            class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, manage_repo => true, repo_version => '#{test_settings['repo_version']}', java_install => true, elasticsearch_user => 'esuser', elasticsearch_group => 'esgroup' }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
+        class { 'elasticsearch':
+          config => {
+            'cluster.name' => '#{test_settings['cluster_name']}'
+          },
+          manage_repo => true,
+          repo_version => '#{test_settings['repo_version']}',
+          java_install => true,
+          elasticsearch_user => 'esuser',
+          elasticsearch_group => 'esgroup'
+        }
 
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+        elasticsearch::instance { 'es-01':
+          config => {
+            'node.name' => 'elasticsearch001',
+            'http.port' => '#{test_settings['port_a']}'
+          }
+        }
+      EOS
+
+      it 'applies cleanly' do
+        apply_manifest pp, :catch_failures => true
+      end
+      it 'is idempotent' do
+        apply_manifest pp, :catch_changes => true
+      end
     end
 
-
     describe service(test_settings['service_name_a']) do
       it { should be_enabled }
       it { should be_running }
     end
 
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      it { should be_owned_by 'esuser' }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "make sure elasticsearch can serve requests #{test_settings['port_a']}" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
     describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
       it { should be_file }
       it { should be_owned_by 'esuser' }
@@ -63,29 +68,24 @@
       it { should be_owned_by 'esuser' }
     end
 
+    describe port(test_settings['port_a']) do
+      it 'open', :with_retries do should be_listening end
+    end
 
+    describe server :container do
+      describe http(
+        "http://localhost:#{test_settings['port_a']}",
+      ) do
+        describe 'instance a' do
+          it 'serves requests', :with_retries do
+            expect(response.status).to eq(200)
+          end
+        end
+      end
+    end
   end
 
-
-  describe "Cleanup" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
+  after :all do
+    shell 'rm -rf /usr/share/elasticsearch'
   end
-
 end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/021_es2x_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/021_es2x_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,351 +1,157 @@
 require 'spec_helper_acceptance'
 
-describe "elasticsearch 2x:" do
-
-  shell("mkdir -p #{default['distmoduledir']}/another/files")
-  shell("cp /tmp/elasticsearch-kopf.zip #{default['distmoduledir']}/another/files/elasticsearch-kopf.zip")
-
-  describe "Install a plugin from official repository" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true, version => '2.0.0' }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::plugin{'lmenezes/elasticsearch-kopf': instances => 'es-01' }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
+describe 'elasticsearch 2x' do
+  context 'upgrading', :upgrade => true do
+    describe '2.0.0 install' do
+      describe 'manifest' do
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}',
+              'network.host' => '0.0.0.0',
+            },
+            manage_repo => true,
+            repo_version => '#{test_settings['repo_version2x']}',
+            java_install => true,
+            version => '2.0.0',
+            restart_on_change => true,
+          }
 
-    it 'make sure the directory exists' do
-      shell('ls /usr/share/elasticsearch/plugins/kopf/', {:acceptable_exit_codes => 0})
-    end
-
-    it 'make sure elasticsearch reports it as existing' do
-      curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep kopf", 0)
-    end
-
-  end
-  describe "Install a plugin from custom git repo" do
-    it 'should run successfully' do
-    end
-
-    it 'make sure the directory exists' do
-    end
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
 
-    it 'make sure elasticsearch reports it as existing' do
-    end
-
-  end
-
-  if fact('puppetversion') =~ /3\.[2-9]\./
+          Elasticsearch::Plugin { instances => 'es-01' }
+          elasticsearch::plugin { 'cloud-aws': }
+          elasticsearch::plugin { 'marvel-agent': }
+          elasticsearch::plugin { 'license': }
+        EOS
 
-    describe "Install a non existing plugin" do
-
-      it 'should run successfully' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true, version => '2.0.0' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'elasticsearch/non-existing': module_dir => 'non-existing', instances => 'es-01' }
-        "
-        #  Run it twice and test for idempotency
-        apply_manifest(pp, :expect_failures => true)
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
+        end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
+        end
       end
 
-    end
-
-  else
-    # The exit codes have changes since Puppet 3.2x
-    # Since beaker expectations are based on the most recent puppet code All runs on previous versions fails.
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-
-  describe "install plugin while running ES under user 'elasticsearch'" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true, elasticsearch_user => 'elasticsearch', elasticsearch_group => 'elasticsearch', version => '2.0.0' }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-            elasticsearch::plugin{'lmenezes/elasticsearch-kopf': module_dir => 'kopf', instances => 'es-01' }
-      "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
+      describe file('/usr/share/elasticsearch/plugins/cloud-aws') do
+        it { should be_directory }
+      end
 
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    it 'make sure the directory exists' do
-      shell('ls /usr/share/elasticsearch/plugins/kopf/', {:acceptable_exit_codes => 0})
-    end
-
-    it 'make sure elasticsearch reports it as existing' do
-      curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep kopf", 0)
-    end
-
-  end
-
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-  describe 'upgrading', :upgrade => true do
-
-    describe 'Setup 2.0.0' do
-      it 'should run successful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true, version => '2.0.0' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'cloud-aws': instances => 'es-01' }
-        "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
       end
 
-      it 'make sure the directory exists' do
-        shell('ls /usr/share/elasticsearch/plugins/cloud-aws/', {:acceptable_exit_codes => 0})
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep cloud-aws | grep 2.0.0", 0)
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        ) do
+          it 'returns cloud-aws with version 2.0.0', :with_retries do
+            json = JSON.parse(response.body)
+            plugins = json['nodes']['plugins'].map do |h|
+              {
+                name: h['name'],
+                version: h['version']
+              }
+            end
+            expect(plugins).to include({
+              name: 'cloud-aws',
+              version: '2.0.0'
+            })
+          end
+        end
       end
 
-    end
-
-    describe "Upgrade to 2.0.1" do
-      it 'Should run succesful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true, version => '2.0.1' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'cloud-aws': instances => 'es-01' }
-        "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
-      end
-
-      it 'make sure the directory exists' do
-        shell('ls /usr/share/elasticsearch/plugins/cloud-aws/', {:acceptable_exit_codes => 0})
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep cloud-aws | grep 2.0.1", 0)
+      describe server :container do
+        describe http "http://localhost:#{test_settings['port_a']}" do
+          it 'returns ES version 2.0.0', :with_retries do
+            expect(
+              JSON.parse(response.body)['version']['number']
+            ).to eq('2.0.0')
+          end
+        end
       end
     end
 
-  end
-
-  describe "offline install via puppet resource" do
-      it 'Should run succesful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'kopf': source => 'puppet:///modules/another/elasticsearch-kopf.zip', instances => 'es-01' }
-        "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep kopf", 0)
-      end
-
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-            file { '/usr/share/elasticsearch/plugin':
-              ensure => 'absent',
-              force => true,
-              recurse => true,
-            }
-           "
+    describe 'upgrading to 2.0.1' do
+      describe 'manifest' do
+        pp = <<-EOS
+          class { 'elasticsearch':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'cluster.name' => '#{test_settings['cluster_name']}',
+              'network.host' => '0.0.0.0',
+            },
+            manage_repo => true,
+            repo_version => '#{test_settings['repo_version2x']}',
+            java_install => true,
+            version => '2.0.1',
+            restart_on_change => true,
+          }
 
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
+          elasticsearch::instance { 'es-01':
+            config => {
+              'node.name' => 'elasticsearch001',
+              'http.port' => '#{test_settings['port_a']}'
+            }
+          }
 
-  end
-
-  describe "offline install via file url" do
-      it 'Should run succesful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'kopf': url => 'file:///tmp/elasticsearch-kopf.zip', instances => 'es-01' }
-        "
+          Elasticsearch::Plugin { instances => 'es-01' }
+          elasticsearch::plugin { 'cloud-aws': }
+          elasticsearch::plugin { 'marvel-agent': }
+          elasticsearch::plugin { 'license': }
+        EOS
 
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep kopf", 0)
+        it 'applies cleanly' do
+          apply_manifest pp, :catch_failures => true
+        end
+        it 'is idempotent' do
+          apply_manifest pp, :catch_changes => true
+        end
       end
 
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
-    end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
+      describe file('/usr/share/elasticsearch/plugins/cloud-aws') do
+        it { should be_directory }
+      end
 
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
-  end
-
-  describe "install via url" do
-      it 'Should run succesful' do
-        pp = "class { 'elasticsearch': config => { 'node.name' => 'elasticsearch001', 'cluster.name' => '#{test_settings['cluster_name']}' }, manage_repo => true, repo_version => '#{test_settings['repo_version2x']}', java_install => true }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'HQ': url => 'https://github.com/royrusso/elasticsearch-HQ/archive/v2.0.3.zip', instances => 'es-01' }
-        "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-
+      describe port(test_settings['port_a']) do
+        it 'open', :with_retries do should be_listening end
       end
 
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep hq", 0)
+      describe server :container do
+        describe http(
+          "http://localhost:#{test_settings['port_a']}/_cluster/stats",
+        ) do
+          it 'reports cloud-aws as upgraded', :with_retries do
+            json = JSON.parse(response.body)
+            plugins = json['nodes']['plugins'].map do |h|
+              {
+                name: h['name'],
+                version: h['version']
+              }
+            end
+            expect(plugins).to include({
+              name: 'cloud-aws',
+              version: '2.0.1'
+            })
+          end
+        end
       end
 
-  end
-
-  describe "module removal" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': ensure => 'absent' }
-            elasticsearch::instance{ 'es-01': ensure => 'absent' }
-           "
-
-      apply_manifest(pp, :catch_failures => true)
+      describe server :container do
+        describe http "http://localhost:#{test_settings['port_a']}" do
+          it 'reports ES as upgraded', :with_retries do
+            expect(
+              JSON.parse(response.body)['version']['number']
+            ).to eq('2.0.1')
+          end
+        end
+      end
     end
-
-    describe file('/etc/elasticsearch/es-01') do
-      it { should_not be_directory }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should_not be_installed }
-    end
-
-    describe service(test_settings['service_name_a']) do
-      it { should_not be_enabled }
-      it { should_not be_running }
-    end
-
   end
-
 end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/basic_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,7 @@
+require 'spec_helper_acceptance'
+
+# Here we put the most basic, fundamental tests: the ultra-obvious stuff.
+
+describe file("#{default['distmoduledir']}/elasticsearch/metadata.json") do
+  it { should be_file }
+end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/integration001.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,153 +0,0 @@
-require 'spec_helper_acceptance'
-
-describe "Integration testing" do
-
-  shell("mkdir -p #{default['distmoduledir']}/another/files")
-  shell("echo '#{test_settings['good_json']}' >> #{default['distmoduledir']}/another/files/good.json")
-  shell("echo '#{test_settings['bad_json']}' >> #{default['distmoduledir']}/another/files/bad.json")
-
-
-  describe "Setup Elasticsearch", :main => true do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: elasticsearch001' }
-    end
-
-    describe file('/usr/share/elasticsearch/templates_import') do
-      it { should be_directory }
-    end
-
-  end
-
-  describe "Template tests", :template => true do
-
-    describe "Insert a template with valid json content" do
-
-      it 'should run successfully' do
-        pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::template { 'foo': ensure => 'present', file => 'puppet:///modules/another/good.json' }"
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-      end
-
-      it 'should report as existing in Elasticsearch' do
-        curl_with_retries('validate template as installed', default, "http://localhost:#{test_settings['port_a']}/_template/foo | grep logstash", 0)
-      end
-    end
-
-    if fact('puppetversion') =~ /3\.[2-9]\./
-      describe "Insert a template with bad json content" do
-
-        it 'run should fail' do
-          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
-                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-                elasticsearch::template { 'foo': ensure => 'present', file => 'puppet:///modules/another/bad.json' }"
-
-          apply_manifest(pp, :expect_failures => true)
-        end
-
-      end
-
-    else
-      # The exit codes have changes since Puppet 3.2x
-      # Since beaker expectations are based on the most recent puppet code All runs on previous versions fails.
-    end
-
-  end
-
-  describe "Plugin tests", :plugin => true do
-
-    describe "Install a plugin from official repository" do
-
-      it 'should run successfully' do
-        pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'lmenezes/elasticsearch-kopf': instances => 'es-01' }
-             "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-      end
-
-      describe service(test_settings['service_name_a']) do
-        it { should be_enabled }
-        it { should be_running }
-      end
-
-      describe package(test_settings['package_name']) do
-        it { should be_installed }
-      end
-
-      describe file(test_settings['pid_file_a']) do
-        it { should be_file }
-        its(:content) { should match /[0-9]+/ }
-      end
-
-      it 'make sure the directory exists' do
-        shell('ls /usr/share/elasticsearch/plugins/kopf/', {:acceptable_exit_codes => 0})
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep kopf", 0)
-      end
-
-    end
-
-    if fact('puppetversion') =~ /3\.[2-9]\./
-
-      describe "Install a non existing plugin" do
-
-        it 'should run successfully' do
-          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
-                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-                elasticsearch::plugin{'elasticsearch/non-existing': module_dir => 'non-existing', instances => 'es-01' }
-               "
-          apply_manifest(pp, :expect_failures => true)
-        end
-
-      end
-
-    else
-      # The exit codes have changes since Puppet 3.2x
-      # Since beaker expectations are based on the most recent puppet code All runs on previous versions fails.
-    end
-
-  end
-
-end
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/centos-6-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/centos-6-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -5,12 +5,12 @@
       - database
       - dashboard
     platform: el-6-x86_64
-    image: electrical/centos:6.4
+    image: centos:6.7
     hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
     docker_image_commands:
-      - 'yum install -y wget ntpdate rubygems ruby-augeas ruby-devel augeas-devel'
-      - 'touch /etc/sysconfig/network'
-    docker_preserve_image: true
+      - yum install -y wget tar which
+      - rm /etc/init/tty.conf
 CONFIG:
   type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/centos-7-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/centos-7-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -5,11 +5,15 @@
       - database
       - dashboard
     platform: el-7-x86_64
-    image: electrical/centos:7
+    image: centos:7
     hypervisor: docker
-    docker_cmd: '["/usr/sbin/init"]'
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
     docker_image_commands:
-      - 'yum install -y wget ntpdate rubygems ruby-devel augeas-devel ruby-augeas tar'
-    docker_preserve_image: true
+      - yum install -y wget which cronie iproute
+      - mkdir -p /etc/selinux/targeted/contexts/
+      - echo '<busconfig><selinux></selinux></busconfig>' > /etc/selinux/targeted/contexts/dbus_contexts
+      - rm /lib/systemd/system/systemd*udev*
+      - rm /lib/systemd/system/getty.target
 CONFIG:
   type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-6-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,16 +0,0 @@
-HOSTS:
-  debian-6:
-    roles:
-      - master
-      - database
-      - dashboard
-    platform: debian-6-amd64
-    image: electrical/debian:6.0.8
-    hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
-    docker_image_commands:
-      - 'apt-get install -yq lsb-release wget net-tools ruby rubygems ruby1.8-dev libaugeas-dev libaugeas-ruby ntpdate locales-all'
-      - 'REALLY_GEM_UPDATE_SYSTEM=1 gem update --system --no-ri --no-rdoc'
-    docker_preserve_image: true
-CONFIG:
-  type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-7-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-7-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -5,12 +5,11 @@
       - database
       - dashboard
     platform: debian-7-amd64
-    image: electrical/debian:7.3
+    image: debian:7
     hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
     docker_image_commands:
-      - 'apt-get install -yq lsb-release wget net-tools ruby rubygems ruby1.8-dev libaugeas-dev libaugeas-ruby ntpdate locales-all'
-      - 'REALLY_GEM_UPDATE_SYSTEM=1 gem update --system --no-ri --no-rdoc'
-    docker_preserve_image: true
+      - apt-get install -yq wget libssl-dev net-tools
 CONFIG:
   type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-8-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/debian-8-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -5,12 +5,13 @@
       - database
       - dashboard
     platform: debian-8-amd64
-    image: electrical/debian:8
+    image: debian:8
     hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
+    docker_cmd: ["/bin/systemd"]
+    docker_preserve_image: true
     docker_image_commands:
-      - 'apt-get install -yq ruby ruby-dev lsb-release wget net-tools libaugeas-dev libaugeas-ruby ntpdate locales-all'
-      - 'REALLY_GEM_UPDATE_SYSTEM=1 gem update --system --no-ri --no-rdoc'
-    docker_preserve_image: true
+      - apt-get install -yq wget net-tools
+      - rm /lib/systemd/system/systemd*udev*
+      - rm /lib/systemd/system/getty.target
 CONFIG:
   type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/opensuse-121-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-HOSTS:
-  opensuse-121-x64:
-    roles:
-      - master
-      - database
-      - dashboard
-    platform: sles-12-x64
-    box: opensuse-121-x64
-    box_url: https://s3.amazonaws.com/circlejtp/OpenSuseVagrant/OpenSuse12_1x64_July14.box
-    hypervisor: vagrant
-CONFIG:
-  type: foss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/opensuse-13-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,21 @@
+HOSTS:
+  opensuse-13-x64:
+    roles:
+      - master
+      - database
+      - dashboard
+    platform: sles-13-x86_64
+    image: opensuse:13.2
+    hypervisor: docker
+    docker_cmd: ["/bin/systemd"]
+    docker_preserve_image: true
+    docker_image_commands:
+      - zypper install -y dbus-1 rubygems which augeas augeas-lenses wget
+      - zypper install -y -t pattern devel_basis || true
+      - mkdir -p /etc/selinux/targeted/contexts/
+      - echo '<busconfig><selinux></selinux></busconfig>' > /etc/selinux/targeted/contexts/dbus_contexts
+      - mkdir /etc/systemd/system/sshd.service.d/
+      - echo -e "[Service]\nExecStart=\nExecStart=/usr/bin/sshd -D"
+      - ln -s /usr/lib/systemd/system/sshd.service /etc/systemd/system/multi-user.target.wants/sshd.service
+CONFIG:
+  type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/opensuse-131-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-HOSTS:
-  opensuse-131-x64:
-    roles:
-      - master
-      - database
-      - dashboard
-    platform: sles-13-x64
-    box: opensuse-13.1-test
-    box_url: https://s3-eu-west-1.amazonaws.com/users.eu.elasticsearch.org/electrical/opensuse-131.box
-    hypervisor: vagrant
-CONFIG:
-  type: foss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/oracle-6-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,16 @@
+HOSTS:
+  centos-6-x64:
+    roles:
+      - master
+      - database
+      - dashboard
+    platform: el-6-x86_64
+    image: oraclelinux:6
+    hypervisor: docker
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
+    docker_image_commands:
+      - yum install -y wget
+      - rm /etc/init/tty.conf
+CONFIG:
+  type: foss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/oracle-7-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,19 @@
+HOSTS:
+  oracle-7-x64:
+    roles:
+      - master
+      - database
+      - dashboard
+    platform: el-7-x86_64
+    image: oraclelinux:7
+    hypervisor: docker
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
+    docker_image_commands:
+      - yum install -y wget which cronie
+      - mkdir -p /etc/selinux/targeted/contexts/
+      - echo '<busconfig><selinux></selinux></busconfig>' > /etc/selinux/targeted/contexts/dbus_contexts
+      - rm /lib/systemd/system/systemd*udev*
+      - rm /lib/systemd/system/getty.target
+CONFIG:
+  type: foss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/sles-11-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,18 @@
+HOSTS:
+  sles-11-x64:
+    roles:
+      - master
+      - database
+      - dashboard
+    platform: sles-11-x64
+    image: dliappis/sles:11sp4
+    hypervisor: docker
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
+    docker_image_commands:
+      - gem uninstall puppet hiera
+      - zypper install -y augeas augeas-lenses pkgconfig
+      - mkdir -p /etc/puppetlabs/code /etc/puppet/modules
+      - ln -sf /etc/puppet/modules /etc/puppetlabs/code/modules
+CONFIG:
+  type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/sles-11sp3-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,12 +0,0 @@
-HOSTS:
-  sles-11-x64:
-    roles:
-      - master
-      - database
-      - dashboard
-    platform: sles-11-x86_64
-    box: sles-11sp3-x64
-    box_url: https://s3-eu-west-1.amazonaws.com/users.eu.elasticsearch.org/electrical/sles-11sp3-x64.box
-    hypervisor: vagrant
-CONFIG:
-  type: foss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/sles-12-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,19 @@
+HOSTS:
+  sles-12-x64:
+    roles:
+      - master
+      - database
+      - dashboard
+    platform: sles-12-x86_64
+    image: dliappis/sles:12
+    hypervisor: docker
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
+    docker_image_commands:
+      - rm /etc/zypp/repos.d/devel_languages_python.repo
+      - gem uninstall -x puppet hiera
+      - zypper clean -a
+      - zypper install --force-resolution -y augeas which
+      - ln -s /usr/lib/systemd/system/sshd.service /etc/systemd/system/multi-user.target.wants/sshd.service
+CONFIG:
+  type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1204-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1204-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -5,11 +5,14 @@
       - database
       - dashboard
     platform: ubuntu-12.04-amd64
-    image: electrical/ubuntu:12.04
+    image: ubuntu:12.04
     hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
     docker_image_commands:
-      - 'apt-get install -yq ruby1.8-dev libaugeas-dev libaugeas-ruby ruby rubygems lsb-release wget net-tools curl'
-    docker_preserve_image: true
+      - apt-get install -yq libssl-dev net-tools
+      - ln -sf /sbin/initctl.distrib /sbin/initctl
+      - locale-gen en_US en_US.UTF-8
+      - dpkg-reconfigure locales
 CONFIG:
   type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1210-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-HOSTS:
-  ubuntu-12-10:
-    roles:
-      - master
-      - database
-      - dashboard
-    platform: ubuntu-12.10-amd64
-    image: electrical/ubuntu:12.10
-    hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
-    docker_image_commands:
-      - 'apt-get install -yq ruby1.8-dev libaugeas-dev libaugeas-ruby ruby rubygems lsb-release wget net-tools curl'
-    docker_preserve_image: true
-CONFIG:
-  type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1304-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-HOSTS:
-  ubuntu-13-04:
-    roles:
-      - master
-      - database
-      - dashboard
-    platform: ubuntu-13.04-amd64
-    image: electrical/ubuntu:13.04
-    hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
-    docker_image_commands:
-      - 'apt-get install -yq ruby1.8-dev libaugeas-dev libaugeas-ruby ruby rubygems lsb-release wget net-tools curl'
-    docker_preserve_image: true
-CONFIG:
-  type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1310-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-HOSTS:
-  ubuntu-13-10:
-    roles:
-      - master
-      - database
-      - dashboard
-    platform: ubuntu-13.10-amd64
-    image: electrical/ubuntu:13.10
-    hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
-    docker_image_commands:
-      - 'apt-get install -yq ruby1.8-dev libaugeas-dev libaugeas-ruby ruby rubygems lsb-release wget net-tools curl'
-    docker_preserve_image: true
-CONFIG:
-  type: foss
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1404-x64.yml	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1404-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -5,11 +5,14 @@
       - database
       - dashboard
     platform: ubuntu-14.04-amd64
-    image: electrical/ubuntu:14.04
+    image: ubuntu:14.04
     hypervisor: docker
-    docker_cmd: '["/sbin/init"]'
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
     docker_image_commands:
-      - 'apt-get install -yq ruby ruby1.9.1-dev libaugeas-dev libaugeas-ruby lsb-release wget net-tools curl'
-    docker_preserve_image: true
+      - apt-get install -yq libssl-dev
+      - ln -sf /sbin/initctl.distrib /sbin/initctl
+      - locale-gen en_US en_US.UTF-8
+      - dpkg-reconfigure locales
 CONFIG:
   type: foss
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/acceptance/nodesets/ubuntu-server-1604-x64.yml	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,18 @@
+HOSTS:
+  ubuntu-16-04:
+    roles:
+      - master
+      - database
+      - dashboard
+    platform: ubuntu-16.04-amd64
+    image: ubuntu:16.04
+    hypervisor: docker
+    docker_cmd: ["/sbin/init"]
+    docker_preserve_image: true
+    docker_image_commands:
+      - apt-get install -yq libssl-dev puppet
+      - locale-gen en_US en_US.UTF-8
+      - dpkg-reconfigure locales
+CONFIG:
+  type: foss
+  skip_puppet_install: true
--- a/dev/provisioning/modules/elasticsearch/spec/acceptance/xplugins001.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,91 +0,0 @@
-require 'spec_helper_acceptance'
-
-describe "Integration testing" do
-
-  describe "Setup Elasticsearch" do
-
-    it 'should run successfully' do
-      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
-            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-           "
-
-      # Run it twice and test for idempotency
-      apply_manifest(pp, :catch_failures => true)
-      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-    end
-
-
-    describe service(test_settings['service_name_a']) do
-      it { should be_enabled }
-      it { should be_running }
-    end
-
-    describe package(test_settings['package_name']) do
-      it { should be_installed }
-    end
-
-    describe file(test_settings['pid_file_a']) do
-      it { should be_file }
-      its(:content) { should match /[0-9]+/ }
-    end
-
-    describe "Elasticsearch serves requests on" do
-      it {
-        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
-      }
-    end
-
-    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
-      it { should be_file }
-      it { should contain 'name: elasticsearch001' }
-    end
-
-    describe file('/usr/share/elasticsearch/templates_import') do
-      it { should be_directory }
-    end
-
-  end
-
-  describe "Plugin tests" do
-
-    describe "Install a plugin from official repository" do
-
-      it 'should run successfully' do
-        pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
-              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
-              elasticsearch::plugin{'#{ENV['LICENSE_PLUGIN_NAME']}': instances => 'es-01', url => '#{ENV['LICENSE_PLUGIN_URL']}' }
-              elasticsearch::plugin{'#{ENV['PLUGIN_NAME']}': instances => 'es-01', url => '#{ENV['PLUGIN_URL']}' }
-             "
-
-        # Run it twice and test for idempotency
-        apply_manifest(pp, :catch_failures => true)
-        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
-      end
-
-      describe service(test_settings['service_name_a']) do
-        it { should be_enabled }
-        it { should be_running }
-      end
-
-      describe package(test_settings['package_name']) do
-        it { should be_installed }
-      end
-
-      describe file(test_settings['pid_file_a']) do
-        it { should be_file }
-        its(:content) { should match /[0-9]+/ }
-      end
-
-      it 'make sure the directory exists' do
-        shell("ls /usr/share/elasticsearch/plugins/#{ENV['PLUGIN_NAME']}", {:acceptable_exit_codes => 0})
-      end
-
-      it 'make sure elasticsearch reports it as existing' do
-        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep #{ENV['PLUGIN_NAME']}", 0)
-      end
-
-    end
-
-  end
-
-end
--- a/dev/provisioning/modules/elasticsearch/spec/classes/000_elasticsearch_init_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/classes/000_elasticsearch_init_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -13,20 +13,24 @@
       case facts[:osfamily]
       when 'Debian'
         let(:defaults_path) { '/etc/default' }
+        let(:system_service_folder) { '/lib/systemd/system' }
         let(:pkg_ext) { 'deb' }
         let(:pkg_prov) { 'dpkg' }
         let(:version_add) { '' }
         if facts[:lsbmajdistrelease] >= '8'
+          let(:systemd_service_path) { '/lib/systemd/system' }
           test_pid = true
         else
           test_pid = false
         end
       when 'RedHat'
         let(:defaults_path) { '/etc/sysconfig' }
+        let(:system_service_folder) { '/lib/systemd/system' }
         let(:pkg_ext) { 'rpm' }
         let(:pkg_prov) { 'rpm' }
         let(:version_add) { '-1' }
         if facts[:operatingsystemmajrelease] >= '7'
+          let(:systemd_service_path) { '/lib/systemd/system' }
           test_pid = true
         else
           test_pid = false
@@ -36,6 +40,12 @@
         let(:pkg_ext) { 'rpm' }
         let(:pkg_prov) { 'rpm' }
         let(:version_add) { '-1' }
+        if facts[:operatingsystem] == 'OpenSuSE' and
+            facts[:operatingsystemrelease].to_i <= 12
+          let(:systemd_service_path) { '/lib/systemd/system' }
+        else
+          let(:systemd_service_path) { '/usr/lib/systemd/system' }
+        end
       end
 
       let(:facts) do
@@ -60,22 +70,25 @@
         it { should contain_file('/etc/elasticsearch') }
         it { should contain_file('/usr/share/elasticsearch/templates_import') }
         it { should contain_file('/usr/share/elasticsearch/scripts') }
+        it { should contain_file('/usr/share/elasticsearch/shield') }
         it { should contain_file('/usr/share/elasticsearch') }
         it { should contain_file('/usr/share/elasticsearch/lib') }
-        # it { should contain_file('/usr/share/elasticsearch/plugins') }
-        it { should contain_file('/usr/share/elasticsearch/bin').with(:mode => '0755') }
-	it { should contain_augeas("#{defaults_path}/elasticsearch") }
+        it { should contain_augeas("#{defaults_path}/elasticsearch") }
+
+        it { should contain_exec('remove_plugin_dir') }
 
         # Base files
         if test_pid == true
-          it { should contain_file('/usr/lib/tmpfiles.d/elasticsearch.conf') }
+          it { should contain_exec('systemctl mask elasticsearch.service')}
+          it { should contain_file(
+            '/usr/lib/tmpfiles.d/elasticsearch.conf'
+          ) }
         end
 
-	# file removal from package
-	it { should contain_file('/etc/init.d/elasticsearch').with(:ensure => 'absent') }
-	it { should contain_file('/lib/systemd/system/elasticsearch.service').with(:ensure => 'absent') }
-	it { should contain_file('/etc/elasticsearch/elasticsearch.yml').with(:ensure => 'absent') }
-	it { should contain_file('/etc/elasticsearch/logging.yml').with(:ensure => 'absent') }
+        # file removal from package
+        it { should contain_file('/etc/init.d/elasticsearch').with(:ensure => 'absent') }
+        it { should contain_file('/etc/elasticsearch/elasticsearch.yml').with(:ensure => 'absent') }
+        it { should contain_file('/etc/elasticsearch/logging.yml').with(:ensure => 'absent') }
       end
 
       context 'package installation' do
@@ -98,6 +111,12 @@
             }
 
             it { should contain_package('elasticsearch').with(:ensure => "1.0#{version_add}") }
+            case facts[:osfamily]
+            when 'RedHat'
+              it { should contain_yum__versionlock(
+                "0:elasticsearch-1.0#{version_add}.noarch"
+              ) }
+            end
           end
 
           if facts[:osfamily] == 'RedHat'
@@ -110,6 +129,9 @@
               }
 
               it { should contain_package('elasticsearch').with(:ensure => "1.1-2") }
+              it { should contain_yum__versionlock(
+                '0:elasticsearch-1.1-2.noarch'
+              ) }
             end
           end
 
@@ -244,9 +266,23 @@
           })
         }
 
-        it { should contain_package('elasticsearch').with(:ensure => 'purged') }
-        it { should contain_file('/usr/share/elasticsearch/plugins').with(:ensure => 'absent') }
+        case facts[:osfamily]
+        when 'Suse'
+          it { should contain_package('elasticsearch').with(:ensure => 'absent') }
+        when 'RedHat'
+          it { should contain_exec(
+            'elasticsearch_purge_versionlock.list'
+          ) }
+        else
+          it { should contain_package('elasticsearch').with(:ensure => 'purged') }
+        end
 
+        it {
+          should contain_file('/usr/share/elasticsearch/plugins')
+            .with(
+              :ensure => 'absent',
+          )
+        }
       end
 
       context 'When managing the repository' do
@@ -264,11 +300,18 @@
           it { should contain_apt__source('elasticsearch').with(:release => 'stable', :repos => 'main', :location => 'http://packages.elastic.co/elasticsearch/1.0/debian') }
         when 'RedHat'
           it { should contain_class('elasticsearch::repo').that_requires('Anchor[elasticsearch::begin]') }
-          it { should contain_yumrepo('elasticsearch').with(:baseurl => 'http://packages.elastic.co/elasticsearch/1.0/centos', :gpgkey => 'http://packages.elastic.co/GPG-KEY-elasticsearch', :enabled => 1) }
+          it { should contain_yumrepo('elasticsearch')
+            .with(
+              :baseurl => 'http://packages.elastic.co/elasticsearch/1.0/centos',
+              :gpgkey  => 'https://artifacts.elastic.co/GPG-KEY-elasticsearch',
+              :enabled => 1
+          ) }
+          it { should contain_exec('elasticsearch_yumrepo_yum_clean') }
         when 'SuSE'
           it { should contain_class('elasticsearch::repo').that_requires('Anchor[elasticsearch::begin]') }
           it { should contain_exec('elasticsearch_suse_import_gpg') }
           it { should contain_zypprepo('elasticsearch').with(:baseurl => 'http://packages.elastic.co/elasticsearch/1.0/centos') }
+          it { should contain_exec('elasticsearch_zypper_refresh_elasticsearch') }
         end
 
       end
@@ -282,6 +325,65 @@
         it { expect { should raise_error(Puppet::Error, 'Please fill in a repository version at $repo_version') } }
       end
 
+      context 'package pinning' do
+
+        let :params do
+          default_params.merge({
+            :package_pin => true,
+            :version => '1.6.0'
+          })
+        end
+
+        it { should contain_class(
+          'elasticsearch::package::pin'
+        ).that_comes_before(
+          'Class[elasticsearch::package]'
+        ) }
+
+        case facts[:osfamily]
+        when 'Debian'
+          context 'is supported' do
+            it { should contain_apt__pin('elasticsearch').with(:packages => ['elasticsearch'], :version => '1.6.0') }
+          end
+        when 'RedHat'
+          context 'is supported' do
+            it { should contain_yum__versionlock(
+              '0:elasticsearch-1.6.0-1.noarch'
+            ) }
+          end
+        else
+          context 'is not supported' do
+            pending("unable to test for warnings yet. https://github.com/rodjek/rspec-puppet/issues/108")
+          end
+        end
+      end
+
+      context 'repository priority pinning' do
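+        # repo_priority is expected to surface as the apt source pin on Debian
+        # and as the yumrepo priority on RedHat.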
+
+        let :params do
+          default_params.merge({
+            :manage_repo => true,
+            :repo_priority => 10,
+            :repo_version => '2.x'
+          })
+        end
+
+        case facts[:osfamily]
+        when 'Debian'
+          context 'is supported' do
+            it { should contain_apt__source('elasticsearch').with(
+              :pin => 10
+            ) }
+          end
+        when 'RedHat'
+          context 'is supported' do
+            it { should contain_yumrepo('elasticsearch').with(
+              :priority => 10
+            ) }
+          end
+        end
+      end
+
       context "Running a a different user" do
 
         let (:params) {
--- a/dev/provisioning/modules/elasticsearch/spec/classes/001_hiera_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/classes/001_hiera_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -38,6 +38,7 @@
         it { should contain_file('/etc/elasticsearch/es-01').with(:ensure => 'directory') }
         it { should contain_file('/etc/elasticsearch/es-01/elasticsearch.yml') }
         it { should contain_file('/etc/elasticsearch/es-01/logging.yml') }
+        it { should contain_exec('mkdir_logdir_elasticsearch_es-01').with(:command => 'mkdir -p /var/log/elasticsearch/es-01') }
         it { should contain_exec('mkdir_datadir_elasticsearch_es-01').with(:command => 'mkdir -p /usr/share/elasticsearch/data/es-01') }
         it { should contain_file('/usr/share/elasticsearch/data/es-01') }
         it { should contain_file('/etc/init.d/elasticsearch-es-01') }
@@ -65,8 +66,10 @@
         it { should contain_file('/etc/elasticsearch/es-01').with(:ensure => 'directory') }
         it { should contain_file('/etc/elasticsearch/es-01/elasticsearch.yml') }
         it { should contain_file('/etc/elasticsearch/es-01/logging.yml') }
+        it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
         it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
         it { should contain_file('/usr/share/elasticsearch/data/es-01') }
+        it { should contain_file('/var/log/elasticsearch/es-01') }
         it { should contain_file('/etc/init.d/elasticsearch-es-01') }
         it { should contain_file('/etc/elasticsearch/es-01/scripts').with(:target => '/usr/share/elasticsearch/scripts') }
         it { should contain_datacat_fragment('main_config_es-01') }
@@ -82,10 +85,13 @@
         it { should contain_file('/etc/elasticsearch/es-02').with(:ensure => 'directory') }
         it { should contain_file('/etc/elasticsearch/es-02/elasticsearch.yml') }
         it { should contain_file('/etc/elasticsearch/es-02/logging.yml') }
+        it { should contain_exec('mkdir_logdir_elasticsearch_es-02') }
         it { should contain_exec('mkdir_datadir_elasticsearch_es-02') }
         it { should contain_file('/usr/share/elasticsearch/data/es-02') }
+        it { should contain_file('/var/log/elasticsearch/es-02') }
         it { should contain_file('/etc/init.d/elasticsearch-es-02') }
         it { should contain_file('/etc/elasticsearch/es-02/scripts').with(:target => '/usr/share/elasticsearch/scripts') }
+        it { should contain_file('/etc/elasticsearch/es-02/shield') }
         it { should contain_datacat_fragment('main_config_es-02') }
         it { should contain_datacat('/etc/elasticsearch/es-02/elasticsearch.yml') }
 
@@ -103,7 +109,8 @@
         })
       }
 
-      it { should_not contain_elasticsearch__instance }
+      it { should_not contain_elasticsearch__instance('es-01') }
+      it { should_not contain_elasticsearch__instance('es-02') }
 
     end
 
@@ -132,7 +139,9 @@
         })
       }
 
-      it { should_not contain_elasticsearch__plugin }
+      it { should_not contain_elasticsearch__plugin(
+        'mobz/elasticsearch-head/1.0.0'
+      ) }
 
     end
 
@@ -160,10 +169,13 @@
       it { should contain_file('/etc/elasticsearch/default').with(:ensure => 'directory') }
       it { should contain_file('/etc/elasticsearch/default/elasticsearch.yml') }
       it { should contain_file('/etc/elasticsearch/default/logging.yml') }
+      it { should contain_exec('mkdir_logdir_elasticsearch_default') }
       it { should contain_exec('mkdir_datadir_elasticsearch_default') }
       it { should contain_file('/usr/share/elasticsearch/data/default') }
+      it { should contain_file('/var/log/elasticsearch/default') }
       it { should contain_file('/etc/init.d/elasticsearch-default') }
       it { should contain_file('/etc/elasticsearch/default/scripts').with(:target => '/usr/share/elasticsearch/scripts') }
+      it { should contain_file('/etc/elasticsearch/default/shield') }
       it { should contain_datacat_fragment('main_config_default') }
       it { should contain_datacat('/etc/elasticsearch/default/elasticsearch.yml') }
 
@@ -177,8 +189,10 @@
       it { should contain_file('/etc/elasticsearch/es-01').with(:ensure => 'directory') }
       it { should contain_file('/etc/elasticsearch/es-01/elasticsearch.yml') }
       it { should contain_file('/etc/elasticsearch/es-01/logging.yml') }
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01').with(:command => 'mkdir -p /var/log/elasticsearch/es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01').with(:command => 'mkdir -p /usr/share/elasticsearch/data/es-01') }
       it { should contain_file('/usr/share/elasticsearch/data/es-01') }
+      it { should contain_file('/var/log/elasticsearch/es-01') }
       it { should contain_file('/etc/init.d/elasticsearch-es-01') }
       it { should contain_file('/etc/elasticsearch/es-01/scripts').with(:target => '/usr/share/elasticsearch/scripts') }
       it { should contain_datacat_fragment('main_config_es-01') }
--- a/dev/provisioning/modules/elasticsearch/spec/classes/005_elasticsearch_repo_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/classes/005_elasticsearch_repo_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -13,7 +13,6 @@
 
     context "on #{os}" do
 
-
       let(:facts) do
         facts.merge({ 'scenario' => '', 'common' => '' })
       end
@@ -22,6 +21,19 @@
         default_params
       end
 
+      context 'ordered with package pinning' do
+
+        let :params do
+          default_params
+        end
+
+        it { should contain_class(
+          'elasticsearch::package::pin'
+        ).that_comes_before(
+          'Class[elasticsearch::repo]'
+        ) }
+      end
+
       context "Use anchor type for ordering" do
 
         let :params do
@@ -56,34 +68,11 @@
         end
       when 'Suse'
         context 'has zypper repo parts' do
-          it { should contain_exec('elasticsearch_suse_import_gpg').with(:command => 'rpmkeys --import http://packages.elastic.co/GPG-KEY-elasticsearch') }
+          it { should contain_exec('elasticsearch_suse_import_gpg')
+            .with(:command => 'rpmkeys --import https://artifacts.elastic.co/GPG-KEY-elasticsearch') }
           it { should contain_zypprepo('elasticsearch').with(:baseurl => 'http://packages.elastic.co/elasticsearch/1.3/centos') }
-        end
-      end
-
-      context "Package pinning" do
-
-        let :params do
-          default_params.merge({
-            :package_pin => true
-          })
+          it { should contain_exec('elasticsearch_zypper_refresh_elasticsearch') }
         end
-
-        case facts[:osfamily]
-        when 'Debian'
-          context 'is supported' do
-            it { should contain_apt__pin('elasticsearch').with(:packages => ['elasticsearch'], :version => '1.6.0') }
-          end
-        when 'RedHat'
-          context 'is supported' do
-            it { should contain_yum__versionlock('0:elasticsearch-1.6.0-1.noarch') }
-          end
-        else
-          context 'is not supported' do
-            pending("unable to test for warnings yet. https://github.com/rodjek/rspec-puppet/issues/108")
-          end
-        end
-
       end
 
       context "Override repo key ID" do
@@ -98,14 +87,19 @@
         when 'Debian'
           context 'has override apt key' do
             it { is_expected.to contain_apt__source('elasticsearch').with({
-              :key => '46095ACC8548582C1A2699A9D27D666CD88E42B4',
+              :key => {
+                'id' => '46095ACC8548582C1A2699A9D27D666CD88E42B4',
+                'source' => 'https://artifacts.elastic.co/GPG-KEY-elasticsearch'
+              }
             })}
           end
         when 'Suse'
           context 'has override yum key' do
-            it { is_expected.to contain_exec('elasticsearch_suse_import_gpg').with({
-              :unless  => "test $(rpm -qa gpg-pubkey | grep -i '46095ACC8548582C1A2699A9D27D666CD88E42B4' | wc -l) -eq 1 ",
-            })}
+            it { is_expected.to contain_exec(
+              'elasticsearch_suse_import_gpg'
+            ).with_unless(
+              "test $(rpm -qa gpg-pubkey | grep -i 'D88E42B4' | wc -l) -eq 1"
+            )}
           end
         end
 
@@ -115,7 +109,7 @@
 
         let :params do
           default_params.merge({
-            :repo_key_source => 'https://packages.elasticsearch.org/GPG-KEY-elasticsearch'
+            :repo_key_source => 'http://artifacts.elastic.co/GPG-KEY-elasticsearch'
           })
         end
 
@@ -123,21 +117,66 @@
         when 'Debian'
           context 'has override apt key source' do
             it { is_expected.to contain_apt__source('elasticsearch').with({
-              :key_source => 'https://packages.elasticsearch.org/GPG-KEY-elasticsearch',
+              :key => {
+                'id' => '46095ACC8548582C1A2699A9D27D666CD88E42B4',
+                'source' => 'http://artifacts.elastic.co/GPG-KEY-elasticsearch'
+              }
             })}
           end
         when 'RedHat'
           context 'has override yum key source' do
-            it { should contain_yumrepo('elasticsearch').with(:gpgkey => 'https://packages.elasticsearch.org/GPG-KEY-elasticsearch') }
+            it { should contain_yumrepo('elasticsearch')
+              .with(:gpgkey => 'http://artifacts.elastic.co/GPG-KEY-elasticsearch') }
           end
         when 'Suse'
           context 'has override yum key source' do
-            it { should contain_exec('elasticsearch_suse_import_gpg').with(:command => 'rpmkeys --import https://packages.elasticsearch.org/GPG-KEY-elasticsearch') }
+            it { should contain_exec('elasticsearch_suse_import_gpg')
+              .with(:command => 'rpmkeys --import http://artifacts.elastic.co/GPG-KEY-elasticsearch') }
+          end
+        end
+
+      end
+
+      context "Override repo proxy" do
+
+        let :params do
+          default_params.merge({
+              :repo_proxy => 'http://proxy.com:8080'
+          })
+        end
+
+        case facts[:osfamily]
+        when 'RedHat'
+          context 'has override repo proxy' do
+            it { is_expected.to contain_yumrepo('elasticsearch').with_proxy('http://proxy.com:8080') }
           end
         end
 
       end
 
+      describe 'unified release repositories' do
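+        # From the 5.x series onwards packages should come from the unified
+        # artifacts.elastic.co repositories for apt, yum and zypper alike.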
+
+        let :params do
+          default_params.merge({
+            :repo_version => '5.x',
+            :version => '5.0.0'
+          })
+        end
+
+        case facts[:osfamily]
+        when 'Debian'
+          it { should contain_apt__source('elasticsearch')
+            .with_location('https://artifacts.elastic.co/packages/5.x/apt') }
+        when 'RedHat'
+          it { should contain_yumrepo('elasticsearch')
+            .with_baseurl('https://artifacts.elastic.co/packages/5.x/yum') }
+        when 'Suse'
+          it { should contain_zypprepo('elasticsearch')
+            .with_baseurl('https://artifacts.elastic.co/packages/5.x/yum') }
+        end
+
+      end
+
     end
   end
 end
--- a/dev/provisioning/modules/elasticsearch/spec/classes/099_coverage_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/classes/099_coverage_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,1 +1,1 @@
-at_exit { RSpec::Puppet::Coverage.report! }
+at_exit { RSpec::Puppet::Coverage.report! 100 }
--- a/dev/provisioning/modules/elasticsearch/spec/defines/003_elasticsearch_template_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/defines/003_elasticsearch_template_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -12,53 +12,113 @@
   } end
 
   let(:title) { 'foo' }
-  let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}}'}
+  let(:pre_condition) { <<-EOS
+    class { 'elasticsearch' : }
+  EOS
+  }
 
-  context "Add a template" do
+  describe 'parameter validation' do
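+    # api_ca_file and api_ca_path are expected to be absolute paths; a
+    # relative value should fail catalogue compilation.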
+    [:api_ca_file, :api_ca_path].each do |param|
+      let :params do {
+        :ensure => 'present',
+        :content => '{}',
+        param => 'foo/cert'
+      } end
+
+      it { is_expected.to compile
+        .and_raise_error(/absolute path/) }
+    end
+  end
+
+
+  describe 'template from source' do
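+    # The define should hand off to the native elasticsearch_template type,
+    # mapping the api_* parameters onto its connection attributes and
+    # validating the instance connection beforehand.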
 
     let :params do {
       :ensure => 'present',
-      :file   => 'puppet:///path/to/foo.json',
-    } end
-
-    it { should contain_elasticsearch__template('foo') }
-    it { should contain_file('/usr/share/elasticsearch/templates_import/elasticsearch-template-foo.json').with(:source => 'puppet:///path/to/foo.json', :notify => "Exec[delete_template_foo]") }
-    it { should contain_exec('insert_template_foo').with(:command => "curl -sL -w \"%{http_code}\\n\" -XPUT http://localhost:9200/_template/foo -d @/usr/share/elasticsearch/templates_import/elasticsearch-template-foo.json -o /dev/null | egrep \"(200|201)\" > /dev/null", :unless => 'test $(curl -s \'http://localhost:9200/_template/foo?pretty=true\' | wc -l) -gt 1') }
-  end
-
-  context "Delete a template" do
-
-    let :params do {
-      :ensure => 'absent'
+      :source => 'puppet:///path/to/foo.json',
+      :api_protocol => 'https',
+      :api_host => '127.0.0.1',
+      :api_port => 9201,
+      :api_timeout => 11,
+      :api_basic_auth_username => 'elastic',
+      :api_basic_auth_password => 'password',
+      :validate_tls => false
     } end
 
     it { should contain_elasticsearch__template('foo') }
-    it { should_not contain_file('/usr/share/elasticsearch/templates_import/elasticsearch-template-foo.json').with(:source => 'puppet:///path/to/foo.json') }
-    it { should_not contain_exec('insert_template_foo') }
-    it { should contain_exec('delete_template_foo').with(:command => 'curl -s -XDELETE http://localhost:9200/_template/foo', :notify => nil, :onlyif => 'test $(curl -s \'http://localhost:9200/_template/foo?pretty=true\' | wc -l) -gt 1' ) }
+    it { should contain_es_instance_conn_validator('foo-template')
+      .that_comes_before('Elasticsearch_template[foo]') }
+    it { should contain_elasticsearch_template('foo').with(
+      :ensure => 'present',
+      :source => 'puppet:///path/to/foo.json',
+      :protocol => 'https',
+      :host => '127.0.0.1',
+      :port => 9201,
+      :timeout => 11,
+      :username => 'elastic',
+      :password => 'password',
+      :validate_tls => false
+    ) }
   end
 
-  context "Add template with alternative host and port" do
+  describe 'class parameter inheritance' do
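+    # API settings declared on the main elasticsearch class should be
+    # inherited by templates that do not override them.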
 
     let :params do {
-      :file => 'puppet:///path/to/foo.json',
-      :host => 'otherhost',
-      :port => 9201
+      :ensure => 'present',
+      :content => '{}',
+    } end
+    let(:pre_condition) { <<-EOS
+      class { 'elasticsearch' :
+        api_protocol => 'https',
+        api_host => '127.0.0.1',
+        api_port => 9201,
+        api_timeout => 11,
+        api_basic_auth_username => 'elastic',
+        api_basic_auth_password => 'password',
+        api_ca_file => '/foo/bar.pem',
+        api_ca_path => '/foo/',
+        validate_tls => false,
+      }
+    EOS
+    }
+
+    it { should contain_elasticsearch_template('foo').with(
+      :ensure => 'present',
+      :content => '{}',
+      :protocol => 'https',
+      :host => '127.0.0.1',
+      :port => 9201,
+      :timeout => 11,
+      :username => 'elastic',
+      :password => 'password',
+      :ca_file => '/foo/bar.pem',
+      :ca_path => '/foo/',
+      :validate_tls => false
+    ) }
+  end
+
+  describe 'template from file' do
+
+    let :params do {
+      :ensure => 'present',
+      :file => '/path/to/other_foo.json',
     } end
 
-    it { should contain_elasticsearch__template('foo') }
-    it { should contain_file('/usr/share/elasticsearch/templates_import/elasticsearch-template-foo.json').with(:source => 'puppet:///path/to/foo.json') }
-    it { should contain_exec('insert_template_foo').with(:command => "curl -sL -w \"%{http_code}\\n\" -XPUT http://otherhost:9201/_template/foo -d @/usr/share/elasticsearch/templates_import/elasticsearch-template-foo.json -o /dev/null | egrep \"(200|201)\" > /dev/null", :unless => 'test $(curl -s \'http://otherhost:9201/_template/foo?pretty=true\' | wc -l) -gt 1') }
+    it { should contain_elasticsearch_template('foo').with(
+      :ensure => 'present',
+      :source => '/path/to/other_foo.json',
+    ) }
   end
 
-  context "Add template using content" do
+  describe 'template deletion' do
 
     let :params do {
-      :content => '{"template":"*","settings":{"number_of_replicas":0}}'
+      :ensure => 'absent',
     } end
 
-    it { should contain_elasticsearch__template('foo') }
-    it { should contain_file('/usr/share/elasticsearch/templates_import/elasticsearch-template-foo.json').with(:content => '{"template":"*","settings":{"number_of_replicas":0}}') }
+    it { should contain_elasticsearch_template('foo').with(
+      :ensure => 'absent'
+    ) }
   end
 
 end
--- a/dev/provisioning/modules/elasticsearch/spec/defines/004_elasticsearch_plugin_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/defines/004_elasticsearch_plugin_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -11,7 +11,36 @@
     :scenario => '',
     :common => ''
   } end
-  let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}}'}
+
+  let(:pre_condition) {%q{
+    class { "elasticsearch":
+      config => {
+        "node" => {
+          "name" => "test"
+        }
+      }
+    }
+  }}
+
+  context 'default values' do
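+    # With only ensure and instances set, the define should still compile.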
+    context 'present' do
+      let :params do {
+        :ensure => 'present',
+        :instances  => 'es-01'
+      } end
+
+      it { is_expected.to compile }
+    end
+
+    context 'absent' do
+      let :params do {
+        :ensure => 'absent',
+        :instances  => 'es-01'
+      } end
+
+      it { is_expected.to compile }
+    end
+  end
 
   context 'with module_dir' do
 
@@ -23,8 +52,17 @@
         :instances  => 'es-01'
       } end
 
-      it { should contain_elasticsearch__plugin('mobz/elasticsearch-head/1.0.0') }
-      it { should contain_elasticsearch_plugin('mobz/elasticsearch-head/1.0.0') }
+      it { should contain_elasticsearch__plugin(
+        'mobz/elasticsearch-head/1.0.0'
+      ) }
+      it { should contain_elasticsearch_plugin(
+        'mobz/elasticsearch-head/1.0.0'
+      ) }
+      it { should contain_file(
+        '/usr/share/elasticsearch/plugins/head'
+      ).that_requires(
+        'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]'
+      ) }
     end
 
     context "Remove a plugin" do
@@ -35,8 +73,19 @@
         :instances  => 'es-01'
       } end
 
-      it { should contain_elasticsearch__plugin('mobz/elasticsearch-head/1.0.0') }
-      it { should contain_elasticsearch_plugin('mobz/elasticsearch-head/1.0.0').with(:ensure => 'absent') }
+      it { should contain_elasticsearch__plugin(
+        'mobz/elasticsearch-head/1.0.0'
+      ) }
+      it { should contain_elasticsearch_plugin(
+        'mobz/elasticsearch-head/1.0.0'
+      ).with(
+        :ensure => 'absent'
+      ) }
+      it { should contain_file(
+        '/usr/share/elasticsearch/plugins/head'
+      ).that_requires(
+        'Elasticsearch_plugin[mobz/elasticsearch-head/1.0.0]'
+      ) }
     end
 
   end
@@ -67,9 +116,180 @@
       } end
 
       it { should contain_elasticsearch__plugin('head') }
-      it { should contain_file('/opt/elasticsearch/swdl/plugin.zip').with(:source => 'puppet:///path/to/my/plugin.zip') }
+      it { should contain_file('/opt/elasticsearch/swdl/plugin.zip').with(:source => 'puppet:///path/to/my/plugin.zip', :before => 'Elasticsearch_plugin[head]') }
       it { should contain_elasticsearch_plugin('head').with(:ensure => 'present', :source => '/opt/elasticsearch/swdl/plugin.zip') }
 
   end
-  
+
+  describe 'service restarts' do
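+    # restart_on_change and restart_plugin_change control whether the plugin
+    # resource notifies the owning Elasticsearch::Service instance.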
+
+    let(:title) { 'head' }
+    let :params do {
+      :ensure     => 'present',
+      :instances  => 'es-01',
+      :module_dir => 'head',
+    } end
+
+    context 'restart_on_change set to false (default)' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch": }
+
+        elasticsearch::instance { 'es-01': }
+      }}
+
+      it { should_not contain_elasticsearch_plugin(
+        'head'
+      ).that_notifies(
+        'Elasticsearch::Service[es-01]'
+      )}
+    end
+
+    context 'restart_on_change set to true' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch":
+          restart_on_change => true,
+        }
+
+        elasticsearch::instance { 'es-01': }
+      }}
+
+      it { should contain_elasticsearch_plugin(
+        'head'
+      ).that_notifies(
+        'Elasticsearch::Service[es-01]'
+      )}
+    end
+
+    context 'restart_plugin_change set to false (default)' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch":
+          restart_plugin_change => false,
+        }
+
+        elasticsearch::instance { 'es-01': }
+      }}
+
+      it { should_not contain_elasticsearch_plugin(
+        'head'
+      ).that_notifies(
+        'Elasticsearch::Service[es-01]'
+      )}
+    end
+
+    context 'restart_plugin_change set to true' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch":
+          restart_plugin_change => true,
+        }
+
+        elasticsearch::instance { 'es-01': }
+      }}
+
+      it { should contain_elasticsearch_plugin(
+        'head'
+      ).that_notifies(
+        'Elasticsearch::Service[es-01]'
+      )}
+    end
+
+  end
+
+  describe 'proxy arguments' do
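+    # proxy_host/proxy_port on the define (or proxy_url on the main class)
+    # should be compiled into a single proxy URL, with credentials embedded
+    # when a username and password are given.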
+
+    let(:title) { 'head' }
+
+    context 'unauthenticated' do
+      context 'on define' do
+        let :params do {
+          :ensure         => 'present',
+          :instances      => 'es-01',
+          :proxy_host     => 'es.local',
+          :proxy_port     => '8080'
+        } end
+
+        it { should contain_elasticsearch_plugin(
+          'head'
+        ).with_proxy(
+          'http://es.local:8080'
+        )}
+      end
+
+      context 'on main class' do
+        let :params do {
+          :ensure    => 'present',
+          :instances => 'es-01'
+        } end
+
+        let(:pre_condition) { %q{
+          class { 'elasticsearch':
+            proxy_url => 'https://es.local:8080',
+          }
+        }}
+
+        it { should contain_elasticsearch_plugin(
+          'head'
+        ).with_proxy(
+          'https://es.local:8080'
+        )}
+      end
+    end
+
+    context 'authenticated' do
+      context 'on define' do
+        let :params do {
+          :ensure         => 'present',
+          :instances      => 'es-01',
+          :proxy_host     => 'es.local',
+          :proxy_port     => '8080',
+          :proxy_username => 'elastic',
+          :proxy_password => 'password'
+        } end
+
+        it { should contain_elasticsearch_plugin(
+          'head'
+        ).with_proxy(
+          'http://elastic:password@es.local:8080'
+        )}
+      end
+
+      context 'on main class' do
+        let :params do {
+          :ensure    => 'present',
+          :instances => 'es-01'
+        } end
+
+        let(:pre_condition) { %q{
+          class { 'elasticsearch':
+            proxy_url => 'http://elastic:password@es.local:8080',
+          }
+        }}
+
+        it { should contain_elasticsearch_plugin(
+          'head'
+        ).with_proxy(
+          'http://elastic:password@es.local:8080'
+        )}
+      end
+    end
+
+  end
+
+  describe 'collector ordering' do
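+    # A plugin should be collected and applied before the instance it targets.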
+    describe 'present' do
+      let(:title) { 'head' }
+      let(:pre_condition) {%q{
+        class { 'elasticsearch': }
+        elasticsearch::instance { 'es-01': }
+      }}
+      let :params do {
+        :instances => 'es-01'
+      } end
+
+      it { should contain_elasticsearch__plugin(
+        'head'
+      ).that_comes_before(
+        'Elasticsearch::Instance[es-01]'
+      )}
+    end
+  end
 end
--- a/dev/provisioning/modules/elasticsearch/spec/defines/005_elasticsearch_instance_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/defines/005_elasticsearch_instance_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -2,11 +2,11 @@
 
 describe 'elasticsearch::instance', :type => 'define' do
 
-  default_params = { }
+  let(:title) { 'es-01' }
+  let(:pre_condition) { 'class { "elasticsearch": }' }
 
   on_supported_os.each do |os, facts|
 
-    let(:title) { 'es-01' }
     context "on #{os}" do
 
       case facts[:osfamily]
@@ -48,23 +48,23 @@
         facts.merge({ 'scenario' => '', 'common' => '' })
       end
 
-      let (:params) do
-        default_params.merge({ })
-      end
-
-      let(:title) { 'es-01' }
-      let(:pre_condition) { 'class {"elasticsearch": }'  }
-
-      context "Service" do
+      it { should contain_elasticsearch__service(
+        'es-01'
+      ).with(
+        :init_template =>
+          "elasticsearch/etc/init.d/elasticsearch.#{initscript}.erb",
+        :init_defaults => {
+          "CONF_DIR"  => "/etc/elasticsearch/es-01",
+          "CONF_FILE" => "/etc/elasticsearch/es-01/elasticsearch.yml",
+          "LOG_DIR"   => "/var/log/elasticsearch/es-01",
+          "ES_HOME"   => "/usr/share/elasticsearch"
+        }
+      )}
 
-          it { should contain_elasticsearch__service('es-01').with(:init_template => "elasticsearch/etc/init.d/elasticsearch.#{initscript}.erb", :init_defaults => {"CONF_DIR"=>"/etc/elasticsearch/es-01", "CONF_FILE"=>"/etc/elasticsearch/es-01/elasticsearch.yml", "LOG_DIR"=>"/var/log/elasticsearch/es-01", "ES_HOME"=>"/usr/share/elasticsearch"}) }
-
-      end
+    end # of on os context
+  end # of on supported OSes loop
 
-    end
-
-  end
-
+  # Test all non-OS-specific functionality with just a single distro
   let :facts do {
     :operatingsystem => 'CentOS',
     :kernel => 'Linux',
@@ -75,15 +75,11 @@
     :hostname => 'foo'
   } end
 
-  let(:title) { 'es-01' }
-  let(:pre_condition) { 'class {"elasticsearch": }'  }
-
+  let :params do {
+    :config => { 'node' => { 'name' => 'test' }  },
+  } end
 
-  context "Config file" do
-
-    let :params do {
-      :config => { }
-    } end
+  describe 'config file' do
 
     it { should contain_datacat_fragment('main_config_es-01') }
     it { should contain_datacat('/etc/elasticsearch/es-01/elasticsearch.yml') }
@@ -92,35 +88,59 @@
 
   end
 
-  context "service restarts" do
+  describe 'service restarts' do
 
-    context "does not restart when restart_on_change is false" do
-      let :params do {
-        :config => { 'node' => { 'name' => 'test' }  },
-      } end
-      let(:pre_condition) { 'class {"elasticsearch": config => { }, restart_on_change => false }'  }
-      it { should contain_datacat_fragment('main_config_es-01') }
-      it { should contain_datacat('/etc/elasticsearch/es-01/elasticsearch.yml').without_notify }
+    context 'do not happen when restart_on_change is false (default)' do
+      it { should_not contain_datacat(
+        '/etc/elasticsearch/es-01/elasticsearch.yml'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
+      it { should_not contain_package(
+        'elasticsearch'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
+    end
 
+    context 'happen when restart_on_change is true' do
+      let(:pre_condition) { 'class { "elasticsearch": restart_on_change => true }' }
+
+      it { should contain_datacat(
+        '/etc/elasticsearch/es-01/elasticsearch.yml'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
+      it { should contain_package(
+        'elasticsearch'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
     end
 
-    context "should happen restart_on_change is true (default)" do
-      let :params do {
-        :config => { 'node' => { 'name' => 'test' }  },
-      } end
-      let(:pre_condition) { 'class {"elasticsearch": config => { }}'  }
+    context 'on package change' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch": restart_package_change => true }
+      }}
 
-      it { should contain_datacat_fragment('main_config_es-01') }
-      it { should contain_datacat('/etc/elasticsearch/es-01/elasticsearch.yml').with(:notify => "Elasticsearch::Service[es-01]") }
+      it { should_not contain_datacat(
+        '/etc/elasticsearch/es-01/elasticsearch.yml'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
+      it { should contain_package(
+        'elasticsearch'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
+    end
 
+    context 'on config change' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch": restart_config_change => true }
+      }}
+
+      it { should contain_datacat(
+        '/etc/elasticsearch/es-01/elasticsearch.yml'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
+      it { should_not contain_package(
+        'elasticsearch'
+      ).that_notifies('Elasticsearch::Service[es-01]') }
     end
 
   end
 
-  context "Config dir" do
+  context 'config dir' do
 
     context "default" do
-      let(:pre_condition) { 'class {"elasticsearch": }'  }
       it { should contain_exec('mkdir_configdir_elasticsearch_es-01') }
       it { should contain_file('/etc/elasticsearch/es-01').with(:ensure => 'directory') }
       it { should contain_datacat_fragment('main_config_es-01') }
@@ -128,11 +148,18 @@
 
       it { should contain_file('/etc/elasticsearch/es-01/logging.yml') }
       it { should contain_file('/usr/share/elasticsearch/scripts') }
+      it { should contain_file('/usr/share/elasticsearch/shield') }
       it { should contain_file('/etc/elasticsearch/es-01/scripts').with(:target => '/usr/share/elasticsearch/scripts') }
+      it { should contain_file('/etc/elasticsearch/es-01/shield') }
     end
 
-    context "Set in main class" do
-      let(:pre_condition) { 'class {"elasticsearch": configdir => "/etc/elasticsearch-config" }'  }
+    context 'set in main class' do
+      let(:pre_condition) { <<-EOS
+        class { "elasticsearch":
+          configdir => "/etc/elasticsearch-config"
+        }
+      EOS
+      }
 
       it { should contain_exec('mkdir_configdir_elasticsearch_es-01') }
       it { should contain_file('/etc/elasticsearch-config').with(:ensure => 'directory') }
@@ -143,11 +170,12 @@
 
       it { should contain_file('/etc/elasticsearch-config/es-01/logging.yml') }
       it { should contain_file('/usr/share/elasticsearch/scripts') }
+      it { should contain_file('/usr/share/elasticsearch/shield') }
       it { should contain_file('/etc/elasticsearch-config/es-01/scripts').with(:target => '/usr/share/elasticsearch/scripts') }
+      it { should contain_file('/etc/elasticsearch-config/es-01/shield') }
     end
 
-    context "set in instance" do
-      let(:pre_condition) { 'class {"elasticsearch": }'  }
+    context 'set in instance' do
       let :params do {
         :configdir => '/etc/elasticsearch-config/es-01'
       } end
@@ -160,43 +188,60 @@
 
       it { should contain_file('/etc/elasticsearch-config/es-01/logging.yml') }
       it { should contain_file('/usr/share/elasticsearch/scripts') }
+      it { should contain_file('/usr/share/elasticsearch/shield') }
       it { should contain_file('/etc/elasticsearch-config/es-01/scripts').with(:target => '/usr/share/elasticsearch/scripts') }
+      it { should contain_file('/etc/elasticsearch-config/es-01/shield') }
     end
 
   end
 
 
-  context "data directory" do
-    let(:pre_condition) { 'class {"elasticsearch": }'  }
+  context 'data directory' do
 
-    context "default" do
+    context 'default' do
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
       it { should contain_file('/usr/share/elasticsearch/data/es-01').with( :ensure => 'directory') }
       it { should contain_file('/usr/share/elasticsearch/data').with( :ensure => 'directory') }
     end
 
-    context "single from main config " do
-      let(:pre_condition) { 'class {"elasticsearch": datadir => "/var/lib/elasticsearch-data" }'  }
+    context 'single from main config ' do
+      let(:pre_condition) { <<-EOS
+        class { "elasticsearch":
+          datadir => "/var/lib/elasticsearch-data"
+        }
+      EOS
+      }
 
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
       it { should contain_file('/var/lib/elasticsearch-data').with( :ensure => 'directory') }
       it { should contain_file('/var/lib/elasticsearch-data/es-01').with( :ensure => 'directory') }
     end
 
-    context "single from instance config" do
-      let(:pre_condition) { 'class {"elasticsearch": }'  }
+    context 'single from instance config' do
       let :params do {
         :datadir => '/var/lib/elasticsearch/data'
       } end
 
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
       it { should contain_file('/var/lib/elasticsearch/data').with( :ensure => 'directory') }
 
     end
 
-    context "multiple from main config" do
-      let(:pre_condition) { 'class {"elasticsearch": datadir => [ "/var/lib/elasticsearch-data01", "/var/lib/elasticsearch-data02"] }'  }
+    context 'multiple from main config' do
+      let(:pre_condition) { <<-EOS
+        class { "elasticsearch":
+          datadir => [
+            "/var/lib/elasticsearch-data01",
+            "/var/lib/elasticsearch-data02"
+          ]
+        }
+      EOS
+      }
 
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
       it { should contain_file('/var/lib/elasticsearch-data01').with( :ensure => 'directory') }
       it { should contain_file('/var/lib/elasticsearch-data01/es-01').with( :ensure => 'directory') }
@@ -204,92 +249,170 @@
       it { should contain_file('/var/lib/elasticsearch-data02/es-01').with( :ensure => 'directory') }
     end
 
-    context "multiple from instance config" do
-      let(:pre_condition) { 'class {"elasticsearch": }'  }
+    context 'multiple from instance config' do
       let :params do {
-        :datadir => ['/var/lib/elasticsearch-data/01', '/var/lib/elasticsearch-data/02']
+        :datadir => [
+          '/var/lib/elasticsearch-data/01',
+          '/var/lib/elasticsearch-data/02'
+        ]
       } end
 
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
       it { should contain_file('/var/lib/elasticsearch-data/01').with( :ensure => 'directory') }
       it { should contain_file('/var/lib/elasticsearch-data/02').with( :ensure => 'directory') }
     end
 
-   context "Conflicting setting path.data" do
-     let(:pre_condition) { 'class {"elasticsearch": }'  }
+   context 'conflicting setting path.data' do
      let :params do {
        :datadir => '/var/lib/elasticsearch/data',
        :config  => { 'path.data' => '/var/lib/elasticsearch/otherdata' }
      } end
 
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
+      it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
+      it { should contain_file('/var/lib/elasticsearch/data').with( :ensure => 'directory') }
+      it { should_not contain_file('/var/lib/elasticsearch/otherdata').with( :ensure => 'directory') }
+   end
+
+   context 'conflicting setting path => data' do
+     let :params do {
+       :datadir => '/var/lib/elasticsearch/data',
+       :config  => {
+         'path' => { 'data' => '/var/lib/elasticsearch/otherdata' }
+       }
+     } end
+
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
       it { should contain_file('/var/lib/elasticsearch/data').with( :ensure => 'directory') }
       it { should_not contain_file('/var/lib/elasticsearch/otherdata').with( :ensure => 'directory') }
    end
 
-   context "Conflicting setting path => data" do
-     let(:pre_condition) { 'class {"elasticsearch": }'  }
-     let :params do {
-       :datadir => '/var/lib/elasticsearch/data',
-       :config  => { 'path' => { 'data' => '/var/lib/elasticsearch/otherdata' } }
-     } end
-
-      it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
-      it { should contain_file('/var/lib/elasticsearch/data').with( :ensure => 'directory') }
-      it { should_not contain_file('/var/lib/elasticsearch/otherdata').with( :ensure => 'directory') }
-   end
-
-   context "With other path options defined" do
-     let(:pre_condition) { 'class {"elasticsearch": }'  }
+   context 'with other path options defined' do
      let :params do {
        :datadir => '/var/lib/elasticsearch/data',
        :config  => { 'path' => { 'home' => '/var/lib/elasticsearch' } }
      } end
 
+      it { should contain_exec('mkdir_logdir_elasticsearch_es-01') }
       it { should contain_exec('mkdir_datadir_elasticsearch_es-01') }
       it { should contain_file('/var/lib/elasticsearch/data').with( :ensure => 'directory') }
    end
-
-
   end
 
-  context "Logging" do
-
-    let(:pre_condition) { 'class {"elasticsearch": }'  }
+  context 'logs directory' do
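+    # logdir can be set on the main class or per instance; conflicting
+    # path.logs entries in config should not be managed as directories.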
 
     context "default" do
-      it { should contain_file('/etc/elasticsearch/es-01/logging.yml').with_content(/^logger.index.search.slowlog: TRACE, index_search_slow_log_file$/).with(:source => nil) }
+      it { should contain_file('/var/log/elasticsearch/es-01')
+        .with( :ensure => 'directory') }
+      it { should contain_file('/var/log/elasticsearch')
+        .with( :ensure => 'directory') }
+    end
+
+    context "single from main config " do
+      let(:pre_condition) { <<-EOS
+        class { "elasticsearch":
+          logdir => "/var/log/elasticsearch-logs"
+        }
+      EOS
+      }
+
+      it { should contain_file('/var/log/elasticsearch-logs').with( :ensure => 'directory') }
+      it { should contain_file('/var/log/elasticsearch-logs/es-01').with( :ensure => 'directory') }
+    end
+
+    context 'single from instance config' do
+      let :params do {
+        :logdir => '/var/log/elasticsearch/logs-a'
+      } end
+
+      it { should contain_file('/var/log/elasticsearch/logs-a').with( :ensure => 'directory') }
+
     end
 
-    context "from main class" do
+   context 'conflicting setting path.logs' do
+     let :params do {
+       :logdir => '/var/log/elasticsearch/logs-a',
+       :config  => { 'path.logs' => '/var/log/elasticsearch/otherlogs' }
+     } end
+
+      it { should contain_file('/var/log/elasticsearch/logs-a').with( :ensure => 'directory') }
+      it { should_not contain_file('/var/log/elasticsearch/otherlogs').with( :ensure => 'directory') }
+   end
+
+   context 'conflicting setting path => logs' do
+     let :params do {
+       :logdir => '/var/log/elasticsearch/logs-a',
+       :config  => { 'path' => { 'logs' => '/var/log/elasticsearch/otherlogs' } }
+     } end
+
+      it { should contain_file('/var/log/elasticsearch/logs-a').with( :ensure => 'directory') }
+      it { should_not contain_file('/var/log/elasticsearch/otherlogs').with( :ensure => 'directory') }
+   end
+
+   context 'with other path options defined' do
+     let :params do {
+       :logdir => '/var/log/elasticsearch/logs-a',
+       :config  => { 'path' => { 'home' => '/var/log/elasticsearch' } }
+     } end
 
-      context "config" do
-        let(:pre_condition) { 'class {"elasticsearch": logging_config => { "index.search.slowlog" => "DEBUG, index_search_slow_log_file" } }'  }
+      it { should contain_file('/var/log/elasticsearch/logs-a').with( :ensure => 'directory') }
+   end
+  end
+
+  context 'logging' do
+
+    context 'default' do
+      it { should contain_file('/etc/elasticsearch/es-01/logging.yml')
+        .with_content(
+          /^logger.index.search.slowlog: TRACE, index_search_slow_log_file$/,
+          /type: dailyRollingFile/,
+          /datePattern: "'.'yyyy-MM-dd"/
+        ).with(:source => nil)
+      }
+    end
+
+    context 'from main class' do
+
+      context 'config' do
+        let(:pre_condition) { <<-EOS
+          class { "elasticsearch":
+            logging_config => {
+              "index.search.slowlog" => "DEBUG, index_search_slow_log_file"
+            }
+          }
+        EOS
+        }
 
         it { should contain_file('/etc/elasticsearch/es-01/logging.yml').with_content(/^logger.index.search.slowlog: DEBUG, index_search_slow_log_file$/).with(:source => nil) }
       end
 
-      context "logging file " do
-        let(:pre_condition) { 'class {"elasticsearch": logging_file => "puppet:///path/to/logging.yml" }'  }
+      context 'logging file ' do
+        let(:pre_condition) { <<-EOS
+          class { "elasticsearch":
+            logging_file => "puppet:///path/to/logging.yml"
+          }
+        EOS
+        }
 
         it { should contain_file('/etc/elasticsearch/es-01/logging.yml').with(:source => 'puppet:///path/to/logging.yml', :content => nil) }
       end
 
     end
 
-    context "from instance" do
-
-      let(:pre_condition) { 'class {"elasticsearch": }'  }
-
+    context 'from instance' do
       context "config" do
         let :params do {
-          :logging_config => { 'index.search.slowlog' => 'INFO, index_search_slow_log_file' }
+          :logging_config => {
+            'index.search.slowlog' => 'INFO, index_search_slow_log_file'
+          }
         } end
 
         it { should contain_file('/etc/elasticsearch/es-01/logging.yml').with_content(/^logger.index.search.slowlog: INFO, index_search_slow_log_file$/).with(:source => nil) }
       end
 
-      context "logging file " do
+      context 'logging file' do
         let :params do {
           :logging_file => 'puppet:///path/to/logging.yml'
         } end
@@ -299,11 +422,34 @@
 
     end
 
+    describe 'rollingFile appender' do
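+      # file_rolling_type => 'rollingFile' should switch logging.yml to the
+      # size-based appender with the given backup index and max file size.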
+      let(:pre_condition) {%q{
+        class { 'elasticsearch':
+          file_rolling_type             => 'rollingFile',
+          rolling_file_max_backup_index => 10,
+          rolling_file_max_file_size    => '100MB',
+        }
+      }}
+
+      it { should contain_file('/etc/elasticsearch/es-01/logging.yml')
+        .with_content(
+          /type: rollingFile/,
+          /maxBackupIndex: 10/,
+          /maxFileSize: 100MB/)
+      }
+    end
   end
 
-  context "running as an other user" do
+  context 'running as another user' do
 
-    let(:pre_condition) { 'class {"elasticsearch": elasticsearch_user => "myesuser", elasticsearch_group => "myesgroup" }'  }
+    let(:pre_condition) { <<-EOS
+      class { "elasticsearch":
+        elasticsearch_user => "myesuser",
+        elasticsearch_group => "myesgroup"
+      }
+    EOS
+    }
 
     it { should contain_file('/usr/share/elasticsearch/data/es-01').with(:owner => 'myesuser') }
     it { should contain_file('/etc/elasticsearch/es-01').with(:owner => 'myesuser', :group => 'myesgroup') }
@@ -312,11 +458,11 @@
     it { should contain_file('/etc/elasticsearch/es-01/logging.yml').with(:owner => 'myesuser', :group => 'myesgroup') }
   end
 
-    context "setting different service status then main class" do
+  context 'setting a different service status than the main class' do
 
     let(:pre_condition) { 'class {"elasticsearch": status => "enabled" }'  }
 
-    context "staus option" do
+    context 'status option' do
 
       let :params do {
         :status => 'running'
@@ -325,5 +471,53 @@
       it { should contain_service('elasticsearch-instance-es-01').with(:ensure => 'running', :enable => false) }
 
     end
+
+  end
+
+  context 'init_template' do
+
+    context 'default' do
+      it { should contain_elasticsearch__service('es-01').with(:init_template => 'elasticsearch/etc/init.d/elasticsearch.RedHat.erb') }
+    end
+
+    context 'override in main class' do
+      let(:pre_condition) { <<-EOS
+        class { "elasticsearch":
+          init_template => "elasticsearch/etc/init.d/elasticsearch.systemd.erb"
+        }
+      EOS
+      }
+
+      it { should contain_elasticsearch__service('es-01').with(:init_template => 'elasticsearch/etc/init.d/elasticsearch.systemd.erb') }
+    end
+
+  end
+
+  describe 'system_key' do
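+    # The Shield system key file should be sourced either from the main class
+    # or from the instance-level system_key parameter.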
+    context 'inherited' do
+      let(:pre_condition) {%q{
+        class { 'elasticsearch':
+          system_key => '/tmp/key'
+        }
+      }}
+
+      it { should contain_file(
+        '/etc/elasticsearch/es-01/shield/system_key'
+      ).with_source(
+        '/tmp/key'
+      ) }
+    end
+
+    context 'from instance' do
+      let :params do {
+        :system_key => 'puppet:///test/key'
+      } end
+
+      it { should contain_file(
+        '/etc/elasticsearch/es-01/shield/system_key'
+      ).with_source(
+        'puppet:///test/key'
+      ) }
+    end
   end
 end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/defines/007_elasticsearch_shield_user_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,121 @@
+require 'spec_helper'
+
+describe 'elasticsearch::shield::user' do
+
+  let :facts do {
+    :operatingsystem => 'CentOS',
+    :kernel => 'Linux',
+    :osfamily => 'RedHat',
+    :operatingsystemmajrelease => '7',
+    :scenario => '',
+    :common => ''
+  } end
+
+  let(:title) { 'elastic' }
+
+  let(:pre_condition) {%q{
+    class { 'elasticsearch': }
+  }}
+
+  context 'with default parameters' do
+
+    let(:params) do
+      {
+        :password => 'foobar',
+        :roles => ['monitor', 'user']
+      }
+    end
+
+    it { should contain_elasticsearch__shield__user('elastic') }
+    it { should contain_elasticsearch_shield_user('elastic') }
+    it do
+      should contain_elasticsearch_shield_user_roles('elastic').with(
+        'ensure' => 'present',
+        'roles'  => ['monitor', 'user']
+      )
+    end
+  end
+
+  describe 'collector ordering' do
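+    # A user should be applied after the shield plugin and any roles it
+    # references, but before templates are installed.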
+    describe 'when present' do
+      let(:pre_condition) {%q{
+        class { 'elasticsearch': }
+        elasticsearch::instance { 'es-01': }
+        elasticsearch::plugin { 'shield': instances => 'es-01' }
+        elasticsearch::template { 'foo': content => {"foo" => "bar"} }
+        elasticsearch::shield::role { 'test_role':
+          privileges => {
+            'cluster' => 'monitor',
+            'indices' => {
+              '*' => 'all',
+            },
+          },
+        }
+      }}
+
+      let(:params) {{
+        :password => 'foobar',
+        :roles => ['monitor', 'user']
+      }}
+
+      it { should contain_elasticsearch__shield__role('test_role') }
+      it { should contain_elasticsearch_shield_role('test_role') }
+      it { should contain_elasticsearch_shield_role_mapping('test_role') }
+      it { should contain_elasticsearch__plugin('shield') }
+      it { should contain_elasticsearch_plugin('shield') }
+      it { should contain_file(
+        '/usr/share/elasticsearch/plugins/shield'
+      ) }
+      it { should contain_elasticsearch__shield__user('elastic')
+        .that_comes_before([
+        'Elasticsearch::Template[foo]'
+      ]).that_requires([
+        'Elasticsearch::Plugin[shield]',
+        'Elasticsearch::Shield::Role[test_role]'
+      ])}
+    end
+
+    describe 'when absent' do
+      let(:pre_condition) {%q{
+        class { 'elasticsearch': }
+        elasticsearch::instance { 'es-01': }
+        elasticsearch::plugin { 'shield':
+          ensure => 'absent',
+          instances => 'es-01',
+        }
+        elasticsearch::template { 'foo': content => {"foo" => "bar"} }
+        elasticsearch::shield::role { 'test_role':
+          privileges => {
+            'cluster' => 'monitor',
+            'indices' => {
+              '*' => 'all',
+            },
+          },
+        }
+      }}
+
+      let(:params) {{
+        :password => 'foobar',
+        :roles => ['monitor', 'user']
+      }}
+
+      it { should contain_elasticsearch__shield__role('test_role') }
+      it { should contain_elasticsearch_shield_role('test_role') }
+      it { should contain_elasticsearch_shield_role_mapping('test_role') }
+      it { should contain_elasticsearch__plugin('shield') }
+      it { should contain_elasticsearch_plugin('shield') }
+      it { should contain_file(
+        '/usr/share/elasticsearch/plugins/shield'
+      ) }
+      # TODO: Uncomment once upstream issue is fixed.
+      # https://github.com/rodjek/rspec-puppet/issues/418
+      # it { should contain_elasticsearch__shield__user('elastic')
+      #   .that_comes_before([
+      #   'Elasticsearch::Template[foo]',
+      #   'Elasticsearch::Plugin[shield]'
+      # ]).that_requires([
+      #   'Elasticsearch::Shield::Role[test_role]'
+      # ])}
+    end
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/defines/008_elasticsearch_shield_role_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,100 @@
+require 'spec_helper'
+
+describe 'elasticsearch::shield::role' do
+
+  let :facts do {
+    :operatingsystem => 'CentOS',
+    :kernel => 'Linux',
+    :osfamily => 'RedHat',
+    :operatingsystemmajrelease => '7',
+    :scenario => '',
+    :common => ''
+  } end
+
+  let(:title) { 'elastic_role' }
+
+  let(:params) do
+    {
+      :privileges => {
+        'cluster' => '*'
+      },
+      :mappings => [
+        "cn=users,dc=example,dc=com",
+        "cn=admins,dc=example,dc=com",
+        "cn=John Doe,cn=other users,dc=example,dc=com"
+      ]
+    }
+  end
+
+  context 'with an invalid role name' do
+    context 'too long' do
+      let(:title) { 'A'*31 }
+      it { should raise_error(Puppet::Error, /expected length/i) }
+    end
+  end
+
+  context 'with default parameters' do
+    it { should contain_elasticsearch__shield__role('elastic_role') }
+    it { should contain_elasticsearch_shield_role('elastic_role') }
+    it do
+      should contain_elasticsearch_shield_role_mapping('elastic_role').with(
+        'ensure' => 'present',
+        'mappings' => [
+          "cn=users,dc=example,dc=com",
+          "cn=admins,dc=example,dc=com",
+          "cn=John Doe,cn=other users,dc=example,dc=com"
+        ]
+      )
+    end
+  end
+
+  describe 'collector ordering' do
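+    # A role should be applied after the shield plugin and before templates
+    # and the users that reference it.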
+    describe 'when present' do
+      let(:pre_condition) {%q{
+        class { 'elasticsearch': }
+        elasticsearch::instance { 'es-01': }
+        elasticsearch::plugin { 'shield': instances => 'es-01' }
+        elasticsearch::template { 'foo': content => {"foo" => "bar"} }
+        elasticsearch::shield::user { 'elastic':
+          password => 'foobar',
+          roles => ['elastic_role'],
+        }
+      }}
+
+      it { should contain_elasticsearch__plugin('shield') }
+      it { should contain_elasticsearch__shield__role('elastic_role')
+        .that_comes_before([
+        'Elasticsearch::Template[foo]',
+        'Elasticsearch::Shield::User[elastic]'
+      ]).that_requires([
+        'Elasticsearch::Plugin[shield]'
+      ])}
+    end
+
+    describe 'when absent' do
+      let(:pre_condition) {%q{
+        class { 'elasticsearch': }
+        elasticsearch::instance { 'es-01': }
+        elasticsearch::plugin { 'shield':
+          ensure => 'absent',
+          instances => 'es-01',
+        }
+        elasticsearch::template { 'foo': content => {"foo" => "bar"} }
+        elasticsearch::shield::user { 'elastic':
+          password => 'foobar',
+          roles => ['elastic_role'],
+        }
+      }}
+
+      it { should contain_elasticsearch__plugin('shield') }
+      # TODO: Uncomment once upstream issue is fixed.
+      # https://github.com/rodjek/rspec-puppet/issues/418
+      # it { should contain_elasticsearch__shield__role('elastic_role')
+      #   .that_comes_before([
+      #   'Elasticsearch::Template[foo]',
+      #   'Elasticsearch::Plugin[shield]',
+      #   'Elasticsearch::Shield::User[elastic]'
+      # ])}
+    end
+  end
+end
--- a/dev/provisioning/modules/elasticsearch/spec/defines/010_elasticsearch_service_init_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/defines/010_elasticsearch_service_init_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -42,9 +42,8 @@
       } end
 
     it { should contain_elasticsearch__service__init('es-01') }
-      it { should_not contain_service('elasticsearch-instance-es-01') }
-      it { should_not contain_file('/etc/init.d/elasticsearch-es-01') }
-      it { should_not contain_file('/etc/sysconfig/elasticsearch-es-01') }
+    it { should contain_service('elasticsearch-instance-es-01').with(:enable => false) }
+    it { should contain_augeas('defaults_es-01') }
 
   end
 
@@ -53,48 +52,141 @@
     context "Set via file" do
       let :params do {
         :ensure => 'present',
-	:status => 'enabled',
-	:init_defaults_file => 'puppet:///path/to/initdefaultsfile'
+        :status => 'enabled',
+        :init_defaults_file =>
+          'puppet:///path/to/initdefaultsfile'
       } end
 
-      it { should contain_file('/etc/sysconfig/elasticsearch-es-01').with(:source => 'puppet:///path/to/initdefaultsfile', :notify => 'Service[elasticsearch-instance-es-01]', :before => 'Service[elasticsearch-instance-es-01]') }
+      it { should contain_file(
+        '/etc/sysconfig/elasticsearch-es-01'
+      ).with(
+        :source => 'puppet:///path/to/initdefaultsfile'
+      )}
+      it { should contain_file(
+        '/etc/sysconfig/elasticsearch-es-01'
+      ).that_comes_before(
+        'Service[elasticsearch-instance-es-01]'
+      ) }
     end
 
     context "Set via hash" do
       let :params do {
         :ensure => 'present',
-	:status => 'enabled',
-	:init_defaults => {'ES_HOME' => '/usr/share/elasticsearch' }
+        :status => 'enabled',
+        :init_defaults => {'ES_HOME' => '/usr/share/elasticsearch' }
       } end
 
-      it { should contain_augeas('defaults_es-01').with(:incl => '/etc/sysconfig/elasticsearch-es-01', :changes => "set ES_GROUP 'elasticsearch'\nset ES_HOME '/usr/share/elasticsearch'\nset ES_USER 'elasticsearch'\nset MAX_OPEN_FILES '65535'\n", :notify => 'Service[elasticsearch-instance-es-01]', :before => 'Service[elasticsearch-instance-es-01]') }
+      it 'writes the defaults file' do
+        should contain_augeas('defaults_es-01').with(
+          :incl => '/etc/sysconfig/elasticsearch-es-01',
+          :changes => [
+            "set ES_GROUP 'elasticsearch'",
+            "set ES_HOME '/usr/share/elasticsearch'",
+            "set ES_USER 'elasticsearch'",
+            "set MAX_OPEN_FILES '65535'",
+          ].join("\n") << "\n",
+          :before => 'Service[elasticsearch-instance-es-01]'
+        )
+      end
     end
 
-    context "No restart when 'restart_on_change' is false" do
-      let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}, restart_on_change => false } ' }
+    context 'restarts when "restart_on_change" is true' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch":
+          config => { "node" => {"name" => "test" }},
+          restart_on_change => true
+        }
+      }}
 
       context "Set via file" do
         let :params do {
           :ensure => 'present',
-	  :status => 'enabled',
-	  :init_defaults_file => 'puppet:///path/to/initdefaultsfile'
+          :status => 'enabled',
+          :init_defaults_file => 'puppet:///path/to/initdefaultsfile'
         } end
 
-        it { should contain_file('/etc/sysconfig/elasticsearch-es-01').with(:source => 'puppet:///path/to/initdefaultsfile', :notify => nil, :before => 'Service[elasticsearch-instance-es-01]') }
+        it { should contain_file(
+          '/etc/sysconfig/elasticsearch-es-01'
+        ).with(
+          :source => 'puppet:///path/to/initdefaultsfile'
+        ) }
+        it { should contain_file(
+          '/etc/sysconfig/elasticsearch-es-01'
+        ).that_comes_before(
+          'Service[elasticsearch-instance-es-01]'
+        ) }
+        it { should contain_file(
+          '/etc/sysconfig/elasticsearch-es-01'
+        ).that_notifies(
+          'Service[elasticsearch-instance-es-01]'
+        ) }
       end
 
       context "Set via hash" do
         let :params do {
           :ensure => 'present',
-  	  :status => 'enabled',
-  	  :init_defaults => {'ES_HOME' => '/usr/share/elasticsearch' }
+          :status => 'enabled',
+          :init_defaults => {
+            'ES_HOME' => '/usr/share/elasticsearch'
+          }
         } end
 
-        it { should contain_augeas('defaults_es-01').with(:incl => '/etc/sysconfig/elasticsearch-es-01', :changes => "set ES_GROUP 'elasticsearch'\nset ES_HOME '/usr/share/elasticsearch'\nset ES_USER 'elasticsearch'\nset MAX_OPEN_FILES '65535'\n", :notify => nil, :before => 'Service[elasticsearch-instance-es-01]') }
+        it { should contain_augeas(
+          'defaults_es-01'
+        ).with(
+          :incl => '/etc/sysconfig/elasticsearch-es-01',
+          :changes => "set ES_GROUP 'elasticsearch'\nset ES_HOME '/usr/share/elasticsearch'\nset ES_USER 'elasticsearch'\nset MAX_OPEN_FILES '65535'\n"
+        ) }
+        it { should contain_augeas(
+          'defaults_es-01'
+        ).that_comes_before(
+          'Service[elasticsearch-instance-es-01]'
+        ) }
+        it { should contain_augeas(
+          'defaults_es-01'
+        ).that_notifies(
+          'Service[elasticsearch-instance-es-01]'
+        ) }
+      end
+    end
+
+    context 'does not restart when "restart_on_change" is false' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch":
+          config => { "node" => {"name" => "test" }},
+        }
+      }}
+
+      context "Set via file" do
+        let :params do {
+          :ensure => 'present',
+          :status => 'enabled',
+          :init_defaults_file => 'puppet:///path/to/initdefaultsfile'
+        } end
+
+        it { should_not contain_file(
+          '/etc/sysconfig/elasticsearch-es-01'
+        ).that_notifies(
+          'Service[elasticsearch-instance-es-01]'
+        ) }
       end
 
+      context "Set via hash" do
+        let :params do {
+          :ensure => 'present',
+          :status => 'enabled',
+          :init_defaults => {
+            'ES_HOME' => '/usr/share/elasticsearch'
+          }
+        } end
+
+        it { should_not contain_augeas(
+          'defaults_es-01'
+        ).that_notifies(
+          'Service[elasticsearch-instance-es-01]'
+        ) }
+      end
     end
-
   end
 
   context "Init file" do
@@ -103,26 +195,62 @@
     context "Via template" do
       let :params do {
         :ensure => 'present',
-	:status => 'enabled',
-	:init_template => 'elasticsearch/etc/init.d/elasticsearch.RedHat.erb'
+        :status => 'enabled',
+        :init_template =>
+          'elasticsearch/etc/init.d/elasticsearch.RedHat.erb'
       } end
 
-      it { should contain_file('/etc/init.d/elasticsearch-es-01').with(:notify => 'Service[elasticsearch-instance-es-01]', :before => 'Service[elasticsearch-instance-es-01]') }
+      it { should contain_file(
+        '/etc/init.d/elasticsearch-es-01'
+      ).that_comes_before('Service[elasticsearch-instance-es-01]') }
     end
 
-    context "No restart when 'restart_on_change' is false" do
-      let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}, restart_on_change => false } ' }
+    context 'restarts when "restart_on_change" is true' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch":
+          config => { "node" => {"name" => "test" }},
+          restart_on_change => true
+        }
+      }}
 
       let :params do {
         :ensure => 'present',
-	:status => 'enabled',
-	:init_template => 'elasticsearch/etc/init.d/elasticsearch.RedHat.erb'
+        :status => 'enabled',
+        :init_template =>
+          'elasticsearch/etc/init.d/elasticsearch.RedHat.erb'
       } end
 
-      it { should contain_file('/etc/init.d/elasticsearch-es-01').with(:notify => nil, :before => 'Service[elasticsearch-instance-es-01]') }
-
+      it { should contain_file(
+        '/etc/init.d/elasticsearch-es-01'
+      ).that_comes_before(
+        'Service[elasticsearch-instance-es-01]'
+      ) }
+      it { should contain_file(
+        '/etc/init.d/elasticsearch-es-01'
+      ).that_notifies(
+        'Service[elasticsearch-instance-es-01]'
+      ) }
     end
 
-  end
+    context 'does not restart when "restart_on_change" is false' do
+      let(:pre_condition) { %q{
+        class { "elasticsearch":
+          config => { "node" => {"name" => "test" }},
+        }
+      }}
 
+      let :params do {
+        :ensure => 'present',
+        :status => 'enabled',
+        :init_template =>
+          'elasticsearch/etc/init.d/elasticsearch.RedHat.erb'
+      } end
+
+      it { should_not contain_file(
+        '/etc/init.d/elasticsearch-es-01'
+      ).that_notifies(
+        'Service[elasticsearch-instance-es-01]'
+      ) }
+    end
+  end
 end
--- a/dev/provisioning/modules/elasticsearch/spec/defines/011_elasticsearch_service_system_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/defines/011_elasticsearch_service_system_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -2,129 +2,235 @@
 
 describe 'elasticsearch::service::systemd', :type => 'define' do
 
-  let :facts do {
-    :operatingsystem => 'OpenSuSE',
-    :kernel => 'Linux',
-    :osfamily => 'Suse',
-    :operatingsystemmajrelease => '11',
-    :scenario => '',
-    :common => ''
-  } end
-
-  let(:title) { 'es-01' }
-  let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}}' }
-
-  context "Setup service" do
+  on_supported_os({
+    :hardwaremodels => ['x86_64'],
+    :supported_os => [
+      {
+        'operatingsystem' => 'OpenSuSE',
+        'operatingsystemrelease' => ['12', '13'],
+      },
+      {
+        'operatingsystem' => 'CentOS',
+        'operatingsystemrelease' => ['7'],
+      }
+    ]
+  }).each do |os, facts|
 
-    let :params do {
-      :ensure => 'present',
-      :status => 'enabled'
-    } end
-
-    it { should contain_elasticsearch__service__systemd('es-01') }
-    it { should contain_exec('systemd_reload_es-01').with(:command => '/bin/systemctl daemon-reload') }
-    it { should contain_service('elasticsearch-instance-es-01').with(:ensure => 'running', :enable => true, :provider => 'systemd') }
-  end
+    context "on #{os}" do
 
-  context "Remove service" do
-
-    let :params do {
-      :ensure => 'absent'
-    } end
-
-    it { should contain_elasticsearch__service__systemd('es-01') }
-    it { should contain_exec('systemd_reload_es-01').with(:command => '/bin/systemctl daemon-reload') }
-    it { should contain_service('elasticsearch-instance-es-01').with(:ensure => 'stopped', :enable => false, :provider => 'systemd') }
-  end
+      let(:facts) { facts.merge({
+          :scenario => '',
+          :common => ''
+      }) }
+      let(:title) { 'es-01' }
+      let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}}' }
 
-  context "unmanaged" do
-    let :params do {
-      :ensure => 'present',
-      :status => 'unmanaged'
-    } end
+      if facts[:operatingsystem] == 'OpenSuSE' and
+        facts[:operatingsystemrelease].to_i >= 13
+        let(:systemd_service_path) { '/usr/lib/systemd/system' }
+      else
+        let(:systemd_service_path) { '/lib/systemd/system' }
+      end
 
-    it { should contain_elasticsearch__service__systemd('es-01') }
-    it { should_not contain_service('elasticsearch-instance-es-01') }
-    it { should_not contain_file('/lib/systemd/system/elasticsearch-es-01.service') }
-    it { should_not contain_file('/etc/sysconfig/elasticsearch-es-01') }
+      context "Setup service" do
 
-  end
-
-  context "Defaults file" do
+        let :params do {
+          :ensure => 'present',
+          :status => 'enabled'
+        } end
 
-    context "Set via file" do
-      let :params do {
-        :ensure => 'present',
-	:status => 'enabled',
-	:init_defaults_file => 'puppet:///path/to/initdefaultsfile'
-      } end
+        it { should contain_elasticsearch__service__systemd('es-01') }
+        it { should contain_exec('systemd_reload_es-01').with(:command => '/bin/systemctl daemon-reload') }
+        it { should contain_service('elasticsearch-instance-es-01').with(:ensure => 'running', :enable => true, :provider => 'systemd') }
+      end
 
-      it { should contain_file('/etc/sysconfig/elasticsearch-es-01').with(:source => 'puppet:///path/to/initdefaultsfile', :before => 'Service[elasticsearch-instance-es-01]') }
-    end
+      context "Remove service" do
 
-    context "Set via hash" do
-      let :params do {
-        :ensure => 'present',
-	:status => 'enabled',
-	:init_defaults => {'ES_HOME' => '/usr/share/elasticsearch' }
-      } end
+        let :params do {
+          :ensure => 'absent'
+        } end
 
-      it { should contain_augeas('defaults_es-01').with(:incl => '/etc/sysconfig/elasticsearch-es-01', :changes => "set ES_GROUP 'elasticsearch'\nset ES_HOME '/usr/share/elasticsearch'\nset ES_USER 'elasticsearch'\nset MAX_OPEN_FILES '65535'\n", :before => 'Service[elasticsearch-instance-es-01]') }
-    end
+        it { should contain_elasticsearch__service__systemd('es-01') }
+        it { should contain_exec('systemd_reload_es-01').with(:command => '/bin/systemctl daemon-reload') }
+        it { should contain_service('elasticsearch-instance-es-01').with(:ensure => 'stopped', :enable => false, :provider => 'systemd') }
+      end
 
-    context "No restart when 'restart_on_change' is false" do
-      let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}, restart_on_change => false } ' }
-
-      context "Set via file" do
+      context "unmanaged" do
         let :params do {
           :ensure => 'present',
-	  :status => 'enabled',
-	  :init_defaults_file => 'puppet:///path/to/initdefaultsfile'
+          :status => 'unmanaged'
         } end
 
-        it { should contain_file('/etc/sysconfig/elasticsearch-es-01').with(:source => 'puppet:///path/to/initdefaultsfile', :notify => 'Exec[systemd_reload_es-01]', :before => 'Service[elasticsearch-instance-es-01]') }
-      end
 
-      context "Set via hash" do
-        let :params do {
-          :ensure => 'present',
-  	  :status => 'enabled',
-  	  :init_defaults => {'ES_HOME' => '/usr/share/elasticsearch' }
-        } end
+        it { should contain_elasticsearch__service__systemd('es-01') }
+        it { should contain_service('elasticsearch-instance-es-01').with(:enable => false) }
+        it { should contain_augeas('defaults_es-01') }
 
-        it { should contain_augeas('defaults_es-01').with(:incl => '/etc/sysconfig/elasticsearch-es-01', :changes => "set ES_GROUP 'elasticsearch'\nset ES_HOME '/usr/share/elasticsearch'\nset ES_USER 'elasticsearch'\nset MAX_OPEN_FILES '65535'\n", :notify => 'Exec[systemd_reload_es-01]', :before => 'Service[elasticsearch-instance-es-01]') }
       end
 
-    end
+      context "Defaults file" do
+
+        context "Set via file" do
+          let :params do {
+            :ensure => 'present',
+            :status => 'enabled',
+            :init_defaults_file => 'puppet:///path/to/initdefaultsfile'
+          } end
+
+          it { should contain_file('/etc/sysconfig/elasticsearch-es-01').with(:source => 'puppet:///path/to/initdefaultsfile', :before => 'Service[elasticsearch-instance-es-01]') }
+        end
+
+        context "Set via hash" do
+          let :params do {
+            :ensure        => 'present',
+            :status        => 'enabled',
+            :init_defaults => {'ES_HOME' => '/usr/share/elasticsearch' }
+          } end
 
-  end
+          it { should contain_augeas('defaults_es-01').with(:incl => '/etc/sysconfig/elasticsearch-es-01', :changes => "set ES_GROUP 'elasticsearch'\nset ES_HOME '/usr/share/elasticsearch'\nset ES_USER 'elasticsearch'\nset MAX_OPEN_FILES '65535'\n", :before => 'Service[elasticsearch-instance-es-01]') }
+        end
 
-  context "Init file" do
-    let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }} } ' }
+        context 'restarts when "restart_on_change" is true' do
+          let(:pre_condition) { %q{
+            class { "elasticsearch":
+              config => { "node" => {"name" => "test" }},
+              restart_on_change => true
+            }
+          }}
+
+          context "Set via file" do
+            let :params do {
+              :ensure             => 'present',
+              :status             => 'enabled',
+              :init_defaults_file =>
+                'puppet:///path/to/initdefaultsfile'
+            } end
 
-    context "Via template" do
-      let :params do {
-        :ensure => 'present',
-	:status => 'enabled',
-	:init_template => 'elasticsearch/etc/init.d/elasticsearch.systemd.erb'
-      } end
+            it { should contain_file(
+              '/etc/sysconfig/elasticsearch-es-01'
+            ).with(:source => 'puppet:///path/to/initdefaultsfile') }
+            it { should contain_file(
+              '/etc/sysconfig/elasticsearch-es-01'
+            ).that_notifies([
+              'Service[elasticsearch-instance-es-01]',
+            ]) }
+          end
+
+          context 'set via hash' do
+            let :params do {
+              :ensure => 'present',
+              :status => 'enabled',
+              :init_defaults => {
+                'ES_HOME' => '/usr/share/elasticsearch'
+              }
+            } end
 
-      it { should contain_file('/lib/systemd/system/elasticsearch-es-01.service').with(:before => 'Service[elasticsearch-instance-es-01]') }
-    end
+            it { should contain_augeas(
+              'defaults_es-01'
+            ).with(
+              :incl => '/etc/sysconfig/elasticsearch-es-01',
+              :changes => "set ES_GROUP 'elasticsearch'\nset ES_HOME '/usr/share/elasticsearch'\nset ES_USER 'elasticsearch'\nset MAX_OPEN_FILES '65535'\n",
+            )}
+            it { should contain_augeas(
+              'defaults_es-01'
+            ).that_comes_before(
+              'Service[elasticsearch-instance-es-01]'
+            ) }
+            it { should contain_augeas(
+              'defaults_es-01'
+            ).that_notifies(
+              'Exec[systemd_reload_es-01]'
+            ) }
+          end
+        end
+
+        context 'does not restart when "restart_on_change" is false' do
+          let(:pre_condition) { %q{
+            class { "elasticsearch":
+              config => { "node" => {"name" => "test" }},
+            }
+          }}
 
-    context "No restart when 'restart_on_change' is false" do
-      let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }}, restart_on_change => false } ' }
+          context "Set via file" do
+            let :params do {
+              :ensure             => 'present',
+              :status             => 'enabled',
+              :init_defaults_file =>
+                'puppet:///path/to/initdefaultsfile'
+            } end
+
+            it { should_not contain_file(
+              '/etc/sysconfig/elasticsearch-es-01'
+            ).that_notifies(
+              'Service[elasticsearch-instance-es-01]',
+            ) }
+          end
+        end
+      end
+
+      context "Init file" do
+        let(:pre_condition) { 'class {"elasticsearch": config => { "node" => {"name" => "test" }} } ' }
+
+        context "Via template" do
+          let :params do {
+            :ensure => 'present',
+            :status => 'enabled',
+            :init_template =>
+              'elasticsearch/etc/init.d/elasticsearch.systemd.erb'
+          } end
+
+          it { should contain_file("#{systemd_service_path}/elasticsearch-es-01.service").with(:before => 'Service[elasticsearch-instance-es-01]') }
+        end
+
+        context 'restarts when "restart_on_change" is true' do
+          let(:pre_condition) { %q{
+            class { "elasticsearch":
+              config => { "node" => {"name" => "test" }},
+              restart_on_change => true
+            }
+          }}
 
-      let :params do {
-        :ensure => 'present',
-	:status => 'enabled',
-	:init_template => 'elasticsearch/etc/init.d/elasticsearch.systemd.erb'
-      } end
+          let :params do {
+            :ensure => 'present',
+            :status => 'enabled',
+            :init_template =>
+              'elasticsearch/etc/init.d/elasticsearch.systemd.erb'
+          } end
+
+          it { should contain_file(
+            "#{systemd_service_path}/elasticsearch-es-01.service"
+          ).that_notifies([
+            'Exec[systemd_reload_es-01]',
+            'Service[elasticsearch-instance-es-01]'
+          ]) }
+          it { should contain_file(
+            "#{systemd_service_path}/elasticsearch-es-01.service"
+          ).that_comes_before(
+            'Service[elasticsearch-instance-es-01]'
+          ) }
+        end
 
-      it { should contain_file('/lib/systemd/system/elasticsearch-es-01.service').with(:notify => 'Exec[systemd_reload_es-01]', :before => 'Service[elasticsearch-instance-es-01]') }
-
-    end
+        context 'does not restart when "restart_on_change" is false' do
+          let(:pre_condition) { %q{
+            class { "elasticsearch":
+              config => { "node" => {"name" => "test" }},
+            }
+          }}
 
-  end
+          let :params do {
+            :ensure => 'present',
+            :status => 'enabled',
+            :init_template =>
+              'elasticsearch/etc/init.d/elasticsearch.systemd.erb'
+          } end
 
-end
+          it { should_not contain_file(
+            "#{systemd_service_path}/elasticsearch-es-01.service"
+          ).that_notifies(
+            'Service[elasticsearch-instance-es-01]'
+          ) }
+        end
+      end
+    end # of context on os
+  end # of on_supported_os
+end # of describe elasticsearch::service::systemd
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Warlock-nodes.json	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,1 @@
+{"cluster_name":"elasticsearch","nodes":{"yQAWBO3FS8CupZnSvAVziQ":{"name":"Warlock","transport_address":"inet[/192.168.100.40:9300]","host":"devel01","ip":"192.168.100.40","version":"1.4.2","build":"927caff","http_address":"inet[/192.168.100.40:9200]","settings":{"path":{"data":"/var/lib/elasticsearch","work":"/tmp/elasticsearch","home":"/usr/share/elasticsearch","conf":"/etc/elasticsearch","logs":"/var/log/elasticsearch"},"pidfile":"/var/run/elasticsearch.pid","cluster":{"name":"elasticsearch"},"config":"/etc/elasticsearch/elasticsearch.yml","client":{"type":"node"},"name":"Warlock"},"os":{"refresh_interval_in_millis":1000,"available_processors":2,"cpu":{"vendor":"Intel","model":"Common KVM processor","mhz":3399,"total_cores":2,"total_sockets":1,"cores_per_socket":2,"cache_size_in_bytes":4096},"mem":{"total_in_bytes":2099183616},"swap":{"total_in_bytes":2145382400}},"process":{"refresh_interval_in_millis":1000,"id":20638,"max_file_descriptors":65535,"mlockall":false},"jvm":{"pid":20638,"version":"1.7.0_55","vm_name":"OpenJDK 64-Bit Server VM","vm_version":"24.51-b03","vm_vendor":"Oracle Corporation","start_time_in_millis":1421686713108,"mem":{"heap_init_in_bytes":268435456,"heap_max_in_bytes":1056309248,"non_heap_init_in_bytes":24313856,"non_heap_max_in_bytes":224395264,"direct_max_in_bytes":1056309248},"gc_collectors":["ParNew","ConcurrentMarkSweep"],"memory_pools":["Code Cache","Par Eden Space","Par Survivor Space","CMS Old Gen","CMS Perm Gen"]},"thread_pool":{"generic":{"type":"cached","keep_alive":"30s","queue_size":-1},"index":{"type":"fixed","min":2,"max":2,"queue_size":"200"},"bench":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"get":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"snapshot":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"merge":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"suggest":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"bulk":{"type":"fixed","min":2,"max":2,"queue_size":"50"},"optimize":{"type":"fixed","min":1,"max":1,"queue_size":-1},"warmer":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"flush":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"search":{"type":"fixed","min":6,"max":6,"queue_size":"1k"},"listener":{"type":"fixed","min":1,"max":1,"queue_size":-1},"percolate":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"management":{"type":"scaling","min":1,"max":5,"keep_alive":"5m","queue_size":-1},"refresh":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1}},"network":{"refresh_interval_in_millis":5000,"primary_interface":{"address":"192.168.100.40","name":"eth0","mac_address":"9E:FA:5C:B8:CC:4D"}},"transport":{"bound_address":"inet[/0:0:0:0:0:0:0:0:9300]","publish_address":"inet[/192.168.100.40:9300]"},"http":{"bound_address":"inet[/0:0:0:0:0:0:0:0:9200]","publish_address":"inet[/192.168.100.40:9200]","max_content_length_in_bytes":104857600},"plugins":[{"name":"kopf","version":"1.4.3","description":"kopf - simple web administration tool for ElasticSearch","url":"/_plugin/kopf/","jvm":false,"site":true}]}}}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Warlock-root.json	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,13 @@
+{
+  "status" : 200,
+  "name" : "Warlock",
+  "cluster_name" : "elasticsearch",
+  "version" : {
+    "number" : "1.4.2",
+    "build_hash" : "927caff6f05403e936c20bf4529f144f0c89fd8c",
+    "build_timestamp" : "2014-12-16T14:11:12Z",
+    "build_snapshot" : false,
+    "lucene_version" : "4.10.2"
+  },
+  "tagline" : "You Know, for Search"
+}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Zom-nodes.json	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,1 @@
+{"cluster_name":"elasticsearch","nodes":{"yQAWBO3FS8CupZnSvAVziQ":{"name":"Zom","transport_address":"inet[/192.168.100.40:9300]","host":"devel01","ip":"192.168.100.40","version":"1.4.2","build":"927caff","http_address":"inet[/192.168.100.40:9200]","settings":{"path":{"data":"/var/lib/elasticsearch","work":"/tmp/elasticsearch","home":"/usr/share/elasticsearch","conf":"/etc/elasticsearch","logs":"/var/log/elasticsearch"},"pidfile":"/var/run/elasticsearch.pid","cluster":{"name":"elasticsearch"},"config":"/etc/elasticsearch/elasticsearch.yml","client":{"type":"node"},"name":"Zom"},"os":{"refresh_interval_in_millis":1000,"available_processors":2,"cpu":{"vendor":"Intel","model":"Common KVM processor","mhz":3399,"total_cores":2,"total_sockets":1,"cores_per_socket":2,"cache_size_in_bytes":4096},"mem":{"total_in_bytes":2099183616},"swap":{"total_in_bytes":2145382400}},"process":{"refresh_interval_in_millis":1000,"id":20638,"max_file_descriptors":65535,"mlockall":false},"jvm":{"pid":20638,"version":"1.7.0_55","vm_name":"OpenJDK 64-Bit Server VM","vm_version":"24.51-b03","vm_vendor":"Oracle Corporation","start_time_in_millis":1421686713108,"mem":{"heap_init_in_bytes":268435456,"heap_max_in_bytes":1056309248,"non_heap_init_in_bytes":24313856,"non_heap_max_in_bytes":224395264,"direct_max_in_bytes":1056309248},"gc_collectors":["ParNew","ConcurrentMarkSweep"],"memory_pools":["Code Cache","Par Eden Space","Par Survivor Space","CMS Old Gen","CMS Perm Gen"]},"thread_pool":{"generic":{"type":"cached","keep_alive":"30s","queue_size":-1},"index":{"type":"fixed","min":2,"max":2,"queue_size":"200"},"bench":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"get":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"snapshot":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"merge":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"suggest":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"bulk":{"type":"fixed","min":2,"max":2,"queue_size":"50"},"optimize":{"type":"fixed","min":1,"max":1,"queue_size":-1},"warmer":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"flush":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"search":{"type":"fixed","min":6,"max":6,"queue_size":"1k"},"listener":{"type":"fixed","min":1,"max":1,"queue_size":-1},"percolate":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"management":{"type":"scaling","min":1,"max":5,"keep_alive":"5m","queue_size":-1},"refresh":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1}},"network":{"refresh_interval_in_millis":5000,"primary_interface":{"address":"192.168.100.40","name":"eth0","mac_address":"9E:FA:5C:B8:CC:4D"}},"transport":{"bound_address":"inet[/0:0:0:0:0:0:0:0:9300]","publish_address":"inet[/192.168.100.40:9300]"},"http":{"bound_address":"inet[/0:0:0:0:0:0:0:0:9200]","publish_address":"inet[/192.168.100.40:9200]","max_content_length_in_bytes":104857600},"plugins":[{"name":"kopf","version":"1.4.3","description":"kopf - simple web administration tool for ElasticSearch","url":"/_plugin/kopf/","jvm":false,"site":true}]}}}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/fixtures/facts/Zom-root.json	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,13 @@
+{
+  "status" : 200,
+  "name" : "Zom",
+  "cluster_name" : "elasticsearch",
+  "version" : {
+    "number" : "1.4.2",
+    "build_hash" : "927caff6f05403e936c20bf4529f144f0c89fd8c",
+    "build_timestamp" : "2014-12-16T14:11:12Z",
+    "build_snapshot" : false,
+    "lucene_version" : "4.10.2"
+  },
+  "tagline" : "You Know, for Search"
+}
--- a/dev/provisioning/modules/elasticsearch/spec/fixtures/facts/facts_url1.json	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,13 +0,0 @@
-{
-  "status" : 200,
-  "name" : "Warlock",
-  "cluster_name" : "elasticsearch",
-  "version" : {
-    "number" : "1.4.2",
-    "build_hash" : "927caff6f05403e936c20bf4529f144f0c89fd8c",
-    "build_timestamp" : "2014-12-16T14:11:12Z",
-    "build_snapshot" : false,
-    "lucene_version" : "4.10.2"
-  },
-  "tagline" : "You Know, for Search"
-}
--- a/dev/provisioning/modules/elasticsearch/spec/fixtures/facts/facts_url2.json	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,1 +0,0 @@
-{"cluster_name":"elasticsearch","nodes":{"yQAWBO3FS8CupZnSvAVziQ":{"name":"Warlock","transport_address":"inet[/192.168.100.40:9300]","host":"devel01","ip":"192.168.100.40","version":"1.4.2","build":"927caff","http_address":"inet[/192.168.100.40:9200]","settings":{"path":{"data":"/var/lib/elasticsearch","work":"/tmp/elasticsearch","home":"/usr/share/elasticsearch","conf":"/etc/elasticsearch","logs":"/var/log/elasticsearch"},"pidfile":"/var/run/elasticsearch.pid","cluster":{"name":"elasticsearch"},"config":"/etc/elasticsearch/elasticsearch.yml","client":{"type":"node"},"name":"Warlock"},"os":{"refresh_interval_in_millis":1000,"available_processors":2,"cpu":{"vendor":"Intel","model":"Common KVM processor","mhz":3399,"total_cores":2,"total_sockets":1,"cores_per_socket":2,"cache_size_in_bytes":4096},"mem":{"total_in_bytes":2099183616},"swap":{"total_in_bytes":2145382400}},"process":{"refresh_interval_in_millis":1000,"id":20638,"max_file_descriptors":65535,"mlockall":false},"jvm":{"pid":20638,"version":"1.7.0_55","vm_name":"OpenJDK 64-Bit Server VM","vm_version":"24.51-b03","vm_vendor":"Oracle Corporation","start_time_in_millis":1421686713108,"mem":{"heap_init_in_bytes":268435456,"heap_max_in_bytes":1056309248,"non_heap_init_in_bytes":24313856,"non_heap_max_in_bytes":224395264,"direct_max_in_bytes":1056309248},"gc_collectors":["ParNew","ConcurrentMarkSweep"],"memory_pools":["Code Cache","Par Eden Space","Par Survivor Space","CMS Old Gen","CMS Perm Gen"]},"thread_pool":{"generic":{"type":"cached","keep_alive":"30s","queue_size":-1},"index":{"type":"fixed","min":2,"max":2,"queue_size":"200"},"bench":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"get":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"snapshot":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"merge":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"suggest":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"bulk":{"type":"fixed","min":2,"max":2,"queue_size":"50"},"optimize":{"type":"fixed","min":1,"max":1,"queue_size":-1},"warmer":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"flush":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1},"search":{"type":"fixed","min":6,"max":6,"queue_size":"1k"},"listener":{"type":"fixed","min":1,"max":1,"queue_size":-1},"percolate":{"type":"fixed","min":2,"max":2,"queue_size":"1k"},"management":{"type":"scaling","min":1,"max":5,"keep_alive":"5m","queue_size":-1},"refresh":{"type":"scaling","min":1,"max":1,"keep_alive":"5m","queue_size":-1}},"network":{"refresh_interval_in_millis":5000,"primary_interface":{"address":"192.168.100.40","name":"eth0","mac_address":"9E:FA:5C:B8:CC:4D"}},"transport":{"bound_address":"inet[/0:0:0:0:0:0:0:0:9300]","publish_address":"inet[/192.168.100.40:9300]"},"http":{"bound_address":"inet[/0:0:0:0:0:0:0:0:9200]","publish_address":"inet[/192.168.100.40:9200]","max_content_length_in_bytes":104857600},"plugins":[{"name":"kopf","version":"1.4.3","description":"kopf - simple web administration tool for ElasticSearch","url":"/_plugin/kopf/","jvm":false,"site":true}]}}}
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/functions/concat_merge_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,168 @@
+require 'spec_helper'
+
+describe 'concat_merge' do
+
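+  # Based on the expectations below, concat_merge appears to merge hashes
+  # left-to-right: when both values for a key are arrays they are concatenated,
+  # otherwise the right-hand value overwrites the left-hand one.
+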
+  describe 'exception handling' do
+    it { is_expected.to run.with_params().and_raise_error(
+      Puppet::ParseError, /wrong number of arguments/i
+    ) }
+
+    it { is_expected.to run.with_params({}).and_raise_error(
+      Puppet::ParseError, /wrong number of arguments/i
+    ) }
+
+    it { is_expected.to run.with_params('2', 2).and_raise_error(
+      Puppet::ParseError, /unexpected argument type/
+    ) }
+
+    it { is_expected.to run.with_params(2, '2').and_raise_error(
+      Puppet::ParseError, /unexpected argument type/
+    ) }
+  end
+
+  describe 'collisions' do
+    context 'single keys' do
+      it { is_expected.to run.with_params({
+        'key1' => 'value1'
+      },{
+        'key1' => 'value2'
+      }).and_return({
+        'key1' => 'value2'
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => 'value1'
+      },{
+        'key1' => 'value2'
+      },{
+        'key1' => 'value3'
+      }).and_return({
+        'key1' => 'value3'
+      }) }
+    end
+
+    context 'multiple keys' do
+      it { is_expected.to run.with_params({
+        'key1' => 'value1',
+        'key2' => 'value2'
+      },{
+        'key1' => 'value2'
+      }).and_return({
+        'key1' => 'value2',
+        'key2' => 'value2'
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => 'value1',
+        'key2' => 'value1'
+      },{
+        'key1' => 'value2'
+      },{
+        'key1' => 'value3',
+        'key2' => 'value2'
+      }).and_return({
+        'key1' => 'value3',
+        'key2' => 'value2'
+      }) }
+    end
+  end
+
+  describe 'concat merging' do
+    context 'single keys' do
+      it { is_expected.to run.with_params({
+        'key1' => ['value1']
+      },{
+        'key1' => ['value2']
+      }).and_return({
+        'key1' => ['value1', 'value2']
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => ['value1']
+      },{
+        'key1' => ['value2']
+      },{
+        'key1' => ['value3']
+      }).and_return({
+        'key1' => ['value1', 'value2', 'value3']
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => ['value1']
+      },{
+        'key1' => 'value2'
+      }).and_return({
+        'key1' => 'value2'
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => 'value1'
+      },{
+        'key1' => ['value2']
+      }).and_return({
+        'key1' => ['value2']
+      }) }
+    end
+
+    context 'multiple keys' do
+      it { is_expected.to run.with_params({
+        'key1' => ['value1'],
+        'key2' => ['value3']
+      },{
+        'key1' => ['value2'],
+        'key2' => ['value4']
+      }).and_return({
+        'key1' => ['value1', 'value2'],
+        'key2' => ['value3', 'value4']
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => ['value1'],
+        'key2' => ['value1.1']
+      },{
+        'key1' => ['value2'],
+        'key2' => ['value2.1']
+      },{
+        'key1' => ['value3'],
+        'key2' => ['value3.1']
+      }).and_return({
+        'key1' => ['value1', 'value2', 'value3'],
+        'key2' => ['value1.1', 'value2.1', 'value3.1']
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => ['value1'],
+        'key2' => 'value1'
+      },{
+        'key1' => 'value2',
+        'key2' => ['value2']
+      }).and_return({
+        'key1' => 'value2',
+        'key2' => ['value2']
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key1' => 'value1',
+        'key2' => ['value1']
+      },{
+        'key1' => ['value2'],
+        'key2' => 'value2'
+      }).and_return({
+        'key1' => ['value2'],
+        'key2' => 'value2'
+      }) }
+    end
+  end
+
+  it 'should not change the original hashes' do
+    argument1 = { 'key1' => 'value1' }
+    original1 = argument1.dup
+    argument2 = { 'key2' => 'value2' }
+    original2 = argument2.dup
+
+    subject.call([argument1, argument2])
+    expect(argument1).to eq(original1)
+    expect(argument2).to eq(original2)
+  end
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/functions/deep_implode_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,111 @@
+require 'spec_helper'
+
+describe 'deep_implode' do
+
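+  # Judging from the examples below, deep_implode flattens nested hashes into
+  # dot-separated keys, e.g. {'key' => {'subkey' => 'value'}} becomes
+  # {'key.subkey' => 'value'}, favoring fully-qualified keys on collision.
+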
+  describe 'exception handling' do
+    it { is_expected.to run.with_params().and_raise_error(
+      Puppet::ParseError, /wrong number of arguments/i
+    ) }
+
+    it { is_expected.to run.with_params({}, {}).and_raise_error(
+      Puppet::ParseError, /wrong number of arguments/i
+    ) }
+
+    it { is_expected.to run.with_params('2').and_raise_error(
+      Puppet::ParseError, /unexpected argument type/
+    ) }
+  end
+
+  ['value', ['value'], 0, 10].each do |value|
+    describe "qualifying #{value}" do
+      it { is_expected.to run.with_params({}).and_return({}) }
+
+      it { is_expected.to run.with_params({
+        'key' => value
+      }).and_return({
+        'key' => value
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key' => { 'subkey' => value }
+      }).and_return({
+        'key.subkey' => value
+      }) }
+
+      it { is_expected.to run.with_params({
+        'key' => { 'subkey' => {'subsubkey' => { 'bottom' => value } } }
+      }).and_return({
+        'key.subkey.subsubkey.bottom' => value
+      }) }
+    end
+  end
+
+  # The preferred behavior is to favor fully-qualified keys
+  describe 'key collisions' do
+    it { is_expected.to run.with_params({
+      'key1' => {
+        'subkey1' => 'value1'
+      },
+      'key1.subkey1' => 'value2'
+    }).and_return({
+      'key1.subkey1' => 'value2'
+    }) }
+
+    it { is_expected.to run.with_params({
+      'key1.subkey1' => 'value2',
+      'key1' => {
+        'subkey1' => 'value1'
+      }
+    }).and_return({
+      'key1.subkey1' => 'value2'
+    }) }
+  end
+
+  describe 'deep merging' do
+    it { is_expected.to run.with_params({
+      'key1' => {
+        'subkey1' => ['value1']
+      },
+      'key1.subkey1' => ['value2']
+    }).and_return({
+      'key1.subkey1' => ['value2', 'value1']
+    }) }
+
+    it { is_expected.to run.with_params({
+      'key1' => {
+        'subkey1' => {'key2' => 'value1'}
+      },
+      'key1.subkey1' => {'key3' => 'value2'}
+    }).and_return({
+      'key1.subkey1.key2' => 'value1',
+      'key1.subkey1.key3' => 'value2'
+    }) }
+
+    it { is_expected.to run.with_params({
+      'key1' => {
+        'subkey1' => {'key2' => ['value1']}
+      },
+      'key1.subkey1' => {'key2' => ['value2']}
+    }).and_return({
+      'key1.subkey1.key2' => ['value2', 'value1']
+    }) }
+
+    it { is_expected.to run.with_params({
+      'key1' => {
+        'subkey1' => {'key2' => 'value1'},
+        'subkey1.key2' => 'value2'
+      }
+    }).and_return({
+      'key1.subkey1.key2' => 'value2'
+    }) }
+  end
+
+  it 'should not change the original hashes' do
+    argument1 = { 'key1' => 'value1' }
+    original1 = argument1.dup
+
+    subject.call([argument1])
+    expect(argument1).to eq(original1)
+  end
+
+end
--- a/dev/provisioning/modules/elasticsearch/spec/functions/es_facts_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/functions/es_facts_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,74 +1,118 @@
 require 'spec_helper'
 require 'webmock/rspec'
 
-describe "ES facts" do
+describe 'elasticsearch facts' do
 
   before(:each) do
-    stub_request(:get, "http://localhost:9200/").with(:headers => {'Accept'=>'*/*', 'User-Agent'=>'Ruby'}).to_return(:status => 200, :body => File.read(File.join(fixture_path, 'facts/facts_url1.json') ), :headers => {})
-    stub_request(:get, "http://localhost:9200/_nodes/Warlock").with(:headers => {'Accept'=>'*/*', 'User-Agent'=>'Ruby'}).to_return(:status => 200, :body => File.read(File.join(fixture_path, 'facts/facts_url2.json') ))
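+    # Two instances (Warlock on port 9200 and Zom on port 9201) are stubbed so
+    # the elasticsearch_ports fact can be exercised against multiple instances.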
+    ['Warlock', 'Zom'].each_with_index do |instance, n|
+      stub_request(:get, "http://localhost:920#{n}/")
+        .with(:headers => {'Accept'=>'*/*', 'User-Agent'=>'Ruby'})
+        .to_return(
+          :status => 200,
+          :body => File.read(File.join(
+            fixture_path, "facts/#{instance}-root.json"))
+        )
+
+      stub_request(:get, "http://localhost:920#{n}/_nodes/#{instance}")
+        .with(:headers => {'Accept'=>'*/*', 'User-Agent'=>'Ruby'})
+        .to_return(
+          :status => 200,
+          :body => File.read(File.join(
+            fixture_path, "facts/#{instance}-nodes.json"))
+        )
+    end
 
-    allow(File).to receive(:directory?).with('/etc/elasticsearch').and_return(true)
-    allow(Dir).to receive(:foreach).and_yield('es01')
-    allow(File).to receive(:exists?).with('/etc/elasticsearch/es01/elasticsearch.yml').and_return(true)
-    allow(YAML).to receive(:load_file).with('/etc/elasticsearch/es01/elasticsearch.yml').and_return({})
+    allow(File)
+      .to receive(:directory?)
+      .with('/etc/elasticsearch')
+      .and_return(true)
+
+    allow(Dir)
+      .to receive(:foreach)
+      .and_yield('es01').and_yield('es02')
+
+    ['es01', 'es02'].each do |instance|
+      allow(File)
+        .to receive(:exists?)
+        .with("/etc/elasticsearch/#{instance}/elasticsearch.yml")
+        .and_return(true)
+    end
+
+    allow(YAML)
+      .to receive(:load_file)
+      .with('/etc/elasticsearch/es01/elasticsearch.yml', any_args)
+      .and_return({})
+
+    allow(YAML)
+      .to receive(:load_file)
+      .with('/etc/elasticsearch/es02/elasticsearch.yml', any_args)
+      .and_return({'http.port' => '9201'})
+
     require 'lib/facter/es_facts'
   end
 
-  describe "main" do
-    it "elasticsearch_ports" do 
-      expect(Facter.fact(:elasticsearch_ports).value).to eq("9200")
-    end
-
-  end
-
-  describe "instance" do
-
-    it "elasticsearch_9200_name" do 
-      expect(Facter.fact(:elasticsearch_9200_name).value).to eq("Warlock")
-    end
-
-    it "elasticsearch_9200_version" do 
-      expect(Facter.fact(:elasticsearch_9200_version).value).to eq("1.4.2")
-    end
-
-    it "elasticsearch_9200_cluster_name" do 
-      expect(Facter.fact(:elasticsearch_9200_cluster_name).value).to eq("elasticsearch")
-    end
-
-    it "elasticsearch_9200_node_id" do 
-      expect(Facter.fact(:elasticsearch_9200_node_id).value).to eq("yQAWBO3FS8CupZnSvAVziQ")
-    end
-    
-    it "elasticsearch_9200_mlockall" do 
-      expect(Facter.fact(:elasticsearch_9200_mlockall).value).to be_falsy
-    end
-    
-    it "elasticsearch_9200_plugins" do 
-      expect(Facter.fact(:elasticsearch_9200_plugins).value).to eq("kopf")
-    end
- 
-    describe "plugin kopf" do
-      it "elasticsearch_9200_plugin_kopf_version" do 
-        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_version).value).to eq("1.4.3")
-      end
-      
-      it "elasticsearch_9200_plugin_kopf_description" do 
-        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_description).value).to eq("kopf - simple web administration tool for ElasticSearch")
-      end
-      
-      it "elasticsearch_9200_plugin_kopf_url" do 
-        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_url).value).to eq("/_plugin/kopf/")
-      end
-
-      it "elasticsearch_9200_plugin_kopf_jvm" do 
-        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_jvm).value).to be_falsy
-      end
-      
-      it "elasticsearch_9200_plugin_kopf_site" do 
-        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_site).value).to be_truthy
-      end
-
+  describe 'elasticsearch_ports' do
+    it 'finds listening ports' do
+      expect(Facter.fact(:elasticsearch_ports).value.split(','))
+        .to contain_exactly('9200', '9201')
     end
   end
 
-end
+  describe 'instance' do
+
+    it 'returns the node name' do
+      expect(Facter.fact(:elasticsearch_9200_name).value).to eq('Warlock')
+    end
+
+    it 'returns the node version' do
+      expect(Facter.fact(:elasticsearch_9200_version).value).to eq('1.4.2')
+    end
+
+    it 'returns the cluster name' do
+      expect(Facter.fact(:elasticsearch_9200_cluster_name).value)
+        .to eq('elasticsearch')
+    end
+
+    it 'returns the node ID' do
+      expect(Facter.fact(:elasticsearch_9200_node_id).value)
+        .to eq('yQAWBO3FS8CupZnSvAVziQ')
+    end
+
+    it 'returns the mlockall boolean' do
+      expect(Facter.fact(:elasticsearch_9200_mlockall).value).to be_falsy
+    end
+
+    it 'returns installed plugins' do
+      expect(Facter.fact(:elasticsearch_9200_plugins).value).to eq('kopf')
+    end
+
+    describe 'kopf plugin' do
+
+      it 'returns the correct version' do
+        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_version).value)
+          .to eq('1.4.3')
+      end
+
+      it 'returns the correct description' do
+        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_description).value)
+          .to eq('kopf - simple web administration tool for ElasticSearch')
+      end
+
+      it 'returns the plugin URL' do
+        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_url).value)
+          .to eq('/_plugin/kopf/')
+      end
+
+      it 'returns the plugin JVM boolean' do
+        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_jvm).value)
+          .to be_falsy
+      end
+
+      it 'returns the plugin _site boolean' do
+        expect(Facter.fact(:elasticsearch_9200_plugin_kopf_site).value)
+          .to be_truthy
+      end
+
+    end # of describe plugin
+  end # of describe instance
+end # of describe elasticsearch facts
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/functions/es_plugin_name_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,65 @@
+require 'spec_helper'
+
+describe 'es_plugin_name' do
+
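+  # The examples below suggest es_plugin_name strips any vendor prefix,
+  # version suffix, and 'elasticsearch-'/'es-' prefix from a plugin identifier
+  # (e.g. 'vendor/elasticsearch-foo/1.0.0' yields 'foo') and returns the first
+  # non-empty argument when several are given.
+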
+  describe 'exception handling' do
+    it { is_expected.to run.with_params().and_raise_error(
+      Puppet::ParseError, /wrong number of arguments/i
+    ) }
+  end
+
+  describe 'single arguments' do
+    it { is_expected.to run
+      .with_params('foo')
+      .and_return('foo') }
+
+    it { is_expected.to run
+      .with_params('vendor/foo')
+      .and_return('foo') }
+
+    it { is_expected.to run
+      .with_params('vendor/foo/1.0.0')
+      .and_return('foo') }
+
+    it { is_expected.to run
+      .with_params('vendor/es-foo/1.0.0')
+      .and_return('foo') }
+
+    it { is_expected.to run
+      .with_params('vendor/elasticsearch-foo/1.0.0')
+      .and_return('foo') }
+  end
+
+  describe 'multiple arguments' do
+    it { is_expected.to run
+      .with_params('foo', nil)
+      .and_return('foo') }
+
+    it { is_expected.to run
+      .with_params(nil, 'foo')
+      .and_return('foo') }
+
+    it { is_expected.to run
+      .with_params(nil, 0, 'foo', 'bar')
+      .and_return('foo') }
+  end
+
+  describe 'undef parameters' do
+    it { is_expected.to run
+      .with_params('', 'foo')
+      .and_return('foo') }
+
+    it { is_expected.to run
+      .with_params('')
+      .and_raise_error(Puppet::Error, /could not/) }
+  end
+
+  it 'should not change the original values' do
+    argument1 = 'foo'
+    original1 = argument1.dup
+
+    subject.call([argument1])
+    expect(argument1).to eq(original1)
+  end
+
+end
--- a/dev/provisioning/modules/elasticsearch/spec/functions/plugin_dir_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/functions/plugin_dir_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,46 +1,37 @@
-#! /usr/bin/env ruby -S rspec
 require 'spec_helper'
 
-describe "the plugin_dir function" do
-  let(:scope) { PuppetlabsSpec::PuppetInternals.scope }
+describe 'plugin_dir' do
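+
+  # The mapping below suggests plugin_dir derives the on-disk plugin directory
+  # from the plugin name by dropping any vendor namespace, version suffix, and
+  # the 'elasticsearch-' prefix.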
 
-  it "should exist" do
-    expect(Puppet::Parser::Functions.function("plugin_dir")).to eq("function_plugin_dir")
-  end
+  describe 'exception handling' do
+    describe 'with no arguments' do
+      it { is_expected.to run.with_params()
+        .and_raise_error(Puppet::ParseError) }
+    end
 
-  it "should raise a ParseError if there is less than 1 argument" do
-    expect { scope.function_plugin_dir([]) }.to raise_error(Puppet::ParseError)
-  end
+    describe 'more than two arguments' do
+      it { is_expected.to run.with_params('a', 'b', 'c')
+        .and_raise_error(Puppet::ParseError) }
+    end
 
-  it "should raise a ParseError if there are more than 2 arguments" do
-    expect { scope.function_plugin_dir(['a', 'b', 'c']) }.to raise_error(Puppet::ParseError)
-  end
-
-  it "should complain about non-string first argument" do
-    expect { scope.function_plugin_dir([[]]) }.to raise_error(Puppet::ParseError)
+    describe 'non-string arguments' do
+      it { is_expected.to run.with_params([])
+        .and_raise_error(Puppet::ParseError) }
+    end
   end
 
-  list = [
-    { 'name' => 'mobz/elasticsearch-head',  'dir' => 'head' },
-    { 'name' => 'lukas-vlcek/bigdesk/2.4.0', 'dir' => 'bigdesk' },
-    { 'name' => 'elasticsearch/elasticsearch-cloud-aws/2.5.1', 'dir' => 'cloud-aws' },
-    { 'name' => 'com.sksamuel.elasticsearch/elasticsearch-river-redis/1.1.0', 'dir' => 'river-redis' },
-    { 'name' => 'com.github.lbroudoux.elasticsearch/amazon-s3-river/1.4.0', 'dir' => 'amazon-s3-river' },
-    { 'name' => 'elasticsearch/elasticsearch-lang-groovy/2.0.0', 'dir' => 'lang-groovy' },
-    { 'name' => 'royrusso/elasticsearch-HQ', 'dir' => 'HQ' },
-    { 'name' => 'polyfractal/elasticsearch-inquisitor', 'dir' => 'inquisitor' },
-    { 'name' => 'mycustomplugin', 'dir' => 'mycustomplugin' },
-  ]
-
-  describe "passing plugin name" do
-
-    list.each do |plugin|
-
-      it "should return #{plugin['dir']} directory name for #{plugin['name']}" do
-        result = scope.function_plugin_dir([plugin['name']])
-        expect(result).to eq(plugin['dir'])
-      end
-
+  {
+    'mobz/elasticsearch-head' => 'head',
+    'lukas-vlcek/bigdesk/2.4.0' => 'bigdesk',
+    'elasticsearch/elasticsearch-cloud-aws/2.5.1' => 'cloud-aws',
+    'com.sksamuel.elasticsearch/elasticsearch-river-redis/1.1.0' => 'river-redis',
+    'com.github.lbroudoux.elasticsearch/amazon-s3-river/1.4.0' => 'amazon-s3-river',
+    'elasticsearch/elasticsearch-lang-groovy/2.0.0' => 'lang-groovy',
+    'royrusso/elasticsearch-hq' => 'hq',
+    'polyfractal/elasticsearch-inquisitor' => 'inquisitor',
+    'mycustomplugin' => 'mycustomplugin'
+  }.each do |plugin, dir|
+    describe "parsed dir for #{plugin}" do
+      it { is_expected.to run.with_params(plugin).and_return(dir) }
     end
   end
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/integration/integration001.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,161 @@
+require 'spec_helper_acceptance'
+require 'json'
+
+describe "Integration testing" do
+
+  before :all do
+    shell "mkdir -p #{default['distmoduledir']}/another/files"
+
+    create_remote_file default,
+      "#{default['distmoduledir']}/another/files/good.json",
+      JSON.dump(test_settings['template'])
+
+    create_remote_file default,
+      "#{default['distmoduledir']}/another/files/bad.json",
+      JSON.dump(test_settings['template'])[0..-5]
+  end
+
+  describe "Setup Elasticsearch", :main => true do
+
+    it 'should run successfully' do
+      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
+            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
+           "
+
+      # Run it twice and test for idempotency
+      apply_manifest(pp, :catch_failures => true)
+      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+    end
+
+
+    describe service(test_settings['service_name_a']) do
+      it { should be_enabled }
+      it { should be_running }
+    end
+
+    describe package(test_settings['package_name']) do
+      it { should be_installed }
+    end
+
+    describe file(test_settings['pid_file_a']) do
+      it { should be_file }
+      its(:content) { should match /[0-9]+/ }
+    end
+
+    describe "Elasticsearch serves requests on" do
+      it {
+        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
+      }
+    end
+
+    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
+      it { should be_file }
+      it { should contain 'name: elasticsearch001' }
+    end
+
+    describe file('/usr/share/elasticsearch/templates_import') do
+      it { should be_directory }
+    end
+
+  end
+
+  describe "Template tests", :template => true do
+
+    describe "Insert a template with valid json content" do
+
+      it 'should run successfully' do
+        pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
+              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
+              elasticsearch::template { 'foo': ensure => 'present', file => 'puppet:///modules/another/good.json' }"
+
+        # Run it twice and test for idempotency
+        apply_manifest(pp, :catch_failures => true)
+        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+      end
+
+      it 'should report as existing in Elasticsearch' do
+        curl_with_retries('validate template as installed', default, "http://localhost:#{test_settings['port_a']}/_template/foo | grep logstash", 0)
+      end
+    end
+
+    if fact('puppetversion') =~ /3\.[2-9]\./
+      describe "Insert a template with bad json content" do
+
+        it 'should fail to run' do
+          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
+                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
+                elasticsearch::template { 'foo': ensure => 'present', file => 'puppet:///modules/another/bad.json' }"
+
+          apply_manifest(pp, :expect_failures => true)
+        end
+
+      end
+
+    else
+      # The exit codes have changed since Puppet 3.2.x.
+      # Since Beaker expectations are based on the most recent Puppet code, all runs on previous versions fail.
+    end
+
+  end
+
+  describe "Plugin tests", :plugin => true do
+
+    describe "Install a plugin from official repository" do
+
+      it 'should run successfully' do
+        pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
+              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
+              elasticsearch::plugin { 'lmenezes/elasticsearch-kopf': instances => 'es-01' }
+             "
+
+        # Run it twice and test for idempotency
+        apply_manifest(pp, :catch_failures => true)
+        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+      end
+
+      describe service(test_settings['service_name_a']) do
+        it { should be_enabled }
+        it { should be_running }
+      end
+
+      describe package(test_settings['package_name']) do
+        it { should be_installed }
+      end
+
+      describe file(test_settings['pid_file_a']) do
+        it { should be_file }
+        its(:content) { should match /[0-9]+/ }
+      end
+
+      it 'ensures the plugin directory exists' do
+        shell('ls /usr/share/elasticsearch/plugins/kopf/', {:acceptable_exit_codes => 0})
+      end
+
+      it 'ensures elasticsearch reports the plugin as installed' do
+        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep kopf", 0)
+      end
+
+    end
+
+    if fact('puppetversion') =~ /3\.[2-9]\./
+
+      describe "Install a non existing plugin" do
+
+        it 'should run successfully' do
+          pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
+                elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
+                elasticsearch::plugin{'elasticsearch/non-existing': module_dir => 'non-existing', instances => 'es-01' }
+               "
+          apply_manifest(pp, :expect_failures => true)
+        end
+
+      end
+
+    else
+      # The exit codes have changed since Puppet 3.2.x.
+      # Since Beaker expectations are based on the most recent Puppet code, all runs on previous versions fail.
+    end
+
+  end
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/integration/xplugins001.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,91 @@
+require 'spec_helper_acceptance'
+
+describe "Integration testing" do
+
+  describe "Setup Elasticsearch" do
+
+    it 'should run successfully' do
+      pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
+            elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
+           "
+
+      # Run it twice and test for idempotency
+      apply_manifest(pp, :catch_failures => true)
+      expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+    end
+
+
+    describe service(test_settings['service_name_a']) do
+      it { should be_enabled }
+      it { should be_running }
+    end
+
+    describe package(test_settings['package_name']) do
+      it { should be_installed }
+    end
+
+    describe file(test_settings['pid_file_a']) do
+      it { should be_file }
+      its(:content) { should match /[0-9]+/ }
+    end
+
+    describe "Elasticsearch serves requests on" do
+      it {
+        curl_with_retries("check ES on #{test_settings['port_a']}", default, "http://localhost:#{test_settings['port_a']}/?pretty=true", 0)
+      }
+    end
+
+    describe file('/etc/elasticsearch/es-01/elasticsearch.yml') do
+      it { should be_file }
+      it { should contain 'name: elasticsearch001' }
+    end
+
+    describe file('/usr/share/elasticsearch/templates_import') do
+      it { should be_directory }
+    end
+
+  end
+
+  describe "Plugin tests" do
+
+    describe "Install a plugin from official repository" do
+
+      it 'should run successfully' do
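+        # Plugin names and URLs are injected via environment variables
+        # (PLUGIN_NAME/PLUGIN_URL, LICENSE_PLUGIN_NAME/LICENSE_PLUGIN_URL).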
+        pp = "class { 'elasticsearch': config => { 'cluster.name' => '#{test_settings['cluster_name']}'}, java_install => true, package_url => '#{test_settings['snapshot_package']}' }
+              elasticsearch::instance { 'es-01': config => { 'node.name' => 'elasticsearch001', 'http.port' => '#{test_settings['port_a']}' } }
+              elasticsearch::plugin{'#{ENV['LICENSE_PLUGIN_NAME']}': instances => 'es-01', url => '#{ENV['LICENSE_PLUGIN_URL']}' }
+              elasticsearch::plugin{'#{ENV['PLUGIN_NAME']}': instances => 'es-01', url => '#{ENV['PLUGIN_URL']}' }
+             "
+
+        # Run it twice and test for idempotency
+        apply_manifest(pp, :catch_failures => true)
+        expect(apply_manifest(pp, :catch_failures => true).exit_code).to be_zero
+      end
+
+      describe service(test_settings['service_name_a']) do
+        it { should be_enabled }
+        it { should be_running }
+      end
+
+      describe package(test_settings['package_name']) do
+        it { should be_installed }
+      end
+
+      describe file(test_settings['pid_file_a']) do
+        it { should be_file }
+        its(:content) { should match /[0-9]+/ }
+      end
+
+      it 'makes sure the directory exists' do
+        shell("ls /usr/share/elasticsearch/plugins/#{ENV['PLUGIN_NAME']}", {:acceptable_exit_codes => 0})
+      end
+
+      it 'makes sure elasticsearch reports it as existing' do
+        curl_with_retries('validated plugin as installed', default, "http://localhost:#{test_settings['port_a']}/_nodes/?plugin | grep #{ENV['PLUGIN_NAME']}", 0)
+      end
+
+    end
+
+  end
+
+end
--- a/dev/provisioning/modules/elasticsearch/spec/spec_acceptance_common.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/spec_acceptance_common.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,9 +1,18 @@
   test_settings['cluster_name'] = SecureRandom.hex(10)
 
-  test_settings['repo_version2x'] = '2.x'
+  test_settings['repo_version2x']          = '2.x'
+  test_settings['repo_version']            = '1.7'
+  test_settings['install_package_version'] = '1.7.4'
+  test_settings['install_version']         = '1.7.4'
+  test_settings['upgrade_package_version'] = '1.7.5'
+  test_settings['upgrade_version']         = '1.7.5'
+
+  test_settings['shield_user']             = 'elastic'
+  test_settings['shield_password']         = SecureRandom.hex
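+  # Pre-computed bcrypt hash for the Shield user specs; the plaintext it is
+  # assumed to correspond to is stored in 'shield_hashed_plaintext'.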
+  test_settings['shield_hashed_password']  = '$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C'
+  test_settings['shield_hashed_plaintext'] = 'foobar'
   case fact('osfamily')
     when 'RedHat'
-      test_settings['repo_version']    = '1.3'
       test_settings['url']             = 'http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.3.1.noarch.rpm'
       test_settings['local']           = '/tmp/elasticsearch-1.3.1.noarch.rpm'
       test_settings['puppet']          = 'elasticsearch-1.3.1.noarch.rpm'
@@ -16,58 +25,34 @@
       test_settings['defaults_file_b'] = '/etc/sysconfig/elasticsearch-es-02'
       test_settings['port_a']          = '9200'
       test_settings['port_b']          = '9201'
-      test_settings['install_package_version'] = '1.3.5'
-      test_settings['install_version'] = '1.3.5'
-      test_settings['upgrade_package_version'] = '1.3.6'
-      test_settings['upgrade_version'] = '1.3.6'
     when 'Debian'
       case fact('operatingsystem')
         when 'Ubuntu'
-          test_settings['repo_version']    = '1.3'
           test_settings['url']             = 'http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.3.1.deb'
           test_settings['local']           = '/tmp/elasticsearch-1.3.1.deb'
           test_settings['puppet']          = 'elasticsearch-1.3.1.deb'
-          test_settings['pid_file_a']      = '/var/run/elasticsearch-es-01.pid'
-          test_settings['pid_file_b']      = '/var/run/elasticsearch-es-02.pid'
-          test_settings['install_package_version'] = '1.3.5'
-          test_settings['install_version'] = '1.3.5'
-          test_settings['upgrade_package_version'] = '1.3.6'
-          test_settings['upgrade_version'] = '1.3.6'
+          # From 15.04 onwards, Ubuntu moved to systemd.
+          if Gem::Version.new(fact('operatingsystemrelease')) >= Gem::Version.new('15.04')
+            test_settings['pid_file_a']    = '/var/run/elasticsearch/elasticsearch-es-01.pid'
+            test_settings['pid_file_b']    = '/var/run/elasticsearch/elasticsearch-es-02.pid'
+          else
+            test_settings['pid_file_a']    = '/var/run/elasticsearch-es-01.pid'
+            test_settings['pid_file_b']    = '/var/run/elasticsearch-es-02.pid'
+          end
         when 'Debian'
           case fact('lsbmajdistrelease')
-            when '6'
-              test_settings['repo_version']    = '1.1'
-              test_settings['url']             = 'http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.1.0.deb'
-              test_settings['local']           = '/tmp/elasticsearch-1.1.0.deb'
-              test_settings['puppet']          = 'elasticsearch-1.1.0.deb'
-              test_settings['pid_file_a']      = '/var/run/elasticsearch-es-01.pid'
-              test_settings['pid_file_b']      = '/var/run/elasticsearch-es-02.pid'
-              test_settings['install_package_version'] = '1.1.1'
-              test_settings['install_version'] = '1.1.1'
-              test_settings['upgrade_package_version'] = '1.1.2'
-              test_settings['upgrade_version'] = '1.1.2'
             when '7'
-              test_settings['repo_version']    = '1.3'
               test_settings['url']             = 'http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.3.1.deb'
               test_settings['local']           = '/tmp/elasticsearch-1.3.1.deb'
               test_settings['puppet']          = 'elasticsearch-1.3.1.deb'
               test_settings['pid_file_a']      = '/var/run/elasticsearch-es-01.pid'
               test_settings['pid_file_b']      = '/var/run/elasticsearch-es-02.pid'
-              test_settings['install_package_version'] = '1.3.5'
-              test_settings['install_version'] = '1.3.5'
-              test_settings['upgrade_package_version'] = '1.3.6'
-              test_settings['upgrade_version'] = '1.3.6'
             else
-              test_settings['repo_version']    = '1.3'
               test_settings['url']             = 'http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.3.1.deb'
               test_settings['local']           = '/tmp/elasticsearch-1.3.1.deb'
               test_settings['puppet']          = 'elasticsearch-1.3.1.deb'
               test_settings['pid_file_a']      = '/var/run/elasticsearch/elasticsearch-es-01.pid'
               test_settings['pid_file_b']      = '/var/run/elasticsearch/elasticsearch-es-02.pid'
-              test_settings['install_package_version'] = '1.3.5'
-              test_settings['install_version'] = '1.3.5'
-              test_settings['upgrade_package_version'] = '1.3.6'
-              test_settings['upgrade_version'] = '1.3.6'
           end
       end
       test_settings['package_name']    = 'elasticsearch'
@@ -78,14 +63,9 @@
       test_settings['port_a']          = '9200'
       test_settings['port_b']          = '9201'
     when 'Suse'
-      test_settings['repo_version']    = '1.3'
       test_settings['url']             = 'http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-1.3.1.noarch.rpm'
       test_settings['local']           = '/tmp/elasticsearch-1.3.1.noarch.rpm'
       test_settings['puppet']          = 'elasticsearch-1.3.1.noarch.rpm'
-      test_settings['install_package_version'] = '1.3.5'
-      test_settings['install_version'] = '1.3.5'
-      test_settings['upgrade_package_version'] = '1.3.6'
-      test_settings['upgrade_version'] = '1.3.6'
       test_settings['package_name']    = 'elasticsearch'
       test_settings['service_name_a']  = 'elasticsearch-es-01'
       test_settings['service_name_b']  = 'elasticsearch-es-02'
@@ -101,91 +81,54 @@
   test_settings['datadir_2'] = '/var/lib/elasticsearch-data/2/'
   test_settings['datadir_3'] = '/var/lib/elasticsearch-data/3/'
 
-  test_settings['good_json']='{
-    "template" : "logstash-*",
-    "settings" : {
-      "index.refresh_interval" : "5s",
-      "analysis" : {
-	"analyzer" : {
-	  "default" : {
-	    "type" : "standard",
-	    "stopwords" : "_none_"
-	  }
-	}
+  test_settings['template'] = {
+    "template" => "logstash-*",
+    "settings" => {
+      "index" => {
+        "refresh_interval" => "5s",
+        "analysis" => {
+          "analyzer" => {
+            "default" => {
+              "type" => "standard",
+              "stopwords" => "_none_"
+            }
+          }
+        }
       }
     },
-    "mappings" : {
-      "_default_" : {
-	 "_all" : {"enabled" : true},
-	 "dynamic_templates" : [ {
-	   "string_fields" : {
-	     "match" : "*",
-	     "match_mapping_type" : "string",
-	     "mapping" : {
-	       "type" : "multi_field",
-		 "fields" : {
-		   "{name}" : {"type": "string", "index" : "analyzed", "omit_norms" : true },
-		   "raw" : {"type": "string", "index" : "not_analyzed", "ignore_above" : 256}
-		 }
-	     }
-	   }
-	 } ],
-	 "properties" : {
-	   "@version": { "type": "string", "index": "not_analyzed" },
-	   "geoip"  : {
-	     "type" : "object",
-	       "dynamic": true,
-	       "path": "full",
-	       "properties" : {
-		 "location" : { "type" : "geo_point" }
-	       }
-	   }
-	 }
+    "mappings" => {
+      "_default_" => {
+        "_all" => {"enabled" => true},
+        "dynamic_templates" => [ {
+          "string_fields" => {
+            "match" => "*",
+            "match_mapping_type" => "string",
+            "mapping" => {
+              "type" => "multi_field",
+              "fields" => {
+                "{name}" => {
+                  "type"=> "string", "index" => "analyzed", "omit_norms" => true
+                },
+                "raw" => {
+                  "type"=> "string", "index" => "not_analyzed", "ignore_above" => 256
+                }
+              }
+            }
+          }
+        } ],
+        "properties" => {
+          "@version"=> { "type"=> "string", "index"=> "not_analyzed" },
+          "geoip"  => {
+            "type" => "object",
+            "dynamic"=> true,
+            "path"=> "full",
+            "properties" => {
+              "location" => { "type" => "geo_point" }
+            }
+          }
+        }
       }
     }
-  }'
-
-  test_settings['bad_json']='{
-    "settings" : {
-      "index.refresh_interval" : "5s",
-      "analysis" : {
-	"analyzer" : {
-	  "default" : {
-	    "type" : "standard",
-	    "stopwords" : "_none_"
-	  }
-	}
-      }
-    },
-    "mappings" : {
-      "_default_" : {
-	 "_all" : {"enabled" : true},
-	 "dynamic_templates" : [ {
-	   "string_fields" : {
-	     "match" : "*",
-	     "match_mapping_type" : "string",
-	     "mapping" : {
-	       "type" : "multi_field",
-		 "fields" : {
-		   "{name}" : {"type": "string", "index" : "analyzed", "omit_norms" : true },
-		   "raw" : {"type": "string", "index" : "not_analyzed", "ignore_above" : 256}
-		 }
-	     }
-	   }
-	 } ],
-	 "properties" : {
-	   "@version": { "type": "string", "index": "not_analyzed" },
-	   "geoip"  : {
-	     "type" : "object",
-	       "dynamic": true,
-	       "path": "full",
-	       "properties" : {
-		 "location" : { "type" : "geo_point" }
-	       }
-	   }
-	 }
-      }
-    }
-  }'
+  }
 
 RSpec.configuration.test_settings = test_settings
--- a/dev/provisioning/modules/elasticsearch/spec/spec_helper.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/spec_helper.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,4 +1,5 @@
 require 'puppetlabs_spec_helper/module_spec_helper'
+require 'rspec-puppet-utils'
 require 'rspec-puppet-facts'
 include RspecPuppetFacts
 
--- a/dev/provisioning/modules/elasticsearch/spec/spec_helper_acceptance.rb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/spec/spec_helper_acceptance.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,7 +1,10 @@
 require 'beaker-rspec'
-require 'pry'
 require 'securerandom'
+require 'thread'
+require 'infrataster/rspec'
+require 'rspec/retry'
 require_relative 'spec_acceptance_integration'
+require_relative 'spec_helper_tls'
 
 def test_settings
   RSpec.configuration.test_settings
@@ -9,43 +12,61 @@
 
 RSpec.configure do |c|
   c.add_setting :test_settings, :default => {}
+
+  # rspec-retry
+  c.display_try_failure_messages = true
+  c.default_sleep_interval = 5
+  # General-case retry keyword for unstable tests
+  c.around :each, :with_retries do |example|
+    example.run_with_retry retry: 4
+  end
+  # More forgiving retry config for really flaky tests
+  c.around :each, :with_generous_retries do |example|
+    example.run_with_retry retry: 10
+  end
+
+  # Helper hook for module cleanup
+  c.after :context, :with_cleanup do
+    apply_manifest <<-EOS
+      class { 'elasticsearch':
+        ensure      => 'absent',
+        manage_repo => true,
+      }
+      elasticsearch::instance { 'es-01': ensure => 'absent' }
+
+      file { '/usr/share/elasticsearch/plugin':
+        ensure  => 'absent',
+        force   => true,
+        recurse => true,
+        require => Class['elasticsearch'],
+      }
+    EOS
+  end
 end
 
-files_dir = ENV['files_dir'] || '/home/jenkins/puppet'
-
-proxy_host = ENV['BEAKER_PACKAGE_PROXY'] || ''
-
-if !proxy_host.empty?
-  gem_proxy = "http_proxy=#{proxy_host}" unless proxy_host.empty?
-
-  hosts.each do |host|
-    on host, "echo 'export http_proxy='#{proxy_host}'' >> /root/.bashrc"
-    on host, "echo 'export https_proxy='#{proxy_host}'' >> /root/.bashrc"
-    on host, "echo 'export no_proxy=\"localhost,127.0.0.1,localaddress,.localdomain.com,#{host.name}\"' >> /root/.bashrc"
-  end
-else
-  gem_proxy = ''
-end
+files_dir = ENV['files_dir'] || './spec/fixtures/artifacts'
 
 hosts.each do |host|
 
   # Install Puppet
   if host.is_pe?
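+    # Keep-alive: print a dot every 5 seconds while install_pe runs so the
+    # console shows progress during the long PE installation.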
+    pe_progress = Thread.new { while sleep 5 ; print '.' ; end }
     install_pe
+    pe_progress.exit
   else
-    puppetversion = ENV['VM_PUPPET_VERSION']
-    on host, "#{gem_proxy} gem install puppet --no-ri --no-rdoc --version '~> #{puppetversion}'"
-    on host, "mkdir -p #{host['distmoduledir']}"
+    unless host[:skip_puppet_install]
+      install_puppet_on host, :default_action => 'gem_install'
+    end
 
     if fact('osfamily') == 'Suse'
-      install_package host, 'rubygems ruby-devel augeas-devel libxml2-devel'
-      on host, "#{gem_proxy} gem install ruby-augeas --no-ri --no-rdoc"
+      install_package host, '--force-resolution augeas-devel libxml2-devel'
+      install_package host, 'ruby-devel' if fact('operatingsystem') == 'SLES'
+      on host, "gem install ruby-augeas --no-ri --no-rdoc"
     end
 
     if host[:type] == 'aio'
       on host, "mkdir -p /var/log/puppetlabs/puppet"
     end
-
   end
 
   if ENV['ES_VERSION']
@@ -65,68 +86,78 @@
 
     url = get_url
     RSpec.configuration.test_settings['snapshot_package'] = url.gsub('$EXT$', ext)
-
   else
 
     case fact('osfamily')
       when 'RedHat'
-        scp_to(host, "#{files_dir}/elasticsearch-1.3.1.noarch.rpm", '/tmp/elasticsearch-1.3.1.noarch.rpm')
+        package_name = 'elasticsearch-1.3.1.noarch.rpm'
       when 'Debian'
         case fact('lsbmajdistrelease')
           when '6'
-            scp_to(host, "#{files_dir}/elasticsearch-1.1.0.deb", '/tmp/elasticsearch-1.1.0.deb')
+            package_name = 'elasticsearch-1.1.0.deb'
           else
-            scp_to(host, "#{files_dir}/elasticsearch-1.3.1.deb", '/tmp/elasticsearch-1.3.1.deb')
+            package_name = 'elasticsearch-1.3.1.deb'
         end
       when 'Suse'
-        case fact('operatingsystem')
-          when 'OpenSuSE'
-            scp_to(host, "#{files_dir}/elasticsearch-1.3.1.noarch.rpm", '/tmp/elasticsearch-1.3.1.noarch.rpm')
-        end
+        package_name = 'elasticsearch-1.3.1.noarch.rpm'
     end
 
+    snapshot_package = {
+        :src => "#{files_dir}/#{package_name}",
+        :dst => "/tmp/#{package_name}"
+    }
+
+    scp_to(host, snapshot_package[:src], snapshot_package[:dst])
     scp_to(host, "#{files_dir}/elasticsearch-bigdesk.zip", "/tmp/elasticsearch-bigdesk.zip")
     scp_to(host, "#{files_dir}/elasticsearch-kopf.zip", "/tmp/elasticsearch-kopf.zip")
 
+    RSpec.configuration.test_settings['snapshot_package'] = "file:#{snapshot_package[:dst]}"
+
   end
 
-  # on debian/ubuntu nodes ensure we get the latest info
-  # Can happen we have stalled data in the images
-  if fact('osfamily') == 'Debian'
-    on host, "apt-get update"
+  Infrataster::Server.define(:docker) do |server|
+    server.address = host[:ip]
+    server.ssh = host[:ssh].tap { |s| s.delete :forward_agent }
   end
-  if fact('osfamily') == 'RedHat'
-    on host, "yum -y update"
+  Infrataster::Server.define(:container) do |server|
+    server.address = host[:vm_ip] # this gets ignored anyway
+    server.from = :docker
   end
-
 end
 
 RSpec.configure do |c|
-  # Project root
-  proj_root = File.expand_path(File.join(File.dirname(__FILE__), '..'))
 
+  # Uncomment for verbose test descriptions.
   # Readable test descriptions
-  c.formatter = :documentation
+  # c.formatter = :documentation
 
   # Configure all nodes in nodeset
   c.before :suite do
+
     # Install module and dependencies
-    puppet_module_install(:source => proj_root, :module_name => 'elasticsearch')
+    install_dev_puppet_module :ignore_list => [
+      'junit'
+    ] + Beaker::DSL::InstallUtils::ModuleUtils::PUPPET_MODULE_INSTALL_IGNORE
+
     hosts.each do |host|
 
       copy_hiera_data_to(host, 'spec/fixtures/hiera/hieradata/')
-      on host, puppet('module','install','puppetlabs-java'), { :acceptable_exit_codes => [0,1] }
-      on host, puppet('module','install','richardc-datacat'), { :acceptable_exit_codes => [0,1] }
+
+      modules = ['stdlib', 'java', 'datacat', 'java_ks']
 
-      if fact('osfamily') == 'Debian'
-        on host, puppet('module','install','puppetlabs-apt', '--version=1.8.0'), { :acceptable_exit_codes => [0,1] }
-      end
-      if fact('osfamily') == 'Suse'
-        on host, puppet('module','install','darin-zypprepo'), { :acceptable_exit_codes => [0,1] }
-      end
-      if fact('osfamily') == 'RedHat'
-        on host, puppet('module', 'upgrade', 'puppetlabs-stdlib'), {  :acceptable_exit_codes => [0,1] }
-        on host, puppet('module', 'install', 'ceritsc-yum'), { :acceptable_exit_codes => [0,1] }
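+      # Pick the repository-management module matching this OS family and
+      # copy it to the host along with the common fixture modules.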
+      dist_module = {
+        'Debian' => 'apt',
+        'Suse'   => 'zypprepo',
+        'RedHat' => 'yum',
+      }[fact('osfamily')]
+
+      modules << dist_module if not dist_module.nil?
+
+      modules.each do |mod|
+        copy_module_to host, {
+          :module_name => mod,
+          :source      => "spec/fixtures/modules/#{mod}"
+        }
       end
 
       if host.is_pe?
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/spec_helper_faraday.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,5 @@
+require 'faraday'
+
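+# Shared helper returning Faraday's Retry request middleware (with default
+# options), intended for specs that build Faraday connections.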
+def middleware
+  [Faraday::Request::Retry, {}]
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/spec_helper_tls.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,87 @@
+require 'openssl'
+
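+# Generate a throwaway CA certificate plus `num_certs` client key/cert pairs,
+# returned as PEM strings with target file paths under `path`, for use by the
+# TLS acceptance specs.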
+def gen_certs num_certs, path
+  ret = { :clients => [] }
+  serial = 1000000
+  ca_key = OpenSSL::PKey::RSA.new 2048
+
+  # CA Cert
+  ca_name = OpenSSL::X509::Name.parse 'CN=ca/DC=example'
+  ca_cert = OpenSSL::X509::Certificate.new
+  ca_cert.serial = serial
+  serial += 1
+  ca_cert.version = 2
+  ca_cert.not_before = Time.now
+  ca_cert.not_after = Time.now + 86400
+  ca_cert.public_key = ca_key.public_key
+  ca_cert.subject = ca_name
+  ca_cert.issuer = ca_name
+  extension_factory = OpenSSL::X509::ExtensionFactory.new
+  extension_factory.subject_certificate = ca_cert
+  extension_factory.issuer_certificate = ca_cert
+  ca_cert.add_extension extension_factory.create_extension(
+    'subjectKeyIdentifier', 'hash'
+  )
+  ca_cert.add_extension extension_factory.create_extension(
+    'basicConstraints', 'CA:TRUE', true
+  )
+  ca_cert.sign ca_key, OpenSSL::Digest::SHA1.new
+  ret[:ca] = {
+    :cert => {
+      :pem => ca_cert.to_pem,
+      :path => path + '/ca_cert.pem',
+    }
+  }
+
+  num_certs.times do |i|
+    key, cert, serial = gen_cert_pair serial, ca_cert
+    cert.sign ca_key, OpenSSL::Digest::SHA1.new
+    ret[:clients] << {
+      :key => {
+        :pem => key.to_pem,
+        :path => path + '/' + i.to_s + '_key.pem',
+      },
+      :cert => {
+        :pem => cert.to_pem,
+        :path => path + '/' + i.to_s + '_cert.pem',
+      },
+    }
+  end
+
+  ret
+end
+
+def gen_cert_pair serial, ca_cert
+  serial += 1
+  # Node Key
+  key = OpenSSL::PKey::RSA.new 2048
+  node_name = OpenSSL::X509::Name.parse 'CN=localhost/DC=example'
+
+  # Node Cert
+  cert = OpenSSL::X509::Certificate.new
+  cert.serial = serial
+  cert.version = 2
+  cert.not_before = Time.now
+  cert.not_after = Time.now + 6000
+
+  cert.subject = node_name
+  cert.public_key = key.public_key
+  cert.issuer = ca_cert.subject
+
+  csr_extension_factory = OpenSSL::X509::ExtensionFactory.new
+  csr_extension_factory.subject_certificate = cert
+  csr_extension_factory.issuer_certificate = ca_cert
+
+  cert.add_extension csr_extension_factory.create_extension(
+    'basicConstraints',
+    'CA:FALSE'
+  )
+  cert.add_extension csr_extension_factory.create_extension(
+    'keyUsage',
+    'keyEncipherment,dataEncipherment,digitalSignature'
+  )
+  cert.add_extension csr_extension_factory.create_extension(
+    'subjectKeyIdentifier', 'hash'
+  )
+  [key, cert, serial]
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/templates/001_elasticsearch.yml.erb_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,89 @@
+require 'spec_helper'
+require 'yaml'
+
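+# Spec-only String helpers: `config` prefixes the managed-by-Puppet header the
+# template is expected to emit, and `unindent` strips the common leading
+# indentation from the heredoc-style YAML expectations below.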
+class String
+  def config
+    "### MANAGED BY PUPPET ###\n---#{unindent}"
+  end
+
+  def unindent
+    gsub(/^#{scan(/^\s*/).min_by{|l|l.length}}/, "")
+  end
+end
+
+describe 'elasticsearch.yml.erb' do
+
+  let :harness do
+    TemplateHarness.new(
+      'templates/etc/elasticsearch/elasticsearch.yml.erb'
+    )
+  end
+
+  it 'should render normal hashes' do
+    harness.set(
+      '@data', {
+        'node.name' => 'test',
+        'path.data' => '/mnt/test',
+        'discovery.zen.ping.unicast.hosts' => [
+          'host1', 'host2'
+        ]
+      }
+    )
+
+    expect( YAML.load(harness.run) ).to eq( YAML.load(%q{
+      discovery.zen.ping.unicast.hosts:
+        - host1
+        - host2
+      node.name: test
+      path.data: /mnt/test
+      }.config))
+  end
+
+  it 'should render arrays of hashes correctly' do
+    harness.set(
+      '@data', {
+        'data' => [
+          { 'key' => 'value0',
+            'other_key' => 'othervalue0' },
+          { 'key' => 'value1',
+            'other_key' => 'othervalue1' }
+        ]
+      }
+    )
+
+    expect( YAML.load(harness.run) ).to eq( YAML.load(%q{
+      data:
+      - key: value0
+        other_key: othervalue0
+      - key: value1
+        other_key: othervalue1
+      }.config))
+  end
+
+  it 'should quote IPv6 loopback addresses' do
+    harness.set(
+      '@data', {
+        'network.host' => ['::', '[::]']
+      }
+    )
+
+    expect( YAML.load(harness.run) ).to eq( YAML.load(%q{
+      network.host:
+        - "::"
+        - "[::]"
+      }.config))
+  end
+
+  it 'should not quote numeric values' do
+    harness.set(
+      '@data', {
+        'some.setting' => '10'
+      }
+    )
+
+    expect( YAML.load(harness.run) ).to eq( YAML.load(%q{
+      some.setting: 10
+    }.config))
+  end
+
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elastic_yaml_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,68 @@
+$LOAD_PATH.unshift(File.join(File.dirname(__FILE__),'..','..','lib'))
+
+require 'spec_helper'
+require 'puppet/provider/elastic_yaml'
+
+class String
+  def flattened
+    split("\n").reject(&:empty?).map(&:strip).join("\n").strip
+  end
+end
+
+describe Puppet::Provider::ElasticYaml do
+
+  subject do
+    described_class.tap do |o|
+      o.instance_eval { @metadata = :metadata }
+    end
+  end
+
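+  # Keys are deliberately out of order (including nested hashes and an array
+  # of hashes) to exercise the recursive sorting performed by to_file.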
+  let :unsorted_hash do
+    [{
+      :name => 'role',
+      :metadata => {
+        'zeta' => {
+          'zeta'  => 5,
+          'gamma' => 4,
+          'delta' => 3,
+          'beta'  => 2,
+          'alpha' => 1
+        },
+        'phi' => [{
+          'zeta'  => 3,
+          'gamma' => 2,
+          'alpha' => 1
+        }],
+        'beta'  => 'foobaz',
+        'gamma' => 1,
+        'alpha' => 'foobar'
+      }
+    }]
+  end
+
+  it { is_expected.to respond_to :to_file }
+
+  describe 'to_file' do
+    it 'returns sorted yaml' do
+      expect(described_class.to_file(unsorted_hash).flattened).to(
+        eq(%q{
+          ---
+          role:
+            alpha: foobar
+            beta: foobaz
+            gamma: 1
+            phi:
+              - alpha: 1
+                gamma: 2
+                zeta: 3
+            zeta:
+              alpha: 1
+              beta: 2
+              delta: 3
+              gamma: 4
+              zeta: 5
+        }.flattened)
+      )
+    end
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_plugin/elasticsearch_plugin_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,28 @@
+require 'spec_helper'
+require_relative 'shared_examples'
+
+provider_class = Puppet::Type.type(:elasticsearch_plugin)
+  .provider(:elasticsearch_plugin)
+
+describe provider_class do
+
+  let(:resource_name) { 'lmenezes/elasticsearch-kopf' }
+  let(:resource) do
+    Puppet::Type.type(:elasticsearch_plugin).new(
+      :name     => resource_name,
+      :ensure   => :present,
+      :provider => 'elasticsearch_plugin'
+    )
+  end
+  let(:provider) do
+    provider = provider_class.new
+    provider.resource = resource
+    provider
+  end
+  let(:shortname) { provider.plugin_name(resource_name) }
+  let(:klass) { provider_class }
+
+  include_examples 'plugin provider',
+    '5.x',
+    'Version: 5.0.0-alpha5, Build: d327dd4/2016-08-04T08:59:39.568Z, JVM: 1.8.0_101'
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_plugin/plugin_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,30 @@
+require 'spec_helper'
+require_relative 'shared_examples'
+
+provider_class = Puppet::Type.type(:elasticsearch_plugin).provider(:plugin)
+
+describe provider_class do
+
+  let(:resource_name) { 'lmenezes/elasticsearch-kopf' }
+  let(:resource) do
+    Puppet::Type.type(:elasticsearch_plugin).new(
+      :name     => resource_name,
+      :ensure   => :present,
+      :provider => 'plugin'
+    )
+  end
+  let(:provider) do
+    provider = provider_class.new
+    provider.resource = resource
+    provider
+  end
+  let(:klass) { provider_class }
+
+  include_examples 'plugin provider',
+    '1.x',
+    'Version: 1.7.1, Build: b88f43f/2015-07-29T09:54:16Z, JVM: 1.7.0_79'
+
+  include_examples 'plugin provider',
+    '2.x',
+    'Version: 2.0.0, Build: de54438/2015-10-22T08:09:48Z, JVM: 1.8.0_66'
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_plugin/shared_examples.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,166 @@
+require 'puppet/util/package'
+
+shared_examples 'plugin provider' do |version, build|
+  describe "elasticsearch #{version}" do
+    before(:each) do
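+      # Stub the version probe so each example group runs against the
+      # `build` string supplied by the including spec.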
+      klass.expects(:es).with('-version').returns(build)
+      allow(File).to receive(:open)
+      provider.es_version
+    end
+
+    describe 'setup' do
+      it 'installs with default parameters' do
+        provider.expects(:plugin).with(
+          ['install', resource_name].tap do |args|
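+            # Extract the version number from the "Version: X.Y.Z, ..." build
+            # string to decide which CLI flags this provider needs.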
+            if build =~ (/^\S+\s+([^,]+),/)
+              if Puppet::Util::Package.versioncmp($1, '2.2.0') >= 0
+                args.insert 1, '--batch'
+              end
+              if $1.start_with? '2'
+                args.unshift '-Des.path.conf=/usr/share/elasticsearch'
+              end
+            end
+          end
+        )
+        provider.create
+      end
+
+      it 'installs via URLs' do
+        resource[:url] = 'http://url/to/my/plugin.zip'
+        provider.expects(:plugin).with(['install'].tap { |args|
+              if version.start_with? '2'
+                args.unshift '-Des.path.conf=/usr/share/elasticsearch'
+              end
+            } + ['http://url/to/my/plugin.zip'].tap { |args|
+            build =~ (/^\S+\s+([^,]+),/)
+            if $1.start_with? '1'
+              args.unshift('kopf', '--url')
+            end
+
+            if Puppet::Util::Package.versioncmp($1, '2.2.0') >= 0
+              args.unshift '--batch'
+            end
+
+            args
+          }
+        )
+        provider.create
+      end
+
+      it 'installs with a local file' do
+        resource[:source] = '/tmp/plugin.zip'
+        provider.expects(:plugin).with(['install'].tap { |args|
+            if version.start_with? '2'
+              args.unshift '-Des.path.conf=/usr/share/elasticsearch'
+            end
+          } + ['file:///tmp/plugin.zip'].tap { |args|
+            build =~ (/^\S+\s+([^,]+),/)
+            if $1.start_with? '1'
+              args.unshift('kopf', '--url')
+            end
+
+            if Puppet::Util::Package.versioncmp($1, '2.2.0') >= 0
+              args.unshift '--batch'
+            end
+
+            args
+          }
+        )
+        provider.create
+      end
+
+      it 'sets the path.conf Elasticsearch Java property' do
+        expect(provider.with_environment do
+          ENV['ES_JAVA_OPTS']
+        end).to eq(
+          if version.start_with? '2'
+            ''
+          else
+            '-Des.path.conf=/usr/share/elasticsearch'
+          end
+        )
+      end
+
+      describe 'proxying' do
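+        # On ES 2.x the proxy settings are passed as plugin-command flags;
+        # on other versions they are expected in ES_JAVA_OPTS instead.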
+        it 'installs behind a proxy' do
+          resource[:proxy] = 'http://localhost:3128'
+          if version.start_with? '2'
+            provider
+              .expects(:plugin)
+              .with([
+                '-Des.path.conf=/usr/share/elasticsearch',
+                '-Dhttp.proxyHost=localhost',
+                '-Dhttp.proxyPort=3128',
+                '-Dhttps.proxyHost=localhost',
+                '-Dhttps.proxyPort=3128',
+                'install',
+                resource_name
+              ])
+            provider.create
+          else
+            expect(provider.with_environment do
+              ENV['ES_JAVA_OPTS']
+            end).to eq([
+              '-Des.path.conf=/usr/share/elasticsearch',
+              '-Dhttp.proxyHost=localhost',
+              '-Dhttp.proxyPort=3128',
+              '-Dhttps.proxyHost=localhost',
+              '-Dhttps.proxyPort=3128'
+            ].join(' '))
+          end
+        end
+
+        it 'uses authentication credentials' do
+          resource[:proxy] = 'http://elastic:password@es.local:8080'
+          if version.start_with? '2'
+            provider
+              .expects(:plugin)
+              .with([
+                '-Des.path.conf=/usr/share/elasticsearch',
+                '-Dhttp.proxyHost=es.local',
+                '-Dhttp.proxyPort=8080',
+                '-Dhttp.proxyUser=elastic',
+                '-Dhttp.proxyPassword=password',
+                '-Dhttps.proxyHost=es.local',
+                '-Dhttps.proxyPort=8080',
+                '-Dhttps.proxyUser=elastic',
+                '-Dhttps.proxyPassword=password',
+                'install',
+                resource_name
+              ])
+            provider.create
+          else
+            expect(provider.with_environment do
+              ENV['ES_JAVA_OPTS']
+            end).to eq([
+              '-Des.path.conf=/usr/share/elasticsearch',
+              '-Dhttp.proxyHost=es.local',
+              '-Dhttp.proxyPort=8080',
+              '-Dhttp.proxyUser=elastic',
+              '-Dhttp.proxyPassword=password',
+              '-Dhttps.proxyHost=es.local',
+              '-Dhttps.proxyPort=8080',
+              '-Dhttps.proxyUser=elastic',
+              '-Dhttps.proxyPassword=password'
+            ].join(' '))
+          end
+        end
+      end
+    end # of setup
+
+    describe 'plugin_name' do
+      let(:resource_name) { 'appbaseio/dejaVu' }
+
+      it 'maintains mixed-case names' do
+        expect(provider.pluginfile).to include('dejaVu')
+      end
+    end
+
+    describe 'removal' do
+      it 'uninstalls the plugin' do
+        provider.expects(:plugin).with(['remove', resource_name])
+        provider.destroy
+      end
+    end
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_role/parsed_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,60 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_role).provider(:parsed) do
+
+  describe 'instances' do
+    it 'should have an instance method' do
+      expect(described_class).to respond_to :instances
+    end
+
+    context 'with no roles' do
+      it 'should return no resources' do
+        expect(described_class.parse("\n")).to eq([])
+      end
+    end
+
+    context 'with one role' do
+      it 'should return one resource' do
+        expect(described_class.parse(%q{
+          admin:
+            cluster: all
+            indices:
+              '*': all
+        })[0]).to eq({
+          :ensure => :present,
+          :name => 'admin',
+          :privileges => {
+            'cluster' => 'all',
+            'indices' => {
+              '*' => 'all',
+            },
+          },
+        })
+      end
+    end
+
+    context 'with multiple roles' do
+      it 'should return three resources' do
+        expect(described_class.parse(%q{
+          admin:
+            cluster: all
+            indices:
+              '*': all
+          user:
+            indices:
+                '*': read
+          power_user:
+            cluster: monitor
+            indices:
+              '*': all
+        }).length).to eq(3)
+      end
+    end
+  end # of describe instances
+
+  describe 'prefetch' do
+    it 'should have a prefetch method' do
+      expect(described_class).to respond_to :prefetch
+    end
+  end
+end # of describe puppet type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_role_mapping/parsed_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,52 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_role_mapping).provider(:parsed) do
+
+  describe 'instances' do
+    it 'should have an instance method' do
+      expect(described_class).to respond_to :instances
+    end
+
+    context 'with no roles' do
+      it 'should return no resources' do
+        expect(described_class.parse("\n")).to eq([])
+      end
+    end
+
+    context 'with one role' do
+      it 'should return one resource' do
+        expect(described_class.parse(%q{
+          admin:
+            - "cn=users,dc=example,dc=com"
+        })[0]).to eq({
+          :ensure => :present,
+          :name => 'admin',
+          :mappings => [
+            "cn=users,dc=example,dc=com"
+          ]
+        })
+      end
+    end
+
+    context 'with multiple roles' do
+      it 'should return three resources' do
+        expect(described_class.parse(%q{
+          admin:
+            - "cn=users,dc=example,dc=com"
+          user:
+            - "cn=users,dc=example,dc=com"
+            - "cn=admins,dc=example,dc=com"
+            - "cn=John Doe,cn=other users,dc=example,dc=com"
+          power_user:
+            - "cn=admins,dc=example,dc=com"
+        }).length).to eq(3)
+      end
+    end
+  end # of describe instances
+
+  describe 'prefetch' do
+    it 'should have a prefetch method' do
+      expect(described_class).to respond_to :prefetch
+    end
+  end
+end # of describe puppet type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_user/esusers_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,63 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_user).provider(:esusers) do
+
+  describe 'instances' do
+    it 'should have an instance method' do
+      expect(described_class).to respond_to :instances
+    end
+
+    context 'without users' do
+      before do
+        described_class.expects(:esusers_with_path).with('list').returns(
+          'No users found'
+        )
+      end
+
+      it 'should return no resources' do
+        expect(described_class.instances.size).to eq(0)
+      end
+    end
+
+    context 'with one user' do
+      before do
+        described_class.expects(:esusers_with_path).with('list').returns(
+          'elastic        : admin*,power_user'
+        )
+      end
+
+      it 'should return one resource' do
+        expect(described_class.instances[0].instance_variable_get(
+          "@property_hash"
+        )).to eq({
+          :ensure => :present,
+          :name => 'elastic',
+          :provider => :esusers,
+        })
+      end
+    end
+
+    context 'with multiple users' do
+      before do
+        described_class.expects(
+          :esusers_with_path
+        ).with('list').returns(<<-EOL
+          elastic        : admin*
+          logstash       : user
+          kibana         : kibana
+        EOL
+        )
+      end
+
+      it 'should return three resources' do
+        expect(described_class.instances.length).to eq(3)
+      end
+    end
+  end # of describe instances
+
+  describe 'prefetch' do
+    it 'should have a prefetch method' do
+      expect(described_class).to respond_to :prefetch
+    end
+  end
+end # of describe puppet type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_user/parsed_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,45 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_user).provider(:parsed) do
+
+  describe 'instances' do
+    it 'should have an instance method' do
+      expect(described_class).to respond_to :instances
+    end
+
+    context 'without users' do
+      it 'should return no resources' do
+        expect(described_class.parse("\n")).to eq([])
+      end
+    end
+
+    context 'with one user' do
+      it 'should return one resource' do
+        expect(described_class.parse(%q{
+          elastic:$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C
+        }.gsub(/^\s+/, ''))[0]).to eq({
+          :name => 'elastic',
+          :hashed_password => '$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C',
+          :record_type => :parsed,
+        })
+      end
+    end
+
+    context 'with multiple users' do
+      it 'should return three resources' do
+        expect(described_class.parse(%q{
+
+          admin:$2a$10$DddrTs0PS3qNknUTq0vpa.g.0JpU.jHDdlKp1xox1W5ZHX.w8Cc8C
+          user:$2a$10$caYr8GhYeJ2Yo0yEhQhQvOjLSwt8Lm6MKQWx8WSnZ/L/IL5sGdQFu
+          kibana:$2a$10$daYr8GhYeJ2Yo0yEhQhQvOjLSwt8Lm6MKQWx8WSnZ/L/IL5sGdQFu
+        }.gsub(/^\s+/, '')).length).to eq(3)
+      end
+    end
+  end # of describe instances
+
+  describe 'prefetch' do
+    it 'should have a prefetch method' do
+      expect(described_class).to respond_to :prefetch
+    end
+  end
+end # of describe puppet type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_shield_user_roles/parsed_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,45 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_user_roles)
+  .provider(:parsed) do
+
+  describe 'instances' do
+    it 'should have an instance method' do
+      expect(described_class).to respond_to :instances
+    end
+
+    context 'without roles' do
+      it 'should return no resources' do
+        expect(described_class.parse("\n")).to eq([])
+      end
+    end
+
+    context 'with one user' do
+      it 'should return one resource' do
+        expect(described_class.parse(%q{
+          admin:elastic
+          power_user:elastic
+        })[0]).to eq({
+          :name => 'elastic',
+          :roles => ['admin', 'power_user']
+        })
+      end
+    end
+
+    context 'with multiple users' do
+      it 'should return three resources' do
+        expect(described_class.parse(%q{
+          admin:elastic
+          logstash:user
+          kibana:kibana
+        }).length).to eq(3)
+      end
+    end
+  end # of describe instances
+
+  describe 'prefetch' do
+    it 'should have a prefetch method' do
+      expect(described_class).to respond_to :prefetch
+    end
+  end
+end # of describe puppet type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/provider/elasticsearch_template/ruby.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,160 @@
+require 'spec_helper'
+require 'webmock/rspec'
+
+describe Puppet::Type.type(:elasticsearch_template).provider(:ruby) do
+
+  describe 'instances' do
+    context 'with no templates' do
+      before :all do
+        stub_request(:get, 'http://localhost:9200/_template').
+          to_return(
+            :status => 200,
+            :body => '{}'
+        )
+      end
+
+      it 'returns an empty list' do
+        expect(described_class.instances).to eq([])
+      end
+    end
+  end
+
+  describe 'multiple templates' do
+    before :all do
+      stub_request(:get, 'http://localhost:9200/_template').
+        to_return(
+          :status => 200,
+          :body => <<-EOS
+            {
+              "foobar1": {
+                "aliases": {},
+                "mappings": {},
+                "order": 1,
+                "settings": {},
+                "template": "foobar1-*"
+              },
+              "foobar2": {
+                "aliases": {},
+                "mappings": {},
+                "order": "2",
+                "settings": {},
+                "template": "foobar2-*"
+              }
+            }
+          EOS
+      )
+    end
+
+    it 'returns two templates' do
+      expect(described_class.instances.map { |provider|
+        provider.instance_variable_get(:@property_hash)
+      }).to contain_exactly({
+        :name => 'foobar1',
+        :ensure => :present,
+        :provider => :ruby,
+        :content => {
+          'aliases' => {},
+          'mappings' => {},
+          'settings' => {},
+          'template' => 'foobar1-*',
+          'order' => 1,
+        }
+      },{
+        :name => 'foobar2',
+        :ensure => :present,
+        :provider => :ruby,
+        :content => {
+          'aliases' => {},
+          'mappings' => {},
+          'settings' => {},
+          'template' => 'foobar2-*',
+          'order' => 2,
+        }
+      })
+    end
+  end
+
+  describe 'basic authentication' do
+    before :all do
+      stub_request(:get, 'http://localhost:9200/_template').
+        with(:basic_auth => ['elastic', 'password']).
+        to_return(
+          :status => 200,
+          :body => <<-EOS
+            {
+              "foobar3": {
+                "aliases": {},
+                "mappings": {},
+                "order": 3,
+                "settings": {},
+                "template": "foobar3-*"
+              }
+            }
+          EOS
+      )
+    end
+
+    it 'authenticates' do
+      expect(described_class.templates(
+        'http', true, 'localhost', '9200', 10, 'elastic', 'password'
+      ).map { |provider|
+        described_class.new(
+          provider
+        ).instance_variable_get(:@property_hash)
+      }).to contain_exactly({
+        :name => 'foobar3',
+        :ensure => :present,
+        :provider => :ruby,
+        :content => {
+          'aliases' => {},
+          'mappings' => {},
+          'settings' => {},
+          'template' => 'foobar3-*',
+          'order' => 3,
+        }
+      })
+    end
+  end
+
+  describe 'https' do
+    before :all do
+      stub_request(:get, 'https://localhost:9200/_template').
+        to_return(
+          :status => 200,
+          :body => <<-EOS
+            {
+              "foobar-ssl": {
+                "aliases": {},
+                "mappings": {},
+                "order": 10,
+                "settings": {},
+                "template": "foobar-ssl-*"
+              }
+            }
+          EOS
+      )
+    end
+
+    it 'uses ssl' do
+      expect(described_class.templates(
+        'https', true, 'localhost', '9200', 10
+      ).map { |provider|
+        described_class.new(
+          provider
+        ).instance_variable_get(:@property_hash)
+      }).to contain_exactly({
+        :name => 'foobar-ssl',
+        :ensure => :present,
+        :provider => :ruby,
+        :content => {
+          'aliases' => {},
+          'mappings' => {},
+          'settings' => {},
+          'template' => 'foobar-ssl-*',
+          'order' => 10,
+        }
+      })
+    end
+  end
+
+end # of describe puppet type
--- a/dev/provisioning/modules/elasticsearch/spec/unit/provider/plugin_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,112 +0,0 @@
-require 'spec_helper'
-
-provider_class = Puppet::Type.type(:elasticsearch_plugin).provider(:plugin)
-
-describe provider_class do
-
-  let(:resource_name) { 'lmenezes/elasticsearch-kopf' }
-  let(:resource) do
-    Puppet::Type.type(:elasticsearch_plugin).new(
-      :name     => resource_name,
-      :ensure   => :present,
-      :provider => 'plugin'
-    )
-  end
-
-  let(:provider) do
-    provider = provider_class.new
-    provider.resource = resource
-    provider
-  end
-
-  describe "ES 1.x" do
-    before(:each) do
-      provider_class.expects(:es).with('-v').returns("Version: 1.7.1, Build: b88f43f/2015-07-29T09:54:16Z, JVM: 1.7.0_79")
-      allow(File).to receive(:open)
-      provider.es_version
-    end
-
-    let(:shortname) { provider.plugin_name(resource_name) }
-
-    describe 'install' do
-      it 'installs plugin' do
-        provider.expects(:plugin).with(['install', [ resource_name] ])
-        provider.create
-      end
-
-
-      it 'with url' do
-        resource[:url] = 'http://url/to/my/plugin.zip'
-        provider.expects(:plugin).with(['install', [ shortname, '--url', 'http://url/to/my/plugin.zip' ] ])
-        provider.create
-      end
-
-      it 'with local file' do
-        resource[:source] = '/tmp/plugin.zip'
-        provider.expects(:plugin).with(['install', [ shortname, '--url', 'file:///tmp/plugin.zip' ] ])
-        provider.create
-      end
-
-      it 'with proxy' do
-        resource[:proxy_args] = '-dproxyport=3128 -dproxyhost=localhost'
-        provider.expects(:plugin).with([['-dproxyport=3128', '-dproxyhost=localhost'], 'install', [resource_name] ])
-        provider.create
-      end
-
-    end
-
-    describe 'removal' do
-      it 'destroys' do
-        provider.expects(:plugin).with(['remove', resource_name])
-        provider.destroy
-      end
-    end
-
-  end
-
-  describe "ES 2.x" do
-
-    before(:each) do
-      allow(provider_class).to receive(:es).with('-v').and_raise(Puppet::ExecutionFailure)
-      allow(provider_class).to receive(:es).with('--version').and_return("Version: 2.0.0, Build: de54438/2015-10-22T08:09:48Z, JVM: 1.8.0_66")
-      allow(File).to receive(:open)
-      provider.es_version
-    end
-
-    let(:shortname) { provider.plugin_name(resource_name) }
-
-    describe 'install' do
-      it 'installs plugin' do
-        provider.expects(:plugin).with(['install', [ resource_name] ])
-        provider.create
-      end
-
-      it 'with url' do
-        resource[:url] = 'http://url/to/my/plugin.zip'
-        provider.expects(:plugin).with(['install', [ 'http://url/to/my/plugin.zip' ] ])
-        provider.create
-      end
-
-      it 'with local file' do
-        resource[:source] = '/tmp/plugin.zip'
-        provider.expects(:plugin).with(['install', [ 'file:///tmp/plugin.zip' ] ])
-        provider.create
-      end
-
-      it 'with proxy' do
-        resource[:proxy_args] = '-dproxyport=3128 -dproxyhost=localhost'
-        provider.expects(:plugin).with([['-dproxyport=3128', '-dproxyhost=localhost'], 'install', [resource_name] ])
-        provider.create
-      end
-    end
-
-    describe 'removal' do
-      it 'destroys' do
-        provider.expects(:plugin).with(['remove', resource_name])
-        provider.destroy
-      end
-    end
-
-  end
-
-end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_plugin_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,61 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_plugin).provider(:plugin) do
+
+  let(:resource_name) { "lmenezes/elasticsearch-kopf" }
+
+  describe "input validation" do
+
+    let(:type) { Puppet::Type.type(:elasticsearch_plugin) }
+
+    before do
+      Process.stubs(:euid).returns 0
+      Puppet::Util::Storage.stubs(:store)
+    end
+
+    it "should default to being installed" do
+      plugin = Puppet::Type.type(:elasticsearch_plugin).new(:name => resource_name )
+      expect(plugin.should(:ensure)).to eq(:present)
+    end
+
+    describe "when validating attributes" do
+      [:name, :source, :url, :proxy].each do |param|
+        it "should have a #{param} parameter" do
+          expect(type.attrtype(param)).to eq(:param)
+        end
+      end
+
+      it "should have an ensure property" do
+        expect(type.attrtype(:ensure)).to eq(:property)
+      end
+    end
+
+  end
+
+end
+
+describe 'other tests' do
+
+  prov_c = Puppet::Type.type(:elasticsearch_plugin).provider(:plugin)
+
+  describe prov_c do
+
+    it 'should install a plugin' do
+      resource = Puppet::Type.type(:elasticsearch_plugin).new(
+        :name => "lmenezes/elasticsearch-kopf",
+        :ensure => :present
+      )
+      allow(File).to receive(:open)
+      provider = prov_c.new(resource)
+      provider.expects(:es)
+        .with('-version')
+        .returns('Version: 1.7.3, Build: b88f43f/2015-07-29T09:54:16Z, JVM: 1.7.0_79')
+      provider.expects(:plugin).with([
+        'install',
+        'lmenezes/elasticsearch-kopf'
+      ])
+      provider.create
+    end
+
+  end
+end
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_role_mapping_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,67 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_role_mapping) do
+
+  let(:resource_name) { 'elastic_role' }
+
+  describe 'when validating attributes' do
+    [:name].each do |param|
+      it "should have a #{param} parameter" do
+        expect(described_class.attrtype(param)).to eq(:param)
+      end
+    end
+
+    [:ensure, :mappings].each do |prop|
+      it "should have a #{prop} property" do
+        expect(described_class.attrtype(prop)).to eq(:property)
+      end
+    end
+  end # of describe when validating attributes
+
+  describe 'when validating values' do
+    describe 'ensure' do
+      it 'should support present as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :present,
+        ) }.to_not raise_error
+      end
+
+      it 'should support absent as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :absent,
+        ) }.to_not raise_error
+      end
+
+      it 'should not support other values' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :foo,
+        ) }.to raise_error(Puppet::Error, /Invalid value/)
+      end
+    end
+
+    describe 'name' do
+      it 'should reject long role names' do
+        expect { described_class.new(
+          :name => 'a'*31,
+        ) }.to raise_error(
+          Puppet::ResourceError,
+          /valid values/i
+        )
+      end
+
+      it 'should reject invalid role characters' do
+        ['@foobar', '0foobar'].each do |role|
+          expect { described_class.new(
+            :name => role,
+        ) }.to raise_error(
+          Puppet::ResourceError,
+          /valid values/i
+        )
+        end
+      end
+    end
+  end # of describe when validating values
+end # of describe Puppet::Type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_role_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,67 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_role) do
+
+  let(:resource_name) { 'elastic_role' }
+
+  describe 'when validating attributes' do
+    [:name].each do |param|
+      it "should have a #{param} parameter" do
+        expect(described_class.attrtype(param)).to eq(:param)
+      end
+    end
+
+    [:ensure, :privileges].each do |prop|
+      it "should have a #{prop} property" do
+        expect(described_class.attrtype(prop)).to eq(:property)
+      end
+    end
+  end # of describe when validating attributes
+
+  describe 'when validating values' do
+    describe 'ensure' do
+      it 'should support present as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :present,
+        ) }.to_not raise_error
+      end
+
+      it 'should support absent as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :absent,
+        ) }.to_not raise_error
+      end
+
+      it 'should not support other values' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :foo,
+        ) }.to raise_error(Puppet::Error, /Invalid value/)
+      end
+    end
+
+    describe 'name' do
+      it 'should reject long role names' do
+        expect { described_class.new(
+          :name => 'a'*31,
+        ) }.to raise_error(
+          Puppet::ResourceError,
+          /valid values/i
+        )
+      end
+
+      it 'should reject invalid role characters' do
+        ['@foobar', '0foobar'].each do |role|
+          expect { described_class.new(
+            :name => role,
+        ) }.to raise_error(
+          Puppet::ResourceError,
+          /valid values/i
+        )
+        end
+      end
+    end
+  end # of describe when validating values
+end # of describe Puppet::Type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_user_roles_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,51 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_user_roles) do
+
+  let(:resource_name) { 'elastic' }
+
+  describe 'when validating attributes' do
+    [:name].each do |param|
+      it "should have a #{param} parameter" do
+        expect(described_class.attrtype(param)).to eq(:param)
+      end
+    end
+
+    [:ensure, :roles].each do |prop|
+      it "should have a #{prop} property" do
+        expect(described_class.attrtype(prop)).to eq(:property)
+      end
+    end
+
+    describe 'namevar validation' do
+      it 'should have :name as its namevar' do
+        expect(described_class.key_attributes).to eq([:name])
+      end
+    end
+  end # of describe when validating attributes
+
+  describe 'when validating values' do
+    describe 'ensure' do
+      it 'should support present as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :present,
+        ) }.to_not raise_error
+      end
+
+      it 'should support absent as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :absent,
+        ) }.to_not raise_error
+      end
+
+      it 'should not support other values' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :foo,
+        ) }.to raise_error(Puppet::Error, /Invalid value/)
+      end
+    end
+  end # of describe when validating values
+end # of describe Puppet::Type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_shield_user_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,60 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_shield_user) do
+
+  let(:resource_name) { 'elastic' }
+
+  describe 'when validating attributes' do
+    [:name, :password].each do |param|
+      it "should have a #{param} parameter" do
+        expect(described_class.attrtype(param)).to eq(:param)
+      end
+    end
+
+    [:ensure].each do |prop|
+      it "should have a #{prop} property" do
+        expect(described_class.attrtype(prop)).to eq(:property)
+      end
+    end
+
+    describe 'namevar validation' do
+      it 'should have :name as its namevar' do
+        expect(described_class.key_attributes).to eq([:name])
+      end
+    end
+  end # of describe when validating attributes
+
+  describe 'when validating values' do
+    describe 'ensure' do
+      it 'should support present as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :present,
+        ) }.to_not raise_error
+      end
+
+      it 'should support absent as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :absent,
+        ) }.to_not raise_error
+      end
+
+      it 'should not support other values' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :foo,
+        ) }.to raise_error(Puppet::Error, /Invalid value/)
+      end
+    end
+
+    describe 'password' do
+      it 'should reject short passwords' do
+        expect { described_class.new(
+          :name => resource_name,
+          :password => 'foo',
+        ) }.to raise_error(Puppet::Error, /must be at least/)
+      end
+    end
+  end # of describe when validating values
+end # of describe Puppet::Type
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/spec/unit/type/elasticsearch_template_spec.rb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,227 @@
+require 'spec_helper'
+
+describe Puppet::Type.type(:elasticsearch_template) do
+
+  let(:resource_name) { 'test_template' }
+
+  describe 'attribute validation' do
+    [
+      :name,
+      :source,
+      :host,
+      :port,
+      :protocol,
+      :validate_tls,
+      :ca_file,
+      :ca_path,
+      :timeout,
+      :username,
+      :password
+    ].each do |param|
+      it "should have a #{param} parameter" do
+        expect(described_class.attrtype(param)).to eq(:param)
+      end
+    end
+
+    [:content, :ensure].each do |prop|
+      it "should have a #{prop} property" do
+        expect(described_class.attrtype(prop)).to eq(:property)
+      end
+    end
+
+    describe 'namevar validation' do
+      it 'should have :name as its namevar' do
+        expect(described_class.key_attributes).to eq([:name])
+      end
+    end
+
+    describe 'content' do
+      it 'should reject non-hash values' do
+        expect { described_class.new(
+          :name => resource_name,
+          :content => '{"foo":}'
+        ) }.to raise_error(Puppet::Error, /hash expected/i)
+
+        expect { described_class.new(
+          :name => resource_name,
+          :content => 0
+        ) }.to raise_error(Puppet::Error, /hash expected/i)
+
+        expect { described_class.new(
+          :name => resource_name,
+          :content => {}
+        ) }.not_to raise_error
+      end
+
+      it 'should deeply parse PSON-like values' do
+        expect(described_class.new(
+          :name => resource_name,
+          :content => {'key'=>{'value'=>'0'}}
+        )[:content]).to include(
+          'key'=>{'value'=>0}
+        )
+      end
+    end
+
+    describe 'ensure' do
+      it 'should support present as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :present,
+          :content => {}
+        ) }.to_not raise_error
+      end
+
+      it 'should support absent as a value for ensure' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :absent
+        ) }.to_not raise_error
+      end
+
+      it 'should not support other values' do
+        expect { described_class.new(
+          :name => resource_name,
+          :ensure => :foo,
+          :content => {}
+        ) }.to raise_error(Puppet::Error, /Invalid value/)
+      end
+    end
+
+    describe 'host' do
+      it 'should accept IP addresses' do
+        expect { described_class.new(
+          :name => resource_name,
+          :content => {},
+          :host => '127.0.0.1'
+        ) }.not_to raise_error
+      end
+    end
+
+    describe 'port' do
+      [-1, 0, 70000, 'foo'].each do |value|
+        it "should reject invalid port value #{value}" do
+          expect { described_class.new(
+            :name => resource_name,
+            :content => {},
+            :port => value
+          ) }.to raise_error(Puppet::Error, /invalid port/i)
+        end
+      end
+    end
+
+    describe 'validate_tls' do
+      [-1, 0, {}, [], 'foo'].each do |value|
+        it "should reject invalid ssl_verify value #{value}" do
+          expect { described_class.new(
+            :name => resource_name,
+            :content => {},
+            :validate_tls => value
+          ) }.to raise_error(Puppet::Error, /invalid value/i)
+        end
+      end
+
+      [true, false, 'true', 'false', 'yes', 'no'].each do |value|
+        it "should accept validate_tls value #{value}" do
+          expect { described_class.new(
+            :name => resource_name,
+            :content => {},
+            :validate_tls => value
+          ) }.not_to raise_error
+        end
+      end
+    end
+
+    describe 'timeout' do
+      it 'should reject string values' do
+        expect { described_class.new(
+          :name => resource_name,
+          :content => {},
+          :timeout => 'foo'
+        ) }.to raise_error(Puppet::Error, /must be a/)
+      end
+
+      it 'should reject negative integers' do
+        expect { described_class.new(
+          :name => resource_name,
+          :content => {},
+          :timeout => -10
+        ) }.to raise_error(Puppet::Error, /must be a/)
+      end
+
+      it 'should accept integers' do
+        expect { described_class.new(
+          :name => resource_name,
+          :content => {},
+          :timeout => 10
+        ) }.to_not raise_error
+      end
+
+      it 'should accept quoted integers' do
+        expect { described_class.new(
+          :name => resource_name,
+          :content => {},
+          :timeout => '10'
+        ) }.to_not raise_error
+      end
+    end
+
+    describe 'content and source validation' do
+      it 'should require either "content" or "source"' do
+        expect { described_class.new(
+          :name => resource_name,
+        ) }.to raise_error(Puppet::Error, /content.*or.*source.*required/)
+      end
+
+      it 'should fail with both defined' do
+        expect { described_class.new(
+          :name => resource_name,
+          :content => {},
+          :source => 'puppet:///example.json'
+        ) }.to raise_error(Puppet::Error, /simultaneous/)
+      end
+
+      it 'should parse source paths into the content property' do
+        file_stub = 'foo'
+        [
+          Puppet::FileServing::Metadata,
+          Puppet::FileServing::Content
+        ].each do |klass|
+          allow(klass).to receive(:indirection)
+            .and_return(Object)
+        end
+        allow(Object).to receive(:find)
+          .and_return(file_stub)
+        allow(file_stub).to receive(:content)
+          .and_return('{"template":"foobar-*", "order": 1}')
+        expect(described_class.new(
+          :name => resource_name,
+          :source => '/example.json'
+        )[:content]).to include(
+          'template' => 'foobar-*',
+          'order' => 1
+        )
+      end
+
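+      # Bare settings are qualified under the 'index' namespace and numeric
+      # strings are cast to integers, as the expectation below shows.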
+      it 'should qualify settings' do
+        expect(described_class.new(
+          :name => resource_name,
+          :content => { 'settings' => {
+            'number_of_replicas' => '2',
+            'index' => { 'number_of_shards' => '3' }
+          } }
+        )[:content]).to eq({
+          'order' => 0,
+          'aliases' => {},
+          'mappings' => {},
+          'settings' => {
+            'index' => {
+              'number_of_replicas' => 2,
+              'number_of_shards' => 3
+            }
+          }
+        })
+      end
+    end
+  end # of describe attribute validation
+end # of describe Puppet::Type
--- a/dev/provisioning/modules/elasticsearch/spec/unit/type/plugin_spec.rb	Tue Nov 08 18:23:01 2016 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,57 +0,0 @@
-require 'spec_helper'
-
-describe Puppet::Type.type(:elasticsearch_plugin).provider(:plugin) do
-
-  let(:resource_name) { "lmenezes/elasticsearch-kopf" }
-
-  describe "input validation" do
-
-    let(:type) { Puppet::Type.type(:elasticsearch_plugin) }
-
-    before do
-      Process.stubs(:euid).returns 0
-      Puppet::Util::Storage.stubs(:store)
-    end
-
-    it "should default to being installed" do
-      plugin = Puppet::Type.type(:elasticsearch_plugin).new(:name => resource_name )
-      expect(plugin.should(:ensure)).to eq(:present)
-    end
-
-    describe "when validating attributes" do
-      [:name, :source, :url, :proxy_args].each do |param|
-        it "should have a #{param} parameter" do
-          expect(type.attrtype(param)).to eq(:param)
-        end
-      end
-
-      it "should have an ensure property" do
-        expect(type.attrtype(:ensure)).to eq(:property)
-      end
-    end
-
-  end
-
-end
-
-  describe 'other tests' do
-
-    prov_c = Puppet::Type.type(:elasticsearch_plugin).provider(:plugin)
-
-    describe prov_c do
-
-      it 'should install a plugin' do
-        resource = Puppet::Type.type(:elasticsearch_plugin).new(
-          :name => "lmenezes/elasticsearch-kopf",
-          :ensure => :present
-        )
-        allow(File).to receive(:open)
-        provider = prov_c.new(resource)
-        provider.expects(:es).with('-v').returns('Version: 1.7.3, Build: b88f43f/2015-07-29T09:54:16Z, JVM: 1.7.0_79')
-        provider.expects(:plugin).with(['install', ['lmenezes/elasticsearch-kopf']])
-        provider.create
-      end
-
-    end
-  end
-
--- a/dev/provisioning/modules/elasticsearch/templates/etc/elasticsearch/elasticsearch.yml.erb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/elasticsearch/elasticsearch.yml.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,93 +1,26 @@
+### MANAGED BY PUPPET ###
 <%-
-
-  # Function to make a structured and sorted yaml representation out of a hash
-  def recursive_hash_to_yml_string(hash, depth=0)
-    spacer = ""
-    depth.times { spacer += "  "}
-    hash.keys.sort.each do |sorted_key|
-      @yml_string += spacer + sorted_key + ": "
-      if hash[sorted_key].is_a?(::Array)
-         keyspacer = ""
-         sorted_key.length.times { keyspacer += " " }
-         @yml_string += "\n"
-         hash[sorted_key].each do |item|
-           @yml_string += spacer + keyspacer + "- " + item +"\n"
-         end
-      elsif hash[sorted_key].is_a?(::Hash)
-        @yml_string += "\n"
-        recursive_hash_to_yml_string(hash[sorted_key], depth+1)
-      else
-        @yml_string += "#{hash[sorted_key].to_s}\n"
-      end
-    end
-  end
-
-  # Function to transform shorted write up of the keys into full hash representation
-  def transform(hash)
-  return_vals = []
+  $LOAD_PATH.unshift(File.join(File.dirname(__FILE__),"..","..","..","lib"))
+  require 'puppet_x/elastic/deep_to_i'
+  require 'puppet_x/elastic/hash'
 
-  hash.each do |key,val|
-    if m = /^([^.]+)\.(.*)$/.match(key)
-      temp = { m[1] => { m[2] => val } }
-      transform(temp).each do |stuff|
-        return_vals << stuff
-      end
-    else
-      if val.is_a?(::Hash)
-        transform(val).each do |stuff|
-          return_vals << { key => stuff }
-        end
-      else
-        return_vals << { key => val }
-      end
-    end
-  end
-
-  return_vals
-  end
-
-  # Function to deep merge hashes with same keys
-  class ::Hash
-    def deep_merge_with_array_values_concatenated(hash)
-    target = dup
-
-    hash.keys.each do |key|
-      if hash[key].is_a? ::Hash and self[key].is_a? ::Hash
-        target[key] = target[key].deep_merge_with_array_values_concatenated(hash[key])
-        next
-      end
-
-      if hash[key].is_a?(::Array) && target[key].is_a?(::Array)
-        target[key] = target[key] + hash[key]
-      else
-        target[key] = hash[key]
-      end
-    end
-
-    target
-    end
-  end
-
-  # initial string
-  @yml_string = "### MANAGED BY PUPPET ###\n"
+  @yml_string = ''
 
   if !@data.empty?
 
-    @yml_string += "---\n"
-
-    ## Transform shorted keys into full write up
-    transformed_config = transform(@data)
+    # Sort Hash and transform it into yaml
+    @yml_string += Puppet_X::Elastic::deep_to_i(
+      @data
+    ).extend(
+      Puppet_X::Elastic::SortedHash
+    ).to_yaml
 
-    # Merge it back into a hash
-    tmphash = { }
-    transformed_config.each do |subhash|
-      tmphash = tmphash.deep_merge_with_array_values_concatenated(subhash)
+    # Puppet < 4 uses ZAML, which has some deviations from Puppet 4 YAML
+    # implementation
+    unless Puppet::Util::Package.versioncmp(Puppet.version, '4') >= 0
+      @yml_string.gsub!(/^\s{2}/, '')
     end
 
-    # Transform it into yaml
-    recursive_hash_to_yml_string(tmphash)
-
   end
-
 -%>
-<%= @yml_string -%>
+<%= @yml_string %>
--- a/dev/provisioning/modules/elasticsearch/templates/etc/elasticsearch/logging.yml.erb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/elasticsearch/logging.yml.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -28,9 +28,14 @@
       conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
 
   file:
-    type: dailyRollingFile
+    type: <%= @file_rolling_type %>
     file: ${path.logs}/${cluster.name}.log
-    datePattern: "'.'yyyy-MM-dd"
+    <%- if @file_rolling_type == 'dailyRollingFile' -%>
+    datePattern: <%= @daily_rolling_date_pattern %>
+    <%- elsif @file_rolling_type == 'rollingFile' -%>
+    maxBackupIndex: <%= @rolling_file_max_backup_index %>
+    maxFileSize: <%= @rolling_file_max_file_size %>
+    <%- end -%>
     layout:
       type: pattern
       conversionPattern: "[%d{ISO8601}][%-5p][%-25c] %m%n"
--- a/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.Debian.erb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.Debian.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -56,7 +56,7 @@
 export JAVA_HOME
 
 # Directory where the Elasticsearch binary distribution resides
-ES_HOME=/usr/share/$NAME
+#ES_HOME=/usr/share/$NAME
 
 # Heap Size (defaults to 256m min, 1g max)
 #ES_HEAP_SIZE=2g
@@ -77,19 +77,19 @@
 #MAX_LOCKED_MEMORY=
 
 # Elasticsearch log directory
-LOG_DIR=/var/log/$NAME
+#LOG_DIR=/var/log/$NAME
 
 # Elasticsearch data directory
-DATA_DIR=/var/lib/$NAME
+#DATA_DIR=/var/lib/$NAME
 
 # Elasticsearch work directory
 WORK_DIR=/tmp/$NAME
 
 # Elasticsearch configuration directory
-CONF_DIR=/etc/$NAME
+#CONF_DIR=/etc/$NAME
 
 # Elasticsearch configuration file (elasticsearch.yml)
-CONF_FILE=$CONF_DIR/elasticsearch.yml
+#CONF_FILE=$CONF_DIR/elasticsearch.yml
 
 # Maximum number of VMA (Virtual Memory Areas) a process can own
 MAX_MAP_COUNT=262144
@@ -112,6 +112,7 @@
 export ES_JAVA_OPTS
 export ES_CLASSPATH
 export ES_INCLUDE
+export ES_GC_LOG_FILE
 
 # Check DAEMON exists
 test -x $DAEMON || exit 0
@@ -148,7 +149,9 @@
 	fi
 
 	# Prepare environment
-	mkdir -p "$LOG_DIR" "$DATA_DIR" "$WORK_DIR" && chown "$ES_USER":"$ES_GROUP" "$LOG_DIR" "$DATA_DIR" "$WORK_DIR"
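+	# Create and chown only the directories that are actually set (skip empty values)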
+	for DIR in "$DATA_DIR" "$LOG_DIR" "$WORK_DIR"; do
+		[ ! -z "$DIR" ] && mkdir -p "$DIR" && chown "$ES_USER":"$ES_GROUP" "$DIR"
+	done
 	touch "$PID_FILE" && chown "$ES_USER":"$ES_GROUP" "$PID_FILE"
 
 	if [ -n "$MAX_OPEN_FILES" ]; then
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.OpenBSD.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,27 @@
+#!/bin/sh
+#
+# This file is managed via PUPPET
+
+daemon="/usr/local/elasticsearch/bin/elasticsearch"
+daemon_flags="-d -Des.default.path.conf=/etc/elasticsearch/<%= @name %> -p <%= @pid_dir %>/elasticsearch-<%= @name %>.pid"
+daemon_user="_elasticsearch"
+
+. /etc/rc.d/rc.subr
+
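+# process expression used by rc.subr(8) to locate the running daemon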
+pexp="$(/usr/local/bin/javaPathHelper -c elasticsearch) .*org.elasticsearch.bootstrap.Elasticsearch.*"
+
+rc_reload=NO
+
+rc_start() {
+	${rcexec} \
+		"ES_INCLUDE=\"/etc/elasticsearch/elasticsearch.in.sh\" \
+		CONF_DIR=\"/etc/elasticsearch\" \
+		JAVA_HOME=\"$(/usr/local/bin/javaPathHelper -h elasticsearch)\" \
+		${daemon} ${daemon_flags}"
+}
+
+rc_pre() {
+	install -d -o _elasticsearch /var/run/elasticsearch/
+}
+
+rc_cmd $1
--- a/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.RedHat.erb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.RedHat.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -44,6 +44,7 @@
 export ES_CLASSPATH
 export JAVA_HOME 
 export ES_INCLUDE
+export ES_GC_LOG_FILE
 
 lockfile=/var/lock/subsys/$prog
 
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.SLES.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,149 @@
+#!/bin/sh
+#
+# elasticsearch <%= @name %> <summary>
+#
+# chkconfig:   2345 80 20
+# description: Starts and stops a single elasticsearch instance on this system
+#
+
+### BEGIN INIT INFO
+# Provides: Elasticsearch-<%= @name %>
+# Required-Start: $network $named
+# Required-Stop: $network $named
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# Short-Description: This service manages the elasticsearch daemon
+# Description: Elasticsearch is a very scalable, schema-free and high-performance search solution supporting multi-tenancy and near realtime search.
+### END INIT INFO
+
+#
+# init.d / servicectl compatibility (openSUSE)
+#
+if [ -f /etc/rc.status ]; then
+    . /etc/rc.status
+    rc_reset
+fi
+
+#
+# Source function library.
+#
+if [ -f /etc/rc.d/init.d/functions ]; then
+    . /etc/rc.d/init.d/functions
+fi
+
+EXE="/usr/share/elasticsearch/bin/elasticsearch"
+prog="elasticsearch-<%= @name %>"
+pidfile=/var/run/elasticsearch/${prog}.pid
+
+export JAVA_HOME=/usr/java/latest
+JAVAPROG=${JAVA_HOME}/bin/java
+
+[ -e /etc/sysconfig/$prog ] && . /etc/sysconfig/$prog
+
+export ES_HEAP_SIZE
+export ES_HEAP_NEWSIZE
+export ES_DIRECT_SIZE
+export ES_JAVA_OPTS
+
+lockfile=/var/lock/subsys/$prog
+
+# backwards compatibility for old config sysconfig files, pre 0.90.1
+if [ -n "$USER" ] && [ -z "$ES_USER" ] ; then
+   ES_USER=$USER
+fi
+
+checkJava() {
+    if [ -x "$JAVA_HOME/bin/java" ]; then
+        JAVA="$JAVA_HOME/bin/java"
+    else
+        JAVA=`which java`
+    fi
+
+    if [ ! -x "$JAVA" ]; then
+        echo "Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME"
+        exit 1
+    fi
+}
+
+start() {
+    checkJava
+    [ -x $EXE ] || exit 5
+    [ -f $CONF_FILE ] || exit 6
+    if [ -n "$MAX_LOCKED_MEMORY" -a -z "$ES_HEAP_SIZE" ]; then
+        echo "MAX_LOCKED_MEMORY is set - ES_HEAP_SIZE must also be set"
+        return 7
+    fi
+    if [ -n "$MAX_OPEN_FILES" ]; then
+        ulimit -n $MAX_OPEN_FILES
+    fi
+    if [ -n "$MAX_LOCKED_MEMORY" ]; then
+        ulimit -l $MAX_LOCKED_MEMORY
+    fi
+    if [ -n "$MAX_MAP_COUNT" ]; then
+        sysctl -q -w vm.max_map_count=$MAX_MAP_COUNT
+    fi
+    if [ -n "$WORK_DIR" ]; then
+        mkdir -p "$WORK_DIR"
+        chown "$ES_USER":"$ES_GROUP" "$WORK_DIR"
+    fi
+    echo -n $"Starting $prog: "
+    # if not running, start it up here, usually something like "daemon $EXE"
+    startproc -u $ES_USER $EXE -d -p $pidfile -Des.default.path.home=$ES_HOME -Des.default.path.logs=$LOG_DIR -Des.default.path.data=$DATA_DIR -Des.default.path.work=$WORK_DIR -Des.default.path.conf=$CONF_DIR &
+    retval=$?
+    if [ $retval -eq 0 ]; then
+	touch $lockfile
+    fi
+    rc_status -v
+    return $retval
+}
+
+stop() {
+    echo -n $"Stopping $prog: "
+    #ps ax|grep $JAVA|grep `cat $pidfile`
+    killproc -p $pidfile $JAVAPROG
+    retval=$?
+
+    if [ $retval -eq 0 ]; then
+	rm -f $lockfile
+    fi
+    rc_status -v
+    return $retval
+}
+
+restart() {
+    stop
+    start
+}
+
+reload() {
+    restart
+}
+
+status() {
+    # run checks to determine if the service is running or use generic status
+    echo "Checking processes for elasticsearch"
+    checkproc -p $pidfile $JAVAPROG
+    rc_status -v
+}
+
+case "$1" in
+    start)
+	start
+        ;;
+    stop)
+        stop
+        ;;
+    restart)
+        restart
+        ;;
+    reload)
+        reload
+        ;;
+    status)
+        status
+        ;;
+    *)
+        echo $"Usage: $0 {start|stop|status|restart|reload}"
+        exit 2
+esac
+exit $?
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.openrc.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,87 @@
+#!/sbin/runscript
+
+name="Elasticsearch"
+description=""
+
+ES_USER=${ES_USER:="elasticsearch"}
+ES_INSTANCE=${SVCNAME#*.}
+
+if [ -n "${ES_INSTANCE}" ] && [ ${SVCNAME} != "elasticsearch" ]; then
+    PIDFILE="/run/elasticsearch/elasticsearch.${ES_INSTANCE}.pid"
+    ES_BASE_PATH="/var/lib/elasticsearch/${ES_INSTANCE}"
+    ES_CONF_PATH="/etc/elasticsearch/${ES_INSTANCE}"
+    ES_LOG_PATH="/var/log/elasticsearch/${ES_INSTANCE}"
+else
+    PIDFILE="/run/elasticsearch/elasticsearch.pid"
+    ES_BASE_PATH="/var/lib/elasticsearch/_default"
+    ES_CONF_PATH="/etc/elasticsearch"
+    ES_LOG_PATH="/var/log/elasticsearch/_default"
+fi
+
+ES_DATA_PATH="${ES_BASE_PATH}/data"
+ES_WORK_PATH="${ES_BASE_PATH}/work"
+
+export ES_INCLUDE="/usr/share/elasticsearch/bin/elasticsearch.in.sh"
+export JAVA_OPTS
+export ES_JAVA_OPTS
+export ES_HEAP_SIZE
+export ES_HEAP_NEWSIZE
+export ES_DIRECT_SIZE
+export ES_USE_IPV4
+
+server_command="/usr/share/elasticsearch/bin/elasticsearch"
+server_args=" -p ${PIDFILE} -Des.default.path.conf=\"${ES_CONF_PATH}\" -Des.default.path.data=\"${ES_DATA_PATH}\" -Des.default.path.work=\"${ES_WORK_PATH}\" -Des.default.path.logs=\"${ES_LOG_PATH}\""
+
+depend() {
+    use net
+}
+
+start() {
+    # elasticsearch -Des.config=/path/to/config/file
+    # elasticsearch -Des.network.host=10.0.0.4
+
+    [ ! -f "${ES_INCLUDE}" ] && {
+        eerror "${ES_INCLUDE} must be copied into place"
+        return 1
+    }
+
+    local conf
+    local conf_file
+    for conf in elasticsearch.yml logging.yml; do
+        conf_file="${ES_CONF_PATH}/${conf}"
+        if [ ! -f "${conf_file}" ]; then
+            eerror "${conf_file} must be copied into place"
+            return 1
+        fi
+    done
+
+    ebegin "Starting ${SVCNAME}"
+
+    if [ -n "${ES_MAX_FD}" ]; then
+        ulimit -n ${ES_MAX_FD}
+        einfo "Max open file descriptors: ${ES_MAX_FD}"
+    fi
+
+    checkpath -d -o "${ES_USER}" -m750 "/var/lib/elasticsearch"
+    checkpath -d -o "${ES_USER}" -m750 "/var/log/elasticsearch"
+    checkpath -d -o "${ES_USER}" -m750 "$(dirname "${PIDFILE}")"
+    checkpath -d -o "${ES_USER}" -m750 "${ES_BASE_PATH}"
+    checkpath -d -o "${ES_USER}" -m750 "${ES_LOG_PATH}"
+
+    start-stop-daemon --start \
+        --background \
+        --chdir "${ES_BASE_PATH}" \
+        --user="${ES_USER}" \
+        --pidfile="${PIDFILE}" \
+        --exec ${server_command} -- ${server_args}
+    eend $?
+}
+
+stop() {
+    ebegin "Stopping ${SVCNAME}"
+    start-stop-daemon --stop \
+        --pidfile=${PIDFILE} \
+        --user="${ES_USER}" \
+        --retry=TERM/20/KILL/5
+    eend $?
+}
\ No newline at end of file
--- a/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.systemd.erb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/elasticsearch/templates/etc/init.d/elasticsearch.systemd.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -1,6 +1,8 @@
 [Unit]
 Description=Starts and stops a single elasticsearch instance on this system
 Documentation=http://www.elasticsearch.org
+Wants=network-online.target
+After=network-online.target
 
 [Service]
 Type=forking
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/sysconfig/files/elasticsearch/elasticsearch-es_01	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,6 @@
+CONF_DIR=/etc/elasticsearch/es_01
+ES_GROUP=elasticsearch
+ES_HOME=/usr/share/elasticsearch
+ES_USER=elasticsearch
+LOG_DIR=/var/log/elasticsearch/es_01
+MAX_OPEN_FILES=65535
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/sysconfig/files/elasticsearch/elasticsearch-es_01.service	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,61 @@
+[Unit]
+Description=Elasticsearch es-01
+Documentation=http://www.elastic.co
+Wants=network-online.target
+After=network-online.target
+
+[Service]
+Environment=ES_HOME=/usr/share/elasticsearch
+Environment=CONF_DIR=/etc/elasticsearch
+Environment=DATA_DIR=/var/lib/elasticsearch
+Environment=LOG_DIR=/var/log/elasticsearch
+Environment=PID_DIR=/var/run/elasticsearch
+EnvironmentFile=-/etc/sysconfig/elasticsearch-es_01
+
+WorkingDirectory=/usr/share/elasticsearch
+
+User=elasticsearch
+Group=elasticsearch
+
+ExecStartPre=/usr/share/elasticsearch/bin/elasticsearch-systemd-pre-exec
+
+ExecStart=/usr/share/elasticsearch/bin/elasticsearch \
+                                                -p ${PID_DIR}/elasticsearch-es_01.pid \
+                                                --quiet \
+                                                -Edefault.path.logs=${LOG_DIR} \
+                                                -Edefault.path.data=${DATA_DIR} \
+                                                -Edefault.path.conf=${CONF_DIR}
+
+# StandardOutput is configured to redirect to journalctl since
+# some error messages may be logged in standard output before
+# elasticsearch logging system is initialized. Elasticsearch
+# stores its logs in /var/log/elasticsearch and does not use
+# journalctl by default. If you also want to enable journalctl
+# logging, you can simply remove the "quiet" option from ExecStart.
+StandardOutput=journal
+StandardError=inherit
+
+# Specifies the maximum file descriptor number that can be opened by this process
+LimitNOFILE=65536
+
+# Specifies the maximum number of bytes of memory that may be locked into RAM
+# Set to "infinity" if you use the 'bootstrap.memory_lock: true' option
+# in elasticsearch.yml and 'MAX_LOCKED_MEMORY=unlimited' in /etc/sysconfig/elasticsearch
+#LimitMEMLOCK=infinity
+
+# Disable timeout logic and wait until process is stopped
+TimeoutStopSec=0
+
+# SIGTERM signal is used to stop the Java process
+KillSignal=SIGTERM
+
+# Java process is never killed
+SendSIGKILL=no
+
+# When a JVM receives a SIGTERM signal it exits with code 143
+SuccessExitStatus=143
+
+[Install]
+WantedBy=multi-user.target
+
+# Built for distribution-5.0.0 (distribution)
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/sysconfig/files/elasticsearch/jvm.options	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,100 @@
+## JVM configuration
+
+################################################################
+## IMPORTANT: JVM heap size
+################################################################
+##
+## You should always set the min and max JVM heap
+## size to the same value. For example, to set
+## the heap to 4 GB, set:
+##
+## -Xms4g
+## -Xmx4g
+##
+## See https://www.elastic.co/guide/en/elasticsearch/reference/current/heap-size.html
+## for more information
+##
+################################################################
+
+# Xms represents the initial size of total heap space
+# Xmx represents the maximum size of total heap space
+
+-Xms512m
+-Xmx512m
+
+################################################################
+## Expert settings
+################################################################
+##
+## All settings below this section are considered
+## expert settings. Don't tamper with them unless
+## you understand what you are doing
+##
+################################################################
+
+## GC configuration
+-XX:+UseConcMarkSweepGC
+-XX:CMSInitiatingOccupancyFraction=75
+-XX:+UseCMSInitiatingOccupancyOnly
+
+## optimizations
+
+# disable calls to System#gc
+-XX:+DisableExplicitGC
+
+# pre-touch memory pages used by the JVM during initialization
+-XX:+AlwaysPreTouch
+
+## basic
+
+# force the server VM
+-server
+
+# set to headless, just in case
+-Djava.awt.headless=true
+
+# ensure UTF-8 encoding by default (e.g. filenames)
+-Dfile.encoding=UTF-8
+
+# use our provided JNA always versus the system one
+-Djna.nosys=true
+
+# flags to keep Netty from being unsafe
+-Dio.netty.noUnsafe=true
+-Dio.netty.noKeySetOptimization=true
+
+# log4j 2
+-Dlog4j.shutdownHookEnabled=false
+-Dlog4j2.disable.jmx=true
+-Dlog4j.skipJansi=true
+
+## heap dumps
+
+# generate a heap dump when an allocation from the Java heap fails
+# heap dumps are created in the working directory of the JVM
+-XX:+HeapDumpOnOutOfMemoryError
+
+# specify an alternative path for heap dumps
+# ensure the directory exists and has sufficient space
+#-XX:HeapDumpPath=${heap.dump.path}
+
+## GC logging
+
+#-XX:+PrintGCDetails
+#-XX:+PrintGCTimeStamps
+#-XX:+PrintGCDateStamps
+#-XX:+PrintClassHistogram
+#-XX:+PrintTenuringDistribution
+#-XX:+PrintGCApplicationStoppedTime
+
+# log GC status to a file with time stamps
+# ensure the directory exists
+#-Xloggc:${loggc}
+
+# Elasticsearch 5.0.0 will throw an exception on unquoted field names in JSON.
+# If documents were already indexed with unquoted fields in a previous version
+# of Elasticsearch, some operations may throw errors.
+#
+# WARNING: This option will be removed in Elasticsearch 6.0.0 and is provided
+# only for migration purposes.
+#-Delasticsearch.json.allow_unquoted_field_names=true
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/dev/provisioning/modules/sysconfig/files/elasticsearch/log4j2.properties	Wed Nov 09 15:05:41 2016 +0100
@@ -0,0 +1,74 @@
+status = error
+
+# log action execution errors for easier debugging
+logger.action.name = org.elasticsearch.action
+logger.action.level = debug
+
+appender.console.type = Console
+appender.console.name = console
+appender.console.layout.type = PatternLayout
+appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n
+
+appender.rolling.type = RollingFile
+appender.rolling.name = rolling
+appender.rolling.fileName = ${sys:es.logs}.log
+appender.rolling.layout.type = PatternLayout
+appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.10000m%n
+appender.rolling.filePattern = ${sys:es.logs}-%d{yyyy-MM-dd}.log
+appender.rolling.policies.type = Policies
+appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.rolling.policies.time.interval = 1
+appender.rolling.policies.time.modulate = true
+
+rootLogger.level = info
+rootLogger.appenderRef.console.ref = console
+rootLogger.appenderRef.rolling.ref = rolling
+
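+# deprecation log: size-based rotation (1GB per file, at most 4 gzipped files kept)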
+appender.deprecation_rolling.type = RollingFile
+appender.deprecation_rolling.name = deprecation_rolling
+appender.deprecation_rolling.fileName = ${sys:es.logs}_deprecation.log
+appender.deprecation_rolling.layout.type = PatternLayout
+appender.deprecation_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%.10000m%n
+appender.deprecation_rolling.filePattern = ${sys:es.logs}_deprecation-%i.log.gz
+appender.deprecation_rolling.policies.type = Policies
+appender.deprecation_rolling.policies.size.type = SizeBasedTriggeringPolicy
+appender.deprecation_rolling.policies.size.size = 1GB
+appender.deprecation_rolling.strategy.type = DefaultRolloverStrategy
+appender.deprecation_rolling.strategy.max = 4
+
+logger.deprecation.name = org.elasticsearch.deprecation
+logger.deprecation.level = warn
+logger.deprecation.appenderRef.deprecation_rolling.ref = deprecation_rolling
+logger.deprecation.additivity = false
+
+appender.index_search_slowlog_rolling.type = RollingFile
+appender.index_search_slowlog_rolling.name = index_search_slowlog_rolling
+appender.index_search_slowlog_rolling.fileName = ${sys:es.logs}_index_search_slowlog.log
+appender.index_search_slowlog_rolling.layout.type = PatternLayout
+appender.index_search_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.10000m%n
+appender.index_search_slowlog_rolling.filePattern = ${sys:es.logs}_index_search_slowlog-%d{yyyy-MM-dd}.log
+appender.index_search_slowlog_rolling.policies.type = Policies
+appender.index_search_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.index_search_slowlog_rolling.policies.time.interval = 1
+appender.index_search_slowlog_rolling.policies.time.modulate = true
+
+logger.index_search_slowlog_rolling.name = index.search.slowlog
+logger.index_search_slowlog_rolling.level = trace
+logger.index_search_slowlog_rolling.appenderRef.index_search_slowlog_rolling.ref = index_search_slowlog_rolling
+logger.index_search_slowlog_rolling.additivity = false
+
+appender.index_indexing_slowlog_rolling.type = RollingFile
+appender.index_indexing_slowlog_rolling.name = index_indexing_slowlog_rolling
+appender.index_indexing_slowlog_rolling.fileName = ${sys:es.logs}_index_indexing_slowlog.log
+appender.index_indexing_slowlog_rolling.layout.type = PatternLayout
+appender.index_indexing_slowlog_rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %marker%.10000m%n
+appender.index_indexing_slowlog_rolling.filePattern = ${sys:es.logs}_index_indexing_slowlog-%d{yyyy-MM-dd}.log
+appender.index_indexing_slowlog_rolling.policies.type = Policies
+appender.index_indexing_slowlog_rolling.policies.time.type = TimeBasedTriggeringPolicy
+appender.index_indexing_slowlog_rolling.policies.time.interval = 1
+appender.index_indexing_slowlog_rolling.policies.time.modulate = true
+
+logger.index_indexing_slowlog.name = index.indexing.slowlog.index
+logger.index_indexing_slowlog.level = trace
+logger.index_indexing_slowlog.appenderRef.index_indexing_slowlog_rolling.ref = index_indexing_slowlog_rolling
+logger.index_indexing_slowlog.additivity = false
--- a/dev/provisioning/modules/sysconfig/manifests/elasticsearch.pp	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/sysconfig/manifests/elasticsearch.pp	Wed Nov 09 15:05:41 2016 +0100
@@ -10,7 +10,43 @@
     class { '::elasticsearch':
       manage_repo  => true,
       autoupgrade => true,
-      repo_version => '2.x',
+      repo_version => '5.x',
     }->
-    elasticsearch::instance { $es_instance: }
+    elasticsearch::instance { $es_instance: }->
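+    # Elasticsearch 5.x reads jvm.options and log4j2.properties from the
+    # instance config directory; remove the old logging.xml and install the
+    # per-instance sysconfig and systemd unit.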
+    file { "${es_instance} jvm.options":
+        path   => "/etc/elasticsearch/${es_instance}/jvm.options",
+        source => 'puppet:///modules/sysconfig/elasticsearch/jvm.options',
+        owner  => 'root',
+        group  => 'elasticsearch',
+        mode   => '0750',
+    } ->
+    file { "${es_instance} log4j2.properties":
+        path   => "/etc/elasticsearch/${es_instance}/log4j2.properties",
+        source => 'puppet:///modules/sysconfig/elasticsearch/log4j2.properties',
+        owner  => 'root',
+        group  => 'elasticsearch',
+        mode   => '0750',
+    } ->
+    file { "${es_instance} logging.xml":
+        ensure => absent,
+        path   => "/etc/elasticsearch/${es_instance}/logging.xml",
+    } ->
+    file { "${es_instance} sysconfig":
+        path   => "/etc/sysconfig/elasticsearch-${es_instance}",
+        source => "puppet:///modules/sysconfig/elasticsearch/elasticsearch-${es_instance}",
+        owner  => 'root',
+        group  => 'elasticsearch',
+        mode   => '0750',
+    } ->
+    file { "${es_instance} service file":
+        path   => "/usr/lib/systemd/system/elasticsearch-${es_instance}.service",
+        source => "puppet:///modules/sysconfig/elasticsearch/elasticsearch-${es_instance}.service",
+        owner  => 'root',
+        group  => 'root',
+        mode   => '0750',
+    } ~>
+    Exec['systemctl-daemon-reload']
+
 }
--- a/dev/provisioning/modules/sysconfig/templates/corpus/corpus_env.conf.erb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/sysconfig/templates/corpus/corpus_env.conf.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -2,6 +2,7 @@
 SetEnv APP_DEBUG false
 SetEnv APP_KEY <%= @corpus_app_key %>
 SetEnv APP_URL http://<%= @vhost %>/corpus-back/
+SetEnv APP_LOG daily
 
 SetEnv DATABASE_DRIVER mysql
 SetEnv DB_HOST <%= @db_host %>
--- a/dev/provisioning/modules/sysconfig/templates/corpus/local.env.erb	Tue Nov 08 18:23:01 2016 +0100
+++ b/dev/provisioning/modules/sysconfig/templates/corpus/local.env.erb	Wed Nov 09 15:05:41 2016 +0100
@@ -2,6 +2,7 @@
 APP_DEBUG=true
 APP_KEY=<%= @corpus_app_key %>
 APP_URL=http://<%= @vhost %>/corpus-back/
+APP_LOG="daily"
 
 DATABASE_DRIVER=mysql
 DB_HOST=<%= @db_host %>
--- a/server/bo_client/.gitignore	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/bo_client/.gitignore	Wed Nov 09 15:05:41 2016 +0100
@@ -13,5 +13,5 @@
 /connect.lock
 /coverage/*
 /libpeerconnection.log
-npm-debug.log
+npm-debug.log*
 testem.log
--- a/server/bo_client/app/app.js	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/bo_client/app/app.js	Wed Nov 09 15:05:41 2016 +0100
@@ -3,9 +3,11 @@
 import loadInitializers from 'ember-load-initializers';
 import config from './config/environment';
 
+let App;
+
 Ember.MODEL_FACTORY_INJECTIONS = true;
 
-const App = Ember.Application.extend({
+App = Ember.Application.extend({
   modulePrefix: config.modulePrefix,
   podModulePrefix: config.podModulePrefix,
   Resolver,
--- a/server/bo_client/bower.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/bo_client/bower.json	Wed Nov 09 15:05:41 2016 +0100
@@ -1,12 +1,9 @@
 {
   "name": "bo-client",
   "dependencies": {
-    "ember": "~2.8.0-beta.1",
-    "ember-cli-shims": "0.1.1",
-    "ember-qunit-notifications": "0.1.0",
+    "ember": "~2.9.0",
+    "ember-cli-shims": "0.1.3",
     "jquery": "^2.2",
-    "loader.js": "^4.0.10",
-    "qunit": "~1.20.0",
     "bootstrap-sass": "bootstrap-sass-official#~3.3.7",
     "font-awesome": "~4.6.3",
     "typeahead.js": "~0.11.1",
--- a/server/bo_client/package.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/bo_client/package.json	Wed Nov 09 15:05:41 2016 +0100
@@ -16,34 +16,34 @@
   },
   "repository": "",
   "engines": {
-    "node": ">= 0.10.0"
+    "node": ">= 0.12.0"
   },
   "author": "",
   "license": "MIT",
   "devDependencies": {
-    "bower": "^1.7.9",
-    "broccoli-asset-rev": "^2.4.6",
+    "bower": "^1.8.0",
+    "broccoli-asset-rev": "^2.4.5",
     "broccoli-funnel": "^1.0.6",
     "broccoli-merge-trees": "^1.1.4",
     "broccoli-static-compiler": "^0.2.2",
     "corpus-common-addon": "file:../../common/corpus-common-addon",
-    "ember-ajax": "^2.0.1",
+    "ember-ajax": "^2.4.1",
     "ember-aupac-typeahead": "IRI-Research/ember-aupac-typeahead#ember_2.8",
-    "ember-cli": "^2.8.0",
-    "ember-cli-app-version": "^1.0.0",
-    "ember-cli-babel": "^5.1.6",
-    "ember-cli-dependency-checker": "^1.2.0",
-    "ember-cli-htmlbars": "^1.0.3",
-    "ember-cli-htmlbars-inline-precompile": "^0.3.1",
-    "ember-cli-inject-live-reload": "^1.4.0",
-    "ember-cli-jshint": "^1.0.0",
-    "ember-cli-qunit": "^2.1.0",
+    "ember-cli": "2.9.1",
+    "ember-cli-app-version": "^2.0.0",
+    "ember-cli-babel": "^5.1.7",
+    "ember-cli-dependency-checker": "^1.3.0",
+    "ember-cli-htmlbars": "^1.0.10",
+    "ember-cli-htmlbars-inline-precompile": "^0.3.3",
+    "ember-cli-inject-live-reload": "^1.4.1",
+    "ember-cli-jshint": "^1.0.4",
+    "ember-cli-qunit": "^3.0.1",
     "ember-cli-release": "^0.2.9",
     "ember-cli-sass": "5.5.0",
     "ember-cli-sri": "^2.1.0",
     "ember-cli-test-loader": "^1.1.0",
     "ember-cli-uglify": "^1.2.0",
-    "ember-data": "^2.8.0-beta.1",
+    "ember-data": "^2.9.0",
     "ember-disable-proxy-controllers": "^1.0.1",
     "ember-export-application-global": "^1.0.5",
     "ember-font-awesome": "martndemus/ember-font-awesome#pull/91/head",
@@ -59,7 +59,7 @@
     "express": "^4.14.0",
     "glob": "^7.0.3",
     "http-proxy": "^1.13.2",
-    "loader.js": "^4.0.1",
+    "loader.js": "^4.0.10",
     "lodash": "^4.7.0",
     "morgan": "^1.7.0",
     "npm-check-updates": "^2.6.1",
--- a/server/bo_client/tests/index.html	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/bo_client/tests/index.html	Wed Nov 09 15:05:41 2016 +0100
@@ -21,7 +21,7 @@
     {{content-for "body"}}
     {{content-for "test-body"}}
 
-    <script src="{{rootURL}}testem.js" integrity=""></script>
+    <script src="/testem.js" integrity=""></script>
     <script src="{{rootURL}}assets/vendor.js"></script>
     <script src="{{rootURL}}assets/test-support.js"></script>
     <script src="{{rootURL}}assets/bo-client.js"></script>
--- a/server/src/.env.example	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/.env.example	Wed Nov 09 15:05:41 2016 +0100
@@ -2,6 +2,8 @@
 APP_DEBUG=true
 APP_KEY=SomeRandomString
 APP_URL=http://corpus-parole.local
+APP_LOG="daily"
+# Available Settings: "single", "daily", "syslog", "errorlog"
 
 DATABASE_DRIVER=pgsql
 DB_HOST=localhost
--- a/server/src/app/Console/Commands/IndexDocuments.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/app/Console/Commands/IndexDocuments.php	Wed Nov 09 15:05:41 2016 +0100
@@ -74,7 +74,7 @@
     private function resetIndex()
     {
         $indexParams = [
-            'index' => env('ELASTICSEARCH_INDEX')
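+            // read the index name via config() so it survives config caching
+            // (env() may return null once "php artisan config:cache" has run)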
+            'index' => config('elasticsearch.index')
         ];
         if(Es::indices()->exists($indexParams)){
             $response = Es::indices()->delete($indexParams);
@@ -600,7 +600,7 @@
             $this->comment('Index reset!');
         }
         else{
-            $this->error('Error resetting index ' . env('ELASTICSEARCH_INDEX'));
+            $this->error('Error resetting index ' . config('elasticsearch.index'));
         }
 
         $this->info('Indexing documents...');
--- a/server/src/app/Console/Commands/ManageHandles.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/app/Console/Commands/ManageHandles.php	Wed Nov 09 15:05:41 2016 +0100
@@ -47,7 +47,7 @@
     }
 
     private function registerHandle($doc) {
-        $this->handleClient->createHandleUrlRecord($doc->getId(), env('APP_URL')."/docs/".$doc->getId());
+        $this->handleClient->createHandleUrlRecord($doc->getId(), config('app.url')."/docs/".$doc->getId());
     }
 
     /**
--- a/server/src/app/Http/Controllers/Api/DateStatsController.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/app/Http/Controllers/Api/DateStatsController.php	Wed Nov 09 15:05:41 2016 +0100
@@ -29,7 +29,7 @@
         $query = $filterManager->buildQuery($qFilterParts);
 
         $esQuery = [
-            'index' => env('ELASTICSEARCH_INDEX'),
+            'index' => config('elasticsearch.index'),
             'body' => [
                 "size" => 0,
                 "query" => $query,
--- a/server/src/app/Http/Controllers/Api/DiscourseController.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/app/Http/Controllers/Api/DiscourseController.php	Wed Nov 09 15:05:41 2016 +0100
@@ -30,7 +30,7 @@
 
 
         $esQuery = [
-            'index' => env('ELASTICSEARCH_INDEX'),
+            'index' => config('elasticsearch.index'),
             'body' => [
                 "size" => 0,
                 "query" => $query,
--- a/server/src/app/Http/Controllers/Api/GeoStatsController.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/app/Http/Controllers/Api/GeoStatsController.php	Wed Nov 09 15:05:41 2016 +0100
@@ -40,7 +40,7 @@
         $query = $filterManager->buildQuery($qFilterParts);
 
         $queryES = [
-            'index' => env('ELASTICSEARCH_INDEX'),
+            'index' => config('elasticsearch.index'),
             'body' => [
                 "size" => 0,
                 "query" => $query,
--- a/server/src/app/Http/Controllers/Api/LanguageController.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/app/Http/Controllers/Api/LanguageController.php	Wed Nov 09 15:05:41 2016 +0100
@@ -25,7 +25,7 @@
         $query = $filterManager->buildQuery($qFilterParts);
 
         $esQuery = [
-            'index' => env('ELASTICSEARCH_INDEX'),
+            'index' => config('elasticsearch.index'),
             'body' => [
                 "size" => 0,
                 "query" => $query,
--- a/server/src/app/Http/Controllers/Api/ThemeController.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/app/Http/Controllers/Api/ThemeController.php	Wed Nov 09 15:05:41 2016 +0100
@@ -79,7 +79,7 @@
 
 
         $esQuery = [
-            'index' => env('ELASTICSEARCH_INDEX'),
+            'index' => config('elasticsearch.index'),
             'body' => [
                 'size' => 0,
                 'query' => $query,
--- a/server/src/composer.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/composer.json	Wed Nov 09 15:05:41 2016 +0100
@@ -8,6 +8,10 @@
       {
         "type": "vcs",
         "url": "https://github.com/phpseclib/phpseclib"
+      },
+      {
+        "type": "vcs",
+        "url": "https://github.com/IRI-Research/laravel-elasticsearch"
       }
     ],
     "require": {
@@ -18,7 +22,7 @@
         "caseyamcl/phpoaipmh": "~2.5.1",
         "guzzlehttp/guzzle":   ">=6.2",
         "laravelcollective/html": "5.3.*",
-        "shift31/laravel-elasticsearch": "~2.0",
+        "shift31/laravel-elasticsearch": "dev-master",
         "asm89/stack-cors": "~1.0",
         "barryvdh/laravel-cors": "^0.8.2",
         "phpseclib/phpseclib": "^2.0"
--- a/server/src/composer.lock	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/composer.lock	Wed Nov 09 15:05:41 2016 +0100
@@ -4,8 +4,8 @@
         "Read more about it at https://getcomposer.org/doc/01-basic-usage.md#composer-lock-the-lock-file",
         "This file is @generated automatically"
     ],
-    "hash": "467ec25509e20e90b276dcb351305854",
-    "content-hash": "29590f4eb7835ca58810297492ba993a",
+    "hash": "554fa49e9a9e82f8b48aead49cc49173",
+    "content-hash": "e9c464a2b6fcc93a4a3fdc6b09494ac9",
     "packages": [
         {
             "name": "asm89/stack-cors",
@@ -385,30 +385,31 @@
         },
         {
             "name": "elasticsearch/elasticsearch",
-            "version": "v2.2.1",
+            "version": "v5.0.0",
             "source": {
                 "type": "git",
                 "url": "https://github.com/elastic/elasticsearch-php.git",
-                "reference": "7b34186a58730d0a8963741bd62fa5ab45658ada"
+                "reference": "30be1c0367c865eec5ca0a9c09ce293869da7665"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/elastic/elasticsearch-php/zipball/7b34186a58730d0a8963741bd62fa5ab45658ada",
-                "reference": "7b34186a58730d0a8963741bd62fa5ab45658ada",
+                "url": "https://api.github.com/repos/elastic/elasticsearch-php/zipball/30be1c0367c865eec5ca0a9c09ce293869da7665",
+                "reference": "30be1c0367c865eec5ca0a9c09ce293869da7665",
                 "shasum": ""
             },
             "require": {
                 "guzzlehttp/ringphp": "~1.0",
-                "php": ">=5.4",
+                "php": "^5.6.6|^7.0",
                 "psr/log": "~1.0"
             },
             "require-dev": {
                 "cpliakas/git-wrapper": "~1.0",
+                "doctrine/inflector": "^1.1",
                 "mockery/mockery": "0.9.4",
-                "phpunit/phpunit": "~4.7",
+                "phpunit/phpunit": "^4.7|^5.4",
                 "sami/sami": "~3.2",
-                "symfony/yaml": "2.4.3 as 2.4.2",
-                "twig/twig": "1.*"
+                "symfony/finder": "^2.8",
+                "symfony/yaml": "^2.8"
             },
             "suggest": {
                 "ext-curl": "*",
@@ -435,20 +436,20 @@
                 "elasticsearch",
                 "search"
             ],
-            "time": "2016-07-14 14:13:40"
+            "time": "2016-10-26 14:35:32"
         },
         {
             "name": "guzzlehttp/guzzle",
-            "version": "6.2.1",
+            "version": "6.2.2",
             "source": {
                 "type": "git",
                 "url": "https://github.com/guzzle/guzzle.git",
-                "reference": "3f808fba627f2c5b69e2501217bf31af349c1427"
+                "reference": "ebf29dee597f02f09f4d5bbecc68230ea9b08f60"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/guzzle/guzzle/zipball/3f808fba627f2c5b69e2501217bf31af349c1427",
-                "reference": "3f808fba627f2c5b69e2501217bf31af349c1427",
+                "url": "https://api.github.com/repos/guzzle/guzzle/zipball/ebf29dee597f02f09f4d5bbecc68230ea9b08f60",
+                "reference": "ebf29dee597f02f09f4d5bbecc68230ea9b08f60",
                 "shasum": ""
             },
             "require": {
@@ -497,7 +498,7 @@
                 "rest",
                 "web service"
             ],
-            "time": "2016-07-15 17:22:37"
+            "time": "2016-10-08 15:01:37"
         },
         {
             "name": "guzzlehttp/promises",
@@ -856,16 +857,16 @@
         },
         {
             "name": "laravel/framework",
-            "version": "v5.3.10",
+            "version": "v5.3.22",
             "source": {
                 "type": "git",
                 "url": "https://github.com/laravel/framework.git",
-                "reference": "6febb0ee61999cde3bc7b2963c8903032bb22691"
+                "reference": "715328dd4fb1a7bcf3cb41b3472f9e3499d068fc"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/laravel/framework/zipball/6febb0ee61999cde3bc7b2963c8903032bb22691",
-                "reference": "6febb0ee61999cde3bc7b2963c8903032bb22691",
+                "url": "https://api.github.com/repos/laravel/framework/zipball/715328dd4fb1a7bcf3cb41b3472f9e3499d068fc",
+                "reference": "715328dd4fb1a7bcf3cb41b3472f9e3499d068fc",
                 "shasum": ""
             },
             "require": {
@@ -947,7 +948,7 @@
                 "pusher/pusher-php-server": "Required to use the Pusher broadcast driver (~2.0).",
                 "symfony/css-selector": "Required to use some of the crawler integration testing tools (3.1.*).",
                 "symfony/dom-crawler": "Required to use most of the crawler integration testing tools (3.1.*).",
-                "symfony/psr-http-message-bridge": "Required to psr7 bridging features (0.2.*)."
+                "symfony/psr-http-message-bridge": "Required to use psr7 bridging features (0.2.*)."
             },
             "type": "library",
             "extra": {
@@ -980,7 +981,7 @@
                 "framework",
                 "laravel"
             ],
-            "time": "2016-09-20 13:46:16"
+            "time": "2016-11-01 18:52:00"
         },
         {
             "name": "laravelcollective/html",
@@ -1038,20 +1039,20 @@
         },
         {
             "name": "league/flysystem",
-            "version": "1.0.27",
+            "version": "1.0.32",
             "source": {
                 "type": "git",
                 "url": "https://github.com/thephpleague/flysystem.git",
-                "reference": "50e2045ed70a7e75a5e30bc3662904f3b67af8a9"
+                "reference": "1b5c4a0031697f46e779a9d1b309c2e1b24daeab"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/thephpleague/flysystem/zipball/50e2045ed70a7e75a5e30bc3662904f3b67af8a9",
-                "reference": "50e2045ed70a7e75a5e30bc3662904f3b67af8a9",
+                "url": "https://api.github.com/repos/thephpleague/flysystem/zipball/1b5c4a0031697f46e779a9d1b309c2e1b24daeab",
+                "reference": "1b5c4a0031697f46e779a9d1b309c2e1b24daeab",
                 "shasum": ""
             },
             "require": {
-                "php": ">=5.4.0"
+                "php": ">=5.5.9"
             },
             "conflict": {
                 "league/flysystem-sftp": "<1.0.6"
@@ -1117,7 +1118,7 @@
                 "sftp",
                 "storage"
             ],
-            "time": "2016-08-10 08:55:11"
+            "time": "2016-10-19 20:38:46"
         },
         {
             "name": "ml/iri",
@@ -1168,17 +1169,17 @@
         },
         {
             "name": "ml/json-ld",
-            "version": "1.0.5",
+            "version": "1.0.7",
             "target-dir": "ML/JsonLD",
             "source": {
                 "type": "git",
                 "url": "https://github.com/lanthaler/JsonLD.git",
-                "reference": "2f7f00a9daed844289135cc1cc99a75fc72a5438"
+                "reference": "f9dfe184f0da9ce0e0ffdddabf6d93d01787ac91"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/lanthaler/JsonLD/zipball/2f7f00a9daed844289135cc1cc99a75fc72a5438",
-                "reference": "2f7f00a9daed844289135cc1cc99a75fc72a5438",
+                "url": "https://api.github.com/repos/lanthaler/JsonLD/zipball/f9dfe184f0da9ce0e0ffdddabf6d93d01787ac91",
+                "reference": "f9dfe184f0da9ce0e0ffdddabf6d93d01787ac91",
                 "shasum": ""
             },
             "require": {
@@ -1213,7 +1214,7 @@
                 "JSON-LD",
                 "jsonld"
             ],
-            "time": "2016-01-17 17:39:22"
+            "time": "2016-10-10 08:57:56"
         },
         {
             "name": "monolog/monolog",
@@ -1437,16 +1438,16 @@
         },
         {
             "name": "paragonie/random_compat",
-            "version": "v2.0.2",
+            "version": "v2.0.4",
             "source": {
                 "type": "git",
                 "url": "https://github.com/paragonie/random_compat.git",
-                "reference": "088c04e2f261c33bed6ca5245491cfca69195ccf"
+                "reference": "a9b97968bcde1c4de2a5ec6cbd06a0f6c919b46e"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/paragonie/random_compat/zipball/088c04e2f261c33bed6ca5245491cfca69195ccf",
-                "reference": "088c04e2f261c33bed6ca5245491cfca69195ccf",
+                "url": "https://api.github.com/repos/paragonie/random_compat/zipball/a9b97968bcde1c4de2a5ec6cbd06a0f6c919b46e",
+                "reference": "a9b97968bcde1c4de2a5ec6cbd06a0f6c919b46e",
                 "shasum": ""
             },
             "require": {
@@ -1481,20 +1482,20 @@
                 "pseudorandom",
                 "random"
             ],
-            "time": "2016-04-03 06:00:07"
+            "time": "2016-11-07 23:38:38"
         },
         {
             "name": "phpseclib/phpseclib",
-            "version": "2.0.3",
+            "version": "2.0.4",
             "source": {
                 "type": "git",
                 "url": "https://github.com/phpseclib/phpseclib.git",
-                "reference": "41f85e9c2582b3f6d1b7d20395fb40c687ad5370"
+                "reference": "ab8028c93c03cc8d9c824efa75dc94f1db2369bf"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/phpseclib/phpseclib/zipball/41f85e9c2582b3f6d1b7d20395fb40c687ad5370",
-                "reference": "41f85e9c2582b3f6d1b7d20395fb40c687ad5370",
+                "url": "https://api.github.com/repos/phpseclib/phpseclib/zipball/ab8028c93c03cc8d9c824efa75dc94f1db2369bf",
+                "reference": "ab8028c93c03cc8d9c824efa75dc94f1db2369bf",
                 "shasum": ""
             },
             "require": {
@@ -1573,10 +1574,10 @@
                 "x509"
             ],
             "support": {
-                "source": "https://github.com/phpseclib/phpseclib/tree/2.0.3",
+                "source": "https://github.com/phpseclib/phpseclib/tree/2.0.4",
                 "issues": "https://github.com/phpseclib/phpseclib/issues"
             },
-            "time": "2016-08-18 18:49:14"
+            "time": "2016-10-04 00:57:04"
         },
         {
             "name": "psr/http-message",
@@ -1630,16 +1631,16 @@
         },
         {
             "name": "psr/log",
-            "version": "1.0.1",
+            "version": "1.0.2",
             "source": {
                 "type": "git",
                 "url": "https://github.com/php-fig/log.git",
-                "reference": "5277094ed527a1c4477177d102fe4c53551953e0"
+                "reference": "4ebe3a8bf773a19edfe0a84b6585ba3d401b724d"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/php-fig/log/zipball/5277094ed527a1c4477177d102fe4c53551953e0",
-                "reference": "5277094ed527a1c4477177d102fe4c53551953e0",
+                "url": "https://api.github.com/repos/php-fig/log/zipball/4ebe3a8bf773a19edfe0a84b6585ba3d401b724d",
+                "reference": "4ebe3a8bf773a19edfe0a84b6585ba3d401b724d",
                 "shasum": ""
             },
             "require": {
@@ -1673,7 +1674,7 @@
                 "psr",
                 "psr-3"
             ],
-            "time": "2016-09-19 16:02:08"
+            "time": "2016-10-10 12:19:37"
         },
         {
             "name": "psy/psysh",
@@ -1749,16 +1750,16 @@
         },
         {
             "name": "ramsey/uuid",
-            "version": "3.5.0",
+            "version": "3.5.1",
             "source": {
                 "type": "git",
                 "url": "https://github.com/ramsey/uuid.git",
-                "reference": "a6d15c8618ea3951fd54d34e326b68d3d0bc0786"
+                "reference": "a07797b986671b0dc823885a81d5e3516b931599"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/ramsey/uuid/zipball/a6d15c8618ea3951fd54d34e326b68d3d0bc0786",
-                "reference": "a6d15c8618ea3951fd54d34e326b68d3d0bc0786",
+                "url": "https://api.github.com/repos/ramsey/uuid/zipball/a07797b986671b0dc823885a81d5e3516b931599",
+                "reference": "a07797b986671b0dc823885a81d5e3516b931599",
                 "shasum": ""
             },
             "require": {
@@ -1825,7 +1826,7 @@
                 "identifier",
                 "uuid"
             ],
-            "time": "2016-08-02 18:39:32"
+            "time": "2016-10-02 15:51:17"
         },
         {
             "name": "react/promise",
@@ -1873,20 +1874,20 @@
         },
         {
             "name": "shift31/laravel-elasticsearch",
-            "version": "2.0.3",
+            "version": "dev-master",
             "source": {
                 "type": "git",
-                "url": "https://github.com/shift31/laravel-elasticsearch.git",
-                "reference": "d3488ce48663db83bce4ae82ff6266d3e0255a0d"
+                "url": "https://github.com/IRI-Research/laravel-elasticsearch.git",
+                "reference": "64d0525987a2a465ca6f069f71a9490ede68652e"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/shift31/laravel-elasticsearch/zipball/d3488ce48663db83bce4ae82ff6266d3e0255a0d",
-                "reference": "d3488ce48663db83bce4ae82ff6266d3e0255a0d",
+                "url": "https://api.github.com/repos/IRI-Research/laravel-elasticsearch/zipball/64d0525987a2a465ca6f069f71a9490ede68652e",
+                "reference": "64d0525987a2a465ca6f069f71a9490ede68652e",
                 "shasum": ""
             },
             "require": {
-                "elasticsearch/elasticsearch": "~2.0",
+                "elasticsearch/elasticsearch": "~5.0",
                 "illuminate/support": "~4|~5",
                 "php": ">=5.4.0"
             },
@@ -1902,7 +1903,6 @@
                     "Shift31\\LaravelElasticsearch": "src/"
                 }
             },
-            "notification-url": "https://packagist.org/downloads/",
             "authors": [
                 {
                     "name": "Shift 31 Consulting",
@@ -1910,7 +1910,10 @@
                 }
             ],
             "description": "A Laravel Service Provider for the Elasticsearch API client",
-            "time": "2016-08-14 19:53:20"
+            "support": {
+                "source": "https://github.com/IRI-Research/laravel-elasticsearch/tree/master"
+            },
+            "time": "2016-11-08 22:22:17"
         },
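[Note on the shift31/laravel-elasticsearch hunks above: the lock entry now resolves to the IRI-Research fork on its dev-master branch, which requires elasticsearch/elasticsearch ~5.0 instead of ~2.0. The root composer.json is not part of this changeset, so the exact declaration is an assumption; a fork like this is typically wired in through a vcs repository entry plus a branch constraint, roughly:

    {
        "repositories": [
            {
                "type": "vcs",
                "url": "https://github.com/IRI-Research/laravel-elasticsearch.git"
            }
        ],
        "require": {
            "shift31/laravel-elasticsearch": "dev-master"
        }
    }

Composer keeps the original shift31/laravel-elasticsearch name because the fork's own composer.json still declares it, which is why the entry's name is unchanged while its source and dist URLs now point at IRI-Research.]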
         {
             "name": "swiftmailer/swiftmailer",
@@ -1967,20 +1970,21 @@
         },
         {
             "name": "symfony/console",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/console.git",
-                "reference": "8ea494c34f0f772c3954b5fbe00bffc5a435e563"
+                "reference": "c99da1119ae61e15de0e4829196b9fba6f73d065"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/console/zipball/8ea494c34f0f772c3954b5fbe00bffc5a435e563",
-                "reference": "8ea494c34f0f772c3954b5fbe00bffc5a435e563",
+                "url": "https://api.github.com/repos/symfony/console/zipball/c99da1119ae61e15de0e4829196b9fba6f73d065",
+                "reference": "c99da1119ae61e15de0e4829196b9fba6f73d065",
                 "shasum": ""
             },
             "require": {
                 "php": ">=5.5.9",
+                "symfony/debug": "~2.8|~3.0",
                 "symfony/polyfill-mbstring": "~1.0"
             },
             "require-dev": {
@@ -2023,20 +2027,20 @@
             ],
             "description": "Symfony Console Component",
             "homepage": "https://symfony.com",
-            "time": "2016-08-19 06:48:39"
+            "time": "2016-10-06 01:44:51"
         },
         {
             "name": "symfony/debug",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/debug.git",
-                "reference": "34f6ac18c2974ca5fce68adf419ee7d15def6f11"
+                "reference": "e2b3f74a67fc928adc3c1b9027f73e1bc01190a8"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/debug/zipball/34f6ac18c2974ca5fce68adf419ee7d15def6f11",
-                "reference": "34f6ac18c2974ca5fce68adf419ee7d15def6f11",
+                "url": "https://api.github.com/repos/symfony/debug/zipball/e2b3f74a67fc928adc3c1b9027f73e1bc01190a8",
+                "reference": "e2b3f74a67fc928adc3c1b9027f73e1bc01190a8",
                 "shasum": ""
             },
             "require": {
@@ -2080,20 +2084,20 @@
             ],
             "description": "Symfony Debug Component",
             "homepage": "https://symfony.com",
-            "time": "2016-08-23 13:39:15"
+            "time": "2016-09-06 11:02:40"
         },
         {
             "name": "symfony/event-dispatcher",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/event-dispatcher.git",
-                "reference": "c0c00c80b3a69132c4e55c3e7db32b4a387615e5"
+                "reference": "28b0832b2553ffb80cabef6a7a812ff1e670c0bc"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/event-dispatcher/zipball/c0c00c80b3a69132c4e55c3e7db32b4a387615e5",
-                "reference": "c0c00c80b3a69132c4e55c3e7db32b4a387615e5",
+                "url": "https://api.github.com/repos/symfony/event-dispatcher/zipball/28b0832b2553ffb80cabef6a7a812ff1e670c0bc",
+                "reference": "28b0832b2553ffb80cabef6a7a812ff1e670c0bc",
                 "shasum": ""
             },
             "require": {
@@ -2140,20 +2144,20 @@
             ],
             "description": "Symfony EventDispatcher Component",
             "homepage": "https://symfony.com",
-            "time": "2016-07-19 10:45:57"
+            "time": "2016-10-13 06:28:43"
         },
         {
             "name": "symfony/finder",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/finder.git",
-                "reference": "e568ef1784f447a0e54dcb6f6de30b9747b0f577"
+                "reference": "205b5ffbb518a98ba2ae60a52656c4a31ab00c6f"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/finder/zipball/e568ef1784f447a0e54dcb6f6de30b9747b0f577",
-                "reference": "e568ef1784f447a0e54dcb6f6de30b9747b0f577",
+                "url": "https://api.github.com/repos/symfony/finder/zipball/205b5ffbb518a98ba2ae60a52656c4a31ab00c6f",
+                "reference": "205b5ffbb518a98ba2ae60a52656c4a31ab00c6f",
                 "shasum": ""
             },
             "require": {
@@ -2189,20 +2193,20 @@
             ],
             "description": "Symfony Finder Component",
             "homepage": "https://symfony.com",
-            "time": "2016-08-26 12:04:02"
+            "time": "2016-09-28 00:11:12"
         },
         {
             "name": "symfony/http-foundation",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/http-foundation.git",
-                "reference": "63592e00fd90632b57ee50220a1ddb29b6bf3bb4"
+                "reference": "f21e5a8b88274b7720779aa88f9c02c6d6ec08d7"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/http-foundation/zipball/63592e00fd90632b57ee50220a1ddb29b6bf3bb4",
-                "reference": "63592e00fd90632b57ee50220a1ddb29b6bf3bb4",
+                "url": "https://api.github.com/repos/symfony/http-foundation/zipball/f21e5a8b88274b7720779aa88f9c02c6d6ec08d7",
+                "reference": "f21e5a8b88274b7720779aa88f9c02c6d6ec08d7",
                 "shasum": ""
             },
             "require": {
@@ -2242,20 +2246,20 @@
             ],
             "description": "Symfony HttpFoundation Component",
             "homepage": "https://symfony.com",
-            "time": "2016-08-22 12:11:19"
+            "time": "2016-10-24 15:52:44"
         },
         {
             "name": "symfony/http-kernel",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/http-kernel.git",
-                "reference": "aeda215d6b01f119508c090d2a09ebb5b0bc61f3"
+                "reference": "c235f1b13ba67012e283996a5427f22e2e04be14"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/http-kernel/zipball/aeda215d6b01f119508c090d2a09ebb5b0bc61f3",
-                "reference": "aeda215d6b01f119508c090d2a09ebb5b0bc61f3",
+                "url": "https://api.github.com/repos/symfony/http-kernel/zipball/c235f1b13ba67012e283996a5427f22e2e04be14",
+                "reference": "c235f1b13ba67012e283996a5427f22e2e04be14",
                 "shasum": ""
             },
             "require": {
@@ -2263,7 +2267,7 @@
                 "psr/log": "~1.0",
                 "symfony/debug": "~2.8|~3.0",
                 "symfony/event-dispatcher": "~2.8|~3.0",
-                "symfony/http-foundation": "~2.8.8|~3.0.8|~3.1.2|~3.2"
+                "symfony/http-foundation": "~2.8.13|~3.1.6|~3.2"
             },
             "conflict": {
                 "symfony/config": "<2.8"
@@ -2324,7 +2328,7 @@
             ],
             "description": "Symfony HttpKernel Component",
             "homepage": "https://symfony.com",
-            "time": "2016-09-03 15:28:24"
+            "time": "2016-10-27 02:38:31"
         },
         {
             "name": "symfony/polyfill-mbstring",
@@ -2495,16 +2499,16 @@
         },
         {
             "name": "symfony/process",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/process.git",
-                "reference": "e64e93041c80e77197ace5ab9385dedb5a143697"
+                "reference": "66de154ae86b1a07001da9fbffd620206e4faf94"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/process/zipball/e64e93041c80e77197ace5ab9385dedb5a143697",
-                "reference": "e64e93041c80e77197ace5ab9385dedb5a143697",
+                "url": "https://api.github.com/repos/symfony/process/zipball/66de154ae86b1a07001da9fbffd620206e4faf94",
+                "reference": "66de154ae86b1a07001da9fbffd620206e4faf94",
                 "shasum": ""
             },
             "require": {
@@ -2540,11 +2544,11 @@
             ],
             "description": "Symfony Process Component",
             "homepage": "https://symfony.com",
-            "time": "2016-08-16 14:58:24"
+            "time": "2016-09-29 14:13:09"
         },
         {
             "name": "symfony/routing",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/routing.git",
@@ -2619,16 +2623,16 @@
         },
         {
             "name": "symfony/translation",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/translation.git",
-                "reference": "a35edc277513c9bc0f063ca174c36b346f974528"
+                "reference": "ff1285087397d2f64041b35e591f3025881c90cd"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/translation/zipball/a35edc277513c9bc0f063ca174c36b346f974528",
-                "reference": "a35edc277513c9bc0f063ca174c36b346f974528",
+                "url": "https://api.github.com/repos/symfony/translation/zipball/ff1285087397d2f64041b35e591f3025881c90cd",
+                "reference": "ff1285087397d2f64041b35e591f3025881c90cd",
                 "shasum": ""
             },
             "require": {
@@ -2679,20 +2683,20 @@
             ],
             "description": "Symfony Translation Component",
             "homepage": "https://symfony.com",
-            "time": "2016-08-05 08:37:39"
+            "time": "2016-10-18 04:30:12"
         },
         {
             "name": "symfony/var-dumper",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/var-dumper.git",
-                "reference": "62ee73706c421654a4c840028954510277f7dfc8"
+                "reference": "4dc2f03b480c43f1665d3317d827a04ed6ffd11e"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/var-dumper/zipball/62ee73706c421654a4c840028954510277f7dfc8",
-                "reference": "62ee73706c421654a4c840028954510277f7dfc8",
+                "url": "https://api.github.com/repos/symfony/var-dumper/zipball/4dc2f03b480c43f1665d3317d827a04ed6ffd11e",
+                "reference": "4dc2f03b480c43f1665d3317d827a04ed6ffd11e",
                 "shasum": ""
             },
             "require": {
@@ -2742,7 +2746,7 @@
                 "debug",
                 "dump"
             ],
-            "time": "2016-08-31 09:05:42"
+            "time": "2016-10-18 15:46:07"
         },
         {
             "name": "vlucas/phpdotenv",
@@ -2852,16 +2856,16 @@
         },
         {
             "name": "friendsofphp/php-cs-fixer",
-            "version": "v1.12.1",
+            "version": "v1.12.3",
             "source": {
                 "type": "git",
                 "url": "https://github.com/FriendsOfPHP/PHP-CS-Fixer.git",
-                "reference": "d33ee60f3d3e6152888b7f3a385f49e5c43bf1bf"
+                "reference": "78a820c16d13f593303511461eefa939502fb2de"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/FriendsOfPHP/PHP-CS-Fixer/zipball/d33ee60f3d3e6152888b7f3a385f49e5c43bf1bf",
-                "reference": "d33ee60f3d3e6152888b7f3a385f49e5c43bf1bf",
+                "url": "https://api.github.com/repos/FriendsOfPHP/PHP-CS-Fixer/zipball/78a820c16d13f593303511461eefa939502fb2de",
+                "reference": "78a820c16d13f593303511461eefa939502fb2de",
                 "shasum": ""
             },
             "require": {
@@ -2906,7 +2910,7 @@
                 }
             ],
             "description": "A tool to automatically fix PHP code style",
-            "time": "2016-09-07 06:48:24"
+            "time": "2016-10-30 12:07:10"
         },
         {
             "name": "fzaninotto/faker",
@@ -3068,16 +3072,16 @@
         },
         {
             "name": "myclabs/deep-copy",
-            "version": "1.5.4",
+            "version": "1.5.5",
             "source": {
                 "type": "git",
                 "url": "https://github.com/myclabs/DeepCopy.git",
-                "reference": "ea74994a3dc7f8d2f65a06009348f2d63c81e61f"
+                "reference": "399c1f9781e222f6eb6cc238796f5200d1b7f108"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/myclabs/DeepCopy/zipball/ea74994a3dc7f8d2f65a06009348f2d63c81e61f",
-                "reference": "ea74994a3dc7f8d2f65a06009348f2d63c81e61f",
+                "url": "https://api.github.com/repos/myclabs/DeepCopy/zipball/399c1f9781e222f6eb6cc238796f5200d1b7f108",
+                "reference": "399c1f9781e222f6eb6cc238796f5200d1b7f108",
                 "shasum": ""
             },
             "require": {
@@ -3106,7 +3110,7 @@
                 "object",
                 "object graph"
             ],
-            "time": "2016-09-16 13:37:59"
+            "time": "2016-10-31 17:19:45"
         },
         {
             "name": "phpdocumentor/reflection-common",
@@ -3164,16 +3168,16 @@
         },
         {
             "name": "phpdocumentor/reflection-docblock",
-            "version": "3.1.0",
+            "version": "3.1.1",
             "source": {
                 "type": "git",
                 "url": "https://github.com/phpDocumentor/ReflectionDocBlock.git",
-                "reference": "9270140b940ff02e58ec577c237274e92cd40cdd"
+                "reference": "8331b5efe816ae05461b7ca1e721c01b46bafb3e"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/phpDocumentor/ReflectionDocBlock/zipball/9270140b940ff02e58ec577c237274e92cd40cdd",
-                "reference": "9270140b940ff02e58ec577c237274e92cd40cdd",
+                "url": "https://api.github.com/repos/phpDocumentor/ReflectionDocBlock/zipball/8331b5efe816ae05461b7ca1e721c01b46bafb3e",
+                "reference": "8331b5efe816ae05461b7ca1e721c01b46bafb3e",
                 "shasum": ""
             },
             "require": {
@@ -3205,7 +3209,7 @@
                 }
             ],
             "description": "With this component, a library can provide support for annotations via DocBlocks or otherwise retrieve information that is embedded in a DocBlock.",
-            "time": "2016-06-10 09:48:41"
+            "time": "2016-09-30 07:12:33"
         },
         {
             "name": "phpdocumentor/type-resolver",
@@ -3294,16 +3298,16 @@
         },
         {
             "name": "phpspec/phpspec",
-            "version": "3.1.0",
+            "version": "3.1.1",
             "source": {
                 "type": "git",
                 "url": "https://github.com/phpspec/phpspec.git",
-                "reference": "5602f67d429d0280c63a66f1c104186032259bbd"
+                "reference": "53d89ff6d328032c0e434a75af6b0e80ff2d669d"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/phpspec/phpspec/zipball/5602f67d429d0280c63a66f1c104186032259bbd",
-                "reference": "5602f67d429d0280c63a66f1c104186032259bbd",
+                "url": "https://api.github.com/repos/phpspec/phpspec/zipball/53d89ff6d328032c0e434a75af6b0e80ff2d669d",
+                "reference": "53d89ff6d328032c0e434a75af6b0e80ff2d669d",
                 "shasum": ""
             },
             "require": {
@@ -3372,7 +3376,7 @@
                 "testing",
                 "tests"
             ],
-            "time": "2016-09-17 09:09:54"
+            "time": "2016-09-26 21:11:31"
         },
         {
             "name": "phpspec/prophecy",
@@ -3438,16 +3442,16 @@
         },
         {
             "name": "phpunit/php-code-coverage",
-            "version": "4.0.1",
+            "version": "4.0.2",
             "source": {
                 "type": "git",
                 "url": "https://github.com/sebastianbergmann/php-code-coverage.git",
-                "reference": "5f3f7e736d6319d5f1fc402aff8b026da26709a3"
+                "reference": "6cba06ff75a1a63a71033e1a01b89056f3af1e8d"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/sebastianbergmann/php-code-coverage/zipball/5f3f7e736d6319d5f1fc402aff8b026da26709a3",
-                "reference": "5f3f7e736d6319d5f1fc402aff8b026da26709a3",
+                "url": "https://api.github.com/repos/sebastianbergmann/php-code-coverage/zipball/6cba06ff75a1a63a71033e1a01b89056f3af1e8d",
+                "reference": "6cba06ff75a1a63a71033e1a01b89056f3af1e8d",
                 "shasum": ""
             },
             "require": {
@@ -3497,7 +3501,7 @@
                 "testing",
                 "xunit"
             ],
-            "time": "2016-07-26 14:39:29"
+            "time": "2016-11-01 05:06:24"
         },
         {
             "name": "phpunit/php-file-iterator",
@@ -3682,16 +3686,16 @@
         },
         {
             "name": "phpunit/phpunit",
-            "version": "5.5.5",
+            "version": "5.6.2",
             "source": {
                 "type": "git",
                 "url": "https://github.com/sebastianbergmann/phpunit.git",
-                "reference": "a57126dc681b08289fef6ac96a48e30656f84350"
+                "reference": "cd13b23ac5a519a4708e00736c26ee0bb28b2e01"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/sebastianbergmann/phpunit/zipball/a57126dc681b08289fef6ac96a48e30656f84350",
-                "reference": "a57126dc681b08289fef6ac96a48e30656f84350",
+                "url": "https://api.github.com/repos/sebastianbergmann/phpunit/zipball/cd13b23ac5a519a4708e00736c26ee0bb28b2e01",
+                "reference": "cd13b23ac5a519a4708e00736c26ee0bb28b2e01",
                 "shasum": ""
             },
             "require": {
@@ -3725,7 +3729,6 @@
                 "ext-pdo": "*"
             },
             "suggest": {
-                "ext-tidy": "*",
                 "ext-xdebug": "*",
                 "phpunit/php-invoker": "~1.1"
             },
@@ -3735,7 +3738,7 @@
             "type": "library",
             "extra": {
                 "branch-alias": {
-                    "dev-master": "5.5.x-dev"
+                    "dev-master": "5.6.x-dev"
                 }
             },
             "autoload": {
@@ -3761,20 +3764,20 @@
                 "testing",
                 "xunit"
             ],
-            "time": "2016-09-21 14:40:13"
+            "time": "2016-10-25 07:40:25"
         },
         {
             "name": "phpunit/phpunit-mock-objects",
-            "version": "3.2.7",
+            "version": "3.4.0",
             "source": {
                 "type": "git",
                 "url": "https://github.com/sebastianbergmann/phpunit-mock-objects.git",
-                "reference": "546898a2c0c356ef2891b39dd7d07f5d82c8ed0a"
+                "reference": "238d7a2723bce689c79eeac9c7d5e1d623bb9dc2"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/sebastianbergmann/phpunit-mock-objects/zipball/546898a2c0c356ef2891b39dd7d07f5d82c8ed0a",
-                "reference": "546898a2c0c356ef2891b39dd7d07f5d82c8ed0a",
+                "url": "https://api.github.com/repos/sebastianbergmann/phpunit-mock-objects/zipball/238d7a2723bce689c79eeac9c7d5e1d623bb9dc2",
+                "reference": "238d7a2723bce689c79eeac9c7d5e1d623bb9dc2",
                 "shasum": ""
             },
             "require": {
@@ -3820,7 +3823,7 @@
                 "mock",
                 "xunit"
             ],
-            "time": "2016-09-06 16:07:45"
+            "time": "2016-10-09 07:01:45"
         },
         {
             "name": "sebastian/code-unit-reverse-lookup",
@@ -4337,16 +4340,16 @@
         },
         {
             "name": "symfony/css-selector",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/css-selector.git",
-                "reference": "2851e1932d77ce727776154d659b232d061e816a"
+                "reference": "ca809c64072e0fe61c1c7fb3c76cdc32265042ac"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/css-selector/zipball/2851e1932d77ce727776154d659b232d061e816a",
-                "reference": "2851e1932d77ce727776154d659b232d061e816a",
+                "url": "https://api.github.com/repos/symfony/css-selector/zipball/ca809c64072e0fe61c1c7fb3c76cdc32265042ac",
+                "reference": "ca809c64072e0fe61c1c7fb3c76cdc32265042ac",
                 "shasum": ""
             },
             "require": {
@@ -4386,20 +4389,20 @@
             ],
             "description": "Symfony CssSelector Component",
             "homepage": "https://symfony.com",
-            "time": "2016-06-29 05:41:56"
+            "time": "2016-09-06 11:02:40"
         },
         {
             "name": "symfony/dom-crawler",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/dom-crawler.git",
-                "reference": "bb7395e8b1db3654de82b9f35d019958276de4d7"
+                "reference": "59eee3c76eb89f21857798620ebdad7a05ad14f4"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/dom-crawler/zipball/bb7395e8b1db3654de82b9f35d019958276de4d7",
-                "reference": "bb7395e8b1db3654de82b9f35d019958276de4d7",
+                "url": "https://api.github.com/repos/symfony/dom-crawler/zipball/59eee3c76eb89f21857798620ebdad7a05ad14f4",
+                "reference": "59eee3c76eb89f21857798620ebdad7a05ad14f4",
                 "shasum": ""
             },
             "require": {
@@ -4442,20 +4445,20 @@
             ],
             "description": "Symfony DomCrawler Component",
             "homepage": "https://symfony.com",
-            "time": "2016-08-05 08:37:39"
+            "time": "2016-10-18 15:46:07"
         },
         {
             "name": "symfony/filesystem",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/filesystem.git",
-                "reference": "bb29adceb552d202b6416ede373529338136e84f"
+                "reference": "0565b61bf098cb4dc09f4f103f033138ae4f42c6"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/filesystem/zipball/bb29adceb552d202b6416ede373529338136e84f",
-                "reference": "bb29adceb552d202b6416ede373529338136e84f",
+                "url": "https://api.github.com/repos/symfony/filesystem/zipball/0565b61bf098cb4dc09f4f103f033138ae4f42c6",
+                "reference": "0565b61bf098cb4dc09f4f103f033138ae4f42c6",
                 "shasum": ""
             },
             "require": {
@@ -4491,11 +4494,11 @@
             ],
             "description": "Symfony Filesystem Component",
             "homepage": "https://symfony.com",
-            "time": "2016-07-20 05:44:26"
+            "time": "2016-10-18 04:30:12"
         },
         {
             "name": "symfony/stopwatch",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/stopwatch.git",
@@ -4544,16 +4547,16 @@
         },
         {
             "name": "symfony/yaml",
-            "version": "v3.1.4",
+            "version": "v3.1.6",
             "source": {
                 "type": "git",
                 "url": "https://github.com/symfony/yaml.git",
-                "reference": "f291ed25eb1435bddbe8a96caaef16469c2a092d"
+                "reference": "7ff51b06c6c3d5cc6686df69004a42c69df09e27"
             },
             "dist": {
                 "type": "zip",
-                "url": "https://api.github.com/repos/symfony/yaml/zipball/f291ed25eb1435bddbe8a96caaef16469c2a092d",
-                "reference": "f291ed25eb1435bddbe8a96caaef16469c2a092d",
+                "url": "https://api.github.com/repos/symfony/yaml/zipball/7ff51b06c6c3d5cc6686df69004a42c69df09e27",
+                "reference": "7ff51b06c6c3d5cc6686df69004a42c69df09e27",
                 "shasum": ""
             },
             "require": {
@@ -4589,7 +4592,7 @@
             ],
             "description": "Symfony Yaml Component",
             "homepage": "https://symfony.com",
-            "time": "2016-09-02 02:12:52"
+            "time": "2016-10-24 18:41:13"
         },
         {
             "name": "webmozart/assert",
@@ -4645,7 +4648,8 @@
     "aliases": [],
     "minimum-stability": "dev",
     "stability-flags": {
-        "easyrdf/easyrdf": 20
+        "easyrdf/easyrdf": 20,
+        "shift31/laravel-elasticsearch": 20
     },
     "prefer-stable": true,
     "prefer-lowest": false,
--- a/server/src/config/app.php	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/config/app.php	Wed Nov 09 15:05:41 2016 +0100
@@ -97,7 +97,7 @@
     |
     */
 
-    'log' => 'daily',
+    'log' => env('APP_LOG','daily'),
 
     /*
     |--------------------------------------------------------------------------
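[Note on the app.php hunk above: the log handler is no longer hard-coded to daily rotation; env('APP_LOG', 'daily') reads APP_LOG from the environment (typically the .env file) and falls back to 'daily' when it is unset. A deployment that wants, say, a single growing log file only needs one line in .env; 'single', 'daily', 'syslog' and 'errorlog' are the standard Laravel 5.x values for this setting:

    APP_LOG=single
]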
--- a/server/src/package.json	Tue Nov 08 18:23:01 2016 +0100
+++ b/server/src/package.json	Wed Nov 09 15:05:41 2016 +0100
@@ -1,7 +1,7 @@
 {
   "private": true,
   "devDependencies": {
-    "bower": "^1.5.3",
+    "bower": "^1.8.0",
     "gulp": "^3.8.8",
     "gulp-exec": "^2.1.2",
     "minimist": "^1.2.0",