diff --git a/cookbooks/logstash/.gitignore b/cookbooks/logstash/.gitignore
new file mode 100644
index 0000000..585d270
--- /dev/null
+++ b/cookbooks/logstash/.gitignore
@@ -0,0 +1,3 @@
+metadata.json
+.vagrant
+#*#
diff --git a/cookbooks/logstash/.travis.yml b/cookbooks/logstash/.travis.yml
new file mode 100644
index 0000000..8e1e518
--- /dev/null
+++ b/cookbooks/logstash/.travis.yml
@@ -0,0 +1,6 @@
+language: ruby
+gemfile:
+ - test/support/Gemfile
+rvm:
+ - 1.9.3
+script: BUNDLE_GEMFILE=test/support/Gemfile bundle exec rake foodcritic
\ No newline at end of file
diff --git a/cookbooks/logstash/Berksfile b/cookbooks/logstash/Berksfile
new file mode 100644
index 0000000..bd1edd9
--- /dev/null
+++ b/cookbooks/logstash/Berksfile
@@ -0,0 +1,12 @@
+metadata
+cookbook 'logrotate'
+cookbook 'yum'
+cookbook 'java'
+cookbook 'build-essential'
+cookbook 'runit'
+cookbook 'apache2'
+cookbook 'yumrepo', git: 'git://github.com/bryanwb/cookbook-yumrepo.git'
+cookbook 'rbenv', git: 'git://github.com/RiotGames/rbenv-cookbook.git'
+cookbook 'rabbitmq', git: 'git://github.com/opscode-cookbooks/rabbitmq.git'
+cookbook 'minitest-handler', git: 'git://github.com/btm/minitest-handler-cookbook.git'
+cookbook 'elasticsearch', git: 'git://github.com/elasticsearch/cookbook-elasticsearch.git'
diff --git a/cookbooks/logstash/Berksfile.lock b/cookbooks/logstash/Berksfile.lock
new file mode 100644
index 0000000..f169767
--- /dev/null
+++ b/cookbooks/logstash/Berksfile.lock
@@ -0,0 +1,29 @@
+cookbook 'logstash', :path => '/home/berryb/pr/logstash'
+cookbook 'apt', :locked_version => '1.4.2'
+cookbook 'logrotate', :locked_version => '0.8.2'
+cookbook 'vim', :locked_version => '1.0.2'
+cookbook 'monit', :locked_version => '0.7.0'
+cookbook 'nginx', :locked_version => '0.101.6'
+cookbook 'rabbitmq', :git => 'git://github.com/opscode-cookbooks/rabbitmq.git', :ref => '804e882e0f5876392de1b70da6af4d6e94d4b442'
+cookbook 'minitest-handler', :git => 'git://github.com/btm/minitest-handler-cookbook.git', :ref => 'db7590b6e38efb8a53360b0b470e9ad8c2932ed5'
+cookbook 'elasticsearch', :git => 'git://github.com/bryanwb/cookbook-elasticsearch.git', :ref => 'a9b47b0aec348806a570f76e69d3548a8d1fff62'
+cookbook 'apache2', :locked_version => '1.1.10'
+cookbook 'build-essential', :locked_version => '1.0.2'
+cookbook 'xml', :locked_version => '1.0.4'
+cookbook 'openssl', :locked_version => '1.0.0'
+cookbook 'chef_handler', :locked_version => '1.0.6'
+cookbook 'windows', :locked_version => '1.3.0'
+cookbook 'mysql', :locked_version => '1.2.6'
+cookbook 'php', :locked_version => '1.0.2'
+cookbook 'runit', :locked_version => '0.15.0'
+cookbook 'yum', :locked_version => '0.6.2'
+cookbook 'dmg', :locked_version => '1.0.0'
+cookbook 'git', :locked_version => '1.0.0'
+cookbook 'python', :locked_version => '1.0.6'
+cookbook 'java', :locked_version => '1.5.0'
+cookbook 'jpackage', :locked_version => '0.10.0'
+cookbook 'ant', :locked_version => '0.10.1'
+cookbook 'bluepill', :locked_version => '1.0.4'
+cookbook 'ohai', :locked_version => '1.0.2'
+cookbook 'erlang', :locked_version => '1.0.0'
+cookbook 'ark', :locked_version => '0.0.10'
\ No newline at end of file
diff --git a/cookbooks/logstash/CHANGELOG.md b/cookbooks/logstash/CHANGELOG.md
new file mode 100644
index 0000000..68cea0f
--- /dev/null
+++ b/cookbooks/logstash/CHANGELOG.md
@@ -0,0 +1,12 @@
+0.2.1 (June 26, 2012)
+---------------------
+
+New features
+ * Use ruby hashes supplied by roles to populate inputs, filters,
+ and outputs
+ * redhat-family support
+ * change default version of logstash to 1.1.1preview
+ * add in Travis-CI support
+
+Bug fixes
+ * keep apache default site from obscuring kibana
diff --git a/cookbooks/logstash/CONTRIBUTORS b/cookbooks/logstash/CONTRIBUTORS
new file mode 100644
index 0000000..9a63e68
--- /dev/null
+++ b/cookbooks/logstash/CONTRIBUTORS
@@ -0,0 +1,7 @@
+Bryan Berry (@bryanwb)
+Juanje Ojeda (@juanje)
+Richard Clamp (@richardc)
+Hector Castro (@hectcastro)
+@benattar
+ChrisLundquist (@ChrisLundquist)
+Phil Sturgeon (@philsturgeon)
diff --git a/cookbooks/logstash/Gemfile b/cookbooks/logstash/Gemfile
new file mode 100644
index 0000000..3bf0ec5
--- /dev/null
+++ b/cookbooks/logstash/Gemfile
@@ -0,0 +1,4 @@
+source "https://rubygems.org"
+
+gem 'berkshelf'
+gem 'vagrant', '~> 1.0.5'
diff --git a/cookbooks/logstash/Gemfile.lock b/cookbooks/logstash/Gemfile.lock
new file mode 100644
index 0000000..800ddae
--- /dev/null
+++ b/cookbooks/logstash/Gemfile.lock
@@ -0,0 +1,134 @@
+GEM
+ remote: https://rubygems.org/
+ specs:
+ activemodel (3.2.11)
+ activesupport (= 3.2.11)
+ builder (~> 3.0.0)
+ activesupport (3.2.11)
+ i18n (~> 0.6)
+ multi_json (~> 1.0)
+ addressable (2.3.2)
+ archive-tar-minitar (0.5.2)
+ berkshelf (1.1.2)
+ activesupport
+ chef (>= 10.16.2)
+ chozo (>= 0.2.3)
+ hashie
+ minitar
+ multi_json (>= 1.3.0)
+ ridley (>= 0.6.3)
+ solve (>= 0.4.0.rc1)
+ thor (~> 0.16.0)
+ vagrant (~> 1.0.5)
+ yajl-ruby
+ builder (3.0.4)
+ bunny (0.7.9)
+ celluloid (0.12.4)
+ facter (>= 1.6.12)
+ timers (>= 1.0.0)
+ chef (10.18.2)
+ bunny (>= 0.6.0, < 0.8.0)
+ erubis
+ highline (>= 1.6.9)
+ json (>= 1.4.4, <= 1.6.1)
+ mixlib-authentication (>= 1.3.0)
+ mixlib-cli (>= 1.1.0)
+ mixlib-config (>= 1.1.2)
+ mixlib-log (>= 1.3.0)
+ mixlib-shellout
+ moneta (< 0.7.0)
+ net-ssh (~> 2.2.2)
+ net-ssh-multi (~> 1.1.0)
+ ohai (>= 0.6.0)
+ rest-client (>= 1.0.4, < 1.7.0)
+ treetop (~> 1.4.9)
+ uuidtools
+ yajl-ruby (~> 1.1)
+ childprocess (0.3.7)
+ ffi (~> 1.0, >= 1.0.6)
+ chozo (0.4.2)
+ activesupport (>= 3.2.0)
+ hashie
+ multi_json (>= 1.3.0)
+ erubis (2.7.0)
+ facter (1.6.17)
+ faraday (0.8.5)
+ multipart-post (~> 1.1)
+ ffi (1.3.1)
+ hashie (1.2.0)
+ highline (1.6.15)
+ i18n (0.6.1)
+ ipaddress (0.8.0)
+ json (1.5.4)
+ log4r (1.1.10)
+ mime-types (1.20.1)
+ minitar (0.5.4)
+ mixlib-authentication (1.3.0)
+ mixlib-log
+ mixlib-cli (1.3.0)
+ mixlib-config (1.1.2)
+ mixlib-log (1.4.1)
+ mixlib-shellout (1.1.0)
+ moneta (0.6.0)
+ multi_json (1.5.0)
+ multipart-post (1.1.5)
+ net-http-persistent (2.8)
+ net-scp (1.0.4)
+ net-ssh (>= 1.99.1)
+ net-ssh (2.2.2)
+ net-ssh-gateway (1.1.0)
+ net-ssh (>= 1.99.1)
+ net-ssh-multi (1.1)
+ net-ssh (>= 2.1.4)
+ net-ssh-gateway (>= 0.99.0)
+ ohai (6.16.0)
+ ipaddress
+ mixlib-cli
+ mixlib-config
+ mixlib-log
+ mixlib-shellout
+ systemu
+ yajl-ruby
+ polyglot (0.3.3)
+ rest-client (1.6.7)
+ mime-types (>= 1.16)
+ ridley (0.6.3)
+ activemodel (>= 3.2.0)
+ activesupport (>= 3.2.0)
+ addressable
+ celluloid
+ chozo (>= 0.2.2)
+ erubis
+ faraday
+ json (>= 1.5.0)
+ mixlib-authentication
+ mixlib-log
+ multi_json (>= 1.0.4)
+ net-http-persistent (>= 2.8)
+ net-ssh
+ solve (0.4.1)
+ json
+ systemu (2.5.2)
+ thor (0.16.0)
+ timers (1.1.0)
+ treetop (1.4.12)
+ polyglot
+ polyglot (>= 0.3.1)
+ uuidtools (2.1.3)
+ vagrant (1.0.6)
+ archive-tar-minitar (= 0.5.2)
+ childprocess (~> 0.3.1)
+ erubis (~> 2.7.0)
+ i18n (~> 0.6.0)
+ json (~> 1.5.1)
+ log4r (~> 1.1.9)
+ net-scp (~> 1.0.4)
+ net-ssh (~> 2.2.2)
+ yajl-ruby (1.1.0)
+
+PLATFORMS
+ ruby
+
+DEPENDENCIES
+ berkshelf
+ vagrant (~> 1.0.5)
diff --git a/cookbooks/logstash/README.md b/cookbooks/logstash/README.md
new file mode 100644
index 0000000..a572b82
--- /dev/null
+++ b/cookbooks/logstash/README.md
@@ -0,0 +1,471 @@
+# chef-logstash [![Build Status](https://secure.travis-ci.org/lusis/chef-logstash.png?branch=master)](http://travis-ci.org/lusis/chef-logstash)
+
+Description
+===========
+
+This is the semi-official 'all-in-one' Logstash cookbook.
+
+Requirements
+============
+
+All of the requirements are explicitly defined in the recipes. Every
+effort has been made to utilize Opscode's cookbooks.
+
+However if you wish to use an external ElasticSearch cluster, you will
+need to install that yourself and change the relevant attributes for
+discovery. The same applies to integration with Graphite.
+
+This cookbook has been tested together with the following cookbooks,
+see the Berksfile for more details
+
+* [Heavywater Graphite Cookbook](https://github.com/heavywater/chef-graphite) - This is the one I use
+* [Karmi's ElasticSearch Cookbook](https://github.com/karmi/cookbook-elasticsearch)
+* [RiotGames RBENV cookbook](https://github.com/RiotGames/rbenv-cookbook)
+
+
+
+Attributes
+==========
+
+## Default
+
+* `node['logstash']['basedir']` - the base directory for all the
+ Logstash components
+* `node['logstash']['user']` - the owner for all Logstash components
+* `node['logstash']['group']` - the group for all Logstash components
+* `node['logstash']['graphite_role']` - the Chef role to search for
+ discovering your preexisting Graphite server
+* `node['logstash']['graphite_query']` - the search query used for
+ discovering your preexisting Graphite server. Defaults to
+ node['logstash']['graphite_role'] in the current node environment
+* `node['logstash']['elasticsearch_role']` - the Chef role to search
+ for discovering your preexisting ElasticSearch cluster.
+* `node['logstash']['elasticsearch_query']` - the search query used for
+ discovering your preexisting ElasticSearch cluster. Defaults to
+ node['logstash']['elasticsearch_role'] in the current node environment
+* `node['logstash']['elasticsearch_cluster']` - the cluster name
+ assigned to your preexisting ElasticSearch cluster. Only applies to
+ external ES clusters.
+* `node['logstash']['elasticsearch_ip']` - the IP address that will be
+ used for your elasticsearch server in case you are using Chef-solo
+* `node['logstash']['graphite_ip']` - the IP address that will be used
+ for your graphite server in case you are using Chef-solo
+* `node['logstash']['join_groups']` - An array of operating system
+ groups to join. Useful to gain read privileges on some logfiles.
+* `node['logstash']['patterns']` - A hash with grok patterns to be
+ used on grok and multiline filters.
+* `node['logstash']['create_account']` - create the account info from
+ `user` and `group`; this is `true` by default. Disable it to use an
+ existing account!
+* `node['logstash']['install_zeromq']` - Should this
+ recipe install zeromq packages?
+* `node['logstash']['zeromq_packages']` - zeromq_packages to install
+ if you use zeromq
+
+## Agent
+
+* `node['logstash']['agent']['install_method']` - The method to
+ install logstash - either `jar` or `source`, defaults to `jar`
+* `node['logstash']['agent']['version']` - The version of Logstash to
+ install. Only applies to `jar` install method.
+* `node['logstash']['agent']['source_url']` - The URL of the Logstash
+ jar to download. Only applies to `jar` install method.
+* `node['logstash']['agent']['checksum']` - The checksum of the jar
+ file. Only applies to `jar` install method.
+* `node['logstash']['agent']['base_config']` - The name of the
+ template to use for `logstash.conf` as a base config.
+* `node['logstash']['agent']['base_config_cookbook']` - Where to find
+ the base\_config template.
+* `node['logstash']['agent']['xms']` - The minimum memory to assign
+ the JVM.
+* `node['logstash']['agent']['xmx']` - The maximum memory to assign
+ the JVM.
+* `node['logstash']['agent']['java_opts']` - Additional params you
+ want to pass to the JVM
+* `node['logstash']['agent']['gc_opts']` - Specify your garbage
+ collection options to pass to the JVM
+* `node['logstash']['agent']['ipv4_only']` - Add jvm option
+ preferIPv4Stack?
+* `node['logstash']['agent']['debug']` - Run logstash with `-v`
+ option?
+* `node['logstash']['agent']['server_role']` - The role of the node
+ behaving as a Logstash `server`/`indexer`
+* `node['logstash']['agent']['inputs']` - Array of input plugins
+ configuration.
+* `node['logstash']['agent']['filters']` - Array of filter plugins
+ configuration.
+* `node['logstash']['agent']['outputs']` - Array of output plugins
+ configuration.
+* `node['logstash']['agent']['patterns_dir']` - The patterns directory
+ where pattern files will be generated. Relative to the basedir or
+ absolute.
+
+## Server
+
+* `node['logstash']['server']['install_method']` - The method to
+ install logstash - either `jar` or `source`
+* `node['logstash']['server']['version']` - The version of Logstash to
+ install. Only applies to `jar` install method.
+* `node['logstash']['server']['source_url']` - The URL of the Logstash
+ jar to download. Only applies to `jar` install method.
+* `node['logstash']['server']['checksum']` - The checksum of the jar
+ file. Only applies to `jar` install method.
+* `node['logstash']['server']['base_config']` - The name of the
+ template to use for `logstash.conf` as a base config.
+* `node['logstash']['server']['base_config_cookbook']` - Where to find
+ the base\_config template.
+* `node['logstash']['server']['xms']` - The minimum memory to assign
+ the JVM.
+* `node['logstash']['server']['xmx']` - The maximum memory to assign
+ the JVM.
+* `node['logstash']['server']['java_opts']` - Additional params you
+ want to pass to the JVM
+* `node['logstash']['server']['gc_opts']` - Specify your garbage
+ collection options to pass to the JVM
+* `node['logstash']['server']['ipv4_only']` - Add jvm option
+ preferIPv4Stack?
+* `node['logstash']['server']['debug']` - Run logstash with `-v`
+ option?
+* `node['logstash']['server']['enable_embedded_es']` - Should Logstash
+ run with the embedded ElasticSearch server or not?
+* `node['logstash']['server']['install_rabbitmq']` - Should this
+ recipe install rabbitmq?
+* `node['logstash']['server']['inputs']` - Array of input plugins
+ configuration.
+* `node['logstash']['server']['filters']` - Array of filter plugins
+ configuration.
+* `node['logstash']['server']['outputs']` - Array of output plugins
+ configuration.
+* `node['logstash']['server']['patterns_dir']` - The patterns
+ directory where pattern files will be generated. Relative to the
+ basedir or absolute.
+
+## Kibana
+
+* `node['logstash']['kibana']['repo']` - The git repo to install
+ Kibana from.
+* `node['logstash']['kibana']['sha']` - The sha/branch/tag of the repo
+ you wish to clone.
+* `node['logstash']['kibana']['apache_template']` - The name of the
+ template file to use for the Apache site file
+* `node['logstash']['kibana']['config']` - The name of the template to
+ use for the Kibana `config.php` file
+* `node['logstash']['kibana']['server_name']` - The value to use for
+ the Apache `ServerName` variable to use for the Kibana Apache
+ virtual host.
+* `node['logstash']['kibana']['http_port']` - The port the virtualhost
+ kibana listens on
+
+## Beaver (alternative to Logstash Agent)
+
+* `node['logstash']['beaver']['repo']` - URL or repository to install
+ beaver from (using pip).
+* `node['logstash']['beaver']['server_role']` - The role of the node
+ behaving as a Logstash `server`/`indexer`.
+* `node['logstash']['beaver']['server_ipaddress']` - Server IP address
+ to use (needed when not using server_role).
+* `node['logstash']['beaver']['inputs']` - Array of input plugins
+ configuration (Supported: file).
+* `node['logstash']['beaver']['outputs']` - Array of output plugins
+ configuration (Supported: amq, redis, stdout, zeromq).
+
+## Source
+
+* `node['logstash']['source']['repo']` - The git repo to use for the
+ source code of Logstash
+* `node['logstash']['source']['sha']` - The sha/branch/tag of the repo
+ you wish to clone. Uses `node['logstash']['server']['version']` by
+ default.
+* `node['logstash']['source']['java_home']` - your `JAVA_HOME`
+ location. Needed explicitly for `ant` when building JRuby
+
+## Index Cleaner
+
+* `node['logstash']['index_cleaner']['days_to_keep']` - Integer number
+ of days from today of Logstash index to keep.
+
+Usage
+=====
+
+A proper readme is forthcoming but in the interim....
+
+There are 3 recipes you need to concern yourself with:
+
+* server - This would be your indexer node
+* agent - This would be a local host's agent for collection
+* kibana - This is the web interface
+
+Every attempt (and I mean this) was made to ensure that the following
+objectives were met:
+
+* Any agent install can talk to a server install
+* Kibana web interface can talk to the server install
+* Each component works OOB and with each other
+* Utilize official opscode cookbooks where possible
+
+This setup makes HEAVY use of roles. Additionally, ALL paths have been
+made into attributes. Everything I could think of that would need to
+be customized has been made an attribute.
+
+## Defaults
+
+By default, the recipes look for the following roles (defined as
+attributes so they can be overridden):
+
+* `graphite_server` - `node['logstash']['graphite_role']`
+* `elasticsearch_server` - `node['logstash']['elasticsearch_role']`
+* `logstash_server` -
+ `node['logstash']['kibana']['elasticsearch_role']` and
+ `node['logstash']['agent']['server_role']`
+
+The reason for giving `kibana` its own role assignment is to allow you
+to point to existing ES clusters/logstash installs.
+
+The reason for giving `agent` its own role assignment is to allow the
+`server` and `agent` recipes to work together.
+
+Yes, if you have a graphite installation with a role of
+`graphite_server`, logstash will send stats of events received to
+`logstash.events`.
+
+## Agent and Server configuration
+
+The template to use for configuration is made an attribute as well.
+This allows you to define your OWN logstash configuration file without
+mucking with the default templates.
+
+The `server` will, by default, enable the embedded ES server. This can
+be overridden as well.
+
+See the `server` and `agent` attributes for more details.
+
+## Source vs. Jar install methods
+
+Both `agent` and `server` support an attribute for how to install. By
+default this is set to `jar` to use the 1.1.1preview as it is required
+to use elasticsearch 0.19.4. The current release is defined in
+attributes if you choose to go the `source` route.
+
+## Out of the box behaviour
+
+Here are some basic steps
+
+* Create a role called `logstash_server` and assign it the following
+ recipes: `logstash::server` and `logstash::kibana`
+* Assign the role to a new server
+* Assign the `logstash::agent` recipe to another server
+
+If there is a system found with the `logstash_server` role, the agent
+will automatically configure itself to send logs to it over tcp port
+5959. This is, not coincidentally, the port used by the chef logstash
+handler.
+
+If there is NOT a system with the `logstash_server` role, the agent
+will use a null output. The default input is to read files from
+`/var/log/*.log` excluding any gzipped files.
+
+If you point your browser to the `logstash_server` system's ip
+address, you should get the kibana web interface.
+
+Do something to generate a new line in any of the files in the agent's
+watch path (I like to SSH to the host), and the events will start
+showing up in kibana. You might have to issue a fresh empty search.
+
+The `pyshipper` recipe will work as well but it is NOT wired up to
+anything yet.
+
+## Letting data drive your templates
+
+The current templates for the agent and server are written so that you
+can provide ruby hashes in your roles that map to inputs, filters, and
+outputs. Here is a role for logstash_server
+
+ name "logstash_server"
+ description "Attributes and run_lists specific to FAO's logstash instance"
+ default_attributes(
+ :logstash => {
+ :server => {
+ :enable_embedded_es => false,
+ :inputs => [
+ :amqp => {
+ :type => "all",
+ :host => "127.0.0.1",
+ :exchange => "rawlogs",
+ :name => "rawlogs_consumer"
+ }
+ ],
+ :filters => [
+ :grok => {
+ :type => "haproxy",
+ :pattern => "%{HAPROXYHTTP}",
+ :patterns_dir => '/opt/logstash/server/etc/patterns/'
+ }
+ ],
+ :outputs => [
+ :file => {
+ :type => 'haproxy',
+ :path => '/opt/logstash/server/haproxy_logs/%{request_header_host}.log',
+ :message_format => '%{client_ip} - - [%{accept_date}] "%{http_request}" %{http_status_code} ....'
+ }
+ ]
+ }
+ }
+ )
+ run_list(
+ "role[elasticsearch_server]",
+ "recipe[logstash::server]",
+ "recipe[php::module_curl]",
+ "recipe[logstash::kibana]"
+ )
+
+
+It will produce the following logstash.conf file
+
+ input {
+
+ amqp {
+ exchange => 'rawlogs'
+ host => '127.0.0.1'
+ name => 'rawlogs_consumer'
+ type => 'all'
+ }
+ }
+
+ filter {
+
+ grok {
+ pattern => '%{HAPROXYHTTP}'
+ patterns_dir => '/opt/logstash/server/etc/patterns/'
+ type => 'haproxy'
+ }
+ }
+
+ output {
+ stdout { debug => true debug_format => "json" }
+ elasticsearch { host => "127.0.0.1" cluster => "logstash" }
+
+ file {
+ message_format => '%{client_ip} - - [%{accept_date}] "%{http_request}" %{http_status_code} ....'
+ path => '/opt/logstash/server/haproxy_logs/%{request_header_host}.log'
+ type => 'haproxy'
+ }
+ }
+
+Here is an example using multiple filters
+
+ default_attributes(
+ :logstash => {
+ :server => {
+ :filters => [
+ { :grep => {
+ :type => 'tomcat',
+ :match => { '@message' => '([Ee]xception|Failure:|Error:)' },
+ :add_tag => 'exception',
+ :drop => false
+ } },
+ { :grep => {
+ :type => 'tomcat',
+ :match => { '@message' => 'Unloading class ' },
+ :add_tag => 'unloading-class',
+ :drop => false
+ } },
+ { :multiline => {
+ :type => 'tomcat',
+ :pattern => '^\s',
+ :what => 'previous'
+ } }
+ ]
+ }
+ }
+ )
+
+It will produce the following logstash.conf file
+
+ filter {
+
+ grep {
+ add_tag => 'exception'
+ drop => false
+ match => ['@message', '([Ee]xception|Failure:|Error:)']
+ type => 'tomcat'
+ }
+
+ grep {
+ add_tag => 'unloading-class'
+ drop => false
+ match => ["@message", "Unloading class "]
+ type => 'tomcat'
+ }
+
+ multiline {
+ patterns_dir => '/opt/logstash/patterns'
+ pattern => '^\s'
+ type => 'tomcat'
+ what => 'previous'
+ }
+
+ }
+
+## Adding grok patterns
+
+Grok pattern files can be generated using attributes as follows
+
+ default_attributes(
+ :logstash => {
+ :patterns => {
+ :apache => {
+ :HTTP_ERROR_DATE => '%{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}',
+ :APACHE_LOG_LEVEL => '[A-Za-z][A-Za-z]+',
+ :ERRORAPACHELOG => '^\[%{HTTP_ERROR_DATE:timestamp}\] \[%{APACHE_LOG_LEVEL:level}\](?: \[client %{IPORHOST:clientip}\])?',
+ },
+ :mywebapp => {
+ :MYWEBAPP_LOG => '\[mywebapp\]',
+ },
+ },
+ [...]
+ }
+ )
+
+This will generate the following files:
+
+`/opt/logstash/server/etc/patterns/apache`
+
+ APACHE_LOG_LEVEL [A-Za-z][A-Za-z]+
+ ERRORAPACHELOG ^\[%{HTTP_ERROR_DATE:timestamp}\] \[%{APACHE_LOG_LEVEL:level}\](?: \[client %{IPORHOST:clientip}\])?
+ HTTP_ERROR_DATE %{DAY} %{MONTH} %{MONTHDAY} %{TIME} %{YEAR}
+
+`/opt/logstash/server/etc/patterns/mywebapp`
+
+ MYWEBAPP_LOG \[mywebapp\]
+
+These patterns will be included by default in the grok and multiline
+filters.
+
+# BIG WARNING
+
+* Currently only tested on Ubuntu Natty, Precise, and RHEL 6.2.
+
+## License and Author
+
+- Author: John E. Vincent
+- Author: Bryan W. Berry ()
+- Author: Richard Clamp (@richardc)
+- Author: Juanje Ojeda (@juanje)
+- Author: @benattar
+- Copyright: 2012, John E. Vincent
+- Copyright: 2012, Bryan W. Berry
+- Copyright: 2012, Richard Clamp
+- Copyright: 2012, Juanje Ojeda
+- Copyright: 2012, @benattar
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/cookbooks/logstash/Rakefile b/cookbooks/logstash/Rakefile
new file mode 100644
index 0000000..4bca650
--- /dev/null
+++ b/cookbooks/logstash/Rakefile
@@ -0,0 +1,35 @@
+#!/usr/bin/env rake
+
+cookbook_path = '/tmp/logstash-cookbooks'
+@cookbook = "logstash"
+
+desc "install dependencies using Berkshelf"
+task :install_deps do
+ system("berks install --shims #{cookbook_path}")
+end
+
+desc "Runs foodcritic linter"
+task :foodcritic do
+ if Gem::Version.new("1.9.2") <= Gem::Version.new(RUBY_VERSION.dup)
+ sandbox = File.join(File.dirname(__FILE__), %w{tmp foodcritic}, @cookbook)
+ prepare_foodcritic_sandbox(sandbox)
+
+ sh "foodcritic --epic-fail any #{File.dirname(sandbox)}"
+ else
+ puts "WARN: foodcritic run is skipped as Ruby #{RUBY_VERSION} is < 1.9.2."
+ end
+end
+
+task :default => 'foodcritic'
+
+private
+
+def prepare_foodcritic_sandbox(sandbox)
+ files = %w{*.md *.rb attributes definitions files providers
+ recipes resources templates}
+
+ rm_rf sandbox
+ mkdir_p sandbox
+ cp_r Dir.glob("{#{files.join(',')}}"), sandbox
+ puts "\n\n"
+end
diff --git a/cookbooks/logstash/Vagrantfile b/cookbooks/logstash/Vagrantfile
new file mode 100644
index 0000000..2ca0627
--- /dev/null
+++ b/cookbooks/logstash/Vagrantfile
@@ -0,0 +1,165 @@
+require 'berkshelf/vagrant'
+
+Vagrant::Config.run do |config|
+
+ config.vm.define :lucid32 do |dist_config|
+ dist_config.vm.box = 'lucid32'
+ dist_config.vm.box_url = 'http://files.vagrantup.com/lucid32.box'
+
+ dist_config.vm.customize do |vm|
+ vm.name = 'logstash'
+ vm.memory_size = 1024
+ end
+
+ dist_config.vm.network :bridged, '33.33.33.10'
+
+ dist_config.vm.provision :chef_solo do |chef|
+
+ chef.cookbooks_path = [ '/tmp/logstash-cookbooks' ]
+ chef.provisioning_path = '/etc/vagrant-chef'
+ chef.log_level = :debug
+
+ chef.run_list = %w[
+ minitest-handler
+ apt
+ java
+ monit
+ erlang
+ git
+ elasticsearch
+ php::module_curl
+ logstash::server
+ logstash::kibana
+ ]
+
+ chef.json = {
+ elasticsearch: {
+ cluster_name: "logstash_vagrant",
+ min_mem: '64m',
+ max_mem: '64m',
+ limits: {
+ nofile: 1024,
+ memlock: 512
+ }
+ },
+ logstash: {
+ server: {
+ xms: '128m',
+ xmx: '128m',
+ enable_embedded_es: false,
+ elasticserver_ip: '127.0.0.1'
+ },
+ kibana: {
+ server_name: '33.33.33.10',
+ http_port: '8080'
+ }
+ }
+ }
+ end
+ end
+
+ config.vm.define :lucid64 do |dist_config|
+ dist_config.vm.box = 'lucid64'
+ dist_config.vm.box_url = 'http://files.vagrantup.com/lucid64.box'
+
+ dist_config.vm.customize do |vm|
+ vm.name = 'logstash'
+ vm.memory_size = 1024
+ end
+
+ dist_config.vm.network :bridged, '33.33.33.10'
+
+ dist_config.vm.provision :chef_solo do |chef|
+ chef.cookbooks_path = [ '/tmp/logstash-cookbooks' ]
+ chef.provisioning_path = '/etc/vagrant-chef'
+ chef.log_level = :debug
+
+ chef.run_list = %w[
+ minitest-handler
+ apt
+ java
+ monit
+ erlang
+ git
+ elasticsearch
+ php::module_curl
+ logstash::server
+ logstash::kibana
+ ]
+
+ chef.json = {
+ elasticsearch: {
+ cluster_name: "logstash_vagrant",
+ min_mem: '64m',
+ max_mem: '64m',
+ limits: {
+ nofile: 1024,
+ memlock: 512
+ }
+ },
+ logstash: {
+ server: {
+ xms: '128m',
+ xmx: '128m',
+ enable_embedded_es: false,
+ elasticserver_ip: '127.0.0.1'
+ },
+ kibana: {
+ server_name: '33.33.33.10',
+ http_port: '8080'
+ }
+ }
+ }
+ end
+ end
+
+ config.vm.define :centos6_32 do |dist_config|
+ dist_config.vm.box = 'centos6_32'
+ dist_config.vm.box_url = 'http://vagrant.sensuapp.org/centos-6-i386.box'
+
+ dist_config.vm.customize do |vm|
+ vm.name = 'logstash'
+ vm.memory_size = 1024
+ end
+
+ dist_config.vm.network :bridged, '33.33.33.10'
+
+ dist_config.vm.provision :chef_solo do |chef|
+ chef.cookbooks_path = [ '/tmp/logstash-cookbooks' ]
+ chef.provisioning_path = '/etc/vagrant-chef'
+ chef.log_level = :debug
+
+ chef.run_list = %w[
+ minitest-handler
+ java
+ yum::epel
+ erlang
+ git
+ elasticsearch
+ php::module_curl
+ logstash::server
+ logstash::kibana
+ ]
+
+ chef.json = {
+ elasticsearch: {
+ cluster_name: "logstash_vagrant",
+ min_mem: '64m',
+ max_mem: '64m',
+ limits: {
+ nofile: 1024,
+ memlock: 512
+ }
+ },
+ logstash: {
+ server: {
+ xms: '128m',
+ xmx: '128m',
+ enable_embedded_es: false,
+ elasticserver_ip: '127.0.0.1'
+ }
+ }
+ }
+ end
+ end
+end
diff --git a/cookbooks/logstash/attributes/agent.rb b/cookbooks/logstash/attributes/agent.rb
new file mode 100644
index 0000000..1c98b3f
--- /dev/null
+++ b/cookbooks/logstash/attributes/agent.rb
@@ -0,0 +1,23 @@
+default['logstash']['agent']['version'] = '1.1.9'
+default['logstash']['agent']['source_url'] = 'https://logstash.objects.dreamhost.com/release/logstash-1.1.9-monolithic.jar'
+default['logstash']['agent']['checksum'] = 'e444e89a90583a75c2d6539e5222e2803621baa0ae94cb77dbbcebacdc0c3fc7'
+default['logstash']['agent']['install_method'] = 'jar' # Either `source` or `jar`
+default['logstash']['agent']['patterns_dir'] = 'agent/etc/patterns'
+default['logstash']['agent']['base_config'] = 'agent.conf.erb'
+default['logstash']['agent']['base_config_cookbook'] = 'logstash'
+default['logstash']['agent']['xms'] = '384M'
+default['logstash']['agent']['xmx'] = '384M'
+default['logstash']['agent']['java_opts'] = ''
+default['logstash']['agent']['gc_opts'] = '-XX:+UseParallelOldGC'
+default['logstash']['agent']['ipv4_only'] = false
+default['logstash']['agent']['debug'] = false
+
+# roles/flags for various autoconfig/discovery components
+default['logstash']['agent']['server_role'] = 'logstash_server'
+
+# for use in case recipe used w/ chef-solo, default to self
+default['logstash']['agent']['server_ipaddress'] = ''
+
+default['logstash']['agent']['inputs'] = []
+default['logstash']['agent']['filters'] = []
+default['logstash']['agent']['outputs'] = []
diff --git a/cookbooks/logstash/attributes/beaver.rb b/cookbooks/logstash/attributes/beaver.rb
new file mode 100644
index 0000000..17bea0d
--- /dev/null
+++ b/cookbooks/logstash/attributes/beaver.rb
@@ -0,0 +1,8 @@
+
+default['logstash']['beaver']['pip_package'] = "beaver==22"
+default['logstash']['beaver']['zmq']['pip_package'] = "pyzmq==2.1.11"
+default['logstash']['beaver']['server_role'] = "logstash_server"
+default['logstash']['beaver']['server_ipaddress'] = nil
+default['logstash']['beaver']['inputs'] = []
+default['logstash']['beaver']['outputs'] = []
+
diff --git a/cookbooks/logstash/attributes/default.rb b/cookbooks/logstash/attributes/default.rb
new file mode 100644
index 0000000..54eefb8
--- /dev/null
+++ b/cookbooks/logstash/attributes/default.rb
@@ -0,0 +1,27 @@
+default['logstash']['basedir'] = '/opt/logstash'
+default['logstash']['user'] = 'logstash'
+default['logstash']['group'] = 'logstash'
+default['logstash']['join_groups'] = []
+default['logstash']['log_dir'] = '/var/log/logstash'
+default['logstash']['pid_dir'] = '/var/run/logstash'
+default['logstash']['create_account'] = true
+
+# roles/flags for various search/discovery
+default['logstash']['graphite_role'] = 'graphite_server'
+default['logstash']['graphite_query'] = "roles:#{node['logstash']['graphite_role']} AND chef_environment:#{node.chef_environment}"
+default['logstash']['elasticsearch_role'] = 'elasticsearch_server'
+default['logstash']['elasticsearch_query'] = "roles:#{node['logstash']['elasticsearch_role']} AND chef_environment:#{node.chef_environment}"
+default['logstash']['elasticsearch_cluster'] = 'logstash'
+default['logstash']['elasticsearch_ip'] = ''
+default['logstash']['elasticsearch_port'] = ''
+default['logstash']['graphite_ip'] = ''
+
+default['logstash']['patterns'] = {}
+default['logstash']['install_zeromq'] = false
+
+case
+when platform_family?("rhel")
+ node.set['logstash']['zeromq_packages'] = [ "zeromq", "zeromq-devel"]
+when platform_family?("debian")
+ node.set['logstash']['zeromq_packages'] = [ "zeromq", "libzmq-dev"]
+end
diff --git a/cookbooks/logstash/attributes/index_cleaner.rb b/cookbooks/logstash/attributes/index_cleaner.rb
new file mode 100644
index 0000000..4fbeef5
--- /dev/null
+++ b/cookbooks/logstash/attributes/index_cleaner.rb
@@ -0,0 +1 @@
+default['logstash']['index_cleaner']['days_to_keep'] = 31
diff --git a/cookbooks/logstash/attributes/kibana.rb b/cookbooks/logstash/attributes/kibana.rb
new file mode 100644
index 0000000..189c3a2
--- /dev/null
+++ b/cookbooks/logstash/attributes/kibana.rb
@@ -0,0 +1,19 @@
+default['logstash']['kibana']['repo'] = 'git://github.com/rashidkpc/Kibana.git'
+default['logstash']['kibana']['sha'] = '806d9b4d7a88b102777cca8ec3cb472f3eb7b5b1'
+default['logstash']['kibana']['apache_template'] = 'kibana.conf.erb'
+default['logstash']['kibana']['basedir'] = "#{node['logstash']['basedir']}/kibana"
+default['logstash']['kibana']['log_dir'] = '/var/log/kibana'
+default['logstash']['kibana']['pid_dir'] = '/var/run/kibana'
+default['logstash']['kibana']['home'] = "#{node['logstash']['kibana']['basedir']}/current"
+default['logstash']['kibana']['config'] = 'kibana-config.php.erb'
+default['logstash']['kibana']['server_name'] = node['ipaddress']
+default['logstash']['kibana']['http_port'] = 80
+default['logstash']['kibana']['auth']['enabled'] = false
+default['logstash']['kibana']['auth']['user'] = 'admin'
+default['logstash']['kibana']['auth']['password'] = 'unauthorized'
+default['apache']['default_site_enabled'] = false
+
+#Smart_index_pattern = 'logstash-%Y.%m.%d'
+default['logstash']['kibana']['smart_index_pattern'] = 'logstash-%Y.%m.%d'
+default['logstash']['kibana']['language'] = "ruby"
+
diff --git a/cookbooks/logstash/attributes/pyshipper.rb b/cookbooks/logstash/attributes/pyshipper.rb
new file mode 100644
index 0000000..e69de29
diff --git a/cookbooks/logstash/attributes/server.rb b/cookbooks/logstash/attributes/server.rb
new file mode 100644
index 0000000..e8f3db4
--- /dev/null
+++ b/cookbooks/logstash/attributes/server.rb
@@ -0,0 +1,24 @@
+default['logstash']['server']['version'] = '1.1.9'
+default['logstash']['server']['source_url'] = 'https://logstash.objects.dreamhost.com/release/logstash-1.1.9-monolithic.jar'
+default['logstash']['server']['checksum'] = 'e444e89a90583a75c2d6539e5222e2803621baa0ae94cb77dbbcebacdc0c3fc7'
+default['logstash']['server']['install_method'] = 'jar' # Either `source` or `jar`
+default['logstash']['server']['patterns_dir'] = 'server/etc/patterns'
+default['logstash']['server']['base_config'] = 'server.conf.erb'
+default['logstash']['server']['base_config_cookbook'] = 'logstash'
+default['logstash']['server']['xms'] = '1024M'
+default['logstash']['server']['xmx'] = '1024M'
+default['logstash']['server']['java_opts'] = ''
+default['logstash']['server']['gc_opts'] = '-XX:+UseParallelOldGC'
+default['logstash']['server']['ipv4_only'] = false
+default['logstash']['server']['debug'] = false
+default['logstash']['server']['home'] = '/opt/logstash/server'
+default['logstash']['server']['install_rabbitmq'] = true
+
+# roles/flags for various autoconfig/discovery components
+default['logstash']['server']['enable_embedded_es'] = true
+
+default['logstash']['server']['inputs'] = []
+default['logstash']['server']['filters'] = []
+default['logstash']['server']['outputs'] = []
+
+
diff --git a/cookbooks/logstash/attributes/source.rb b/cookbooks/logstash/attributes/source.rb
new file mode 100644
index 0000000..106c0e9
--- /dev/null
+++ b/cookbooks/logstash/attributes/source.rb
@@ -0,0 +1,3 @@
+default['logstash']['source']['repo'] = 'git://github.com/logstash/logstash.git'
+default['logstash']['source']['sha'] = nil
+default['logstash']['source']['java_home'] = '/usr/lib/jvm/java-6-openjdk/' # openjdk6 on ubuntu
diff --git a/cookbooks/logstash/files/default/haproxy b/cookbooks/logstash/files/default/haproxy
new file mode 100644
index 0000000..35674a5
--- /dev/null
+++ b/cookbooks/logstash/files/default/haproxy
@@ -0,0 +1,3 @@
+# a fixup for haproxy, should be merged into https://github.com/logstash/logstash/blob/master/patterns/haproxy
+HAPROXYCAPTUREDREQUESTHEADERS %{NOTSPACE:request_header_host}\|%{DATA:request_header_x_forwarded_for}\|%{DATA:request_header_accept_language}\|%{NOTSPACE:request_header_referer}\|%{DATA:request_header_user_agent}
+HAPROXYCAPTUREDRESPONSEHEADERS %{DATA:response_header_content_type}\|%{DATA:response_header_content_encoding}\|%{DATA:response_header_cache_control}\|%{DATA:response_header_last_modified}
\ No newline at end of file
diff --git a/cookbooks/logstash/files/default/logstash_index_cleaner.py b/cookbooks/logstash/files/default/logstash_index_cleaner.py
new file mode 100644
index 0000000..d700957
--- /dev/null
+++ b/cookbooks/logstash/files/default/logstash_index_cleaner.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+#
+# Deletes all indices with a datestamp older than "days-to-keep" for daily
+# if you have hourly indices, it will delete all of those older than "hours-to-keep"
+#
+# This script presumes an index is named typically, e.g. logstash-YYYY.MM.DD
+# It will work with any name-YYYY.MM.DD or name-YYYY.MM.DD.HH type sequence
+#
+# Requires python and the following dependencies (all pip/easy_installable):
+#
+# pyes (python elasticsearch bindings, which might need simplejson)
+# argparse (built-in in python2.7 and higher, python 2.6 and lower will have to easy_install it)
+#
+# TODO: Proper logging instead of just print statements, being able to configure a decent logging level.
+# Unit tests. The code is somewhat broken up into logical parts that may be tested separately.
+# Better error reporting?
+# Improve the get_index_epoch method to parse more date formats. Consider renaming (to "parse_date_to_timestamp"?)
+
+import sys
+import time
+import argparse
+from datetime import timedelta
+
+import pyes
+
+
+__version__ = '0.1.1'
+
+
+def make_parser():
+ """ Creates an ArgumentParser to parse the command line options. """
+ parser = argparse.ArgumentParser(description='Delete old logstash indices from Elasticsearch.')
+
+ parser.add_argument('-v', '--version', action='version', version='%(prog)s '+__version__)
+
+ parser.add_argument('--host', help='Elasticsearch host.', default='localhost')
+ parser.add_argument('--port', help='Elasticsearch port', default=9200, type=int)
+ parser.add_argument('-t', '--timeout', help='Elasticsearch timeout', default=30, type=int)
+
+ parser.add_argument('-p', '--prefix', help='Prefix for the indices. Indices that do not have this prefix are skipped.', default='logstash-')
+ parser.add_argument('-s', '--separator', help='Time unit separator', default='.')
+
+ parser.add_argument('-H', '--hours-to-keep', action='store', help='Number of hours to keep.', type=int)
+ parser.add_argument('-d', '--days-to-keep', action='store', help='Number of days to keep.', type=int)
+
+ parser.add_argument('-n', '--dry-run', action='store_true', help='If true, does not perform any changes to the Elasticsearch indices.', default=False)
+
+ return parser
+
+
+def get_index_epoch(index_timestamp, separator='.'):
+ """ Gets the epoch of the index.
+
+ :param index_timestamp: A string on the format YYYY.MM.DD[.HH]
+ :return The creation time (epoch) of the index.
+ """
+ year_month_day_optionalhour = index_timestamp.split(separator)
+ if len(year_month_day_optionalhour) == 3:
+ year_month_day_optionalhour.append('3')
+
+ return time.mktime([int(part) for part in year_month_day_optionalhour] + [0,0,0,0,0])
+
+
+def find_expired_indices(connection, days_to_keep=None, hours_to_keep=None, separator='.', prefix='logstash-', out=sys.stdout, err=sys.stderr):
+ """ Generator that yields expired indices.
+
+ :return: Yields tuples on the format ``(index_name, expired_by)`` where index_name
+ is the name of the expired index and expired_by is the number of seconds (a float value) that the
+ index was expired by.
+ """
+ utc_now_time = time.time() + time.altzone
+ days_cutoff = utc_now_time - days_to_keep * 24 * 60 * 60 if days_to_keep is not None else None
+ hours_cutoff = utc_now_time - hours_to_keep * 60 * 60 if hours_to_keep is not None else None
+
+ for index_name in sorted(set(connection.get_indices().keys())):
+ if not index_name.startswith(prefix):
+ print >> out, 'Skipping index due to missing prefix {0}: {1}'.format(prefix, index_name)
+ continue
+
+ unprefixed_index_name = index_name[len(prefix):]
+
+ # find the timestamp parts (i.e ['2011', '01', '05'] from '2011.01.05') using the configured separator
+ parts = unprefixed_index_name.split(separator)
+
+ # perform some basic validation
+ if len(parts) < 3 or len(parts) > 4 or not all([item.isdigit() for item in parts]):
+ print >> err, 'Could not find a valid timestamp from the index: {0}'.format(index_name)
+ continue
+
+ # find the cutoff. if we have more than 3 parts in the timestamp, the timestamp includes the hours and we
+ # should compare it to the hours_cutoff, otherwise, we should use the days_cutoff
+ cutoff = hours_cutoff
+ if len(parts) == 3:
+ cutoff = days_cutoff
+
+ # but the cutoff might be none, if the current index only has three parts (year.month.day) and we're only
+ # removing hourly indices:
+ if cutoff is None:
+ print >> out, 'Skipping {0} because it is of a type (hourly or daily) that I\'m not asked to delete.'.format(index_name)
+ continue
+
+ index_epoch = get_index_epoch(unprefixed_index_name)
+
+ # if the index is older than the cutoff
+ if index_epoch < cutoff:
+ yield index_name, cutoff-index_epoch
+
+ else:
+ print >> out, '{0} is {1} above the cutoff.'.format(index_name, timedelta(seconds=index_epoch-cutoff))
+
+
+def main():
+ start = time.time()
+
+ parser = make_parser()
+ arguments = parser.parse_args()
+
+ if not arguments.hours_to_keep and not arguments.days_to_keep:
+ print >> sys.stderr, 'Invalid arguments: You must specify either the number of hours or the number of days to keep.'
+ parser.print_help()
+ return
+
+ connection = pyes.ES('{0}:{1}'.format(arguments.host, arguments.port), timeout=arguments.timeout)
+
+ if arguments.days_to_keep:
+ print 'Deleting daily indices older than {0} days.'.format(arguments.days_to_keep)
+ if arguments.hours_to_keep:
+ print 'Deleting hourly indices older than {0} hours.'.format(arguments.hours_to_keep)
+
+ print ''
+
+ for index_name, expired_by in find_expired_indices(connection, arguments.days_to_keep, arguments.hours_to_keep, arguments.separator, arguments.prefix):
+ expiration = timedelta(seconds=expired_by)
+
+ if arguments.dry_run:
+ print 'Would have attempted deleting index {0} because it is {1} older than the calculated cutoff.'.format(index_name, expiration)
+ continue
+
+ print 'Deleting index {0} because it was {1} older than cutoff.'.format(index_name, expiration)
+
+ deletion = connection.delete_index_if_exists(index_name)
+ # ES returns a dict on the format {u'acknowledged': True, u'ok': True} on success.
+ if deletion.get('ok'):
+ print 'Successfully deleted index: {0}'.format(index_name)
+ else:
+ print 'Error deleting index: {0}. ({1})'.format(index_name, deletion)
+
+ print ''
+ print 'Done in {0}.'.format(timedelta(seconds=time.time()-start))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/cookbooks/logstash/libraries/logstash_conf.rb b/cookbooks/logstash/libraries/logstash_conf.rb
new file mode 100644
index 0000000..374cfae
--- /dev/null
+++ b/cookbooks/logstash/libraries/logstash_conf.rb
@@ -0,0 +1,68 @@
+
+require 'rubygems'
+
+class Erubis::RubyEvaluator::LogstashConf
+
+ private
+
+ def self.key_to_str(k)
+ case k.class.to_s
+ when "String"
+ return "'#{k}'"
+ when "Fixnum", "Float"
+ return k.to_s
+ when "Regexp"
+ return k.inspect
+ end
+ return k
+ end
+
+ def self.value_to_str(v)
+ case v.class.to_s
+ when "String", "Symbol", "Fixnum", "Float"
+ "'#{v}'"
+ when "Array"
+ "[#{v.map{|e| value_to_str e}.join(", ")}]"
+ when "Hash", "Mash"
+ value_to_str(v.to_a.flatten)
+ when "TrueClass", "FalseClass"
+ v.to_s
+ else
+ v.inspect
+ end
+ end
+
+ def self.key_value_to_str(k, v)
+ unless v.nil?
+ key_to_str(k) + " => " + value_to_str(v)
+ else
+ key_to_str(k)
+ end
+ end
+
+ public
+
+ def self.section_to_str(section, version=nil, patterns_dir=nil)
+ result = []
+ patterns_dir_plugins = [ 'grok' ]
+ unless version.nil?
+ patterns_dir_plugins << 'multiline' if Gem::Version.new(version) >= Gem::Version.new('1.1.2')
+ end
+ section.each do |output|
+ output.sort.each do |name, hash|
+ result << ''
+ result << ' ' + name.to_s + ' {'
+ if patterns_dir_plugins.include?(name.to_s) and not patterns_dir.nil? and not hash.has_key?('patterns_dir')
+ result << ' ' + key_value_to_str('patterns_dir', patterns_dir)
+ end
+ hash.sort.each do |k,v|
+ result << ' ' + key_value_to_str(k, v)
+ end
+ result << ' }'
+ end
+ end
+ return result.join("\n")
+ end
+
+end
+
diff --git a/cookbooks/logstash/metadata.rb b/cookbooks/logstash/metadata.rb
new file mode 100644
index 0000000..69537d0
--- /dev/null
+++ b/cookbooks/logstash/metadata.rb
@@ -0,0 +1,19 @@
+name "logstash"
+maintainer "John E. Vincent"
+maintainer_email "lusis.org+github.com@gmail.com"
+license "Apache 2.0"
+description "Installs/Configures logstash"
+long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
+version "0.5.9"
+
+%w{ ubuntu debian redhat centos scientific amazon fedora }.each do |os|
+ supports os
+end
+
+%w{ apache2 php build-essential git rbenv runit python java ant logrotate rabbitmq yumrepo }.each do |ckbk|
+ depends ckbk
+end
+
+%w{ yumrepo apt }.each do |ckbk|
+ recommends ckbk
+end
diff --git a/cookbooks/logstash/recipes/agent.rb b/cookbooks/logstash/recipes/agent.rb
new file mode 100644
index 0000000..8f31317
--- /dev/null
+++ b/cookbooks/logstash/recipes/agent.rb
@@ -0,0 +1,184 @@
+#
+# Cookbook Name:: logstash
+# Recipe:: agent
+#
+#
+include_recipe "logstash::default"
+
+if node['logstash']['agent']['patterns_dir'][0] == '/'
+ patterns_dir = node['logstash']['agent']['patterns_dir']
+else
+ patterns_dir = node['logstash']['basedir'] + '/' + node['logstash']['agent']['patterns_dir']
+end
+
+if node['logstash']['install_zeromq']
+ case
+ when platform_family?("rhel")
+ include_recipe "yumrepo::zeromq"
+ when platform_family?("debian")
+ apt_repository "zeromq-ppa" do
+ uri "http://ppa.launchpad.net/chris-lea/zeromq/ubuntu"
+ distribution node['lsb']['codename']
+ components ["main"]
+ keyserver "keyserver.ubuntu.com"
+ key "C7917B12"
+ action :add
+ end
+ apt_repository "libpgm-ppa" do
+ uri "http://ppa.launchpad.net/chris-lea/libpgm/ubuntu"
+ distribution node['lsb']['codename']
+ components ["main"]
+ keyserver "keyserver.ubuntu.com"
+ key "C7917B12"
+ action :add
+ notifies :run, "execute[apt-get update]", :immediately
+ end
+ end
+ node['logstash']['zeromq_packages'].each {|p| package p }
+end
+
+# check if running chef-solo. If not, detect the logstash server/ip by role. If I can't do that, fall back to using ['logstash']['agent']['server_ipaddress']
+if Chef::Config[:solo]
+ logstash_server_ip = node['logstash']['agent']['server_ipaddress']
+else
+ logstash_server_results = search(:node, "roles:#{node['logstash']['agent']['server_role']}")
+ unless logstash_server_results.empty?
+ logstash_server_ip = logstash_server_results[0]['ipaddress']
+ else
+ logstash_server_ip = node['logstash']['agent']['server_ipaddress']
+ end
+end
+
+directory "#{node['logstash']['basedir']}/agent" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+%w{bin etc lib tmp log}.each do |ldir|
+ directory "#{node['logstash']['basedir']}/agent/#{ldir}" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ end
+
+ link "/var/lib/logstash/#{ldir}" do
+ to "#{node['logstash']['basedir']}/agent/#{ldir}"
+ end
+end
+
+directory "#{node['logstash']['basedir']}/agent/etc/conf.d" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+directory patterns_dir do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+node['logstash']['patterns'].each do |file, hash|
+ template_name = patterns_dir + '/' + file
+ template template_name do
+ source 'patterns.erb'
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ variables( :patterns => hash )
+ mode '0644'
+ notifies :restart, 'service[logstash_agent]'
+ end
+end
+
+if platform_family? "debian"
+ if ["12.04", "12.10"].include? node["platform_version"]
+ template "/etc/init/logstash_agent.conf" do
+ mode "0644"
+ source "logstash_agent.conf.erb"
+ end
+
+ service "logstash_agent" do
+ provider Chef::Provider::Service::Upstart
+ action [ :enable, :start ]
+ end
+ else
+ runit_service "logstash_agent"
+ end
+elsif platform_family? "rhel", "fedora"
+ template "/etc/init.d/logstash_agent" do
+ source "init.erb"
+ owner "root"
+ group "root"
+ mode "0774"
+ variables(
+ :config_file => "shipper.conf",
+ :name => 'agent',
+ :max_heap => node['logstash']['agent']['xmx'],
+ :min_heap => node['logstash']['agent']['xms']
+ )
+ end
+
+ service "logstash_agent" do
+ supports :restart => true, :reload => true, :status => true
+ action :enable
+ end
+end
+
+if node['logstash']['agent']['install_method'] == "jar"
+ remote_file "#{node['logstash']['basedir']}/agent/lib/logstash-#{node['logstash']['agent']['version']}.jar" do
+ owner "root"
+ group "root"
+ mode "0755"
+ source node['logstash']['agent']['source_url']
+ checksum node['logstash']['agent']['checksum']
+ action :create_if_missing
+ end
+
+ link "#{node['logstash']['basedir']}/agent/lib/logstash.jar" do
+ to "#{node['logstash']['basedir']}/agent/lib/logstash-#{node['logstash']['agent']['version']}.jar"
+ notifies :restart, "service[logstash_agent]"
+ end
+else
+ include_recipe "logstash::source"
+
+ logstash_version = node['logstash']['source']['sha'] || "v#{node['logstash']['server']['version']}"
+ link "#{node['logstash']['basedir']}/agent/lib/logstash.jar" do
+ to "#{node['logstash']['basedir']}/source/build/logstash-#{logstash_version}-monolithic.jar"
+ notifies :restart, "service[logstash_agent]"
+ end
+end
+
+template "#{node['logstash']['basedir']}/agent/etc/shipper.conf" do
+ source node['logstash']['agent']['base_config']
+ cookbook node['logstash']['agent']['base_config_cookbook']
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0644"
+ variables(
+ :logstash_server_ip => logstash_server_ip,
+ :patterns_dir => patterns_dir)
+ notifies :restart, "service[logstash_agent]"
+end
+
+directory node['logstash']['log_dir'] do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ recursive true
+end
+
+logrotate_app "logstash" do
+ path "#{node['logstash']['log_dir']}/*.log"
+ frequency "daily"
+ rotate "30"
+ options [ "missingok", "notifempty" ]
+ create "664 #{node['logstash']['user']} #{node['logstash']['group']}"
+ notifies :restart, "service[rsyslog]"
+end
+
diff --git a/cookbooks/logstash/recipes/beaver.rb b/cookbooks/logstash/recipes/beaver.rb
new file mode 100644
index 0000000..bc7194e
--- /dev/null
+++ b/cookbooks/logstash/recipes/beaver.rb
@@ -0,0 +1,234 @@
+#
+# Cookbook Name:: logstash
+# Recipe:: beaver
+#
+#
+include_recipe "logstash::default"
+include_recipe "python::default"
+include_recipe "logrotate"
+
+if node['logstash']['agent']['install_zeromq']
+ case
+ when platform_family?("rhel")
+ include_recipe "yumrepo::zeromq"
+ when platform_family?("debian")
+ apt_repository "zeromq-ppa" do
+ uri "http://ppa.launchpad.net/chris-lea/zeromq/ubuntu"
+ distribution node['lsb']['codename']
+ components ["main"]
+ keyserver "keyserver.ubuntu.com"
+ key "C7917B12"
+ action :add
+ end
+ apt_repository "libpgm-ppa" do
+ uri "http://ppa.launchpad.net/chris-lea/libpgm/ubuntu"
+ distribution node['lsb']['codename']
+ components ["main"]
+ keyserver "keyserver.ubuntu.com"
+ key "C7917B12"
+ action :add
+ notifies :run, "execute[apt-get update]", :immediately
+ end
+ end
+ node['logstash']['zeromq_packages'].each {|p| package p }
+ python_pip node['logstash']['beaver']['zmq']['pip_package'] do
+ action :install
+ end
+end
+
+package 'git'
+
+basedir = node['logstash']['basedir'] + '/beaver'
+
+conf_file = "#{basedir}/etc/beaver.conf"
+log_file = "#{node['logstash']['log_dir']}/logstash_beaver.log"
+pid_file = "#{node['logstash']['pid_dir']}/logstash_beaver.pid"
+
+logstash_server_ip = nil
+if Chef::Config[:solo]
+ logstash_server_ip = node['logstash']['beaver']['server_ipaddress'] if node['logstash']['beaver']['server_ipaddress']
+elsif !node['logstash']['beaver']['server_ipaddress'].nil?
+ logstash_server_ip = node['logstash']['beaver']['server_ipaddress']
+elsif node['logstash']['beaver']['server_role']
+ logstash_server_results = search(:node, "roles:#{node['logstash']['beaver']['server_role']}")
+ unless logstash_server_results.empty?
+ logstash_server_ip = logstash_server_results[0]['ipaddress']
+ end
+end
+
+
+# create some needed directories and files
+directory basedir do
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ recursive true
+end
+
+[
+ File.dirname(conf_file),
+ File.dirname(log_file),
+ File.dirname(pid_file),
+].each do |dir|
+ directory dir do
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ recursive true
+ not_if do ::File.exists?(dir) end
+ end
+end
+
+[ log_file, pid_file ].each do |f|
+ file f do
+ action :touch
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode '0640'
+ end
+end
+
+python_pip node['logstash']['beaver']['pip_package'] do
+ action :install
+end
+
+# inputs
+files = []
+node['logstash']['beaver']['inputs'].each do |ins|
+ ins.each do |name, hash|
+ case name
+ when "file" then
+ if hash.has_key?('path')
+ files << hash
+ else
+ log("input file has no path.") { level :warn }
+ end
+ else
+ log("input type not supported: #{name}") { level :warn }
+ end
+ end
+end
+
+# outputs
+outputs = []
+conf = {}
+node['logstash']['beaver']['outputs'].each do |outs|
+ outs.each do |name, hash|
+ case name
+ when "rabbitmq", "amqp" then
+ outputs << "rabbitmq"
+ host = hash['host'] || logstash_server_ip || 'localhost'
+ conf['rabbitmq_host'] = hash['host'] if hash.has_key?('host')
+ conf['rabbitmq_port'] = hash['port'] if hash.has_key?('port')
+ conf['rabbitmq_vhost'] = hash['vhost'] if hash.has_key?('vhost')
+ conf['rabbitmq_username'] = hash['user'] if hash.has_key?('user')
+ conf['rabbitmq_password'] = hash['password'] if hash.has_key?('password')
+ conf['rabbitmq_queue'] = hash['queue'] if hash.has_key?('queue')
+ conf['rabbitmq_exchange_type'] = hash['rabbitmq_exchange_type'] if hash.has_key?('rabbitmq_exchange_type')
+ conf['rabbitmq_exchange'] = hash['exchange'] if hash.has_key?('exchange')
+ conf['rabbitmq_exchange_durable'] = hash['durable'] if hash.has_key?('durable')
+ conf['rabbitmq_key'] = hash['key'] if hash.has_key?('key')
+ when "redis" then
+ outputs << "redis"
+ host = hash['host'] || logstash_server_ip || 'localhost'
+ port = hash['port'] || '6379'
+ db = hash['db'] || '0'
+ conf['redis_url'] = "redis://#{host}:#{port}/#{db}"
+ conf['redis_namespace'] = hash['key'] if hash.has_key?('key')
+ when "stdout" then
+ outputs << "stdout"
+ when "zmq", "zeromq" then
+ outputs << "zmq"
+ host = hash['host'] || logstash_server_ip || 'localhost'
+ port = hash['port'] || '2120'
+ conf['zeromq_address'] = "tcp://#{host}:#{port}"
+ else
+ log("output type not supported: #{name}") { level :warn }
+ end
+ end
+end
+
+output = outputs[0]
+if outputs.length > 1
+ log("multiple outputs detected, will consider only the first: #{output}") { level :warn }
+end
+
+cmd = "beaver -t #{output} -c #{conf_file}"
+
+template conf_file do
+ source 'beaver.conf.erb'
+ mode 0640
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ variables(
+ :conf => conf,
+ :files => files
+ )
+ notifies :restart, "service[logstash_beaver]"
+end
+
+# use upstart when supported to get nice things like automatic respawns
+use_upstart = false
+supports_setuid = false
+case node['platform_family']
+when "rhel"
+ if node['platform_version'].to_i >= 6
+ use_upstart = true
+ end
+when "fedora"
+ if node['platform_version'].to_i >= 9
+ use_upstart = true
+ end
+when "ubuntu"
+ use_upstart = true
+ if node['platform_version'].to_f >= 12.04
+ supports_setuid = true
+ end
+end
+
+if use_upstart
+ template "/etc/init/logstash_beaver.conf" do
+ mode "0644"
+ source "logstash_beaver.conf.erb"
+ variables(
+ :cmd => cmd,
+ :group => node['logstash']['group'],
+ :user => node['logstash']['user'],
+ :log => log_file,
+ :supports_setuid => supports_setuid
+ )
+ notifies :restart, "service[logstash_beaver]"
+ end
+
+ service "logstash_beaver" do
+ supports :restart => true, :reload => false
+ action [:enable, :start]
+ provider Chef::Provider::Service::Upstart
+ end
+else
+ service "logstash_beaver" do
+ supports :restart => true, :reload => false, :status => true
+ action [:enable, :start]
+ end
+
+ template "/etc/init.d/logstash_beaver" do
+ mode "0755"
+ source "init-beaver.erb"
+ variables(
+ :cmd => cmd,
+ :pid_file => pid_file,
+ :user => node['logstash']['user'],
+ :log => log_file,
+ :platform => node['platform']
+ )
+ notifies :restart, "service[logstash_beaver]"
+ end
+end
+
+logrotate_app "logstash_beaver" do
+ cookbook "logrotate"
+ path log_file
+ frequency "daily"
+ postrotate "invoke-rc.d logstash_beaver force-reload >/dev/null 2>&1 || true"
+ options [ "missingok", "notifempty" ]
+ rotate 30
+ create "0440 #{node['logstash']['user']} #{node['logstash']['group']}"
+end
diff --git a/cookbooks/logstash/recipes/default.rb b/cookbooks/logstash/recipes/default.rb
new file mode 100644
index 0000000..c404dd3
--- /dev/null
+++ b/cookbooks/logstash/recipes/default.rb
@@ -0,0 +1,39 @@
+#
+# Cookbook Name:: logstash
+# Recipe:: default
+#
+include_recipe "runit" unless node["platform_version"] == "12.04"
+include_recipe "java"
+
+if node['logstash']['create_account']
+
+ group node['logstash']['group'] do
+ system true
+ end
+
+ user node['logstash']['user'] do
+ group node['logstash']['group']
+ home "/var/lib/logstash"
+ system true
+ action :create
+ manage_home true
+ end
+
+end
+
+directory node['logstash']['basedir'] do
+ action :create
+ owner "root"
+ group "root"
+ mode "0755"
+end
+
+node['logstash']['join_groups'].each do |grp|
+ group grp do
+ members node['logstash']['user']
+ action :modify
+ append true
+ only_if "grep -q '^#{grp}:' /etc/group"
+ end
+end
+
diff --git a/cookbooks/logstash/recipes/haproxy.rb b/cookbooks/logstash/recipes/haproxy.rb
new file mode 100644
index 0000000..fe6dc39
--- /dev/null
+++ b/cookbooks/logstash/recipes/haproxy.rb
@@ -0,0 +1,43 @@
+# this recipe lets you output haproxy logs to file as if they were apache
+# virtual logs, in order to interface with legacy traffic measuring
+# applications like AWstats
+# Also requires changes to your haproxy configuration
+# and a file output on your logstash_server
+# I have no idea if it meets anyone's needs other than my own
+# only for those crazy enough to replace apache or Nginx as their
+# main front-end server - Bryan W. Berry 28 June 2012
+
+include_recipe "logrotate"
+
+directory "#{node['logstash']['basedir']}/server/apache_logs" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+link "/var/lib/logstash/apache_logs" do
+ to "#{node['logstash']['basedir']}/server/apache_logs"
+end
+
+directory "/opt/logstash/server/etc/patterns" do
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0774"
+end
+
+# create pattern_file for haproxy
+cookbook_file "/opt/logstash/server/etc/patterns/haproxy" do
+ source "haproxy"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0774"
+end
+
+# set logrotate for /opt/logstash/server/apache_logs
+logrotate_app "apache_logs" do
+ path node['logstash']['server']['logrotate_target']
+ frequency "daily"
+ create "664 #{node['logstash']['user']} #{node['logstash']['group']}"
+ rotate "30"
+end
diff --git a/cookbooks/logstash/recipes/index_cleaner.rb b/cookbooks/logstash/recipes/index_cleaner.rb
new file mode 100644
index 0000000..b3d3967
--- /dev/null
+++ b/cookbooks/logstash/recipes/index_cleaner.rb
@@ -0,0 +1,31 @@
+include_recipe "python::pip"
+
+python_pip "pyes" do
+ action :install
+end
+
+directory "#{node['logstash']['basedir']}/index_cleaner" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+cookbook_file "#{node['logstash']['basedir']}/index_cleaner/logstash_index_cleaner.py" do
+ source "logstash_index_cleaner.py"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0774"
+end
+
+# FIXME: http://tickets.opscode.com/browse/CHEF-3547
+file "#{node['logstash']['basedir']}/index_cleaner/logstash_index_cleaner.py" do
+ mode "0744"
+ action :touch
+ only_if { Chef::VERSION == "10.16.0" }
+end
+
+execute "index_cleaner" do
+ cwd "#{node['logstash']['basedir']}/index_cleaner"
+ command "./logstash_index_cleaner.py -d #{node['logstash']['index_cleaner']['days_to_keep']}"
+end
diff --git a/cookbooks/logstash/recipes/kibana.rb b/cookbooks/logstash/recipes/kibana.rb
new file mode 100644
index 0000000..5f823d5
--- /dev/null
+++ b/cookbooks/logstash/recipes/kibana.rb
@@ -0,0 +1,215 @@
+include_recipe "git"
+include_recipe "logrotate"
+
+kibana_base = node['logstash']['kibana']['basedir']
+kibana_home = node['logstash']['kibana']['home']
+kibana_log_dir = node['logstash']['kibana']['log_dir']
+kibana_pid_dir = node['logstash']['kibana']['pid_dir']
+
+include_recipe "rbenv::default"
+include_recipe "rbenv::ruby_build"
+
+rbenv_ruby "1.9.3-p194" do
+ global true
+end
+
+rbenv_gem "bundler" do
+ ruby_version "1.9.3-p194"
+end
+
+if Chef::Config[:solo]
+ es_server_ip = node['logstash']['elasticsearch_ip']
+else
+ es_server_results = search(:node, "roles:#{node['logstash']['elasticsearch_role']} AND chef_environment:#{node.chef_environment}")
+ unless es_server_results.empty?
+ es_server_ip = es_server_results[0]['ipaddress']
+ else
+ es_server_ip = node['logstash']['elasticsearch_ip'].empty? ? '127.0.0.1' : node['logstash']['elasticsearch_ip']
+ end
+end
+
+es_server_port = node['logstash']['elasticsearch_port'].empty? ? '9200' : node['logstash']['elasticsearch_port']
+
+#install new kibana version only if is true
+case node['logstash']['kibana']['language'].downcase
+when "ruby"
+
+ user "kibana" do
+ supports :manage_home => true
+ home "/home/kibana"
+ shell "/bin/bash"
+ end
+
+ node.set[:rbenv][:group_users] = [ "kibana" ]
+
+ [ kibana_pid_dir, kibana_log_dir ].each do |dir|
+ Chef::Log.debug(dir)
+ directory dir do
+ owner 'kibana'
+ group 'kibana'
+ recursive true
+ end
+ end
+
+ Chef::Log.debug(kibana_base)
+ directory kibana_base do
+ owner 'kibana'
+ group 'kibana'
+ recursive true
+ end
+
+ # for some annoying reason Gemfile.lock is shipped w/ kibana
+ file "gemfile_lock" do
+ path "#{node['logstash']['kibana']['basedir']}/#{node['logstash']['kibana']['sha']}/Gemfile.lock"
+ action :delete
+ end
+
+ git "#{node['logstash']['kibana']['basedir']}/#{node['logstash']['kibana']['sha']}" do
+ repository node['logstash']['kibana']['repo']
+ branch "kibana-ruby"
+ action :sync
+ user 'kibana'
+ group 'kibana'
+ notifies :delete, "file[gemfile_lock]", :immediately
+ end
+
+ link kibana_home do
+ to "#{node['logstash']['kibana']['basedir']}/#{node['logstash']['kibana']['sha']}"
+ end
+
+ template '/home/kibana/.bash_profile' do # let bash handle our env vars
+ source 'kibana-bash_profile.erb'
+ owner 'kibana'
+ group 'kibana'
+ variables(
+ :pid_dir => kibana_pid_dir,
+ :log_dir => kibana_log_dir,
+ :app_name => "kibana",
+ :kibana_port => node['logstash']['kibana']['http_port'],
+ :smart_index => node['logstash']['kibana']['smart_index_pattern'],
+ :es_ip => es_server_ip,
+ :es_port => es_server_port,
+ :server_name => node['logstash']['kibana']['server_name']
+ )
+ end
+
+ template "/etc/init.d/kibana" do
+ source "kibana.init.erb"
+ owner 'root'
+ mode "755"
+ variables(
+ :kibana_home => kibana_home,
+ :user => 'kibana'
+ )
+ end
+
+ template "#{kibana_home}/KibanaConfig.rb" do
+ source "kibana-config.rb.erb"
+ owner 'kibana'
+ mode 0755
+ end
+
+ template "#{kibana_home}/kibana-daemon.rb" do
+ source "kibana-daemon.rb.erb"
+ owner 'kibana'
+ mode 0755
+ end
+
+ bash "bundle install" do
+ cwd kibana_home
+ code "source /etc/profile.d/rbenv.sh && bundle install"
+ not_if { ::File.exists? "#{kibana_home}/Gemfile.lock" }
+ end
+
+
+ service "kibana" do
+ supports :status => true, :restart => true
+ action [:enable, :start]
+ subscribes :restart, [ "link[#{kibana_home}]", "template[#{kibana_home}/KibanaConfig.rb]", "template[#{kibana_home}/kibana-daemon.rb]" ]
+ end
+
+ logrotate_app "kibana" do
+ cookbook "logrotate"
+ path "/var/log/kibana/kibana.output"
+ frequency "daily"
+ options [ "missingok", "notifempty" ]
+ rotate 30
+ create "644 kibana kibana"
+ end
+
+when "php"
+
+ include_recipe "apache2"
+ include_recipe "apache2::mod_php5"
+ include_recipe "php::module_curl"
+
+ kibana_version = node['logstash']['kibana']['sha']
+
+ apache_module "php5" do
+ action :enable
+ end
+
+ apache_site "default" do
+ enable false
+ end
+
+ directory "#{node['logstash']['basedir']}/kibana/#{kibana_version}" do
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ recursive true
+ end
+
+ git "#{node['logstash']['basedir']}/kibana/#{kibana_version}" do
+ repository node['logstash']['kibana']['repo']
+ reference kibana_version
+ action :sync
+ user node['logstash']['user']
+ group node['logstash']['group']
+ end
+
+ if platform? "redhat", "centos", "amazon", "fedora", "scientific"
+ arch = node['kernel']['machine'] == "x86_64" ? "64" : ""
+ file '/etc/httpd/mods-available/php5.load' do
+ content "LoadModule php5_module /usr/lib#{arch}/httpd/modules/libphp5.so"
+ end
+ end
+
+ link "#{node['logstash']['basedir']}/kibana/current" do
+ to "#{node['logstash']['basedir']}/kibana/#{kibana_version}"
+ notifies :restart, "service[apache2]"
+ end
+
+ template "#{node['apache']['dir']}/sites-available/kibana" do
+ source node['logstash']['kibana']['apache_template']
+ variables(:docroot => "#{node['logstash']['basedir']}/kibana/current",
+ :server_name => node['logstash']['kibana']['server_name'])
+ end
+
+ apache_site "kibana"
+
+ template "#{node['logstash']['basedir']}/kibana/current/config.php" do
+ source node['logstash']['kibana']['config']
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0755"
+ variables(:es_server_ip => es_server_ip)
+ end
+
+ if node['logstash']['kibana']['auth']['enabled']
+ htpasswd_path = "#{node['logstash']['basedir']}/kibana/#{kibana_version}/htpasswd"
+ htpasswd_user = node['logstash']['kibana']['auth']['user']
+ htpasswd_password = node['logstash']['kibana']['auth']['password']
+
+ file htpasswd_path do
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0755"
+ end
+
+ execute "add htpasswd file" do
+ command "/usr/bin/htpasswd -b #{htpasswd_path} #{htpasswd_user} #{htpasswd_password}"
+ end
+ end
+ service "apache2"
+
+end
diff --git a/cookbooks/logstash/recipes/pyshipper.rb b/cookbooks/logstash/recipes/pyshipper.rb
new file mode 100644
index 0000000..245f8ad
--- /dev/null
+++ b/cookbooks/logstash/recipes/pyshipper.rb
@@ -0,0 +1,26 @@
+#
+# Author:: John E. Vincent
+# Copyright 2012, John E. Vincent
+# License: Apache 2.0
+# Cookbook Name:: logstash
+# Recipe:: pyshipper
+#
+#
+include_recipe "build-essential"
+include_recipe "logstash::default"
+include_recipe "python::pip"
+include_recipe "git"
+
+package "python-dev"
+
+git "#{node['logstash']['basedir']}/shipper" do
+ repository "git://github.com/lusis/logstash-shipper.git"
+ reference "master"
+ action :sync
+end
+
+%w{pyzmq-static simplejson argparse}.each do |ppkg|
+ python_pip ppkg do
+ action :install
+ end
+end
diff --git a/cookbooks/logstash/recipes/server.rb b/cookbooks/logstash/recipes/server.rb
new file mode 100644
index 0000000..382e9df
--- /dev/null
+++ b/cookbooks/logstash/recipes/server.rb
@@ -0,0 +1,179 @@
+#
+# Author:: John E. Vincent
+# Author:: Bryan W. Berry ()
+# Copyright 2012, John E. Vincent
+# Copyright 2012, Bryan W. Berry
+# License: Apache 2.0
+# Cookbook Name:: logstash
+# Recipe:: server
+#
+#
+
+include_recipe "logstash::default"
+include_recipe "logrotate"
+
+include_recipe "rabbitmq" if node['logstash']['server']['install_rabbitmq']
+
+if node['logstash']['install_zeromq']
+ include_recipe "yumrepo::zeromq" if platform_family?("rhel")
+ node['logstash']['zeromq_packages'].each {|p| package p }
+end
+
+if node['logstash']['server']['patterns_dir'][0] == '/'
+ patterns_dir = node['logstash']['server']['patterns_dir']
+else
+ patterns_dir = node['logstash']['basedir'] + '/' + node['logstash']['server']['patterns_dir']
+end
+
+if Chef::Config[:solo]
+ es_server_ip = node['logstash']['elasticsearch_ip']
+ graphite_server_ip = node['logstash']['graphite_ip']
+else
+ es_results = search(:node, node['logstash']['elasticsearch_query'])
+ graphite_results = search(:node, node['logstash']['graphite_query'])
+
+ unless es_results.empty?
+ es_server_ip = es_results[0]['ipaddress']
+ else
+ es_server_ip = node['logstash']['elasticsearch_ip']
+ end
+
+ unless graphite_results.empty?
+ graphite_server_ip = graphite_results[0]['ipaddress']
+ else
+ graphite_server_ip = node['logstash']['graphite_ip']
+ end
+end
+
+# Create directory for logstash
+directory "#{node['logstash']['basedir']}/server" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+%w{bin etc lib log tmp }.each do |ldir|
+ directory "#{node['logstash']['basedir']}/server/#{ldir}" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ end
+end
+
+# installation
+if node['logstash']['server']['install_method'] == "jar"
+ remote_file "#{node['logstash']['basedir']}/server/lib/logstash-#{node['logstash']['server']['version']}.jar" do
+ owner "root"
+ group "root"
+ mode "0755"
+ source node['logstash']['server']['source_url']
+ checksum node['logstash']['server']['checksum']
+ action :create_if_missing
+ end
+
+ link "#{node['logstash']['basedir']}/server/lib/logstash.jar" do
+ to "#{node['logstash']['basedir']}/server/lib/logstash-#{node['logstash']['server']['version']}.jar"
+ notifies :restart, "service[logstash_server]"
+ end
+else
+ include_recipe "logstash::source"
+
+ logstash_version = node['logstash']['source']['sha'] || "v#{node['logstash']['server']['version']}"
+ link "#{node['logstash']['basedir']}/server/lib/logstash.jar" do
+ to "#{node['logstash']['basedir']}/source/build/logstash-#{logstash_version}-monolithic.jar"
+ notifies :restart, "service[logstash_server]"
+ end
+end
+
+directory "#{node['logstash']['basedir']}/server/etc/conf.d" do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+directory patterns_dir do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+end
+
+node['logstash']['patterns'].each do |file, hash|
+ template_name = patterns_dir + '/' + file
+ template template_name do
+ source 'patterns.erb'
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ variables(:patterns => hash)
+ mode '0644'
+ notifies :restart, 'service[logstash_server]'
+ end
+end
+
+template "#{node['logstash']['basedir']}/server/etc/logstash.conf" do
+ source node['logstash']['server']['base_config']
+ cookbook node['logstash']['server']['base_config_cookbook']
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0644"
+ variables(:graphite_server_ip => graphite_server_ip,
+ :es_server_ip => es_server_ip,
+ :enable_embedded_es => node['logstash']['server']['enable_embedded_es'],
+ :es_cluster => node['logstash']['elasticsearch_cluster'],
+ :patterns_dir => patterns_dir)
+ notifies :restart, "service[logstash_server]"
+ action :create
+end
+
+if platform_family? "debian"
+ if node["platform_version"] == "12.04"
+ template "/etc/init/logstash_server.conf" do
+ mode "0644"
+ source "logstash_server.conf.erb"
+ end
+
+ service "logstash_server" do
+ provider Chef::Provider::Service::Upstart
+ action [ :enable, :start ]
+ end
+ else
+ runit_service "logstash_server"
+ end
+elsif platform_family? "rhel","fedora"
+ template "/etc/init.d/logstash_server" do
+ source "init.erb"
+ owner "root"
+ group "root"
+ mode "0774"
+ variables(:config_file => "logstash.conf",
+ :name => 'server',
+ :max_heap => node['logstash']['server']['xmx'],
+ :min_heap => node['logstash']['server']['xms']
+ )
+ end
+
+ service "logstash_server" do
+ supports :restart => true, :reload => true, :status => true
+ action [:enable, :start]
+ end
+end
+
+directory node['logstash']['log_dir'] do
+ action :create
+ mode "0755"
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ recursive true
+end
+
+logrotate_app "logstash_server" do
+ path "#{node['logstash']['log_dir']}/*.log"
+ frequency "daily"
+ rotate "30"
+ options [ "missingok", "notifempty" ]
+ create "664 #{node['logstash']['user']} #{node['logstash']['group']}"
+end
+
diff --git a/cookbooks/logstash/recipes/source.rb b/cookbooks/logstash/recipes/source.rb
new file mode 100644
index 0000000..c440546
--- /dev/null
+++ b/cookbooks/logstash/recipes/source.rb
@@ -0,0 +1,36 @@
+include_recipe "build-essential"
+include_recipe "java"
+include_recipe "ant"
+include_recipe "git"
+include_recipe "logstash::default"
+
+package "wget"
+
+logstash_version = node['logstash']['source']['sha'] || "v#{node['logstash']['server']['version']}"
+
+directory "#{node['logstash']['basedir']}/source" do
+ action :create
+ owner node['logstash']['user']
+ group node['logstash']['group']
+ mode "0755"
+end
+
+git "#{node['logstash']['basedir']}/source" do
+ repository node['logstash']['source']['repo']
+ reference logstash_version
+ action :sync
+ user node['logstash']['user']
+ group node['logstash']['group']
+end
+
+execute "build-logstash" do
+ cwd "#{node['logstash']['basedir']}/source"
+ environment ({'JAVA_HOME' => node['logstash']['source']['java_home']})
+ user "root"
+ # This variant is useful for troubleshooting stupid environment problems
+ # command "make clean && make VERSION=#{logstash_version} --debug > /tmp/make.log 2>&1"
+ command "make clean && make VERSION=#{logstash_version}"
+ action :run
+ creates "#{node['logstash']['basedir']}/source/build/logstash-#{logstash_version}-monolithic.jar"
+ not_if "test -f #{node['logstash']['basedir']}/source/build/logstash-#{logstash_version}-monolithic.jar"
+end
diff --git a/cookbooks/logstash/templates/default/agent.conf.erb b/cookbooks/logstash/templates/default/agent.conf.erb
new file mode 100644
index 0000000..dd52453
--- /dev/null
+++ b/cookbooks/logstash/templates/default/agent.conf.erb
@@ -0,0 +1,37 @@
+# This file was created for <%= node.name %>
+# by Chef
+# Manual changes will be lost
+input {
+ <% if node['logstash']['agent']['inputs'].empty? -%>
+ file {
+ type => "sample-logs"
+ path => ["/var/log/*.log"]
+ exclude => ["*.gz"]
+ debug => true
+ }
+ <% else %>
+ <%= LogstashConf.section_to_str(node['logstash']['agent']['inputs']) %>
+ <% end -%>
+}
+
+<% unless node['logstash']['agent']['filters'].empty? -%>
+filter {
+ <%= LogstashConf.section_to_str(node['logstash']['agent']['filters'], node['logstash']['agent']['version'], @patterns_dir) %>
+}
+<% end -%>
+
+output {
+ <% if node['logstash']['agent']['debug'] -%>
+ stdout { }
+ <% end -%>
+ <% if node['logstash']['agent']['outputs'].empty? -%>
+ <% if @logstash_server_ip.empty? -%>
+ # Provide a sane default
+ null { }
+ <% else -%>
+ tcp { host => "<%= @logstash_server_ip %>" port => "5959" }
+ <% end -%>
+ <% else -%>
+ <%= LogstashConf.section_to_str(node['logstash']['agent']['outputs']) %>
+ <% end -%>
+}
diff --git a/cookbooks/logstash/templates/default/beaver.conf.erb b/cookbooks/logstash/templates/default/beaver.conf.erb
new file mode 100644
index 0000000..14df987
--- /dev/null
+++ b/cookbooks/logstash/templates/default/beaver.conf.erb
@@ -0,0 +1,18 @@
+[beaver]
+<% @conf.each do |key, value| -%>
+<%= key %>: <%= value %>
+<% end -%>
+
+<% @files.each do |file| -%>
+<% file['path'].each do |path| -%>
+[<%= path %>]
+type: <%= file['type'] || 'file' %>
+<% if file.has_key?('tags') -%>
+tags: <%= file['tags'].join(',') %>
+<% end -%>
+<% if file.has_key?('add_field') -%>
+add_field: <%= file['add_field'].join(',') %>
+<% end -%>
+
+<% end -%>
+<% end -%>
diff --git a/cookbooks/logstash/templates/default/init-beaver.erb b/cookbooks/logstash/templates/default/init-beaver.erb
new file mode 100644
index 0000000..84de0ad
--- /dev/null
+++ b/cookbooks/logstash/templates/default/init-beaver.erb
@@ -0,0 +1,142 @@
+#!/bin/bash -
+<% if [ "redhat", "centos","amazon", "fedora" ].include?(@platform) -%>
+#
+# beaver
+#
+# chkconfig: - 57 47
+# description: Log Sender provided by beaver
+# processname: beaver
+<% else -%>
+### BEGIN INIT INFO
+# Provides: beaver
+# Required-Start: $local_fs $remote_fs $network
+# Required-Stop: $local_fs $remote_fs $network
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# Short-Description: Start up the Beaver at boot time
+# Description: Enable Log Sender provided by beaver.
+### END INIT INFO
+<% end -%>
+
+
+BEAVER_NAME='beaver'
+BEAVER_CMD='<%= @cmd %>'
+BEAVER_PID='<%= @pid_file %>'
+BEAVER_USER='<%= @user %>'
+BEAVER_LOG='<%= @log %>'
+
+
+PATH='/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'
+export PATH
+IFS=$' \t\n'
+export IFS
+
+BEAVER_BIN="$(which "${BEAVER_NAME}")"
+
+[ -r /etc/init.d/functions ] && . /etc/init.d/functions
+[ -r /lib/lsb/init-functions ] && . /lib/lsb/init-functions
+[ -r "/etc/default/${BEAVER_NAME}" ] && . "/etc/default/${BEAVER_NAME}"
+
+do_start() {
+ test -f "${BEAVER_BIN}" || exit 0
+ if is_up
+ then
+ echo $'Log Sender server daemon already started.'
+ return 0
+ fi
+ echo -n $"Log Sender server daemon: ${BEAVER_NAME}"
+ su - "${BEAVER_USER}" -s '/bin/bash' -c "${BEAVER_CMD} >> ${BEAVER_LOG} 2>&1 & echo \$! > ${BEAVER_PID}"
+ echo '.'
+}
+
+do_stop() {
+ test -f "${BEAVER_BIN}" || exit 0
+ if ! is_up
+ then
+ echo $'Log Sender server daemon already stopped.'
+ return 0
+ fi
+ echo -n $"Stopping Log Sender server daemon: ${BEAVER_NAME}"
+ do_kill
+ local I='0'
+ while is_up
+ do
+ echo -n '.'
+ if [ "${I}" -gt 10 ]
+ then
+ do_kill_force
+ I='0'
+ else
+ do_kill
+ fi
+ sleep 1
+ I="$((I+1))"
+ done
+ echo '.'
+}
+
+beaver_pid() {
+ tail -1 "${BEAVER_PID}" 2> /dev/null
+}
+
+is_up() {
+ PID="$(beaver_pid)"
+ [ x"${PID}" != x ] && ps -p "${PID}" -o comm,args h 2> /dev/null | grep -qFw "${BEAVER_NAME}"
+}
+
+do_kill() {
+ PID="$(beaver_pid)"
+ [ x"${PID}" != x ] && su - "${BEAVER_USER}" -c "kill -TERM ${PID}"
+}
+
+do_kill_force() {
+ PID="$(beaver_pid)"
+ echo -n 'force'
+ [ x"${PID}" != x ] && su - "${BEAVER_USER}" -c "kill -KILL ${PID}"
+}
+
+do_restart() {
+ test -f "${BEAVER_BIN}" || exit 0
+ do_stop
+ sleep 1
+ do_start
+}
+
+do_status() {
+ test -f "${BEAVER_BIN}" || exit 0
+ if is_up
+ then
+ echo "${BEAVER_NAME} is running."
+ exit 0
+ else
+ echo "${BEAVER_NAME} is not running."
+ exit 1
+ fi
+}
+
+do_usage() {
+ echo $"Usage: $0 {start | stop | restart | force-reload | status}"
+ exit 1
+}
+
+case "$1" in
+start)
+ do_start
+ exit "$?"
+ ;;
+stop)
+ do_stop
+ exit "$?"
+ ;;
+restart|force-reload)
+ do_restart
+ exit "$?"
+ ;;
+status)
+ do_status
+ ;;
+*)
+ do_usage
+ ;;
+esac
+
diff --git a/cookbooks/logstash/templates/default/init.erb b/cookbooks/logstash/templates/default/init.erb
new file mode 100755
index 0000000..3c1ffdd
--- /dev/null
+++ b/cookbooks/logstash/templates/default/init.erb
@@ -0,0 +1,170 @@
+#!/usr/bin/env bash
+#
+# logstash
+#
+# chkconfig: - 57 47
+# description: logstash
+# processname: logstash
+
+
+PIDDIR="<%= node['logstash']['pid_dir'] %>"
+export PIDFILE="$PIDDIR/<%= @name %>.pid"
+export LS_HOME="<%= "#{node['logstash']['basedir']}/#{@name}" %>"
+export LS_CONFIG="$LS_HOME/etc/<%= @config_file %>"
+LS_USER="<%= node['logstash']['user'] %>"
+LOGDIR="<%= node['logstash']['log_dir'] %>"
+LS_LOG="$LOGDIR/<%= @name %>.log"
+export JAVA_OPTS="-server -Xms<%= @min_heap %> -Xmx<%= @max_heap %> -Djava.io.tmpdir=$LS_HOME/tmp/"
+BIN_SCRIPT="/usr/bin/env java $JAVA_OPTS -jar $LS_HOME/lib/logstash.jar agent -f $LS_CONFIG > $LS_LOG 2>&1 & echo \$! > $PIDFILE"
+
+if [ -f /etc/init.d/functions ] ; then
+ . /etc/init.d/functions
+fi
+
+start() {
+
+ if [ ! -d "$PIDDIR" ] ; then
+ mkdir "$PIDDIR"
+ chown -R $LS_USER:$LS_USER $PIDDIR
+ fi
+
+ if [ ! -d "$LOGDIR" ] ; then
+ mkdir "$LOGDIR"
+ fi
+
+ chown -R $LS_USER:$LS_USER $LOGDIR $PIDDIR
+
+
+ if [ -f $PIDFILE ]; then
+ echo -e "\033[31;1mPID file found in $PIDFILE, already running?\033[0m"
+ ls_pid="$(cat $PIDFILE)"
+ pid_running="$( ps ax | grep 'java' | grep $ls_pid )"
+
+ if [ ! -z "$pid_running" ] ; then
+ echo -e "\033[31;1mPID $ls_pid still alive, logstash is already running. Doing nothing\033[0m"
+ return 1
+ fi
+ fi
+
+ echo -e "\033[1mStarting logstash...\033[0m"
+ pushd $LS_HOME > /dev/null 2>&1
+ su $LS_USER -c "$BIN_SCRIPT" > /dev/null 2>&1
+ ls_pid=$!
+ result=$?
+ popd > /dev/null 2>&1
+
+ if [ $result -ne 0 ] ; then
+ failure
+ echo -e "Logstash did not start successfully"
+ exit 1
+ else
+ success
+ echo -e "Logstash started successfully"
+ fi
+
+}
+
+
+
+function stop() {
+ echo -n -e "\033[1mStopping logstash...\033[0m"
+
+ if [ -z "$SHUTDOWN_WAIT" ]; then
+ SHUTDOWN_WAIT=5
+ fi
+
+ if [ ! -z "$PIDFILE" ]; then
+ if [ -f "$PIDFILE" ]; then
+ kill -0 `cat $PIDFILE` >/dev/null 2>&1
+ if [ $? -gt 0 ]; then
+ echo "PID file ($PIDFILE) found but no matching process was found. Nothing to do."
+ return 0
+ fi
+ else
+ echo "\$PIDFILE was set ($PIDFILE) but the specified file does not exist. Is Logstash running? Assuming it has stopped and pro\
+ ceeding."
+ return 0
+ fi
+ fi
+
+ kill `cat $PIDFILE` >/dev/null 2>&1
+
+ if [ ! -z "$PIDFILE" ]; then
+ if [ -f "$PIDFILE" ]; then
+ while [ $SHUTDOWN_WAIT -ge 0 ]; do
+ kill -0 `cat $PIDFILE` >/dev/null 2>&1
+ if [ $? -gt 0 ]; then
+ rm $PIDFILE
+ break
+ fi
+ if [ $SHUTDOWN_WAIT -gt 0 ]; then
+ sleep 1
+ fi
+ SHUTDOWN_WAIT=`expr $SHUTDOWN_WAIT - 1 `
+ done
+ # still not dead, we may need to resort to drastic measures
+ if [ -f "$PIDFILE" ]; then
+ kill -0 `cat $PIDFILE` >/dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ echo "Application still alive, sleeping for 20 seconds before sending SIGKILL"
+ sleep 20
+ kill -0 `cat $PIDFILE` >/dev/null 2>&1
+ if [ $? -eq 0 ]; then
+ kill -9 `cat $PIDFILE` >/dev/null 2>&1
+ echo "Killed with extreme prejudice"
+ else
+ echo "Application stopped, no need to use SIGKILL"
+ fi
+ rm $PIDFILE
+ fi
+ fi
+ fi
+ fi
+}
+
+restart() {
+ stop
+ start
+}
+
+status() {
+ # GOT PIDFILE?
+ [ -f $PIDFILE ] && pid=$(cat $PIDFILE)
+
+ # RUNNING
+ if [[ $pid && -d "/proc/$pid" ]]; then
+ success
+ echo -e "Logstash is running with pid $pid"
+ fi
+
+ # NOT RUNNING
+ if [[ ! $pid || ! -d "/proc/$pid" ]]; then
+ echo "Logstash not running"
+ fi
+
+ # STALE PID FOUND
+ if [[ ! -d "/proc/$pid" && -f $PIDFILE ]]; then
+ echo -e "\033[1;31;40m[!] Stale PID found in $PIDFILE\033[0m"
+ fi
+}
+
+
+case "$1" in
+ start)
+ start
+ ;;
+ stop)
+ stop
+ ;;
+ restart)
+ restart
+ ;;
+ status)
+ status $2
+ ;;
+ *)
+ echo $"Usage: $0 {start|stop|restart|status [-v]|}"
+ exit 1
+esac
+
+exit $?
diff --git a/cookbooks/logstash/templates/default/kibana-bash_profile.erb b/cookbooks/logstash/templates/default/kibana-bash_profile.erb
new file mode 100644
index 0000000..9fd5937
--- /dev/null
+++ b/cookbooks/logstash/templates/default/kibana-bash_profile.erb
@@ -0,0 +1,9 @@
+export PID_DIR="<%= @pid_dir %>"
+export LOG_DIR="<%= @log_dir %>"
+export ES_PORT="<%= @es_port %>"
+export ES_IP="<%= @es_ip %>"
+export KIBANA_HOST="<%= @server_name %>"
+export SMART_INDEX="<%= @smart_index %>"
+export KIBANA_APP="<%= @app_name %>"
+export KIBANA_PORT="<%= @kibana_port %>"
+export RACK_ENV="production"
diff --git a/cookbooks/logstash/templates/default/kibana-config.php.erb b/cookbooks/logstash/templates/default/kibana-config.php.erb
new file mode 100644
index 0000000..8251f4a
--- /dev/null
+++ b/cookbooks/logstash/templates/default/kibana-config.php.erb
@@ -0,0 +1,101 @@
+  <% if !@es_server_ip.nil? && !@es_server_ip.empty? -%>
+ 'elasticsearch_server' => "<%= @es_server_ip %>:9200",
+ <% else -%>
+ 'elasticsearch_server' => "127.0.0.1:9200",
+ <% end -%>
+
+ // URL path to kibana. Apache users can safely leave this
+ // blank in most situations.
+ 'app_path' => '',
+
+ // The record type as defined in your logstash configuration.
+  // Separate multiple types with a comma, no spaces. Leave blank
+ // for all.
+ 'type' => '',
+
+ // Results to show per page
+ 'results_per_page' => 50,
+
+ // You may wish to insert a default search which all user searches
+ // must match. For example @source_host:www1 might only show results
+ // from www1.
+ 'filter_string' => '',
+
+ // When searching, Kibana will attempt to only search indices
+ // that match your timeframe, to make searches faster. You can
+ // turn this behavior off if you use something other than daily
+ // indexing
+ 'smart_index' => true,
+
+ // *NOTE*: With the move to segmented loading the setting below is largely
+ // obsolete since Kibana only hits one index at a time. I'm leaving the
+ // setting anyway since it is imaginable that there may be some case in which
+ // searching 1 index at a time over a certain threshold may not be desirable
+
+ // ElasticSearch has a default limit on URL size for REST calls,
+ // so Kibana will fall back to _all if a search spans too many
+ // indices. Use this to set that 'too many' number.
+ 'smart_index_limit' => 60,
+
+ // When using analyze, use this many of the most recent
+ // results for user's query
+ 'analyze_limit' => 20000,
+
+ // Show this many results in analyze/ mode
+ 'analyze_show' => 25,
+
+ // Show this many results in an rss feed
+ 'rss_show' => 20,
+
+ // Show this many results in an exported file
+ 'export_show' => 2000,
+
+ // Delimit exported file fields with what?
+ // You may want to change this to something like "\t" (tab) if you have
+ // commas in your logs
+ 'export_delimiter' => ",",
+
+ // By default, Kibana will look for grok/filter defined fields
+ // in your results. Logstash has some default fields that it also
+ // supplies. You might want to enable or disable some of those.
+ 'default_fields' => array(
+ '@type',
+ '@tags',
+ '@source_host',
+ '@source_path',
+ '@timestamp',
+ '@source',
+ ),
+
+ // You probably don't want to touch anything below this line
+ // unless you really know what you're doing
+
+ // Primary field. By default Elastic Search has a special
+ // field called _all that is searched when no field is specified.
+ // Dropping _all can reduce index size significantly. If you do that
+ // you'll need to change primary_field to be '@message'
+ 'primary_field' => '_all',
+
+ // Default Elastic Search index to query
+ 'default_index' => '_all',
+
+ // default search settings
+ 'default_search' => array(
+ 'search' => '*',
+ 'fields' => '',
+ 'time' => '',
+ 'offset' => 0,
+ 'analyze_field' => '',
+ ),
+
+
+ 'local_timezone' => date_default_timezone_get(),
+
+ // Prevent wildcard search terms which result in extremely slow queries
+ // See: http://www.elasticsearch.org/guide/reference/query-dsl/wildcard-query.html
+ 'disable_fullscan' => false,
+ );
diff --git a/cookbooks/logstash/templates/default/kibana-config.rb.erb b/cookbooks/logstash/templates/default/kibana-config.rb.erb
new file mode 100644
index 0000000..7ff1688
--- /dev/null
+++ b/cookbooks/logstash/templates/default/kibana-config.rb.erb
@@ -0,0 +1,131 @@
+module KibanaConfig
+
+ es_ip = ENV['ES_IP'] ? ENV['ES_IP'] : '127.0.0.1'
+ es_port = ENV['ES_PORT'] ? ENV['ES_PORT'] : 9200
+ Elasticsearch = "#{es_ip}:#{es_port}"
+ KibanaPort = ENV['KIBANA_PORT'] ? ENV['KIBANA_PORT'] : 80
+ KibanaHost = ENV['KIBANA_HOST'] ? ENV['KIBANA_HOST'] : 'localhost'
+
+ #Set the Net::HTTP read/open timeouts for the connection to the ES backend
+ ElasticsearchTimeout = 500
+
+ # The record type as defined in your logstash configuration.
+  # Separate multiple types with a comma, no spaces. Leave blank
+ # for all.
+ Type = ''
+
+ # Results to show per page
+ Per_page = 50
+
+ # Timezone. Leave this set to 'user' to have the user's browser autocorrect.
+ # Otherwise, set a timezone string
+ # Examples: 'UTC', 'America/Phoenix', 'Europe/Athens', MST
+ # You can use `date +%Z` on linux to get your timezone string
+ Timezone = 'user'
+
+ # Format for timestamps. Defaults to mm/dd HH:MM:ss.
+ # For syntax see: http://blog.stevenlevithan.com/archives/date-time-format
+ # Time_format = 'isoDateTime'
+ Time_format = 'mm/dd HH:MM:ss'
+
+ # Change which fields are shown by default. Must be set as an array
+ # Default_fields = ['@fields.vhost','@fields.response','@fields.request']
+ Default_fields = ['@message']
+
+ # If set to true, Kibana will use the Highlight feature of Elasticsearch to
+ # display highlighted search results
+ Highlight_results = true
+
+ # A field needs to be specified for the highlight feature. By default,
+ # Elasticsearch doesn't allow highlighting on _all because the field has to
+ # be either stored or part of the _source field.
+ Highlighted_field = "@message"
+
+ # Make URLs clickable in detailed view
+ Clickable_URLs = true
+
+ # The default operator used if no explicit operator is specified.
+ # For example, with a default operator of OR, the query capital of
+ # Hungary is translated to capital OR of OR Hungary, and with default
+ # operator of AND, the same query is translated to capital AND of AND
+ # Hungary. The default value is OR.
+ Default_operator = 'OR'
+
+ # When using analyze, use this many of the most recent
+ # results for user's query
+ Analyze_limit = 2000
+
+ # Show this many results in analyze/trend/terms/stats modes
+ Analyze_show = 25
+
+ # Show this many results in an rss feed
+ Rss_show = 25
+
+ # Show this many results in an exported file
+ Export_show = 2000
+
+ # Delimit exported file fields with what?
+ # You may want to change this to something like "\t" (tab) if you have
+ # commas in your logs
+ Export_delimiter = ","
+
+ # You may wish to insert a default search which all user searches
+ # must match. For example @source_host:www1 might only show results
+ # from www1.
+ Filter = ''
+
+ # When searching, Kibana will attempt to only search indices
+ # that match your timeframe, to make searches faster. You can
+ # turn this behavior off if you use something other than daily
+ # indexing
+ Smart_index = true
+
+ # You can define your custom pattern here for index names if you
+ # use something other than daily indexing. Pattern needs to have
+ # date formatting like '%Y.%m.%d'. Will accept an array of smart
+ # indexes.
+ # Smart_index_pattern = ['logstash-web-%Y.%m.%d', 'logstash-mail-%Y.%m.%d']
+ # Smart_index_pattern = 'logstash-%Y.%m.%d'
+ Smart_index_pattern = ENV['SMART_INDEX'] ? ENV['SMART_INDEX'] : 'logstash-%Y.%m.%d'
+
+ # Number of seconds between each index. 86400 = 1 day.
+ Smart_index_step = 86400
+
+ # ElasticSearch has a default limit on URL size for REST calls,
+ # so Kibana will fall back to _all if a search spans too many
+ # indices. Use this to set that 'too many' number. By default this
+ # is set really high, ES might not like this
+ Smart_index_limit = 150
+
+ # Elasticsearch has an internal mechanism called "faceting" for performing
+ # analysis that we use for the "Stats" and "Terms" modes. However, on large
+  # data sets/queries faceting can cause ES to crash if there isn't enough
+ # memory available. It is suggested that you limit the number of indices that
+ # Kibana will use for the "Stats" and "Terms" to prevent ES crashes. For very
+  # large data sets and undersized ES clusters, a limit of 1 is not unreasonable.
+ # Default is 0 (unlimited)
+ Facet_index_limit = 0
+
+ # You probably don't want to touch anything below this line
+ # unless you really know what you're doing
+
+ # Primary field. By default Elastic Search has a special
+ # field called _all that is searched when no field is specified.
+ # Dropping _all can reduce index size significantly. If you do that
+ # you'll need to change primary_field to be '@message'
+ Primary_field = '_all'
+
+ # Default Elastic Search index to query
+ Default_index = '_all'
+
+ # TODO: This isn't functional yet
+ # Prevent wildcard search terms which result in extremely slow queries
+ # See: http:#www.elasticsearch.org/guide/reference/query-dsl/wildcard-query.html
+ Disable_fullscan = false
+
+ # Set headers to allow kibana to be loaded in an iframe from a different origin.
+ Allow_iframed = false
+
+ # Use this interval as fallback.
+ Fallback_interval = 900
+end
diff --git a/cookbooks/logstash/templates/default/kibana-daemon.rb.erb b/cookbooks/logstash/templates/default/kibana-daemon.rb.erb
new file mode 100644
index 0000000..64d393c
--- /dev/null
+++ b/cookbooks/logstash/templates/default/kibana-daemon.rb.erb
@@ -0,0 +1,28 @@
+#!/usr/bin/env ruby
+
+require 'rubygems'
+require 'daemons'
+require 'pathname'
+require 'fileutils'
+
+# Get the full path to this script's directory since Daemons does a chdir to
+# / just after forking..
+scriptdir = Pathname.new(File.dirname(__FILE__)).realpath
+
+# populate environment variables
+pid_dir = !ENV['PID_DIR'].nil? ? ENV['PID_DIR'] : File.join(scriptdir, "tmp")
+app_name = !ENV['KIBANA_APP'].nil? ? ENV['KIBANA_APP'] : "kibana"
+log_output = !ENV['LOG_DIR'].nil? ? true : false
+log_dir = log_output ? ENV['LOG_DIR'] : nil
+
+options = {
+ :dir_mode => :normal,
+ :dir => pid_dir,
+ :log_output => log_output,
+ :log_dir => log_dir
+ }
+
+Daemons.run_proc(app_name, options) do
+ Dir.chdir(scriptdir)
+ exec "ruby kibana.rb"
+end
diff --git a/cookbooks/logstash/templates/default/kibana.conf.erb b/cookbooks/logstash/templates/default/kibana.conf.erb
new file mode 100644
index 0000000..e9fe5f7
--- /dev/null
+++ b/cookbooks/logstash/templates/default/kibana.conf.erb
@@ -0,0 +1,16 @@
+NameVirtualHost *:<%= node['logstash']['kibana']['http_port'] %>
+<VirtualHost *:<%= node['logstash']['kibana']['http_port'] %>>
+ ServerName <%= @server_name %>
+ DocumentRoot "<%= @docroot %>"
+
+
+  <Directory "<%= @docroot %>">
+ DirectoryIndex index.php
+ <% if node['logstash']['kibana']['auth']["enabled"] %>
+ AuthType Basic
+ AuthName "Restricted Files"
+ AuthUserFile <%= "#{node['logstash']['basedir']}/kibana/#{node['logstash']['kibana']['sha']}/htpasswd" %>
+ Require user <%= node['logstash']['kibana']['auth']['user'] %>
+ <% end %>
+  </Directory>
+</VirtualHost>
diff --git a/cookbooks/logstash/templates/default/kibana.init.erb b/cookbooks/logstash/templates/default/kibana.init.erb
new file mode 100755
index 0000000..a968e75
--- /dev/null
+++ b/cookbooks/logstash/templates/default/kibana.init.erb
@@ -0,0 +1,50 @@
+#!/bin/sh -e
+### BEGIN INIT INFO
+# Provides: kibana
+# Required-Start: $remote_fs $syslog
+# Required-Stop: $remote_fs $syslog
+# Default-Start: 2 3 4 5
+# Default-Stop: 0 1 6
+# Short-Description: Make sense of a mountain of logs.
+### END INIT INFO
+
+#
+# chkconfig: 2345 20 80
+#
+KIBANA_HOME=<%= @kibana_home %>
+USER=<%= @user %>
+
+KIBANA_CMD="cd $KIBANA_HOME && bundle exec ruby kibana-daemon.rb $1"
+
+case "$1" in
+start)
+ su - $USER -c "$KIBANA_CMD"
+ RETVAL=$?
+;;
+
+stop)
+ su - $USER -c "$KIBANA_CMD"
+ RETVAL=$?
+;;
+
+restart)
+ su - $USER -c "$KIBANA_CMD"
+ RETVAL=$?
+;;
+
+status)
+ su - $USER -c "$KIBANA_CMD"
+;;
+
+force-reload)
+ su - $USER -c "cd $KIBANA_HOME && bundle exec kibana-daemon.rb zap"
+ su - $USER -c "cd $KIBANA_HOME && bundle exec kibana-daemon.rb start"
+ RETVAL=$?
+;;
+
+*)
+ echo "Usage: $0 {start|stop|restart|status|force-reload}"
+ exit 1
+esac
+
+exit $RETVAL
diff --git a/cookbooks/logstash/templates/default/logstash_agent.conf.erb b/cookbooks/logstash/templates/default/logstash_agent.conf.erb
new file mode 100644
index 0000000..8869e96
--- /dev/null
+++ b/cookbooks/logstash/templates/default/logstash_agent.conf.erb
@@ -0,0 +1,27 @@
+description "Logstash agent"
+author "Chef"
+
+start on (filesystem and net-device-up)
+stop on runlevel [!2345]
+
+respawn
+respawn limit 5 30
+
+chdir <%= node['logstash']['basedir'] %>/agent
+setuid <%= node['logstash']['user'] %>
+
+script
+ export LOGSTASH_HOME="<%= node['logstash']['basedir'] %>/agent"
+ export HOME=$LOGSTASH_HOME
+ export GC_OPTS="<%= node['logstash']['agent']['gc_opts'] %>"
+ export JAVA_OPTS="-server -Xms<%= node['logstash']['agent']['xms'] %> -Xmx<%= node['logstash']['agent']['xmx'] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/ <%= node['logstash']['agent']['java_opts'] %> <%= '-Djava.net.preferIPv4Stack=true' if node['logstash']['agent']['ipv4_only'] %>"
+ export LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/shipper.conf -l <%= node['logstash']['log_dir'] %>/logstash.log"
+ <% if node['logstash']['agent']['debug'] -%>
+ export LOGSTASH_OPTS="$LOGSTASH_OPTS -vv"
+ <% end -%>
+ export OPTS="$JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS"
+
+ exec /usr/bin/java $OPTS
+end script
+
+emits logstash-agent-running
diff --git a/cookbooks/logstash/templates/default/logstash_beaver.conf.erb b/cookbooks/logstash/templates/default/logstash_beaver.conf.erb
new file mode 100644
index 0000000..f3d6bfe
--- /dev/null
+++ b/cookbooks/logstash/templates/default/logstash_beaver.conf.erb
@@ -0,0 +1,20 @@
+description "Logstash beaver"
+author "Chef"
+
+start on (filesystem and net-device-up)
+stop on runlevel [!2345]
+
+respawn
+respawn limit 5 30
+
+<% if @supports_setuid %>
+setuid <%= @user %>
+setgid <%= @group %>
+<% end %>
+chdir <%= node['logstash']['basedir'] %>/beaver
+
+console output
+
+exec <%= @cmd %> >> <%= @log %> 2>&1
+
+emits logstash-beaver-running
diff --git a/cookbooks/logstash/templates/default/logstash_server.conf.erb b/cookbooks/logstash/templates/default/logstash_server.conf.erb
new file mode 100644
index 0000000..905a558
--- /dev/null
+++ b/cookbooks/logstash/templates/default/logstash_server.conf.erb
@@ -0,0 +1,27 @@
+description "Logstash"
+author "Chef"
+
+start on (filesystem and net-device-up)
+stop on runlevel [!2345]
+
+respawn
+respawn limit 5 30
+
+chdir <%= node['logstash']['basedir'] %>/server
+setuid <%= node['logstash']['user'] %>
+
+script
+ export LOGSTASH_HOME="<%= node['logstash']['basedir'] %>/server"
+ export HOME=$LOGSTASH_HOME
+ export GC_OPTS="<%= node['logstash']['server']['gc_opts'] %>"
+ export JAVA_OPTS="-server -Xms<%= node['logstash']['server']['xms'] %> -Xmx<%= node['logstash']['server']['xmx'] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/ <%= node['logstash']['server']['java_opts'] %> <%= '-Djava.net.preferIPv4Stack=true' if node['logstash']['agent']['ipv4_only'] %>"
+ export LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/logstash.conf -l <%= node['logstash']['log_dir'] %>/logstash.log"
+ <% if node['logstash']['server']['debug'] -%>
+ export LOGSTASH_OPTS="$LOGSTASH_OPTS -vv"
+ <% end -%>
+ export OPTS="$JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS"
+
+ exec /usr/bin/java $OPTS
+end script
+
+emits logstash-server-running
diff --git a/cookbooks/logstash/templates/default/patterns.erb b/cookbooks/logstash/templates/default/patterns.erb
new file mode 100644
index 0000000..448ee7c
--- /dev/null
+++ b/cookbooks/logstash/templates/default/patterns.erb
@@ -0,0 +1,4 @@
+<%# Renders grok patterns, one "NAME PATTERN" line per entry, sorted by name. -%>
+<% @patterns.sort.each do |name, pattern| -%>
+<%= name %> <%= pattern %>
+<% end -%>
diff --git a/cookbooks/logstash/templates/default/server.conf.erb b/cookbooks/logstash/templates/default/server.conf.erb
new file mode 100644
index 0000000..8bab10e
--- /dev/null
+++ b/cookbooks/logstash/templates/default/server.conf.erb
@@ -0,0 +1,39 @@
+<%# Logstash server pipeline config: inputs, optional filters, outputs. -%>
+input {
+  <% if node['logstash']['server']['inputs'].empty? -%>
+  <%# Fallback input when none are configured: JSON events over TCP 5959. -%>
+  tcp {
+    type => "tcp-input"
+    port => "5959"
+    format => "json_event"
+  }
+  <% else -%>
+  <%= LogstashConf.section_to_str(node['logstash']['server']['inputs']) %>
+  <% end -%>
+}
+
+<% unless node['logstash']['server']['filters'].empty? -%>
+filter {
+  <%= LogstashConf.section_to_str(node['logstash']['server']['filters'], node['logstash']['server']['version'], @patterns_dir) %>
+}
+<% end -%>
+
+output {
+  <% if node['logstash']['server']['debug'] -%>
+  stdout { debug => true debug_format => "json" }
+  <% end -%>
+  <% if @enable_embedded_es -%>
+  elasticsearch { embedded => true }
+  <% elsif not @es_server_ip.empty? -%>
+  elasticsearch { host => "<%= @es_server_ip %>" cluster => "<%= @es_cluster %>" }
+  <% end -%>
+  <% unless @graphite_server_ip.empty? -%>
+  graphite { host => "<%= @graphite_server_ip %>" metrics => ["logstash.events", "1"] }
+  <% end -%>
+  <%# NOTE(review): the empty? guard around the outputs section is -%>
+  <%# commented out, so section_to_str always runs on outputs — -%>
+  <%# confirm it tolerates an empty collection, or restore the guard. -%>
+  <% # unless node['logstash']['server']['outputs'].empty? -%>
+  <%= LogstashConf.section_to_str(node['logstash']['server']['outputs']) %>
+  <% # end -%>
+}
diff --git a/cookbooks/logstash/templates/default/sv-logstash_agent-log-run.erb b/cookbooks/logstash/templates/default/sv-logstash_agent-log-run.erb
new file mode 100644
index 0000000..a79a518
--- /dev/null
+++ b/cookbooks/logstash/templates/default/sv-logstash_agent-log-run.erb
@@ -0,0 +1,3 @@
+#!/bin/sh
+# runit log service: feed agent output to svlogd (-tt: UTC timestamps, see svlogd(8)).
+exec svlogd -tt ./main
diff --git a/cookbooks/logstash/templates/default/sv-logstash_agent-run.erb b/cookbooks/logstash/templates/default/sv-logstash_agent-run.erb
new file mode 100644
index 0000000..6a176c8
--- /dev/null
+++ b/cookbooks/logstash/templates/default/sv-logstash_agent-run.erb
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# basedir is used as an absolute path elsewhere in this cookbook, so no
+# leading "/" is prepended here (a leading "//" is implementation-defined).
+cd <%= node['logstash']['basedir'] %>/agent/
+exec 2>&1
+# Need to set LOGSTASH_HOME and HOME so sincedb will work
+LOGSTASH_HOME="<%= node['logstash']['basedir'] %>/agent"
+GC_OPTS="-XX:+UseParallelOldGC"
+JAVA_OPTS="-server -Xms<%= node['logstash']['agent']['xms'] %> -Xmx<%= node['logstash']['agent']['xmx'] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/"
+LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/shipper.conf -l <%= node['logstash']['log_dir'] %>/logstash.log"
+<% if node['logstash']['agent']['debug'] -%>
+LOGSTASH_OPTS="$LOGSTASH_OPTS -v"
+<% end -%>
+HOME=$LOGSTASH_HOME exec java $JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS
diff --git a/cookbooks/logstash/templates/default/sv-logstash_server-log-run.erb b/cookbooks/logstash/templates/default/sv-logstash_server-log-run.erb
new file mode 100644
index 0000000..a79a518
--- /dev/null
+++ b/cookbooks/logstash/templates/default/sv-logstash_server-log-run.erb
@@ -0,0 +1,3 @@
+#!/bin/sh
+# runit log service: feed server output to svlogd (-tt: UTC timestamps, see svlogd(8)).
+exec svlogd -tt ./main
diff --git a/cookbooks/logstash/templates/default/sv-logstash_server-run.erb b/cookbooks/logstash/templates/default/sv-logstash_server-run.erb
new file mode 100644
index 0000000..18a47e7
--- /dev/null
+++ b/cookbooks/logstash/templates/default/sv-logstash_server-run.erb
@@ -0,0 +1,17 @@
+#!/bin/sh
+
+# basedir is used as an absolute path elsewhere in this cookbook, so no
+# leading "/" is prepended here (a leading "//" is implementation-defined).
+cd <%= node['logstash']['basedir'] %>/server/
+exec 2>&1
+# Need to set LOGSTASH_HOME and HOME so sincedb will work
+LOGSTASH_HOME="<%= node['logstash']['basedir'] %>/server"
+GC_OPTS="-XX:+UseParallelOldGC"
+JAVA_OPTS="-server -Xms<%= node['logstash']['server']['xms'] %> -Xmx<%= node['logstash']['server']['xmx'] %> -Djava.io.tmpdir=$LOGSTASH_HOME/tmp/"
+LOGSTASH_OPTS="agent -f $LOGSTASH_HOME/etc/logstash.conf -l <%= node['logstash']['log_dir'] %>/logstash.log"
+# Bug fix: this is the *server* run script, so honor the server debug flag
+# (the original checked the agent's debug attribute — a copy-paste slip).
+<% if node['logstash']['server']['debug'] -%>
+LOGSTASH_OPTS="$LOGSTASH_OPTS -v"
+<% end -%>
+HOME=$LOGSTASH_HOME exec java $JAVA_OPTS $GC_OPTS -jar $LOGSTASH_HOME/lib/logstash.jar $LOGSTASH_OPTS