Merge pull request #114 from ppetit/sensu
Adding support for sensu as alerting destination
diff --git a/.gitignore b/.gitignore
index b80d7e8..aa8e42a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,5 @@
+.kitchen
tests/build/
*.swp
*.pyc
.ropeproject
-tests/lua/mocks/inspect.lua
-tests/lua/mocks/anomaly.lua
-tests/lua/mocks/annotation.lua
-tests/lua/mocks/date_time.lua
-tests/circular_buffer.so
diff --git a/Makefile b/Makefile
index fc83783..1043fbe 100644
--- a/Makefile
+++ b/Makefile
@@ -1,12 +1,38 @@
DESTDIR=/
SALTENVDIR=/usr/share/salt-formulas/env
RECLASSDIR=/usr/share/salt-formulas/reclass
-FORMULANAME=$(shell grep name: metadata.yml|head -1|cut -d : -f 2|grep -Eo '[a-z0-9\-]*')
+FORMULANAME=$(shell grep name: metadata.yml|head -1|cut -d : -f 2|grep -Eo '[a-z0-9\-\_]*')
+VERSION=$(shell grep version: metadata.yml|head -1|cut -d : -f 2|grep -Eo '[a-z0-9\.\-\_]*')
+VERSION_MAJOR := $(shell echo $(VERSION)|cut -d . -f 1-2)
+VERSION_MINOR := $(shell echo $(VERSION)|cut -d . -f 3)
+
+NEW_MAJOR_VERSION ?= $(shell date +%Y.%m|sed 's,\.0,\.,g')
+NEW_MINOR_VERSION ?= $(shell /bin/bash -c 'echo $$(( $(VERSION_MINOR) + 1 ))')
+
+MAKE_PID := $(shell echo $$PPID)
+JOB_FLAG := $(filter -j%, $(subst -j ,-j,$(shell ps T | grep "^\s*$(MAKE_PID).*$(MAKE)")))
+
+ifneq ($(subst -j,,$(JOB_FLAG)),)
+JOBS := $(subst -j,,$(JOB_FLAG))
+else
+JOBS := 1
+endif
+
+KITCHEN_LOCAL_YAML?=.kitchen.yml
+KITCHEN_OPTS?="--concurrency=$(JOBS)"
+KITCHEN_OPTS_CREATE?=""
+KITCHEN_OPTS_CONVERGE?=""
+KITCHEN_OPTS_VERIFY?=""
+KITCHEN_OPTS_TEST?=""
all:
@echo "make install - Install into DESTDIR"
@echo "make test - Run tests"
+ @echo "make kitchen - Run Kitchen CI tests (create, converge, verify)"
@echo "make clean - Cleanup after tests run"
+ @echo "make release-major - Generate new major release"
+ @echo "make release-minor - Generate new minor release"
+ @echo "make changelog - Show changes since last release"
install:
# Formula
@@ -14,6 +40,7 @@
cp -a $(FORMULANAME) $(DESTDIR)/$(SALTENVDIR)/
[ ! -d _modules ] || cp -a _modules $(DESTDIR)/$(SALTENVDIR)/
[ ! -d _states ] || cp -a _states $(DESTDIR)/$(SALTENVDIR)/ || true
+ [ ! -d _grains ] || cp -a _grains $(DESTDIR)/$(SALTENVDIR)/ || true
# Metadata
[ -d $(DESTDIR)/$(RECLASSDIR)/service/$(FORMULANAME) ] || mkdir -p $(DESTDIR)/$(RECLASSDIR)/service/$(FORMULANAME)
cp -a metadata/service/* $(DESTDIR)/$(RECLASSDIR)/service/$(FORMULANAME)
@@ -21,6 +48,71 @@
test:
[ ! -d tests ] || (cd tests; ./run_tests.sh)
+release-major: check-changes
+ @echo "Current version is $(VERSION), new version is $(NEW_MAJOR_VERSION)"
+ @[ $(VERSION_MAJOR) != $(NEW_MAJOR_VERSION) ] || (echo "Major version $(NEW_MAJOR_VERSION) already released, nothing to do. Do you want release-minor?" && exit 1)
+ echo "$(NEW_MAJOR_VERSION)" > VERSION
+ sed -i 's,version: .*,version: "$(NEW_MAJOR_VERSION)",g' metadata.yml
+ [ ! -f debian/changelog ] || dch -v $(NEW_MAJOR_VERSION) -m --force-distribution -D `dpkg-parsechangelog -S Distribution` "New version"
+ make genchangelog-$(NEW_MAJOR_VERSION)
+ (git add -u; git commit -m "Version $(NEW_MAJOR_VERSION)")
+ git tag -s -m $(NEW_MAJOR_VERSION) $(NEW_MAJOR_VERSION)
+
+release-minor: check-changes
+ @echo "Current version is $(VERSION), new version is $(VERSION_MAJOR).$(NEW_MINOR_VERSION)"
+ echo "$(VERSION_MAJOR).$(NEW_MINOR_VERSION)" > VERSION
+ sed -i 's,version: .*,version: "$(VERSION_MAJOR).$(NEW_MINOR_VERSION)",g' metadata.yml
+ [ ! -f debian/changelog ] || dch -v $(VERSION_MAJOR).$(NEW_MINOR_VERSION) -m --force-distribution -D `dpkg-parsechangelog -S Distribution` "New version"
+ make genchangelog-$(VERSION_MAJOR).$(NEW_MINOR_VERSION)
+ (git add -u; git commit -m "Version $(VERSION_MAJOR).$(NEW_MINOR_VERSION)")
+	git tag -s -m $(VERSION_MAJOR).$(NEW_MINOR_VERSION) $(VERSION_MAJOR).$(NEW_MINOR_VERSION)
+
+check-changes:
+ @git log --pretty=oneline --decorate $(VERSION)..HEAD | grep -Eqc '.*' || (echo "No new changes since version $(VERSION)"; exit 1)
+
+changelog:
+ git log --pretty=short --invert-grep --grep="Merge pull request" --decorate $(VERSION)..HEAD
+
+genchangelog: genchangelog-$(VERSION_MAJOR).$(NEW_MINOR_VERSION)
+
+genchangelog-%:
+ $(eval NEW_VERSION := $(patsubst genchangelog-%,%,$@))
+ (echo "=========\nChangelog\n=========\n"; \
+ (echo $(NEW_VERSION);git tag) | sort -r | grep -E '^[0-9\.]+' | while read i; do \
+ cur=$$i; \
+ test $$i = $(NEW_VERSION) && i=HEAD; \
+ prev=`(echo $(NEW_VERSION);git tag)|sort|grep -E '^[0-9\.]+'|grep -B1 "$$cur\$$"|head -1`; \
+ echo "Version $$cur\n=============================\n"; \
+ git log --pretty=short --invert-grep --grep="Merge pull request" --decorate $$prev..$$i; \
+ echo; \
+ done) > CHANGELOG.rst
+
+kitchen-check:
+ @[ -e $(KITCHEN_LOCAL_YAML) ] || (echo "Kitchen tests not available, there's no $(KITCHEN_LOCAL_YAML)." && exit 1)
+
+kitchen: kitchen-check kitchen-create kitchen-converge kitchen-verify kitchen-list
+
+kitchen-create: kitchen-check
+ kitchen create ${KITCHEN_OPTS} ${KITCHEN_OPTS_CREATE}
+ [ "$(shell echo $(KITCHEN_LOCAL_YAML)|grep -Eo docker)" = "docker" ] || sleep 120
+
+kitchen-converge: kitchen-check
+ kitchen converge ${KITCHEN_OPTS} ${KITCHEN_OPTS_CONVERGE} &&\
+ kitchen converge ${KITCHEN_OPTS} ${KITCHEN_OPTS_CONVERGE}
+
+kitchen-verify: kitchen-check
+ [ ! -d tests/integration ] || kitchen verify -t tests/integration ${KITCHEN_OPTS} ${KITCHEN_OPTS_VERIFY}
+ [ -d tests/integration ] || kitchen verify ${KITCHEN_OPTS} ${KITCHEN_OPTS_VERIFY}
+
+kitchen-test: kitchen-check
+ [ ! -d tests/integration ] || kitchen test -t tests/integration ${KITCHEN_OPTS} ${KITCHEN_OPTS_TEST}
+ [ -d tests/integration ] || kitchen test ${KITCHEN_OPTS} ${KITCHEN_OPTS_TEST}
+
+kitchen-list: kitchen-check
+ kitchen list
+
clean:
+ [ ! -x "$(shell which kitchen)" ] || kitchen destroy
+ [ ! -d .kitchen ] || rm -rf .kitchen
[ ! -d tests/build ] || rm -rf tests/build
[ ! -d build ] || rm -rf build
diff --git a/README.rst b/README.rst
index 2aaef14..33a3bf4 100644
--- a/README.rst
+++ b/README.rst
@@ -180,3 +180,36 @@
=========
* https://hekad.readthedocs.org/en/latest/index.html
+
+Documentation and Bugs
+======================
+
+To learn how to install and update salt-formulas, consult the documentation
+available online at:
+
+ http://salt-formulas.readthedocs.io/
+
+In the unfortunate event that bugs are discovered, they should be reported to
+the appropriate issue tracker. Use the GitHub issue tracker for a specific
+salt formula:
+
+ https://github.com/salt-formulas/salt-formula-heka/issues
+
+For feature requests, bug reports or blueprints affecting entire ecosystem,
+use Launchpad salt-formulas project:
+
+ https://launchpad.net/salt-formulas
+
+You can also join salt-formulas-users team and subscribe to mailing list:
+
+ https://launchpad.net/~salt-formulas-users
+
+Developers wishing to work on the salt-formulas projects should always base
+their work on master branch and submit pull request against specific formula.
+
+ https://github.com/salt-formulas/salt-formula-heka
+
+Any questions or feedback are always welcome, so feel free to join our IRC
+channel:
+
+ #salt-formulas @ irc.freenode.net
diff --git a/heka/files/lua/common/contrail_patterns.lua b/heka/files/lua/common/contrail_patterns.lua
index 3383a3b..9499875 100644
--- a/heka/files/lua/common/contrail_patterns.lua
+++ b/heka/files/lua/common/contrail_patterns.lua
@@ -37,7 +37,7 @@
-- Common patterns
local modulename = l.Cg(pgname, "Module")
local ip_address = l.Cg((l.digit + patt.dot)^1, "ip_address")
-local http_status = l.Cg(patt.Number, "status")
+local http_status = l.Cg(patt.Number / tonumber, "status")
local http_request_time = l.Cg(patt.Number, "request_time")
local delim = (patt.sp + patt.dash)^1
local hostname = l.Cg(patt.programname, "Hostname")
diff --git a/heka/files/lua/common/patterns.lua b/heka/files/lua/common/patterns.lua
index 09be695..be43d08 100644
--- a/heka/files/lua/common/patterns.lua
+++ b/heka/files/lua/common/patterns.lua
@@ -121,7 +121,7 @@
-- TODO(pasquier-s): build the LPEG grammar based on the log_format parameter
-- passed to eventlet.wsgi.server similar to what the build_rsyslog_grammar
-- function does for RSyslog.
-local openstack_http_status = l.P"status: "^-1 * l.Cg(l.digit^3, "http_status")
+local openstack_http_status = l.P"status: "^-1 * l.Cg(l.digit^3 / tonumber, "http_status")
local openstack_response_size = l.P"len: "^-1 * l.Cg(l.digit^1 / tonumber, "http_response_size")
local openstack_response_time = l.P"time: "^-1 * l.Cg(l.digit^1 * dot^0 * l.digit^0 / tonumber, "http_response_time")
diff --git a/heka/files/lua/decoders/collectd.lua b/heka/files/lua/decoders/collectd.lua
index b206100..ef5f27d 100644
--- a/heka/files/lua/decoders/collectd.lua
+++ b/heka/files/lua/decoders/collectd.lua
@@ -252,6 +252,14 @@
else
msg['Fields']['name'] = msg['Fields']['name'] .. sample['type_instance']
end
+ if sample['meta'] and sample['meta']['aggregate'] then
+ msg['Fields']['aggregate'] = sample['meta']['aggregate']
+ table.insert(msg['Fields']['tag_fields'], 'aggregate')
+ end
+ if sample['meta'] and sample['meta']['aggregate_id'] then
+ msg['Fields']['aggregate_id'] = sample['meta']['aggregate_id']
+ table.insert(msg['Fields']['tag_fields'], 'aggregate_id')
+ end
elseif metric_source == 'rabbitmq_info' then
msg['Fields']['name'] = 'rabbitmq' .. sep .. sample['type_instance']
if sample['meta'] and sample['meta']['queue'] then
diff --git a/heka/files/lua/decoders/keystone_wsgi_log.lua b/heka/files/lua/decoders/keystone_wsgi_log.lua
index a3c970b..8ed9551 100644
--- a/heka/files/lua/decoders/keystone_wsgi_log.lua
+++ b/heka/files/lua/decoders/keystone_wsgi_log.lua
@@ -20,12 +20,8 @@
local utils = require 'lma_utils'
local msg = {
- Timestamp = nil,
+ Logger = 'openstack.keystone',
Type = 'log',
- Hostname = nil,
- Payload = nil,
- Pid = nil,
- Fields = nil,
Severity = 6,
}
@@ -33,29 +29,43 @@
local apache_log_pattern = read_config("apache_log_pattern") or error(
"apache_log_pattern configuration must be specificed")
-local apache_grammar = common_log_format.build_apache_grammar(apache_log_pattern)
+local apache_grammar
+if string.match(apache_log_pattern, '%%') then
+ -- don't parse log format if it's a nickname (eg 'vhost_combined')
+ apache_grammar = common_log_format.build_apache_grammar(apache_log_pattern)
+end
local request_grammar = l.Ct(patt.http_request)
function process_message ()
-
- -- logger is either "keystone-wsgi-main" or "keystone-wsgi-admin"
local logger = read_message("Logger")
-
local log = read_message("Payload")
- local m
+ msg.Fields = {}
+ msg.Payload = log
+ msg.Fields.programname = logger
+ msg.Fields.severity_label = severity_label
- m = apache_grammar:match(log)
+ if not apache_grammar then
+ utils.inject_tags(msg)
+ return utils.safe_inject_message(msg)
+ end
+
+ local m = apache_grammar:match(log)
if m then
- msg.Logger = 'openstack.keystone'
- msg.Payload = log
- msg.Timestamp = m.time
+ if m.time then
+ msg.Timestamp = m.time
+ end
- msg.Fields = {}
- msg.Fields.http_status = m.status
- msg.Fields.http_response_time = m.request_time.value / 1e6 -- us to sec
- msg.Fields.programname = logger
- msg.Fields.severity_label = severity_label
+ if m.status then
+ msg.Fields.http_status = m.status
+ end
+ if m.request_time then
+ msg.Fields.http_response_time = m.request_time.value
+ if m.request_time.representation == 'us' then
+ -- convert us to sec, otherwise the value is already in sec
+ msg.Fields.http_response_time = msg.Fields.http_response_time / 1e6
+ end
+ end
local request = m.request
m = request_grammar:match(request)
diff --git a/heka/files/lua/decoders/notification.lua b/heka/files/lua/decoders/notification.lua
index 5b1195d..3f10e0d 100644
--- a/heka/files/lua/decoders/notification.lua
+++ b/heka/files/lua/decoders/notification.lua
@@ -17,13 +17,6 @@
local patt = require 'patterns'
local utils = require 'lma_utils'
-local msg = {
- Timestamp = nil,
- Type = "notification",
- Payload = nil,
- Fields = nil
-}
-
-- Mapping table from event_type prefixes to notification loggers
local logger_map = {
--cinder
@@ -108,7 +101,56 @@
local include_full_notification = read_config("include_full_notification") or false
-function process_message ()
+function process_cadf_event(notif, msg)
+ local cadf_event = notif.payload
+
+ msg.Type = 'audit'
+ msg.Logger = notif.publisher_id
+ msg.Severity = utils.label_to_severity_map[notif.priority]
+ msg.Timestamp = patt.Timestamp:match(cadf_event.eventTime)
+
+ msg.Fields.action = cadf_event.action
+ -- notif.event_type can be 'http.request' or 'http.response'
+ msg.Fields.notification_type = notif.event_type
+ -- cadf_event.eventType can be 'activity', 'monitor', ...
+ msg.Fields.event_type = cadf_event.eventType
+ msg.Fields.outcome = cadf_event.outcome
+ msg.Fields.severity_label = notif.priority
+end
+
+function process_notification(notif, msg)
+ local openstack_notif = notif.payload
+
+ msg.Type = 'notification'
+ msg.Logger = logger_map[string.match(notif.event_type, '([^.]+)')]
+ msg.Severity = utils.label_to_severity_map[notif.priority]
+ msg.Timestamp = patt.Timestamp:match(notif.timestamp)
+
+ msg.Fields.publisher, msg.Hostname = string.match(notif.publisher_id, '([^.]+)%.([%w_-]+)')
+ if openstack_notif.host ~= nil then
+ msg.Hostname = string.match(openstack_notif.host, '([%w_-]+)')
+ end
+
+ msg.Fields.event_type = notif.event_type
+ msg.Fields.severity_label = notif.priority
+ msg.Fields.hostname = msg.Hostname
+
+ for k, v in pairs(payload_fields) do
+ local val = openstack_notif[k]
+ if val ~= nil then
+ local name = payload_fields[k] or k
+ local transform = transform_functions[k]
+ if transform ~= nil then
+ msg.Fields[name] = transform(val)
+ else
+ msg.Fields[name] = val
+ end
+ end
+ end
+end
+
+function process_message()
+ local msg = {Fields={}}
local data = read_message("Payload")
local ok, notif = pcall(cjson.decode, data)
if not ok then
@@ -130,32 +172,20 @@
msg.Payload = utils.safe_json_encode(notif.payload) or '{}'
end
- msg.Fields = {}
- msg.Logger = logger_map[string.match(notif.event_type, '([^.]+)')]
- msg.Severity = utils.label_to_severity_map[notif.priority]
- msg.Timestamp = patt.Timestamp:match(notif.timestamp)
- msg.Fields.publisher, msg.Hostname = string.match(notif.publisher_id, '([^.]+)%.([%w_-]+)')
- if notif.payload.host ~= nil then
- msg.Hostname = string.match(notif.payload.host, '([%w_-]+)')
+ local ok, error_msg
+ if notif.payload.eventType and notif.payload.eventTime then
+ -- Payload of CADF event notifications always contain at least
+ -- eventType and eventTime fields
+ -- http://docs.openstack.org/developer/pycadf/specification/events.html
+ ok, error_msg = pcall(process_cadf_event, notif, msg)
+ else
+ ok, error_msg = pcall(process_notification, notif, msg)
end
- msg.Fields.event_type = notif.event_type
- msg.Fields.severity_label = notif.priority
- msg.Fields.hostname = msg.Hostname
-
- for k, v in pairs(payload_fields) do
- local val = notif.payload[k]
- if val ~= nil then
- local name = payload_fields[k] or k
- local transform = transform_functions[k]
- if transform ~= nil then
- msg.Fields[name] = transform(val)
- else
- msg.Fields[name] = val
- end
- end
+ if not ok then
+ return -1, error_msg
end
+
utils.inject_tags(msg)
-
return utils.safe_inject_message(msg)
end
diff --git a/heka/files/toml/output/udp.toml b/heka/files/toml/output/udp.toml
new file mode 100644
index 0000000..1b2854e
--- /dev/null
+++ b/heka/files/toml/output/udp.toml
@@ -0,0 +1,13 @@
+[{{ output_name }}_output]
+type="UdpOutput"
+address = "{{ output.host }}:{{ output.port }}"
+encoder = "{{ output.encoder }}"
+message_matcher = "{{ output.message_matcher }}"
+
+{%- if output.get('use_buffering', True) %}
+use_buffering = true
+[{{ output_name }}_output.buffering]
+max_buffer_size = {{ output.buffering_max_buffer_size|default(268435456) }}
+max_file_size = {{ output.buffering_max_file_size|default(67108864) }}
+full_action = "{{ output.buffering_full_action|default("drop") }}"
+{%- endif %}
diff --git a/heka/meta/heka.yml b/heka/meta/heka.yml
index e0d433d..149a23a 100644
--- a/heka/meta/heka.yml
+++ b/heka/meta/heka.yml
@@ -266,7 +266,7 @@
engine: elasticsearch
server: "http://{{ remote_collector.elasticsearch_host }}:{{ remote_collector.elasticsearch_port }}"
encoder: elasticsearch_encoder
- message_matcher: "Type == 'notification'"
+ message_matcher: "Type == 'notification' || Type == 'audit'"
{%- endif %}
{%- endif %}
aggregator:
diff --git a/tests/lua/test_patterns.lua b/tests/lua/test_patterns.lua
index 86d9507..d15b49d 100644
--- a/tests/lua/test_patterns.lua
+++ b/tests/lua/test_patterns.lua
@@ -87,12 +87,12 @@
assertEquals(patt.openstack_http:match(
'"OPTIONS / HTTP/1.0" status: 200 len: 497 time: 0.0006731'),
{http_method = 'OPTIONS', http_url = '/', http_version = '1.0',
- http_status = '200', http_response_size = 497,
+ http_status = 200, http_response_size = 497,
http_response_time = 0.0006731})
assertEquals(patt.openstack_http:match(
'foo "OPTIONS / HTTP/1.0" status: 200 len: 497 time: 0.0006731 bar'),
{http_method = 'OPTIONS', http_url = '/', http_version = '1.0',
- http_status = '200', http_response_size = 497,
+ http_status = 200, http_response_size = 497,
http_response_time = 0.0006731})
end
@@ -100,12 +100,12 @@
assertEquals(patt.openstack_http:match(
'"OPTIONS / HTTP/1.0" status: 200 len: 497 time: 0.0006731'),
{http_method = 'OPTIONS', http_url = '/', http_version = '1.0',
- http_status = '200', http_response_size = 497,
+ http_status = 200, http_response_size = 497,
http_response_time = 0.0006731})
assertEquals(patt.openstack_http:match(
'foo "OPTIONS / HTTP/1.0" status: 200 len: 497 time: 0.0006731 bar'),
{http_method = 'OPTIONS', http_url = '/', http_version = '1.0',
- http_status = '200', http_response_size = 497,
+ http_status = 200, http_response_size = 497,
http_response_time = 0.0006731})
end