Como encontrar uma agulha num palheiro de logs 11. bash, grep, awk, sed, ruby, python, perl …
syslog
Usar uma pilha pronta como a ELK:
Montar sua própria pilha
QUEM SERIA NOSSO ÍMÃ?
Elasticsearch Logstash Kibana
PostgreSQL Hadoop Flume Fluentd GNUplot
D3.js
12. e podem nos ajudar a enxergar melhor …
apresentar CSS e HTML e imagens é o mínimo que se espera
manipulação da DOM
manipulação de imagens SVG
"OS NAVEGADORES ESTÃO MAIS MODERNOS"
14. pipeline
input | filter | output
file | grep | csv
twitter | grep | json
imap | ruby | xmpp
file | grok | elasticsearch
LOGSTASH
15. banco de dados não relacional
indices
documentos
fields
full text search
ELASTICSEARCH
18. SHOW ME THE CODE!
# Download and unpack Elasticsearch, Logstash and Kibana, then start
# Elasticsearch and verify that it responds on its HTTP port (9200).
cd "$work"
mkdir -p downloads   # -p: don't fail if the directory already exists
cd downloads
wget https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.6.0.tar.gz
wget https://download.elastic.co/logstash/logstash/logstash-1.5.2.tar.gz
wget https://download.elastic.co/kibana/kibana/kibana-4.1.1-linux-x64.tar.gz
cd ..
# NOTE: the directory created above is "downloads" — the original slide
# extracted the tarballs from "download/", which does not exist.
tar zxvf downloads/elasticsearch-1.6.0.tar.gz
tar zxvf downloads/kibana-4.1.1-linux-x64.tar.gz
tar zxvf downloads/logstash-1.5.2.tar.gz
./elasticsearch-1.6.0/bin/elasticsearch
curl -X GET http://localhost:9200
# Expected response:
# {
#   "status" : 200,
#   "name" : "Kick-Ass",
#   "cluster_name" : "elasticsearch",
#   "version" : {
#     "number" : "1.6.0",
#     "build_hash" : "cdd3ac4dde4f69524ec0a14de3828cb95bbb86d0",
#     "build_timestamp" : "2015-06-09T13:36:34Z",
#     "build_snapshot" : false,
#     "lucene_version" : "4.10.4"
#   },
#   "tagline" : "You Know, for Search"
# }
# Minimal Logstash pipeline: read events from stdin, echo them to stdout.
# (The lines without a leading '#' below are what the user typed.)
./logstash-1.5.2/bin/logstash -e 'input { stdin { } } output { stdout { } }'
# Logstash startup completed
teste
# 2015-07-08T21:42:43.129Z dba01 teste
ola mundo
# 2015-07-08T21:42:47.899Z dba01 ola mundo
^C
# SIGINT received. Shutting down the pipeline. {:level=>:warn}
# Logstash shutdown completed
# Same stdin pipeline, but with the rubydebug codec so each event is
# printed as a structured hash (message, @version, @timestamp, host).
./logstash-1.5.2/bin/logstash -e 'input { stdin { } } output { stdout { codec => rubydebug } }'
# Logstash startup completed
ola mundo!
# {
#        "message" => "ola mundo!",
#       "@version" => "1",
#     "@timestamp" => "2015-07-08T21:44:00.804Z",
#           "host" => "dba01"
# }
teste
# {
#        "message" => "teste",
#       "@version" => "1",
#     "@timestamp" => "2015-07-08T21:45:00.075Z",
#           "host" => "dba01"
# }
# Pipeline that indexes stdin events into Elasticsearch. Logstash joins
# the local cluster as a client node (port 9301) and ships events to it.
./logstash-1.5.2/bin/logstash -e 'input { stdin { } } output { elasticsearch { } }'
# Jul 08, 2015 6:48:49 PM org.elasticsearch.node.internal.InternalNode <init>
# INFORMAÇÕES: [logstash-dba01-8559-11620] version[1.5.1], pid[8559], build[5e38401/2015-04-09T13:41:35Z]
# Jul 08, 2015 6:48:49 PM org.elasticsearch.node.internal.InternalNode <init>
# INFORMAÇÕES: [logstash-dba01-8559-11620] initializing ...
# Jul 08, 2015 6:48:49 PM org.elasticsearch.plugins.PluginsService <init>
# INFORMAÇÕES: [logstash-dba01-8559-11620] loaded [], sites []
# Jul 08, 2015 6:48:50 PM org.elasticsearch.node.internal.InternalNode <init>
# INFORMAÇÕES: [logstash-dba01-8559-11620] initialized
# Jul 08, 2015 6:48:50 PM org.elasticsearch.node.internal.InternalNode start
# INFORMAÇÕES: [logstash-dba01-8559-11620] starting ...
# Jul 08, 2015 6:48:50 PM org.elasticsearch.transport.TransportService doStart
# INFORMAÇÕES: [logstash-dba01-8559-11620] bound_address {inet[/0:0:0:0:0:0:0:0:9301]}, publish_address {inet[/10.1.6.88:9301]}
# Jul 08, 2015 6:48:50 PM org.elasticsearch.discovery.DiscoveryService doStart
# INFORMAÇÕES: [logstash-dba01-8559-11620] elasticsearch/4htwknhiS2S9swIvVsWTxQ
# Jul 08, 2015 6:48:53 PM org.elasticsearch.cluster.service.InternalClusterService$UpdateTask run
# INFORMAÇÕES: [logstash-dba01-8559-11620] detected_master [Kick-Ass][-eatoEY8TWecIb34yKjM8w][dba01][inet[/10.1.6.88:9300]], added {[Kick-Ass][-eatoEY8TWecIb34yKjM8w][dba01][inet[/10.1.6.88:9300]],}, reason: zen-disco-receive(from master [[Kick-Ass][-eatoEY8TWecIb34yKjM8w][dba01][inet[/10.1.6.88:9300]]])
# Jul 08, 2015 6:48:53 PM org.elasticsearch.node.internal.InternalNode start
# INFORMAÇÕES: [logstash-dba01-8559-11620] started
# Logstash startup completed
# Meanwhile, in the elasticsearch log:
# [2015-07-08 18:48:53,736][INFO ][cluster.service ] [Kick-Ass] added
# {[logstash-dba01-8559-11620][4htwknhiS2S9swIvVsWTxQ][dba01][inet[/10.1.6.88:9301]]{client=true,
# data=false},}, reason: zen-disco-receive(join from
# node[[logstash-dba01-8559-11620][4htwknhiS2S9swIvVsWTxQ][dba01][inet[/10.1.6.88:9301]]{client=true,
# data=false}])
# Back in logstash, type some events to index:
teste 123
teste 123 4
# In another shell: query everything Logstash has indexed so far.
curl 'http://localhost:9200/_search?pretty'
# {
#   "took" : 1,
#   "timed_out" : false,
#   "_shards" : {
#     "total" : 5,
#     "successful" : 5,
#     "failed" : 0
#   },
#   "hits" : {
#     "total" : 3,
#     "max_score" : 1.0,
#     "hits" : [ {
#       "_index" : "logstash-2015.07.08",
#       "_type" : "logs",
#       "_id" : "AU5vpqcN6gKLePtWPjrH",
#       "_score" : 1.0,
#       "_source":{"message":"teste 123","@version":"1","@timestamp":"2015-07-08T21:51:09.580Z","host":"dba01"}
#     }, {
#       "_index" : "logstash-2015.07.08",
#       "_type" : "logs",
#       "_id" : "AU5vpr0F6gKLePtWPjrI",
#       "_score" : 1.0,
#       "_source":{"message":"teste 123 4","@version":"1","@timestamp":"2015-07-08T21:51:15.204Z","host":"dba01"}
#     }, {
#       "_index" : "logstash-2015.07.08",
#       "_type" : "logs",
#       "_id" : "AU5vpqDe6gKLePtWPjrG",
#       "_score" : 1.0,
#       "_source":{"message":"","@version":"1","@timestamp":"2015-07-08T21:51:07.436Z","host":"dba01"}
#     } ]
#   }
# }
# Start Kibana and open its UI, then load the sample data sets from the
# Kibana getting-started guide into Elasticsearch.
./kibana-4.1.1-linux-x64/bin/kibana
firefox http://localhost:5601/
cd downloads
wget https://www.elastic.co/guide/en/kibana/3.0/snippets/shakespeare.json
# -O is required here: without it wget saves the file as the literal name
# "accounts.zip?raw=true" and the unzip below fails. The URL is quoted so
# the shell does not treat '?' as a glob character.
wget -O accounts.zip 'https://github.com/bly2k/files/blob/master/accounts.zip?raw=true'
wget https://download.elastic.co/demos/kibana/gettingstarted/logs.jsonl.gz
unzip accounts.zip
gunzip logs.jsonl.gz
# Create the shakespeare index with explicit mappings: speaker/play_name
# are kept not_analyzed so they can be aggregated on verbatim.
curl -XPUT http://localhost:9200/shakespeare -d '
{
  "mappings" : {
    "_default_" : {
      "properties" : {
        "speaker" : {"type": "string", "index" : "not_analyzed" },
        "play_name" : {"type": "string", "index" : "not_analyzed" },
        "line_id" : { "type" : "integer" },
        "speech_number" : { "type" : "integer" }
      }
    }
  }
}
';
# {"acknowledged":true}
# Bulk-load the three data sets and confirm the resulting indices.
curl -XPOST 'localhost:9200/accounts/account/_bulk?pretty' --data-binary @accounts.json
curl -XPOST 'localhost:9200/shakespeare/_bulk?pretty' --data-binary @shakespeare.json
curl -XPOST 'localhost:9200/_bulk?pretty' --data-binary @logs.jsonl
curl 'localhost:9200/_cat/indices?v'