ELK Stack Setup

Complete ELK stack for log aggregation, processing, and visualization

Script Author

Rowan de Haas

Script Details

Created 5 months ago
Size 3 KB
Script Content

#cloud-config
package_update: true
package_upgrade: true

packages:
  - openjdk-11-jdk
  - wget
  - gnupg
  - apt-transport-https

write_files:
  - path: /etc/elasticsearch/elasticsearch.yml
    content: |
      cluster.name: elk-cluster
      node.name: elk-node-1
      path.data: /var/lib/elasticsearch
      path.logs: /var/log/elasticsearch
      network.host: localhost
      http.port: 9200
      discovery.type: single-node
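      # Security is disabled below to keep this single-node demo simple; enable X-Pack security before exposing the node beyond localhost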
      xpack.security.enabled: false
      xpack.security.enrollment.enabled: false

  - path: /etc/logstash/conf.d/logstash.conf
    content: |
      input {
        beats {
          port => 5044
        }
        syslog {
          port => 5140
        }
      }
      
      filter {
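        # Parse SSH authentication messages shipped by the Filebeat system module (auth fileset)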
        if [fileset][module] == "system" {
          if [fileset][name] == "auth" {
            grok {
              match => { "message" => ["%{SYSLOGTIMESTAMP:[system][auth][timestamp]} %{IPORHOST:[system][auth][hostname]} sshd(?:\\[%{POSINT:[system][auth][pid]}\\])?: %{DATA:[system][auth][ssh][event]} %{DATA:[system][auth][ssh][method]} for (invalid user )?%{DATA:[system][auth][user]} from %{IPORHOST:[system][auth][ssh][ip]} port %{INT:[system][auth][ssh][port]} ssh2"] }
            }
          }
        }
      }
      
      output {
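        # Index events per Beat and day, e.g. filebeat-7.17.9-2024.05.01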
        elasticsearch {
          hosts => ["localhost:9200"]
          manage_template => false
          index => "%{[@metadata][beat]}-%{[@metadata][version]}-%{+YYYY.MM.dd}"
        }
      }

  - path: /etc/kibana/kibana.yml
    content: |
      server.port: 5601
      server.host: "localhost"
      elasticsearch.hosts: ["http://localhost:9200"]
      kibana.index: ".kibana"
      logging.dest: /var/log/kibana/kibana.log

runcmd:
  # Add Elastic repository
  - wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
  - echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-7.x.list
  
  # Install Elasticsearch, Logstash, and Kibana
  - apt-get update
  - apt-get install -y elasticsearch logstash kibana
  
  # Cap the Elasticsearch heap at 512 MB via jvm.options.d (the supported override mechanism on 7.x)
  - printf '%s\n' '-Xms512m' '-Xmx512m' > /etc/elasticsearch/jvm.options.d/heap.options
  
  # Enable and start services
  - systemctl enable elasticsearch
  - systemctl start elasticsearch
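  # Give Elasticsearch time to come up before starting Logstash and Kibana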
  - sleep 30
  - systemctl enable logstash
  - systemctl start logstash
  - systemctl enable kibana
  - systemctl start kibana
  
  # Configure firewall
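  # Note: Elasticsearch and Kibana are bound to localhost above, so only the Beats port (5044) is reachable remotely unless network.host/server.host are changed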
  - ufw allow 5601/tcp
  - ufw allow 9200/tcp
  - ufw allow 5044/tcp
  
  - echo "Kibana available at http://localhost:5601"
  - echo "Elasticsearch available at http://localhost:9200"

How to Use This Script

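Before launching an instance, the cloud-config can be validated locally. The command below assumes a reasonably recent cloud-init release; on older releases the equivalent subcommand is cloud-init devel schema.

cloud-init schema --config-file script.yaml

Note that the provider examples below use the smallest instance sizes purely for illustration. Even with the 512 MB Elasticsearch heap set above, Elasticsearch, Logstash, and Kibana together generally need at least 2 GB of RAM, so choose a larger size for real workloads.
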
Cloud Provider Examples

Amazon EC2

aws ec2 run-instances \
  --image-id ami-12345678 \
  --instance-type t3.micro \
  --user-data file://script.yaml

DigitalOcean

doctl compute droplet create my-droplet \
  --region nyc1 \
  --image ubuntu-22-04-x64 \
  --size s-1vcpu-1gb \
  --user-data-file script.yaml

Google Cloud

gcloud compute instances create my-instance \
  --image-family ubuntu-2204-lts \
  --image-project ubuntu-os-cloud \
  --metadata-from-file user-data=script.yaml

Specifying an Ubuntu image explicitly ensures the instance boots with cloud-init available to process the user-data.
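
Verifying the Installation

Once cloud-init has finished (usually a few minutes after first boot), the stack can be sanity-checked from the instance itself. These commands assume the default ports configured by this script.

sudo cloud-init status --wait
sudo systemctl --no-pager status elasticsearch logstash kibana
curl -s 'http://localhost:9200/_cluster/health?pretty'
curl -s -o /dev/null -w '%{http_code}\n' http://localhost:5601/api/status

All three services should report active, the cluster health should be green or yellow (yellow is normal for a single node), and the Kibana status endpoint should return 200.

Shipping Logs with Filebeat

The Logstash pipeline above listens for Beats traffic on port 5044 and parses SSH auth events from the Filebeat system module. The sketch below shows one way to ship logs from another host; ELK_HOST is a placeholder for the stack's address, and the same Elastic 7.x apt repository as in the script is assumed.

wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
echo "deb https://artifacts.elastic.co/packages/7.x/apt stable main" | tee /etc/apt/sources.list.d/elastic-7.x.list
apt-get update && apt-get install -y filebeat
filebeat modules enable system
# In /etc/filebeat/filebeat.yml, comment out the output.elasticsearch section and set:
#   output.logstash:
#     hosts: ["ELK_HOST:5044"]
systemctl enable --now filebeat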