Sending Logstash logs to C:/busapps/rrsb/gbl1/logstash/7.0.0/logs which is now configured via log4j2.properties [2019-11-05T06:01:44,272][DEBUG][logstash.modules.scaffold] Found module {:module_name=>"fb_apache", :directory=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/modules/fb_apache/configuration"} [2019-11-05T06:01:44,495][DEBUG][logstash.plugins.registry] Adding plugin to the registry {:name=>"fb_apache", :type=>:modules, :class=>#<LogStash::Modules::Scaffold:0x41d2528f @directory="C:/busapps/rrsb/gbl1/logstash/7.0.0/modules/fb_apache/configuration", @module_name="fb_apache", @kibana_version_parts=["6", "0", "0"]>} [2019-11-05T06:01:44,757][DEBUG][logstash.modules.scaffold] Found module {:module_name=>"netflow", :directory=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/modules/netflow/configuration"} [2019-11-05T06:01:44,775][DEBUG][logstash.plugins.registry] Adding plugin to the registry {:name=>"netflow", :type=>:modules, :class=>#<LogStash::Modules::Scaffold:0x31daef26 @directory="C:/busapps/rrsb/gbl1/logstash/7.0.0/modules/netflow/configuration", @module_name="netflow", @kibana_version_parts=["6", "0", "0"]>} [2019-11-05T06:01:46,527][DEBUG][logstash.runner ] -------- Logstash Settings ( means modified) --------- [2019-11-05T06:01:46,743][DEBUG][logstash.runner ] node.name: "xxxxxxx" [2019-11-05T06:01:47,044][DEBUG][logstash.runner ] path.config: "C:\busapps\rrsb\gbl1\logstash\7.0.0\bin\pipelines" [2019-11-05T06:01:47,070][DEBUG][logstash.runner ] path.data: "C:/busapps/rrsb/gbl1/logstash/7.0.0/data" [2019-11-05T06:01:47,790][DEBUG][logstash.runner ] modules.cli: [] [2019-11-05T06:01:47,790][DEBUG][logstash.runner ] modules: [] [2019-11-05T06:01:47,881][DEBUG][logstash.runner ] modules_list: [] [2019-11-05T06:01:47,881][DEBUG][logstash.runner ] modules_variable_list: [] [2019-11-05T06:01:47,881][DEBUG][logstash.runner ] modules_setup: false [2019-11-05T06:01:47,898][DEBUG][logstash.runner ] config.test_and_exit: true (default: false) [2019-11-05T06:01:47,916][DEBUG][logstash.runner ] config.reload.automatic: false [2019-11-05T06:01:47,944][DEBUG][logstash.runner ] config.reload.interval: 3000000000 [2019-11-05T06:01:47,961][DEBUG][logstash.runner ] config.support_escapes: false [2019-11-05T06:01:47,961][DEBUG][logstash.runner ] config.field_reference.parser: "STRICT" [2019-11-05T06:01:47,978][DEBUG][logstash.runner ] metric.collect: true [2019-11-05T06:01:48,024][DEBUG][logstash.runner ] pipeline.id: "main" [2019-11-05T06:01:48,214][DEBUG][logstash.runner ] pipeline.system: false [2019-11-05T06:01:48,241][DEBUG][logstash.runner ] pipeline.workers: 4 [2019-11-05T06:01:48,241][DEBUG][logstash.runner ] pipeline.batch.size: 125 [2019-11-05T06:01:48,304][DEBUG][logstash.runner ] pipeline.batch.delay: 50 [2019-11-05T06:01:48,304][DEBUG][logstash.runner ] pipeline.unsafe_shutdown: false [2019-11-05T06:01:48,340][DEBUG][logstash.runner ] pipeline.java_execution: true [2019-11-05T06:01:48,350][DEBUG][logstash.runner ] pipeline.reloadable: true [2019-11-05T06:01:48,367][DEBUG][logstash.runner ] path.plugins: [] [2019-11-05T06:01:48,384][DEBUG][logstash.runner ] config.debug: true (default: false) [2019-11-05T06:01:48,447][DEBUG][logstash.runner ] *log.level: "debug" (default: "info") [2019-11-05T06:01:48,447][DEBUG][logstash.runner ] version: false [2019-11-05T06:01:48,465][DEBUG][logstash.runner ] help: false [2019-11-05T06:01:48,529][DEBUG][logstash.runner ] log.format: "plain" [2019-11-05T06:01:48,538][DEBUG][logstash.runner ] http.host: "127.0.0.1" [2019-11-05T06:01:48,538][DEBUG][logstash.runner ] http.port: 9600..9700 [2019-11-05T06:01:48,538][DEBUG][logstash.runner ] http.environment: "production" [2019-11-05T06:01:48,538][DEBUG][logstash.runner ] queue.type: "memory" [2019-11-05T06:01:48,538][DEBUG][logstash.runner ] queue.drain: false [2019-11-05T06:01:48,538][DEBUG][logstash.runner ] queue.page_capacity: 67108864 [2019-11-05T06:01:48,538][DEBUG][logstash.runner ] queue.max_bytes: 1073741824 [2019-11-05T06:01:48,574][DEBUG][logstash.runner ] queue.max_events: 0 [2019-11-05T06:01:48,574][DEBUG][logstash.runner ] queue.checkpoint.acks: 1024 [2019-11-05T06:01:48,638][DEBUG][logstash.runner ] queue.checkpoint.writes: 1024 [2019-11-05T06:01:48,694][DEBUG][logstash.runner ] queue.checkpoint.interval: 1000 [2019-11-05T06:01:48,756][DEBUG][logstash.runner ] queue.checkpoint.retry: false [2019-11-05T06:01:48,918][DEBUG][logstash.runner ] dead_letter_queue.enable: false [2019-11-05T06:01:48,928][DEBUG][logstash.runner ] dead_letter_queue.max_bytes: 1073741824 [2019-11-05T06:01:49,043][DEBUG][logstash.runner ] slowlog.threshold.warn: -1 [2019-11-05T06:01:49,053][DEBUG][logstash.runner ] slowlog.threshold.info: -1 [2019-11-05T06:01:49,053][DEBUG][logstash.runner ] slowlog.threshold.debug: -1 [2019-11-05T06:01:49,070][DEBUG][logstash.runner ] slowlog.threshold.trace: -1 [2019-11-05T06:01:49,070][DEBUG][logstash.runner ] keystore.classname: "org.logstash.secret.store.backend.JavaKeyStore" [2019-11-05T06:01:49,116][DEBUG][logstash.runner ] keystore.file: "C:/busapps/rrsb/gbl1/logstash/7.0.0/config/logstash.keystore" [2019-11-05T06:01:49,116][DEBUG][logstash.runner ] path.queue: "C:/busapps/rrsb/gbl1/logstash/7.0.0/data/queue" [2019-11-05T06:01:49,116][DEBUG][logstash.runner ] path.dead_letter_queue: "C:/busapps/rrsb/gbl1/logstash/7.0.0/data/dead_letter_queue" [2019-11-05T06:01:49,116][DEBUG][logstash.runner ] path.settings: "C:/busapps/rrsb/gbl1/logstash/7.0.0/config" [2019-11-05T06:01:49,150][DEBUG][logstash.runner ] path.logs: "C:/busapps/rrsb/gbl1/logstash/7.0.0/logs" [2019-11-05T06:01:49,150][DEBUG][logstash.runner ] xpack.management.enabled: false [2019-11-05T06:01:49,150][DEBUG][logstash.runner ] xpack.management.logstash.poll_interval: 5000000000 [2019-11-05T06:01:49,167][DEBUG][logstash.runner ] xpack.management.pipeline.id: ["main"] [2019-11-05T06:01:49,178][DEBUG][logstash.runner ] xpack.management.elasticsearch.username: "logstash_system" [2019-11-05T06:01:49,178][DEBUG][logstash.runner ] xpack.management.elasticsearch.hosts: ["https://localhost:9200"] [2019-11-05T06:01:49,178][DEBUG][logstash.runner ] xpack.management.elasticsearch.ssl.verification_mode: "certificate" [2019-11-05T06:01:49,194][DEBUG][logstash.runner ] xpack.management.elasticsearch.sniffing: false [2019-11-05T06:01:49,194][DEBUG][logstash.runner ] xpack.monitoring.enabled: false [2019-11-05T06:01:49,194][DEBUG][logstash.runner ] xpack.monitoring.elasticsearch.hosts: ["http://localhost:9200"] [2019-11-05T06:01:49,289][DEBUG][logstash.runner ] xpack.monitoring.collection.interval: 10000000000 [2019-11-05T06:01:49,289][DEBUG][logstash.runner ] xpack.monitoring.collection.timeout_interval: 600000000000 [2019-11-05T06:01:49,307][DEBUG][logstash.runner ] xpack.monitoring.elasticsearch.username: "logstash_system" [2019-11-05T06:01:49,325][DEBUG][logstash.runner ] xpack.monitoring.elasticsearch.ssl.verification_mode: "certificate" [2019-11-05T06:01:49,334][DEBUG][logstash.runner ] xpack.monitoring.elasticsearch.sniffing: false [2019-11-05T06:01:49,334][DEBUG][logstash.runner ] xpack.monitoring.collection.pipeline.details.enabled: true [2019-11-05T06:01:49,822][DEBUG][logstash.runner ] xpack.monitoring.collection.config.enabled: true [2019-11-05T06:01:49,867][DEBUG][logstash.runner ] node.uuid: "" [2019-11-05T06:01:49,931][DEBUG][logstash.runner ] --------------- Logstash Settings ------------------- [2019-11-05T06:01:50,309][WARN ][logstash.config.source.multilocal] Ignoring the 'pipelines.yml' file because modules or command line options are specified [2019-11-05T06:01:50,760][DEBUG][logstash.config.source.local.configpathloader] Skipping the following files while reading config since they don't match the specified glob pattern {:files=>["C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/benchmark.sh", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/cpdump", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/dependencies-report", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/ingest-convert.sh", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/logstash", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/logstash-keystore", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/logstash-keystore.bat", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/logstash-plugin", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/logstash-plugin.bat", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/logstash.bat", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/logstash.lib.sh", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pipelines", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pqcheck", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pqrepair", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/ruby", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/setup.bat", "C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/system-install"]} [2019-11-05T06:01:50,788][DEBUG][logstash.config.source.local.configpathloader] Reading config file {:config_file=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pipelines/logstash - Copy.conf"} [2019-11-05T06:01:50,834][DEBUG][logstash.config.source.local.configpathloader] Reading config file {:config_file=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pipelines/logstash.conf"} [2019-11-05T06:01:51,167][DEBUG][logstash.config.source.local.configpathloader] Reading config file {:config_file=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pipelines/logstash_bkp.conf"} [2019-11-05T06:01:51,445][DEBUG][logstash.config.pipelineconfig] -------- Logstash Config --------- [2019-11-05T06:01:51,463][DEBUG][logstash.config.pipelineconfig] Config from source {:source=>LogStash::Config::Source::Local, :pipeline_id=>:main} [2019-11-05T06:01:51,491][DEBUG][logstash.config.pipelineconfig] Config string {:protocol=>"file", :id=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pipelines/logstash - Copy.conf"} [2019-11-05T06:01:51,571][DEBUG][logstash.config.pipelineconfig]

input { beats { port => 5044 } }

filter { grok { match => { "message" => "%{LOGLEVEL:loglevel} %{GREEDYDATA:message}"} overwrite => [ "message" ] }

    json{
            source => "message"
    }

    if [message] != "" {
            mutate {
                    add_tag => [ "spam_log" ]
            }
    }
    else {
       mutate{
          add_tag => [ "analyze" ]
               add_field => {"index_name" => "%{Process}"}
       }
            mutate {
                    gsub => ["index_name","[ *<>%|?]","_"]
            }
            ruby {
                    code => 'event.set("index_name", event.get("index_name").to_s.force_encoding("ISO-8859-1").downcase)'
            }

       mutate{
          convert => { "[RunTime]" => "integer" }
          add_tag => [ "processed log" ]
            }
       mutate {
          lowercase => [ "[ItemStatus]" ]
              }

    }

    if "spam_log" in [tags] {
            if "execution ended" in [message] {
                    mutate { add_tag => ["total"]}
                    ruby {
                            path => "C:/busapps/rrsb/gbl1/logstash/7.0.0/ruby_scripts/remove_env_suffix.rb"
                    }

            }
            else {
                    drop{}
            }
    }

}

output { if ("total" in [tags]) { elasticsearch { hosts => ["localhost:9200"] index => "totalexecution-%{+YYYY}" user => elastic password => 3wUwULD3QJaKke

            }
    }
    else {
            elasticsearch {
                    hosts => ["localhost:9200"]
            index => "robot_%{index_name}-%{+YYYY.MM}"
                            user => elastic
                            password => 3wUwULD3QJaKke

            }
    }

}

[2019-11-05T06:01:51,604][DEBUG][logstash.config.pipelineconfig] Config string {:protocol=>"file", :id=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pipelines/logstash.conf"} [2019-11-05T06:01:51,632][DEBUG][logstash.config.pipelineconfig]

input { beats { port => 5044 } }

filter { grok { match => { "message" => "%{LOGLEVEL:loglevel} %{GREEDYDATA:message}"} overwrite => [ "message" ] }

    json{
            source => "message"
    }

    if [message] != "" {
            mutate {
                    add_tag => [ "spam_log" ]
            }
    }
    else {
       mutate{
          add_tag => [ "analyze" ]
               add_field => {"index_name" => "%{Process}"}
       }
            mutate {
                    gsub => ["index_name","[ *<>%|?]","_"]
            }
            ruby {
                    code => 'event.set("index_name", event.get("index_name").to_s.force_encoding("ISO-8859-1").downcase)'
            }

       mutate{
          convert => { "[RunTime]" => "integer" }
          add_tag => [ "processed log" ]
            }
       mutate {
          lowercase => [ "[ItemStatus]" ]
              }

    }

    if "spam_log" in [tags] {
            if "execution ended" in [message] {
                    mutate { add_tag => ["total"]}
                    ruby {
                            path => "C:/busapps/rrsb/gbl1/logstash/7.0.0/ruby_scripts/remove_env_suffix.rb"
                    }

            }
            else {
                    drop{}
            }
    }

}

output { stdout{} if ("total" in [tags]) { elasticsearch { hosts => ["localhost:9200"] index => "totalexecution-%{+YYYY}" user => "elastic" password => "3wUwULD3QJaKke"

            }
    }
    else {
            elasticsearch {
                    hosts => ["localhost:9200"]
            index => "robot_%{index_name}-%{+YYYY.MM}"
                            user => "elastic"
                            password => "3wUwULD3QJaKke"

            }
    }

}

[2019-11-05T06:01:51,632][DEBUG][logstash.config.pipelineconfig] Config string {:protocol=>"file", :id=>"C:/busapps/rrsb/gbl1/logstash/7.0.0/bin/pipelines/logstash_bkp.conf"} [2019-11-05T06:01:51,632][DEBUG][logstash.config.pipelineconfig]

input { beats { port => 5044 } }

filter { grok { match => { "message" => "%{LOGLEVEL:loglevel} %{GREEDYDATA:message}"} overwrite => [ "message" ] }

    json{
            source => "message"
    }

    if [message] != "" {
            mutate {
                    add_tag => [ "spam_log" ]
            }
    }
    else {
       mutate{
          add_tag => [ "analyze" ]
               add_field => {"index_name" => "%{Process}"}
       }
            mutate {
                    gsub => ["index_name","[ *<>%|?]","_"]
            }
            ruby {
                    code => 'event.set("index_name", event.get("index_name").to_s.force_encoding("ISO-8859-1").downcase)'
            }

       mutate{
          convert => { "[RunTime]" => "integer" }
          add_tag => [ "processed log" ]
            }
       mutate {
          lowercase => [ "[ItemStatus]" ]
              }

    }

    if "spam_log" in [tags] {
            if "execution ended" in [message] {
                    mutate { add_tag => ["total"]}
                    ruby {
                            path => "C:/busapps/rrsb/gbl1/logstash/7.0.0/ruby_scripts/remove_env_suffix.rb"
                    }

            }
            else {
                    drop{}
            }
    }

}

output { if ("total" in [tags]) { elasticsearch { hosts => ["localhost:9200"] index => "totalexecution-%{+YYYY}" user => elastic password => "3wUwULD3QJaKke"

            }
    }
    else {
            elasticsearch {
                    hosts => ["localhost:9200"]
            index => "robot_%{index_name}-%{+YYYY.MM}"
                            user => elastic
                            password => "3wUwULD3QJaKke"

            }
    }

}

[2019-11-05T06:01:51,868][DEBUG][logstash.config.pipelineconfig] Merged config [2019-11-05T06:01:51,931][DEBUG][logstash.config.pipelineconfig]

input { beats { port => 5044 } }

filter { grok { match => { "message" => "%{LOGLEVEL:loglevel} %{GREEDYDATA:message}"} overwrite => [ "message" ] }

    json{
            source => "message"
    }

    if [message] != "" {
            mutate {
                    add_tag => [ "spam_log" ]
            }
    }
    else {
       mutate{
          add_tag => [ "analyze" ]
               add_field => {"index_name" => "%{Process}"}
       }
            mutate {
                    gsub => ["index_name","[ *<>%|?]","_"]
            }
            ruby {
                    code => 'event.set("index_name", event.get("index_name").to_s.force_encoding("ISO-8859-1").downcase)'
            }

       mutate{
          convert => { "[RunTime]" => "integer" }
          add_tag => [ "processed log" ]
            }
       mutate {
          lowercase => [ "[ItemStatus]" ]
              }

    }

    if "spam_log" in [tags] {
            if "execution ended" in [message] {
                    mutate { add_tag => ["total"]}
                    ruby {
                            path => "C:/busapps/rrsb/gbl1/logstash/7.0.0/ruby_scripts/remove_env_suffix.rb"
                    }

            }
            else {
                    drop{}
            }
    }

}

output { if ("total" in [tags]) { elasticsearch { hosts => ["localhost:9200"] index => "totalexecution-%{+YYYY}" user => elastic password => 3wUwULD3QJaKke

            }
    }
    else {
            elasticsearch {
                    hosts => ["localhost:9200"]
            index => "robot_%{index_name}-%{+YYYY.MM}"
                            user => elastic
                            password => 3wUwULD3QJaKke

            }
    }

}

input { beats { port => 5044 } }

filter { grok { match => { "message" => "%{LOGLEVEL:loglevel} %{GREEDYDATA:message}"} overwrite => [ "message" ] }

    json{
            source => "message"
    }

    if [message] != "" {
            mutate {
                    add_tag => [ "spam_log" ]
            }
    }
    else {
       mutate{
          add_tag => [ "analyze" ]
               add_field => {"index_name" => "%{Process}"}
       }
            mutate {
                    gsub => ["index_name","[ *<>%|?]","_"]
            }
            ruby {
                    code => 'event.set("index_name", event.get("index_name").to_s.force_encoding("ISO-8859-1").downcase)'
            }

       mutate{
          convert => { "[RunTime]" => "integer" }
          add_tag => [ "processed log" ]
            }
       mutate {
          lowercase => [ "[ItemStatus]" ]
              }

    }

    if "spam_log" in [tags] {
            if "execution ended" in [message] {
                    mutate { add_tag => ["total"]}
                    ruby {
                            path => "C:/busapps/rrsb/gbl1/logstash/7.0.0/ruby_scripts/remove_env_suffix.rb"
                    }

            }
            else {
                    drop{}
            }
    }

}

output { stdout{} if ("total" in [tags]) { elasticsearch { hosts => ["localhost:9200"] index => "totalexecution-%{+YYYY}" user => "elastic" password => "3wUwULD3QJaKke"

            }
    }
    else {
            elasticsearch {
                    hosts => ["localhost:9200"]
            index => "robot_%{index_name}-%{+YYYY.MM}"
                            user => "elastic"
                            password => "3wUwULD3QJaKke"

            }
    }

}

input { beats { port => 5044 } }

filter { grok { match => { "message" => "%{LOGLEVEL:loglevel} %{GREEDYDATA:message}"} overwrite => [ "message" ] }

    json{
            source => "message"
    }

    if [message] != "" {
            mutate {
                    add_tag => [ "spam_log" ]
            }
    }
    else {
       mutate{
          add_tag => [ "analyze" ]
               add_field => {"index_name" => "%{Process}"}
       }
            mutate {
                    gsub => ["index_name","[ *<>%|?]","_"]
            }
            ruby {
                    code => 'event.set("index_name", event.get("index_name").to_s.force_encoding("ISO-8859-1").downcase)'
            }

       mutate{
          convert => { "[RunTime]" => "integer" }
          add_tag => [ "processed log" ]
            }
       mutate {
          lowercase => [ "[ItemStatus]" ]
              }

    }

    if "spam_log" in [tags] {
            if "execution ended" in [message] {
                    mutate { add_tag => ["total"]}
                    ruby {
                            path => "C:/busapps/rrsb/gbl1/logstash/7.0.0/ruby_scripts/remove_env_suffix.rb"
                    }

            }
            else {
                    drop{}
            }
    }

}

output { if ("total" in [tags]) { elasticsearch { hosts => ["localhost:9200"] index => "totalexecution-%{+YYYY}" user => elastic password => "3wUwULD3QJaKke"

            }
    }
    else {
            elasticsearch {
                    hosts => ["localhost:9200"]
            index => "robot_%{index_name}-%{+YYYY.MM}"
                            user => elastic
                            password => "3wUwULD3QJaKke"

            }
    }

}

[2019-11-05T06:02:08,100][FATAL][logstash.runner ] The given configuration is invalid. Reason: Expected one of #, { at line 68, column 5 (byte 1448) after output { if ("total" in [tags]) { elasticsearch { hosts => ["localhost:9200"] index => "totalexecution-%{+YYYY}" user => elastic password => 3wUwULD3QJaKke

[2019-11-05T06:02:08,184][ERROR][org.logstash.Logstash ] java.lang.IllegalStateException: Logstash stopped processing because of an error: (SystemExit) exit