Remove files from version control

debian-package
Nicolas Arenas 2025-07-30 13:25:40 +02:00
parent 1a74077844
commit b9e05cc771
10 changed files with 11 additions and 663 deletions

@@ -1,6 +0,0 @@
oglog (0.0.1-1) UNRELEASED; urgency=low
* Initial release. Closes: #nnnn
<nnnn is the bug number of your ITP>
-- Nicolas Arenas <narenas@qindel.com> Mon, 07 Jul 2025 11:17:41 +0000

debian/changelog vendored

@@ -1,4 +1,4 @@
-oglog (0.0.1-1) UNRELEASED; urgency=low
+oglog (0.0.1-2) UNRELEASED; urgency=low
* Initial release. Closes: #nnnn
<nnnn is the bug number of your ITP>

debian/files vendored

@@ -1,2 +0,0 @@
oglog_0.0.1-1_amd64.buildinfo unknown optional
oglog_0.0.1-1_amd64.deb unknown optional

@@ -1,33 +0,0 @@
[server]
protocol = https
cert_file = /opt/opengnsys/oglog/etc/certs/server.crt
cert_key = /opt/opengnsys/oglog/etc/certs/server.key
http_port = 3030
[analytics]
reporting_enabled = false
check_for_updates = false
check_for_plugin_updates = false
[database]
type = sqlite3
path = /var/lib/grafana/grafana.db
[auth]
disable_login_form = false
#################################### SMTP / Emailing ##########################
[smtp]
enabled = true
host = smtp.qindel.es:465
user = lgromero@qindel.com
password = ***
;cert_file =
;key_file =
skip_verify = true
from_address = lgromero@qindel.com
from_name = Grafana
[emails]
welcome_email_on_sign_up = false
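
This ini lives outside the Debian default (/etc/grafana/grafana.ini), so grafana-server has to be pointed at it explicitly. A minimal sketch; the config path is inferred from the cert paths above and the homepath is the stock Debian location, both assumptions:

    # hypothetical invocation; adjust paths to the actual install layout
    grafana-server \
      --config=/opt/opengnsys/oglog/etc/grafana/grafana.ini \
      --homepath=/usr/share/grafana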

@@ -1,8 +0,0 @@
apiVersion: 1
datasources:
- name: Prometheus
type: prometheus
access: proxy
url: https://${OGLOG_SERVER}:9090
isDefault: true
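
Grafana expands ${OGLOG_SERVER} from the environment of the grafana-server process when it reads provisioning files, so the variable must be set before the service starts. A minimal sketch using a systemd drop-in; the unit name grafana-server and the hostname are assumptions:

    # assumes the standard grafana-server systemd unit
    sudo mkdir -p /etc/systemd/system/grafana-server.service.d
    printf '[Service]\nEnvironment=OGLOG_SERVER=oglog.example.org\n' \
      | sudo tee /etc/systemd/system/grafana-server.service.d/oglog.conf
    sudo systemctl daemon-reload
    sudo systemctl restart grafana-server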

@@ -1,605 +0,0 @@
{
"filter_ogdhcp_pipeline" : {
"description" : "Parse logs to extract http_code and desc, while preserving original message",
"processors" : [
{
"script" : {
"if" : "ctx.syslog?.identifier != 'ogdhcp'",
"source" : "\n ctx.debug = 'Skipped: identifier is ' + (ctx.syslog?.identifier ?: 'undefined');\n ctx.pipeline_stop = true; // Stops further processing but retains the document\n "
}
},
{
"set" : {
"field" : "debug",
"value" : "Processed: identifier is ogdhcp"
}
},
{
"script" : {
"source" : "\n ctx.processed_message = ctx.message;\n "
}
},
{
"gsub" : {
"field" : "processed_message",
"pattern" : "^app\\.[A-Z]+: ",
"replacement" : "",
"ignore_failure" : true
}
},
{
"gsub" : {
"field" : "processed_message",
"pattern" : "^request\\.INFO: Matched route \".*?\"\\. ",
"replacement" : "",
"ignore_failure" : true
}
},
{
"json" : {
"field" : "processed_message",
"target_field" : "parsed_message",
"ignore_failure" : true
}
},
{
"set" : {
"field" : "route",
"value" : "{{parsed_message.route}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.route != null"
}
},
{
"set" : {
"field" : "route_parameters",
"value" : "{{parsed_message.route_parameters}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.route_parameters != null"
}
},
{
"set" : {
"field" : "request_uri",
"value" : "{{parsed_message.request_uri}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.request_uri != null"
}
},
{
"set" : {
"field" : "method",
"value" : "{{parsed_message.method}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.method != null"
}
},
{
"set" : {
"field" : "http_code",
"value" : "{{parsed_message.http_code}}",
"ignore_empty_value" : true
}
},
{
"set" : {
"field" : "description",
"value" : "{{parsed_message.desc}}",
"ignore_empty_value" : true
}
}
]
},
"master_pipeline" : {
"description" : "Master pipeline to route logs based on syslog.identifier",
"processors" : [
{
"pipeline" : {
"name" : "filter_tftp_pipeline",
"if" : "ctx.syslog?.identifier == 'in.tftpd'"
}
},
{
"pipeline" : {
"name" : "filter_ogboot_pipeline",
"if" : "ctx.syslog?.identifier == 'ogboot'"
}
},
{
"pipeline" : {
"name" : "filter_ogdhcp_pipeline",
"if" : "ctx.syslog?.identifier == 'ogdhcp'"
}
},
{
"pipeline" : {
"name" : "kea_dhcp_pipeline",
"if" : "ctx.syslog?.identifier == 'kea-dhcp4'"
}
},
{
"pipeline" : {
"name" : "ogrepo_pipeline",
"if" : "ctx.syslog?.identifier == 'ogrepo-api'"
}
},
{
"pipeline" : {
"name" : "docker_logs_pipeline",
"if" : "ctx.syslog?.identifier == 'docker'"
}
},
{
"json": {
"field": "message",
"target_field": "parsed_message",
"ignore_failure": true,
"if": "ctx.syslog?.identifier == 'ogcore'"
}
},
{
"set" : {
"field" : "debug",
"value" : "No matching pipeline, skipping further processing.",
"if" : "ctx.syslog?.identifier != 'in.tftpd' && ctx.syslog?.identifier != 'ogboot' && ctx.syslog?.identifier != 'kea-dhcp4' && ctx.syslog?.identifier != 'ogrepo-api' && ctx.syslog?.identifier != 'docker'"
}
}
]
},
"json_parse_pipeline" : {
"description" : "Parse JSON payload from logs",
"processors" : [
{
"json" : {
"field" : "message",
"target_field" : "parsed_json",
"ignore_failure" : true
}
}
]
},
"docker_logs_pipeline" : {
"description" : "Parse Docker logs and route based on container name",
"processors" : [
{
"grok" : {
"field" : "message",
"patterns" : [
"%{DATA:container.name}\\s*\\|%{GREEDYDATA:log_details}"
],
"ignore_failure" : true
}
},
{
"pipeline" : {
"name" : "parse_nginx_logs",
"if" : "ctx.container?.name == 'ogcore-nginx'",
"ignore_failure" : true
}
},
{
"json" : {
"field" : "log_details",
"target_field" : "parsed_json",
"ignore_failure" : true
}
}
]
},
"json_parse_with_replacement_debug" : {
"description" : "Debug replacement of single quotes with double quotes and parse JSON",
"processors" : [
{
"script" : {
"source" : "\n ctx.message = ctx.message.replace(\"'\", \"\\\"\");\n "
}
},
{
"set" : {
"field" : "debug_message",
"value" : "{{ message }}"
}
},
{
"json" : {
"field" : "message",
"target_field" : "parsed_json",
"ignore_failure" : true
}
}
]
},
"ogrepo_parse_pipeline" : {
"description" : "Parse ogRepo logs for detailed JSON information",
"processors" : [
{
"grok" : {
"field" : "message",
"patterns" : [
"%{TIMESTAMP_ISO8601:timestamp} %{DATA:hostname} %{DATA:service}\\[%{NUMBER:pid}\\]: %{GREEDYDATA:json_payload}"
],
"ignore_failure" : true
}
},
{
"json" : {
"field" : "json_payload",
"target_field" : "parsed_json",
"ignore_failure" : true
}
},
{
"rename" : {
"field" : "parsed_json.component",
"target_field" : "component",
"ignore_failure" : true
}
},
{
"rename" : {
"field" : "parsed_json.severity",
"target_field" : "severity",
"ignore_failure" : true
}
},
{
"rename" : {
"field" : "parsed_json.http_code",
"target_field" : "http_code",
"ignore_failure" : true
}
},
{
"rename" : {
"field" : "parsed_json.operation",
"target_field" : "operation",
"ignore_failure" : true
}
},
{
"rename" : {
"field" : "parsed_json.desc",
"target_field" : "description",
"ignore_failure" : true
}
}
]
},
"kea_dhcp_pipeline" : {
"description" : "Parse logs from kea-dhcp4 to extract key fields",
"processors" : [
{
"grok" : {
"field" : "message",
"patterns" : [
"%{TIMESTAMP_ISO8601:timestamp} %{LOGLEVEL:level} \\[%{DATA:service}/%{NUMBER:pid}\\.%{DATA:thread_id}\\] %{DATA:event_type} \\[hwtype=%{NUMBER:hw_type} %{MAC:mac_address}\\](?:, cid=\\[%{DATA:cid}\\])?, tid=%{DATA:transaction_id}: (?:lease %{IP:ip_address} %{GREEDYDATA:event_details})?"
],
"ignore_failure" : true
}
},
{
"set" : {
"field" : "service",
"value" : "kea-dhcp4",
"ignore_failure" : true
}
}
]
},
"kea_dhcp_filebeat_pipeline" : {
"description" : "Parse Kea DHCP logs from Filebeat",
"processors" : [
{
"grok" : {
"field" : "message",
"patterns" : [
"%{TIMESTAMP_ISO8601:timestamp} %{LOGLEVEL:level} \\[%{DATA:service}/%{NUMBER:pid}\\.%{DATA:thread_id}\\] %{DATA:event_type} \\[hwtype=%{NUMBER:hw_type} %{MAC:mac_address}\\](?:, cid=\\[%{DATA:cid}\\])?, tid=%{DATA:transaction_id}: (?:lease %{IP:ip_address} %{GREEDYDATA:event_details})?"
],
"ignore_failure" : true
}
},
{
"set" : {
"field" : "service",
"value" : "kea-dhcp4",
"ignore_failure" : true
}
},
{
"date" : {
"field" : "timestamp",
"formats" : [
"yyyy-MM-dd HH:mm:ss.SSS"
],
"target_field" : "@timestamp",
"ignore_failure" : true
}
}
]
},
"filter_ogboot_pipeline" : {
"description" : "Parse logs to extract http_code and desc, while preserving original message",
"processors" : [
{
"script" : {
"if" : "ctx.syslog?.identifier != 'ogboot'",
"source" : "\n ctx.debug = 'Skipped: identifier is ' + (ctx.syslog?.identifier ?: 'undefined');\n ctx.pipeline_stop = true; // Stops further processing but retains the document\n "
}
},
{
"set" : {
"field" : "debug",
"value" : "Processed: identifier is ogboot"
}
},
{
"script" : {
"source" : "\n ctx.processed_message = ctx.message;\n "
}
},
{
"gsub" : {
"field" : "processed_message",
"pattern" : "^app\\.[A-Z]+: ",
"replacement" : "",
"ignore_failure" : true
}
},
{
"gsub" : {
"field" : "processed_message",
"pattern" : "^request\\.INFO: Matched route \".*?\"\\. ",
"replacement" : "",
"ignore_failure" : true
}
},
{
"json" : {
"field" : "processed_message",
"target_field" : "parsed_message",
"ignore_failure" : true
}
},
{
"set" : {
"field" : "route",
"value" : "{{parsed_message.route}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.route != null"
}
},
{
"set" : {
"field" : "route_parameters",
"value" : "{{parsed_message.route_parameters}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.route_parameters != null"
}
},
{
"set" : {
"field" : "request_uri",
"value" : "{{parsed_message.request_uri}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.request_uri != null"
}
},
{
"set" : {
"field" : "method",
"value" : "{{parsed_message.method}}",
"ignore_empty_value" : true,
"if" : "ctx.parsed_message?.method != null"
}
},
{
"set" : {
"field" : "http_code",
"value" : "{{parsed_message.http_code}}",
"ignore_empty_value" : true
}
},
{
"set" : {
"field" : "description",
"value" : "{{parsed_message.desc}}",
"ignore_empty_value" : true
}
}
]
},
"ogrepo_pipeline" : {
"description" : "Pipeline to parse ogRepo logs",
"processors" : [
{
"set" : {
"field" : "debug_message",
"value" : "{{message}}"
}
},
{
"script" : {
"source" : "\n if (ctx.message != null) {\n ctx.message = ctx.message.replace(\"'\", \"\\\"\")\n }\n "
}
},
{
"json" : {
"field" : "message",
"target_field" : "parsed_json",
"ignore_failure" : true
}
},
{
"remove" : {
"field" : "message",
"ignore_failure" : true
}
}
]
},
"parse_nginx_logs" : {
"description" : "Parse logs from Nginx in the 'main' log format with debug information",
"processors" : [
{
"set" : {
"field" : "debug",
"value" : "Entered parse_nginx_logs pipeline",
"ignore_failure" : true
}
},
{
"gsub" : {
"field" : "log_details",
"pattern" : "^\\s+",
"replacement" : "",
"ignore_failure" : true
}
},
{
"grok" : {
"field" : "log_details",
"patterns" : [
"%{IP:client_ip} %{GREEDYDATA:rest}"
],
"ignore_failure" : true
}
},
{
"grok" : {
"field" : "rest",
"patterns" : [
"- %{DATA:remote_user} \\[%{HTTPDATE:timestamp}\\] %{GREEDYDATA:rest_after_timestamp}"
],
"ignore_failure" : true
}
},
{
"grok" : {
"field" : "rest_after_timestamp",
"patterns" : [
"\"%{WORD:method} %{DATA:request_path} HTTP/%{NUMBER:http_version}\" %{NUMBER:status} %{NUMBER:body_bytes} %{GREEDYDATA:rest_referer}"
],
"ignore_failure" : true
}
},
{
"grok" : {
"field" : "rest_referer",
"patterns" : [
"\"%{DATA:referer}\" \"%{GREEDYDATA:nginx_user_agent}\""
],
"ignore_failure" : true
}
},
{
"date" : {
"field" : "timestamp",
"formats" : [
"dd/MMM/yyyy:HH:mm:ss Z"
],
"target_field" : "@timestamp",
"ignore_failure" : true
}
},
{
"remove" : {
"field" : [
"rest"
],
"ignore_missing" : true
}
}
]
},
"kea_dhcp_parse_pipeline" : {
"description" : "Parse Kea DHCP logs for detailed information",
"processors" : [
{
"grok" : {
"field" : "message",
"patterns" : [
"%{TIMESTAMP_ISO8601:timestamp} +%{LOGLEVEL:log_level} \\[%{DATA:source}/%{NUMBER:pid}.%{NUMBER:thread_id}\\] %{WORD:message_id} \\[%{DATA:hwtype}\\], cid=%{DATA:cid}, tid=%{DATA:tid}: lease %{IP:lease} has been allocated for %{NUMBER:lease_duration} seconds"
],
"ignore_failure" : true
}
},
{
"set" : {
"field" : "service",
"value" : "kea-dhcp4",
"ignore_failure" : true
}
}
]
},
"json_parse_with_replacement" : {
"description" : "Replace single quotes with double quotes and parse JSON",
"processors" : [
{
"script" : {
"source" : "\n ctx.message = ctx.message.replace(\"'\", \"\\\"\");\n "
}
},
{
"json" : {
"field" : "message",
"target_field" : "parsed_json"
}
}
]
},
"tftp_parse_pipeline" : {
"description" : "Parse logs from in.tftpd to extract filename and client IP",
"processors" : [
{
"grok" : {
"field" : "message",
"patterns" : [
"RRQ from %{HOSTNAME:client_ip} filename %{GREEDYDATA:filename}"
],
"ignore_failure" : true
}
},
{
"set" : {
"field" : "service",
"value" : "tftpd",
"ignore_failure" : true
}
}
]
},
"filter_tftp_pipeline" : {
"description" : "Parse logs from in.tftpd to extract filename and client IP",
"processors" : [
{
"grok" : {
"field" : "message",
"patterns" : [
"RRQ from %{HOSTNAME:client_ip} filename %{GREEDYDATA:filename}"
],
"ignore_failure" : true
}
},
{
"set" : {
"field" : "service_name",
"value" : "tftpd",
"ignore_failure" : true
}
}
]
},
"copy-message-pipeline" : {
"description" : "Pipeline que copia el campo message a message_raw",
"processors" : [
{
"set" : {
"field" : "message_raw",
"value" : "{{message}}"
}
}
]
}
}
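
Note that this file is a single JSON object keyed by pipeline name, while Elasticsearch's ingest API registers pipelines one at a time via PUT _ingest/pipeline/<name>. A minimal loader sketch; the host, the -k flag, and the file name are assumptions, not part of the package:

    #!/bin/bash
    # Register each pipeline in the file under its own name (hypothetical host/file).
    ES_URL="https://localhost:9200"
    PIPELINES_FILE="elasticsearch-pipelines.json"
    jq -r 'keys[]' "$PIPELINES_FILE" | while read -r name; do
      jq --arg n "$name" '.[$n]' "$PIPELINES_FILE" |
        curl -k -X PUT "$ES_URL/_ingest/pipeline/$name" \
          -H "Content-Type: application/json" \
          --data-binary @-
    done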

@@ -1 +1,2 @@
# You must remove unused comment lines for the released package.
+patch001
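
debian/patches/series is read by dpkg-source (source format 3.0 quilt): one patch name per line, applied top to bottom. To inspect or apply the queue by hand, quilt is the usual tool; a quick sketch:

    # run from the unpacked source tree
    export QUILT_PATCHES=debian/patches
    quilt series    # lists patch001
    quilt push -a   # applies all patches in series order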

@@ -18,7 +18,7 @@ API_TOKEN=$(cat "$TOKEN_FILE")
if [ -f "$RESOURCE_DIR/datasources/datasources.json" ]; then
echo "Importando datasources..."
jq -c '.[]' "$RESOURCE_DIR/datasources/datasources.json" | while read -r datasource; do
curl -s -X POST "$GRAFANA_URL/api/datasources" \
curl -k -X POST "$GRAFANA_URL/api/datasources" \
-H "Authorization: Bearer $API_TOKEN" \
-H "Content-Type: application/json" \
-d "$datasource" > /dev/null
@@ -32,7 +32,7 @@ for f in "$RESOURCE_DIR/dashboards"/*.json; do
echo "Importando $(basename "$f")"
jq 'del(.dashboard.id) | {dashboard: .dashboard, overwrite: true}' "$f" | \
curl -s -X POST "$GRAFANA_URL/api/dashboards/db" \
curl -k -X POST "$GRAFANA_URL/api/dashboards/db" \
-H "Authorization: Bearer $API_TOKEN" \
-H "Content-Type: application/json" \
--data-binary @-
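
The jq -c '.[]' loop above implies that datasources.json is a JSON array with one object per datasource, each in the shape Grafana's POST /api/datasources expects. A hypothetical example of the file it would iterate over; the values are illustrative:

    cat > datasources.json <<'EOF'
    [
      {
        "name": "Prometheus",
        "type": "prometheus",
        "access": "proxy",
        "url": "https://oglog-server:9090",
        "isDefault": true
      }
    ]
    EOF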

@@ -1,10 +1,11 @@
#!/bin/bash
set -x
# Variables
#GRAFANA_URL="https://oglog-graf.mytld:3000"
GRAFANA_URL=$1
GRAFANA_USER="admin"
GRAFANA_PASS="admin"
GRAFANA_PASS="qindel"
#CERT="/etc/grafana/oglog-graf.mytld.crt.pem"
CERT=$2
#KEY="/etc/grafana/oglog-graf.mytld.key.pem"
@@ -15,14 +16,14 @@ TOKEN_TTL=3600
TOKEN_FILE="./grafana_token.txt"
# 1. Check whether the Service Account already exists
-SA_ID=$(curl -s --cert "$CERT" --key "$KEY" -u $GRAFANA_USER:$GRAFANA_PASS \
+SA_ID=$(curl -k -u $GRAFANA_USER:$GRAFANA_PASS \
"$GRAFANA_URL/api/serviceaccounts/search" \
| jq -r '.serviceAccounts[] | select(.name=="'"$SA_NAME"'") | .id')
# 2. Create it if it does not exist
if [ -z "$SA_ID" ]; then
echo "Creando Service Account: $SA_NAME"
-SA_ID=$(curl -s --cert "$CERT" --key "$KEY" -u $GRAFANA_USER:$GRAFANA_PASS \
+SA_ID=$(curl -k -u $GRAFANA_USER:$GRAFANA_PASS \
-H "Content-Type: application/json" \
-d '{ "name": "'"$SA_NAME"'", "role": "Admin" }' \
"$GRAFANA_URL/api/serviceaccounts" | jq -r '.id')
@@ -38,18 +39,18 @@ if [ -f "$TOKEN_FILE" ]; then
else
echo "Buscando token existente con nombre $TOKEN_NAME..."
-TOKEN_ID=$(curl -s --cert "$CERT" --key "$KEY" -u $GRAFANA_USER:$GRAFANA_PASS \
+TOKEN_ID=$(curl -k -u $GRAFANA_USER:$GRAFANA_PASS \
"$GRAFANA_URL/api/serviceaccounts/$SA_ID/tokens" \
| jq -r '.[] | select(.name=="'"$TOKEN_NAME"'") | .id')
if [ -n "$TOKEN_ID" ]; then
echo "Borrando token anterior con ID $TOKEN_ID..."
-curl -s -X DELETE --cert "$CERT" --key "$KEY" -u $GRAFANA_USER:$GRAFANA_PASS \
+curl -k -X DELETE -u $GRAFANA_USER:$GRAFANA_PASS \
"$GRAFANA_URL/api/serviceaccounts/$SA_ID/tokens/$TOKEN_ID" > /dev/null
fi
echo "Creando nuevo token..."
-TOKEN=$(curl -s --cert "$CERT" --key "$KEY" -u $GRAFANA_USER:$GRAFANA_PASS \
+TOKEN=$(curl -k -u $GRAFANA_USER:$GRAFANA_PASS \
-H "Content-Type: application/json" \
-d '{ "name": "'"$TOKEN_NAME"'", "secondsToLive": '"$TOKEN_TTL"' }' \
"$GRAFANA_URL/api/serviceaccounts/$SA_ID/tokens" | jq -r '.key')