#!/usr/bin/env bash
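#
# Usage: ./run.bash <yaml-config-file>
#   e.g. ./run.bash org-receiver-config.yml
#
# Runs Anklet with `go run`, pretty-prints its JSON logs through jq, and on
# exit checks the captured log file for duplicate JSON keys.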
set -exo pipefail
YAML_CONFIG_FILE="${1}"
[ -z "${YAML_CONFIG_FILE}" ] && echo "ARG 1 (Yaml Config File; org-receiver-config.yml) is not set" && exit 1
SCRIPT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
cd "${SCRIPT_DIR}"
# Function to check for duplicate JSON keys in the log file
# Usage: check_duplicate_keys <log_file>
check_duplicate_keys() {
  local log_file="$1"
  if [[ -z "$log_file" ]]; then
    echo "ERROR: log_file parameter is required for check_duplicate_keys"
    return 1
  fi
  if [[ ! -f "$log_file" ]]; then
    echo "ERROR: log file $log_file does not exist"
    return 0
  fi
  echo "Checking for duplicate JSON keys in log file..."
  LOG_FILE="$log_file" python3 << 'EOF'
import sys
import re
import os

def detect_duplicate_keys_in_log(log_file):
    """Check for duplicate keys within each JSON object in log file"""
    lines_with_duplicates = []
    try:
        with open(log_file, 'r') as f:
            for line_num, line in enumerate(f, 1):
                original_line = line.strip()
                if not original_line:
                    continue
                duplicates = detect_duplicates_in_line(original_line)
                if duplicates:
                    lines_with_duplicates.append((line_num, original_line, duplicates))
    except Exception as e:
        print('Error reading log file: {}'.format(e), file=sys.stderr)
        return False
    if lines_with_duplicates:
        print('\n❌ Duplicate JSON keys found within individual objects:', file=sys.stderr)
        print('=' * 80, file=sys.stderr)
        for line_num, original_line, dups in lines_with_duplicates:
            print('📍 Line {} - Duplicate keys: {}'.format(line_num, ', '.join(sorted(dups))), file=sys.stderr)
            print(' Object: {}'.format(original_line), file=sys.stderr)
            print('', file=sys.stderr)
        print('📊 Summary: {} lines contain duplicate keys'.format(len(lines_with_duplicates)), file=sys.stderr)
        return False
    else:
        print('✅ No duplicate JSON keys found within individual objects.')
        return True
def detect_duplicates_in_line(json_str):
    """Detect duplicate keys by parsing the line with an object_pairs_hook.

    A plain json.loads() silently keeps only the last value for a repeated
    key, so duplicates must be recorded while the parser still sees the raw
    key/value pairs of each object (including nested objects and objects
    inside arrays).
    """
    import json
    all_duplicates = []

    def check_object_for_duplicates(pairs):
        """Record duplicate keys seen in a single object, then build the dict."""
        seen_keys = set()
        for key, _value in pairs:
            if key in seen_keys:
                all_duplicates.append(key)
            else:
                seen_keys.add(key)
        return dict(pairs)

    try:
        # The hook runs once per JSON object, so nested objects and objects
        # inside arrays are checked without any explicit recursion.
        json.loads(json_str, object_pairs_hook=check_object_for_duplicates)
    except (json.JSONDecodeError, ValueError):
        return []
    return list(set(all_duplicates))
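
# Illustrative example (not executed here): a line with a repeated key in a
# nested object would be reported, e.g.
#   detect_duplicates_in_line('{"a": {"b": 1, "b": 2}}')  ->  ['b']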
if __name__ == '__main__':
    log_file = os.environ.get('LOG_FILE')
    if not log_file:
        print("ERROR: LOG_FILE environment variable not set", file=sys.stderr)
        sys.exit(1)
    success = detect_duplicate_keys_in_log(log_file)
    sys.exit(0 if success else 1)
EOF
  return $?
}
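# Hypothetical example of a log line the check would flag ("repo" appears
# twice in one object):
#   {"level":"INFO","msg":"queued","attributes":{"repo":"a","repo":"b"}}
# The function can also be run by hand against an existing log, e.g.:
#   check_duplicate_keys /tmp/org-receiver-config.yml.log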
# Cleanup function to check logs and clean up
cleanup() {
  echo ""
  echo "Performing cleanup and validation..."
  # Kill the Go process if it's still running
  if kill -0 $go_pid 2>/dev/null; then
    echo "Stopping Go process (PID: $go_pid)..."
    kill -SIGINT $go_pid 2>/dev/null || true
    wait $go_pid 2>/dev/null || true
  fi
  # Check for duplicate keys in the log file
  log_file="/tmp/$(basename "${YAML_CONFIG_FILE}").log"
  check_duplicate_keys "$log_file"
  # Log file location
  if [ -f "$log_file" ]; then
    echo "Log file saved at: $log_file"
    echo "You can review it or remove it manually."
  fi
}
# We use jq here instead of pretty-printing in logging.go so that we can ensure valid JSON is output from Go.
# Feed stdout through jq so we still validate the JSON, but surface time/name/level/msg/attributes for readability (errors red, warnings yellow, debug grey; names get stable colors).
# -Rr handles non-JSON lines gracefully (e.g., startup messages, errors).
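# Hypothetical example of how one log line is rendered (field values are
# illustrative, not from a real run; name and level are also wrapped in ANSI colors):
#   in : {"time":"2024-01-01T00:00:00Z","level":"INFO","msg":"starting","attributes":{"name":"receiver"}}
#   out: time=2024-01-01T00:00:00Z name=receiver level=INFO msg=starting attributes={"name":"receiver"}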
LOG_LEVEL=${LOG_LEVEL:-dev} go run main.go -c "${YAML_CONFIG_FILE}" 2>&1 \
  | tee "/tmp/$(basename "${YAML_CONFIG_FILE}").log" \
  | jq -Rr '
    def palette: ["\u001b[32m", "\u001b[35m", "\u001b[36m", "\u001b[94m", "\u001b[95m", "\u001b[96m"];
    def name_color($name):
      palette as $palette |
      (reduce ($name | tostring | explode[]) as $c (0; . + $c)) as $sum |
      ($palette | length) as $plen |
      (if $plen > 0 then $palette[$sum % $plen] else "" end);
    # Try to parse as JSON; if it fails, output the raw line
    . as $raw |
    try (fromjson |
      if type == "object" then
        (.level // .severity // "") as $level |
        (.time // .ts // .timestamp // "") as $time |
        (.msg // "") as $msg |
        (.attributes // {}) as $attrs |
        (if ($attrs | type) == "object" and ($attrs | has("name")) then ($attrs.name // "") else "" end) as $rawName |
        (if ($rawName | tostring | length) > 0 then ($rawName | tostring) else "-" end) as $nameStr |
        (if ($rawName | tostring | length) > 0 then name_color($rawName) else "\u001b[37m" end) as $nameColor |
        ($nameColor + $nameStr + "\u001b[0m") as $coloredName |
        ($level | tostring) as $levelStr |
        ($time | tostring) as $timeStr |
        ($msg | tostring) as $msgStr |
        ($attrs | tojson) as $attrsJson |
        (if ($attrs | type) == "object" and ($attrs | has("workflowJobRunID")) then " workflowJobRunID=" + ($attrs.workflowJobRunID | tostring) else "" end) as $workflowJobRunIDStr |
        (if has("error") then " error=" + (.error | tojson)
         elif has("err") then " error=" + (.err | tojson)
         else "" end) as $errorStr |
        ($levelStr | ascii_upcase) as $levelUpper |
        (if $levelUpper == "ERROR" then
          "\u001b[31mtime=" + $timeStr + " name=" + $coloredName + "\u001b[31m level=" + $levelStr + " msg=" + $msgStr + $workflowJobRunIDStr + $errorStr + " attributes=" + $attrsJson + "\u001b[0m"
        elif ($levelUpper == "WARN" or $levelUpper == "WARNING") then
          "\u001b[33mtime=" + $timeStr + " name=" + $coloredName + "\u001b[33m level=" + $levelStr + " msg=" + $msgStr + $workflowJobRunIDStr + $errorStr + " attributes=" + $attrsJson + "\u001b[0m"
        elif $levelUpper == "DEBUG" then
          "\u001b[90mtime=" + $timeStr + " name=" + $coloredName + "\u001b[90m level=" + $levelStr + " msg=" + $msgStr + $workflowJobRunIDStr + $errorStr + " attributes=" + $attrsJson + "\u001b[0m"
        else
          "\u001b[33mtime\u001b[0m=\u001b[37m" + $timeStr + "\u001b[0m name=" + $coloredName + " level=" + $levelStr + " msg=" + $msgStr + $workflowJobRunIDStr + $errorStr + " attributes=" + $attrsJson
        end)
      else
        $raw
      end
    ) catch $raw' &
go_pid=$!
# Set up trap to run cleanup on exit
trap cleanup EXIT
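# The EXIT trap fires on normal exit, Ctrl+C, and failures caught by `set -e`,
# so the duplicate-key check always runs against the captured log.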
echo "Anklet is running... Press Ctrl+C to stop and check for duplicate keys."
wait $go_pid