Skip to content

Commit 4e9b67b

Browse files
feat: add api for preparing demo data
Adds two API endpoints: GET /api/v1/demodata?action=ingest ingests 10k demo records via the ingest script, and GET /api/v1/demodata?action=filters creates the saved SQL queries and saved filters via the filters script.
1 parent c4533be commit 4e9b67b

File tree

9 files changed

+868
-4
lines changed

9 files changed

+868
-4
lines changed

resources/filters_demo_data.sh

Lines changed: 280 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,280 @@
1+
#!/usr/bin/env bash
#
# Demo-data filter setup: configuration, CLI parsing, and auth header.
# Environment overrides: P_URL, P_USERNAME, P_PASSWORD, P_STREAM, SILENT.

# Configuration with validation (env var wins over the default)
P_URL=${P_URL:-"http://localhost:8000"}
P_USERNAME=${P_USERNAME:-"admin"}
P_PASSWORD=${P_PASSWORD:-"admin"}
P_STREAM=${P_STREAM:-"demodata"}

# Silent mode handling: --silent flag or SILENT=true in the environment.
# NOTE: the original looped with a stray `shift` inside `for arg in "$@"`,
# which mutates the positional params mid-iteration to no useful effect;
# the flag scan needs no shifting at all.
SILENT=${SILENT:-false}
for arg in "$@"; do
  case "$arg" in
    --silent)
      SILENT=true
      ;;
    -h|--help)
      echo "Usage: $0 [--silent]"
      echo "  --silent    Run in silent mode"
      exit 0
      ;;
  esac
done

# Only show config if not silent
if [[ "$SILENT" != "true" ]]; then
  echo "Configuration:"
  echo "  URL: $P_URL"
  echo "  Username: $P_USERNAME"
  echo "  Stream: $P_STREAM"
  echo
fi

# Pre-compute the Basic auth header once. printf avoids echo's
# non-portable -n handling; credentials are short enough that base64
# output never line-wraps.
AUTH_HEADER="Authorization: Basic $(printf '%s' "$P_USERNAME:$P_PASSWORD" | base64)"
36+
37+
# Common curl function with retry logic
38+
#######################################
# Issue an HTTP request with retry-on-timeout logic.
# Globals:   AUTH_HEADER (read), P_STREAM (read), SILENT (read)
# Arguments: $1 url, $2 method (only POST adds a body; anything else is a
#            plain GET), $3 request body (may be empty), $4 content type
#            (default application/json), $5 max retries (default 3),
#            $6 base timeout (default 15 — currently unused, kept for
#            interface compatibility; attempts use 10s/20s/30s)
# Outputs:   retry progress to stdout (unless SILENT), errors via print_error
# Returns:   0 on HTTP 200/201, 1 otherwise
#######################################
curl_with_retry() {
  local url="$1"
  local method="$2"
  local data="$3"
  local content_type="${4:-application/json}"
  local max_retries="${5:-3}"
  local base_timeout="${6:-15}"  # retained for callers; see header comment
  local retry_count=0
  local connect_timeout=5
  local max_time=10
  # local is essential here: the original left temp_file/response global,
  # so a stale temp file from a previous call could be re-sent as the body
  # of a later POST.
  local temp_file=""
  local response="" curl_exit_code=0

  # Stage the body in a temp file so large payloads survive argv limits.
  if [[ -n "$data" ]]; then
    temp_file=$(mktemp) || { print_error "Failed to create temporary file"; return 1; }
    printf '%s\n' "$data" > "$temp_file"
  fi

  while [[ $retry_count -lt $max_retries ]]; do
    # Escalating timeout: 10s, 20s, 30s for attempts 1, 2, 3.
    max_time=$((10 + (retry_count * 10)))

    # Build argv as an array instead of eval'ing a command string: this
    # avoids quoting bugs and shell injection via URL/data contents.
    local curl_args=(-s -w "\n%{http_code}" --max-time "$max_time" --connect-timeout "$connect_timeout")
    curl_args+=(-H "Content-Type: $content_type" -H "$AUTH_HEADER")

    # Stream header only for ingestion requests.
    if [[ "$url" == *"/ingest"* ]]; then
      curl_args+=(-H "X-P-STREAM: $P_STREAM")
    fi

    if [[ "$method" == "POST" ]]; then
      curl_args+=(-X POST)
      if [[ -n "$temp_file" ]]; then
        curl_args+=(--data-binary "@$temp_file")
      elif [[ -n "$data" ]]; then
        curl_args+=(-d "$data")
      fi
    fi

    curl_args+=("$url")

    response=$(curl "${curl_args[@]}" 2>&1)
    curl_exit_code=$?

    if [[ $curl_exit_code -eq 0 ]]; then
      if [[ -n "$response" ]]; then
        # -w appended the status code as the final line; split it off.
        local status_code response_body
        status_code=$(echo "$response" | tail -n1)
        response_body=$(echo "$response" | sed '$d')

        [[ -n "$temp_file" ]] && rm -f "$temp_file"

        if [[ "$status_code" == "200" || "$status_code" == "201" ]]; then
          return 0
        else
          print_error "HTTP $status_code: $response_body"
          return 1
        fi
      else
        # Original leaked the temp file on this path; clean it up too.
        [[ -n "$temp_file" ]] && rm -f "$temp_file"
        print_error "No response from server"
        return 1
      fi
    elif [[ $curl_exit_code -eq 28 ]]; then
      # curl exit 28 == operation timed out: retry with the next timeout level.
      retry_count=$((retry_count + 1))

      if [[ "$SILENT" != "true" ]]; then
        if [[ -n "$data" ]]; then
          echo "Timeout (${#data} chars) - retry $retry_count with $((10 + (retry_count * 10)))s timeout"
        else
          echo "Timeout - retry $retry_count with $((10 + (retry_count * 10)))s timeout"
        fi
      fi

      # Brief pause before retry
      sleep 1
    else
      # Non-timeout failure: no point retrying; report below.
      break
    fi
  done

  # Clean up temp file on failure
  [[ -n "$temp_file" ]] && rm -f "$temp_file"

  # Final error reporting
  print_error "curl failed with exit code $curl_exit_code after $retry_count retries"
  if [[ -n "$data" ]]; then
    print_error "Data size: ${#data} characters, Final timeout: ${max_time}s"
  fi
  [[ "$SILENT" != "true" ]] && print_error "Response: $response"

  return 1
}
145+
146+
# ANSI color codes for terminal output.
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[0;34m'
NC='\033[0m'  # reset / no color

# Logging helpers. Info/success go to stdout and are suppressed in silent
# mode; errors always go to stderr. Written as if-statements so each helper
# returns 0 even when suppressed — the original `[[ ... ]] && echo` form
# returned status 1 under SILENT=true, which would abort the script if
# `set -e` were ever enabled.
print_info()    { if [[ "$SILENT" != "true" ]]; then echo -e "${BLUE}[INFO]${NC} $1"; fi; }
print_success() { if [[ "$SILENT" != "true" ]]; then echo -e "${GREEN}[SUCCESS]${NC} $1"; fi; }
print_error()   { echo -e "${RED}[ERROR]${NC} $1" >&2; }
155+
156+
# Test connection before creating filters
157+
if [[ "$SILENT" != "true" ]]; then
158+
print_info "Testing connectivity..."
159+
if curl_with_retry "$P_URL" "GET" "" "text/html" 1 5; then
160+
print_info "Basic connectivity OK"
161+
else
162+
print_error "Cannot connect to $P_URL - check if server is running"
163+
exit 1
164+
fi
165+
fi
166+
167+
#######################################
# Create 10 saved SQL filters via POST /api/v1/filters.
# Each entry is "name|description|query", split on '|'.
# Globals:   P_STREAM, P_URL, SILENT (read)
# Returns:   0; individual failures are reported but do not abort
#######################################
create_sql_filters() {
  print_info "Creating 10 SQL filters..."

  local sql_filters=(
    "error_logs|Monitor all ERROR and FATAL severity events|SELECT * FROM $P_STREAM WHERE severity_text IN ('ERROR', 'FATAL') ORDER BY time_unix_nano DESC LIMIT 100"
    "high_response_time|Identify requests with extended response times|SELECT \"service.name\", \"url.path\", body FROM $P_STREAM WHERE body LIKE '%duration%' ORDER BY time_unix_nano DESC LIMIT 50"
    "service_health_summary|Service health metrics by severity|SELECT \"service.name\", severity_text, COUNT(*) as count FROM $P_STREAM GROUP BY \"service.name\", severity_text ORDER BY count DESC"
    "api_endpoint_performance|API endpoint request patterns|SELECT \"url.path\", COUNT(*) as request_count, \"service.name\" FROM $P_STREAM GROUP BY \"url.path\", \"service.name\" ORDER BY request_count DESC LIMIT 20"
    "authentication_failures|Monitor auth-related warnings and errors|SELECT * FROM $P_STREAM WHERE \"url.path\" LIKE '%login%' AND severity_text IN ('WARN', 'ERROR') ORDER BY time_unix_nano DESC LIMIT 100"
    "upstream_cluster_analysis|Request distribution across clusters|SELECT \"upstream.cluster\", COUNT(*) as request_count, \"service.name\" FROM $P_STREAM GROUP BY \"upstream.cluster\", \"service.name\" ORDER BY request_count DESC"
    "trace_analysis|Multi-span traces for distributed tracking|SELECT trace_id, COUNT(*) as span_count, \"service.name\" FROM $P_STREAM GROUP BY trace_id, \"service.name\" HAVING span_count > 1 ORDER BY span_count DESC LIMIT 10"
    "user_agent_distribution|Client types and user agent patterns|SELECT \"user_agent.original\", COUNT(*) as usage_count FROM $P_STREAM GROUP BY \"user_agent.original\" ORDER BY usage_count DESC LIMIT 15"
    "source_address_analysis|Request distribution by source IP|SELECT \"source.address\", COUNT(*) as request_count, COUNT(DISTINCT \"service.name\") as services_accessed FROM $P_STREAM GROUP BY \"source.address\" ORDER BY request_count DESC LIMIT 20"
    "severity_timeline|Severity trends over time|SELECT \"severity_text\", COUNT(*) as count, \"service.name\" FROM $P_STREAM GROUP BY \"severity_text\", \"service.name\" ORDER BY count DESC"
  )

  local sql_success_count=0
  local filter_number=1
  local filter_config name description query escaped_query escaped_desc json

  for filter_config in "${sql_filters[@]}"; do
    IFS='|' read -r name description query <<< "$filter_config"

    [[ "$SILENT" != "true" ]] && echo "Creating SQL filter $filter_number/10: $name"

    # JSON-escape the values: backslashes FIRST, then double quotes.
    # (The original escaped only quotes, producing invalid JSON whenever
    # the query/description contained a backslash.)
    escaped_query=$(printf '%s' "$query" | sed 's/\\/\\\\/g; s/"/\\"/g')
    escaped_desc=$(printf '%s' "$description" | sed 's/\\/\\\\/g; s/"/\\"/g')

    json="{\"stream_name\":\"sql\",\"filter_name\":\"$name\",\"filter_description\":\"$escaped_desc\",\"query\":{\"filter_type\":\"sql\",\"filter_query\":\"$escaped_query\"},\"time_filter\":null}"

    # 3 retries, 10s base timeout; failures are logged, not fatal.
    if curl_with_retry "$P_URL/api/v1/filters" "POST" "$json" "application/json" 3 10; then
      [[ "$SILENT" != "true" ]] && echo "✓ SQL Filter: $name"
      sql_success_count=$((sql_success_count + 1))
    else
      [[ "$SILENT" != "true" ]] && echo "✗ Failed after retries: $name"
    fi

    # Small delay between requests to avoid overwhelming server
    sleep 0.5
    filter_number=$((filter_number + 1))
  done

  [[ "$SILENT" != "true" ]] && print_success "Created $sql_success_count/10 SQL filters"

  # Wait a bit before creating saved filters
  [[ "$SILENT" != "true" ]] && echo "Waiting 3 seconds before creating saved filters..."
  sleep 3
}
217+
218+
#######################################
# Create 10 saved filters (with table config) via POST /api/v1/filters.
# Each entry is "name|description|query|visible_columns(csv)|group_by".
# Globals:   P_STREAM, P_URL, SILENT (read)
# Returns:   0; individual failures are reported but do not abort
#######################################
create_saved_filters() {
  print_info "Creating 10 saved filters..."

  local saved_filters=(
    "service_errors|Monitor service errors and failures|SELECT * FROM $P_STREAM WHERE severity_text IN ('ERROR', 'FATAL') LIMIT 500|Ingestion Time,Data,service.name,severity_text,url.path|service.name"
    "auth_security_events|Authentication and authorization monitoring|SELECT * FROM $P_STREAM WHERE url.path LIKE '%login%' AND severity_text IN ('WARN', 'ERROR', 'FATAL') LIMIT 500|Ingestion Time,Data,service.name,severity_text,source.address,user_agent.original|severity_text"
    "high_latency_requests|High response time requests|SELECT * FROM $P_STREAM WHERE body LIKE '%duration%' LIMIT 500|Ingestion Time,Data,service.name,url.path,upstream.cluster,body|service.name"
    "upstream_cluster_health|Upstream cluster performance|SELECT * FROM $P_STREAM WHERE upstream.cluster IS NOT NULL LIMIT 500|Ingestion Time,Data,upstream.cluster,service.name,severity_text,destination.address|upstream.cluster"
    "api_endpoint_monitoring|API endpoint usage patterns|SELECT * FROM $P_STREAM WHERE url.path IS NOT NULL LIMIT 500|Ingestion Time,Data,url.path,service.name,severity_text,source.address|url.path"
    "trace_correlation_view|Correlated traces for distributed tracking|SELECT * FROM $P_STREAM WHERE trace_id IS NOT NULL AND span_id IS NOT NULL LIMIT 500|Ingestion Time,Data,trace_id,span_id,service.name,url.path|trace_id"
    "user_agent_analysis|Client types and patterns|SELECT * FROM $P_STREAM WHERE user_agent.original IS NOT NULL LIMIT 500|Ingestion Time,Data,user_agent.original,source.address,url.path,service.name|user_agent.original"
    "network_monitoring|Network traffic and server interactions|SELECT * FROM $P_STREAM WHERE source.address IS NOT NULL LIMIT 500|Ingestion Time,Data,source.address,destination.address,service.name,severity_text,url.path|source.address"
    "service_overview|Comprehensive service activity view|SELECT * FROM $P_STREAM LIMIT 500|Ingestion Time,Data,service.name,url.path,source.address,destination.address,upstream.cluster|service.name"
    "recent_activity|Most recent system activity|SELECT * FROM $P_STREAM ORDER BY time_unix_nano DESC LIMIT 500|Ingestion Time,Data,service.name,severity_text,url.path,source.address|severity_text"
  )

  local saved_success_count=0
  local filter_number=1
  local filter_config name description query visible_columns group_by
  local escaped_query escaped_desc visible_cols_json json i

  for filter_config in "${saved_filters[@]}"; do
    IFS='|' read -r name description query visible_columns group_by <<< "$filter_config"

    [[ "$SILENT" != "true" ]] && echo "Creating saved filter $filter_number/10: $name"

    # JSON-escape the values: backslashes FIRST, then double quotes.
    # (The original escaped only quotes, producing invalid JSON whenever
    # the query/description contained a backslash.)
    escaped_query=$(printf '%s' "$query" | sed 's/\\/\\\\/g; s/"/\\"/g')
    escaped_desc=$(printf '%s' "$description" | sed 's/\\/\\\\/g; s/"/\\"/g')

    # Convert the comma-separated column list into a JSON string array.
    local col_array
    IFS=',' read -ra col_array <<< "$visible_columns"
    visible_cols_json=""
    for i in "${!col_array[@]}"; do
      [[ $i -gt 0 ]] && visible_cols_json+=","
      visible_cols_json+="\"${col_array[$i]}\""
    done

    json="{\"stream_name\":\"$P_STREAM\",\"filter_name\":\"$name\",\"filter_description\":\"$escaped_desc\",\"query\":{\"filter_type\":\"filter\",\"filter_query\":\"$escaped_query\"},\"time_filter\":null,\"tableConfig\":{\"visibleColumns\":[$visible_cols_json],\"pinnedColumns\":[]},\"groupBy\":\"$group_by\"}"

    # 3 retries, 10s base timeout; failures are logged, not fatal.
    if curl_with_retry "$P_URL/api/v1/filters" "POST" "$json" "application/json" 3 10; then
      [[ "$SILENT" != "true" ]] && echo "✓ Saved Filter: $name"
      saved_success_count=$((saved_success_count + 1))
    else
      [[ "$SILENT" != "true" ]] && echo "✗ Failed after retries: $name"
    fi

    # Small delay between requests
    sleep 0.5
    filter_number=$((filter_number + 1))
  done

  [[ "$SILENT" != "true" ]] && print_success "Created $saved_success_count/10 saved filters"
}
272+
273+
# Create all filters
274+
create_sql_filters
275+
create_saved_filters
276+
277+
print_success "Filter creation completed successfully!"
278+
279+
# Always exit with success if we get here
280+
exit 0

0 commit comments

Comments
 (0)