
Commit 07d8f04

update demo script
reduce event count to 5000; reduce SQL filters to 5; reduce saved views to 5; remove sleep between the calls
1 parent 651f067 commit 07d8f04

1 file changed (+5, -29 lines)

resources/ingest_demo_data.sh

Lines changed: 5 additions & 29 deletions
@@ -6,8 +6,8 @@ P_USERNAME=${P_USERNAME:-"admin"}
 P_PASSWORD=${P_PASSWORD:-"admin"}
 P_STREAM=${P_STREAM:-"demodata"}
 ACTION=${ACTION:-"ingest"}
-TARGET_RECORDS=10000
-BATCH_SIZE=1000
+TARGET_RECORDS=5000
+BATCH_SIZE=500
 
 # Pre-compute auth header
 AUTH_HEADER="Authorization: Basic $(echo -n "$P_USERNAME:$P_PASSWORD" | base64)"
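
With the new values the ingest loop still issues 10 requests (5000 / 500), but each batch carries half as many events, so the total demo volume drops from 10000 to 5000 records. A minimal sketch of that arithmetic and of the pre-computed auth header, using only variables visible in this hunk (NUM_BATCHES is an illustrative name, not from the script):

#!/usr/bin/env bash
# Sketch only: batch arithmetic and the one-time Basic auth header.
P_USERNAME=${P_USERNAME:-"admin"}
P_PASSWORD=${P_PASSWORD:-"admin"}
TARGET_RECORDS=5000
BATCH_SIZE=500

# base64 runs once here instead of once per request.
AUTH_HEADER="Authorization: Basic $(echo -n "$P_USERNAME:$P_PASSWORD" | base64)"

# 5000 / 500 = 10 batches, the same count as the old 10000 / 1000 split.
NUM_BATCHES=$((TARGET_RECORDS / BATCH_SIZE))
echo "sending $NUM_BATCHES batches of $BATCH_SIZE events ($TARGET_RECORDS total)"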
@@ -150,8 +150,8 @@ init_ingest_data() {
 done
 
 # Static arrays
-METHODS=("GET" "GET" "GET" "GET" "POST" "PUT")
-STATUS_CODES=(200 200 200 201 400 500)
+METHODS=("GET" "POST" "PUT" "DELETE" "PATCH" "HEAD")
+STATUS_CODES=(200 400 401 500 503)
 SERVICES=("frontend" "api" "auth" "cart" "payment")
 ENDPOINTS=("/products" "/cart" "/login" "/checkout" "/search")
 USER_AGENTS=("curl/7.88.1" "python-requests/2.32.3" "Mozilla/5.0")
@@ -176,7 +176,7 @@ generate_batch() {
 # Use modulo for cycling through pre-computed arrays
 idx=$((i % 100))
 method_idx=$((i % 6))
-status_idx=$((i % 6))
+status_idx=$((i % 5))
 service_idx=$((i % 5))
 endpoint_idx=$((i % 5))
 agent_idx=$((i % 3))
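
The status_idx change keeps the index in range for the new five-element STATUS_CODES array; with the old modulo 6, index 5 would fall past the end of the array and expand to an empty value in bash. A small sketch of the cycling, reusing the arrays from the hunk above:

#!/usr/bin/env bash
# Sketch only: modulo cycling through the pre-computed arrays.
METHODS=("GET" "POST" "PUT" "DELETE" "PATCH" "HEAD")
STATUS_CODES=(200 400 401 500 503)

for i in $(seq 0 6); do
  method_idx=$((i % 6))   # six methods, so modulo 6
  status_idx=$((i % 5))   # five status codes, so modulo 5 (was 6 before this commit)
  echo "event $i: ${METHODS[$method_idx]} -> ${STATUS_CODES[$status_idx]}"
done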
@@ -269,8 +269,6 @@ run_ingest() {
 echo "Failed to send batch"
 exit 1
 fi
-
-sleep 0.1
 done
 
 # Final statistics
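
With the 0.1 s pause removed, batches are posted back to back. The code that actually sends a batch is not part of this hunk, so the curl call below is only an assumed shape based on Parseable's JSON ingest endpoint; P_URL, the /api/v1/ingest path and the batch.json payload file are illustrative, while AUTH_HEADER, P_STREAM and the "Failed to send batch" handling come from the surrounding diff:

# Assumed per-batch send (sketch, not the script's exact code).
if ! curl -sf -o /dev/null \
    -H "$AUTH_HEADER" \
    -H "X-P-Stream: $P_STREAM" \
    -H "Content-Type: application/json" \
    -X POST "$P_URL/api/v1/ingest" \
    -d @batch.json; then
  echo "Failed to send batch"
  exit 1
fi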
@@ -292,11 +290,6 @@ create_sql_filters() {
 "service_health_summary|Service health metrics by severity|SELECT \"service.name\", severity_text, COUNT(*) as count FROM $P_STREAM GROUP BY \"service.name\", severity_text ORDER BY count DESC"
 "api_endpoint_performance|API endpoint request patterns|SELECT \"url.path\", COUNT(*) as request_count, \"service.name\" FROM $P_STREAM GROUP BY \"url.path\", \"service.name\" ORDER BY request_count DESC LIMIT 20"
 "authentication_failures|Monitor auth-related warnings and errors|SELECT * FROM $P_STREAM WHERE \"url.path\" LIKE '%login%' AND severity_text IN ('WARN', 'ERROR') ORDER BY time_unix_nano DESC LIMIT 100"
-"upstream_cluster_analysis|Request distribution across clusters|SELECT \"upstream.cluster\", COUNT(*) as request_count, \"service.name\" FROM $P_STREAM GROUP BY \"upstream.cluster\", \"service.name\" ORDER BY request_count DESC"
-"trace_analysis|Multi-span traces for distributed tracking|SELECT trace_id, COUNT(*) as span_count, \"service.name\" FROM $P_STREAM GROUP BY trace_id, \"service.name\" HAVING span_count > 1 ORDER BY span_count DESC LIMIT 10"
-"user_agent_distribution|Client types and user agent patterns|SELECT \"user_agent.original\", COUNT(*) as usage_count FROM $P_STREAM GROUP BY \"user_agent.original\" ORDER BY usage_count DESC LIMIT 15"
-"source_address_analysis|Request distribution by source IP|SELECT \"source.address\", COUNT(*) as request_count, COUNT(DISTINCT \"service.name\") as services_accessed FROM $P_STREAM GROUP BY \"source.address\" ORDER BY request_count DESC LIMIT 20"
-"severity_timeline|Severity trends over time|SELECT \"severity_text\", COUNT(*) as count, \"service.name\" FROM $P_STREAM GROUP BY \"severity_text\", \"service.name\" ORDER BY count DESC"
 )
 
 sql_success_count=0
@@ -316,12 +309,9 @@ create_sql_filters() {
 else
 echo "Failed to create SQL filter: $name"
 fi
-
-sleep 0.5
 done
 
 echo "Created $sql_success_count SQL filters"
-sleep 3
 }
 
 # Create saved filters
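
Each SQL filter entry above is a single pipe-delimited string of the form name|description|query. The diff does not show how the loop unpacks it, but a plausible sketch (the filter variable name and sample value are illustrative) is a three-way read on the | separator:

#!/usr/bin/env bash
# Sketch only: splitting one "name|description|query" entry.
filter='service_health_summary|Service health metrics by severity|SELECT * FROM demodata LIMIT 10'
IFS='|' read -r name description query <<< "$filter"
echo "creating SQL filter: $name ($description)"
echo "query: $query"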
@@ -334,11 +324,6 @@ create_saved_filters() {
 "high_latency_requests|High response time requests|SELECT * FROM $P_STREAM WHERE body LIKE '%duration%' LIMIT 500|Ingestion Time,Data,service.name,url.path,upstream.cluster,body|service.name"
 "upstream_cluster_health|Upstream cluster performance|SELECT * FROM $P_STREAM WHERE upstream.cluster IS NOT NULL LIMIT 500|Ingestion Time,Data,upstream.cluster,service.name,severity_text,destination.address|upstream.cluster"
 "api_endpoint_monitoring|API endpoint usage patterns|SELECT * FROM $P_STREAM WHERE url.path IS NOT NULL LIMIT 500|Ingestion Time,Data,url.path,service.name,severity_text,source.address|url.path"
-"trace_correlation_view|Correlated traces for distributed tracking|SELECT * FROM $P_STREAM WHERE trace_id IS NOT NULL AND span_id IS NOT NULL LIMIT 500|Ingestion Time,Data,trace_id,span_id,service.name,url.path|trace_id"
-"user_agent_analysis|Client types and patterns|SELECT * FROM $P_STREAM WHERE user_agent.original IS NOT NULL LIMIT 500|Ingestion Time,Data,user_agent.original,source.address,url.path,service.name|user_agent.original"
-"network_monitoring|Network traffic and server interactions|SELECT * FROM $P_STREAM WHERE source.address IS NOT NULL LIMIT 500|Ingestion Time,Data,source.address,destination.address,service.name,severity_text,url.path|source.address"
-"service_overview|Comprehensive service activity view|SELECT * FROM $P_STREAM LIMIT 500|Ingestion Time,Data,service.name,url.path,source.address,destination.address,upstream.cluster|service.name"
-"recent_activity|Most recent system activity|SELECT * FROM $P_STREAM ORDER BY time_unix_nano DESC LIMIT 500|Ingestion Time,Data,service.name,severity_text,url.path,source.address|severity_text"
 )
 
 saved_success_count=0
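
The saved-filter entries carry two extra pipe-delimited fields compared to the SQL filters: a comma-separated column list and a grouping field. A minimal sketch of reading all five fields (the entry value is shortened for illustration):

#!/usr/bin/env bash
# Sketch only: the five fields of a saved filter definition.
entry='api_endpoint_monitoring|API endpoint usage patterns|SELECT * FROM demodata LIMIT 500|Ingestion Time,Data,url.path,service.name|url.path'
IFS='|' read -r name description query columns group_field <<< "$entry"
echo "saved filter $name groups by $group_field, columns: $columns"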
@@ -366,8 +351,6 @@ create_saved_filters() {
 else
 echo "Failed to create saved filter: $name"
 fi
-
-sleep 0.5
 done
 
 echo "Created $saved_success_count saved filters"
@@ -457,8 +440,6 @@ create_alerts() {
 echo "Failed to create Alert 3 (Trace ID null)"
 echo "Response: $response3"
 fi
-
-sleep 1
 }
 
 # Main alerts function
@@ -470,7 +451,6 @@ run_alerts() {
 
 if [[ $? -eq 0 && -n "$target_id" ]]; then
 echo "Target creation successful, proceeding with alerts..."
-sleep 2
 
 # Create alerts using the target ID
 create_alerts "$target_id"
@@ -834,7 +814,6 @@ run_dashboards() {
 
 if [[ $? -eq 0 && -n "$dashboard_id" ]]; then
 echo "Dashboard creation successful, proceeding with tiles..."
-sleep 2
 
 # Update dashboard with tiles
 update_dashboard "$dashboard_id"
@@ -891,13 +870,10 @@ main() {
 echo "Running all actions..."
 run_ingest
 echo "Waiting before creating filters..."
-sleep 5
 run_filters
 echo "Waiting before creating alerts..."
-sleep 5
 run_alerts
 echo "Waiting before creating dashboards..."
-sleep 5
 run_dashboards
 echo "All actions completed"
 ;;
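
With the sleeps removed, this branch of main() runs ingest, filters, alerts and dashboards back to back. A hedged invocation example, using only the environment variables visible in this diff; the exact ACTION value that selects the branch above is not shown in the hunk, so "all" is an assumption:

# Assumed invocation (ACTION=all is a guess; the other defaults are
# defined at the top of the script).
P_USERNAME=admin P_PASSWORD=admin P_STREAM=demodata ACTION=all \
  bash resources/ingest_demo_data.sh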
