
Commit

Deployed 013ac1f with MkDocs version: 1.6.0
Unknown committed May 14, 2024
0 parents commit ac24a21
Showing 129 changed files with 44,225 additions and 0 deletions.
Empty file added .nojekyll
Empty file.
808 changes: 808 additions & 0 deletions 00-getting-started/about/index.html

Large diffs are not rendered by default.

727 changes: 727 additions & 0 deletions 01-how-to-guides/index.html

Large diffs are not rendered by default.

737 changes: 737 additions & 0 deletions 02-docs/index.html

Large diffs are not rendered by default.

23 changes: 23 additions & 0 deletions 03-tools/cloudflare/delete_records.sh
@@ -0,0 +1,23 @@
#!/bin/bash

ZONE_ID="${ZONE_ID}"
AUTH_EMAIL="${AUTH_EMAIL}"
AUTH_KEY="${AUTH_KEY}"

# Get the DNS record IDs
record_ids=$(curl -s --request GET \
  --url "https://api.cloudflare.com/client/v4/zones/$ZONE_ID/dns_records" \
  --header "Content-Type: application/json" \
  --header "X-Auth-Email: $AUTH_EMAIL" \
  --header "X-Auth-Key: $AUTH_KEY" | jq -r '.result[].id')

# Loop through the IDs and delete each DNS record
for id in $record_ids; do
  curl -s --request DELETE \
    --url "https://api.cloudflare.com/client/v4/zones/$ZONE_ID/dns_records/$id" \
    --header "Content-Type: application/json" \
    --header "X-Auth-Email: $AUTH_EMAIL" \
    --header "X-Auth-Key: $AUTH_KEY"
  echo "" # Add an empty line for separation between records
done
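
# Example usage (a sketch; the values below are placeholders, not real credentials):
#   ZONE_ID=<zone-id> AUTH_EMAIL=<account-email> AUTH_KEY=<global-api-key> ./delete_records.sh
# Note: this deletes every DNS record in the zone, so double-check ZONE_ID first.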

15 changes: 15 additions & 0 deletions 03-tools/elastic/cat_indices.sh
@@ -0,0 +1,15 @@
#!/bin/bash
# Define the Elasticsearch URL and credentials
ES_URL="${ES_URL}"
ES_USER="${ES_USER}"
ES_PASS=$(gopass show -o personal/homeops/logging/elastic)

if [ -z "$ES_URL" ] || [ -z "$ES_USER" ] || [ -z "$ES_PASS" ]; then
  echo "Please provide ES_URL, ES_USER, and ES_PASS environment variables."
  exit 1
fi

# Query the _cat/indices API, sorted by index name with sizes in bytes, and print the result
indices=$(curl -sSL -u "$ES_USER":"$ES_PASS" "$ES_URL/_cat/indices/?v&s=index&bytes=b")

echo "$indices"
19 changes: 19 additions & 0 deletions 03-tools/elastic/datastream_total_store_size.sh
@@ -0,0 +1,19 @@
#!/bin/bash

# Define Elasticsearch credentials
ES_USER="${ES_USER}"
ES_PASS=$(gopass show -o personal/homeops/logging/elastic)

# Define Elasticsearch URL
ES_URL="${ES_URL}"

# Fetch the data stream stats from the _data_stream/_stats API
DATA_STREAM_STATS=$(curl -sSL -u "$ES_USER:$ES_PASS" "$ES_URL/_data_stream/_stats")

# Extract the total store size and build a timestamped JSON document carrying both
# the size and the full stats payload
STORE_SIZE_BYTES=$(echo "$DATA_STREAM_STATS" | jq -r '.total_store_size_bytes')
TIMESTAMP=$(date -u +"%Y-%m-%dT%H:%M:%S.000Z")
MESSAGE='{"@timestamp":"'"$TIMESTAMP"'","total_store_size_bytes":'"$STORE_SIZE_BYTES"',"message":'"$DATA_STREAM_STATS"'}'

# Ingest the JSON document into the logs-data-stream-test index
curl -sSL -u "$ES_USER:$ES_PASS" -X POST "$ES_URL/logs-data-stream-test/_doc/" -H 'Content-Type: application/json' -d "$MESSAGE"
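
# Example check (a sketch; reuses the same credentials to read back the most recent ingested document):
#   curl -sSL -u "$ES_USER:$ES_PASS" "$ES_URL/logs-data-stream-test/_search?size=1&sort=@timestamp:desc" | jq '.hits.hits[0]._source'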
11 changes: 11 additions & 0 deletions 03-tools/elastic/datastreams.txt
@@ -0,0 +1,11 @@
logs-k3s_prod-cert-manager
logs-k3s_prod-datahub
logs-k3s_prod-dev
logs-k3s_prod-flux-system
logs-k3s_prod-kube-system
logs-k3s_prod-kyverno
logs-k3s_prod-longhorn
logs-k3s_prod-networking
logs-k3s_prod-tekton-chains
logs-k3s_prod-tekton-pipelines
logs-k3s_prod-tekton-pipelines-resolvers
43 changes: 43 additions & 0 deletions 03-tools/elastic/es_kg_sync.sh
@@ -0,0 +1,43 @@
#!/bin/bash

# Define Grafana API URL and API Key
GRAFANA_URL="https://grafana.oscaromeu.io"
API_KEY="glsa_35731F5zTsra7FmtyTQOEtR9I89eD5xg_45a98d02"

# Define the Elasticsearch data source configuration
DATASOURCE_NAME="Test"
DATASOURCE_TYPE="elasticsearch"
DATASOURCE_URL="http://elasticsearch.oscaromeu.io:9200" # Elasticsearch endpoint URL
DATASOURCE_ACCESS="proxy" # Change to "direct" if needed
DATASOURCE_BASIC_AUTH="false" # Set to "true" if your Elasticsearch requires basic auth
DATASOURCE_USER="elastic" # Elasticsearch username if basic auth is enabled
DATASOURCE_PASSWORD="$(op item get elastic-api --vault home-ops --fields label=password)" # Elasticsearch password if basic auth is enabled

# Create the JSON payload for the data source
DATA_SOURCE_JSON=$(cat <<EOF
{
  "name": "$DATASOURCE_NAME",
  "type": "$DATASOURCE_TYPE",
  "url": "$DATASOURCE_URL",
  "access": "$DATASOURCE_ACCESS",
  "basicAuth": $DATASOURCE_BASIC_AUTH,
  "basicAuthUser": "$DATASOURCE_USER",
  "basicAuthPassword": "$DATASOURCE_PASSWORD"
}
EOF
)

# Create the data source in Grafana using the API
RESPONSE=$(curl -s -X POST -H "Authorization: Bearer $API_KEY" \
  -H "Content-Type: application/json" \
  -H "CF-Access-Client-Id: 459e68d0c7fb875509b31d104566f142.access" \
  -H "CF-Access-Client-Secret: dc7183c7d473d534dbd92e5a39e5695717a032dd02036a36d88383e298b4a0e1" \
  --data "$DATA_SOURCE_JSON" \
  "$GRAFANA_URL/api/datasources")

# Check the response for success or failure (a successful create returns the new data source id)
if echo "$RESPONSE" | jq -e '.id' > /dev/null 2>&1; then
  echo "Elasticsearch data source created successfully."
else
  echo "Error creating Elasticsearch data source: $RESPONSE"
fi
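
# Example verification (a sketch; reuses the same API key and CF-Access headers as the POST above
# to list the names of the data sources Grafana now knows about):
#   curl -s -H "Authorization: Bearer $API_KEY" \
#        -H "CF-Access-Client-Id: 459e68d0c7fb875509b31d104566f142.access" \
#        -H "CF-Access-Client-Secret: dc7183c7d473d534dbd92e5a39e5695717a032dd02036a36d88383e298b4a0e1" \
#        "$GRAFANA_URL/api/datasources" | jq '.[].name'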
16 changes: 16 additions & 0 deletions 03-tools/elastic/get_datastreams.sh
@@ -0,0 +1,16 @@
#!/bin/bash
# Define the Elasticsearch URL and credentials
ES_URL="${ES_URL}"
ES_USER="${ES_USER}"
ES_PASS=$(gopass show -o personal/homeops/logging/elastic)

if [ -z "$ES_URL" ] || [ -z "$ES_USER" ] || [ -z "$ES_PASS" ]; then
  echo "Please provide ES_URL, ES_USER, and ES_PASS environment variables."
  exit 1
fi

# Query the _data_stream API and extract the data stream names
data_streams=$(curl -sSL -u "$ES_USER":"$ES_PASS" "$ES_URL/_data_stream" | jq -r '.[][].name')

echo "$data_streams"
32 changes: 32 additions & 0 deletions 03-tools/elastic/get_ilm_policy.sh
@@ -0,0 +1,32 @@
#!/bin/bash

# Define the Elasticsearch URL
ES_URL="${ES_URL}"

# Define the Elasticsearch credentials
ES_USER="${ES_USER}"
ES_PASS=$(gopass show -o personal/homeops/logging/elastic)

# Get the ILM policy name from the command-line argument
POLICY_NAME="$1"

# Check if an ILM policy name was provided
if [ -z "$POLICY_NAME" ]; then
echo "Usage: $0 <ilm_policy_name>"
exit 1
fi

# Make a GET request to the specific ILM policy endpoint and save the JSON response
POLICY_JSON=$(curl -sSL -u "$ES_USER:$ES_PASS" "$ES_URL/_ilm/policy/$POLICY_NAME")

# Extract and format the desired portion of the JSON response using jq
FORMATTED_POLICY_JSON=$(echo "$POLICY_JSON" | jq '.[].policy | { _meta, phases }')

# Write the formatted policy to a JSON file
SAVE_DIR="ilm_policy"
JSON_FILE="$SAVE_DIR/$POLICY_NAME.json"
mkdir -p "$SAVE_DIR"

# Wrap the formatted content in a top-level "policy" object and pretty-print it
echo "{\"policy\": $FORMATTED_POLICY_JSON}" | jq '.' > "$JSON_FILE"

echo "ILM policy JSON saved to $JSON_FILE"
19 changes: 19 additions & 0 deletions 03-tools/elastic/get_ilm_policy_list.sh
@@ -0,0 +1,19 @@
#!/bin/bash

# Define the Elasticsearch URL
ES_URL="${ES_URL}"

# Define the Elasticsearch credentials
ES_USER="${ES_USER}"
ES_PASS=$(gopass show -o personal/homeops/logging/elastic)

# Make a GET request to the ILM policy endpoint and extract policy names without "managed" field using jq
POLICY_NAMES=$(curl -sSL -u "$ES_USER:$ES_PASS" "$ES_URL/_ilm/policy" | jq -r '. | to_entries[] | select(.value.policy._meta.managed == null) | .key')

# Iterate through the policy names
for POLICY_NAME in $POLICY_NAMES; do
  echo "Processing policy: $POLICY_NAME"

  # Perform any desired action with each policy name here,
  # for example by calling another script such as the one sketched below:
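  # (a sketch: exports each unmanaged policy with the sibling script in this directory)
  # ./get_ilm_policy.sh "$POLICY_NAME"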
done
45 changes: 45 additions & 0 deletions 03-tools/elastic/ilm_policy/logs-k3s.json
@@ -0,0 +1,45 @@
{
  "policy": {
    "_meta": {
      "project": {
        "name": "homeops",
        "modified_date": "2023-08-28T16:00:00Z",
        "version": 2
      }
    },
    "phases": {
      "delete": {
        "min_age": "20d",
        "actions": {
          "delete": {
            "delete_searchable_snapshot": true
          }
        }
      },
      "warm": {
        "min_age": "10d",
        "actions": {
          "set_priority": {
            "priority": 50
          }
        }
      },
      "hot": {
        "min_age": "0ms",
        "actions": {
          "rollover": {
            "max_age": "5d",
            "min_primary_shard_size": "10gb",
            "max_primary_shard_size": "10gb"
          },
          "set_priority": {
            "priority": 100
          },
          "shrink": {
            "number_of_shards": 1
          }
        }
      }
    }
  }
}
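
A sketch of how this policy file could be applied back to the cluster, assuming the same ES_URL/ES_USER/ES_PASS conventions used by the scripts above (the policy name logs-k3s matches the file name):

    curl -sSL -u "$ES_USER:$ES_PASS" -X PUT "$ES_URL/_ilm/policy/logs-k3s" \
      -H 'Content-Type: application/json' \
      --data-binary @ilm_policy/logs-k3s.json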
