apiVersion: kafka.strimzi.io/v1beta2
kind: Kafka
metadata:
  name: edge-kafka
spec:
  kafka:
    version: 3.5.1
    replicas: 3
    # The Strimzi Kafka CRD requires a listeners block; a plain internal
    # listener is assumed here since the original manifest omitted it.
    listeners:
      - name: plain
        port: 9092
        type: internal
        tls: false
    resources:
      requests:
        memory: 2Gi
        cpu: "500m"
      limits:
        memory: 4Gi
        cpu: "2"
    config:
      offsets.topic.replication.factor: 3
      transaction.state.log.replication.factor: 3
      transaction.state.log.min.isr: 2
      default.replication.factor: 3
      min.insync.replicas: 2
    storage:
      type: jbod
      volumes:
        - id: 0
          type: persistent-claim
          size: 100Gi
          deleteClaim: false
  # Strimzi also requires a zookeeper section for this Kafka version;
  # the replica count and storage size below are assumptions.
  zookeeper:
    replicas: 3
    storage:
      type: persistent-claim
      size: 20Gi
      deleteClaim: false
---
apiVersion: flink.apache.org/v1beta1
kind: FlinkDeployment
metadata:
  name: edge-processing
spec:
  image: flink:1.17
  flinkVersion: v1_17
  flinkConfiguration:
    taskmanager.numberOfTaskSlots: "2"
    parallelism.default: "2"
  serviceAccount: flink
  jobManager:
    resource:
      memory: "2048m"
      cpu: 1
  taskManager:
    resource:
      memory: "2048m"
      cpu: 1
  job:
    jarURI: local:///opt/flink/edge-processor.jar
    parallelism: 2
    upgradeMode: stateless
---
apiVersion: v1
kind: ConfigMap
metadata:
  name: vector-config
data:
  vector.toml: |
    [sources.edge_metrics]
    type = "prometheus_scrape"
    endpoints = ["http://localhost:9090/metrics"]
    scrape_interval_secs = 15

    [transforms.edge_processing]
    type = "remap"
    inputs = ["edge_metrics"]
    source = '''
    . = parse_json!(.message)
    .timestamp = parse_timestamp!(.timestamp, format: "%Y-%m-%d %H:%M:%S")
    '''

    [sinks.edge_storage]
    type = "aws_s3"
    inputs = ["edge_processing"]
    bucket = "edge-metrics"
    # region is required by the aws_s3 sink; the value below is a placeholder
    region = "us-east-1"
    compression = "gzip"
    encoding.codec = "json"
    batch.timeout_secs = 300
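
The FlinkDeployment above only references local:///opt/flink/edge-processor.jar; the job code itself is not part of this section. As a rough, non-authoritative sketch of how such a job could consume from the Strimzi cluster defined here, the following assumes the default Strimzi bootstrap service name (edge-kafka-kafka-bootstrap:9092); the edge-events topic, the consumer group, and the map step are placeholders, not taken from the manifests.

// Hypothetical sketch of the edge-processor job referenced by jarURI above.
// Only the bootstrap address follows Strimzi's <cluster>-kafka-bootstrap
// naming convention; topic and group names are assumptions.
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class EdgeProcessor {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Consume string records from the edge-kafka cluster's plain listener.
        KafkaSource<String> source = KafkaSource.<String>builder()
                .setBootstrapServers("edge-kafka-kafka-bootstrap:9092")
                .setTopics("edge-events")            // placeholder topic
                .setGroupId("edge-processor")        // placeholder consumer group
                .setStartingOffsets(OffsetsInitializer.latest())
                .setValueOnlyDeserializer(new SimpleStringSchema())
                .build();

        DataStream<String> events =
                env.fromSource(source, WatermarkStrategy.noWatermarks(), "edge-kafka");

        // Placeholder transformation; the real processing logic lives in the actual jar.
        events.map(String::toUpperCase).print();

        env.execute("edge-processor");
    }
}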