Introduction
Core Architectural Components
Step‑by‑Step Implementation
module "vpc" { source = "terraform-aws-modules/vpc/aws" version = "5.0" name = "marketplace-vpc" cidr = "10.0.0.0/16" azs = ["us-east-1a", "us-east-1b"] public_subnets = ["10.0.1.0/24", "10.0.2.0/24"] private_subnets = ["10.0.101.0/24", "10.0.102.0/24"] }
module "eks" { source = "terraform-aws-modules/eks/aws" version = "19.21.0" cluster_name = "marketplace-eks" cluster_version = "1.29" subnets = module.vpc.private_subnets vpc_id = module.vpc.vpc_id node_groups = { market-node-group = { desired_capacity = 3 max_capacity = 5 min_capacity = 1 instance_type = "t3.medium" } } }
<p>Run <code>terraform init && terraform apply</code> to spin up the infrastructure. The EKS context is automatically added to your <code>kubeconfig</code>.</p> <h3>Step 2 - Deploy the Service Mesh (Istio)</h3> <p>Install Istio via Helm:</p> bash helm repo add istio https://istio-release.storage.googleapis.com/charts helm repo update helm install istio-base istio/base -n istio-system --create-namespace helm install istiod istio/istiod -n istio-system --wait helm install istio-ingressgateway istio/gateway -n istio-system <p>Enable automatic sidecar injection for the <code>marketplace</code> namespace:</p> bash kubectl create namespace marketplace kubectl label namespace marketplace istio-injection=enabled <h3>Step 3 - Create the Catalog Service</h3> <p>Initialize a Node.js project:</p> bash mkdir catalog-service && cd catalog-service npm init -y npm install express mongoose kafkajs dotenv <p>File <code>src/app.js</code> contains the HTTP layer and Kafka producer:</p> ```javascript require('dotenv').config(); const express = require('express'); const mongoose = require('mongoose'); const { Kafka } = require('kafkajs'); const app = express(); app.use(express.json()); ```
// MongoDB connection. Failures are logged but do not crash the process,
// so orchestrator health probes can surface the problem.
// FIX: dropped `useNewUrlParser` / `useUnifiedTopology` — they have been
// deprecated no-ops since Mongoose 6, and the unpinned `npm install
// mongoose` in this tutorial installs a modern major version.
mongoose.connect(process.env.MONGO_URI)
  .then(() => console.log('MongoDB connected'))
  .catch(err => console.error('MongoDB error:', err));
// Mongoose schema/model for marketplace products.
// `createdAt` defaults to the insertion time via `Date.now`.
const productFields = {
  title: String,
  description: String,
  price: Number,
  category: String,
  createdAt: { type: Date, default: Date.now },
};
const productSchema = new mongoose.Schema(productFields);
const Product = mongoose.model('Product', productSchema);
// Kafka producer setup.
// FIX: the original used top-level `await producer.connect()`, which is a
// SyntaxError in a CommonJS module (top-level await is only valid in ES
// modules) — this file uses require(), so the service would never start.
// Connect eagerly and surface failures via the log instead.
const kafka = new Kafka({ brokers: [process.env.KAFKA_BROKER] });
const producer = kafka.producer();
producer.connect()
  .then(() => console.log('Kafka producer connected'))
  .catch(err => console.error('Kafka connection error:', err));
// Create product endpoint — persists the product, then publishes a
// ProductCreated event to the `product-events` topic, keyed by product id
// so events for one product stay on one partition.
// FIX: wrapped in try/catch — Express 4 does not forward rejected
// promises from async handlers to its error middleware, so a DB or Kafka
// failure previously became an unhandled rejection and a hung request.
app.post('/products', async (req, res) => {
  try {
    const product = await Product.create(req.body);
    await producer.send({
      topic: 'product-events',
      messages: [{
        key: product.id,
        value: JSON.stringify({ type: 'ProductCreated', data: product }),
      }],
    });
    res.status(201).json(product);
  } catch (err) {
    console.error('POST /products failed:', err);
    res.status(500).json({ error: 'Failed to create product' });
  }
});
// Read endpoint for a single product.
// FIX: a malformed id makes `findById` reject with a CastError, which in
// Express 4 would become an unhandled rejection and a hung request —
// catch it and answer 400 (bad id) / 500 (anything else) explicitly.
app.get('/products/:id', async (req, res) => {
  try {
    const product = await Product.findById(req.params.id);
    if (!product) return res.sendStatus(404);
    res.json(product);
  } catch (err) {
    if (err.name === 'CastError') return res.sendStatus(400);
    console.error('GET /products/:id failed:', err);
    res.sendStatus(500);
  }
});
// Start the HTTP server. PORT comes from the environment (always a string
// when set) with a 3000 fallback for local development.
// FIX: the log message was missing backticks — `console.log(Catalog
// Service listening on ${PORT})` was a SyntaxError, not a template literal.
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => console.log(`Catalog Service listening on ${PORT}`));
