Building High-Performance Microservices with Go and gRPC
Our REST microservices were slow. 100ms latency, JSON overhead, HTTP/1.1 limitations.
Migrated to gRPC with Go. Latency 100ms → 5ms, 10x throughput. Here’s how.
Table of Contents: Why gRPC? · Protocol Buffers Definition · Server Implementation · Client Implementation · Interceptors (Middleware) · Load Balancing · Results · Lessons Learned · Conclusion
Why gRPC?
REST Issues:
- Latency: 100ms
- JSON parsing overhead
- HTTP/1.1 (no multiplexing)
- No type safety
gRPC Benefits:
- Protocol Buffers (binary)
- HTTP/2 (multiplexing)
- Type-safe
- Streaming support
Protocol Buffers Definition
// user.proto
// Schema for the UserService gRPC API: one unary RPC per CRUD
// operation, plus a server-streaming list endpoint.
syntax = "proto3";
package user;
option go_package = "github.com/myapp/proto/user";
// UserService exposes user read/write operations.
service UserService {
// Fetches a single user by id.
rpc GetUser(GetUserRequest) returns (User);
// Server-streams users one message at a time; paging comes from the request.
rpc ListUsers(ListUsersRequest) returns (stream User);
// Creates a user and returns the stored record.
rpc CreateUser(CreateUserRequest) returns (User);
// Updates an existing user and returns its new state.
rpc UpdateUser(UpdateUserRequest) returns (User);
}
// User is the wire representation of a user record.
message User {
string id = 1;
string name = 2;
string email = 3;
int64 created_at = 4; // creation time; server fills it with time.Now().Unix(), i.e. Unix seconds
}
// GetUserRequest identifies the user to fetch.
message GetUserRequest {
string id = 1;
}
// ListUsersRequest selects a page of users to stream back.
message ListUsersRequest {
int32 page = 1;
int32 page_size = 2;
}
// CreateUserRequest carries the fields of a new user; the server assigns the id.
message CreateUserRequest {
string name = 1;
string email = 2;
}
// UpdateUserRequest identifies a user by id and carries the replacement fields.
message UpdateUserRequest {
string id = 1;
string name = 2;
string email = 3;
}
Server Implementation
package main
import (
	"context"
	"log"
	"net"
	"time"

	"google.golang.org/grpc"

	pb "github.com/myapp/proto/user"
)
// server implements pb.UserServiceServer. Embedding
// UnimplementedUserServiceServer keeps the type forward-compatible:
// RPCs declared in the proto but not implemented here (UpdateUser)
// automatically return codes.Unimplemented instead of breaking the build.
type server struct {
	pb.UnimplementedUserServiceServer
}
// GetUser returns the user identified by req.Id.
// NOTE(review): the database lookup is stubbed — a fixed record is
// returned with the requested id echoed back.
func (s *server) GetUser(ctx context.Context, req *pb.GetUserRequest) (*pb.User, error) {
	// TODO: replace this stub with a real database read.
	u := pb.User{
		Id:        req.Id,
		Name:      "John Doe",
		Email:     "john@example.com",
		CreatedAt: time.Now().Unix(),
	}
	return &u, nil
}
// ListUsers streams one User message per record in the requested page.
func (s *server) ListUsers(req *pb.ListUsersRequest, stream pb.UserService_ListUsersServer) error {
	// Fetch the page, then push each row to the client. Send fails once
	// the stream is broken (client gone, deadline hit); propagating the
	// error lets gRPC tear the call down.
	for _, u := range getUsersFromDB(req.Page, req.PageSize) {
		if err := stream.Send(u); err != nil {
			return err
		}
	}
	return nil
}
// CreateUser persists a new user built from the request and returns it.
// The server assigns the id and creation timestamp (Unix seconds).
func (s *server) CreateUser(ctx context.Context, req *pb.CreateUserRequest) (*pb.User, error) {
	u := &pb.User{
		Id:        generateID(),
		Name:      req.Name,
		Email:     req.Email,
		CreatedAt: time.Now().Unix(),
	}
	// NOTE(review): saveUserToDB's result is not checked here — confirm
	// it cannot fail, or surface its error to the caller.
	saveUserToDB(u)
	return u, nil
}
// main starts the gRPC server on :50051 and blocks serving requests.
func main() {
	lis, err := net.Listen("tcp", ":50051")
	if err != nil {
		log.Fatalf("failed to listen: %v", err)
	}

	grpcServer := grpc.NewServer()
	pb.RegisterUserServiceServer(grpcServer, &server{})

	log.Printf("server listening at %v", lis.Addr())
	// Serve blocks until the listener fails or the server is stopped.
	if err := grpcServer.Serve(lis); err != nil {
		log.Fatalf("failed to serve: %v", err)
	}
}
Client Implementation
package main
import (
	"context"
	"io"
	"log"
	"time"

	"google.golang.org/grpc"

	pb "github.com/myapp/proto/user"
)
// main demonstrates the client side: a unary GetUser call followed by
// a server-streaming ListUsers call.
func main() {
	// Connect to server.
	// NOTE(review): grpc.WithInsecure is deprecated in current grpc-go;
	// grpc.WithTransportCredentials(insecure.NewCredentials()) is the
	// modern equivalent. Kept as-is to match the grpc version used
	// throughout this article.
	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
	if err != nil {
		log.Fatalf("did not connect: %v", err)
	}
	defer conn.Close()
	client := pb.NewUserServiceClient(conn)

	// Unary call, bounded by a one-second deadline.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	user, err := client.GetUser(ctx, &pb.GetUserRequest{Id: "123"})
	if err != nil {
		log.Fatalf("could not get user: %v", err)
	}
	log.Printf("User: %v", user)

	// Streaming call: give the stream its own context. The original
	// reused the unary 1s ctx, so the stream was cancelled as soon as
	// that deadline expired — potentially mid-stream.
	streamCtx, streamCancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer streamCancel()
	stream, err := client.ListUsers(streamCtx, &pb.ListUsersRequest{
		Page:     1,
		PageSize: 10,
	})
	if err != nil {
		log.Fatalf("could not list users: %v", err)
	}
	for {
		user, err := stream.Recv()
		if err == io.EOF {
			// Server finished sending; normal termination.
			break
		}
		if err != nil {
			log.Fatalf("error receiving: %v", err)
		}
		log.Printf("User: %v", user)
	}
}
Interceptors (Middleware)
// loggingInterceptor is a unary server interceptor that logs the full
// method name, wall-clock duration, and error (nil on success) of
// every RPC it wraps.
func loggingInterceptor(
	ctx context.Context,
	req interface{},
	info *grpc.UnaryServerInfo,
	handler grpc.UnaryHandler,
) (interface{}, error) {
	begin := time.Now()

	// Invoke the wrapped handler, then log regardless of outcome.
	resp, err := handler(ctx, req)
	log.Printf("method=%s duration=%s error=%v", info.FullMethod, time.Since(begin), err)

	// Pass the handler's result through unchanged.
	return resp, err
}
// authInterceptor is a unary server interceptor that rejects any RPC
// lacking a valid token in the "authorization" metadata key, returning
// codes.Unauthenticated in each failure case.
func authInterceptor(
	ctx context.Context,
	req interface{},
	info *grpc.UnaryServerInfo,
	handler grpc.UnaryHandler,
) (interface{}, error) {
	md, ok := metadata.FromIncomingContext(ctx)
	if !ok {
		return nil, status.Error(codes.Unauthenticated, "missing metadata")
	}

	// Validate the first "authorization" value, if any.
	auth := md.Get("authorization")
	switch {
	case len(auth) == 0:
		return nil, status.Error(codes.Unauthenticated, "missing token")
	case !validateToken(auth[0]):
		return nil, status.Error(codes.Unauthenticated, "invalid token")
	}

	// Token accepted — continue to the real handler.
	return handler(ctx, req)
}
// Use interceptors.
// FIX: the original wrapped grpc.ChainUnaryInterceptor(...) inside
// grpc.UnaryInterceptor(...), which does not compile —
// ChainUnaryInterceptor already returns a grpc.ServerOption, while
// UnaryInterceptor expects a grpc.UnaryServerInterceptor. Pass the
// chain option directly; interceptors run in the order given
// (logging first, then auth).
s := grpc.NewServer(
	grpc.ChainUnaryInterceptor(
		loggingInterceptor,
		authInterceptor,
	),
)
Load Balancing
// Client-side load balancing: round-robin across every address the
// DNS resolver returns for the target.
// FIX: grpc.WithBalancerName is deprecated (and removed in grpc-go
// v1.46+); select the policy via the default service config instead.
conn, err := grpc.Dial(
	"dns:///user-service:50051",
	grpc.WithInsecure(),
	grpc.WithDefaultServiceConfig(`{"loadBalancingConfig": [{"round_robin":{}}]}`),
)
Results
Performance:
| Metric | REST | gRPC | Improvement |
|---|---|---|---|
| Latency | 100ms | 5ms | 95% |
| Throughput | 1K req/s | 10K req/s | 10x |
| Payload size | 500B | 50B | 90% |
| CPU usage | 80% | 30% | 62% |
Developer Experience:
- Type safety: ✅
- Code generation: ✅
- Streaming: ✅
- Documentation: Auto-generated
Lessons Learned
- gRPC much faster: 95% latency reduction
- Protobuf efficient: 90% smaller payloads
- Streaming powerful: Real-time updates
- Type safety helps: Fewer bugs
- HTTP/2 matters: Multiplexing wins
Conclusion
gRPC transformed our microservices. Latency 100ms → 5ms, 10x throughput, 90% smaller payloads.
Key takeaways:
- Latency: 100ms → 5ms (-95%)
- Throughput: 10x improvement
- Payload size: -90%
- CPU usage: -62%
- Type-safe communication
Use gRPC for microservices. Performance matters.