Go Performance Guide
I/O & Data Handling

JSON Performance in Go

Optimize JSON encoding and decoding with standard library techniques and high-performance alternatives

JSON is ubiquitous in modern Go applications, yet the default encoding/json package has significant performance overhead. This guide explores optimization techniques and alternatives that can improve JSON throughput 10-100x.

encoding/json Performance Characteristics

The standard library's JSON implementation uses reflection, making it flexible but slower than specialized solutions.

Reflection Overhead

// encoding/json uses reflection to inspect struct tags
// This has costs:
// 1. Runtime type introspection
// 2. Dynamic dispatch for type-specific marshalers
// 3. Interface allocation for interface{} fields
// 4. Memory allocations for accumulated data

// BenchmarkEncodingJSONOverhead measures the baseline cost of the
// reflection-based encoding/json package on a small three-field struct,
// covering both directions (Unmarshal and Marshal).
func BenchmarkEncodingJSONOverhead(b *testing.B) {
    type Product struct {
        ID    int
        Name  string
        Price float64
    }

    sample := Product{ID: 1, Name: "Widget", Price: 19.99}

    b.Run("Unmarshal_small", func(b *testing.B) {
        payload := []byte(`{"ID":1,"Name":"Widget","Price":19.99}`)
        var dst Product
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            json.Unmarshal(payload, &dst)
        }
    })

    b.Run("Marshal_small", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            json.Marshal(sample)
        }
    })
}

Struct Tags and Optimization

Struct tags control JSON field naming and behavior. Optimize them carefully.

// User demonstrates the common struct-tag options and their performance
// trade-offs (see the tag-impact notes below the type).
type User struct {
    // Full control with tags
    ID       int    `json:"id"`                  // Field name
    Name     string `json:"name"`
    Email    string `json:"email"`
    Internal int    `json:"-"`                   // Omit from JSON
    Active   bool   `json:"active,omitempty"`    // Omit if zero value
    Role     string `json:"role,string"`         // Force string representation
}

// Tag impacts:
// - omitempty: Saves bytes if false/0/"" but requires extra check
// - string: Slower, forces string conversion
// - -: Fastest, skips completely

// BenchmarkStructTags compares marshaling a struct whose optional fields
// use omitempty against an untagged struct of identical shape, isolating
// the per-field cost of the omitempty zero-value check.
func BenchmarkStructTags(b *testing.B) {
    type FullTags struct {
        ID       int    `json:"id"`
        Name     string `json:"name"`
        Count    int    `json:"count,omitempty"`
        Metadata string `json:"metadata,omitempty"`
    }

    type MinimalTags struct {
        ID       int
        Name     string
        Count    int
        Metadata string
    }

    tagged := FullTags{ID: 1, Name: "test", Count: 0, Metadata: ""}
    // BUG FIX: the original marshaled the tagged value in BOTH
    // sub-benchmarks, so "minimal_tags" never actually exercised
    // MinimalTags and the comparison was meaningless.
    untagged := MinimalTags{ID: 1, Name: "test", Count: 0, Metadata: ""}

    b.Run("full_tags_omitempty", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            json.Marshal(tagged)
        }
    })

    b.Run("minimal_tags", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            json.Marshal(untagged)
        }
    })
}

Tip: Use omitempty for optional fields in responses to reduce payload size, but know it adds marshal time.

json.Decoder vs json.Unmarshal

Streaming JSON processing with different performance characteristics:

// DecodeWithUnmarshal parses a fully-buffered JSON document into a slice
// of users in one shot; the entire input must already be in memory.
func DecodeWithUnmarshal(data []byte) ([]User, error) {
    var out []User
    err := json.Unmarshal(data, &out)
    return out, err
}

// DecodeWithDecoder parses a JSON document incrementally from reader,
// so the raw bytes never need to be fully buffered by the caller.
func DecodeWithDecoder(reader io.Reader) ([]User, error) {
    var out []User
    err := json.NewDecoder(reader).Decode(&out)
    return out, err
}

// DecodeJSONStream incrementally decodes a JSON array of users from
// reader, handling each element as soon as it is parsed so the whole
// array never has to be held in memory at once.
func DecodeJSONStream(reader io.Reader) error {
    decoder := json.NewDecoder(reader)

    // Consume the opening '[' of the array.
    // BUG FIX: the original assigned this token to an unused variable t,
    // which does not compile in Go; we also validate that the input
    // really starts with an array.
    tok, err := decoder.Token()
    if err != nil {
        return err
    }
    if delim, ok := tok.(json.Delim); !ok || delim != '[' {
        return fmt.Errorf("expected JSON array, got token %v", tok)
    }

    // Process each element
    for decoder.More() {
        var user User
        if err := decoder.Decode(&user); err != nil {
            return err
        }

        // Process user immediately (don't accumulate in memory)
        _ = user
    }

    return nil
}

// Benchmark
// BenchmarkDecoderVsUnmarshal compares three ways of consuming the same
// large JSON array: one-shot Unmarshal, a whole-slice Decoder.Decode,
// and the element-at-a-time streaming pattern from DecodeJSONStream.
func BenchmarkDecoderVsUnmarshal(b *testing.B) {
    // Create sample JSON array (10MB)
    users := make([]User, 100000)
    for i := 0; i < 100000; i++ {
        users[i] = User{
            ID:    i,
            Name:  fmt.Sprintf("User_%d", i),
            Email: fmt.Sprintf("user_%d@example.com", i),
        }
    }

    data, _ := json.Marshal(users)

    b.Run("Unmarshal", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            var result []User
            json.Unmarshal(data, &result)
        }
    })

    b.Run("Decoder", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            reader := bytes.NewReader(data)
            decoder := json.NewDecoder(reader)

            // Note: this inner slice intentionally shadows the outer
            // `users`; each iteration decodes the full array afresh.
            var users []User
            decoder.Decode(&users)
        }
    })

    b.Run("Decoder_streaming", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            reader := bytes.NewReader(data)
            DecodeJSONStream(reader)
        }
    })
}

// User is the sample record used by the decoding/encoding benchmarks in
// this section.
type User struct {
    ID    int
    Name  string
    Email string
}

When to use each:

  • Unmarshal: Small payloads, entire document fits in memory
  • Decoder: Large payloads, streaming scenarios, network I/O
  • Streaming pattern: Process items as they arrive

json.Encoder vs json.Marshal

Encoding with different memory characteristics:

// EncodeWithMarshal serializes users into a newly allocated byte slice.
// The marshal error is deliberately discarded in this illustration.
func EncodeWithMarshal(users []User) []byte {
    out, _ := json.Marshal(users)
    return out
}

// EncodeWithEncoder streams the JSON representation of users straight
// into w, avoiding the intermediate []byte that Marshal would allocate.
func EncodeWithEncoder(w io.Writer, users []User) error {
    return json.NewEncoder(w).Encode(users)
}

// Benchmark
// BenchmarkEncoderVsMarshal contrasts Marshal (returns a fresh []byte)
// with an Encoder writing into a bytes.Buffer for a 1000-element slice.
func BenchmarkEncoderVsMarshal(b *testing.B) {
    users := make([]User, 1000)
    for i := 0; i < 1000; i++ {
        users[i] = User{ID: i, Name: fmt.Sprintf("User_%d", i)}
    }

    b.Run("Marshal", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            json.Marshal(users)
        }
    })

    b.Run("Encoder_to_buffer", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            // A fresh buffer per iteration: measures encoder setup plus
            // buffer growth, not steady-state reuse.
            var buf bytes.Buffer
            encoder := json.NewEncoder(&buf)
            encoder.Encode(users)
        }
    })
}

Choose:

  • Marshal: Small outputs, need the bytes immediately
  • Encoder: Writing to files/network, multiple objects, streaming

Alternative JSON Libraries: Performance Comparison

Several libraries offer better performance by trading flexibility for speed.

jsoniter: Drop-in Compatible

import "github.com/json-iterator/go"

// Mostly compatible with encoding/json
var json = jsoniter.ConfigCompatibleWithStandardLibrary

// ExampleJsoniter decodes with jsoniter through the package-level `json`
// variable declared above, so call sites read exactly like encoding/json.
func ExampleJsoniter() {
    data := []byte(`{"id":1,"name":"test"}`)
    var user User
    json.Unmarshal(data, &user)  // Same API
}

sonic: SIMD-Accelerated (ByteDance)

import "github.com/bytedance/sonic"

// ExampleSonic shows sonic's Unmarshal, which mirrors the standard
// library signature while using SIMD-accelerated parsing internally.
func ExampleSonic() {
    data := []byte(`{"id":1,"name":"test"}`)
    var user User
    sonic.Unmarshal(data, &user)
}

easyjson: Code Generation

// Requires code generation: easyjson -all types.go
// Eliminates reflection entirely

// ExampleEasyjson calls the easyjson-generated MarshalJSON method, which
// serializes the struct without any reflection.
func ExampleEasyjson() {
    user := User{ID: 1, Name: "test"}
    data, _ := user.MarshalJSON()  // Generated method
    _ = data // BUG FIX: data was declared and unused, which does not compile
}

Comprehensive Library Benchmark

// BenchmarkJSONLibraries decodes the same 10k-element payload with
// encoding/json, jsoniter, and sonic (plus a marshal baseline) so the
// libraries can be compared head-to-head on identical data.
func BenchmarkJSONLibraries(b *testing.B) {
    users := make([]User, 10000)
    for i := 0; i < 10000; i++ {
        // BUG FIX: the original also set an Age field, but the User type
        // defined in this guide has only ID, Name, and Email, so the
        // composite literal did not compile.
        users[i] = User{
            ID:    i,
            Name:  fmt.Sprintf("User_%d", i),
            Email: fmt.Sprintf("user%d@example.com", i),
        }
    }

    data, _ := json.Marshal(users)

    b.Run("encoding/json_unmarshal", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            var result []User
            json.Unmarshal(data, &result)
        }
    })

    b.Run("encoding/json_marshal", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            json.Marshal(users)
        }
    })

    b.Run("jsoniter_unmarshal", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            var result []User
            jsoniter.Unmarshal(data, &result)
        }
    })

    b.Run("sonic_unmarshal", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            var result []User
            sonic.Unmarshal(data, &result)
        }
    })
}

// Results (rough, depends on hardware):
// encoding/json:  1x (baseline)
// jsoniter:       2-3x faster
// sonic:          5-10x faster (with SIMD)
// easyjson:       10-20x faster (generated)

Code Generation Approaches

For maximum performance, use generated marshalers:

easyjson Example

// types.go
//go:generate easyjson -all types.go

// User is annotated for easyjson: running `go generate` (see the
// directive above) emits reflection-free MarshalJSON/UnmarshalJSON
// methods for it.
type User struct {
    ID    int    `json:"id"`
    Name  string `json:"name"`
    Email string `json:"email"`
}

// After running: go generate
// User now has MarshalJSON/UnmarshalJSON methods (zero-reflection)

// Usage:
// UseEasyjson demonstrates calling the generated marshaler directly.
func UseEasyjson() {
    u := User{ID: 1, Name: "Alice"}
    data, _ := u.MarshalJSON()  // Very fast, no reflection
    _ = data // BUG FIX: data was declared and unused, which does not compile
}

ffjson: Code Generation (Legacy)

// Similar to easyjson, generate with:
// go generate ./...

Reducing Allocations: Buffer Reuse

// Antipattern: BadDecoding builds a brand-new reader and decoder on
// every iteration, paying the setup allocations 1000 times. Decode
// errors are ignored because only allocation behavior is illustrated.
func BadDecoding(data []byte) error {
    for i := 0; i < 1000; i++ {
        decoder := json.NewDecoder(bytes.NewReader(data))
        var u User
        decoder.Decode(&u)
    }
    return nil
}

// Better: GoodDecoding reuses one decoder (and its internal buffer) to
// read up to 1000 JSON values from a single stream, avoiding the
// per-iteration setup cost of the antipattern above.
func GoodDecoding(data []byte) error {
    decoder := json.NewDecoder(bytes.NewReader(data))
    for i := 0; i < 1000; i++ {
        var u User
        if err := decoder.Decode(&u); err != nil {
            // BUG FIX: the original discarded errors entirely; once the
            // stream's values were consumed, every remaining iteration
            // silently hit io.EOF and did nothing. EOF just means the
            // stream held fewer than 1000 values; anything else is real.
            if err == io.EOF {
                return nil
            }
            return err
        }
    }
    return nil
}

// EfficientBatchProcessing encodes every input through one shared
// encoder and buffer; Reset clears the buffer's contents while keeping
// its backing array, so steady state allocates nothing new per item.
func EfficientBatchProcessing(inputs []interface{}) {
    var scratch bytes.Buffer
    enc := json.NewEncoder(&scratch)

    for _, item := range inputs {
        scratch.Reset() // drop old contents, keep capacity
        enc.Encode(item)
        // Use scratch.Bytes()
    }
}

// Benchmark
// BenchmarkBufferReuse contrasts three buffer strategies for encoding a
// small batch: a fresh buffer+encoder per item, one reused pair for the
// whole batch, and sync.Pool-managed buffers.
func BenchmarkBufferReuse(b *testing.B) {
    data := make([]interface{}, 100)
    for i := 0; i < 100; i++ {
        data[i] = map[string]interface{}{
            "id": i, "name": fmt.Sprintf("item_%d", i),
        }
    }

    b.Run("new_encoder_each_time", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            for _, item := range data {
                // Worst case: buffer and encoder allocated per item.
                var buf bytes.Buffer
                encoder := json.NewEncoder(&buf)
                encoder.Encode(item)
            }
        }
    })

    b.Run("reuse_encoder", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            var buf bytes.Buffer
            encoder := json.NewEncoder(&buf)
            for _, item := range data {
                // Reset keeps the backing array, so growth happens once.
                buf.Reset()
                encoder.Encode(item)
            }
        }
    })

    b.Run("sync.Pool_buffers", func(b *testing.B) {
        pool := sync.Pool{
            New: func() interface{} {
                return new(bytes.Buffer)
            },
        }

        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            for _, item := range data {
                // Get may return a previously pooled buffer; Reset before use.
                buf := pool.Get().(*bytes.Buffer)
                buf.Reset()
                encoder := json.NewEncoder(buf)
                encoder.Encode(item)
                pool.Put(buf)
            }
        }
    })
}

Custom Marshalers: Zero-Allocation Examples

// MarshalJSON implements json.Marshaler via the alias trick: the local
// Alias type copies User's fields but not its methods, so marshaling it
// cannot recurse back into this method.
func (u User) MarshalJSON() ([]byte, error) {
    type Alias User
    // BUG FIX: u is a value, so it must be addressed before converting
    // to *Alias; the original (*Alias)(u) does not compile.
    return json.Marshal(&struct {
        *Alias
    }{
        Alias: (*Alias)(&u),
    })
}

// MarshalJSONFast hand-writes the JSON for two hot fields with a
// strings.Builder, skipping reflection entirely.
func (u User) MarshalJSONFast() ([]byte, error) {
    var buf strings.Builder
    buf.WriteString(`{"id":`)
    buf.WriteString(strconv.Itoa(u.ID))
    buf.WriteString(`,"name":`)
    // BUG FIX: the original wrote the name between raw quote characters,
    // producing invalid JSON whenever Name contained a quote or
    // backslash. strconv.Quote escapes those (note: it is not a complete
    // JSON escaper for every control character — acceptable for typical
    // names, but use json.Marshal for arbitrary strings).
    buf.WriteString(strconv.Quote(u.Name))
    buf.WriteString(`}`)
    return []byte(buf.String()), nil
}

// UnmarshalJSON implements json.Unmarshaler with the same alias trick as
// MarshalJSON: decoding into *Alias fills u's fields without re-entering
// this method.
func (u *User) UnmarshalJSON(data []byte) error {
    type Alias User
    aux := &struct {
        *Alias
    }{
        Alias: (*Alias)(u),
    }
    // aux is already a pointer; the original passed &aux (a **struct),
    // which json.Unmarshal tolerates but adds a needless indirection.
    return json.Unmarshal(data, aux)
}

JSON Streaming: Processing Large Files

// Process 10GB JSON file without loading all into memory

// ProcessLargeJSONFile streams a huge JSON array of records from disk,
// decoding and handling one element at a time so memory use stays
// constant regardless of file size.
func ProcessLargeJSONFile(filepath string) error {
    file, err := os.Open(filepath)
    if err != nil {
        return err
    }
    defer file.Close()

    // Buffered reads cut syscall count for the decoder's many small reads.
    buffered := bufio.NewReader(file)
    decoder := json.NewDecoder(buffered)

    // Consume the opening '[' of the array.
    // BUG FIX: the original stored this token in an unused variable t,
    // which does not compile in Go.
    if _, err := decoder.Token(); err != nil {
        return err
    }

    // Process each record
    count := 0
    for decoder.More() {
        var record Record
        if err := decoder.Decode(&record); err != nil {
            return err
        }

        // Process immediately instead of accumulating records in memory.
        if err := processRecord(&record); err != nil {
            return err
        }

        count++
        if count%10000 == 0 {
            fmt.Printf("Processed %d records\n", count)
        }
    }

    return nil
}

// Record is one element of the streamed JSON array processed by
// ProcessLargeJSONFile.
type Record struct {
    ID   int
    Data string
}

// processRecord is a placeholder for per-record business logic; it is
// called once per decoded element during streaming.
func processRecord(r *Record) error {
    // Your logic here
    return nil
}

// Benchmark: memory usage
// BenchmarkStreamingVsLoading compares decoding a 100k-element array all
// at once against token-by-token streaming; run with -benchmem to see
// the peak-allocation difference.
func BenchmarkStreamingVsLoading(b *testing.B) {
    // Generate test data
    records := make([]map[string]interface{}, 100000)
    for i := 0; i < 100000; i++ {
        records[i] = map[string]interface{}{
            "id": i, "value": fmt.Sprintf("data_%d", i),
        }
    }
    data, _ := json.Marshal(records)

    b.Run("load_all", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            var result []interface{}
            json.Unmarshal(data, &result)
        }
    })

    b.Run("stream", func(b *testing.B) {
        b.ReportAllocs()
        b.ResetTimer()
        for i := 0; i < b.N; i++ {
            reader := bytes.NewReader(data)
            decoder := json.NewDecoder(reader)
            decoder.Token()  // [
            // Each record still allocates (decoded into interface{}),
            // but only one record is live at a time.
            for decoder.More() {
                var record interface{}
                decoder.Decode(&record)
            }
        }
    })
}

json.RawMessage: Lazy Parsing

Defer parsing of specific fields until needed:

// Message is an envelope whose payload stays as raw bytes until Type is
// inspected, so unneeded payloads are never parsed.
type Message struct {
    ID        int             `json:"id"`
    Type      string          `json:"type"`
    Payload   json.RawMessage `json:"payload"`  // Don't parse yet
}

// ProcessMessage decodes only the message envelope up front, then parses
// the payload solely for the types it recognizes; unknown types are a
// deliberate no-op.
func ProcessMessage(data []byte) error {
    var msg Message
    if err := json.Unmarshal(data, &msg); err != nil {
        return err
    }

    // Payload was kept raw during the first pass; decode it now that the
    // concrete type is known.
    switch msg.Type {
    case "user":
        var u User
        return json.Unmarshal(msg.Payload, &u)
    case "order":
        var o Order
        return json.Unmarshal(msg.Payload, &o)
    default:
        return nil
    }
}

// User is the payload shape for messages with Type == "user".
type User struct {
    ID   int
    Name string
}

// Order is the payload shape for messages with Type == "order".
type Order struct {
    ID    int
    Total float64
}

Practical Performance Tips Checklist

  • Use Decoder/Encoder for I/O: Avoid Unmarshal/Marshal with files/network
  • Preallocate slices: In target structs when size is known
  • Omit large fields: With json:"-" if not needed in JSON
  • Avoid interface{}: Requires runtime type checking
  • Use easyjson/ffjson: For hot paths and large payloads
  • Reuse buffers: With sync.Pool for batch operations
  • Stream large JSON: Don't load into memory if not needed
  • Profile first: Use benchmarks to find bottlenecks
  • Consider alternatives: jsoniter (2-3x), sonic (5-10x), easyjson (10-20x)
  • Cache compiled types: Don't re-inspect via reflection multiple times

Real-World Optimization Example

// API endpoint: Serialize response efficiently

// BEFORE: HandleRequestBad marshals into a temporary []byte and then
// copies it to the socket — one full extra allocation of the response.
func HandleRequestBad(w http.ResponseWriter, users []User) {
    data, _ := json.Marshal(users)          // Allocate
    w.Header().Set("Content-Type", "application/json")
    w.Write(data)
}

// AFTER: HandleRequestGood streams the encoded JSON straight into the
// ResponseWriter, skipping the intermediate []byte entirely. (The Encode
// error is dropped here for brevity; a real handler should log it.)
func HandleRequestGood(w http.ResponseWriter, users []User) {
    w.Header().Set("Content-Type", "application/json")
    encoder := json.NewEncoder(w)  // Writes directly to socket
    encoder.Encode(users)
}

// WITH CODE GEN: HandleRequestFastest uses the easyjson-generated
// MarshalJSON on each element, avoiding reflection entirely.
// Run: easyjson -all types.go
// BUG FIX: the original wrote only users[0] (and panicked on an empty
// slice) while claiming to serialize the response; this version emits
// the whole JSON array.
func HandleRequestFastest(w http.ResponseWriter, users []User) {
    w.Header().Set("Content-Type", "application/json")
    w.Write([]byte{'['})
    for i, u := range users {
        if i > 0 {
            w.Write([]byte{','})
        }
        // User.MarshalJSON is generated, no reflection
        data, _ := u.MarshalJSON()
        w.Write(data)
    }
    w.Write([]byte{']'})
}

JSON performance is critical in modern Go services. Start with these techniques, benchmark your specific workloads, and consider specialized libraries when standard library doesn't meet requirements.

On this page