0
votes

I am writing a program to process millions of lines from a text file. Validating a 500k-line file was taking 5 seconds, and I wanted to speed this up.

I wanted to loop over the items and process X of them asynchronously, then wait for the responses to decide whether I should continue.

I have written some dummy code. I am not sure whether what I have written makes much sense — it just seems rather complicated. Is there a simpler, more elegant way of doing this?

package main

import (
    "fmt"
    "sync"
    "time"
)

// main validates `items` entries with at most `concurrency` workers in
// flight, stopping as soon as any worker reports a non-nil error.
func main() {
	items := 100000
	concurrency := 20
	// Counting semaphore: each in-flight worker holds one slot.
	sem := make(chan bool, concurrency)
	returnChan := make(chan error)
	finChan := make(chan bool)

	var wg sync.WaitGroup

	// Producer: dispatch one goroutine per item, throttled by sem, then
	// signal completion once every worker has finished.
	go func() {
		for x := 0; x < items; x++ {
			wg.Add(1)
			sem <- true // blocks while `concurrency` workers are in flight
			go delayFunc(x, sem, returnChan, &wg)
		}
		wg.Wait()
		finChan <- true
	}()

	// Consume results until an error arrives or all items are done.
	// NOTE: the original loop had a `default: continue` arm, which turned
	// this into a busy-wait spinning a full CPU core between results; a
	// plain blocking select is correct and costs nothing while idle. The
	// `break` statements inside the cases also only exited the select, not
	// the loop, which is why the redundant trailing `if` was needed.
	var err error
	for err == nil {
		select {
		case err = <-returnChan:
			// nil results are ignored; a non-nil error ends the loop.
		case <-finChan:
			fmt.Println(err)
			return
		}
	}
	fmt.Println(err)
}

func delayFunc(x int, sem chan bool, returnChan chan error, wg *sync.WaitGroup) {
    //fmt.Printf("PROCESSING (%v)\n", x)
    time.Sleep(10 * time.Millisecond)
    <-sem // release the lock
    wg.Done()
    if x == 95000 {
        returnChan <- fmt.Errorf("Something not right")
    } else {
        returnChan <- nil
    }
}
1
Instead of adding to wg one by one, do something like wg.Add(items) after initialization of the WaitGroup. This is not what you are asking, but the current approach might create a hard-to-catch bug, as "items + 1" different goroutines are accessing the wg variable. — atakanyenel
Thanks for that. I am not sure my example fully illustrates what I am doing; I will be reading quite large files, so the input will be buffered line by line. — Charles Bryant

1 Answer

5
votes

Your code looks fine; you implement a pattern commonly used in Go. The downside is that you spawn a worker goroutine for every item, and spawning a goroutine, while cheap, isn't free. Another approach is to spawn N workers and provide them items through a channel. Something like this:

package main
import (
    "fmt"
    "time"
)

// main demonstrates the fixed worker-pool pattern: a constant number of
// workers drain a shared job channel while main consumes their results,
// stopping at the first non-nil error (including the workers' "The End"
// completion sentinel).
func main() {
	const (
		jobCount = 100
		poolSize = 10
	)
	jobs := make(chan int)
	results := make(chan error)

	// Start the fixed pool of workers up front.
	for i := 0; i < poolSize; i++ {
		go worker(jobs, results)
	}

	// Feeder: push every job, then close the channel so the workers'
	// range loops terminate once the channel is drained.
	go func() {
		for job := 0; job < jobCount; job++ {
			jobs <- job
		}
		close(jobs)
	}()

	// Consume one result per job; the first non-nil error ends the run.
	for err := range results {
		if err != nil {
			fmt.Println(err.Error())
			break
		}
	}
}
// worker drains item numbers from in, reporting one result per item on
// returnChan — an error for item 95, nil otherwise — and pausing briefly to
// simulate work. Once in is closed and drained, it emits the "The End"
// sentinel error so the consumer knows this worker has finished.
func worker(in chan int, returnChan chan error) {
	for item := range in {
		var result error
		if item == 95 {
			result = fmt.Errorf("Something not right")
		}
		returnChan <- result
		time.Sleep(10 * time.Millisecond)
	}
	returnChan <- fmt.Errorf("The End")
}

Playground