Here's what I have thus far:
package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
)

func subscribe(urls Urls) []byte {
	req, err := http.NewRequest("GET", urls.Url, nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("authentication", "Bearer "+urls.Token)

	httpClient := &http.Client{}
	res, err := httpClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()

	resourceResp, err := ioutil.ReadAll(res.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(resourceResp))

	var data map[string]interface{}
	err = json.Unmarshal(resourceResp, &data)
	if err != nil {
		log.Fatal(err)
	}

	// Reestablish the connection by subscribing again.
	return subscribe(urls)
}

type Urls struct {
	Url   string
	Token string
}

func main() {
	var urls [2]Urls
	urls[0] = Urls{
		Url:   "https://example.com/users/8",
		Token: "abcdefg",
	}
	urls[1] = Urls{
		Url:   "https://example.com/users/9",
		Token: "hijklmnop",
	}

	subscribe(urls[0])
	subscribe(urls[1])
}
The end goal is to "subscribe" to multiple URLs and pull any updated data (eventually adding it to a queue, but one step at a time), then reestablish the connection. Right now, only the first subscribe ever runs. Thanks!
The reason only the first subscribe runs is that subscribe never returns: it calls itself at the end to reestablish the connection, so subscribe(urls[0]) blocks forever and subscribe(urls[1]) is never reached. What you want is for the subscribe calls to run in parallel. One way is to wrap each one in a goroutine and wait for all the goroutines to finish with a sync.WaitGroup (add "sync" to your imports):
func main() {
	...
	...

	var wg sync.WaitGroup
	wg.Add(len(urls))
	for _, url := range urls {
		// Pass url in as an argument so each goroutine gets its own copy;
		// before Go 1.22, closing over the loop variable directly would
		// make every goroutine see the same (last) value.
		go func(u Urls) {
			defer wg.Done()
			subscribe(u)
		}(url)
	}
	wg.Wait()
}
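Note that wg.Wait() only returns once every subscribe finishes, and with the recursive call in place they never do, so this effectively keeps both subscriptions running for the life of the program.

Since you mentioned eventually pushing the updates onto a queue: a channel is the natural queue in Go, and it also lets you replace the recursive call with a plain loop (the recursion grows the stack on every reconnect). Below is a rough sketch of that shape, not a drop-in replacement: the updates channel and the consumer loop in main are names I've made up for illustration, and I've trimmed the JSON handling for brevity.

func subscribe(urls Urls, updates chan<- []byte) {
	for { // reestablish the connection on each pass instead of recursing
		req, err := http.NewRequest("GET", urls.Url, nil)
		if err != nil {
			log.Fatal(err)
		}
		req.Header.Set("authentication", "Bearer "+urls.Token)

		res, err := http.DefaultClient.Do(req)
		if err != nil {
			log.Fatal(err)
		}
		body, err := ioutil.ReadAll(res.Body)
		res.Body.Close()
		if err != nil {
			log.Fatal(err)
		}

		// Hand the raw response to whoever owns the queue.
		updates <- body
	}
}

func main() {
	urls := []Urls{
		{Url: "https://example.com/users/8", Token: "abcdefg"},
		{Url: "https://example.com/users/9", Token: "hijklmnop"},
	}

	updates := make(chan []byte)
	for _, u := range urls {
		go func(u Urls) {
			subscribe(u, updates)
		}(u)
	}

	// Stand-in for your queue: consume updates as they arrive.
	// This loop runs forever, which keeps main (and the goroutines) alive.
	for body := range updates {
		fmt.Println(string(body))
	}
}

With this layout main owns the queue and the subscribe goroutines only produce into it, so swapping the channel for a real queue later only touches the consumer side.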