
Consuming Paginated REST API

I'm trying to consume an API that includes a JSON key like this in every successful response:

{
    "hasNextPage": true,
    "endCursor": "some_guid_present_here"
}

The way the API works (I've consumed it many times in Python, but am trying Go for a separate use case) is basically to check whether there is a next page and, if so, keep paginating with the appropriate cursor.
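
Conceptually, the loop I'm after looks something like this (a stripped-down sketch; fetchPage is just a placeholder here and is not part of my actual code below):

package main

import "fmt"

// page mirrors the pagination fields the API returns.
type page struct {
    HasNextPage bool
    EndCursor   string
}

// fetchPage stands in for the real HTTP call; the "after" cursor would be
// sent as a query parameter. This stub simply ends the pagination.
func fetchPage(after string) page {
    return page{HasNextPage: false, EndCursor: ""}
}

func main() {
    p := fetchPage("") // first page, no cursor
    for p.HasNextPage {
        p = fetchPage(p.EndCursor) // follow the cursor until hasNextPage is false
    }
    fmt.Println("done paginating")
}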

However, whenever I use this structure, it sometimes keeps looping even though response.Paginations.HasNextPage is false.

I'm trying to work out whether the way I've structured my consumption of the paginated API is causing this, or whether it's something else.

Also, I start with, say, 4-5 requests, which I send off individually via goroutines. I'm not sure whether that could be causing the problem, but I've attached that code in etl.go.

The main request structure is in api.go.

I've confirmed that I do receive responses and that they are unmarshaled correctly, but I'm struggling with this non-deterministic behavior.
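
For instance, a stripped-down check against the pagination fragment from the sample payload above behaves as expected:

package main

import (
    "encoding/json"
    "fmt"
)

type Pagination struct {
    EndCursor   string `json:"endCursor"`
    HasNextPage bool   `json:"hasNextPage"`
}

func main() {
    body := []byte(`{"hasNextPage": true, "endCursor": "some_guid_present_here"}`)
    var p Pagination
    if err := json.Unmarshal(body, &p); err != nil {
        fmt.Println(err)
        return
    }
    fmt.Printf("hasNextPage=%t endCursor=%s\n", p.HasNextPage, p.EndCursor)
}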

api.go

package models


import (
    "encoding/json"
    "io/ioutil"
    "net/http"
    "fmt"
)

type Request struct {
    Url string
    ApiKey string
}

type Response struct {
    ...some fields...
    Paginations Pagination `json:"pagination"`
}

type Pagination struct {
    EndCursor   string `json:"endCursor"`
    HasNextPage bool   `json:"hasNextPage"`
}


func (request *Request) Get() ([]Response, error) {
    
    var responses []Response
    var response Response

    // Set up new request
    req, err := http.NewRequest("GET", request.Url, nil)
    if err != nil {
        fmt.Println("Error creating request...")
        return responses, err
    }

    // Add request headers
    req.Header = http.Header{
        "accept":        {"application/json"},
        "authorization": {"Bearer " + request.ApiKey},
    }

    // Get our initial response from the API and capture status code
    resp, _ := http.DefaultClient.Do(req)
    response.Status = resp.StatusCode

    // Read the response body and Unmarshal into struct
    respBody, err := ioutil.ReadAll(resp.Body)
    json.Unmarshal(respBody, &response)

    // If there was a parsing error, log it
    if err != nil {
        fmt.Println(err)
    }
    defer resp.Body.Close()
    
    // This field will be in the response payload
    // It is verified to be of type bool (not string)
    fmt.Printf("Has Next Page? %t\n", resp.Paginations.HasNextPage)

    // Append response to our slice of responses
    responses = append(responses, response)

    // If paginations are available, continue to loop through until all paginations are exhausted
    for response.Paginations.HasNextPage == true {
        req, err := http.NewRequest("GET", request.Url, nil)
        if err != nil {
            return responses, err
        }

        // Append "after" cursor to query in order to redirect to paginated response
        qry := req.URL.Query()
        qry.Set("after", response.Paginations.EndCursor)
        req.URL.RawQuery = qry.Encode()
        fmt.Println("Paginated request query: ", req.URL.String())

        // Make request
        resp, err := http.DefaultClient.Do(req)
        response.Status = resp.StatusCode
        fmt.Printf("Status Code: %d\n", response.Status)

        // Read response and deserialize it
        respBody, err := ioutil.ReadAll(resp.Body)
        json.Unmarshal(respBody, &response)
        fmt.Println("Pagination Cursor: ", response.Paginations.EndCursor)
        fmt.Printf("Has Next Page? %t\n", response.Paginations.HasNextPage)

        // If there was a parsing error, log it
        if err != nil {
            fmt.Println(err)
        }
        defer resp.Body.Close()

        // Append response to our slice of responses
        responses = append(responses, response)
    }
    return responses, nil


}

etl.go

package models

import (
    "fmt"
    "sync"
)

type Etl struct {
    Requests []Request
}

func (etl *Etl) Extract() [][]Response {

    var wg sync.WaitGroup
    ch := make(chan []Response)
    defer close(ch)
    for _, req := range etl.Requests {
        wg.Add(1) // Increment
        fmt.Println("Incremented wait group")
        go func(r Request) {
            defer wg.Done() // Decrement
            resp, err := r.Get() // use the Request passed in, not the loop variable
            if err != nil {
                fmt.Println(err)
            }
            ch <- resp
            fmt.Println("Decremented wait group")
        }(req)
    }

    results := make([][]Response, len(etl.Requests))
    for i := range results {
        results[i] = <-ch
        //fmt.Println(results[i])
    }
    wg.Wait()
    return results
}
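
For reference, this is roughly how I drive the extraction (the import path, URLs, and API key below are placeholders, not my real ones):

package main

import (
    "fmt"

    "example.com/project/models" // placeholder module path for the models package above
)

func main() {
    etl := models.Etl{
        Requests: []models.Request{
            {Url: "https://api.example.com/v1/items", ApiKey: "PLACEHOLDER_KEY"},
            {Url: "https://api.example.com/v1/orders", ApiKey: "PLACEHOLDER_KEY"},
        },
    }

    results := etl.Extract()
    fmt.Println("request batches received:", len(results))
}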


I believe I found the problem. In my for response.Paginations.HasNextPage == true pagination loop, I was creating a new request object (http.NewRequest) on every iteration, and it did not carry over the headers from the previous (initial) request.

This caused 401 Unauthorized errors to be returned, and the loop kept querying the API because it never received an updated response.Paginations.HasNextPage value.

My solution was simply to change the for loop so that it reuses the original request, like this:

package models


import (
    "encoding/json"
    "io/ioutil"
    "net/http"
    "fmt"
)

type Request struct {
    Url string
    ApiKey string
}

type Response struct {
    ...some fields...
    Paginations Pagination `json:"pagination"`
}

type Pagination struct {
    EndCursor   string `json:"endCursor"`
    HasNextPage bool   `json:"hasNextPage"`
}


func (request *Request) Get() ([]Response, error) {
    
    var responses []Response
    var response Response

    // Set up new request
    req, err := http.NewRequest("GET", request.Url, nil)
    if err != nil {
        fmt.Println("Error creating request...")
        return responses, err
    }

    // Add request headers
    req.Header = http.Header{
        "accept":        {"application/json"},
        "authorization": {"Bearer " + request.ApiKey},
    }

    // Get our initial response from the API and capture status code
    resp, _ := http.DefaultClient.Do(req)
    response.Status = resp.StatusCode

    // Read the response body and Unmarshal into struct
    respBody, err := ioutil.ReadAll(resp.Body)
    json.Unmarshal(respBody, &response)

    // If there was a parsing error, log it
    if err != nil {
        fmt.Println(err)
    }
    defer resp.Body.Close()
    
    // This field will be in the response payload
    // It is verified to be of type bool (not string)
    fmt.Printf("Has Next Page? %t\n", resp.Paginations.HasNextPage)

    // Append response to our slice of responses
    responses = append(responses, response)

    // If paginations are available, continue to loop through until all paginations are exhausted
    for response.Paginations.HasNextPage == true {

        // Append "after" cursor to query in order to redirect to paginated response
        qry := req.URL.Query()
        qry.Set("after", response.Paginations.EndCursor)
        req.URL.RawQuery = qry.Encode()
        fmt.Println("Paginated request query: ", req.URL.String())

        // Make request
        resp, err := http.DefaultClient.Do(req)
        response.Status = resp.StatusCode
        fmt.Printf("Status Code: %d\n", response.Status)

        // Read response and deserialize it
        respBody, err := ioutil.ReadAll(resp.Body)
        json.Unmarshal(respBody, &response)
        fmt.Println("Pagination Cursor: ", response.Paginations.EndCursor)
        fmt.Printf("Has Next Page? %t\n", response.Paginations.HasNextPage)

        // If there was a parsing error, log it
        if err != nil {
            fmt.Println(err)
        }
        defer resp.Body.Close()

        // Append response to our slice of responses
        responses = append(responses, response)
    }
    return responses, nil


}
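
One extra safeguard I would consider (not included in the code above) is checking the status code inside the loop, so that a silent 401 fails fast instead of paginating forever. A rough sketch, as a small helper in the same models package:

// checkStatus is a hypothetical helper, not part of my code above. It could be
// called right after each http.DefaultClient.Do(req) inside the loop.
func checkStatus(resp *http.Response) error {
    if resp.StatusCode != http.StatusOK {
        resp.Body.Close()
        return fmt.Errorf("pagination request failed with status %d", resp.StatusCode)
    }
    return nil
}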
