Fix leak when remote scraper does not return 200 OK
Wikidepia committed Aug 14, 2024
1 parent a06d828 commit 53d27b3
Showing 1 changed file with 4 additions and 7 deletions.
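
The leak in question is a net/http one: once Client.Do returns a non-nil response, its body must be closed regardless of the status code, or the underlying connection is never released back to the transport. The old guard only reached defer res.Body.Close() when the remote scraper answered 200 OK, so every non-200 response leaked a connection. A minimal sketch of the corrected pattern, using plain net/http and a hypothetical fetch helper rather than this repository's code:

package main

import (
    "fmt"
    "io"
    "log"
    "net/http"
    "net/http/httptest"
    "time"
)

// fetch is a hypothetical helper showing the pattern this commit moves to:
// close the body whenever the request itself succeeded, then decide what to
// do about the status code.
func fetch(url string) ([]byte, error) {
    client := http.Client{Timeout: 5 * time.Second}
    res, err := client.Get(url)
    if err != nil {
        return nil, err
    }
    // Close unconditionally; skipping this on non-200 responses is the leak.
    defer res.Body.Close()

    body, err := io.ReadAll(res.Body)
    if err != nil {
        return nil, err
    }
    if res.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("unexpected status %d", res.StatusCode)
    }
    return body, nil
}

func main() {
    // Local stand-in for a remote scraper that answers with a non-200 status.
    srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        http.Error(w, "scrape failed", http.StatusBadGateway)
    }))
    defer srv.Close()

    if _, err := fetch(srv.URL + "/scrape/abc123"); err != nil {
        log.Println("remote scrape failed, would fall back to local scraping:", err)
    }
}
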
11 changes: 4 additions & 7 deletions handlers/scraper/data.go
@@ -149,26 +149,22 @@ func GetData(postID string) (*InstaData, error) {
 func (i *InstaData) ScrapeData() error {
     // Scrape from remote scraper if available
     if len(RemoteScraperAddr) > 0 {
-        var err error
         remoteClient := http.Client{Transport: transportNoProxy, Timeout: timeout}
         req, err := http.NewRequest("GET", RemoteScraperAddr+"/scrape/"+i.PostID, nil)
         if err != nil {
             return err
         }
         req.Header.Set("Accept-Encoding", "zstd.dict")
         res, err := remoteClient.Do(req)
-        if res != nil && res.StatusCode == 200 {
+        if err == nil && res != nil {
             defer res.Body.Close()
-            if err != nil {
-                return err
-            }
             remoteData, err := io.ReadAll(res.Body)
-            if err == nil {
+            if err == nil && res.StatusCode == 200 {
                 remoteDecomp, err := remoteZSTDReader.DecodeAll(remoteData, nil)
                 if err != nil {
                     return err
                 }
-                if err = binary.Unmarshal(remoteDecomp, i); err == nil {
+                if err := binary.Unmarshal(remoteDecomp, i); err == nil {
                     if len(i.Username) > 0 {
                         slog.Info("Data parsed from remote scraper", "postID", i.PostID)
                         return nil
@@ -177,6 +173,7 @@ func (i *InstaData) ScrapeData() error {
             }
             slog.Error("Failed to scrape data from remote scraper", "postID", i.PostID, "status", res.StatusCode, "err", err)
         }
+        slog.Error("Failed when trying to scrape data from remote scraper", "postID", i.PostID, "err", err)
     }
 
     client := http.Client{Transport: transport, Timeout: timeout}
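
A smaller change in the first hunk: the binary.Unmarshal guard now uses err := instead of err =, so that error is scoped to the if statement and no longer reassigns the outer err reported by the surrounding slog.Error calls. A minimal illustration of the difference, using general Go scoping rather than this repository's types:

package main

import (
    "errors"
    "fmt"
)

func mayFail(fail bool) error {
    if fail {
        return errors.New("boom")
    }
    return nil
}

func main() {
    err := errors.New("original outer error")

    // ':=' declares a new err scoped to this if statement; the outer err
    // keeps its value for later logging.
    if err := mayFail(false); err == nil {
        fmt.Println("inner call succeeded")
    }
    fmt.Println("outer err after ':=' guard:", err)

    // '=' assigns to the outer err, so a successful inner call clobbers it.
    if err = mayFail(false); err == nil {
        fmt.Println("inner call succeeded")
    }
    fmt.Println("outer err after '=' guard:", err)
}
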
