Commit
Merge pull request #99 from sonroyaalmerol/max-retry
Add MAX_RETRIES to specify max retry loop quantity
sonroyaalmerol authored Aug 24, 2024
2 parents e5b084f + a6503c2 commit b3aeca1
Showing 2 changed files with 18 additions and 8 deletions.
README.md: 1 change (1 addition, 0 deletions)
@@ -89,6 +89,7 @@ Access the generated M3U playlist at `http://<server ip>:8080/playlist.m3u`.
|-----------------------------|----------------------------------------------------------|---------------|------------------------------------------------|
| M3U_URL_1, M3U_URL_2, M3U_URL_X | Set M3U URLs as environment variables. | N/A | Any valid M3U URLs |
| M3U_MAX_CONCURRENCY_1, M3U_MAX_CONCURRENCY_2, M3U_MAX_CONCURRENCY_X | Set max concurrency. | 1 | Any integer |
| MAX_RETRIES | Set the maximum number of retries (full loops) across all M3Us while streaming. Set to 0 to retry indefinitely (beware of throttling from your provider). | 5 | Any integer greater than or equal to 0 |
| REDIS_ADDR | Set Redis server address | N/A | e.g. localhost:6379 |
| REDIS_PASS | Set Redis server password | N/A | Any string |
| REDIS_DB | Set Redis server database to be used | 0 | 0 to 15 |
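The new `MAX_RETRIES` value is read once per stream and falls back to 5 when it is missing, non-numeric, or negative, while 0 means the retry loop never gives up on its own. A minimal, self-contained sketch of that intended behaviour (the `parseMaxRetries` helper is hypothetical; the parsing mirrors the `stream_handler.go` change below):

```go
package main

import (
	"fmt"
	"os"
	"strconv"
	"strings"
)

// parseMaxRetries mirrors the fallback behaviour added in this commit:
// anything missing, non-numeric, or negative falls back to the default of 5.
func parseMaxRetries() int {
	raw := strings.TrimSpace(os.Getenv("MAX_RETRIES"))
	n, err := strconv.Atoi(raw)
	if err != nil || n < 0 {
		return 5
	}
	return n
}

func main() {
	maxLaps := parseMaxRetries()
	lap := 0

	// Same loop condition as the change below: maxLaps == 0 means the
	// retry loop never stops on its own.
	for lap < maxLaps || maxLaps == 0 {
		fmt.Printf("stream attempt %d\n", lap+1)
		lap++
		if lap >= 3 { // cap the demo so it always terminates
			break
		}
	}
}
```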
stream_handler.go: 25 changes (17 additions, 8 deletions)
@@ -15,7 +15,7 @@ import (
"strings"
)

func loadBalancer(stream database.StreamInfo, previous []int) (*http.Response, string, int, error) {
func loadBalancer(stream database.StreamInfo, previous *[]int) (*http.Response, string, int, error) {
debug := os.Getenv("DEBUG") == "true"

m3uIndexes := utils.GetM3UIndexes()
@@ -24,14 +24,22 @@ func loadBalancer(stream database.StreamInfo, previous []int) (*http.Response, s
return db.ConcurrencyPriorityValue(i) > db.ConcurrencyPriorityValue(j)
})

const maxLaps = 5
maxLapsString := os.Getenv("MAX_RETRIES")
maxLaps, err := strconv.Atoi(strings.TrimSpace(maxLapsString))
if err != nil || maxLaps < 0 {
maxLaps = 5
}

lap := 0

for lap < maxLaps {
for lap < maxLaps || maxLaps == 0 {
if debug {
log.Printf("[DEBUG] Stream attempt %d out of %d\n", lap+1, maxLaps)
}
allSkipped := true // Assume all URLs might be skipped

for _, index := range m3uIndexes {
if slices.Contains(previous, index) {
if slices.Contains(*previous, index) {
log.Printf("Skipping M3U_%d: marked as previous stream\n", index+1)
continue
}
@@ -66,7 +74,7 @@ func loadBalancer(stream database.StreamInfo, previous []int) (*http.Response, s
if debug {
log.Printf("[DEBUG] All streams skipped in lap %d\n", lap)
}
break
*previous = []int{}
}

lap++
@@ -146,6 +154,7 @@ func streamHandler(w http.ResponseWriter, r *http.Request, db *database.Instance
var selectedUrl string

testedIndexes := []int{}
firstWrite := true

var resp *http.Response

@@ -158,15 +167,14 @@ func streamHandler(w http.ResponseWriter, r *http.Request, db *database.Instance
}
return
default:
resp, selectedUrl, selectedIndex, err = loadBalancer(stream, testedIndexes)
resp, selectedUrl, selectedIndex, err = loadBalancer(stream, &testedIndexes)
if err != nil {
log.Printf("Error reloading stream for %s: %v\n", streamSlug, err)
http.Error(w, "Error fetching stream. Exhausted all streams.", http.StatusInternalServerError)
return
}

// HTTP header initialization
if len(testedIndexes) == 0 {
if firstWrite {
w.Header().Set("Cache-Control", "no-cache")
w.Header().Set("Access-Control-Allow-Origin", "*")
for k, v := range resp.Header {
@@ -179,6 +187,7 @@
if debug {
log.Printf("[DEBUG] Headers set for response: %v\n", w.Header())
}
firstWrite = false
}

exitStatus := make(chan int)
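Two details of the Go change are easy to miss. First, `previous` is now passed as `*[]int` so that `loadBalancer` can clear the caller's list of already-tested indexes when an entire lap is skipped, instead of breaking out of the loop. Second, `streamHandler` tracks header initialization with a dedicated `firstWrite` flag, because `len(testedIndexes) == 0` is no longer a reliable first-response check once that slice can be reset. A minimal sketch (not from this repository) of why the slice parameter had to become a pointer:

```go
package main

import "fmt"

// resetByValue rebinds only its local copy of the slice header, so the
// caller's slice is left untouched.
func resetByValue(previous []int) {
	previous = []int{}
	_ = previous
}

// resetByPointer replaces the slice the caller sees, which is what lets
// loadBalancer start the next lap with a clean list of tested indexes.
func resetByPointer(previous *[]int) {
	*previous = []int{}
}

func main() {
	tested := []int{0, 1, 2}

	resetByValue(tested)
	fmt.Println(tested) // prints [0 1 2]: unchanged

	resetByPointer(&tested)
	fmt.Println(tested) // prints []: cleared, so every M3U can be retried
}
```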
