package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/url"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"sync"

	"github.com/beevik/etree"
)

// JobInfo tracks the control channels and state of a running download job.
type JobInfo struct {
	AbortChan  chan struct{}
	ResumeChan chan struct{}
	Cmd        *exec.Cmd
	Paused     bool
}

var (
	jobsMutex sync.Mutex
	jobs      = make(map[string]*JobInfo)
)

// sanitizeFilename replaces characters that are invalid in file names and
// trims leading/trailing dots.
func sanitizeFilename(filename string) string {
	filename = regexp.MustCompile(`[<>:"/\\|?*]`).ReplaceAllString(filename, "_")
	filename = strings.Trim(filename, ".")
	return filename
}

// isValidURL reports whether the given string parses as a request URI.
func isValidURL(toTest string) bool {
	_, err := url.ParseRequestURI(toTest)
	return err == nil
}

// fixGoPlay cleans up a DASH MPD manifest: it drops ad periods, keeps only the
// highest-bandwidth video representation, and merges the segment timelines of
// all later periods into the first one.
func fixGoPlay(mpdContent string) (string, error) {
	doc := etree.NewDocument()
	if err := doc.ReadFromString(mpdContent); err != nil {
		return "", fmt.Errorf("error parsing MPD content: %v", err)
	}

	root := doc.Root()

	// Remove ad periods
	for _, period := range root.SelectElements("Period") {
		if strings.Contains(period.SelectAttrValue("id", ""), "-ad-") {
			root.RemoveChild(period)
		}
	}

	// Find highest bandwidth for video
	highestBandwidth := 0
	for _, adaptationSet := range root.FindElements("//AdaptationSet") {
		if strings.Contains(adaptationSet.SelectAttrValue("mimeType", ""), "video") {
			for _, representation := range adaptationSet.SelectElements("Representation") {
				bandwidth, _ := strconv.Atoi(representation.SelectAttrValue("bandwidth", "0"))
				if bandwidth > highestBandwidth {
					highestBandwidth = bandwidth
				}
			}
		}
	}

	// Remove lower bitrate representations
	for _, adaptationSet := range root.FindElements("//AdaptationSet") {
		if strings.Contains(adaptationSet.SelectAttrValue("mimeType", ""), "video") {
			for _, representation := range adaptationSet.SelectElements("Representation") {
				bandwidth, _ := strconv.Atoi(representation.SelectAttrValue("bandwidth", "0"))
				if bandwidth != highestBandwidth {
					adaptationSet.RemoveChild(representation)
				}
			}
		}
	}

	// Combine periods
	periods := root.SelectElements("Period")
	if len(periods) > 1 {
		firstPeriod := periods[0]
		var newVideoTimeline, newAudioTimeline *etree.Element

		// Find or create SegmentTimeline elements
		for _, adaptationSet := range firstPeriod.SelectElements("AdaptationSet") {
			mimeType := adaptationSet.SelectAttrValue("mimeType", "")
			if strings.Contains(mimeType, "video") && newVideoTimeline == nil {
				newVideoTimeline = findOrCreateSegmentTimeline(adaptationSet)
			} else if strings.Contains(mimeType, "audio") && newAudioTimeline == nil {
				newAudioTimeline = findOrCreateSegmentTimeline(adaptationSet)
			}
		}

		// Copy segment entries from every later period into the first period's
		// timelines, then drop the emptied period.
		for _, period := range periods[1:] {
			for _, adaptationSet := range period.SelectElements("AdaptationSet") {
				mimeType := adaptationSet.SelectAttrValue("mimeType", "")
				var timeline *etree.Element
				if strings.Contains(mimeType, "video") {
					timeline = newVideoTimeline
				} else if strings.Contains(mimeType, "audio") {
					timeline = newAudioTimeline
				}

				if timeline != nil {
					segmentTimeline := findOrCreateSegmentTimeline(adaptationSet)
					for _, s := range segmentTimeline.SelectElements("S") {
						timeline.AddChild(s.Copy())
					}
				}
			}
			root.RemoveChild(period)
		}
	}

	return doc.WriteToString()
}

// findOrCreateSegmentTimeline returns the first SegmentTimeline found under the
// AdaptationSet's Representation/SegmentTemplate elements, creating the missing
// elements if none exists.
func findOrCreateSegmentTimeline(adaptationSet *etree.Element) *etree.Element {
	for _, representation := range adaptationSet.SelectElements("Representation") {
		for _, segmentTemplate := range representation.SelectElements("SegmentTemplate") {
			timeline := segmentTemplate.SelectElement("SegmentTimeline")
			if timeline != nil {
				return timeline
			}
		}
	}

	// If no SegmentTimeline found, create one
	representation := adaptationSet.CreateElement("Representation")
	segmentTemplate := representation.CreateElement("SegmentTemplate")
	return segmentTemplate.CreateElement("SegmentTimeline")
}

// parseInputFile reads a DRMD JSON file from disk and returns its items.
func parseInputFile(inputFile string) ([]Item, error) {
	jsonFile, err := os.Open(inputFile)
	if err != nil {
		return nil, fmt.Errorf("error opening file %s: %v", inputFile, err)
	}
	defer jsonFile.Close()

	byteValue, err := io.ReadAll(jsonFile)
	if err != nil {
		return nil, fmt.Errorf("error reading file %s: %v", inputFile, err)
	}

	// Strip a byte order mark, if present, before unmarshaling.
	byteValue = removeBOM(byteValue)

	var items Items
	err = json.Unmarshal(byteValue, &items)
	if err != nil {
		return nil, fmt.Errorf("error unmarshaling JSON: %v", err)
	}

	return items.Items, nil
}

// groupItemsBySeason groups series episodes under "<Title> - <Season>" keys and
// collects everything else under "Movies".
func groupItemsBySeason(items []Item) map[string][]Item {
	grouped := make(map[string][]Item)
	for _, item := range items {
		metadata := parseMetadata(item.Metadata)
		if metadata.Type == "serie" {
			key := fmt.Sprintf("%s - %s", metadata.Title, metadata.Season)
			grouped[key] = append(grouped[key], item)
		} else {
			grouped["Movies"] = append(grouped["Movies"], item)
		}
	}
	return grouped
}

// filterSelectedItems returns only the items whose filenames appear in selectedItems.
func filterSelectedItems(items []Item, selectedItems []string) []Item {
	var filtered []Item
	for _, item := range items {
		for _, selected := range selectedItems {
			if item.Filename == selected {
				filtered = append(filtered, item)
				break
			}
		}
	}
	return filtered
}

// processItems downloads every item in the job, honoring abort and pause/resume
// signals delivered through the job's channels.
func processItems(filename string, items []Item) error {
	jobsMutex.Lock()
	jobInfo := &JobInfo{
		AbortChan:  make(chan struct{}),
		ResumeChan: make(chan struct{}),
	}
	jobs[filename] = jobInfo
	jobsMutex.Unlock()

	defer func() {
		jobsMutex.Lock()
		delete(jobs, filename)
		jobsMutex.Unlock()
	}()

	for i := 0; i < len(items); i++ {
		select {
		case <-jobInfo.AbortChan:
			updateProgress(filename, 100, "Aborted")
			return fmt.Errorf("download aborted")
		default:
			if jobInfo.Paused {
				// Block until the job is resumed or aborted.
				select {
				case <-jobInfo.ResumeChan:
					jobInfo.Paused = false
					fmt.Printf("Resuming download for %s\n", filename)
				case <-jobInfo.AbortChan:
					updateProgress(filename, 100, "Aborted")
					return fmt.Errorf("download aborted")
				}
			}
			updateProgress(filename, float64(i)/float64(len(items))*100, items[i].Filename)
			err := downloadFile(items[i], jobInfo)
			if err != nil {
				if err.Error() == "download paused" {
					// Drop already-finished episodes from the input file and
					// retry the current item on the next iteration.
					removeCompletedEpisodes(filename, items[:i])
					i--
					continue
				}
				fmt.Printf("Error downloading file: %v\n", err)
			}
		}
	}
	updateProgress(filename, 100, "")
	return nil
}

// removeCompletedEpisodes rewrites the DRMD input file, dropping episodes that
// have already been downloaded (except the most recently completed one).
func removeCompletedEpisodes(filename string, completedItems []Item) error {
	inputFile := filepath.Join(uploadDir, filename)
	items, err := parseInputFile(inputFile)
	if err != nil {
		return fmt.Errorf("error parsing input file: %v", err)
	}

	remainingItems := make([]Item, 0)
	for _, item := range items {
		if !isItemCompleted(item, completedItems) || isLastCompletedItem(item, completedItems) {
			remainingItems = append(remainingItems, item)
		}
	}

	updatedItems := Items{Items: remainingItems}
	jsonData, err := json.MarshalIndent(updatedItems, "", " ")
	if err != nil {
		return fmt.Errorf("error marshaling updated items: %v", err)
	}

	err = os.WriteFile(inputFile, jsonData, 0644)
	if err != nil {
		return fmt.Errorf("error writing updated DRMD file: %v", err)
	}

	return nil
}

// isItemCompleted reports whether the item's filename appears in completedItems.
func isItemCompleted(item Item, completedItems []Item) bool {
	for _, completedItem := range completedItems {
		if item.Filename == completedItem.Filename {
			return true
		}
	}
	return false
}

// isLastCompletedItem reports whether the item is the most recently completed one.
func isLastCompletedItem(item Item, completedItems []Item) bool {
	if len(completedItems) == 0 {
		return false
	}
	return item.Filename == completedItems[len(completedItems)-1].Filename
}