url-img-pdf/url-img-pdf.go

package main
// Download images from web URLs and convert them to a pdf file with one click
import (
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"strconv"
)
func main() {
	fmt.Println("SCRIPT SUCCESSFULLY STARTED")

	filesReady := 0
	fmt.Println("Are the files already downloaded? (1/0) (press 1 to skip the download step)")
	fmt.Scanln(&filesReady)

	var name string
	if filesReady == 0 {
		var baseURL, fileFormat string
		var startNumber, endNumber int

		fmt.Println("Folder Name: (enter for default) ")
		fmt.Scanln(&name)
		fmt.Println("Base URL: ")
		fmt.Scanln(&baseURL)
		fmt.Println("Start Number: ")
		fmt.Scanln(&startNumber)
		fmt.Println("End Number: ")
		fmt.Scanln(&endNumber)
		fmt.Println("File Format: (enter for default .jpg format) ")
		fmt.Scanln(&fileFormat)

		if name == "" {
			name = "url-img-pdf_FOLDER"
		}
		if fileFormat == "" {
			fileFormat = "jpg"
		}

		// Store downloads under a relative output/ directory; MkdirAll also
		// creates the parent directory if it does not exist yet.
		name = "output/" + name
		if err := os.MkdirAll(name, os.ModePerm); err != nil {
			log.Fatal(err)
		}

		// Image URLs are assumed to be baseURL followed by a running number.
		for i := startNumber; i <= endNumber; i++ {
			err := DownloadFile(name+"/img"+strconv.Itoa(i)+"."+fileFormat, baseURL+strconv.Itoa(i))
			if err != nil {
				panic(err)
			}
			fmt.Println("Downloaded: " + baseURL + strconv.Itoa(i))
		}
	}
	fmt.Println("All images are in place; you can use a tool like pdfarranger to convert them to a PDF (too lazy to implement)")
}
// DownloadFile downloads a URL to a local file. It is memory-efficient because it
// streams the response body to disk instead of loading the whole file into memory. (NEEDS TEST!)
func DownloadFile(filepath string, url string) error {
	// Get the data
	resp, err := http.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// Fail early on non-200 responses instead of saving an error page as an image
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %s for %s", resp.Status, url)
	}

	// Create the file
	out, err := os.Create(filepath)
	if err != nil {
		return err
	}
	defer out.Close()

	// Write the body to the file as it streams in
	_, err = io.Copy(out, resp.Body)
	return err
}
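
// The header comment promises converting the downloaded images into a PDF, but
// that step is currently delegated to an external tool. A minimal sketch of how
// it could be done in Go is shown below, assuming the third-party library
// github.com/jung-kurt/gofpdf is added as a dependency; the helper name
// imagesToPDF and the A4 page layout are illustrative choices, not part of the
// original script. It is left commented out so this file builds without the
// extra import.
//
//	func imagesToPDF(imagePaths []string, outPath string) error {
//		pdf := gofpdf.New("P", "mm", "A4", "") // portrait, millimetres, A4 pages
//		for _, p := range imagePaths {
//			pdf.AddPage()
//			// Draw each image at the top-left corner, scaled to the full page
//			// width (210 mm); a height of 0 keeps the original aspect ratio.
//			pdf.Image(p, 0, 0, 210, 0, false, "", 0, "")
//		}
//		return pdf.OutputFileAndClose(outPath)
//	}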