init
This commit is contained in:
commit
38a5c47dab
11 changed files with 550 additions and 0 deletions
296
main.go
Normal file
296
main.go
Normal file
|
|
@ -0,0 +1,296 @@
|
|||
package main
|
||||
|
||||
import (
	"database/sql"
	"embed"
	"flag"
	"fmt"
	"html/template"
	"log"
	"math/rand"
	"net/http"
	"net/url"
	"strconv"
	"strings"
	"sync"

	_ "github.com/mattn/go-sqlite3"
)
|
||||
|
||||
//go:embed templates/*
var templatesFS embed.FS // HTML templates compiled into the binary

//go:embed assets/*
var assetsFS embed.FS // static assets compiled into the binary

// CoverData is one cover row as selected from the database: the comic id,
// the image path relative to the gallery root, and the prefix that decides
// how the cover URL is built ("imaginary" routes the image through the
// imaginary cropping service; anything else is served from /static/).
type CoverData struct {
	Comic  int
	Path   string
	Prefix string
}
|
||||
|
||||
// Package-level server state, populated in main before the HTTP
// server starts accepting requests.
var (
	db           *sql.DB            // SQLite handle, opened in main
	tmpl         *template.Template // parsed templates from templatesFS
	imaginaryURL string             // root URL of the imaginary image service (-imaginary flag)
	galleryPath  string             // filesystem root served under /static/ (-gallery flag)
	databasePath string             // SQLite file path (-database flag)
	// randomCovers is the shuffled cover list built by the /random handler
	// and paged through by /random/{n}.
	randomCovers []CoverData
)
|
||||
|
||||
func main() {
|
||||
flag.StringVar(&imaginaryURL, "imaginary", "http://192.168.88.54:10001", "imaginary root URL")
|
||||
flag.StringVar(&galleryPath, "gallery", "/home/user/mnt/panda/galleries/", "gallery path")
|
||||
flag.StringVar(&databasePath, "database", "./db.sqlite", "database path")
|
||||
flag.Parse()
|
||||
|
||||
var err error
|
||||
|
||||
db, err = sql.Open("sqlite3", "./db.sqlite")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
defer db.Close()
|
||||
|
||||
if err = db.Ping(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
log.Println("SQLite connected")
|
||||
|
||||
tmpl, err = template.ParseFS(templatesFS, "templates/*.html")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
http.HandleFunc("/gallery/", handleGallery)
|
||||
http.HandleFunc("/random/", handleRandom)
|
||||
http.HandleFunc("/random", handleRandom)
|
||||
http.HandleFunc("/", handleIndex)
|
||||
http.Handle("/assets/", http.FileServer(http.FS(assetsFS)))
|
||||
http.Handle("/static/", http.StripPrefix("/static/", http.FileServer(http.Dir(galleryPath))))
|
||||
|
||||
log.Println("Server listening on :8080")
|
||||
log.Fatal(http.ListenAndServe(":10000", nil))
|
||||
}
|
||||
|
||||
func handleRandom(w http.ResponseWriter, r *http.Request) {
|
||||
path := strings.TrimPrefix(r.URL.Path, "/random")
|
||||
path = strings.TrimPrefix(path, "/")
|
||||
|
||||
// If accessing /random without chapter, shuffle and redirect to /random/0
|
||||
if path == "" {
|
||||
query := `
|
||||
select page.id_comic, comic.directory || '/' || page.filename path, page.prefix
|
||||
from page
|
||||
join comic on comic.id_comic = page.id_comic
|
||||
where page.number = 0
|
||||
`
|
||||
rows, err := db.Query(query)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
randomCovers = nil
|
||||
for rows.Next() {
|
||||
var c CoverData
|
||||
if err := rows.Scan(&c.Comic, &c.Path, &c.Prefix); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
randomCovers = append(randomCovers, c)
|
||||
}
|
||||
|
||||
rand.Shuffle(len(randomCovers), func(i, j int) {
|
||||
randomCovers[i], randomCovers[j] = randomCovers[j], randomCovers[i]
|
||||
})
|
||||
|
||||
http.Redirect(w, r, "/random/0", http.StatusFound)
|
||||
return
|
||||
}
|
||||
|
||||
chapter, err := strconv.Atoi(path)
|
||||
if err != nil {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
const limit = 18
|
||||
offset := chapter * limit
|
||||
|
||||
type Cover struct {
|
||||
Comic int
|
||||
Url string
|
||||
}
|
||||
|
||||
end := offset + limit*2
|
||||
if end > len(randomCovers) {
|
||||
end = len(randomCovers)
|
||||
}
|
||||
start := offset
|
||||
if start > len(randomCovers) {
|
||||
start = len(randomCovers)
|
||||
}
|
||||
|
||||
all := make([]Cover, 0, limit*2)
|
||||
for _, c := range randomCovers[start:end] {
|
||||
var coverUrl string
|
||||
if c.Prefix == "imaginary" {
|
||||
coverUrl = fmt.Sprintf("%s/smartcrop?width=300&height=370&file=%s", imaginaryURL, url.QueryEscape(c.Path))
|
||||
} else {
|
||||
coverUrl = "/static/" + c.Path
|
||||
}
|
||||
all = append(all, Cover{Comic: c.Comic, Url: coverUrl})
|
||||
}
|
||||
|
||||
covers := all
|
||||
var preload []Cover
|
||||
if len(all) > limit {
|
||||
covers = all[:limit]
|
||||
preload = all[limit:]
|
||||
}
|
||||
|
||||
var nextChapter int
|
||||
if len(preload) > 0 {
|
||||
nextChapter = chapter + 1
|
||||
} else {
|
||||
nextChapter = chapter
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Covers []Cover
|
||||
Preload []Cover
|
||||
Chapter int
|
||||
Prefix string
|
||||
}{covers, preload, nextChapter, "/random"}
|
||||
|
||||
tmpl.ExecuteTemplate(w, "index.html", data)
|
||||
}
|
||||
|
||||
func handleGallery(w http.ResponseWriter, r *http.Request) {
|
||||
comic := strings.TrimPrefix(r.URL.Path, "/gallery/")
|
||||
if comic == "" {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
query := `
|
||||
select comic.title, comic.directory || '/' || page.filename, page.prefix
|
||||
from page
|
||||
join comic on page.id_comic = comic.id_comic
|
||||
where comic.id_comic = ?
|
||||
order by page.number
|
||||
`
|
||||
rows, err := db.Query(query, comic)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
type Page struct {
|
||||
Path string
|
||||
Prefix string
|
||||
}
|
||||
|
||||
var title string
|
||||
var pages []Page
|
||||
for rows.Next() {
|
||||
var path, prefix string
|
||||
if err := rows.Scan(&title, &path, &prefix); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
pages = append(pages, Page{Path: path, Prefix: prefix})
|
||||
}
|
||||
|
||||
if len(pages) == 0 {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
data := struct {
|
||||
Title string
|
||||
Pages []Page
|
||||
ImaginaryURL string
|
||||
}{title, pages, imaginaryURL}
|
||||
|
||||
tmpl.ExecuteTemplate(w, "gallery.html", data)
|
||||
}
|
||||
|
||||
func handleIndex(w http.ResponseWriter, r *http.Request) {
|
||||
chapter := 0
|
||||
path := strings.TrimPrefix(r.URL.Path, "/")
|
||||
if path != "" {
|
||||
var err error
|
||||
chapter, err = strconv.Atoi(path)
|
||||
if err != nil {
|
||||
http.NotFound(w, r)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
const limit = 18
|
||||
offset := chapter * limit
|
||||
|
||||
query := `
|
||||
select page.id_comic, comic.directory || '/' || page.filename path, page.prefix
|
||||
from page
|
||||
join comic on comic.id_comic = page.id_comic
|
||||
where page.number = 0
|
||||
order by comic.id_comic desc
|
||||
limit ? offset ?
|
||||
`
|
||||
|
||||
rows, err := db.Query(query, limit*2, offset)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
type Cover struct {
|
||||
Comic int
|
||||
Url string
|
||||
}
|
||||
|
||||
all := make([]Cover, 0, limit*2)
|
||||
for rows.Next() {
|
||||
var comic int
|
||||
var path, prefix string
|
||||
if err := rows.Scan(&comic, &path, &prefix); err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
var coverUrl string
|
||||
if prefix == "imaginary" {
|
||||
coverUrl = fmt.Sprintf("%s/smartcrop?width=300&height=370&file=%s", imaginaryURL, url.QueryEscape(path))
|
||||
} else {
|
||||
coverUrl = "/static/" + path
|
||||
}
|
||||
all = append(all, Cover{Comic: comic, Url: coverUrl})
|
||||
}
|
||||
|
||||
covers := all
|
||||
var preload []Cover
|
||||
if len(all) > limit {
|
||||
covers = all[:limit]
|
||||
preload = all[limit:]
|
||||
}
|
||||
|
||||
var nextChapter int
|
||||
if len(preload) > 0 {
|
||||
nextChapter = chapter + 1
|
||||
} else {
|
||||
nextChapter = chapter
|
||||
}
|
||||
data := struct {
|
||||
Covers []Cover
|
||||
Preload []Cover
|
||||
Chapter int
|
||||
Prefix string
|
||||
}{covers, preload, nextChapter, ""}
|
||||
|
||||
tmpl.ExecuteTemplate(w, "index.html", data)
|
||||
}
|
||||
|
||||
Loading…
Add table
Add a link
Reference in a new issue