a
parent 4af3988df9
commit b4bbe2a7c6
2 changed files with 40 additions and 25 deletions

main.go (52 lines changed)
@@ -10,12 +10,28 @@ import (
     "math/rand"
     "net/http"
     "net/url"
+    "path/filepath"
     "strconv"
     "strings"
 
     _ "github.com/mattn/go-sqlite3"
 )
 
+var imageExtensions = map[string]bool{
+    ".png":  true,
+    ".jpg":  true,
+    ".jpeg": true,
+    ".gif":  true,
+    ".webp": true,
+    ".bmp":  true,
+    ".avif": true,
+}
+
+func isImageFile(path string) bool {
+    ext := strings.ToLower(filepath.Ext(path))
+    return imageExtensions[ext]
+}
+
 //go:embed templates/*
 var templatesFS embed.FS
 
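Note: the new isImageFile helper decides purely from the filename extension, lower-cased via strings.ToLower and extracted with filepath.Ext, replacing the per-page prefix column removed below. A minimal sketch of its behavior, assuming it sits in the same package as the diff above; the paths are invented for illustration:

package main

import "fmt"

func main() {
    // Only the extension matters; directory and letter case do not.
    fmt.Println(isImageFile("comics/vol1/001.webp")) // true  (".webp" is in imageExtensions)
    fmt.Println(isImageFile("comics/vol1/001.PNG"))  // true  (extension is lower-cased first)
    fmt.Println(isImageFile("comics/vol1/notes.txt")) // false (not in the map)
    fmt.Println(isImageFile("comics/vol1/cover"))     // false (filepath.Ext returns "")
}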
@@ -23,9 +39,8 @@ var templatesFS embed.FS
 var assetsFS embed.FS
 
 type CoverData struct {
-    Comic  int
-    Path   string
-    Prefix string
+    Comic int
+    Path  string
 }
 
 var (
@@ -84,7 +99,7 @@ func handleRandom(w http.ResponseWriter, r *http.Request) {
     // If accessing /random without chapter, shuffle and redirect to /random/0
     if path == "" {
         query := `
-            select page.id_comic, comic.directory || '/' || page.filename path, page.prefix
+            select page.id_comic, comic.directory || '/' || page.filename path
             from page
             join comic on comic.id_comic = page.id_comic
             where page.number = 0
@@ -99,7 +114,7 @@ func handleRandom(w http.ResponseWriter, r *http.Request) {
         randomCovers = nil
         for rows.Next() {
             var c CoverData
-            if err := rows.Scan(&c.Comic, &c.Path, &c.Prefix); err != nil {
+            if err := rows.Scan(&c.Comic, &c.Path); err != nil {
                 http.Error(w, err.Error(), http.StatusInternalServerError)
                 return
             }
@@ -140,7 +155,7 @@ func handleRandom(w http.ResponseWriter, r *http.Request) {
     all := make([]Cover, 0, limit*2)
     for _, c := range randomCovers[start:end] {
         var coverUrl string
-        if c.Prefix == "imaginary" {
+        if isImageFile(c.Path) {
            coverUrl = fmt.Sprintf("%s/smartcrop?width=300&height=370&file=%s", imaginaryURL, url.QueryEscape(c.Path))
         } else {
            coverUrl = "/static/" + c.Path
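Note: with the prefix column gone, the handlers above pick between imaginary's smartcrop endpoint and the /static/ file server by extension alone. A small sketch of the resulting cover URLs, assuming access to the isImageFile helper from this commit; the imaginary address and paths are made up for illustration:

package main

import (
    "fmt"
    "net/url"
)

func main() {
    imaginaryURL := "http://localhost:9000" // assumption for the example
    for _, p := range []string{"onepiece/ch1/cover.jpg", "onepiece/ch1/cover.cbz"} {
        if isImageFile(p) {
            // Image covers go through imaginary's smartcrop endpoint, as in handleRandom/handleIndex.
            fmt.Printf("%s/smartcrop?width=300&height=370&file=%s\n", imaginaryURL, url.QueryEscape(p))
        } else {
            // Anything else is served directly.
            fmt.Println("/static/" + p)
        }
    }
}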
@@ -180,7 +195,7 @@ func handleGallery(w http.ResponseWriter, r *http.Request) {
     }
 
     query := `
-        select comic.title, comic.directory || '/' || page.filename, page.prefix
+        select comic.title, comic.directory || '/' || page.filename
         from page
         join comic on page.id_comic = comic.id_comic
         where comic.id_comic = ?
@@ -193,20 +208,15 @@ func handleGallery(w http.ResponseWriter, r *http.Request) {
     }
     defer rows.Close()
 
-    type Page struct {
-        Path   string
-        Prefix string
-    }
-
     var title string
-    var pages []Page
+    var pages []string
     for rows.Next() {
-        var path, prefix string
-        if err := rows.Scan(&title, &path, &prefix); err != nil {
+        var path string
+        if err := rows.Scan(&title, &path); err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
         }
-        pages = append(pages, Page{Path: path, Prefix: prefix})
+        pages = append(pages, path)
     }
 
     if len(pages) == 0 {
@@ -216,7 +226,7 @@ func handleGallery(w http.ResponseWriter, r *http.Request) {
 
     data := struct {
         Title        string
-        Pages        []Page
+        Pages        []string
         ImaginaryURL string
     }{title, pages, imaginaryURL}
 
@@ -239,7 +249,7 @@ func handleIndex(w http.ResponseWriter, r *http.Request) {
     offset := chapter * limit
 
     query := `
-        select page.id_comic, comic.directory || '/' || page.filename path, page.prefix
+        select page.id_comic, comic.directory || '/' || page.filename path
        from page
        join comic on comic.id_comic = page.id_comic
        where page.number = 0
@@ -262,13 +272,13 @@ func handleIndex(w http.ResponseWriter, r *http.Request) {
     all := make([]Cover, 0, limit*2)
     for rows.Next() {
         var comic int
-        var path, prefix string
-        if err := rows.Scan(&comic, &path, &prefix); err != nil {
+        var path string
+        if err := rows.Scan(&comic, &path); err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
         }
         var coverUrl string
-        if prefix == "imaginary" {
+        if isImageFile(path) {
            coverUrl = fmt.Sprintf("%s/smartcrop?width=300&height=370&file=%s", imaginaryURL, url.QueryEscape(path))
         } else {
            coverUrl = "/static/" + path
@@ -31,15 +31,20 @@
         width: 1080,
         height: 1920,
         imaginary: '{{.ImaginaryURL}}',
-        pages: [{{range $i, $p := .Pages}}{{if $i}},{{end}}{path: '{{$p.Path}}', prefix: '{{$p.Prefix}}'}{{end}}],
+        pages: [{{range $i, $p := .Pages}}{{if $i}},{{end}}'{{$p}}'{{end}}],
         urls: [],
         prefetched: new Set(),
+        imageExts: ['.png', '.jpg', '.jpeg', '.gif', '.webp', '.bmp', '.avif'],
+        isImage(path) {
+            const ext = path.substring(path.lastIndexOf('.')).toLowerCase();
+            return this.imageExts.includes(ext);
+        },
         buildUrls() {
             this.urls = this.pages.map(p => {
-                if (p.prefix === 'imaginary') {
-                    return this.imaginary + '/fit?width=' + this.width + '&height=' + this.height + '&file=' + encodeURIComponent(p.path);
+                if (this.isImage(p)) {
+                    return this.imaginary + '/fit?width=' + this.width + '&height=' + this.height + '&file=' + encodeURIComponent(p);
                 }
-                return '/static/' + p.path;
+                return '/static/' + p;
             });
             this.prefetched.clear();
         },
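Note: the template in the second changed file now receives .Pages as a plain []string, so the range emits a quoted JavaScript array and the extension check moves into the client-side isImage helper, mirroring the Go one. A minimal sketch of how that pages line renders, using text/template only to show the output shape; the page names are invented:

package main

import (
    "os"
    "text/template"
)

func main() {
    const line = `pages: [{{range $i, $p := .Pages}}{{if $i}},{{end}}'{{$p}}'{{end}}],`
    t := template.Must(template.New("pages").Parse(line))
    data := struct{ Pages []string }{Pages: []string{"vol1/001.webp", "vol1/002.webp"}}
    if err := t.Execute(os.Stdout, data); err != nil {
        panic(err)
    }
    // Prints: pages: ['vol1/001.webp','vol1/002.webp'],
}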