diff --git a/assets/js/controllers/upload.js b/assets/js/controllers/upload.js new file mode 100644 index 0000000..a810d41 --- /dev/null +++ b/assets/js/controllers/upload.js @@ -0,0 +1,97 @@ +import { Controller } from "@hotwired/stimulus" + +export default class UploadController extends Controller { + static values = { + siteId: Number, + }; + + upload(ev) { + ev.preventDefault(); + + this._promptForUpload((files) => { + this._doUploads(files); + }) + } + + _promptForUpload(onAccept) { + const input = document.createElement('input'); + input.type = 'file'; + input.accept = 'image/*'; + input.multiple = true; + + input.onchange = (e) => { + const files = Array.from(e.target.files); + if (files.length > 0) { + onAccept(files); + } + }; + + input.click(); + } + + async _doUploads(files) { + for (let file of files) { + await this._doUpload(file); + } + } + + async _doUpload(file) { + console.log(`Uploading ${file.name}: new pending`); + + // Prepare upload of file supplying size and mime-type + let newPending = await (await fetch(`/sites/${this.siteIdValue}/uploads/pending`, { + method: 'POST', + headers: { + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + size: file.size, + type: file.type, + name: file.name, + }) + })).json(); + + // Upload file in 2 MB blocks + let offset = 0; + let chunkSize = 2 * 1024 * 1024; + while (offset < file.size) { + let chunk = file.slice(offset, offset + chunkSize); + + console.log(`Uploading ${file.name}: uploading part`); + await fetch(`/sites/${this.siteIdValue}/uploads/pending/${newPending.guid}`, { + method: 'POST', + headers: { + 'Content-Type': 'application/octet-stream' + }, + body: chunk + }); + + offset += chunkSize; + } + + // Calculate SHA256 hash + const hash = await this._calculateSHA256(file); + + // Finalise upload + console.log(`Uploading ${file.name}: finalise`); + await fetch(`/sites/${this.siteIdValue}/uploads/pending/${newPending.guid}/finalize`, { + method: 
'POST', + headers: { + 'Accept': 'application/json', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + hash: hash + }) + }); + } + + async _calculateSHA256(file) { + const arrayBuffer = await file.arrayBuffer(); + const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer); + const hashArray = Array.from(new Uint8Array(hashBuffer)); + const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join(''); + return hashHex; + } +} \ No newline at end of file diff --git a/assets/js/main.js b/assets/js/main.js index 6bca555..3e8c345 100644 --- a/assets/js/main.js +++ b/assets/js/main.js @@ -5,10 +5,12 @@ import PostlistController from "./controllers/postlist"; import PosteditController from "./controllers/postedit"; import LogoutController from "./controllers/logout"; import FirstRunController from "./controllers/firstrun"; +import UploadController from "./controllers/upload"; window.Stimulus = Application.start() Stimulus.register("toast", ToastController); Stimulus.register("postlist", PostlistController); Stimulus.register("postedit", PosteditController); Stimulus.register("logout", LogoutController); -Stimulus.register("first-run", FirstRunController); \ No newline at end of file +Stimulus.register("first-run", FirstRunController); +Stimulus.register("upload", UploadController); \ No newline at end of file diff --git a/cmds/server.go b/cmds/server.go index ccf11f7..e12d265 100644 --- a/cmds/server.go +++ b/cmds/server.go @@ -108,6 +108,7 @@ Starting weiro without any arguments will start the server. ih := handlers.IndexHandler{SiteService: svcs.Sites} lh := handlers.LoginHandler{Config: cfg, AuthService: svcs.Auth} ph := handlers.PostsHandler{PostService: svcs.Posts} + uh := handlers.UploadsHandler{UploadsService: svcs.Uploads} app.Get("/login", lh.Login) app.Post("/login", lh.DoLogin) @@ -122,6 +123,11 @@ Starting weiro without any arguments will start the server. 
siteGroup.Patch("/posts/:postID", ph.Patch) siteGroup.Delete("/posts/:postID", ph.Delete) + siteGroup.Get("/uploads", uh.Index) + siteGroup.Post("/uploads/pending", uh.New) + siteGroup.Post("/uploads/pending/:guid", uh.UploadPart) + siteGroup.Post("/uploads/pending/:guid/finalize", uh.UploadComplete) + app.Get("/", middleware.OptionalUser(svcs.Auth), ih.Index) app.Get("/first-run", ih.FirstRun) app.Post("/first-run", ih.FirstRunSubmit) diff --git a/config/config.go b/config/config.go index 38cde39..56585a7 100644 --- a/config/config.go +++ b/config/config.go @@ -9,6 +9,7 @@ import ( type Config struct { DataDir string `env:"DATA_DIR"` + ScratchDir string `env:"SCRATCH_DIR"` SiteDomain string `env:"SITE_DOMAIN"` LoginLocked bool `env:"LOGIN_LOCKED,default=false"` Env string `env:"ENV,default=prod"` diff --git a/handlers/posts.go b/handlers/posts.go index 041ec9d..1bab15d 100644 --- a/handlers/posts.go +++ b/handlers/posts.go @@ -2,7 +2,6 @@ package handlers import ( "fmt" - "log" "strconv" "github.com/gofiber/fiber/v3" @@ -92,8 +91,6 @@ func (ph PostsHandler) Update(c fiber.Ctx) error { } func (ph PostsHandler) Patch(c fiber.Ctx) error { - log.Println("PATCH") - postIDStr := c.Params("postID") if postIDStr == "" { return fiber.ErrBadRequest @@ -110,8 +107,6 @@ func (ph PostsHandler) Patch(c fiber.Ctx) error { return err } - log.Println("Request") - switch req.Action { case "restore": if err := ph.PostService.RestorePost(c.Context(), postID); err != nil { diff --git a/handlers/uploads.go b/handlers/uploads.go new file mode 100644 index 0000000..3d6e891 --- /dev/null +++ b/handlers/uploads.go @@ -0,0 +1,62 @@ +package handlers + +import ( + "github.com/gofiber/fiber/v3" + "lmika.dev/lmika/weiro/services/uploads" +) + +type UploadsHandler struct { + UploadsService *uploads.Service +} + +func (uh UploadsHandler) Index(c fiber.Ctx) error { + return c.Render("uploads/index", nil) +} + +func (uh UploadsHandler) New(c fiber.Ctx) error { + var req uploads.NewPendingRequest + 
+ if err := c.Bind().Body(&req); err != nil { + return err + } + + res, err := uh.UploadsService.NewPending(c.Context(), req) + if err != nil { + return err + } + + return c.JSON(res) +} + +func (uh UploadsHandler) UploadPart(c fiber.Ctx) error { + guid := c.Params("guid") + if guid == "" { + return fiber.ErrBadRequest + } + + if err := uh.UploadsService.WriteToPending(c.Context(), guid, c.Body()); err != nil { + return err + } + + return c.Status(fiber.StatusAccepted).JSON(fiber.Map{}) +} + +func (uh UploadsHandler) UploadComplete(c fiber.Ctx) error { + guid := c.Params("guid") + if guid == "" { + return fiber.ErrBadRequest + } + + var res struct { + Hash string `json:"hash"` + } + if err := c.Bind().Body(&res); err != nil { + return err + } + + if err := uh.UploadsService.FinalizePending(c.Context(), guid, res.Hash); err != nil { + return err + } + + return c.Status(fiber.StatusAccepted).JSON(fiber.Map{}) +} diff --git a/models/ctx.go b/models/ctx.go index 8eca001..60f93fe 100644 --- a/models/ctx.go +++ b/models/ctx.go @@ -6,7 +6,7 @@ type userKeyType struct{} type siteKeyType struct{} var userKey = userKeyType{} -var siteKey = userKeyType{} +var siteKey = siteKeyType{} func WithUser(ctx context.Context, user User) context.Context { return context.WithValue(ctx, userKey, user) diff --git a/models/uploads.go b/models/uploads.go new file mode 100644 index 0000000..0e818c5 --- /dev/null +++ b/models/uploads.go @@ -0,0 +1,23 @@ +package models + +import "time" + +type Upload struct { + ID int64 `json:"id"` + SiteID int64 `json:"site_id"` + GUID string `json:"guid"` + MIMEType string `json:"mime_type"` + Filename string `json:"filename"` + CreatedAt int64 `json:"created_at"` + Alt string `json:"alt"` +} + +type PendingUpload struct { + GUID string `json:"guid"` + SiteID int64 `json:"site_id"` + UserID int64 `json:"user_id"` + FileSize int64 `json:"file_size"` + Filename string `json:"filename"` + MIMEType string `json:"mime_type"` + UploadStarted time.Time 
`json:"upload_started"` +} diff --git a/providers/db/gen/sqlgen/models.go b/providers/db/gen/sqlgen/models.go index 5317ef2..6aed257 100644 --- a/providers/db/gen/sqlgen/models.go +++ b/providers/db/gen/sqlgen/models.go @@ -4,6 +4,17 @@ package sqlgen +type PendingUpload struct { + ID interface{} + SiteID int64 + Guid string + UserID int64 + Filename string + FileSize int64 + MimeType string + UploadStartedAt int64 +} + type Post struct { ID int64 SiteID int64 @@ -38,6 +49,16 @@ type Site struct { CreatedAt int64 } +type Upload struct { + ID interface{} + SiteID int64 + Guid string + MimeType string + Filename string + Alt string + CreatedAt int64 +} + type User struct { ID int64 Username string diff --git a/providers/db/gen/sqlgen/pending_uploads.sql.go b/providers/db/gen/sqlgen/pending_uploads.sql.go new file mode 100644 index 0000000..c2ca66b --- /dev/null +++ b/providers/db/gen/sqlgen/pending_uploads.sql.go @@ -0,0 +1,68 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.30.0 +// source: pending_uploads.sql + +package sqlgen + +import ( + "context" +) + +const insertPendingUpload = `-- name: InsertPendingUpload :one +INSERT INTO pending_uploads ( + site_id, + guid, + user_id, + filename, + file_size, + mime_type, + upload_started_at +) VALUES (?, ?, ?, ?, ?, ?, ?) 
+RETURNING id +` + +type InsertPendingUploadParams struct { + SiteID int64 + Guid string + UserID int64 + Filename string + FileSize int64 + MimeType string + UploadStartedAt int64 +} + +func (q *Queries) InsertPendingUpload(ctx context.Context, arg InsertPendingUploadParams) (interface{}, error) { + row := q.db.QueryRowContext(ctx, insertPendingUpload, + arg.SiteID, + arg.Guid, + arg.UserID, + arg.Filename, + arg.FileSize, + arg.MimeType, + arg.UploadStartedAt, + ) + var id interface{} + err := row.Scan(&id) + return id, err +} + +const selectPendingUploadByGUID = `-- name: SelectPendingUploadByGUID :one +SELECT id, site_id, guid, user_id, filename, file_size, mime_type, upload_started_at FROM pending_uploads WHERE guid = ? LIMIT 1 +` + +func (q *Queries) SelectPendingUploadByGUID(ctx context.Context, guid string) (PendingUpload, error) { + row := q.db.QueryRowContext(ctx, selectPendingUploadByGUID, guid) + var i PendingUpload + err := row.Scan( + &i.ID, + &i.SiteID, + &i.Guid, + &i.UserID, + &i.Filename, + &i.FileSize, + &i.MimeType, + &i.UploadStartedAt, + ) + return i, err +} diff --git a/providers/db/gen/sqlgen/uploads.sql.go b/providers/db/gen/sqlgen/uploads.sql.go new file mode 100644 index 0000000..6891422 --- /dev/null +++ b/providers/db/gen/sqlgen/uploads.sql.go @@ -0,0 +1,120 @@ +// Code generated by sqlc. DO NOT EDIT. +// versions: +// sqlc v1.30.0 +// source: uploads.sql + +package sqlgen + +import ( + "context" +) + +const deleteUpload = `-- name: DeleteUpload :exec +DELETE FROM uploads WHERE id = ? +` + +func (q *Queries) DeleteUpload(ctx context.Context, id interface{}) error { + _, err := q.db.ExecContext(ctx, deleteUpload, id) + return err +} + +const insertUpload = `-- name: InsertUpload :exec +INSERT INTO uploads ( + site_id, + guid, + mime_type, + filename, + created_at, + alt +) VALUES (?, ?, ?, ?, ?, ?) 
+RETURNING id +` + +type InsertUploadParams struct { + SiteID int64 + Guid string + MimeType string + Filename string + CreatedAt int64 + Alt string +} + +func (q *Queries) InsertUpload(ctx context.Context, arg InsertUploadParams) error { + _, err := q.db.ExecContext(ctx, insertUpload, + arg.SiteID, + arg.Guid, + arg.MimeType, + arg.Filename, + arg.CreatedAt, + arg.Alt, + ) + return err +} + +const selectUploadByID = `-- name: SelectUploadByID :one +SELECT id, site_id, guid, mime_type, filename, alt, created_at FROM uploads WHERE id = ? +` + +func (q *Queries) SelectUploadByID(ctx context.Context, id interface{}) (Upload, error) { + row := q.db.QueryRowContext(ctx, selectUploadByID, id) + var i Upload + err := row.Scan( + &i.ID, + &i.SiteID, + &i.Guid, + &i.MimeType, + &i.Filename, + &i.Alt, + &i.CreatedAt, + ) + return i, err +} + +const selectUploadsOfSite = `-- name: SelectUploadsOfSite :many +SELECT id, site_id, guid, mime_type, filename, alt, created_at FROM uploads WHERE site_id = ? ORDER BY created_at DESC +` + +func (q *Queries) SelectUploadsOfSite(ctx context.Context, siteID int64) ([]Upload, error) { + rows, err := q.db.QueryContext(ctx, selectUploadsOfSite, siteID) + if err != nil { + return nil, err + } + defer rows.Close() + var items []Upload + for rows.Next() { + var i Upload + if err := rows.Scan( + &i.ID, + &i.SiteID, + &i.Guid, + &i.MimeType, + &i.Filename, + &i.Alt, + &i.CreatedAt, + ); err != nil { + return nil, err + } + items = append(items, i) + } + if err := rows.Close(); err != nil { + return nil, err + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} + +const updateUpload = `-- name: UpdateUpload :exec +UPDATE uploads SET alt = ? WHERE id = ? 
+` + +type UpdateUploadParams struct { + Alt string + ID interface{} +} + +func (q *Queries) UpdateUpload(ctx context.Context, arg UpdateUploadParams) error { + _, err := q.db.ExecContext(ctx, updateUpload, arg.Alt, arg.ID) + return err +} diff --git a/providers/db/uploads.go b/providers/db/uploads.go new file mode 100644 index 0000000..f65b794 --- /dev/null +++ b/providers/db/uploads.go @@ -0,0 +1,107 @@ +package db + +import ( + "context" + "time" + + "lmika.dev/lmika/weiro/models" + "lmika.dev/lmika/weiro/providers/db/gen/sqlgen" +) + +func (db *Provider) SelectUploadByID(ctx context.Context, id int64) (models.Upload, error) { + row, err := db.queries.SelectUploadByID(ctx, id) + if err != nil { + return models.Upload{}, err + } + + return dbUploadToUpload(row), nil +} + +func (db *Provider) SelectUploadsOfSite(ctx context.Context, siteID int64) ([]models.Upload, error) { + rows, err := db.queries.SelectUploadsOfSite(ctx, siteID) + if err != nil { + return nil, err + } + + uploads := make([]models.Upload, len(rows)) + for i, row := range rows { + uploads[i] = dbUploadToUpload(row) + } + return uploads, nil +} + +func (db *Provider) SaveUpload(ctx context.Context, upload *models.Upload) error { + if upload.ID == 0 { + if err := db.queries.InsertUpload(ctx, sqlgen.InsertUploadParams{ + SiteID: upload.SiteID, + Guid: upload.GUID, + MimeType: upload.MIMEType, + Filename: upload.Filename, + CreatedAt: upload.CreatedAt, + Alt: upload.Alt, + }); err != nil { + return err + } + return nil + } + + return db.queries.UpdateUpload(ctx, sqlgen.UpdateUploadParams{ + Alt: upload.Alt, + ID: upload.ID, + }) +} + +func (db *Provider) DeleteUpload(ctx context.Context, id int64) error { + return db.queries.DeleteUpload(ctx, id) +} + +func (db *Provider) SelectPendingUploadByGUID(ctx context.Context, guid string) (models.PendingUpload, error) { + row, err := db.queries.SelectPendingUploadByGUID(ctx, guid) + if err != nil { + return models.PendingUpload{}, err + } + + return 
dbPendingUploadToPendingUpload(row), nil +} + +func (db *Provider) SavePendingUpload(ctx context.Context, pending *models.PendingUpload) error { + _, err := db.queries.InsertPendingUpload(ctx, sqlgen.InsertPendingUploadParams{ + SiteID: pending.SiteID, + Guid: pending.GUID, + UserID: pending.UserID, + Filename: pending.Filename, + FileSize: pending.FileSize, + MimeType: pending.MIMEType, + UploadStartedAt: pending.UploadStarted.Unix(), + }) + return err +} + +func dbUploadToUpload(row sqlgen.Upload) models.Upload { + var id int64 + if idVal, ok := row.ID.(int64); ok { + id = idVal + } + + return models.Upload{ + ID: id, + SiteID: row.SiteID, + GUID: row.Guid, + MIMEType: row.MimeType, + Filename: row.Filename, + Alt: row.Alt, + CreatedAt: row.CreatedAt, + } +} + +func dbPendingUploadToPendingUpload(row sqlgen.PendingUpload) models.PendingUpload { + return models.PendingUpload{ + GUID: row.Guid, + SiteID: row.SiteID, + UserID: row.UserID, + FileSize: row.FileSize, + Filename: row.Filename, + MIMEType: row.MimeType, + UploadStarted: time.Unix(row.UploadStartedAt, 0), + } +} diff --git a/services/posts/delete.go b/services/posts/delete.go index df220ef..c7c1b17 100644 --- a/services/posts/delete.go +++ b/services/posts/delete.go @@ -57,6 +57,13 @@ func (s *Service) fetchPostAndSite(ctx context.Context, pid int64) (*models.Post return nil, models.Site{}, models.SiteRequiredError } + user, ok := models.GetUser(ctx) + if !ok { + return nil, models.Site{}, models.UserRequiredError + } else if site.OwnerID != user.ID { + return nil, models.Site{}, models.PermissionError + } + post, err := s.db.SelectPost(ctx, pid) if err != nil { return nil, models.Site{}, err diff --git a/services/services.go b/services/services.go index edf52cd..7bfd120 100644 --- a/services/services.go +++ b/services/services.go @@ -9,6 +9,7 @@ import ( "lmika.dev/lmika/weiro/services/posts" "lmika.dev/lmika/weiro/services/publisher" "lmika.dev/lmika/weiro/services/sites" + 
"lmika.dev/lmika/weiro/services/uploads" ) type Services struct { @@ -18,6 +19,7 @@ type Services struct { PublisherQueue *publisher.Queue Posts *posts.Service Sites *sites.Service + Uploads *uploads.Service } func New(cfg config.Config) (*Services, error) { @@ -31,6 +33,7 @@ func New(cfg config.Config) (*Services, error) { publisherQueue := publisher.NewQueue(publisherSvc) postService := posts.New(dbp, publisherQueue) siteService := sites.New(dbp) + uploadService := uploads.New(dbp, filepath.Join(cfg.ScratchDir, "uploads", "pending")) return &Services{ DB: dbp, @@ -39,6 +42,7 @@ func New(cfg config.Config) (*Services, error) { PublisherQueue: publisherQueue, Posts: postService, Sites: siteService, + Uploads: uploadService, }, nil } diff --git a/services/uploads/pending.go b/services/uploads/pending.go new file mode 100644 index 0000000..a6dd5d3 --- /dev/null +++ b/services/uploads/pending.go @@ -0,0 +1,124 @@ +package uploads + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/hex" + "io" + "os" + "path/filepath" + "time" + + "emperror.dev/errors" + "lmika.dev/lmika/weiro/models" +) + +type NewPendingRequest struct { + FileSize int64 `json:"size"` + Filename string `json:"name"` + MIMEType string `json:"type"` +} + +func (s *Service) NewPending(ctx context.Context, req NewPendingRequest) (models.PendingUpload, error) { + site, user, err := s.fetchSiteAndUser(ctx) + if err != nil { + return models.PendingUpload{}, err + } + + pending := models.PendingUpload{ + GUID: models.NewNanoID(), + SiteID: site.ID, + UserID: user.ID, + FileSize: req.FileSize, + Filename: req.Filename, + MIMEType: req.MIMEType, + UploadStarted: time.Now(), + } + if err := s.db.SavePendingUpload(ctx, &pending); err != nil { + return models.PendingUpload{}, err + } + + if err := os.MkdirAll(s.pendingDir, 0755); err != nil { + return models.PendingUpload{}, err + } + + pendingDataFile, err := os.Create(filepath.Join(s.pendingDir, pending.GUID+".upload")) + if err != nil { + return 
models.PendingUpload{}, err + } + return pending, pendingDataFile.Close() +} + +func (s *Service) WriteToPending(ctx context.Context, pendingGUID string, data []byte) error { + site, user, err := s.fetchSiteAndUser(ctx) + if err != nil { + return err + } + + pu, err := s.db.SelectPendingUploadByGUID(ctx, pendingGUID) + if err != nil { + return err + } else if pu.SiteID != site.ID || pu.UserID != user.ID { + return errors.New("invalid pending upload") + } + + pendingDataFilename := filepath.Join(s.pendingDir, pu.GUID+".upload") + if _, err := os.Stat(pendingDataFilename); err != nil { + return err + } + + pendingDataFile, err := os.OpenFile(pendingDataFilename, os.O_WRONLY|os.O_APPEND, 0644) + if err != nil { + return err + } + defer pendingDataFile.Close() + + pendingDataFile.Seek(0, io.SeekEnd) + if _, err := pendingDataFile.Write(data); err != nil { + return err + } + + return nil +} + +func (s *Service) FinalizePending(ctx context.Context, pendingGUID string, expectedHash string) error { + site, user, err := s.fetchSiteAndUser(ctx) + if err != nil { + return err + } + + pu, err := s.db.SelectPendingUploadByGUID(ctx, pendingGUID) + if err != nil { + return err + } else if pu.SiteID != site.ID || pu.UserID != user.ID { + return errors.New("invalid pending upload") + } + + expectedHashBytes, err := hex.DecodeString(expectedHash) + if err != nil { + return err + } + + pendingDataFilename := filepath.Join(s.pendingDir, pu.GUID+".upload") + if _, err := os.Stat(pendingDataFilename); err != nil { + return err + } + + pendingDataFile, err := os.Open(pendingDataFilename) + if err != nil { + return err + } + defer pendingDataFile.Close() + + shaSum := sha256.New() + if _, err := io.Copy(shaSum, pendingDataFile); err != nil { + return err + } + + if !bytes.Equal(shaSum.Sum(nil), expectedHashBytes) { + return errors.New("hash mismatch") + } + + return nil +} diff --git a/services/uploads/services.go b/services/uploads/services.go new file mode 100644 index 0000000..b8a264f 
--- /dev/null +++ b/services/uploads/services.go @@ -0,0 +1,38 @@ +package uploads + +import ( + "context" + + "lmika.dev/lmika/weiro/models" + "lmika.dev/lmika/weiro/providers/db" +) + +type Service struct { + db *db.Provider + pendingDir string +} + +func New(db *db.Provider, pendingDir string) *Service { + return &Service{ + db: db, + pendingDir: pendingDir, + } +} + +func (s *Service) fetchSiteAndUser(ctx context.Context) (models.Site, models.User, error) { + user, ok := models.GetUser(ctx) + if !ok { + return models.Site{}, models.User{}, models.UserRequiredError + } + + site, ok := models.GetSite(ctx) + if !ok { + return models.Site{}, models.User{}, models.SiteRequiredError + } + + if site.OwnerID != user.ID { + return models.Site{}, models.User{}, models.PermissionError + } + + return site, user, nil +} diff --git a/sql/queries/pending_uploads.sql b/sql/queries/pending_uploads.sql new file mode 100644 index 0000000..423b9fb --- /dev/null +++ b/sql/queries/pending_uploads.sql @@ -0,0 +1,14 @@ +-- name: SelectPendingUploadByGUID :one +SELECT * FROM pending_uploads WHERE guid = ? LIMIT 1; + +-- name: InsertPendingUpload :one +INSERT INTO pending_uploads ( + site_id, + guid, + user_id, + filename, + file_size, + mime_type, + upload_started_at +) VALUES (?, ?, ?, ?, ?, ?, ?) +RETURNING id; \ No newline at end of file diff --git a/sql/queries/uploads.sql b/sql/queries/uploads.sql new file mode 100644 index 0000000..2c37525 --- /dev/null +++ b/sql/queries/uploads.sql @@ -0,0 +1,22 @@ +-- name: SelectUploadsOfSite :many +SELECT * FROM uploads WHERE site_id = ? ORDER BY created_at DESC; + +-- name: SelectUploadByID :one +SELECT * FROM uploads WHERE id = ?; + +-- name: InsertUpload :exec +INSERT INTO uploads ( + site_id, + guid, + mime_type, + filename, + created_at, + alt +) VALUES (?, ?, ?, ?, ?, ?) +RETURNING id; + +-- name: UpdateUpload :exec +UPDATE uploads SET alt = ? 
WHERE id = ?; + +-- name: DeleteUpload :exec +DELETE FROM uploads WHERE id = ?; \ No newline at end of file diff --git a/sql/schema/02_upload.up.sql b/sql/schema/02_upload.up.sql new file mode 100644 index 0000000..8dfec03 --- /dev/null +++ b/sql/schema/02_upload.up.sql @@ -0,0 +1,27 @@ +CREATE TABLE uploads ( + id SERIAL PRIMARY KEY, + site_id INT NOT NULL, + guid TEXT NOT NULL, + mime_type TEXT NOT NULL, + filename TEXT NOT NULL, + alt TEXT NOT NULL, + created_at INT NOT NULL, + + FOREIGN KEY (site_id) REFERENCES sites (id) ON DELETE CASCADE +); +CREATE INDEX idx_uploads_site ON uploads (site_id); +CREATE UNIQUE INDEX idx_uploads_guid ON uploads (guid); + +CREATE TABLE pending_uploads ( + id SERIAL PRIMARY KEY, + site_id INT NOT NULL, + guid TEXT NOT NULL, + user_id INT NOT NULL, + filename TEXT NOT NULL, + file_size INT NOT NULL, + mime_type TEXT NOT NULL, + upload_started_at INT NOT NULL, + FOREIGN KEY (site_id) REFERENCES sites (id) ON DELETE CASCADE, + FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE +); +CREATE UNIQUE INDEX idx_pending_uploads_guid ON pending_uploads (guid); \ No newline at end of file diff --git a/views/uploads/index.html b/views/uploads/index.html new file mode 100644 index 0000000..4bc9627 --- /dev/null +++ b/views/uploads/index.html @@ -0,0 +1,6 @@ +
+
+ +
+
\ No newline at end of file