Compare commits
No commits in common. "main" and "feature/uploads" have entirely different histories.
main
...
feature/up
|
|
@ -25,7 +25,6 @@ FROM alpine:latest
|
|||
|
||||
RUN apk --no-cache add ca-certificates
|
||||
RUN mkdir -p /data
|
||||
RUN mkdir -p /scratch
|
||||
|
||||
WORKDIR /root/
|
||||
|
||||
|
|
@ -35,7 +34,6 @@ COPY --from=builder /app/static ./static
|
|||
COPY --from=builder /app/views ./views
|
||||
|
||||
ENV DATA_DIR=/data
|
||||
ENV SCRATCH_DIR=/scratch
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
|
|
|
|||
|
|
@ -10,16 +10,21 @@ $container-max-widths: (
|
|||
|
||||
@import "bootstrap/scss/bootstrap.scss";
|
||||
|
||||
// Navbar
|
||||
// Local classes
|
||||
|
||||
.navbar-site-visit {
|
||||
display: inline-block;
|
||||
line-height: 2em;
|
||||
margin-bottom: 4px;
|
||||
margin-right: 10px;
|
||||
.post-form {
|
||||
display: grid;
|
||||
grid-template-rows: min-content auto min-content;
|
||||
position: absolute;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
// Post list
|
||||
.post-form textarea {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.postlist .post img {
|
||||
max-width: 300px;
|
||||
|
|
@ -27,54 +32,6 @@ $container-max-widths: (
|
|||
max-height: 300px;
|
||||
}
|
||||
|
||||
.postlist .post-date {
|
||||
font-size: 0.9rem;
|
||||
}
|
||||
|
||||
// Large editor
|
||||
//
|
||||
// Used for edit canvases which take up the entire window
|
||||
|
||||
.large-editor {
|
||||
height: 100vh;
|
||||
}
|
||||
|
||||
.large-editor main {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
// Post form
|
||||
|
||||
// Post edit page styling
|
||||
|
||||
.post-edit-page .post-form {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.post-edit-page .post-form .row {
|
||||
flex: 1;
|
||||
display: flex;
|
||||
min-height: 0;
|
||||
}
|
||||
|
||||
.post-edit-page .post-form .col-md-9 {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
|
||||
.post-edit-page .post-form textarea {
|
||||
flex: 1;
|
||||
resize: vertical;
|
||||
min-height: 300px;
|
||||
}
|
||||
|
||||
|
||||
|
||||
.show-upload figure img {
|
||||
max-width: 100vw;
|
||||
height: auto;
|
||||
|
|
|
|||
|
|
@ -1,233 +0,0 @@
|
|||
import feather from "feather-icons/dist/feather.js";
|
||||
import Handlebars from "handlebars";
|
||||
import {Controller} from "@hotwired/stimulus";
|
||||
|
||||
Handlebars.registerHelper("submit_on", function (id, event) {
|
||||
return `data-action="${event}->edit-upload#updateProcessor" data-edit-upload-id-param="${id}"`
|
||||
});
|
||||
|
||||
const processorFrame = Handlebars.compile(`
|
||||
<div class="card mb-3">
|
||||
<div class="card-header d-flex justify-content-between align-items-center">
|
||||
<span>{{name}}</span>
|
||||
<a href="#" class="float-end"
|
||||
data-action="edit-upload#removeProcessor"
|
||||
data-edit-upload-id-param="{{id}}"
|
||||
><i data-feather="x" width="18" height="18"></i></a>
|
||||
</div>
|
||||
<div class="card-body">
|
||||
<form data-role="processor-params" data-params-id="{{id}}">{{{props}}}</form>
|
||||
</div>
|
||||
</div>
|
||||
`);
|
||||
|
||||
const processorUIs = {
|
||||
"shadow": {
|
||||
label: "Shadow",
|
||||
template: Handlebars.compile(`
|
||||
<div class="row mb-3 align-items-center">
|
||||
<label for="{{id}}_color" class="col-sm col-form-label">Colour</label>
|
||||
<div class="col-sm">
|
||||
<input name="color" class="form-control" id="{{id}}_color" type="color" value="{{props.color}}" {{{submit_on id 'change'}}}>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row mb-3">
|
||||
<label for="{{id}}_offset_y" class="col-sm col-form-label">Offset Y</label>
|
||||
<div class="col-sm">
|
||||
<input name="offset_y" class="form-control" id="{{id}}_{{props.color}}" type="number" value="{{props.offset_y}}" {{{submit_on id 'blur'}}}>
|
||||
</div>
|
||||
</div>
|
||||
`),
|
||||
},
|
||||
"resize": {
|
||||
label: "Resize",
|
||||
template: Handlebars.compile(`
|
||||
<div class="mb-3">
|
||||
<label for="{{id}}_width" class="form-label">Width</label>
|
||||
<input name="width" class="form-control" id="{{id}}_width">
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label for="{{id}}_height" class="form-label">Height</label>
|
||||
<input name="width" class="form-control" id="{{id}}_width">
|
||||
</div>
|
||||
`),
|
||||
},
|
||||
};
|
||||
|
||||
export default class UploadEditController extends Controller {
|
||||
static targets = ['processList', 'preview'];
|
||||
static values = {
|
||||
uploadId: Number,
|
||||
siteId: Number,
|
||||
};
|
||||
|
||||
connect() {
|
||||
this._rebuildProcessList();
|
||||
this._createSession();
|
||||
}
|
||||
|
||||
async addProcessor(ev) {
|
||||
ev.preventDefault();
|
||||
await this._addProcessor({
|
||||
type: "shadow"
|
||||
});
|
||||
}
|
||||
|
||||
async removeProcessor(ev) {
|
||||
ev.preventDefault();
|
||||
let id = ev.params.id;
|
||||
await this._removeProcessor(id);
|
||||
}
|
||||
|
||||
async saveUpload(ev) {
|
||||
ev.preventDefault();
|
||||
await this._save("replace");
|
||||
}
|
||||
|
||||
async saveNewUpload(ev) {
|
||||
ev.preventDefault();
|
||||
await this._save("copy");
|
||||
}
|
||||
|
||||
async updateProcessor(ev) {
|
||||
ev.preventDefault();
|
||||
let id = ev.params.id;
|
||||
|
||||
let paramParentEl = ev.target.closest('[data-role="processor-params"]');
|
||||
let params = Object.fromEntries(new FormData(paramParentEl).entries());
|
||||
|
||||
await this._updateProcessor(id, params);
|
||||
}
|
||||
|
||||
_rebuildProcessList() {
|
||||
let el = this.processListTarget;
|
||||
|
||||
if ((!this._state) || (!this._state.session) || (!this._state.session.processors)) {
|
||||
return;
|
||||
}
|
||||
|
||||
el.innerHTML = "";
|
||||
for (let p of this._state.session.processors) {
|
||||
let ui = processorUIs[p.type];
|
||||
if (!ui) {
|
||||
continue;
|
||||
}
|
||||
let cardOuter = processorFrame({
|
||||
id: p.id,
|
||||
name: ui.label,
|
||||
props: ui.template(p),
|
||||
});
|
||||
el.innerHTML += cardOuter;
|
||||
}
|
||||
|
||||
feather.replace();
|
||||
}
|
||||
|
||||
async _createSession() {
|
||||
try {
|
||||
let resp = await fetch(`/sites/${this.siteIdValue}/imageedit/`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
"base_upload": this.uploadIdValue,
|
||||
})
|
||||
});
|
||||
|
||||
this._state = await resp.json();
|
||||
|
||||
this._rebuildProcessList();
|
||||
this.previewTarget.src = this._state.preview_url;
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
async _addProcessor(processor) {
|
||||
try {
|
||||
let resp = await fetch(`/sites/${this.siteIdValue}/imageedit/${this._state.session.guid}/processors`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify(processor)
|
||||
});
|
||||
|
||||
this._state = await resp.json();
|
||||
|
||||
this._rebuildProcessList();
|
||||
this.previewTarget.src = this._state.preview_url;
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
async _updateProcessor(processorID, params) {
|
||||
await this._doReturningState(async () => {
|
||||
return (await fetch(`/sites/${this.siteIdValue}/imageedit/${this._state.session.guid}`, {
|
||||
method: 'PATCH',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
processor: {
|
||||
id: processorID,
|
||||
props: params,
|
||||
}
|
||||
})
|
||||
})).json();
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
async _removeProcessor(processorID) {
|
||||
await this._doReturningState(async () => {
|
||||
return (await fetch(`/sites/${this.siteIdValue}/imageedit/${this._state.session.guid}/processors/${processorID}`, {
|
||||
method: 'DELETE',
|
||||
})).json();
|
||||
})
|
||||
}
|
||||
|
||||
async _save(mode) {
|
||||
if (!this._state || !this._state.session) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
let resp = await fetch(`/sites/${this.siteIdValue}/imageedit/${this._state.session.guid}/save`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({ mode })
|
||||
});
|
||||
|
||||
if (!resp.ok) {
|
||||
console.error("Save failed:", resp.statusText);
|
||||
return;
|
||||
}
|
||||
|
||||
let result = await resp.json();
|
||||
window.location.href = `/sites/${this.siteIdValue}/uploads/${result.upload_id}`;
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
}
|
||||
|
||||
async _doReturningState(fn) {
|
||||
try {
|
||||
this._state = await fn();
|
||||
|
||||
this._rebuildProcessList();
|
||||
this.previewTarget.src = this._state.preview_url;
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
@ -1,63 +0,0 @@
|
|||
import { Controller } from "@hotwired/stimulus"
|
||||
import { showToast } from "../services/toast";
|
||||
|
||||
export default class PagelistController extends Controller {
|
||||
static values = {
|
||||
siteId: Number,
|
||||
};
|
||||
|
||||
static targets = ["list"];
|
||||
|
||||
dragStart(ev) {
|
||||
this.draggedRow = ev.currentTarget;
|
||||
ev.currentTarget.classList.add("opacity-50");
|
||||
ev.dataTransfer.effectAllowed = "move";
|
||||
}
|
||||
|
||||
dragOver(ev) {
|
||||
ev.preventDefault();
|
||||
ev.dataTransfer.dropEffect = "move";
|
||||
}
|
||||
|
||||
drop(ev) {
|
||||
ev.preventDefault();
|
||||
const targetRow = ev.currentTarget;
|
||||
if (this.draggedRow && this.draggedRow !== targetRow) {
|
||||
const rows = [...this.listTarget.children];
|
||||
const draggedIdx = rows.indexOf(this.draggedRow);
|
||||
const targetIdx = rows.indexOf(targetRow);
|
||||
if (draggedIdx < targetIdx) {
|
||||
targetRow.after(this.draggedRow);
|
||||
} else {
|
||||
targetRow.before(this.draggedRow);
|
||||
}
|
||||
this.saveOrder();
|
||||
}
|
||||
}
|
||||
|
||||
dragEnd(ev) {
|
||||
ev.currentTarget.classList.remove("opacity-50");
|
||||
this.draggedRow = null;
|
||||
}
|
||||
|
||||
async saveOrder() {
|
||||
const rows = [...this.listTarget.children];
|
||||
const pageIds = rows.map(row => parseInt(row.dataset.pageId, 10));
|
||||
|
||||
try {
|
||||
await fetch(`/sites/${this.siteIdValue}/pages/reorder`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
"Accept": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ page_ids: pageIds }),
|
||||
});
|
||||
} catch (error) {
|
||||
showToast({
|
||||
title: "Error",
|
||||
body: "Failed to reorder pages.",
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -14,16 +14,11 @@ export default class PosteditController extends Controller {
|
|||
async save(ev) {
|
||||
ev.preventDefault();
|
||||
|
||||
showToast({
|
||||
title: "💾 Saving Post",
|
||||
body: (this.saveActionValue === "Save Draft") ? "Saving post as draft…" : "Updating post…",
|
||||
});
|
||||
|
||||
try {
|
||||
await this._postForm(this.saveActionValue);
|
||||
|
||||
showToast({
|
||||
title: "💾 Saving Post",
|
||||
title: "💾 Post Saved",
|
||||
body: (this.saveActionValue === "Save Draft") ? "Post saved as draft." : "Post updated.",
|
||||
});
|
||||
} catch (e) {
|
||||
|
|
@ -60,16 +55,6 @@ export default class PosteditController extends Controller {
|
|||
try {
|
||||
const formData = new FormData(this.element);
|
||||
let data = Object.fromEntries(formData.entries());
|
||||
|
||||
// Special handling for categories
|
||||
let categoryIDs = [];
|
||||
for (let i of formData.entries()) {
|
||||
if (i[0] === "category_ids") {
|
||||
categoryIDs.push(parseInt(i[1]))
|
||||
}
|
||||
}
|
||||
|
||||
data["category_ids"] = categoryIDs;
|
||||
data = {...data, action: action || 'save'};
|
||||
|
||||
const response = await fetch(this.element.getAttribute("action"), {
|
||||
|
|
|
|||
|
|
@ -1,4 +1,3 @@
|
|||
import feather from "feather-icons/dist/feather.js";
|
||||
import { Application } from "@hotwired/stimulus";
|
||||
|
||||
import ToastController from "./controllers/toast";
|
||||
|
|
@ -8,8 +7,6 @@ import LogoutController from "./controllers/logout";
|
|||
import FirstRunController from "./controllers/firstrun";
|
||||
import UploadController from "./controllers/upload";
|
||||
import ShowUploadController from "./controllers/show_upload";
|
||||
import EditUploadController from "./controllers/edit_upload";
|
||||
import PagelistController from "./controllers/pagelist";
|
||||
|
||||
window.Stimulus = Application.start()
|
||||
Stimulus.register("toast", ToastController);
|
||||
|
|
@ -18,8 +15,4 @@ Stimulus.register("postedit", PosteditController);
|
|||
Stimulus.register("logout", LogoutController);
|
||||
Stimulus.register("first-run", FirstRunController);
|
||||
Stimulus.register("upload", UploadController);
|
||||
Stimulus.register("show-upload", ShowUploadController);
|
||||
Stimulus.register("edit-upload", EditUploadController);
|
||||
Stimulus.register("pagelist", PagelistController);
|
||||
|
||||
feather.replace();
|
||||
Stimulus.register("show-upload", ShowUploadController);
|
||||
|
|
@ -109,28 +109,14 @@ Starting weiro without any arguments will start the server.
|
|||
|
||||
ih := handlers.IndexHandler{SiteService: svcs.Sites}
|
||||
lh := handlers.LoginHandler{Config: cfg, AuthService: svcs.Auth}
|
||||
ph := handlers.PostsHandler{PostService: svcs.Posts, CategoryService: svcs.Categories}
|
||||
ph := handlers.PostsHandler{PostService: svcs.Posts}
|
||||
uh := handlers.UploadsHandler{UploadsService: svcs.Uploads}
|
||||
ieh := handlers.ImageEditHandlers{ImageEditService: svcs.ImageEdit}
|
||||
ssh := handlers.SiteSettingsHandler{SiteService: svcs.Sites}
|
||||
ch := handlers.CategoriesHandler{CategoryService: svcs.Categories}
|
||||
pgh := handlers.PagesHandler{PageService: svcs.Pages}
|
||||
|
||||
app.Get("/login", lh.Login)
|
||||
app.Post("/login", lh.DoLogin)
|
||||
app.Post("/logout", lh.Logout)
|
||||
|
||||
app.Get("/", middleware.OptionalUser(svcs.Auth), ih.Index)
|
||||
app.Get("/first-run", ih.FirstRun)
|
||||
app.Post("/first-run", ih.FirstRunSubmit)
|
||||
|
||||
app.Get("/static/*", static.New("./static"))
|
||||
|
||||
app.Use(middleware.LogErrors(), middleware.RequireUser(svcs.Auth))
|
||||
|
||||
app.Get("/sites/new", ssh.New)
|
||||
app.Post("/sites", ssh.Create)
|
||||
siteGroup := app.Group("/sites/:siteID", middleware.RequiresSite(svcs.Sites))
|
||||
siteGroup := app.Group("/sites/:siteID", middleware.RequireUser(svcs.Auth), middleware.RequiresSite(svcs.Sites))
|
||||
|
||||
siteGroup.Get("/posts", ph.Index)
|
||||
siteGroup.Get("/posts/new", ph.New)
|
||||
|
|
@ -139,9 +125,6 @@ Starting weiro without any arguments will start the server.
|
|||
siteGroup.Patch("/posts/:postID", ph.Patch)
|
||||
siteGroup.Delete("/posts/:postID", ph.Delete)
|
||||
|
||||
// TODO Move
|
||||
siteGroup.Post("/rebuild", ph.Rebuild)
|
||||
|
||||
siteGroup.Get("/uploads", uh.Index)
|
||||
siteGroup.Get("/uploads/slug/+", uh.ShowFromSlug)
|
||||
siteGroup.Get("/uploads/:uploadID", uh.Show)
|
||||
|
|
@ -150,32 +133,12 @@ Starting weiro without any arguments will start the server.
|
|||
siteGroup.Post("/uploads/pending/:guid", uh.UploadPart)
|
||||
siteGroup.Post("/uploads/pending/:guid/finalize", uh.UploadComplete)
|
||||
siteGroup.Delete("/uploads/:uploadID", uh.Delete)
|
||||
siteGroup.Get("/uploads/:uploadID/edit", uh.Edit)
|
||||
|
||||
siteGroup.Post("/imageedit", ieh.Create)
|
||||
siteGroup.Patch("/imageedit/:sessionID", ieh.PatchSession)
|
||||
siteGroup.Post("/imageedit/:sessionID/processors", ieh.AddProcessor)
|
||||
siteGroup.Delete("/imageedit/:sessionID/processors/:processorID", ieh.DeleteProcessor)
|
||||
siteGroup.Post("/imageedit/:sessionID/save", ieh.Save)
|
||||
siteGroup.Get("/imageedit/:sessionID/preview/:versionID", ieh.Preview)
|
||||
app.Get("/", middleware.OptionalUser(svcs.Auth), ih.Index)
|
||||
app.Get("/first-run", ih.FirstRun)
|
||||
app.Post("/first-run", ih.FirstRunSubmit)
|
||||
|
||||
siteGroup.Get("/settings", ssh.General)
|
||||
siteGroup.Post("/settings", ssh.UpdateGeneral)
|
||||
|
||||
siteGroup.Get("/categories", ch.Index)
|
||||
siteGroup.Get("/categories/new", ch.New)
|
||||
siteGroup.Get("/categories/:categoryID", ch.Edit)
|
||||
siteGroup.Post("/categories", ch.Create)
|
||||
siteGroup.Post("/categories/:categoryID", ch.Update)
|
||||
siteGroup.Post("/categories/:categoryID/delete", ch.Delete)
|
||||
|
||||
siteGroup.Get("/pages", pgh.Index)
|
||||
siteGroup.Get("/pages/new", pgh.New)
|
||||
siteGroup.Get("/pages/:pageID", pgh.Edit)
|
||||
siteGroup.Post("/pages", pgh.Create)
|
||||
siteGroup.Post("/pages/reorder", pgh.Reorder)
|
||||
siteGroup.Post("/pages/:pageID", pgh.Update)
|
||||
siteGroup.Post("/pages/:pageID/delete", pgh.Delete)
|
||||
app.Get("/static/*", static.New("./static"))
|
||||
|
||||
if err := app.Listen(":3000"); err != nil {
|
||||
log.Println(err)
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ import (
|
|||
|
||||
type Config struct {
|
||||
DataDir string `env:"DATA_DIR"`
|
||||
ScratchDir string `env:"SCRATCH_DIR,default=/tmp"`
|
||||
ScratchDir string `env:"SCRATCH_DIR"`
|
||||
SiteDomain string `env:"SITE_DOMAIN"`
|
||||
LoginLocked bool `env:"LOGIN_LOCKED,default=false"`
|
||||
Env string `env:"ENV,default=prod"`
|
||||
|
|
|
|||
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
|
@ -1,888 +0,0 @@
|
|||
# Paging Feature Implementation Plan
|
||||
|
||||
> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking.
|
||||
|
||||
**Goal:** Add offset-based pagination to the admin post list and the generated static site (posts and category listings).
|
||||
|
||||
**Architecture:** Add a `posts_per_page` column to the `sites` table for configurable page size on the generated site. Admin uses a hardcoded page size of 25. The existing `db.PagingParams` and `LIMIT/OFFSET` SQL infrastructure is reused. A shared `models.PageInfo` type carries pagination state to templates.
|
||||
|
||||
**Tech Stack:** Go, SQLite, sqlc, Fiber v3, html/template, Bootstrap
|
||||
|
||||
---
|
||||
|
||||
### Task 1: Add `posts_per_page` column and regenerate sqlc
|
||||
|
||||
**Files:**
|
||||
- Create: `sql/schema/05_posts_per_page.up.sql`
|
||||
- Modify: `sql/queries/sites.sql:10-19` (InsertSite query)
|
||||
- Modify: `sql/queries/sites.sql:24-25` (UpdateSite query)
|
||||
- Regenerate: `providers/db/gen/sqlgen/` (sqlc output)
|
||||
|
||||
- [ ] **Step 1: Create migration file**
|
||||
|
||||
Create `sql/schema/05_posts_per_page.up.sql`:
|
||||
```sql
|
||||
ALTER TABLE sites ADD COLUMN posts_per_page INTEGER NOT NULL DEFAULT 10;
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Update the InsertSite SQL query**
|
||||
|
||||
In `sql/queries/sites.sql`, update the InsertSite query (lines 10-19) to include `posts_per_page`:
|
||||
```sql
|
||||
-- name: InsertSite :one
|
||||
INSERT INTO sites (
|
||||
owner_id,
|
||||
guid,
|
||||
title,
|
||||
tagline,
|
||||
timezone,
|
||||
posts_per_page,
|
||||
created_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
RETURNING id;
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Update the UpdateSite SQL query**
|
||||
|
||||
In `sql/queries/sites.sql`, update line 24-25:
|
||||
```sql
|
||||
-- name: UpdateSite :exec
|
||||
UPDATE sites SET title = ?, tagline = ?, timezone = ?, posts_per_page = ? WHERE id = ?;
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Regenerate sqlc**
|
||||
|
||||
Run: `sqlc generate`
|
||||
Expected: `providers/db/gen/sqlgen/` files updated with new `PostsPerPage` field on `Site` struct, updated `InsertSiteParams` and `UpdateSiteParams`.
|
||||
|
||||
- [ ] **Step 5: Run tests to verify nothing broke**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All existing tests pass.
|
||||
|
||||
- [ ] **Step 6: Commit**
|
||||
|
||||
```bash
|
||||
git add sql/schema/05_posts_per_page.up.sql sql/queries/sites.sql providers/db/gen/sqlgen/
|
||||
git commit -m "feat: add posts_per_page column to sites table"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 2: Update Site model and DB provider for `PostsPerPage`
|
||||
|
||||
**Files:**
|
||||
- Modify: `models/sites.go:24-33` (Site struct)
|
||||
- Modify: `providers/db/sites.go:42-65` (SaveSite)
|
||||
- Modify: `providers/db/sites.go:102-112` (dbSiteToSite)
|
||||
|
||||
- [ ] **Step 1: Add `PostsPerPage` to `models.Site`**
|
||||
|
||||
In `models/sites.go`, add to the `Site` struct (after `Timezone`):
|
||||
```go
|
||||
PostsPerPage int
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Update `dbSiteToSite` in `providers/db/sites.go`**
|
||||
|
||||
In `providers/db/sites.go`, update `dbSiteToSite` (line 102) to map the new field:
|
||||
```go
|
||||
func dbSiteToSite(row sqlgen.Site) models.Site {
|
||||
return models.Site{
|
||||
ID: row.ID,
|
||||
OwnerID: row.OwnerID,
|
||||
GUID: row.Guid,
|
||||
Title: row.Title,
|
||||
Timezone: row.Timezone,
|
||||
Tagline: row.Tagline,
|
||||
PostsPerPage: int(row.PostsPerPage),
|
||||
Created: time.Unix(row.CreatedAt, 0).UTC(),
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Update `SaveSite` to include `PostsPerPage`**
|
||||
|
||||
In `providers/db/sites.go`, update the `InsertSite` call (line 44) to include `PostsPerPage`:
|
||||
```go
|
||||
newID, err := db.queries.InsertSite(ctx, sqlgen.InsertSiteParams{
|
||||
OwnerID: site.OwnerID,
|
||||
Guid: site.GUID,
|
||||
Title: site.Title,
|
||||
Tagline: site.Tagline,
|
||||
Timezone: site.Timezone,
|
||||
PostsPerPage: int64(site.PostsPerPage),
|
||||
CreatedAt: timeToInt(site.Created),
|
||||
})
|
||||
```
|
||||
|
||||
Update the `UpdateSite` call (line 59) to include `PostsPerPage`:
|
||||
```go
|
||||
return db.queries.UpdateSite(ctx, sqlgen.UpdateSiteParams{
|
||||
Title: site.Title,
|
||||
Tagline: site.Tagline,
|
||||
Timezone: site.Timezone,
|
||||
PostsPerPage: int64(site.PostsPerPage),
|
||||
ID: site.ID,
|
||||
})
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Run tests**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All tests pass.
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add models/sites.go providers/db/sites.go sql/queries/sites.sql providers/db/gen/sqlgen/
|
||||
git commit -m "feat: add PostsPerPage to Site model and DB provider"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 3: Add `CountPostsOfSite` SQL query and DB method
|
||||
|
||||
**Files:**
|
||||
- Modify: `sql/queries/posts.sql` (add count query)
|
||||
- Modify: `providers/db/posts.go` (add CountPostsOfSite method)
|
||||
- Modify: `providers/db/provider_test.go` (add test)
|
||||
- Regenerate: `providers/db/gen/sqlgen/`
|
||||
|
||||
- [ ] **Step 1: Write the failing test**
|
||||
|
||||
Add to `providers/db/provider_test.go` inside `TestProvider_Posts`:
|
||||
```go
|
||||
t.Run("count posts of site", func(t *testing.T) {
|
||||
countSite := &models.Site{
|
||||
OwnerID: user.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: "Count Blog",
|
||||
}
|
||||
require.NoError(t, p.SaveSite(ctx, countSite))
|
||||
|
||||
now := time.Date(2026, 3, 22, 12, 0, 0, 0, time.UTC)
|
||||
for i := 0; i < 3; i++ {
|
||||
post := &models.Post{
|
||||
SiteID: countSite.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: fmt.Sprintf("Post %d", i),
|
||||
Body: "body",
|
||||
Slug: fmt.Sprintf("/post-%d", i),
|
||||
CreatedAt: now,
|
||||
}
|
||||
require.NoError(t, p.SavePost(ctx, post))
|
||||
}
|
||||
|
||||
count, err := p.CountPostsOfSite(ctx, countSite.ID, false)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(3), count)
|
||||
|
||||
// Soft-delete one post
|
||||
posts, err := p.SelectPostsOfSite(ctx, countSite.ID, false, db.PagingParams{Limit: 10, Offset: 0})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, p.SoftDeletePost(ctx, posts[0].ID))
|
||||
|
||||
count, err = p.CountPostsOfSite(ctx, countSite.ID, false)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(2), count)
|
||||
|
||||
count, err = p.CountPostsOfSite(ctx, countSite.ID, true)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(1), count)
|
||||
})
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run test to verify it fails**
|
||||
|
||||
Run: `go test ./providers/db/ -run TestProvider_Posts/count_posts_of_site -v`
|
||||
Expected: FAIL — `CountPostsOfSite` method does not exist.
|
||||
|
||||
- [ ] **Step 3: Add SQL query**
|
||||
|
||||
Add to `sql/queries/posts.sql`:
|
||||
```sql
|
||||
-- name: CountPostsOfSite :one
|
||||
SELECT COUNT(*) FROM posts
|
||||
WHERE site_id = sqlc.arg(site_id) AND (
|
||||
CASE CAST (sqlc.arg(post_filter) AS TEXT)
|
||||
WHEN 'deleted' THEN deleted_at > 0
|
||||
ELSE deleted_at = 0
|
||||
END
|
||||
);
|
||||
```
|
||||
|
||||
Run: `sqlc generate`
|
||||
|
||||
- [ ] **Step 4: Add DB provider method**
|
||||
|
||||
Add to `providers/db/posts.go`:
|
||||
```go
|
||||
func (db *Provider) CountPostsOfSite(ctx context.Context, siteID int64, showDeleted bool) (int64, error) {
|
||||
filter := "active"
|
||||
if showDeleted {
|
||||
filter = "deleted"
|
||||
}
|
||||
return db.queries.CountPostsOfSite(ctx, sqlgen.CountPostsOfSiteParams{
|
||||
SiteID: siteID,
|
||||
PostFilter: filter,
|
||||
})
|
||||
}
|
||||
```
|
||||
|
||||
Note: check the generated `sqlgen.CountPostsOfSiteParams` struct name and fields after `sqlc generate` — adjust if the field names differ.
|
||||
|
||||
- [ ] **Step 5: Run test to verify it passes**
|
||||
|
||||
Run: `go test ./providers/db/ -run TestProvider_Posts/count_posts_of_site -v`
|
||||
Expected: PASS
|
||||
|
||||
- [ ] **Step 6: Run all tests**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All pass.
|
||||
|
||||
- [ ] **Step 7: Commit**
|
||||
|
||||
```bash
|
||||
git add sql/queries/posts.sql providers/db/posts.go providers/db/provider_test.go providers/db/gen/sqlgen/
|
||||
git commit -m "feat: add CountPostsOfSite query and DB method"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 4: Add `models.PageInfo` type
|
||||
|
||||
**Files:**
|
||||
- Create: `models/paging.go`
|
||||
|
||||
- [ ] **Step 1: Create `models/paging.go`**
|
||||
|
||||
```go
|
||||
package models
|
||||
|
||||
// PageInfo carries pagination state for templates.
|
||||
type PageInfo struct {
|
||||
CurrentPage int
|
||||
TotalPages int
|
||||
PostsPerPage int
|
||||
}
|
||||
|
||||
// HasPrevious returns true if there is a previous page.
|
||||
func (p PageInfo) HasPrevious() bool {
|
||||
return p.CurrentPage > 1
|
||||
}
|
||||
|
||||
// HasNext returns true if there is a next page.
|
||||
func (p PageInfo) HasNext() bool {
|
||||
return p.CurrentPage < p.TotalPages
|
||||
}
|
||||
|
||||
// PreviousPage returns the previous page number.
|
||||
func (p PageInfo) PreviousPage() int {
|
||||
return p.CurrentPage - 1
|
||||
}
|
||||
|
||||
// NextPage returns the next page number.
|
||||
func (p PageInfo) NextPage() int {
|
||||
return p.CurrentPage + 1
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Run tests**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All pass (no tests yet for this type, but it should compile).
|
||||
|
||||
- [ ] **Step 3: Commit**
|
||||
|
||||
```bash
|
||||
git add models/paging.go
|
||||
git commit -m "feat: add PageInfo model for pagination"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 5: Add pagination to admin post list (service + handler)
|
||||
|
||||
**Files:**
|
||||
- Modify: `services/posts/list.go:15-38` (ListPosts signature and implementation)
|
||||
- Modify: `handlers/posts.go:18-39` (Index handler)
|
||||
|
||||
- [ ] **Step 1: Update `ListPosts` to accept paging params and return count**
|
||||
|
||||
Replace `services/posts/list.go` `ListPosts` method:
|
||||
```go
|
||||
type ListPostsResult struct {
|
||||
Posts []*PostWithCategories
|
||||
TotalCount int64
|
||||
}
|
||||
|
||||
func (s *Service) ListPosts(ctx context.Context, showDeleted bool, paging db.PagingParams) (ListPostsResult, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return ListPostsResult{}, models.SiteRequiredError
|
||||
}
|
||||
|
||||
posts, err := s.db.SelectPostsOfSite(ctx, site.ID, showDeleted, paging)
|
||||
if err != nil {
|
||||
return ListPostsResult{}, err
|
||||
}
|
||||
|
||||
count, err := s.db.CountPostsOfSite(ctx, site.ID, showDeleted)
|
||||
if err != nil {
|
||||
return ListPostsResult{}, err
|
||||
}
|
||||
|
||||
result := make([]*PostWithCategories, len(posts))
|
||||
for i, post := range posts {
|
||||
cats, err := s.db.SelectCategoriesOfPost(ctx, post.ID)
|
||||
if err != nil {
|
||||
return ListPostsResult{}, err
|
||||
}
|
||||
result[i] = &PostWithCategories{Post: post, Categories: cats}
|
||||
}
|
||||
return ListPostsResult{Posts: result, TotalCount: count}, nil
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Update the admin handler**
|
||||
|
||||
Replace `handlers/posts.go` `Index` method:
|
||||
```go
|
||||
func (ph PostsHandler) Index(c fiber.Ctx) error {
|
||||
var req struct {
|
||||
Filter string `query:"filter"`
|
||||
Page int `query:"page"`
|
||||
}
|
||||
if err := c.Bind().Query(&req); err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
const perPage = 25
|
||||
if req.Page < 1 {
|
||||
req.Page = 1
|
||||
}
|
||||
|
||||
result, err := ph.PostService.ListPosts(c.Context(), req.Filter == "deleted", db.PagingParams{
|
||||
Offset: int64((req.Page - 1) * perPage),
|
||||
Limit: perPage,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
totalPages := int(result.TotalCount+int64(perPage)-1) / perPage
|
||||
if totalPages < 1 {
|
||||
totalPages = 1
|
||||
}
|
||||
|
||||
pageInfo := models.PageInfo{
|
||||
CurrentPage: req.Page,
|
||||
TotalPages: totalPages,
|
||||
PostsPerPage: perPage,
|
||||
}
|
||||
|
||||
return accepts(c, json(func() any {
|
||||
return result.Posts
|
||||
}), html(func(c fiber.Ctx) error {
|
||||
return c.Render("posts/index", fiber.Map{
|
||||
"req": req,
|
||||
"posts": result.Posts,
|
||||
"pageInfo": pageInfo,
|
||||
})
|
||||
}))
|
||||
}
|
||||
```
|
||||
|
||||
Note: add `"lmika.dev/lmika/weiro/providers/db"` and `"lmika.dev/lmika/weiro/models"` to imports in `handlers/posts.go`.
|
||||
|
||||
- [ ] **Step 3: Verify it compiles**
|
||||
|
||||
Run: `go build ./...`
|
||||
Expected: Compiles successfully.
|
||||
|
||||
- [ ] **Step 4: Run tests**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All pass.
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add services/posts/list.go handlers/posts.go
|
||||
git commit -m "feat: add pagination to admin post list handler and service"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 6: Add pagination UI to admin post list template
|
||||
|
||||
**Files:**
|
||||
- Modify: `views/posts/index.html`
|
||||
|
||||
- [ ] **Step 1: Add pagination controls to admin template**
|
||||
|
||||
Add pagination controls after the post list in `views/posts/index.html`. Insert before the closing `</main>` tag:
|
||||
|
||||
```html
|
||||
{{ if gt .pageInfo.TotalPages 1 }}
|
||||
<nav aria-label="Page navigation" class="my-4">
|
||||
<ul class="pagination justify-content-center">
|
||||
<li class="page-item{{ if not .pageInfo.HasPrevious }} disabled{{ end }}">
|
||||
<a class="page-link" href="?page={{ .pageInfo.PreviousPage }}{{ if .req.Filter }}&filter={{ .req.Filter }}{{ end }}">Previous</a>
|
||||
</li>
|
||||
{{ range $p := .pageInfo.Pages }}
|
||||
<li class="page-item{{ if eq $p $.pageInfo.CurrentPage }} active{{ end }}">
|
||||
<a class="page-link" href="?page={{ $p }}{{ if $.req.Filter }}&filter={{ $.req.Filter }}{{ end }}">{{ $p }}</a>
|
||||
</li>
|
||||
{{ end }}
|
||||
<li class="page-item{{ if not .pageInfo.HasNext }} disabled{{ end }}">
|
||||
<a class="page-link" href="?page={{ .pageInfo.NextPage }}{{ if .req.Filter }}&filter={{ .req.Filter }}{{ end }}">Next</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
{{ end }}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Add `Pages` method to `PageInfo`**
|
||||
|
||||
Add to `models/paging.go`:
|
||||
```go
|
||||
// Pages returns a slice of page numbers for rendering numbered pagination.
|
||||
func (p PageInfo) Pages() []int {
|
||||
pages := make([]int, p.TotalPages)
|
||||
for i := range pages {
|
||||
pages[i] = i + 1
|
||||
}
|
||||
return pages
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Verify it compiles and test manually**
|
||||
|
||||
Run: `go build ./...`
|
||||
Expected: Compiles.
|
||||
|
||||
- [ ] **Step 4: Commit**
|
||||
|
||||
```bash
|
||||
git add views/posts/index.html models/paging.go
|
||||
git commit -m "feat: add pagination controls to admin post list"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 7: Add site settings form for `PostsPerPage`
|
||||
|
||||
**Files:**
|
||||
- Modify: `views/sitesettings/general.html:17-48` (form)
|
||||
- Modify: `services/sites/services.go:131-158` (UpdateSiteSettingsParams and UpdateSiteSettings)
|
||||
|
||||
- [ ] **Step 1: Add `PostsPerPage` to `UpdateSiteSettingsParams`**
|
||||
|
||||
In `services/sites/services.go`, update the struct (line 131):
|
||||
```go
|
||||
type UpdateSiteSettingsParams struct {
|
||||
SiteID int64 `form:"siteID"`
|
||||
Name string `form:"name"`
|
||||
Tagline string `form:"tagline"`
|
||||
Timezone string `form:"timezone"`
|
||||
PostsPerPage int `form:"postsPerPage"`
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Update `UpdateSiteSettings` to handle `PostsPerPage`**
|
||||
|
||||
In `services/sites/services.go`, update `UpdateSiteSettings` (line 138) to validate and set the new field:
|
||||
```go
|
||||
func (s *Service) UpdateSiteSettings(ctx context.Context, params UpdateSiteSettingsParams) (models.Site, error) {
|
||||
site, err := s.GetSiteByID(ctx, params.SiteID)
|
||||
if err != nil {
|
||||
return models.Site{}, err
|
||||
}
|
||||
|
||||
_, err = time.LoadLocation(params.Timezone)
|
||||
if err != nil {
|
||||
return models.Site{}, errors.Wrap(err, "invalid timezone")
|
||||
}
|
||||
|
||||
postsPerPage := params.PostsPerPage
|
||||
if postsPerPage < 1 {
|
||||
postsPerPage = 1
|
||||
} else if postsPerPage > 100 {
|
||||
postsPerPage = 100
|
||||
}
|
||||
|
||||
site.Title = params.Name
|
||||
site.Tagline = params.Tagline
|
||||
site.Timezone = params.Timezone
|
||||
site.PostsPerPage = postsPerPage
|
||||
|
||||
if err := s.db.SaveSite(ctx, &site); err != nil {
|
||||
return models.Site{}, err
|
||||
}
|
||||
|
||||
return site, nil
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Add form field to settings template**
|
||||
|
||||
In `views/sitesettings/general.html`, add after the Timezone field (after line 43, before the submit button row):
|
||||
```html
|
||||
<div class="row mb-3">
|
||||
<label for="postsPerPage" class="col-sm-3 col-form-label text-end">Posts Per Page</label>
|
||||
<div class="col-sm-3">
|
||||
<input type="number" class="form-control" id="postsPerPage" name="postsPerPage" value="{{ .site.PostsPerPage }}" min="1" max="100">
|
||||
<div class="form-text">Number of posts per page on the generated site.</div>
|
||||
</div>
|
||||
</div>
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Verify it compiles**
|
||||
|
||||
Run: `go build ./...`
|
||||
Expected: Compiles.
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add services/sites/services.go views/sitesettings/general.html
|
||||
git commit -m "feat: add posts per page setting to site settings"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 8: Add pagination to generated site post list
|
||||
|
||||
**Files:**
|
||||
- Modify: `providers/sitebuilder/tmpls.go:62-65` (postListData)
|
||||
- Modify: `providers/sitebuilder/builder.go:124-146` (renderPostListWithCategories)
|
||||
- Modify: `layouts/simplecss/templates/posts_list.html`
|
||||
|
||||
- [ ] **Step 1: Update `postListData` to include `PageInfo`**
|
||||
|
||||
In `providers/sitebuilder/tmpls.go`, update `postListData` (line 62):
|
||||
```go
|
||||
type postListData struct {
|
||||
commonData
|
||||
Posts []postSingleData
|
||||
PageInfo models.PageInfo
|
||||
PrevURL string
|
||||
NextURL string
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Rewrite `renderPostListWithCategories` to paginate**
|
||||
|
||||
Replace `renderPostListWithCategories` in `providers/sitebuilder/builder.go` (line 124):
|
||||
```go
|
||||
func (b *Builder) renderPostListWithCategories(bctx buildContext, ctx context.Context) error {
|
||||
// Collect all posts
|
||||
var allPosts []postSingleData
|
||||
for mp := range b.site.PostIter(ctx) {
|
||||
post, err := mp.Get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rp, err := b.renderPostWithCategories(ctx, post)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
allPosts = append(allPosts, rp)
|
||||
}
|
||||
|
||||
postsPerPage := b.site.PostsPerPage
|
||||
if postsPerPage < 1 {
|
||||
postsPerPage = 10
|
||||
}
|
||||
|
||||
totalPages := (len(allPosts) + postsPerPage - 1) / postsPerPage
|
||||
if totalPages < 1 {
|
||||
totalPages = 1
|
||||
}
|
||||
|
||||
for page := 1; page <= totalPages; page++ {
|
||||
start := (page - 1) * postsPerPage
|
||||
end := start + postsPerPage
|
||||
if end > len(allPosts) {
|
||||
end = len(allPosts)
|
||||
}
|
||||
|
||||
pageInfo := models.PageInfo{
|
||||
CurrentPage: page,
|
||||
TotalPages: totalPages,
|
||||
PostsPerPage: postsPerPage,
|
||||
}
|
||||
|
||||
var prevURL, nextURL string
|
||||
if page > 1 {
|
||||
if page == 2 {
|
||||
prevURL = "/posts/"
|
||||
} else {
|
||||
prevURL = fmt.Sprintf("/posts/page/%d/", page-1)
|
||||
}
|
||||
}
|
||||
if page < totalPages {
|
||||
nextURL = fmt.Sprintf("/posts/page/%d/", page+1)
|
||||
}
|
||||
|
||||
pl := postListData{
|
||||
commonData: commonData{Site: b.site},
|
||||
Posts: allPosts[start:end],
|
||||
PageInfo: pageInfo,
|
||||
PrevURL: prevURL,
|
||||
NextURL: nextURL,
|
||||
}
|
||||
|
||||
// Determine output path(s) for this page
|
||||
var paths []string
|
||||
if page == 1 {
|
||||
// Page 1 renders at both root and /posts/
|
||||
paths = []string{"", "/posts"}
|
||||
} else {
|
||||
paths = []string{fmt.Sprintf("/posts/page/%d", page)}
|
||||
}
|
||||
|
||||
for _, path := range paths {
|
||||
if err := b.createAtPath(bctx, path, func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNamePostList, pl)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Update the post list template with prev/next links**
|
||||
|
||||
Replace `layouts/simplecss/templates/posts_list.html`:
|
||||
```html
|
||||
{{ range .Posts }}
|
||||
<div class="h-entry">
|
||||
{{ if .Post.Title }}<h3>{{ .Post.Title }}</h3>{{ end }}
|
||||
{{ .HTML }}
|
||||
{{ template "_post_meta.html" . }}
|
||||
</div>
|
||||
{{ end }}
|
||||
{{ if or .PrevURL .NextURL }}
|
||||
<nav class="pagination">
|
||||
{{ if .PrevURL }}<a href="{{ .PrevURL }}">← Newer posts</a>{{ end }}
|
||||
{{ if .NextURL }}<a href="{{ .NextURL }}">Older posts →</a>{{ end }}
|
||||
</nav>
|
||||
{{ end }}
|
||||
```
|
||||
|
||||
- [ ] **Step 4: Run tests**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: Existing builder test may need updating (see next step).
|
||||
|
||||
- [ ] **Step 5: Update builder test**
|
||||
|
||||
The test in `providers/sitebuilder/builder_test.go` creates a `pubmodel.Site` without `PostsPerPage`, which will default to 0. Update the test site to set `PostsPerPage`:
|
||||
```go
|
||||
site := pubmodel.Site{
|
||||
Site: models.Site{PostsPerPage: 10},
|
||||
BaseURL: "https://example.com",
|
||||
PostIter: func(ctx context.Context) iter.Seq[models.Maybe[*models.Post]] {
|
||||
// ... existing code ...
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
The expected `index.html` content stays the same since both posts fit on one page.
|
||||
|
||||
- [ ] **Step 6: Run tests**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All pass.
|
||||
|
||||
- [ ] **Step 7: Commit**
|
||||
|
||||
```bash
|
||||
git add providers/sitebuilder/tmpls.go providers/sitebuilder/builder.go layouts/simplecss/templates/posts_list.html providers/sitebuilder/builder_test.go
|
||||
git commit -m "feat: add pagination to generated site post list"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 9: Add pagination to generated site category pages
|
||||
|
||||
**Files:**
|
||||
- Modify: `providers/sitebuilder/tmpls.go:82-88` (categorySingleData)
|
||||
- Modify: `providers/sitebuilder/builder.go:315-362` (renderCategoryPages)
|
||||
- Modify: `layouts/simplecss/templates/categories_single.html`
|
||||
|
||||
- [ ] **Step 1: Update `categorySingleData` to include pagination**
|
||||
|
||||
In `providers/sitebuilder/tmpls.go`, update `categorySingleData` (line 82):
|
||||
```go
|
||||
type categorySingleData struct {
|
||||
commonData
|
||||
Category *models.Category
|
||||
DescriptionHTML template.HTML
|
||||
Posts []postSingleData
|
||||
Path string
|
||||
PageInfo models.PageInfo
|
||||
PrevURL string
|
||||
NextURL string
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 2: Rewrite `renderCategoryPages` to paginate**
|
||||
|
||||
Replace `renderCategoryPages` in `providers/sitebuilder/builder.go` (line 315):
|
||||
```go
|
||||
func (b *Builder) renderCategoryPages(ctx buildContext, goCtx context.Context) error {
|
||||
for _, cwc := range b.site.Categories {
|
||||
if cwc.PostCount == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Collect all posts for this category
|
||||
var allPosts []postSingleData
|
||||
for mp := range b.site.PostIterByCategory(goCtx, cwc.ID) {
|
||||
post, err := mp.Get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rp, err := b.renderPostWithCategories(goCtx, post)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
allPosts = append(allPosts, rp)
|
||||
}
|
||||
|
||||
var descHTML bytes.Buffer
|
||||
if cwc.Description != "" {
|
||||
if err := b.mdRenderer.RenderTo(goCtx, &descHTML, cwc.Description); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
postsPerPage := b.site.PostsPerPage
|
||||
if postsPerPage < 1 {
|
||||
postsPerPage = 10
|
||||
}
|
||||
|
||||
totalPages := (len(allPosts) + postsPerPage - 1) / postsPerPage
|
||||
if totalPages < 1 {
|
||||
totalPages = 1
|
||||
}
|
||||
|
||||
basePath := fmt.Sprintf("/categories/%s", cwc.Slug)
|
||||
|
||||
for page := 1; page <= totalPages; page++ {
|
||||
start := (page - 1) * postsPerPage
|
||||
end := start + postsPerPage
|
||||
if end > len(allPosts) {
|
||||
end = len(allPosts)
|
||||
}
|
||||
|
||||
pageInfo := models.PageInfo{
|
||||
CurrentPage: page,
|
||||
TotalPages: totalPages,
|
||||
PostsPerPage: postsPerPage,
|
||||
}
|
||||
|
||||
var prevURL, nextURL string
|
||||
if page > 1 {
|
||||
if page == 2 {
|
||||
prevURL = basePath + "/"
|
||||
} else {
|
||||
prevURL = fmt.Sprintf("%s/page/%d/", basePath, page-1)
|
||||
}
|
||||
}
|
||||
if page < totalPages {
|
||||
nextURL = fmt.Sprintf("%s/page/%d/", basePath, page+1)
|
||||
}
|
||||
|
||||
path := basePath
|
||||
if page > 1 {
|
||||
path = fmt.Sprintf("%s/page/%d", basePath, page)
|
||||
}
|
||||
|
||||
data := categorySingleData{
|
||||
commonData: commonData{Site: b.site},
|
||||
Category: &cwc.Category,
|
||||
DescriptionHTML: template.HTML(descHTML.String()),
|
||||
Posts: allPosts[start:end],
|
||||
Path: path,
|
||||
PageInfo: pageInfo,
|
||||
PrevURL: prevURL,
|
||||
NextURL: nextURL,
|
||||
}
|
||||
|
||||
if err := b.createAtPath(ctx, path, func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNameCategorySingle, data)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Per-category feeds (use all posts, not paginated)
|
||||
if err := b.renderCategoryFeed(ctx, cwc, allPosts); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
```
|
||||
|
||||
- [ ] **Step 3: Update category single template with prev/next links**
|
||||
|
||||
Replace `layouts/simplecss/templates/categories_single.html`:
|
||||
```html
|
||||
{{ if .DescriptionHTML }}<div class="category-description">{{ .DescriptionHTML }}</div>{{ end }}
|
||||
{{ range .Posts }}
|
||||
<div class="h-entry">
|
||||
{{ if .Post.Title }}<h3>{{ .Post.Title }}</h3>{{ end }}
|
||||
{{ .HTML }}
|
||||
{{ template "_post_meta.html" . }}
|
||||
</div>
|
||||
{{ end }}
|
||||
{{ if or .PrevURL .NextURL }}
|
||||
<nav class="pagination">
|
||||
{{ if .PrevURL }}<a href="{{ .PrevURL }}">← Newer posts</a>{{ end }}
|
||||
{{ if .NextURL }}<a href="{{ .NextURL }}">Older posts →</a>{{ end }}
|
||||
</nav>
|
||||
{{ end }}
|
||||
```
|
||||
|
||||
Note: check the current content of `categories_single.html` first — preserve any existing structure (like `<h2>` headings) that may not have been captured in the exploration. Read the file before editing.
|
||||
|
||||
- [ ] **Step 4: Run tests**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All pass.
|
||||
|
||||
- [ ] **Step 5: Commit**
|
||||
|
||||
```bash
|
||||
git add providers/sitebuilder/tmpls.go providers/sitebuilder/builder.go layouts/simplecss/templates/categories_single.html
|
||||
git commit -m "feat: add pagination to generated site category pages"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Task 10: Final verification
|
||||
|
||||
- [ ] **Step 1: Run full test suite**
|
||||
|
||||
Run: `go test ./...`
|
||||
Expected: All tests pass.
|
||||
|
||||
- [ ] **Step 2: Build the project**
|
||||
|
||||
Run: `go build ./...`
|
||||
Expected: Clean build with no errors.
|
||||
|
||||
- [ ] **Step 3: Commit any remaining changes**
|
||||
|
||||
If any files were missed, stage and commit them.
|
||||
|
|
@ -1,169 +0,0 @@
|
|||
# Categories Feature Design
|
||||
|
||||
## Overview
|
||||
|
||||
Add flat, many-to-many categories to Weiro. Categories are managed via a dedicated admin page and assigned to posts on the post edit form. On the published static site, categories appear as labels on posts, archive pages per category, a category index page, and per-category RSS/JSON feeds. Categories with no published posts are hidden from the published site.
|
||||
|
||||
## Data Model
|
||||
|
||||
### New Tables (migration `04_categories.up.sql`)
|
||||
|
||||
```sql
|
||||
CREATE TABLE categories (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
site_id INTEGER NOT NULL,
|
||||
guid TEXT NOT NULL,
|
||||
name TEXT NOT NULL,
|
||||
slug TEXT NOT NULL,
|
||||
description TEXT NOT NULL DEFAULT '',
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
FOREIGN KEY (site_id) REFERENCES sites (id) ON DELETE CASCADE
|
||||
);
|
||||
CREATE INDEX idx_categories_site ON categories (site_id);
|
||||
CREATE UNIQUE INDEX idx_categories_guid ON categories (guid);
|
||||
CREATE UNIQUE INDEX idx_categories_site_slug ON categories (site_id, slug);
|
||||
|
||||
CREATE TABLE post_categories (
|
||||
post_id INTEGER NOT NULL,
|
||||
category_id INTEGER NOT NULL,
|
||||
PRIMARY KEY (post_id, category_id),
|
||||
FOREIGN KEY (post_id) REFERENCES posts (id) ON DELETE CASCADE,
|
||||
FOREIGN KEY (category_id) REFERENCES categories (id) ON DELETE CASCADE
|
||||
);
|
||||
CREATE INDEX idx_post_categories_category ON post_categories (category_id);
|
||||
```
|
||||
|
||||
### New Go Model (`models/categories.go`)
|
||||
|
||||
```go
|
||||
type Category struct {
|
||||
ID int64 `json:"id"`
|
||||
SiteID int64 `json:"site_id"`
|
||||
GUID string `json:"guid"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
Description string `json:"description"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
```
|
||||
|
||||
- `slug` is auto-generated from `name` (e.g. "Go Programming" -> `go-programming`), editable by the user.
|
||||
- `description` is Markdown, rendered on the category archive page. Defaults to empty string.
|
||||
- DB provider must use the existing `timeToInt()`/`time.Unix()` helpers for timestamp conversion, consistent with how posts are handled.
|
||||
|
||||
## Admin UI
|
||||
|
||||
### Category Management Page
|
||||
|
||||
Route: `/sites/:siteID/categories`
|
||||
|
||||
- Lists all categories for the site showing name, slug, and post count.
|
||||
- "New category" button navigates to a create/edit form.
|
||||
- Edit form fields: Name, Slug (auto-generated but editable), Description (Markdown textarea).
|
||||
- Delete button with confirmation. Deletes the category and its post associations; does not delete the posts.
|
||||
|
||||
Handler: `CategoriesHandler` (new, in `handlers/categories.go`).
|
||||
Templates: `views/categories/index.html`, `views/categories/edit.html`.
|
||||
|
||||
### Post Edit Form Changes
|
||||
|
||||
- A multi-select checkbox list of all available categories (sorted alphabetically by name), displayed in a **right sidebar** alongside the main title/body editing area on the left.
|
||||
- Selected category IDs sent with the form submission.
|
||||
- `CreatePostParams` gains `CategoryIDs []int64`.
|
||||
|
||||
### Post List (Admin)
|
||||
|
||||
- Category names shown as small labels next to each post title.
|
||||
|
||||
## Static Site Output
|
||||
|
||||
### Category Index Page (`/categories/`)
|
||||
|
||||
Lists all categories that have at least one published post. For each category:
|
||||
|
||||
- Category name as a clickable link to the archive page
|
||||
- Post count
|
||||
- First sentence/line of the description as a brief excerpt
|
||||
|
||||
### Category Archive Pages (`/categories/<slug>/`)
|
||||
|
||||
- Category name as heading
|
||||
- Full Markdown description rendered below the heading
|
||||
- List of published posts in the category, ordered by `published_at` descending
|
||||
|
||||
### Post Pages
|
||||
|
||||
Each post page displays its category names as clickable links to the corresponding category archive pages.
|
||||
|
||||
### Feeds
|
||||
|
||||
Per-category feeds:
|
||||
- `/categories/<slug>/feed.xml` (RSS)
|
||||
- `/categories/<slug>/feed.json` (JSON Feed)
|
||||
|
||||
Main site feeds (`/feed.xml`, `/feed.json`) gain category metadata on each post entry.
|
||||
|
||||
### Empty Category Handling
|
||||
|
||||
Categories with no published posts are hidden from the published site: no index entry, no archive page, no feed generated. They remain visible and manageable in the admin UI.
|
||||
|
||||
## SQL Queries
|
||||
|
||||
New file: `sql/queries/categories.sql`
|
||||
|
||||
- `SelectCategoriesOfSite` — all categories for a site, ordered by name
|
||||
- `SelectCategory` — single category by ID
|
||||
- `SelectCategoryByGUID` — single category by GUID
|
||||
- `SelectCategoriesOfPost` — categories for a given post (via join table)
|
||||
- `SelectPostsOfCategory` — published, non-deleted posts in a category (`state = 0 AND deleted_at = 0`), ordered by `published_at` desc
|
||||
- `CountPostsOfCategory` — count of published posts per category (same `state = 0 AND deleted_at = 0` filter)
|
||||
- `InsertCategory` / `UpdateCategory` / `DeleteCategory` — CRUD
|
||||
- `InsertPostCategory` / `DeletePostCategory` — manage the join table
|
||||
- `DeletePostCategoriesByPost` — clear all categories for a post (delete-then-reinsert on save)
|
||||
|
||||
## Service Layer
|
||||
|
||||
### New `services/categories` Package
|
||||
|
||||
`Service` struct with methods:
|
||||
|
||||
- `ListCategories(ctx) ([]Category, error)` — all categories for the current site (from context)
|
||||
- `GetCategory(ctx, id) (*Category, error)`
|
||||
- `CreateCategory(ctx, params) (*Category, error)` — auto-generates slug from name. If the slug collides with an existing one for the same site, return a validation error.
|
||||
- `UpdateCategory(ctx, params) (*Category, error)` — same slug collision check on update.
|
||||
- `DeleteCategory(ctx, id) error` — deletes category and post associations, queues site rebuild
|
||||
|
||||
All mutation methods verify site ownership (same pattern as post service authorization checks).
|
||||
|
||||
### Changes to `services/posts`
|
||||
|
||||
- `UpdatePost` — after saving the post, deletes existing `post_categories` rows and re-inserts for the selected category IDs. The post save and category reassignment must run within a single database transaction to ensure atomicity.
|
||||
- `GetPost` / `ListPosts` — loads each post's categories for admin display
|
||||
|
||||
### Changes to Publishing Pipeline
|
||||
|
||||
- `pubmodel.Site` gains new fields:
|
||||
- `Categories []CategoryWithCount` — category list with post counts and description excerpts for the index page
|
||||
- `PostIterByCategory func(ctx context.Context, categoryID int64) iter.Seq[models.Maybe[*models.Post]]` — iterator for posts in a specific category
|
||||
- `sitebuilder.Builder.BuildSite` gains additional goroutines for:
|
||||
- Rendering the category index page
|
||||
- Rendering each category archive page
|
||||
- Rendering per-category feeds
|
||||
- New templates: `tmplNameCategoryList`, `tmplNameCategorySingle` (must be added to the `ParseFS` call in `sitebuilder.New()`)
|
||||
- `postSingleData` gains a `Categories []Category` field so post templates can render category links
|
||||
|
||||
### Rebuild Triggers
|
||||
|
||||
Saving or deleting a category queues a site rebuild, same as post state changes.
|
||||
|
||||
## DB Provider
|
||||
|
||||
`providers/db/` gains wrapper methods for all new sqlc queries, following the same pattern as existing post methods (e.g. `SaveCategory`, `SelectCategoriesOfPost`, etc.).
|
||||
|
||||
## Design Decisions
|
||||
|
||||
- **Hard delete for categories** — unlike posts which use soft-delete, categories are hard-deleted. They are simpler entities and don't need a trash/restore workflow.
|
||||
- **No sort_order column** — categories are sorted alphabetically by name. Manual ordering can be added later if needed.
|
||||
- **Existing microblog-crosspost feed** — kept as-is. Per-category feeds are a separate, additive feature.
|
||||
|
|
@ -1,148 +0,0 @@
|
|||
# Arbitrary Pages Feature Design
|
||||
|
||||
## Overview
|
||||
|
||||
Allow users to create arbitrary pages for their site. Each page has a title, user-editable slug, markdown body, page type, nav visibility flag, and sort order. Pages are a separate entity from posts with their own admin section and generated site template. Pages rendered at conflicting slugs silently override auto-generated content.
|
||||
|
||||
## Data Layer
|
||||
|
||||
### New `pages` table
|
||||
|
||||
```sql
|
||||
CREATE TABLE pages (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
site_id INTEGER NOT NULL,
|
||||
guid TEXT NOT NULL,
|
||||
title TEXT NOT NULL,
|
||||
slug TEXT NOT NULL,
|
||||
body TEXT NOT NULL,
|
||||
page_type INTEGER NOT NULL DEFAULT 0,
|
||||
show_in_nav INTEGER NOT NULL DEFAULT 0,
|
||||
sort_order INTEGER NOT NULL DEFAULT 0,
|
||||
created_at INTEGER NOT NULL,
|
||||
updated_at INTEGER NOT NULL,
|
||||
FOREIGN KEY (site_id) REFERENCES sites (id) ON DELETE CASCADE
|
||||
);
|
||||
CREATE INDEX idx_pages_site ON pages (site_id);
|
||||
CREATE UNIQUE INDEX idx_pages_guid ON pages (guid);
|
||||
CREATE UNIQUE INDEX idx_pages_site_slug ON pages (site_id, slug);
|
||||
```
|
||||
|
||||
### Model
|
||||
|
||||
```go
|
||||
type Page struct {
|
||||
ID int64
|
||||
SiteID int64
|
||||
GUID string
|
||||
Title string
|
||||
Slug string
|
||||
Body string
|
||||
PageType int
|
||||
ShowInNav bool
|
||||
SortOrder int
|
||||
CreatedAt time.Time
|
||||
UpdatedAt time.Time
|
||||
}
|
||||
```
|
||||
|
||||
Page type constants: `PageTypeNormal = 0` (extensible later for archive, search, etc.).
|
||||
|
||||
### SQL queries
|
||||
|
||||
- `SelectPagesOfSite(siteID)` — all pages for a site, ordered by `sort_order ASC`
|
||||
- `SelectPage(id)` — single page by ID
|
||||
- `SelectPageByGUID(guid)` — single page by GUID
|
||||
- `InsertPage` — create new page, returns ID
|
||||
- `UpdatePage` — update page fields
|
||||
- `DeletePage(id)` — delete page
|
||||
- `UpdatePageSortOrder(id, sortOrder)` — update sort order for a single page
|
||||
|
||||
## Admin Section
|
||||
|
||||
### Navigation
|
||||
|
||||
Add "Pages" item to the admin nav bar (`views/_common/nav.html`), linking to `/sites/:siteID/pages`.
|
||||
|
||||
### Routes
|
||||
|
||||
```
|
||||
GET /sites/:siteID/pages - List pages
|
||||
GET /sites/:siteID/pages/new - New page form
|
||||
GET /sites/:siteID/pages/:pageID - Edit page form
|
||||
POST /sites/:siteID/pages - Create/update page
|
||||
DELETE /sites/:siteID/pages/:pageID - Delete page
|
||||
POST /sites/:siteID/pages/reorder - Update sort order (AJAX)
|
||||
```
|
||||
|
||||
### Page list view (`views/pages/index.html`)
|
||||
|
||||
- Lists pages ordered by `sort_order`
|
||||
- Each row shows title, slug, and nav visibility indicator
|
||||
- Drag-and-drop reordering via Stimulus + HTML drag API
|
||||
- On drop, sends new order to `POST /pages/reorder` via AJAX
|
||||
- "New Page" button
|
||||
|
||||
### Page edit form (`views/pages/edit.html`)
|
||||
|
||||
Two-column layout mirroring the post edit form:
|
||||
|
||||
**Main area (left):**
|
||||
- Title input
|
||||
- Body textarea (markdown)
|
||||
|
||||
**Sidebar (right):**
|
||||
- Slug (editable text input, auto-derived from title via client-side JS, user can override)
|
||||
- Page Type (select dropdown, just "Normal" for now)
|
||||
- Show in Nav (checkbox)
|
||||
|
||||
Save button below.
|
||||
|
||||
### Service layer (`services/pages/`)
|
||||
|
||||
- `Service` struct with DB provider dependency
|
||||
- `CreatePage(ctx, params)` — generates GUID, derives slug from title if not provided, sets timestamps
|
||||
- `UpdatePage(ctx, params)` — updates fields, sets `updated_at`
|
||||
- `DeletePage(ctx, pageID)` — deletes page
|
||||
- `ListPages(ctx)` — returns all pages for the site from context, ordered by `sort_order`
|
||||
- `GetPage(ctx, pageID)` — returns single page
|
||||
- `ReorderPages(ctx, pageIDs []int64)` — accepts ordered list of page IDs, updates `sort_order` for each (sort_order = index in list)
|
||||
|
||||
### Handler (`handlers/pages.go`)
|
||||
|
||||
- `PagesHandler` struct with `PageService`
|
||||
- Standard CRUD handlers following the existing posts handler pattern
|
||||
- `Reorder` handler accepts JSON array of page IDs, calls `ReorderPages`
|
||||
|
||||
## Generated Site
|
||||
|
||||
### Template
|
||||
|
||||
New template `pages_single.html` — receives rendered page HTML, rendered inside `layout_main.html` (same wrapping as posts).
|
||||
|
||||
Template data:
|
||||
```go
|
||||
type pageSingleData struct {
|
||||
commonData
|
||||
Page *models.Page
|
||||
HTML template.HTML
|
||||
}
|
||||
```
|
||||
|
||||
### Builder changes
|
||||
|
||||
New method `renderPages` on the builder:
|
||||
- Iterates all pages from `pubmodel.Site.Pages`
|
||||
- For each page, renders markdown body and writes to the page's slug path using `createAtPath`
|
||||
- Pages are rendered **after** all other content (posts, post lists, categories, feeds, uploads, static assets)
|
||||
- This ensures pages at conflicting slugs silently overwrite auto-generated content
|
||||
- Implementation: `renderPages` runs as a sequential step after `eg.Wait()` returns in `BuildSite`
|
||||
|
||||
### Publisher changes
|
||||
|
||||
- `pubmodel.Site` gets a new `Pages []models.Page` field
|
||||
- The publisher fetches all pages for the site via `SelectPagesOfSite` and populates this field
|
||||
|
||||
## Approach
|
||||
|
||||
Pages are a separate entity from posts with their own table, service, handler, and templates. The override mechanism is file-system-based: the site builder renders pages last, so any page slug that conflicts with an auto-generated path wins by overwriting the file. The `show_in_nav` field is stored and editable in admin but not yet consumed by the generated site layout — that integration is deferred for a future change.
|
||||
|
|
@ -1,100 +0,0 @@
|
|||
# Paging Feature Design
|
||||
|
||||
## Overview
|
||||
|
||||
Introduce offset-based pagination to the admin post list and the generated static site (both post listings and category listings).
|
||||
|
||||
## Data Layer
|
||||
|
||||
### New `sites` column
|
||||
|
||||
Add `posts_per_page INTEGER NOT NULL DEFAULT 10` to the `sites` table. This setting controls the number of posts per page on the **generated static site only**.
|
||||
|
||||
### New SQL queries
|
||||
|
||||
- `CountPostsOfSite(siteID, showDeleted)` — returns total post count for the site
|
||||
- `CountPostsOfCategory(categoryID)` — returns total published post count for a category
|
||||
|
||||
### Model changes
|
||||
|
||||
**`models.Site`** — add field:
|
||||
```go
|
||||
PostsPerPage int
|
||||
```
|
||||
|
||||
**New shared type** (`models/paging.go`):
|
||||
```go
|
||||
type PageInfo struct {
|
||||
CurrentPage int
|
||||
TotalPages int
|
||||
PostsPerPage int
|
||||
}
|
||||
```
|
||||
|
||||
Existing `db.PagingParams` and queries (`SelectPostsOfSite`, `SelectPostsOfCategory`) already support `LIMIT/OFFSET` and remain unchanged.
|
||||
|
||||
## Admin Section
|
||||
|
||||
### Post list pagination
|
||||
|
||||
- **Page size: hardcoded at 25** (not tied to the `PostsPerPage` site setting)
|
||||
- Handler (`handlers/posts.go` `Index()`) reads a `page` query parameter (default 1)
|
||||
- Computes offset as `(page - 1) * 25`
|
||||
- Fetches total post count via new `CountPosts()` service method to build `PageInfo`
|
||||
- Passes `PageInfo` to template
|
||||
|
||||
### Service changes
|
||||
|
||||
- `ListPosts()` accepts paging params from the handler instead of hardcoding them
|
||||
- New `CountPosts()` method that calls the count query
|
||||
|
||||
### Template (`views/posts/index.html`)
|
||||
|
||||
- Full numbered pagination with Previous/Next below the post list. Every page number is rendered (e.g. `< 1 2 3 4 5 >`); no ellipsis truncation for now
|
||||
- Preserves existing query params (e.g. `?filter=deleted`) when paginating
|
||||
- Both regular post list and trash view are paginated
|
||||
|
||||
### Site settings form
|
||||
|
||||
- Add "Posts per page" number input to `views/sitesettings/general.html`
|
||||
- Add `PostsPerPage` field to `UpdateSiteSettingsParams`
|
||||
- Server-side validation: minimum 1, maximum 100
|
||||
|
||||
## Generated Static Site
|
||||
|
||||
### URL structure
|
||||
|
||||
Post listing pages:
|
||||
- `/posts/` — page 1
|
||||
- `/posts/page/2/` — page 2
|
||||
- `/posts/page/N/` — page N
|
||||
|
||||
Category listing pages:
|
||||
- `/categories/<slug>/` — page 1
|
||||
- `/categories/<slug>/page/2/` — page 2
|
||||
- `/categories/<slug>/page/N/` — page N
|
||||
|
||||
### Site root
|
||||
|
||||
`/` (site root) shows the same content as `/posts/` (page 1 of all posts).
|
||||
|
||||
### Builder changes (`providers/sitebuilder/builder.go`)
|
||||
|
||||
- Instead of rendering one `posts_list.html` with all posts, generate multiple page files
|
||||
- Uses `site.PostsPerPage` from the site setting to determine page size
|
||||
- Same pattern for category pages
|
||||
|
||||
### Publisher changes (`services/publisher/iter.go`)
|
||||
|
||||
- Existing iterator fetches posts in batches of 50 internally — this stays as-is
|
||||
- The builder chunks posts into pages of `PostsPerPage` size and renders each page as a separate HTML file
|
||||
|
||||
### Template (`layouts/simplecss/templates/posts_list.html`)
|
||||
|
||||
- Receives `PageInfo` plus the posts for that page
|
||||
- Renders **Previous / Next** links only (no numbered pagination)
|
||||
- Previous link hidden on page 1; Next link hidden on last page
|
||||
|
||||
## Approach
|
||||
|
||||
Offset-based pagination using the existing `db.PagingParams` infrastructure. Page number maps to offset: `offset = (page - 1) * postsPerPage`.
|
||||
1
go.mod
1
go.mod
|
|
@ -52,7 +52,6 @@ require (
|
|||
github.com/gofiber/template/v2 v2.1.0 // indirect
|
||||
github.com/gofiber/utils/v2 v2.0.2 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/gopherlibs/feedhub v1.2.0 // indirect
|
||||
github.com/gorilla/css v1.0.1 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
|
|
|
|||
2
go.sum
2
go.sum
|
|
@ -279,8 +279,6 @@ github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+
|
|||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherlibs/feedhub v1.2.0 h1:1nfM8gRoiA+VNjKc1FzrwiXkrBKsnAghA3PVvgAiSI0=
|
||||
github.com/gopherlibs/feedhub v1.2.0/go.mod h1:vvQEZzTKr2KhO0mCdEUGfKLvUJFfO8U+WUpaMyoZttc=
|
||||
github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
|
||||
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
|
||||
github.com/gorilla/handlers v1.4.2/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ=
|
||||
|
|
|
|||
|
|
@ -1,101 +0,0 @@
|
|||
package handlers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/gofiber/fiber/v3"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/services/categories"
|
||||
)
|
||||
|
||||
type CategoriesHandler struct {
|
||||
CategoryService *categories.Service
|
||||
}
|
||||
|
||||
func (ch CategoriesHandler) Index(c fiber.Ctx) error {
|
||||
cats, err := ch.CategoryService.ListCategoriesWithCounts(c.Context())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Render("categories/index", fiber.Map{
|
||||
"categories": cats,
|
||||
})
|
||||
}
|
||||
|
||||
func (ch CategoriesHandler) New(c fiber.Ctx) error {
|
||||
cat := models.Category{
|
||||
GUID: models.NewNanoID(),
|
||||
}
|
||||
return c.Render("categories/edit", fiber.Map{
|
||||
"category": cat,
|
||||
"isNew": true,
|
||||
})
|
||||
}
|
||||
|
||||
func (ch CategoriesHandler) Edit(c fiber.Ctx) error {
|
||||
catID, err := strconv.ParseInt(c.Params("categoryID"), 10, 64)
|
||||
if err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
cat, err := ch.CategoryService.GetCategory(c.Context(), catID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Render("categories/edit", fiber.Map{
|
||||
"category": cat,
|
||||
"isNew": false,
|
||||
})
|
||||
}
|
||||
|
||||
func (ch CategoriesHandler) Create(c fiber.Ctx) error {
|
||||
var req categories.CreateCategoryParams
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err := ch.CategoryService.CreateCategory(c.Context(), req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
site := models.MustGetSite(c.Context())
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/categories", site.ID))
|
||||
}
|
||||
|
||||
func (ch CategoriesHandler) Update(c fiber.Ctx) error {
|
||||
catID, err := strconv.ParseInt(c.Params("categoryID"), 10, 64)
|
||||
if err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
var req categories.CreateCategoryParams
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = ch.CategoryService.UpdateCategory(c.Context(), catID, req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
site := models.MustGetSite(c.Context())
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/categories", site.ID))
|
||||
}
|
||||
|
||||
func (ch CategoriesHandler) Delete(c fiber.Ctx) error {
|
||||
catID, err := strconv.ParseInt(c.Params("categoryID"), 10, 64)
|
||||
if err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
if err := ch.CategoryService.DeleteCategory(c.Context(), catID); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
site := models.MustGetSite(c.Context())
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/categories", site.ID))
|
||||
}
|
||||
|
|
@ -1,165 +0,0 @@
|
|||
package handlers
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
|
||||
"github.com/gofiber/fiber/v3"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/services/imgedit"
|
||||
)
|
||||
|
||||
type ImageEditHandlers struct {
|
||||
ImageEditService *imgedit.Service
|
||||
}
|
||||
|
||||
type sessionResponse struct {
|
||||
Session *models.ImageEditSession `json:"session"`
|
||||
PreviewURL string `json:"preview_url"`
|
||||
}
|
||||
|
||||
func (ieh ImageEditHandlers) Create(c fiber.Ctx) error {
|
||||
var req struct {
|
||||
BaseUploadID int64 `json:"base_upload"`
|
||||
}
|
||||
|
||||
if err := c.Bind().JSON(&req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
res, err := ieh.ImageEditService.NewSession(c.Context(), req.BaseUploadID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var resp = sessionResponse{
|
||||
Session: res,
|
||||
PreviewURL: res.PreviewURL(),
|
||||
}
|
||||
|
||||
return c.Status(http.StatusCreated).JSON(resp)
|
||||
}
|
||||
|
||||
func (ieh ImageEditHandlers) Preview(c fiber.Ctx) error {
|
||||
log.Printf("Previewing image edit session %v/%v", c.Params("sessionID"), c.Params("versionID"))
|
||||
sessionID := c.Params("sessionID")
|
||||
versionID := c.Params("versionID")
|
||||
|
||||
mimeTime, rw, err := ieh.ImageEditService.LoadImageVersion(c.Context(), sessionID, versionID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c.Set("Content-Type", mimeTime)
|
||||
c.Status(http.StatusOK)
|
||||
return c.SendStreamWriter(func(w *bufio.Writer) {
|
||||
rw, err := rw()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer rw.Close()
|
||||
|
||||
_, err = io.Copy(w, rw)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func (ieh ImageEditHandlers) AddProcessor(c fiber.Ctx) error {
|
||||
sessionID := c.Params("sessionID")
|
||||
if sessionID == "" {
|
||||
log.Println("No session ID")
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
var req imgedit.AddProcessorReq
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
log.Printf("Failed to parse request body: %v", err)
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
res, err := ieh.ImageEditService.AddProcessor(c.Context(), sessionID, req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Status(http.StatusOK).JSON(sessionResponse{
|
||||
Session: res,
|
||||
PreviewURL: res.PreviewURL(),
|
||||
})
|
||||
}
|
||||
|
||||
func (ieh ImageEditHandlers) DeleteProcessor(c fiber.Ctx) error {
|
||||
sessionID := c.Params("sessionID")
|
||||
if sessionID == "" {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
processorID := c.Params("processorID")
|
||||
if processorID == "" {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
res, err := ieh.ImageEditService.DeleteProcessor(c.Context(), sessionID, processorID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Status(http.StatusOK).JSON(sessionResponse{
|
||||
Session: res,
|
||||
PreviewURL: res.PreviewURL(),
|
||||
})
|
||||
}
|
||||
|
||||
func (ieh ImageEditHandlers) Save(c fiber.Ctx) error {
|
||||
sessionID := c.Params("sessionID")
|
||||
if sessionID == "" {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
var req struct {
|
||||
Mode string `json:"mode"`
|
||||
}
|
||||
if err := c.Bind().JSON(&req); err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
result, err := ieh.ImageEditService.Save(c.Context(), sessionID, req.Mode)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Status(http.StatusOK).JSON(result)
|
||||
}
|
||||
|
||||
func (ieh ImageEditHandlers) PatchSession(c fiber.Ctx) error {
|
||||
var req struct {
|
||||
UpdateProc *imgedit.UpdateProcessorReq `json:"processor"`
|
||||
}
|
||||
|
||||
sessionID := c.Params("sessionID")
|
||||
if sessionID == "" {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Printf("Got request: %v", *req.UpdateProc)
|
||||
|
||||
if req.UpdateProc != nil {
|
||||
res, err := ieh.ImageEditService.UpdateProcessor(c.Context(), sessionID, *req.UpdateProc)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return c.Status(http.StatusOK).JSON(sessionResponse{
|
||||
Session: res,
|
||||
PreviewURL: res.PreviewURL(),
|
||||
})
|
||||
}
|
||||
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
|
@ -2,7 +2,6 @@ package handlers
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/url"
|
||||
"regexp"
|
||||
|
||||
|
|
@ -38,13 +37,6 @@ func (h IndexHandler) Index(c fiber.Ctx) error {
|
|||
}
|
||||
}
|
||||
|
||||
sess := session.FromContext(c)
|
||||
lastSiteID, ok := sess.Get("last_site_id").(int64)
|
||||
log.Printf("last site id: %v", lastSiteID)
|
||||
if ok {
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/posts", lastSiteID))
|
||||
}
|
||||
|
||||
site, err := h.SiteService.BestSite(c.Context(), user)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
|
|||
|
|
@ -37,8 +37,9 @@ func (lh *LoginHandler) Logout(c fiber.Ctx) error {
|
|||
|
||||
func (lh *LoginHandler) DoLogin(c fiber.Ctx) error {
|
||||
var req struct {
|
||||
Username string `form:"username"`
|
||||
Password string `form:"password"`
|
||||
Username string `form:"username"`
|
||||
Password string `form:"password"`
|
||||
LoginChallenge string `form:"_login_challenge"`
|
||||
}
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
return c.Status(fiber.StatusBadRequest).SendString("Failed to parse request body")
|
||||
|
|
@ -50,6 +51,11 @@ func (lh *LoginHandler) DoLogin(c fiber.Ctx) error {
|
|||
|
||||
sess := session.FromContext(c)
|
||||
|
||||
challenge, _ := sess.Get("_login_challenge").(string)
|
||||
if challenge != req.LoginChallenge {
|
||||
return c.Redirect().To("/login")
|
||||
}
|
||||
|
||||
user, err := lh.AuthService.Login(c.Context(), req.Username, req.Password)
|
||||
if err != nil {
|
||||
return c.Status(fiber.StatusInternalServerError).SendString("Failed to login")
|
||||
|
|
|
|||
|
|
@ -1,17 +0,0 @@
|
|||
package middleware
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/gofiber/fiber/v3"
|
||||
)
|
||||
|
||||
func LogErrors() func(c fiber.Ctx) error {
|
||||
return func(c fiber.Ctx) error {
|
||||
if err := c.Next(); err != nil {
|
||||
log.Printf("%v: error: %v\n", c.Path(), err)
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
|
@ -5,7 +5,6 @@ import (
|
|||
|
||||
"emperror.dev/errors"
|
||||
"github.com/gofiber/fiber/v3"
|
||||
"github.com/gofiber/fiber/v3/middleware/session"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
"lmika.dev/lmika/weiro/services/sites"
|
||||
|
|
@ -33,22 +32,9 @@ func RequiresSite(sites *sites.Service) func(c fiber.Ctx) error {
|
|||
return err
|
||||
}
|
||||
}
|
||||
|
||||
c.Locals("site", site)
|
||||
c.SetContext(models.WithSite(c.Context(), site))
|
||||
|
||||
sitesOwnedByUser, err := sites.ListSites(c.Context())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.Locals("allSites", sitesOwnedByUser)
|
||||
|
||||
sess := session.FromContext(c)
|
||||
sess.Set("last_site_id", siteID)
|
||||
|
||||
if pubTargets, err := sites.BestPubTarget(c.Context(), site); err == nil {
|
||||
c.Locals("pubTarget", pubTargets)
|
||||
}
|
||||
|
||||
return c.Next()
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,118 +0,0 @@
|
|||
package handlers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/gofiber/fiber/v3"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/services/pages"
|
||||
)
|
||||
|
||||
type PagesHandler struct {
|
||||
PageService *pages.Service
|
||||
}
|
||||
|
||||
func (ph PagesHandler) Index(c fiber.Ctx) error {
|
||||
pagesList, err := ph.PageService.ListPages(c.Context())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Render("pages/index", fiber.Map{
|
||||
"pages": pagesList,
|
||||
})
|
||||
}
|
||||
|
||||
func (ph PagesHandler) New(c fiber.Ctx) error {
|
||||
page := models.Page{
|
||||
GUID: models.NewNanoID(),
|
||||
}
|
||||
return c.Render("pages/edit", fiber.Map{
|
||||
"page": page,
|
||||
"isNew": true,
|
||||
"bodyClass": "post-edit-page",
|
||||
})
|
||||
}
|
||||
|
||||
func (ph PagesHandler) Edit(c fiber.Ctx) error {
|
||||
pageID, err := strconv.ParseInt(c.Params("pageID"), 10, 64)
|
||||
if err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
page, err := ph.PageService.GetPage(c.Context(), pageID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Render("pages/edit", fiber.Map{
|
||||
"page": page,
|
||||
"isNew": false,
|
||||
"bodyClass": "post-edit-page",
|
||||
})
|
||||
}
|
||||
|
||||
func (ph PagesHandler) Create(c fiber.Ctx) error {
|
||||
var req pages.CreatePageParams
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err := ph.PageService.CreatePage(c.Context(), req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
site := models.MustGetSite(c.Context())
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/pages", site.ID))
|
||||
}
|
||||
|
||||
func (ph PagesHandler) Update(c fiber.Ctx) error {
|
||||
pageID, err := strconv.ParseInt(c.Params("pageID"), 10, 64)
|
||||
if err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
var req pages.CreatePageParams
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = ph.PageService.UpdatePage(c.Context(), pageID, req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
site := models.MustGetSite(c.Context())
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/pages", site.ID))
|
||||
}
|
||||
|
||||
func (ph PagesHandler) Delete(c fiber.Ctx) error {
|
||||
pageID, err := strconv.ParseInt(c.Params("pageID"), 10, 64)
|
||||
if err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
if err := ph.PageService.DeletePage(c.Context(), pageID); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
site := models.MustGetSite(c.Context())
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/pages", site.ID))
|
||||
}
|
||||
|
||||
func (ph PagesHandler) Reorder(c fiber.Ctx) error {
|
||||
var req struct {
|
||||
PageIDs []int64 `json:"page_ids"`
|
||||
}
|
||||
if err := c.Bind().Body(&req); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := ph.PageService.ReorderPages(c.Context(), req.PageIDs); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.JSON(fiber.Map{"ok": true})
|
||||
}
|
||||
|
|
@ -6,56 +6,32 @@ import (
|
|||
|
||||
"github.com/gofiber/fiber/v3"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
"lmika.dev/lmika/weiro/services/categories"
|
||||
"lmika.dev/lmika/weiro/services/posts"
|
||||
)
|
||||
|
||||
type PostsHandler struct {
|
||||
PostService *posts.Service
|
||||
CategoryService *categories.Service
|
||||
PostService *posts.Service
|
||||
}
|
||||
|
||||
func (ph PostsHandler) Index(c fiber.Ctx) error {
|
||||
var req struct {
|
||||
Filter string `query:"filter"`
|
||||
Page int `query:"page"`
|
||||
}
|
||||
if err := c.Bind().Query(&req); err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
const perPage = 25
|
||||
if req.Page < 1 {
|
||||
req.Page = 1
|
||||
}
|
||||
|
||||
result, err := ph.PostService.ListPosts(c.Context(), req.Filter == "deleted", db.PagingParams{
|
||||
Offset: int64((req.Page - 1) * perPage),
|
||||
Limit: perPage,
|
||||
})
|
||||
posts, err := ph.PostService.ListPosts(c.Context(), req.Filter == "deleted")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
totalPages := int(result.TotalCount+int64(perPage)-1) / perPage
|
||||
if totalPages < 1 {
|
||||
totalPages = 1
|
||||
}
|
||||
|
||||
pageInfo := models.PageInfo{
|
||||
CurrentPage: req.Page,
|
||||
TotalPages: totalPages,
|
||||
PostsPerPage: perPage,
|
||||
}
|
||||
|
||||
return accepts(c, json(func() any {
|
||||
return result.Posts
|
||||
return posts
|
||||
}), html(func(c fiber.Ctx) error {
|
||||
return c.Render("posts/index", fiber.Map{
|
||||
"req": req,
|
||||
"posts": result.Posts,
|
||||
"pageInfo": pageInfo,
|
||||
"req": req,
|
||||
"posts": posts,
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
|
@ -66,16 +42,8 @@ func (ph PostsHandler) New(c fiber.Ctx) error {
|
|||
State: models.StateDraft,
|
||||
}
|
||||
|
||||
cats, err := ph.CategoryService.ListCategories(c.Context())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Render("posts/edit", fiber.Map{
|
||||
"post": p,
|
||||
"categories": cats,
|
||||
"selectedCategories": map[int64]bool{},
|
||||
"bodyClass": "large-editor",
|
||||
"post": p,
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -94,29 +62,11 @@ func (ph PostsHandler) Edit(c fiber.Ctx) error {
|
|||
return err
|
||||
}
|
||||
|
||||
cats, err := ph.CategoryService.ListCategories(c.Context())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
postCats, err := ph.PostService.GetPostCategories(c.Context(), postID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
selectedCategories := make(map[int64]bool)
|
||||
for _, pc := range postCats {
|
||||
selectedCategories[pc.ID] = true
|
||||
}
|
||||
|
||||
return accepts(c, json(func() any {
|
||||
return post
|
||||
}), html(func(c fiber.Ctx) error {
|
||||
return c.Render("posts/edit", fiber.Map{
|
||||
"post": post,
|
||||
"categories": cats,
|
||||
"selectedCategories": selectedCategories,
|
||||
"bodyClass": "large-editor",
|
||||
"post": post,
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
|
@ -169,7 +119,8 @@ func (ph PostsHandler) Patch(c fiber.Ctx) error {
|
|||
return accepts(c, json(func() any {
|
||||
return struct{}{}
|
||||
}), html(func(c fiber.Ctx) error {
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/posts", models.MustGetSite(c.Context()).ID))
|
||||
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/posts"))
|
||||
}))
|
||||
}
|
||||
|
||||
|
|
@ -198,6 +149,6 @@ func (ph PostsHandler) Delete(c fiber.Ctx) error {
|
|||
return accepts(c, json(func() any {
|
||||
return fiber.Map{}
|
||||
}), html(func(c fiber.Ctx) error {
|
||||
return c.Redirect().To("/")
|
||||
return c.Redirect().To("/sites")
|
||||
}))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,70 +0,0 @@
|
|||
package handlers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/gofiber/fiber/v3"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/services/sites"
|
||||
)
|
||||
|
||||
type SiteSettingsHandler struct {
|
||||
SiteService *sites.Service
|
||||
}
|
||||
|
||||
func (s *SiteSettingsHandler) New(c fiber.Ctx) error {
|
||||
return c.Render("sitesettings/new", fiber.Map{}, "layouts/bare_with_scripts")
|
||||
}
|
||||
|
||||
func (s *SiteSettingsHandler) Create(c fiber.Ctx) error {
|
||||
var params sites.CreateSiteParams
|
||||
if err := c.Bind().Body(¶ms); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
newSite, err := s.SiteService.CreateSite(c.Context(), params)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/posts", newSite.ID))
|
||||
}
|
||||
|
||||
func (s *SiteSettingsHandler) General(c fiber.Ctx) error {
|
||||
site := c.Locals("site").(models.Site)
|
||||
|
||||
return c.Render("sitesettings/general", fiber.Map{
|
||||
"site": site,
|
||||
"tzones": sites.ListZones(),
|
||||
})
|
||||
}
|
||||
|
||||
func (s *SiteSettingsHandler) UpdateGeneral(c fiber.Ctx) error {
|
||||
site := c.Locals("site").(models.Site)
|
||||
|
||||
var params sites.UpdateSiteSettingsParams
|
||||
if err := c.Bind().Body(¶ms); err != nil {
|
||||
return err
|
||||
}
|
||||
params.SiteID = site.ID
|
||||
|
||||
if _, err := s.SiteService.UpdateSiteSettings(c.Context(), params); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/settings", +site.ID))
|
||||
}
|
||||
|
||||
func (ph PostsHandler) Rebuild(c fiber.Ctx) error {
|
||||
site := c.Locals("site").(models.Site)
|
||||
|
||||
if err := ph.PostService.RebuildSite(c.Context()); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return accepts(c, json(func() any {
|
||||
return fiber.Map{}
|
||||
}), html(func(c fiber.Ctx) error {
|
||||
return c.Redirect().To(fmt.Sprintf("/sites/%v/settings", +site.ID))
|
||||
}))
|
||||
}
|
||||
|
|
@ -162,24 +162,3 @@ func (uh UploadsHandler) UploadComplete(c fiber.Ctx) error {
|
|||
|
||||
return c.Status(fiber.StatusAccepted).JSON(fiber.Map{})
|
||||
}
|
||||
|
||||
func (uh UploadsHandler) Edit(c fiber.Ctx) error {
|
||||
uploadIDStr := c.Params("uploadID")
|
||||
if uploadIDStr == "" {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
uploadID, err := strconv.ParseInt(uploadIDStr, 10, 64)
|
||||
if err != nil {
|
||||
return fiber.ErrBadRequest
|
||||
}
|
||||
|
||||
upload, err := uh.UploadsService.FetchUpload(c.Context(), uploadID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return c.Render("uploads/edit", fiber.Map{
|
||||
"upload": upload,
|
||||
"bodyClass": "large-editor",
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,6 +2,5 @@ package simplecss
|
|||
|
||||
import "embed"
|
||||
|
||||
//go:embed templates/*.html
|
||||
//go:embed static/*
|
||||
//go:embed *.html
|
||||
var FS embed.FS
|
||||
|
|
|
|||
23
layouts/simplecss/layout_main.html
Normal file
23
layouts/simplecss/layout_main.html
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>{{ .Site.Title }}</title>
|
||||
<link rel="stylesheet" href="https://cdn.simplecss.org/simple.min.css">
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<h1>{{ .Site.Title }}</h1>
|
||||
<p>{{ .Site.Tagline }}</p>
|
||||
</header>
|
||||
|
||||
<main>
|
||||
{{ .Body }}
|
||||
</main>
|
||||
|
||||
<footer>
|
||||
<p>This site under construction.</p>
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
||||
5
layouts/simplecss/posts_list.html
Normal file
5
layouts/simplecss/posts_list.html
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
{{ range .Posts }}
|
||||
{{ if .Post.Title }}<h3>{{ .Post.Title }}</h3>{{ end }}
|
||||
{{ .HTML }}
|
||||
<a href="{{ url_abs .Path }}">{{ format_date .Post.PublishedAt }}</a>
|
||||
{{ end }}
|
||||
3
layouts/simplecss/posts_single.html
Normal file
3
layouts/simplecss/posts_single.html
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
{{ if .Post.Title }}<h3>{{ .Post.Title }}</h3>{{ end }}
|
||||
{{ .HTML }}
|
||||
<a href="{{ url_abs .Path }}">{{ format_date .Post.PublishedAt }}</a>
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
.h-entry {
|
||||
margin-block-start: 1.5rem;
|
||||
margin-block-end: 2.5rem;
|
||||
}
|
||||
|
||||
.post-meta {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: space-between;
|
||||
font-size: 0.95rem;
|
||||
}
|
||||
|
||||
.post-meta a {
|
||||
color: var(--text-light);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.post-meta a:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.post-categories {
|
||||
display: inline-flex;
|
||||
gap: 0.5rem;
|
||||
}
|
||||
|
||||
.post-categories a:before {
|
||||
content: "#";
|
||||
}
|
||||
|
||||
/* Category list */
|
||||
|
||||
ul.category-list {
|
||||
list-style: none;
|
||||
padding-inline-start: 0;
|
||||
}
|
||||
|
||||
ul.category-list li {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
|
||||
justify-content: start;
|
||||
gap: 4rem;
|
||||
}
|
||||
|
||||
ul.category-list span.category-list-name {
|
||||
min-width: 15vw;
|
||||
}
|
||||
|
||||
/* Category single */
|
||||
|
||||
.category-description {
|
||||
margin-block-start: 1.5rem;
|
||||
margin-block-end: 2.5rem;
|
||||
}
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
<div class="post-meta">
|
||||
<a href="{{ url_abs .Path }}">{{ format_date .Post.PublishedAt }}</a>
|
||||
{{ if .Categories }}
|
||||
<div class="post-categories">
|
||||
{{ range .Categories }}
|
||||
<a href="{{ url_abs (printf "/categories/%s" .Slug) }}">{{ .Name }}</a>
|
||||
{{ end }}
|
||||
</div>
|
||||
{{ end }}
|
||||
</div>
|
||||
|
|
@ -1,9 +0,0 @@
|
|||
<h2>Categories</h2>
|
||||
<ul class="category-list">
|
||||
{{ range .Categories }}
|
||||
<li>
|
||||
<span class="category-list-name"><a href="{{ url_abs .Path }}">{{ .Name }}</a> ({{ .PostCount }})</span>
|
||||
{{ if .DescriptionBrief }}<small>{{ .DescriptionBrief }}</small>{{ end }}
|
||||
</li>
|
||||
{{ end }}
|
||||
</ul>
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
<h2>{{ .Category.Name }}</h2>
|
||||
{{ if .DescriptionHTML }}
|
||||
<div class="notice category-description">{{ .DescriptionHTML }}</div>
|
||||
{{ end }}
|
||||
{{ range .Posts }}
|
||||
<div class="h-entry">
|
||||
{{ if .Post.Title }}<h3>{{ .Post.Title }}</h3>{{ end }}
|
||||
{{ .HTML }}
|
||||
{{ template "_post_meta.html" . }}
|
||||
</div>
|
||||
{{ end }}
|
||||
{{ if or .PrevURL .NextURL }}
|
||||
<nav class="pagination">
|
||||
{{ if .PrevURL }}<a href="{{ url_abs .PrevURL }}">← Newer posts</a>{{ end }}
|
||||
{{ if .NextURL }}<a href="{{ url_abs .NextURL }}">Older posts →</a>{{ end }}
|
||||
</nav>
|
||||
{{ end }}
|
||||
|
|
@ -1,33 +0,0 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<title>{{ .Site.Title }}</title>
|
||||
<link rel="alternate" type="application/rss+xml" title="RSS Feed" href="{{ url_abs "/feed.xml" }}"/>
|
||||
<link rel="alternate" type="application/json" title="JSON feed" href="{{ url_abs "/feed.json" }}"/>
|
||||
<link rel="stylesheet" href="https://cdn.simplecss.org/simple.min.css">
|
||||
<link rel="stylesheet" href="{{ url_abs "/static/style.css" }}">
|
||||
</head>
|
||||
<body>
|
||||
<header>
|
||||
<h1>{{ .Site.Title }}</h1>
|
||||
<p>{{ .Site.Tagline }}</p>
|
||||
{{ if .Site.NavItems }}
|
||||
<nav>
|
||||
{{ range .Site.NavItems }}
|
||||
{{ if .ShowInNav }}<a href="{{ url_abs .Slug }}">{{ .Title }}</a>{{ end }}
|
||||
{{ end }}
|
||||
</nav>
|
||||
{{ end }}
|
||||
</header>
|
||||
|
||||
<main>
|
||||
{{ .Body }}
|
||||
</main>
|
||||
|
||||
<footer>
|
||||
<p>This site under construction.</p>
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
{{ if .Page.Title }}<h2>{{ .Page.Title }}</h2>{{ end }}
|
||||
{{ .HTML }}
|
||||
|
|
@ -1,14 +0,0 @@
|
|||
{{ range .Posts }}
|
||||
<div class="h-entry">
|
||||
{{ if .Post.Title }}<h3>{{ .Post.Title }}</h3>{{ end }}
|
||||
{{ .HTML }}
|
||||
|
||||
{{ template "_post_meta.html" . }}
|
||||
</div>
|
||||
{{ end }}
|
||||
{{ if or .PrevURL .NextURL }}
|
||||
<nav class="pagination">
|
||||
{{ if .PrevURL }}<a href="{{ url_abs .PrevURL }}">← Newer posts</a>{{ end }}
|
||||
{{ if .NextURL }}<a href="{{ url_abs .NextURL }}">Older posts →</a>{{ end }}
|
||||
</nav>
|
||||
{{ end }}
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
<div class="h-entry">
|
||||
{{ if .Post.Title }}<h3>{{ .Post.Title }}</h3>{{ end }}
|
||||
{{ .HTML }}
|
||||
{{ template "_post_meta.html" . }}
|
||||
</div>
|
||||
|
|
@ -1,61 +0,0 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
type Category struct {
|
||||
ID int64 `json:"id"`
|
||||
SiteID int64 `json:"site_id"`
|
||||
GUID string `json:"guid"`
|
||||
Name string `json:"name"`
|
||||
Slug string `json:"slug"`
|
||||
Description string `json:"description"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
// CategoryWithCount is a Category plus the count of published posts in it.
|
||||
type CategoryWithCount struct {
|
||||
Category
|
||||
PostCount int
|
||||
DescriptionBrief string
|
||||
}
|
||||
|
||||
// GenerateCategorySlug creates a URL-safe slug from a category name.
|
||||
// e.g. "Go Programming" -> "go-programming"
|
||||
func GenerateCategorySlug(name string) string {
|
||||
var sb strings.Builder
|
||||
prevDash := false
|
||||
for _, c := range strings.TrimSpace(name) {
|
||||
if unicode.IsLetter(c) || unicode.IsNumber(c) {
|
||||
sb.WriteRune(unicode.ToLower(c))
|
||||
prevDash = false
|
||||
} else if unicode.IsSpace(c) || c == '-' || c == '_' {
|
||||
if !prevDash && sb.Len() > 0 {
|
||||
sb.WriteRune('-')
|
||||
prevDash = true
|
||||
}
|
||||
}
|
||||
}
|
||||
result := sb.String()
|
||||
return strings.TrimRight(result, "-")
|
||||
}
|
||||
|
||||
// BriefDescription returns the first sentence or line of the description.
|
||||
func BriefDescription(desc string) string {
|
||||
if desc == "" {
|
||||
return ""
|
||||
}
|
||||
for i, c := range desc {
|
||||
if c == '\n' {
|
||||
return desc[:i]
|
||||
}
|
||||
if c == '.' && i+1 < len(desc) {
|
||||
return desc[:i+1]
|
||||
}
|
||||
}
|
||||
return desc
|
||||
}
|
||||
|
|
@ -1,28 +0,0 @@
|
|||
package models_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
)
|
||||
|
||||
func TestGenerateCategorySlug(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
want string
|
||||
}{
|
||||
{"Go Programming", "go-programming"},
|
||||
{" Travel ", "travel"},
|
||||
{"hello---world", "hello-world"},
|
||||
{"UPPER CASE", "upper-case"},
|
||||
{"one", "one"},
|
||||
{"with_underscores", "with-underscores"},
|
||||
{"special!@#chars", "specialchars"},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
assert.Equal(t, tt.want, models.GenerateCategorySlug(tt.name))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -7,5 +7,3 @@ var PermissionError = errors.New("permission denied")
|
|||
var NotFoundError = errors.New("not found")
|
||||
var SiteRequiredError = errors.New("site required")
|
||||
var DeleteDebounceError = errors.New("permanent delete too soon, try again in a few seconds")
|
||||
var SlugConflictError = errors.New("a record with this slug already exists")
|
||||
var UnsupportedImageFormat = errors.New("unsupported image format")
|
||||
|
|
|
|||
|
|
@ -7,8 +7,8 @@ import (
|
|||
func TestNewNanoID(t *testing.T) {
|
||||
id := NewNanoID()
|
||||
|
||||
if len(id) != 16 {
|
||||
t.Errorf("Expected ID length of 16, got %d", len(id))
|
||||
if len(id) != 12 {
|
||||
t.Errorf("Expected ID length of 12, got %d", len(id))
|
||||
}
|
||||
|
||||
if id == "" {
|
||||
|
|
|
|||
|
|
@ -1,62 +0,0 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"crypto/md5"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type ImageEditSession struct {
|
||||
GUID string `json:"guid"`
|
||||
SiteID int64 `json:"siteId"`
|
||||
UserID int64 `json:"userId"`
|
||||
BaseUploadID int64 `json:"baseUploadId"`
|
||||
ImageExt string `json:"imageExt"`
|
||||
CreatedAt time.Time `json:"createdAt"`
|
||||
UpdatedAt time.Time `json:"updatedAt"`
|
||||
Processors []ImageEditProcessor `json:"processors"`
|
||||
}
|
||||
|
||||
func (ieh ImageEditSession) PreviewURL() string {
|
||||
return fmt.Sprintf("/sites/%v/imageedit/%v/preview/%v", ieh.SiteID, ieh.GUID, ieh.Processors[len(ieh.Processors)-1].VersionID)
|
||||
}
|
||||
|
||||
func (ieh *ImageEditSession) RecalcVersionIDs() {
|
||||
for i, p := range ieh.Processors {
|
||||
if i == 0 {
|
||||
p.SetVersionID("")
|
||||
} else {
|
||||
p.SetVersionID(ieh.Processors[i-1].VersionID)
|
||||
}
|
||||
|
||||
ieh.Processors[i] = p
|
||||
}
|
||||
}
|
||||
|
||||
type ImageEditProcessor struct {
|
||||
ID string `json:"id"`
|
||||
Type string `json:"type"`
|
||||
Props json.RawMessage `json:"props"`
|
||||
|
||||
// VersionID is a unique hash of the particular processor. This includes the version ID of the previous processor,
|
||||
// thereby causing a change of one processor to affect the version IDs of processors down the line.
|
||||
VersionID string `json:"versionId"`
|
||||
}
|
||||
|
||||
func (ieh *ImageEditProcessor) SetVersionID(previousVersionID string) {
|
||||
var sb strings.Builder
|
||||
sb.WriteString(ieh.ID)
|
||||
sb.WriteString("-")
|
||||
sb.WriteString(previousVersionID)
|
||||
sb.WriteString("-")
|
||||
sb.WriteString(ieh.Type)
|
||||
sb.WriteString("-")
|
||||
sb.WriteString(string(ieh.Props))
|
||||
ieh.VersionID = fmt.Sprintf("%x", md5.Sum([]byte(sb.String())))
|
||||
}
|
||||
|
||||
type CopyUploadProps struct {
|
||||
UploadID int64 `json:"uploadId"`
|
||||
}
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
package models
|
||||
|
||||
type Maybe[T any] struct {
|
||||
Value T
|
||||
Err error
|
||||
}
|
||||
|
||||
func (m Maybe[T]) Get() (T, error) {
|
||||
return m.Value, m.Err
|
||||
}
|
||||
|
|
@ -1,45 +0,0 @@
|
|||
package models
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
const (
|
||||
PageTypeNormal = 0
|
||||
)
|
||||
|
||||
type Page struct {
|
||||
ID int64 `json:"id"`
|
||||
SiteID int64 `json:"site_id"`
|
||||
GUID string `json:"guid"`
|
||||
Title string `json:"title"`
|
||||
Slug string `json:"slug"`
|
||||
Body string `json:"body"`
|
||||
PageType int `json:"page_type"`
|
||||
ShowInNav bool `json:"show_in_nav"`
|
||||
SortOrder int `json:"sort_order"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
// GeneratePageSlug creates a URL-safe slug from a page title.
|
||||
// e.g. "About Me" -> "about-me"
|
||||
func GeneratePageSlug(title string) string {
|
||||
var sb strings.Builder
|
||||
prevDash := false
|
||||
for _, c := range strings.TrimSpace(title) {
|
||||
if unicode.IsLetter(c) || unicode.IsNumber(c) {
|
||||
sb.WriteRune(unicode.ToLower(c))
|
||||
prevDash = false
|
||||
} else if unicode.IsSpace(c) || c == '-' || c == '_' {
|
||||
if !prevDash && sb.Len() > 0 {
|
||||
sb.WriteRune('-')
|
||||
prevDash = true
|
||||
}
|
||||
}
|
||||
}
|
||||
result := sb.String()
|
||||
return strings.TrimRight(result, "-")
|
||||
}
|
||||
|
|
@ -1,26 +0,0 @@
|
|||
package models_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
)
|
||||
|
||||
func TestGeneratePageSlug(t *testing.T) {
|
||||
tests := []struct {
|
||||
title string
|
||||
want string
|
||||
}{
|
||||
{"About Me", "about-me"},
|
||||
{" Contact Us ", "contact-us"},
|
||||
{"Hello---World", "hello-world"},
|
||||
{"FAQ", "faq"},
|
||||
{"", ""},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.title, func(t *testing.T) {
|
||||
assert.Equal(t, tt.want, models.GeneratePageSlug(tt.title))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
@ -1,37 +0,0 @@
|
|||
package models
|
||||
|
||||
// PageInfo carries pagination state for templates.
|
||||
type PageInfo struct {
|
||||
CurrentPage int
|
||||
TotalPages int
|
||||
PostsPerPage int
|
||||
}
|
||||
|
||||
// HasPrevious returns true if there is a previous page.
|
||||
func (p PageInfo) HasPrevious() bool {
|
||||
return p.CurrentPage > 1
|
||||
}
|
||||
|
||||
// HasNext returns true if there is a next page.
|
||||
func (p PageInfo) HasNext() bool {
|
||||
return p.CurrentPage < p.TotalPages
|
||||
}
|
||||
|
||||
// PreviousPage returns the previous page number.
|
||||
func (p PageInfo) PreviousPage() int {
|
||||
return p.CurrentPage - 1
|
||||
}
|
||||
|
||||
// NextPage returns the next page number.
|
||||
func (p PageInfo) NextPage() int {
|
||||
return p.CurrentPage + 1
|
||||
}
|
||||
|
||||
// Pages returns a slice of page numbers for rendering numbered pagination.
|
||||
func (p PageInfo) Pages() []int {
|
||||
pages := make([]int, p.TotalPages)
|
||||
for i := range pages {
|
||||
pages[i] = i + 1
|
||||
}
|
||||
return pages
|
||||
}
|
||||
|
|
@ -1,27 +1,16 @@
|
|||
package pubmodel
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"iter"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/pkg/modash/moslice"
|
||||
)
|
||||
|
||||
type Site struct {
|
||||
models.Site
|
||||
BaseURL string
|
||||
Posts []*models.Post
|
||||
Uploads []models.Upload
|
||||
|
||||
OpenUpload func(u models.Upload) (io.ReadCloser, error)
|
||||
PostIter func(ctx context.Context) iter.Seq[models.Maybe[*models.Post]]
|
||||
Categories []models.CategoryWithCount
|
||||
PostIterByCategory func(ctx context.Context, categoryID int64) iter.Seq[models.Maybe[*models.Post]]
|
||||
CategoriesOfPost func(ctx context.Context, postID int64) ([]*models.Category, error)
|
||||
Pages []*models.Page
|
||||
}
|
||||
|
||||
func (s Site) NavItems() []*models.Page {
|
||||
return moslice.Filter(s.Pages, func(p *models.Page) bool { return p.ShowInNav })
|
||||
OpenUpload func(u models.Upload) (io.ReadCloser, error)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,10 +27,8 @@ type Site struct {
|
|||
GUID string
|
||||
Created time.Time
|
||||
|
||||
Title string
|
||||
Tagline string
|
||||
Timezone string
|
||||
PostsPerPage int
|
||||
Title string
|
||||
Tagline string
|
||||
}
|
||||
|
||||
type SitePublishTarget struct {
|
||||
|
|
|
|||
101
package-lock.json
generated
101
package-lock.json
generated
|
|
@ -7,9 +7,7 @@
|
|||
"dependencies": {
|
||||
"@hotwired/stimulus": "^3.2.2",
|
||||
"bootstrap": "^5.3.8",
|
||||
"esbuild-sass-plugin": "^3.6.0",
|
||||
"feather-icons": "^4.29.2",
|
||||
"handlebars": "^4.7.8"
|
||||
"esbuild-sass-plugin": "^3.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"esbuild": "0.27.3"
|
||||
|
|
@ -785,12 +783,6 @@
|
|||
"url": "https://paulmillr.com/funding/"
|
||||
}
|
||||
},
|
||||
"node_modules/classnames": {
|
||||
"version": "2.5.1",
|
||||
"resolved": "https://registry.npmjs.org/classnames/-/classnames-2.5.1.tgz",
|
||||
"integrity": "sha512-saHYOzhIQs6wy2sVxTM6bUDsQO4F50V9RQ22qBpEdCW+I+/Wmke2HOl6lS6dTpdxVhb88/I6+Hs+438c3lfUow==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/colorjs.io": {
|
||||
"version": "0.5.2",
|
||||
"resolved": "https://registry.npmjs.org/colorjs.io/-/colorjs.io-0.5.2.tgz",
|
||||
|
|
@ -798,17 +790,6 @@
|
|||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/core-js": {
|
||||
"version": "3.49.0",
|
||||
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.49.0.tgz",
|
||||
"integrity": "sha512-es1U2+YTtzpwkxVLwAFdSpaIMyQaq0PBgm3YD1W3Qpsn1NAmO3KSgZfu+oGSWVu6NvLHoHCV/aYcsE5wiB7ALg==",
|
||||
"hasInstallScript": true,
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/core-js"
|
||||
}
|
||||
},
|
||||
"node_modules/detect-libc": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz",
|
||||
|
|
@ -874,16 +855,6 @@
|
|||
"sass-embedded": "^1.97.2"
|
||||
}
|
||||
},
|
||||
"node_modules/feather-icons": {
|
||||
"version": "4.29.2",
|
||||
"resolved": "https://registry.npmjs.org/feather-icons/-/feather-icons-4.29.2.tgz",
|
||||
"integrity": "sha512-0TaCFTnBTVCz6U+baY2UJNKne5ifGh7sMG4ZC2LoBWCZdIyPa+y6UiR4lEYGws1JOFWdee8KAsAIvu0VcXqiqA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"classnames": "^2.2.5",
|
||||
"core-js": "^3.1.3"
|
||||
}
|
||||
},
|
||||
"node_modules/function-bind": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
|
||||
|
|
@ -893,27 +864,6 @@
|
|||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/handlebars": {
|
||||
"version": "4.7.8",
|
||||
"resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz",
|
||||
"integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"minimist": "^1.2.5",
|
||||
"neo-async": "^2.6.2",
|
||||
"source-map": "^0.6.1",
|
||||
"wordwrap": "^1.0.0"
|
||||
},
|
||||
"bin": {
|
||||
"handlebars": "bin/handlebars"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.4.7"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"uglify-js": "^3.1.4"
|
||||
}
|
||||
},
|
||||
"node_modules/has-flag": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
||||
|
|
@ -937,9 +887,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/immutable": {
|
||||
"version": "5.1.5",
|
||||
"resolved": "https://registry.npmjs.org/immutable/-/immutable-5.1.5.tgz",
|
||||
"integrity": "sha512-t7xcm2siw+hlUM68I+UEOK+z84RzmN59as9DZ7P1l0994DKUWV7UXBMQZVxaoMSRQ+PBZbHCOoBt7a2wxOMt+A==",
|
||||
"version": "5.1.4",
|
||||
"resolved": "https://registry.npmjs.org/immutable/-/immutable-5.1.4.tgz",
|
||||
"integrity": "sha512-p6u1bG3YSnINT5RQmx/yRZBpenIl30kVxkTLDyHLIMk0gict704Q9n+thfDI7lTRm9vXdDYutVzXhzcThxTnXA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/is-core-module": {
|
||||
|
|
@ -980,21 +930,6 @@
|
|||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/minimist": {
|
||||
"version": "1.2.8",
|
||||
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
|
||||
"integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
|
||||
"license": "MIT",
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/neo-async": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
|
||||
"integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/node-addon-api": {
|
||||
"version": "7.1.1",
|
||||
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz",
|
||||
|
|
@ -1432,15 +1367,6 @@
|
|||
"node": ">=14.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map": {
|
||||
"version": "0.6.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
|
||||
"integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
|
||||
"license": "BSD-3-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/source-map-js": {
|
||||
"version": "1.2.1",
|
||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
||||
|
|
@ -1508,31 +1434,12 @@
|
|||
"license": "0BSD",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/uglify-js": {
|
||||
"version": "3.19.3",
|
||||
"resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz",
|
||||
"integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==",
|
||||
"license": "BSD-2-Clause",
|
||||
"optional": true,
|
||||
"bin": {
|
||||
"uglifyjs": "bin/uglifyjs"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/varint": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/varint/-/varint-6.0.0.tgz",
|
||||
"integrity": "sha512-cXEIW6cfr15lFv563k4GuVuW/fiwjknytD37jIOLSdSWuOI6WnO/oKwmP2FQTU2l01LP8/M5TSAJpzUaGe3uWg==",
|
||||
"license": "MIT",
|
||||
"peer": true
|
||||
},
|
||||
"node_modules/wordwrap": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
|
||||
"integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==",
|
||||
"license": "MIT"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -5,8 +5,6 @@
|
|||
"dependencies": {
|
||||
"@hotwired/stimulus": "^3.2.2",
|
||||
"bootstrap": "^5.3.8",
|
||||
"esbuild-sass-plugin": "^3.6.0",
|
||||
"feather-icons": "^4.29.2",
|
||||
"handlebars": "^4.7.8"
|
||||
"esbuild-sass-plugin": "^3.6.0"
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,132 +0,0 @@
|
|||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db/gen/sqlgen"
|
||||
)
|
||||
|
||||
func (db *Provider) SelectCategoriesOfSite(ctx context.Context, siteID int64) ([]*models.Category, error) {
|
||||
rows, err := db.queries.SelectCategoriesOfSite(ctx, siteID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cats := make([]*models.Category, len(rows))
|
||||
for i, row := range rows {
|
||||
cats[i] = dbCategoryToCategory(row)
|
||||
}
|
||||
return cats, nil
|
||||
}
|
||||
|
||||
func (db *Provider) SelectCategory(ctx context.Context, id int64) (*models.Category, error) {
|
||||
row, err := db.queries.SelectCategory(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dbCategoryToCategory(row), nil
|
||||
}
|
||||
|
||||
func (db *Provider) SelectCategoryBySlugAndSite(ctx context.Context, siteID int64, slug string) (*models.Category, error) {
|
||||
row, err := db.queries.SelectCategoryBySlugAndSite(ctx, sqlgen.SelectCategoryBySlugAndSiteParams{
|
||||
SiteID: siteID,
|
||||
Slug: slug,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dbCategoryToCategory(row), nil
|
||||
}
|
||||
|
||||
func (db *Provider) SaveCategory(ctx context.Context, cat *models.Category) error {
|
||||
if cat.ID == 0 {
|
||||
newID, err := db.queries.InsertCategory(ctx, sqlgen.InsertCategoryParams{
|
||||
SiteID: cat.SiteID,
|
||||
Guid: cat.GUID,
|
||||
Name: cat.Name,
|
||||
Slug: cat.Slug,
|
||||
Description: cat.Description,
|
||||
CreatedAt: timeToInt(cat.CreatedAt),
|
||||
UpdatedAt: timeToInt(cat.UpdatedAt),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cat.ID = newID
|
||||
return nil
|
||||
}
|
||||
|
||||
return db.queries.UpdateCategory(ctx, sqlgen.UpdateCategoryParams{
|
||||
ID: cat.ID,
|
||||
Name: cat.Name,
|
||||
Slug: cat.Slug,
|
||||
Description: cat.Description,
|
||||
UpdatedAt: timeToInt(cat.UpdatedAt),
|
||||
})
|
||||
}
|
||||
|
||||
func (db *Provider) DeleteCategory(ctx context.Context, id int64) error {
|
||||
return db.queries.DeleteCategory(ctx, id)
|
||||
}
|
||||
|
||||
func (db *Provider) SelectCategoriesOfPost(ctx context.Context, postID int64) ([]*models.Category, error) {
|
||||
rows, err := db.queries.SelectCategoriesOfPost(ctx, postID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
cats := make([]*models.Category, len(rows))
|
||||
for i, row := range rows {
|
||||
cats[i] = dbCategoryToCategory(row)
|
||||
}
|
||||
return cats, nil
|
||||
}
|
||||
|
||||
func (db *Provider) SelectPublishedPostsOfCategory(ctx context.Context, categoryID int64, pp PagingParams) ([]*models.Post, error) {
|
||||
rows, err := db.queries.SelectPublishedPostsOfCategory(ctx, sqlgen.SelectPublishedPostsOfCategoryParams{
|
||||
CategoryID: categoryID,
|
||||
Limit: pp.Limit,
|
||||
Offset: pp.Offset,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
posts := make([]*models.Post, len(rows))
|
||||
for i, row := range rows {
|
||||
posts[i] = dbPostToPost(row)
|
||||
}
|
||||
return posts, nil
|
||||
}
|
||||
|
||||
func (db *Provider) CountPostsOfCategory(ctx context.Context, categoryID int64) (int64, error) {
|
||||
return db.queries.CountPostsOfCategory(ctx, categoryID)
|
||||
}
|
||||
|
||||
// SetPostCategories replaces all category associations for a post.
|
||||
func (db *Provider) SetPostCategories(ctx context.Context, postID int64, categoryIDs []int64) error {
|
||||
if err := db.queries.DeletePostCategoriesByPost(ctx, postID); err != nil {
|
||||
return err
|
||||
}
|
||||
for _, catID := range categoryIDs {
|
||||
if err := db.queries.InsertPostCategory(ctx, sqlgen.InsertPostCategoryParams{
|
||||
PostID: postID,
|
||||
CategoryID: catID,
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func dbCategoryToCategory(row sqlgen.Category) *models.Category {
|
||||
return &models.Category{
|
||||
ID: row.ID,
|
||||
SiteID: row.SiteID,
|
||||
GUID: row.Guid,
|
||||
Name: row.Name,
|
||||
Slug: row.Slug,
|
||||
Description: row.Description,
|
||||
CreatedAt: time.Unix(row.CreatedAt, 0).UTC(),
|
||||
UpdatedAt: time.Unix(row.UpdatedAt, 0).UTC(),
|
||||
}
|
||||
}
|
||||
|
|
@ -1,305 +0,0 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.30.0
|
||||
// source: categories.sql
|
||||
|
||||
package sqlgen
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
const countPostsOfCategory = `-- name: CountPostsOfCategory :one
|
||||
SELECT COUNT(*) FROM posts p
|
||||
INNER JOIN post_categories pc ON pc.post_id = p.id
|
||||
WHERE pc.category_id = ? AND p.state = 0 AND p.deleted_at = 0
|
||||
`
|
||||
|
||||
func (q *Queries) CountPostsOfCategory(ctx context.Context, categoryID int64) (int64, error) {
|
||||
row := q.db.QueryRowContext(ctx, countPostsOfCategory, categoryID)
|
||||
var count int64
|
||||
err := row.Scan(&count)
|
||||
return count, err
|
||||
}
|
||||
|
||||
const deleteCategory = `-- name: DeleteCategory :exec
|
||||
DELETE FROM categories WHERE id = ?
|
||||
`
|
||||
|
||||
func (q *Queries) DeleteCategory(ctx context.Context, id int64) error {
|
||||
_, err := q.db.ExecContext(ctx, deleteCategory, id)
|
||||
return err
|
||||
}
|
||||
|
||||
const deletePostCategoriesByPost = `-- name: DeletePostCategoriesByPost :exec
|
||||
DELETE FROM post_categories WHERE post_id = ?
|
||||
`
|
||||
|
||||
func (q *Queries) DeletePostCategoriesByPost(ctx context.Context, postID int64) error {
|
||||
_, err := q.db.ExecContext(ctx, deletePostCategoriesByPost, postID)
|
||||
return err
|
||||
}
|
||||
|
||||
const insertCategory = `-- name: InsertCategory :one
|
||||
INSERT INTO categories (
|
||||
site_id, guid, name, slug, description, created_at, updated_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
RETURNING id
|
||||
`
|
||||
|
||||
type InsertCategoryParams struct {
|
||||
SiteID int64
|
||||
Guid string
|
||||
Name string
|
||||
Slug string
|
||||
Description string
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
}
|
||||
|
||||
func (q *Queries) InsertCategory(ctx context.Context, arg InsertCategoryParams) (int64, error) {
|
||||
row := q.db.QueryRowContext(ctx, insertCategory,
|
||||
arg.SiteID,
|
||||
arg.Guid,
|
||||
arg.Name,
|
||||
arg.Slug,
|
||||
arg.Description,
|
||||
arg.CreatedAt,
|
||||
arg.UpdatedAt,
|
||||
)
|
||||
var id int64
|
||||
err := row.Scan(&id)
|
||||
return id, err
|
||||
}
|
||||
|
||||
const insertPostCategory = `-- name: InsertPostCategory :exec
|
||||
INSERT OR IGNORE INTO post_categories (post_id, category_id) VALUES (?, ?)
|
||||
`
|
||||
|
||||
type InsertPostCategoryParams struct {
|
||||
PostID int64
|
||||
CategoryID int64
|
||||
}
|
||||
|
||||
func (q *Queries) InsertPostCategory(ctx context.Context, arg InsertPostCategoryParams) error {
|
||||
_, err := q.db.ExecContext(ctx, insertPostCategory, arg.PostID, arg.CategoryID)
|
||||
return err
|
||||
}
|
||||
|
||||
const selectCategoriesOfPost = `-- name: SelectCategoriesOfPost :many
|
||||
SELECT c.id, c.site_id, c.guid, c.name, c.slug, c.description, c.created_at, c.updated_at FROM categories c
|
||||
INNER JOIN post_categories pc ON pc.category_id = c.id
|
||||
WHERE pc.post_id = ?
|
||||
ORDER BY c.name ASC
|
||||
`
|
||||
|
||||
func (q *Queries) SelectCategoriesOfPost(ctx context.Context, postID int64) ([]Category, error) {
|
||||
rows, err := q.db.QueryContext(ctx, selectCategoriesOfPost, postID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Category
|
||||
for rows.Next() {
|
||||
var i Category
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Name,
|
||||
&i.Slug,
|
||||
&i.Description,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const selectCategoriesOfSite = `-- name: SelectCategoriesOfSite :many
|
||||
SELECT id, site_id, guid, name, slug, description, created_at, updated_at FROM categories
|
||||
WHERE site_id = ? ORDER BY name ASC
|
||||
`
|
||||
|
||||
func (q *Queries) SelectCategoriesOfSite(ctx context.Context, siteID int64) ([]Category, error) {
|
||||
rows, err := q.db.QueryContext(ctx, selectCategoriesOfSite, siteID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Category
|
||||
for rows.Next() {
|
||||
var i Category
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Name,
|
||||
&i.Slug,
|
||||
&i.Description,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const selectCategory = `-- name: SelectCategory :one
|
||||
SELECT id, site_id, guid, name, slug, description, created_at, updated_at FROM categories WHERE id = ? LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) SelectCategory(ctx context.Context, id int64) (Category, error) {
|
||||
row := q.db.QueryRowContext(ctx, selectCategory, id)
|
||||
var i Category
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Name,
|
||||
&i.Slug,
|
||||
&i.Description,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectCategoryByGUID = `-- name: SelectCategoryByGUID :one
|
||||
SELECT id, site_id, guid, name, slug, description, created_at, updated_at FROM categories WHERE guid = ? LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) SelectCategoryByGUID(ctx context.Context, guid string) (Category, error) {
|
||||
row := q.db.QueryRowContext(ctx, selectCategoryByGUID, guid)
|
||||
var i Category
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Name,
|
||||
&i.Slug,
|
||||
&i.Description,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectCategoryBySlugAndSite = `-- name: SelectCategoryBySlugAndSite :one
|
||||
SELECT id, site_id, guid, name, slug, description, created_at, updated_at FROM categories WHERE site_id = ? AND slug = ? LIMIT 1
|
||||
`
|
||||
|
||||
type SelectCategoryBySlugAndSiteParams struct {
|
||||
SiteID int64
|
||||
Slug string
|
||||
}
|
||||
|
||||
func (q *Queries) SelectCategoryBySlugAndSite(ctx context.Context, arg SelectCategoryBySlugAndSiteParams) (Category, error) {
|
||||
row := q.db.QueryRowContext(ctx, selectCategoryBySlugAndSite, arg.SiteID, arg.Slug)
|
||||
var i Category
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Name,
|
||||
&i.Slug,
|
||||
&i.Description,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectPublishedPostsOfCategory = `-- name: SelectPublishedPostsOfCategory :many
|
||||
SELECT p.id, p.site_id, p.state, p.guid, p.title, p.body, p.slug, p.created_at, p.updated_at, p.published_at, p.deleted_at FROM posts p
|
||||
INNER JOIN post_categories pc ON pc.post_id = p.id
|
||||
WHERE pc.category_id = ? AND p.state = 0 AND p.deleted_at = 0
|
||||
ORDER BY p.published_at DESC
|
||||
LIMIT ? OFFSET ?
|
||||
`
|
||||
|
||||
type SelectPublishedPostsOfCategoryParams struct {
|
||||
CategoryID int64
|
||||
Limit int64
|
||||
Offset int64
|
||||
}
|
||||
|
||||
func (q *Queries) SelectPublishedPostsOfCategory(ctx context.Context, arg SelectPublishedPostsOfCategoryParams) ([]Post, error) {
|
||||
rows, err := q.db.QueryContext(ctx, selectPublishedPostsOfCategory, arg.CategoryID, arg.Limit, arg.Offset)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Post
|
||||
for rows.Next() {
|
||||
var i Post
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.State,
|
||||
&i.Guid,
|
||||
&i.Title,
|
||||
&i.Body,
|
||||
&i.Slug,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.PublishedAt,
|
||||
&i.DeletedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const updateCategory = `-- name: UpdateCategory :exec
|
||||
UPDATE categories SET
|
||||
name = ?,
|
||||
slug = ?,
|
||||
description = ?,
|
||||
updated_at = ?
|
||||
WHERE id = ?
|
||||
`
|
||||
|
||||
type UpdateCategoryParams struct {
|
||||
Name string
|
||||
Slug string
|
||||
Description string
|
||||
UpdatedAt int64
|
||||
ID int64
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateCategory(ctx context.Context, arg UpdateCategoryParams) error {
|
||||
_, err := q.db.ExecContext(ctx, updateCategory,
|
||||
arg.Name,
|
||||
arg.Slug,
|
||||
arg.Description,
|
||||
arg.UpdatedAt,
|
||||
arg.ID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
|
@ -4,31 +4,6 @@
|
|||
|
||||
package sqlgen
|
||||
|
||||
type Category struct {
|
||||
ID int64
|
||||
SiteID int64
|
||||
Guid string
|
||||
Name string
|
||||
Slug string
|
||||
Description string
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
}
|
||||
|
||||
type Page struct {
|
||||
ID int64
|
||||
SiteID int64
|
||||
Guid string
|
||||
Title string
|
||||
Slug string
|
||||
Body string
|
||||
PageType int64
|
||||
ShowInNav int64
|
||||
SortOrder int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
}
|
||||
|
||||
type PendingUpload struct {
|
||||
ID int64
|
||||
SiteID int64
|
||||
|
|
@ -54,11 +29,6 @@ type Post struct {
|
|||
DeletedAt int64
|
||||
}
|
||||
|
||||
type PostCategory struct {
|
||||
PostID int64
|
||||
CategoryID int64
|
||||
}
|
||||
|
||||
type PublishTarget struct {
|
||||
ID int64
|
||||
SiteID int64
|
||||
|
|
@ -71,14 +41,12 @@ type PublishTarget struct {
|
|||
}
|
||||
|
||||
type Site struct {
|
||||
ID int64
|
||||
OwnerID int64
|
||||
Guid string
|
||||
Title string
|
||||
Tagline string
|
||||
CreatedAt int64
|
||||
Timezone string
|
||||
PostsPerPage int64
|
||||
ID int64
|
||||
OwnerID int64
|
||||
Guid string
|
||||
Title string
|
||||
Tagline string
|
||||
CreatedAt int64
|
||||
}
|
||||
|
||||
type Upload struct {
|
||||
|
|
|
|||
|
|
@ -1,219 +0,0 @@
|
|||
// Code generated by sqlc. DO NOT EDIT.
|
||||
// versions:
|
||||
// sqlc v1.30.0
|
||||
// source: pages.sql
|
||||
|
||||
package sqlgen
|
||||
|
||||
import (
|
||||
"context"
|
||||
)
|
||||
|
||||
const deletePage = `-- name: DeletePage :exec
|
||||
DELETE FROM pages WHERE id = ?
|
||||
`
|
||||
|
||||
func (q *Queries) DeletePage(ctx context.Context, id int64) error {
|
||||
_, err := q.db.ExecContext(ctx, deletePage, id)
|
||||
return err
|
||||
}
|
||||
|
||||
const insertPage = `-- name: InsertPage :one
|
||||
INSERT INTO pages (
|
||||
site_id, guid, title, slug, body, page_type, show_in_nav, sort_order, created_at, updated_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
RETURNING id
|
||||
`
|
||||
|
||||
type InsertPageParams struct {
|
||||
SiteID int64
|
||||
Guid string
|
||||
Title string
|
||||
Slug string
|
||||
Body string
|
||||
PageType int64
|
||||
ShowInNav int64
|
||||
SortOrder int64
|
||||
CreatedAt int64
|
||||
UpdatedAt int64
|
||||
}
|
||||
|
||||
func (q *Queries) InsertPage(ctx context.Context, arg InsertPageParams) (int64, error) {
|
||||
row := q.db.QueryRowContext(ctx, insertPage,
|
||||
arg.SiteID,
|
||||
arg.Guid,
|
||||
arg.Title,
|
||||
arg.Slug,
|
||||
arg.Body,
|
||||
arg.PageType,
|
||||
arg.ShowInNav,
|
||||
arg.SortOrder,
|
||||
arg.CreatedAt,
|
||||
arg.UpdatedAt,
|
||||
)
|
||||
var id int64
|
||||
err := row.Scan(&id)
|
||||
return id, err
|
||||
}
|
||||
|
||||
const selectPage = `-- name: SelectPage :one
|
||||
SELECT id, site_id, guid, title, slug, body, page_type, show_in_nav, sort_order, created_at, updated_at FROM pages WHERE id = ? LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) SelectPage(ctx context.Context, id int64) (Page, error) {
|
||||
row := q.db.QueryRowContext(ctx, selectPage, id)
|
||||
var i Page
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Title,
|
||||
&i.Slug,
|
||||
&i.Body,
|
||||
&i.PageType,
|
||||
&i.ShowInNav,
|
||||
&i.SortOrder,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectPageByGUID = `-- name: SelectPageByGUID :one
|
||||
SELECT id, site_id, guid, title, slug, body, page_type, show_in_nav, sort_order, created_at, updated_at FROM pages WHERE guid = ? LIMIT 1
|
||||
`
|
||||
|
||||
func (q *Queries) SelectPageByGUID(ctx context.Context, guid string) (Page, error) {
|
||||
row := q.db.QueryRowContext(ctx, selectPageByGUID, guid)
|
||||
var i Page
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Title,
|
||||
&i.Slug,
|
||||
&i.Body,
|
||||
&i.PageType,
|
||||
&i.ShowInNav,
|
||||
&i.SortOrder,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectPageBySlugAndSite = `-- name: SelectPageBySlugAndSite :one
|
||||
SELECT id, site_id, guid, title, slug, body, page_type, show_in_nav, sort_order, created_at, updated_at FROM pages WHERE site_id = ? AND slug = ? LIMIT 1
|
||||
`
|
||||
|
||||
type SelectPageBySlugAndSiteParams struct {
|
||||
SiteID int64
|
||||
Slug string
|
||||
}
|
||||
|
||||
func (q *Queries) SelectPageBySlugAndSite(ctx context.Context, arg SelectPageBySlugAndSiteParams) (Page, error) {
|
||||
row := q.db.QueryRowContext(ctx, selectPageBySlugAndSite, arg.SiteID, arg.Slug)
|
||||
var i Page
|
||||
err := row.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Title,
|
||||
&i.Slug,
|
||||
&i.Body,
|
||||
&i.PageType,
|
||||
&i.ShowInNav,
|
||||
&i.SortOrder,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectPagesOfSite = `-- name: SelectPagesOfSite :many
|
||||
SELECT id, site_id, guid, title, slug, body, page_type, show_in_nav, sort_order, created_at, updated_at FROM pages
|
||||
WHERE site_id = ? ORDER BY sort_order ASC
|
||||
`
|
||||
|
||||
func (q *Queries) SelectPagesOfSite(ctx context.Context, siteID int64) ([]Page, error) {
|
||||
rows, err := q.db.QueryContext(ctx, selectPagesOfSite, siteID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Page
|
||||
for rows.Next() {
|
||||
var i Page
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.Guid,
|
||||
&i.Title,
|
||||
&i.Slug,
|
||||
&i.Body,
|
||||
&i.PageType,
|
||||
&i.ShowInNav,
|
||||
&i.SortOrder,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const updatePage = `-- name: UpdatePage :exec
|
||||
UPDATE pages SET
|
||||
title = ?,
|
||||
slug = ?,
|
||||
body = ?,
|
||||
page_type = ?,
|
||||
show_in_nav = ?,
|
||||
updated_at = ?
|
||||
WHERE id = ?
|
||||
`
|
||||
|
||||
type UpdatePageParams struct {
|
||||
Title string
|
||||
Slug string
|
||||
Body string
|
||||
PageType int64
|
||||
ShowInNav int64
|
||||
UpdatedAt int64
|
||||
ID int64
|
||||
}
|
||||
|
||||
func (q *Queries) UpdatePage(ctx context.Context, arg UpdatePageParams) error {
|
||||
_, err := q.db.ExecContext(ctx, updatePage,
|
||||
arg.Title,
|
||||
arg.Slug,
|
||||
arg.Body,
|
||||
arg.PageType,
|
||||
arg.ShowInNav,
|
||||
arg.UpdatedAt,
|
||||
arg.ID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
||||
const updatePageSortOrder = `-- name: UpdatePageSortOrder :exec
|
||||
UPDATE pages SET sort_order = ? WHERE id = ?
|
||||
`
|
||||
|
||||
type UpdatePageSortOrderParams struct {
|
||||
SortOrder int64
|
||||
ID int64
|
||||
}
|
||||
|
||||
func (q *Queries) UpdatePageSortOrder(ctx context.Context, arg UpdatePageSortOrderParams) error {
|
||||
_, err := q.db.ExecContext(ctx, updatePageSortOrder, arg.SortOrder, arg.ID)
|
||||
return err
|
||||
}
|
||||
|
|
@ -9,28 +9,6 @@ import (
|
|||
"context"
|
||||
)
|
||||
|
||||
const countPostsOfSite = `-- name: CountPostsOfSite :one
|
||||
SELECT COUNT(*) FROM posts
|
||||
WHERE site_id = ?1 AND (
|
||||
CASE CAST (?2 AS TEXT)
|
||||
WHEN 'deleted' THEN deleted_at > 0
|
||||
ELSE deleted_at = 0
|
||||
END
|
||||
)
|
||||
`
|
||||
|
||||
type CountPostsOfSiteParams struct {
|
||||
SiteID int64
|
||||
PostFilter string
|
||||
}
|
||||
|
||||
func (q *Queries) CountPostsOfSite(ctx context.Context, arg CountPostsOfSiteParams) (int64, error) {
|
||||
row := q.db.QueryRowContext(ctx, countPostsOfSite, arg.SiteID, arg.PostFilter)
|
||||
var count int64
|
||||
err := row.Scan(&count)
|
||||
return count, err
|
||||
}
|
||||
|
||||
const hardDeletePost = `-- name: HardDeletePost :exec
|
||||
DELETE FROM posts WHERE id = ?
|
||||
`
|
||||
|
|
@ -145,76 +123,21 @@ func (q *Queries) SelectPostByGUID(ctx context.Context, guid string) (Post, erro
|
|||
const selectPostsOfSite = `-- name: SelectPostsOfSite :many
|
||||
SELECT id, site_id, state, guid, title, body, slug, created_at, updated_at, published_at, deleted_at
|
||||
FROM posts
|
||||
WHERE site_id = ?1 AND (
|
||||
WHERE site_id = ? AND (
|
||||
CASE CAST (?2 AS TEXT)
|
||||
WHEN 'deleted' THEN deleted_at > 0
|
||||
ELSE deleted_at = 0
|
||||
END
|
||||
) ORDER BY created_at DESC LIMIT ?4 OFFSET ?3
|
||||
) ORDER BY created_at DESC LIMIT 10
|
||||
`
|
||||
|
||||
type SelectPostsOfSiteParams struct {
|
||||
SiteID int64
|
||||
PostFilter string
|
||||
Offset int64
|
||||
Limit int64
|
||||
}
|
||||
|
||||
func (q *Queries) SelectPostsOfSite(ctx context.Context, arg SelectPostsOfSiteParams) ([]Post, error) {
|
||||
rows, err := q.db.QueryContext(ctx, selectPostsOfSite,
|
||||
arg.SiteID,
|
||||
arg.PostFilter,
|
||||
arg.Offset,
|
||||
arg.Limit,
|
||||
)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
var items []Post
|
||||
for rows.Next() {
|
||||
var i Post
|
||||
if err := rows.Scan(
|
||||
&i.ID,
|
||||
&i.SiteID,
|
||||
&i.State,
|
||||
&i.Guid,
|
||||
&i.Title,
|
||||
&i.Body,
|
||||
&i.Slug,
|
||||
&i.CreatedAt,
|
||||
&i.UpdatedAt,
|
||||
&i.PublishedAt,
|
||||
&i.DeletedAt,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
items = append(items, i)
|
||||
}
|
||||
if err := rows.Close(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const selectPublishedPostsOfSite = `-- name: SelectPublishedPostsOfSite :many
|
||||
SELECT id, site_id, state, guid, title, body, slug, created_at, updated_at, published_at, deleted_at
|
||||
FROM posts
|
||||
WHERE site_id = ?1 AND state = 0 AND deleted_at = 0
|
||||
ORDER BY published_at DESC LIMIT ?3 OFFSET ?2
|
||||
`
|
||||
|
||||
type SelectPublishedPostsOfSiteParams struct {
|
||||
SiteID int64
|
||||
Offset int64
|
||||
Limit int64
|
||||
}
|
||||
|
||||
func (q *Queries) SelectPublishedPostsOfSite(ctx context.Context, arg SelectPublishedPostsOfSiteParams) ([]Post, error) {
|
||||
rows, err := q.db.QueryContext(ctx, selectPublishedPostsOfSite, arg.SiteID, arg.Offset, arg.Limit)
|
||||
rows, err := q.db.QueryContext(ctx, selectPostsOfSite, arg.SiteID, arg.PostFilter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -27,21 +27,17 @@ INSERT INTO sites (
|
|||
guid,
|
||||
title,
|
||||
tagline,
|
||||
timezone,
|
||||
posts_per_page,
|
||||
created_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
) VALUES (?, ?, ?, ?, ?)
|
||||
RETURNING id
|
||||
`
|
||||
|
||||
type InsertSiteParams struct {
|
||||
OwnerID int64
|
||||
Guid string
|
||||
Title string
|
||||
Tagline string
|
||||
Timezone string
|
||||
PostsPerPage int64
|
||||
CreatedAt int64
|
||||
OwnerID int64
|
||||
Guid string
|
||||
Title string
|
||||
Tagline string
|
||||
CreatedAt int64
|
||||
}
|
||||
|
||||
func (q *Queries) InsertSite(ctx context.Context, arg InsertSiteParams) (int64, error) {
|
||||
|
|
@ -50,8 +46,6 @@ func (q *Queries) InsertSite(ctx context.Context, arg InsertSiteParams) (int64,
|
|||
arg.Guid,
|
||||
arg.Title,
|
||||
arg.Tagline,
|
||||
arg.Timezone,
|
||||
arg.PostsPerPage,
|
||||
arg.CreatedAt,
|
||||
)
|
||||
var id int64
|
||||
|
|
@ -104,7 +98,7 @@ func (q *Queries) SelectAllSitesWithOwners(ctx context.Context) ([]SelectAllSite
|
|||
}
|
||||
|
||||
const selectSiteByGUID = `-- name: SelectSiteByGUID :one
|
||||
SELECT id, owner_id, guid, title, tagline, created_at, timezone, posts_per_page FROM sites WHERE guid = ?
|
||||
SELECT id, owner_id, guid, title, tagline, created_at FROM sites WHERE guid = ?
|
||||
`
|
||||
|
||||
func (q *Queries) SelectSiteByGUID(ctx context.Context, guid string) (Site, error) {
|
||||
|
|
@ -117,14 +111,12 @@ func (q *Queries) SelectSiteByGUID(ctx context.Context, guid string) (Site, erro
|
|||
&i.Title,
|
||||
&i.Tagline,
|
||||
&i.CreatedAt,
|
||||
&i.Timezone,
|
||||
&i.PostsPerPage,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectSiteByID = `-- name: SelectSiteByID :one
|
||||
SELECT id, owner_id, guid, title, tagline, created_at, timezone, posts_per_page FROM sites WHERE id = ?
|
||||
SELECT id, owner_id, guid, title, tagline, created_at FROM sites WHERE id = ?
|
||||
`
|
||||
|
||||
func (q *Queries) SelectSiteByID(ctx context.Context, id int64) (Site, error) {
|
||||
|
|
@ -137,14 +129,12 @@ func (q *Queries) SelectSiteByID(ctx context.Context, id int64) (Site, error) {
|
|||
&i.Title,
|
||||
&i.Tagline,
|
||||
&i.CreatedAt,
|
||||
&i.Timezone,
|
||||
&i.PostsPerPage,
|
||||
)
|
||||
return i, err
|
||||
}
|
||||
|
||||
const selectSitesOwnedByUser = `-- name: SelectSitesOwnedByUser :many
|
||||
SELECT id, owner_id, guid, title, tagline, created_at, timezone, posts_per_page FROM sites WHERE owner_id = ? ORDER BY title ASC
|
||||
SELECT id, owner_id, guid, title, tagline, created_at FROM sites WHERE owner_id = ? ORDER BY title ASC
|
||||
`
|
||||
|
||||
func (q *Queries) SelectSitesOwnedByUser(ctx context.Context, ownerID int64) ([]Site, error) {
|
||||
|
|
@ -163,8 +153,6 @@ func (q *Queries) SelectSitesOwnedByUser(ctx context.Context, ownerID int64) ([]
|
|||
&i.Title,
|
||||
&i.Tagline,
|
||||
&i.CreatedAt,
|
||||
&i.Timezone,
|
||||
&i.PostsPerPage,
|
||||
); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
|
@ -178,26 +166,3 @@ func (q *Queries) SelectSitesOwnedByUser(ctx context.Context, ownerID int64) ([]
|
|||
}
|
||||
return items, nil
|
||||
}
|
||||
|
||||
const updateSite = `-- name: UpdateSite :exec
|
||||
UPDATE sites SET title = ?, tagline = ?, timezone = ?, posts_per_page = ? WHERE id = ?
|
||||
`
|
||||
|
||||
type UpdateSiteParams struct {
|
||||
Title string
|
||||
Tagline string
|
||||
Timezone string
|
||||
PostsPerPage int64
|
||||
ID int64
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateSite(ctx context.Context, arg UpdateSiteParams) error {
|
||||
_, err := q.db.ExecContext(ctx, updateSite,
|
||||
arg.Title,
|
||||
arg.Tagline,
|
||||
arg.Timezone,
|
||||
arg.PostsPerPage,
|
||||
arg.ID,
|
||||
)
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@ func (q *Queries) DeleteUpload(ctx context.Context, id int64) error {
|
|||
return err
|
||||
}
|
||||
|
||||
const insertUpload = `-- name: InsertUpload :one
|
||||
const insertUpload = `-- name: InsertUpload :exec
|
||||
INSERT INTO uploads (
|
||||
site_id,
|
||||
guid,
|
||||
|
|
@ -43,8 +43,8 @@ type InsertUploadParams struct {
|
|||
CreatedAt int64
|
||||
}
|
||||
|
||||
func (q *Queries) InsertUpload(ctx context.Context, arg InsertUploadParams) (int64, error) {
|
||||
row := q.db.QueryRowContext(ctx, insertUpload,
|
||||
func (q *Queries) InsertUpload(ctx context.Context, arg InsertUploadParams) error {
|
||||
_, err := q.db.ExecContext(ctx, insertUpload,
|
||||
arg.SiteID,
|
||||
arg.Guid,
|
||||
arg.MimeType,
|
||||
|
|
@ -54,9 +54,7 @@ func (q *Queries) InsertUpload(ctx context.Context, arg InsertUploadParams) (int
|
|||
arg.Alt,
|
||||
arg.CreatedAt,
|
||||
)
|
||||
var id int64
|
||||
err := row.Scan(&id)
|
||||
return id, err
|
||||
return err
|
||||
}
|
||||
|
||||
const selectUploadByID = `-- name: SelectUploadByID :one
|
||||
|
|
@ -156,17 +154,3 @@ func (q *Queries) UpdateUpload(ctx context.Context, arg UpdateUploadParams) erro
|
|||
_, err := q.db.ExecContext(ctx, updateUpload, arg.Alt, arg.ID)
|
||||
return err
|
||||
}
|
||||
|
||||
const updateUploadFileSize = `-- name: UpdateUploadFileSize :exec
|
||||
UPDATE uploads SET file_size = ? WHERE id = ?
|
||||
`
|
||||
|
||||
type UpdateUploadFileSizeParams struct {
|
||||
FileSize int64
|
||||
ID int64
|
||||
}
|
||||
|
||||
func (q *Queries) UpdateUploadFileSize(ctx context.Context, arg UpdateUploadFileSizeParams) error {
|
||||
_, err := q.db.ExecContext(ctx, updateUploadFileSize, arg.FileSize, arg.ID)
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,115 +0,0 @@
|
|||
package db
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db/gen/sqlgen"
|
||||
)
|
||||
|
||||
func (db *Provider) SelectPagesOfSite(ctx context.Context, siteID int64) ([]*models.Page, error) {
|
||||
rows, err := db.queries.SelectPagesOfSite(ctx, siteID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pages := make([]*models.Page, len(rows))
|
||||
for i, row := range rows {
|
||||
pages[i] = dbPageToPage(row)
|
||||
}
|
||||
return pages, nil
|
||||
}
|
||||
|
||||
func (db *Provider) SelectPage(ctx context.Context, id int64) (*models.Page, error) {
|
||||
row, err := db.queries.SelectPage(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dbPageToPage(row), nil
|
||||
}
|
||||
|
||||
func (db *Provider) SelectPageByGUID(ctx context.Context, guid string) (*models.Page, error) {
|
||||
row, err := db.queries.SelectPageByGUID(ctx, guid)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dbPageToPage(row), nil
|
||||
}
|
||||
|
||||
func (db *Provider) SelectPageBySlugAndSite(ctx context.Context, siteID int64, slug string) (*models.Page, error) {
|
||||
row, err := db.queries.SelectPageBySlugAndSite(ctx, sqlgen.SelectPageBySlugAndSiteParams{
|
||||
SiteID: siteID,
|
||||
Slug: slug,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return dbPageToPage(row), nil
|
||||
}
|
||||
|
||||
func (db *Provider) SavePage(ctx context.Context, page *models.Page) error {
|
||||
if page.ID == 0 {
|
||||
showInNav := int64(0)
|
||||
if page.ShowInNav {
|
||||
showInNav = 1
|
||||
}
|
||||
newID, err := db.queries.InsertPage(ctx, sqlgen.InsertPageParams{
|
||||
SiteID: page.SiteID,
|
||||
Guid: page.GUID,
|
||||
Title: page.Title,
|
||||
Slug: page.Slug,
|
||||
Body: page.Body,
|
||||
PageType: int64(page.PageType),
|
||||
ShowInNav: showInNav,
|
||||
SortOrder: int64(page.SortOrder),
|
||||
CreatedAt: timeToInt(page.CreatedAt),
|
||||
UpdatedAt: timeToInt(page.UpdatedAt),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
page.ID = newID
|
||||
return nil
|
||||
}
|
||||
|
||||
showInNav := int64(0)
|
||||
if page.ShowInNav {
|
||||
showInNav = 1
|
||||
}
|
||||
return db.queries.UpdatePage(ctx, sqlgen.UpdatePageParams{
|
||||
Title: page.Title,
|
||||
Slug: page.Slug,
|
||||
Body: page.Body,
|
||||
PageType: int64(page.PageType),
|
||||
ShowInNav: showInNav,
|
||||
UpdatedAt: timeToInt(page.UpdatedAt),
|
||||
ID: page.ID,
|
||||
})
|
||||
}
|
||||
|
||||
func (db *Provider) UpdatePageSortOrder(ctx context.Context, id int64, sortOrder int) error {
|
||||
return db.queries.UpdatePageSortOrder(ctx, sqlgen.UpdatePageSortOrderParams{
|
||||
SortOrder: int64(sortOrder),
|
||||
ID: id,
|
||||
})
|
||||
}
|
||||
|
||||
func (db *Provider) DeletePage(ctx context.Context, id int64) error {
|
||||
return db.queries.DeletePage(ctx, id)
|
||||
}
|
||||
|
||||
func dbPageToPage(row sqlgen.Page) *models.Page {
|
||||
return &models.Page{
|
||||
ID: row.ID,
|
||||
SiteID: row.SiteID,
|
||||
GUID: row.Guid,
|
||||
Title: row.Title,
|
||||
Slug: row.Slug,
|
||||
Body: row.Body,
|
||||
PageType: int(row.PageType),
|
||||
ShowInNav: row.ShowInNav != 0,
|
||||
SortOrder: int(row.SortOrder),
|
||||
CreatedAt: time.Unix(row.CreatedAt, 0).UTC(),
|
||||
UpdatedAt: time.Unix(row.UpdatedAt, 0).UTC(),
|
||||
}
|
||||
}
|
||||
|
|
@ -8,23 +8,7 @@ import (
|
|||
"lmika.dev/lmika/weiro/providers/db/gen/sqlgen"
|
||||
)
|
||||
|
||||
type PagingParams struct {
|
||||
Limit int64
|
||||
Offset int64
|
||||
}
|
||||
|
||||
func (db *Provider) CountPostsOfSite(ctx context.Context, siteID int64, showDeleted bool) (int64, error) {
|
||||
filter := "active"
|
||||
if showDeleted {
|
||||
filter = "deleted"
|
||||
}
|
||||
return db.queries.CountPostsOfSite(ctx, sqlgen.CountPostsOfSiteParams{
|
||||
SiteID: siteID,
|
||||
PostFilter: filter,
|
||||
})
|
||||
}
|
||||
|
||||
func (db *Provider) SelectPostsOfSite(ctx context.Context, siteID int64, showDeleted bool, pp PagingParams) ([]*models.Post, error) {
|
||||
func (db *Provider) SelectPostsOfSite(ctx context.Context, siteID int64, showDeleted bool) ([]*models.Post, error) {
|
||||
var filter = ""
|
||||
if showDeleted {
|
||||
filter = "deleted"
|
||||
|
|
@ -33,25 +17,6 @@ func (db *Provider) SelectPostsOfSite(ctx context.Context, siteID int64, showDel
|
|||
rows, err := db.queries.SelectPostsOfSite(ctx, sqlgen.SelectPostsOfSiteParams{
|
||||
SiteID: siteID,
|
||||
PostFilter: filter,
|
||||
Limit: pp.Limit,
|
||||
Offset: pp.Offset,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
posts := make([]*models.Post, len(rows))
|
||||
for i, row := range rows {
|
||||
posts[i] = dbPostToPost(row)
|
||||
}
|
||||
return posts, nil
|
||||
}
|
||||
|
||||
func (db *Provider) SelectPublishedPostsOfSite(ctx context.Context, siteID int64, pp PagingParams) ([]*models.Post, error) {
|
||||
rows, err := db.queries.SelectPublishedPostsOfSite(ctx, sqlgen.SelectPublishedPostsOfSiteParams{
|
||||
SiteID: siteID,
|
||||
Limit: pp.Limit,
|
||||
Offset: pp.Offset,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
|
|||
|
|
@ -40,17 +40,6 @@ func (db *Provider) Close() error {
|
|||
return db.drvr.Close()
|
||||
}
|
||||
|
||||
func (db *Provider) BeginTx(ctx context.Context) (*sql.Tx, error) {
|
||||
return db.drvr.BeginTx(ctx, nil)
|
||||
}
|
||||
|
||||
func (db *Provider) QueriesWithTx(tx *sql.Tx) *Provider {
|
||||
return &Provider{
|
||||
drvr: db.drvr,
|
||||
queries: db.queries.WithTx(tx),
|
||||
}
|
||||
}
|
||||
|
||||
func (db *Provider) SoftDeletePost(ctx context.Context, postID int64) error {
|
||||
return db.queries.SoftDeletePost(ctx, sqlgen.SoftDeletePostParams{
|
||||
DeletedAt: time.Now().Unix(),
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ package db_test
|
|||
import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
|
|
@ -99,7 +98,6 @@ func TestProvider_Sites(t *testing.T) {
|
|||
t.Run("select site by id", func(t *testing.T) {
|
||||
site := &models.Site{
|
||||
OwnerID: user.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: "Lookup Blog",
|
||||
Tagline: "Find me by ID",
|
||||
}
|
||||
|
|
@ -145,11 +143,10 @@ func TestProvider_Posts(t *testing.T) {
|
|||
require.NoError(t, p.SaveSite(ctx, site))
|
||||
|
||||
t.Run("save and select posts", func(t *testing.T) {
|
||||
guid := models.NewNanoID()
|
||||
now := time.Date(2026, 2, 19, 12, 0, 0, 0, time.UTC)
|
||||
post := &models.Post{
|
||||
SiteID: site.ID,
|
||||
GUID: guid,
|
||||
GUID: "post-001",
|
||||
Title: "First Post",
|
||||
Body: "Hello world",
|
||||
Slug: "/2026/02/19/first-post",
|
||||
|
|
@ -161,12 +158,12 @@ func TestProvider_Posts(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
assert.NotZero(t, post.ID)
|
||||
|
||||
posts, err := p.SelectPostsOfSite(ctx, site.ID, false, db.PagingParams{Limit: 10, Offset: 0})
|
||||
posts, err := p.SelectPostsOfSite(ctx, site.ID, false)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, posts, 1)
|
||||
assert.Equal(t, post.ID, posts[0].ID)
|
||||
assert.Equal(t, site.ID, posts[0].SiteID)
|
||||
assert.Equal(t, guid, posts[0].GUID)
|
||||
assert.Equal(t, "post-001", posts[0].GUID)
|
||||
assert.Equal(t, "First Post", posts[0].Title)
|
||||
assert.Equal(t, "Hello world", posts[0].Body)
|
||||
assert.Equal(t, "/2026/02/19/first-post", posts[0].Slug)
|
||||
|
|
@ -176,10 +173,8 @@ func TestProvider_Posts(t *testing.T) {
|
|||
|
||||
t.Run("posts ordered by created_at desc", func(t *testing.T) {
|
||||
// Create a second site to isolate this test
|
||||
guid := models.NewNanoID()
|
||||
site2 := &models.Site{
|
||||
OwnerID: user.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: "Second Blog",
|
||||
Tagline: "",
|
||||
}
|
||||
|
|
@ -190,7 +185,7 @@ func TestProvider_Posts(t *testing.T) {
|
|||
|
||||
post1 := &models.Post{
|
||||
SiteID: site2.ID,
|
||||
GUID: guid,
|
||||
GUID: "old-post",
|
||||
Title: "Old Post",
|
||||
Body: "old",
|
||||
Slug: "/old",
|
||||
|
|
@ -199,7 +194,7 @@ func TestProvider_Posts(t *testing.T) {
|
|||
}
|
||||
post2 := &models.Post{
|
||||
SiteID: site2.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
GUID: "new-post",
|
||||
Title: "New Post",
|
||||
Body: "new",
|
||||
Slug: "/new",
|
||||
|
|
@ -210,7 +205,7 @@ func TestProvider_Posts(t *testing.T) {
|
|||
require.NoError(t, p.SavePost(ctx, post1))
|
||||
require.NoError(t, p.SavePost(ctx, post2))
|
||||
|
||||
posts, err := p.SelectPostsOfSite(ctx, site2.ID, false, db.PagingParams{Limit: 10, Offset: 0})
|
||||
posts, err := p.SelectPostsOfSite(ctx, site2.ID, false)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, posts, 2)
|
||||
assert.Equal(t, "New Post", posts[0].Title)
|
||||
|
|
@ -220,55 +215,15 @@ func TestProvider_Posts(t *testing.T) {
|
|||
t.Run("select posts for site with no posts", func(t *testing.T) {
|
||||
emptySite := &models.Site{
|
||||
OwnerID: user.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: "Empty Blog",
|
||||
Tagline: "",
|
||||
}
|
||||
require.NoError(t, p.SaveSite(ctx, emptySite))
|
||||
|
||||
posts, err := p.SelectPostsOfSite(ctx, emptySite.ID, false, db.PagingParams{})
|
||||
posts, err := p.SelectPostsOfSite(ctx, emptySite.ID, false)
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, posts)
|
||||
})
|
||||
|
||||
t.Run("count posts of site", func(t *testing.T) {
|
||||
countSite := &models.Site{
|
||||
OwnerID: user.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: "Count Blog",
|
||||
}
|
||||
require.NoError(t, p.SaveSite(ctx, countSite))
|
||||
|
||||
now := time.Date(2026, 3, 22, 12, 0, 0, 0, time.UTC)
|
||||
for i := 0; i < 3; i++ {
|
||||
post := &models.Post{
|
||||
SiteID: countSite.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: fmt.Sprintf("Post %d", i),
|
||||
Body: "body",
|
||||
Slug: fmt.Sprintf("/post-%d", i),
|
||||
CreatedAt: now,
|
||||
}
|
||||
require.NoError(t, p.SavePost(ctx, post))
|
||||
}
|
||||
|
||||
count, err := p.CountPostsOfSite(ctx, countSite.ID, false)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(3), count)
|
||||
|
||||
// Soft-delete one post
|
||||
posts, err := p.SelectPostsOfSite(ctx, countSite.ID, false, db.PagingParams{Limit: 10, Offset: 0})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, p.SoftDeletePost(ctx, posts[0].ID))
|
||||
|
||||
count, err = p.CountPostsOfSite(ctx, countSite.ID, false)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(2), count)
|
||||
|
||||
count, err = p.CountPostsOfSite(ctx, countSite.ID, true)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(1), count)
|
||||
})
|
||||
}
|
||||
|
||||
func TestProvider_PublishTargets(t *testing.T) {
|
||||
|
|
@ -284,7 +239,6 @@ func TestProvider_PublishTargets(t *testing.T) {
|
|||
|
||||
site := &models.Site{
|
||||
OwnerID: user.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: "My Blog",
|
||||
Tagline: "A test blog",
|
||||
}
|
||||
|
|
@ -318,7 +272,6 @@ func TestProvider_PublishTargets(t *testing.T) {
|
|||
t.Run("select targets for site with no targets", func(t *testing.T) {
|
||||
emptySite := &models.Site{
|
||||
OwnerID: user.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
Title: "No Targets",
|
||||
Tagline: "",
|
||||
}
|
||||
|
|
@ -330,165 +283,6 @@ func TestProvider_PublishTargets(t *testing.T) {
|
|||
})
|
||||
}
|
||||
|
||||
func TestProvider_Categories(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
p := newTestDB(t)
|
||||
|
||||
user := &models.User{Username: "testuser", PasswordHashed: []byte("password")}
|
||||
require.NoError(t, p.SaveUser(ctx, user))
|
||||
|
||||
site := &models.Site{OwnerID: user.ID, Title: "My Blog", Tagline: "test"}
|
||||
require.NoError(t, p.SaveSite(ctx, site))
|
||||
|
||||
t.Run("save and select categories", func(t *testing.T) {
|
||||
now := time.Date(2026, 3, 18, 12, 0, 0, 0, time.UTC)
|
||||
cat := &models.Category{
|
||||
SiteID: site.ID,
|
||||
GUID: "cat-001",
|
||||
Name: "Go Programming",
|
||||
Slug: "go-programming",
|
||||
Description: "Posts about Go",
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
|
||||
err := p.SaveCategory(ctx, cat)
|
||||
require.NoError(t, err)
|
||||
assert.NotZero(t, cat.ID)
|
||||
|
||||
cats, err := p.SelectCategoriesOfSite(ctx, site.ID)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, cats, 1)
|
||||
assert.Equal(t, "Go Programming", cats[0].Name)
|
||||
assert.Equal(t, "go-programming", cats[0].Slug)
|
||||
assert.Equal(t, "Posts about Go", cats[0].Description)
|
||||
})
|
||||
|
||||
t.Run("update category", func(t *testing.T) {
|
||||
now := time.Date(2026, 3, 18, 12, 0, 0, 0, time.UTC)
|
||||
cat := &models.Category{
|
||||
SiteID: site.ID,
|
||||
GUID: "cat-002",
|
||||
Name: "Original",
|
||||
Slug: "original",
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
require.NoError(t, p.SaveCategory(ctx, cat))
|
||||
|
||||
cat.Name = "Updated"
|
||||
cat.Slug = "updated"
|
||||
cat.UpdatedAt = now.Add(time.Hour)
|
||||
require.NoError(t, p.SaveCategory(ctx, cat))
|
||||
|
||||
got, err := p.SelectCategory(ctx, cat.ID)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Updated", got.Name)
|
||||
assert.Equal(t, "updated", got.Slug)
|
||||
})
|
||||
|
||||
t.Run("delete category", func(t *testing.T) {
|
||||
now := time.Date(2026, 3, 18, 12, 0, 0, 0, time.UTC)
|
||||
cat := &models.Category{
|
||||
SiteID: site.ID,
|
||||
GUID: "cat-003",
|
||||
Name: "ToDelete",
|
||||
Slug: "to-delete",
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
require.NoError(t, p.SaveCategory(ctx, cat))
|
||||
|
||||
err := p.DeleteCategory(ctx, cat.ID)
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = p.SelectCategory(ctx, cat.ID)
|
||||
assert.Error(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestProvider_PostCategories(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
p := newTestDB(t)
|
||||
|
||||
user := &models.User{Username: "testuser", PasswordHashed: []byte("password")}
|
||||
require.NoError(t, p.SaveUser(ctx, user))
|
||||
|
||||
site := &models.Site{OwnerID: user.ID, Title: "My Blog", Tagline: "test"}
|
||||
require.NoError(t, p.SaveSite(ctx, site))
|
||||
|
||||
now := time.Date(2026, 3, 18, 12, 0, 0, 0, time.UTC)
|
||||
post := &models.Post{
|
||||
SiteID: site.ID,
|
||||
GUID: "post-pc-001",
|
||||
Title: "Test Post",
|
||||
Body: "body",
|
||||
Slug: "/test",
|
||||
CreatedAt: now,
|
||||
}
|
||||
require.NoError(t, p.SavePost(ctx, post))
|
||||
|
||||
cat1 := &models.Category{SiteID: site.ID, GUID: "cat-pc-1", Name: "Alpha", Slug: "alpha", CreatedAt: now, UpdatedAt: now}
|
||||
cat2 := &models.Category{SiteID: site.ID, GUID: "cat-pc-2", Name: "Beta", Slug: "beta", CreatedAt: now, UpdatedAt: now}
|
||||
require.NoError(t, p.SaveCategory(ctx, cat1))
|
||||
require.NoError(t, p.SaveCategory(ctx, cat2))
|
||||
|
||||
t.Run("set and get post categories", func(t *testing.T) {
|
||||
err := p.SetPostCategories(ctx, post.ID, []int64{cat1.ID, cat2.ID})
|
||||
require.NoError(t, err)
|
||||
|
||||
cats, err := p.SelectCategoriesOfPost(ctx, post.ID)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, cats, 2)
|
||||
assert.Equal(t, "Alpha", cats[0].Name)
|
||||
assert.Equal(t, "Beta", cats[1].Name)
|
||||
})
|
||||
|
||||
t.Run("replace post categories", func(t *testing.T) {
|
||||
err := p.SetPostCategories(ctx, post.ID, []int64{cat2.ID})
|
||||
require.NoError(t, err)
|
||||
|
||||
cats, err := p.SelectCategoriesOfPost(ctx, post.ID)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, cats, 1)
|
||||
assert.Equal(t, "Beta", cats[0].Name)
|
||||
})
|
||||
|
||||
t.Run("clear post categories", func(t *testing.T) {
|
||||
err := p.SetPostCategories(ctx, post.ID, []int64{})
|
||||
require.NoError(t, err)
|
||||
|
||||
cats, err := p.SelectCategoriesOfPost(ctx, post.ID)
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, cats)
|
||||
})
|
||||
|
||||
t.Run("count posts of category", func(t *testing.T) {
|
||||
post.State = models.StatePublished
|
||||
post.PublishedAt = now
|
||||
require.NoError(t, p.SavePost(ctx, post))
|
||||
require.NoError(t, p.SetPostCategories(ctx, post.ID, []int64{cat1.ID}))
|
||||
|
||||
count, err := p.CountPostsOfCategory(ctx, cat1.ID)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(1), count)
|
||||
|
||||
count, err = p.CountPostsOfCategory(ctx, cat2.ID)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, int64(0), count)
|
||||
})
|
||||
|
||||
t.Run("cascade delete category removes associations", func(t *testing.T) {
|
||||
require.NoError(t, p.SetPostCategories(ctx, post.ID, []int64{cat1.ID, cat2.ID}))
|
||||
require.NoError(t, p.DeleteCategory(ctx, cat1.ID))
|
||||
|
||||
cats, err := p.SelectCategoriesOfPost(ctx, post.ID)
|
||||
require.NoError(t, err)
|
||||
require.Len(t, cats, 1)
|
||||
assert.Equal(t, "Beta", cats[0].Name)
|
||||
})
|
||||
}
|
||||
|
||||
// Verify that password encoding roundtrips correctly through base64
|
||||
func TestProvider_UserPasswordEncoding(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
|
|
|||
|
|
@ -42,13 +42,11 @@ func (db *Provider) SelectSitesOwnedByUser(ctx context.Context, ownerID int64) (
|
|||
func (db *Provider) SaveSite(ctx context.Context, site *models.Site) error {
|
||||
if site.ID == 0 {
|
||||
newID, err := db.queries.InsertSite(ctx, sqlgen.InsertSiteParams{
|
||||
OwnerID: site.OwnerID,
|
||||
Guid: site.GUID,
|
||||
Title: site.Title,
|
||||
Tagline: site.Tagline,
|
||||
Timezone: site.Timezone,
|
||||
PostsPerPage: int64(site.PostsPerPage),
|
||||
CreatedAt: timeToInt(site.Created),
|
||||
OwnerID: site.OwnerID,
|
||||
Guid: site.GUID,
|
||||
Title: site.Title,
|
||||
Tagline: site.Tagline,
|
||||
CreatedAt: timeToInt(site.Created),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -57,13 +55,8 @@ func (db *Provider) SaveSite(ctx context.Context, site *models.Site) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
return db.queries.UpdateSite(ctx, sqlgen.UpdateSiteParams{
|
||||
Title: site.Title,
|
||||
Tagline: site.Tagline,
|
||||
Timezone: site.Timezone,
|
||||
PostsPerPage: int64(site.PostsPerPage),
|
||||
ID: site.ID,
|
||||
})
|
||||
// No update query defined in sqlgen yet
|
||||
return nil
|
||||
}
|
||||
|
||||
func (db *Provider) HasUsersAndSites(ctx context.Context) (bool, error) {
|
||||
|
|
@ -103,13 +96,11 @@ func (db *Provider) SelectAllSitesWithOwners(ctx context.Context) ([]SiteWithOwn
|
|||
|
||||
func dbSiteToSite(row sqlgen.Site) models.Site {
|
||||
return models.Site{
|
||||
ID: row.ID,
|
||||
OwnerID: row.OwnerID,
|
||||
GUID: row.Guid,
|
||||
Title: row.Title,
|
||||
Timezone: row.Timezone,
|
||||
Tagline: row.Tagline,
|
||||
PostsPerPage: int(row.PostsPerPage),
|
||||
Created: time.Unix(row.CreatedAt, 0).UTC(),
|
||||
ID: row.ID,
|
||||
OwnerID: row.OwnerID,
|
||||
GUID: row.Guid,
|
||||
Title: row.Title,
|
||||
Tagline: row.Tagline,
|
||||
Created: time.Unix(row.CreatedAt, 0).UTC(),
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -44,7 +44,7 @@ func (db *Provider) SelectUploadBySiteIDAndSlug(ctx context.Context, siteID int6
|
|||
|
||||
func (db *Provider) SaveUpload(ctx context.Context, upload *models.Upload) error {
|
||||
if upload.ID == 0 {
|
||||
newID, err := db.queries.InsertUpload(ctx, sqlgen.InsertUploadParams{
|
||||
if err := db.queries.InsertUpload(ctx, sqlgen.InsertUploadParams{
|
||||
SiteID: upload.SiteID,
|
||||
Guid: upload.GUID,
|
||||
MimeType: upload.MIMEType,
|
||||
|
|
@ -53,11 +53,9 @@ func (db *Provider) SaveUpload(ctx context.Context, upload *models.Upload) error
|
|||
Slug: upload.Slug,
|
||||
Alt: upload.Alt,
|
||||
CreatedAt: upload.CreatedAt.Unix(),
|
||||
})
|
||||
if err != nil {
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
upload.ID = newID
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
@ -67,13 +65,6 @@ func (db *Provider) SaveUpload(ctx context.Context, upload *models.Upload) error
|
|||
})
|
||||
}
|
||||
|
||||
func (db *Provider) UpdateUploadFileSize(ctx context.Context, id int64, fileSize int64) error {
|
||||
return db.queries.UpdateUploadFileSize(ctx, sqlgen.UpdateUploadFileSizeParams{
|
||||
FileSize: fileSize,
|
||||
ID: id,
|
||||
})
|
||||
}
|
||||
|
||||
func (db *Provider) DeleteUpload(ctx context.Context, id int64) error {
|
||||
return db.queries.DeleteUpload(ctx, id)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ type Renderer struct {
|
|||
|
||||
func NewRendererForUI() *Renderer {
|
||||
mdParser := goldmark.New(
|
||||
goldmark.WithExtensions(extension.GFM, extension.Footnote),
|
||||
goldmark.WithExtensions(extension.GFM),
|
||||
goldmark.WithRendererOptions(
|
||||
gm_html.WithUnsafe(),
|
||||
),
|
||||
|
|
@ -48,7 +48,7 @@ func NewRendererForUI() *Renderer {
|
|||
|
||||
func NewRendererForSite() *Renderer {
|
||||
mdParser := goldmark.New(
|
||||
goldmark.WithExtensions(extension.GFM, extension.Footnote),
|
||||
goldmark.WithExtensions(extension.GFM),
|
||||
goldmark.WithParserOptions(
|
||||
parser.WithAutoHeadingID(),
|
||||
),
|
||||
|
|
|
|||
|
|
@ -6,16 +6,11 @@ import (
|
|||
"fmt"
|
||||
"html/template"
|
||||
"io"
|
||||
"io/fs"
|
||||
"iter"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"github.com/gopherlibs/feedhub/feedhub"
|
||||
"golang.org/x/sync/errgroup"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/models/pubmodel"
|
||||
|
|
@ -23,34 +18,25 @@ import (
|
|||
)
|
||||
|
||||
type Builder struct {
|
||||
site pubmodel.Site
|
||||
mdRenderer *markdown.Renderer
|
||||
opts Options
|
||||
tmpls *template.Template
|
||||
postMDProcessors []postMDProcessor
|
||||
site pubmodel.Site
|
||||
mdRenderer *markdown.Renderer
|
||||
opts Options
|
||||
tmpls *template.Template
|
||||
}
|
||||
|
||||
func New(site pubmodel.Site, opts Options) (*Builder, error) {
|
||||
tmpls, err := template.New("").
|
||||
Funcs(templateFns(site, opts)).
|
||||
ParseFS(opts.TemplatesFS, "*.html")
|
||||
ParseFS(opts.TemplatesFS, tmplNamePostSingle, tmplNamePostList, tmplNameLayoutMain)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, t := range tmpls.Templates() {
|
||||
log.Printf("Loaded template %s", t.Name())
|
||||
}
|
||||
|
||||
return &Builder{
|
||||
site: site,
|
||||
opts: opts,
|
||||
tmpls: tmpls,
|
||||
mdRenderer: markdown.NewRendererForSite(),
|
||||
postMDProcessors: []postMDProcessor{
|
||||
uploadAbsoluteURL,
|
||||
removeFootnoteHRs,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
|
@ -61,21 +47,11 @@ func (b *Builder) BuildSite(outDir string) error {
|
|||
return err
|
||||
}
|
||||
|
||||
eg, ctx := errgroup.WithContext(context.Background())
|
||||
eg := errgroup.Group{}
|
||||
|
||||
eg.Go(func() error {
|
||||
for mp := range b.site.PostIter(ctx) {
|
||||
post, err := mp.Get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rp, err := b.renderPostWithCategories(ctx, post)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := b.createAtPath(buildCtx, rp.Path, func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNamePostSingle, rp)
|
||||
}); err != nil {
|
||||
for _, post := range b.site.Posts {
|
||||
if err := b.writePost(buildCtx, post); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
|
@ -83,203 +59,49 @@ func (b *Builder) BuildSite(outDir string) error {
|
|||
})
|
||||
|
||||
eg.Go(func() error {
|
||||
return b.renderPostListWithCategories(buildCtx, ctx)
|
||||
})
|
||||
|
||||
eg.Go(func() error {
|
||||
if err := b.renderFeeds(buildCtx, b.site.PostIter(ctx), feedOptions{
|
||||
targetNamePrefix: "/feed",
|
||||
titlePrefix: "",
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := b.renderFeeds(buildCtx, b.site.PostIter(ctx), feedOptions{
|
||||
targetNamePrefix: "/feeds/microblog-crosspost",
|
||||
titlePrefix: "Devlog: ",
|
||||
}); err != nil {
|
||||
if err := b.renderPostList(buildCtx, b.site.Posts); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
// Category pages
|
||||
eg.Go(func() error {
|
||||
if err := b.renderCategoryList(buildCtx); err != nil {
|
||||
return err
|
||||
}
|
||||
return b.renderCategoryPages(buildCtx, ctx)
|
||||
})
|
||||
|
||||
// Copy uploads
|
||||
eg.Go(func() error {
|
||||
return b.writeUploads(buildCtx, b.site.Uploads)
|
||||
if err := b.writeUploads(buildCtx, b.site.Uploads); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
// Build static assets
|
||||
eg.Go(func() error { return b.writeStaticAssets(buildCtx) })
|
||||
|
||||
if err := eg.Wait(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Render pages last so they can override auto-generated content
|
||||
return b.renderPages(buildCtx)
|
||||
}
|
||||
|
||||
func (b *Builder) renderPostListWithCategories(bctx buildContext, ctx context.Context) error {
|
||||
// Collect all posts
|
||||
var allPosts []postSingleData
|
||||
for mp := range b.site.PostIter(ctx) {
|
||||
post, err := mp.Get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rp, err := b.renderPostWithCategories(ctx, post)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
allPosts = append(allPosts, rp)
|
||||
}
|
||||
|
||||
postsPerPage := b.site.PostsPerPage
|
||||
if postsPerPage < 1 {
|
||||
postsPerPage = 10
|
||||
}
|
||||
|
||||
totalPages := (len(allPosts) + postsPerPage - 1) / postsPerPage
|
||||
if totalPages < 1 {
|
||||
totalPages = 1
|
||||
}
|
||||
|
||||
for page := 1; page <= totalPages; page++ {
|
||||
start := (page - 1) * postsPerPage
|
||||
end := start + postsPerPage
|
||||
if end > len(allPosts) {
|
||||
end = len(allPosts)
|
||||
}
|
||||
|
||||
pageInfo := models.PageInfo{
|
||||
CurrentPage: page,
|
||||
TotalPages: totalPages,
|
||||
PostsPerPage: postsPerPage,
|
||||
}
|
||||
|
||||
var prevURL, nextURL string
|
||||
if page > 1 {
|
||||
prevURL = fmt.Sprintf("%v/%d", b.opts.BasePostList, page-1)
|
||||
}
|
||||
if page < totalPages {
|
||||
nextURL = fmt.Sprintf("%v/%d", b.opts.BasePostList, page+1)
|
||||
}
|
||||
|
||||
pl := postListData{
|
||||
commonData: commonData{Site: b.site},
|
||||
Posts: allPosts[start:end],
|
||||
PageInfo: pageInfo,
|
||||
PrevURL: prevURL,
|
||||
NextURL: nextURL,
|
||||
}
|
||||
|
||||
// Page 1 renders at both root and /posts/
|
||||
var paths []string
|
||||
if page == 1 {
|
||||
paths = []string{"", fmt.Sprintf("%v/1", b.opts.BasePostList)}
|
||||
} else {
|
||||
paths = []string{fmt.Sprintf("%v/%d", b.opts.BasePostList, page)}
|
||||
}
|
||||
|
||||
for _, path := range paths {
|
||||
if err := b.createAtPath(bctx, path, func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNamePostList, pl)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *Builder) renderFeeds(ctx buildContext, postIter iter.Seq[models.Maybe[*models.Post]], opts feedOptions) error {
|
||||
now := time.Now()
|
||||
feed := &feedhub.Feed{
|
||||
Title: b.site.Title,
|
||||
Link: &feedhub.Link{Href: b.site.BaseURL},
|
||||
Description: b.site.Tagline,
|
||||
// TO FIX: Author
|
||||
Created: now,
|
||||
func (b *Builder) renderPostList(ctx buildContext, postList []*models.Post) error {
|
||||
// TODO: paging
|
||||
postCopy := make([]*models.Post, len(postList))
|
||||
copy(postCopy, postList)
|
||||
|
||||
sort.Slice(postCopy, func(i, j int) bool {
|
||||
return postCopy[i].PublishedAt.After(postCopy[j].PublishedAt)
|
||||
})
|
||||
|
||||
pl := postListData{
|
||||
commonData: commonData{Site: b.site},
|
||||
}
|
||||
|
||||
items := 0
|
||||
for mp := range postIter {
|
||||
post, err := mp.Get()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get post: %w", err)
|
||||
}
|
||||
|
||||
renderedPost, err := b.renderPost(post)
|
||||
for _, post := range postCopy {
|
||||
rp, err := b.renderPost(post)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var catName string
|
||||
if b.site.CategoriesOfPost != nil {
|
||||
cats, err := b.site.CategoriesOfPost(context.Background(), post.ID)
|
||||
if err == nil && len(cats) > 0 {
|
||||
names := make([]string, len(cats))
|
||||
for i, c := range cats {
|
||||
names[i] = c.Name
|
||||
}
|
||||
catName = strings.Join(names, ", ")
|
||||
}
|
||||
}
|
||||
|
||||
postTitle := post.Title
|
||||
if postTitle != "" {
|
||||
postTitle = opts.titlePrefix + postTitle
|
||||
}
|
||||
|
||||
feed.Items = append(feed.Items, &feedhub.Item{
|
||||
Id: filepath.Join(b.site.BaseURL, post.GUID),
|
||||
Title: postTitle,
|
||||
Link: &feedhub.Link{Href: renderedPost.PostURL},
|
||||
Content: string(renderedPost.HTML),
|
||||
// TO FIX: Why the heck does this only include the first category?
|
||||
Category: catName,
|
||||
// TO FIX: Created should be first published
|
||||
Created: post.PublishedAt,
|
||||
Updated: post.UpdatedAt,
|
||||
})
|
||||
|
||||
items++
|
||||
if items >= b.opts.FeedItems {
|
||||
break
|
||||
}
|
||||
pl.Posts = append(pl.Posts, rp)
|
||||
}
|
||||
|
||||
if err := b.createAtPath(ctx, opts.targetNamePrefix+".xml", func(f io.Writer) error {
|
||||
rss, err := feed.ToRss()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert feed to RSS: %w", err)
|
||||
}
|
||||
_, err = io.WriteString(f, rss)
|
||||
return err
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := b.createAtPath(ctx, opts.targetNamePrefix+".json", func(f io.Writer) error {
|
||||
rss, err := feed.ToJSON()
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert feed to JSON feed: %w", err)
|
||||
}
|
||||
_, err = io.WriteString(f, rss)
|
||||
return err
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
return b.createAtPath(ctx, "", func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNamePostList, pl)
|
||||
})
|
||||
}
|
||||
|
||||
func (b *Builder) renderPost(post *models.Post) (postSingleData, error) {
|
||||
|
|
@ -293,220 +115,22 @@ func (b *Builder) renderPost(post *models.Post) (postSingleData, error) {
|
|||
return postSingleData{}, fmt.Errorf("failed to write post %s: %w", post.Slug, err)
|
||||
}
|
||||
|
||||
if len(b.postMDProcessors) > 0 {
|
||||
dom, err := goquery.NewDocumentFromReader(&md)
|
||||
if err != nil {
|
||||
return postSingleData{}, fmt.Errorf("failed to parse post %s: %w", post.Slug, err)
|
||||
}
|
||||
|
||||
for _, processor := range b.postMDProcessors {
|
||||
if err := processor(b.site, dom); err != nil {
|
||||
return postSingleData{}, fmt.Errorf("failed to process post %s: %w", post.Slug, err)
|
||||
}
|
||||
}
|
||||
|
||||
outHTML, err := dom.Find("body").Html()
|
||||
if err != nil {
|
||||
return postSingleData{}, fmt.Errorf("failed to render post %s: %w", post.Slug, err)
|
||||
}
|
||||
md.Reset()
|
||||
md.WriteString(outHTML)
|
||||
}
|
||||
|
||||
postURL := strings.TrimSuffix(b.site.BaseURL, "/") + "/" + strings.TrimPrefix(postPath, "/")
|
||||
|
||||
return postSingleData{
|
||||
commonData: commonData{Site: b.site},
|
||||
Path: postPath,
|
||||
Post: post,
|
||||
PostURL: postURL,
|
||||
HTML: template.HTML(md.String()),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// renderPostWithCategories renders a post and attaches its categories.
|
||||
func (b *Builder) renderPostWithCategories(ctx context.Context, post *models.Post) (postSingleData, error) {
|
||||
func (b *Builder) writePost(ctx buildContext, post *models.Post) error {
|
||||
rp, err := b.renderPost(post)
|
||||
if err != nil {
|
||||
return postSingleData{}, err
|
||||
}
|
||||
|
||||
if b.site.CategoriesOfPost != nil {
|
||||
cats, err := b.site.CategoriesOfPost(ctx, post.ID)
|
||||
if err != nil {
|
||||
return postSingleData{}, err
|
||||
}
|
||||
rp.Categories = cats
|
||||
}
|
||||
|
||||
return rp, nil
|
||||
}
|
||||
|
||||
func (b *Builder) renderCategoryList(ctx buildContext) error {
|
||||
var items []categoryListItem
|
||||
for _, cwc := range b.site.Categories {
|
||||
if cwc.PostCount == 0 {
|
||||
continue
|
||||
}
|
||||
items = append(items, categoryListItem{
|
||||
CategoryWithCount: cwc,
|
||||
Path: fmt.Sprintf("/categories/%s", cwc.Slug),
|
||||
})
|
||||
}
|
||||
|
||||
if len(items) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
data := categoryListData{
|
||||
commonData: commonData{Site: b.site},
|
||||
Categories: items,
|
||||
}
|
||||
|
||||
return b.createAtPath(ctx, "/categories", func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNameCategoryList, data)
|
||||
})
|
||||
}
|
||||
|
||||
func (b *Builder) renderCategoryPages(ctx buildContext, goCtx context.Context) error {
|
||||
for _, cwc := range b.site.Categories {
|
||||
if cwc.PostCount == 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
// Collect all posts for this category
|
||||
var allPosts []postSingleData
|
||||
for mp := range b.site.PostIterByCategory(goCtx, cwc.ID) {
|
||||
post, err := mp.Get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rp, err := b.renderPostWithCategories(goCtx, post)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
allPosts = append(allPosts, rp)
|
||||
}
|
||||
|
||||
var descHTML bytes.Buffer
|
||||
if cwc.Description != "" {
|
||||
if err := b.mdRenderer.RenderTo(goCtx, &descHTML, cwc.Description); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
postsPerPage := b.site.PostsPerPage
|
||||
if postsPerPage < 1 {
|
||||
postsPerPage = 10
|
||||
}
|
||||
|
||||
totalPages := (len(allPosts) + postsPerPage - 1) / postsPerPage
|
||||
if totalPages < 1 {
|
||||
totalPages = 1
|
||||
}
|
||||
|
||||
basePath := fmt.Sprintf("/categories/%s", cwc.Slug)
|
||||
|
||||
for page := 1; page <= totalPages; page++ {
|
||||
start := (page - 1) * postsPerPage
|
||||
end := start + postsPerPage
|
||||
if end > len(allPosts) {
|
||||
end = len(allPosts)
|
||||
}
|
||||
|
||||
pageInfo := models.PageInfo{
|
||||
CurrentPage: page,
|
||||
TotalPages: totalPages,
|
||||
PostsPerPage: postsPerPage,
|
||||
}
|
||||
|
||||
var prevURL, nextURL string
|
||||
if page > 1 {
|
||||
if page == 2 {
|
||||
prevURL = basePath + "/"
|
||||
} else {
|
||||
prevURL = fmt.Sprintf("%s/%d/", basePath, page-1)
|
||||
}
|
||||
}
|
||||
if page < totalPages {
|
||||
nextURL = fmt.Sprintf("%s/%d/", basePath, page+1)
|
||||
}
|
||||
|
||||
path := basePath
|
||||
if page > 1 {
|
||||
path = fmt.Sprintf("%s/%d", basePath, page)
|
||||
}
|
||||
|
||||
data := categorySingleData{
|
||||
commonData: commonData{Site: b.site},
|
||||
Category: &cwc.Category,
|
||||
DescriptionHTML: template.HTML(descHTML.String()),
|
||||
Posts: allPosts[start:end],
|
||||
Path: path,
|
||||
PageInfo: pageInfo,
|
||||
PrevURL: prevURL,
|
||||
NextURL: nextURL,
|
||||
}
|
||||
|
||||
if err := b.createAtPath(ctx, path, func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNameCategorySingle, data)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// Per-category feeds (use all posts, not paginated)
|
||||
if err := b.renderCategoryFeed(ctx, cwc, allPosts); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b *Builder) renderCategoryFeed(ctx buildContext, cwc models.CategoryWithCount, posts []postSingleData) error {
|
||||
now := time.Now()
|
||||
feed := &feedhub.Feed{
|
||||
Title: b.site.Title + " - " + cwc.Name,
|
||||
Link: &feedhub.Link{Href: b.site.BaseURL},
|
||||
Description: cwc.DescriptionBrief,
|
||||
Created: now,
|
||||
}
|
||||
|
||||
for i, rp := range posts {
|
||||
if i >= b.opts.FeedItems {
|
||||
break
|
||||
}
|
||||
feed.Items = append(feed.Items, &feedhub.Item{
|
||||
Id: filepath.Join(b.site.BaseURL, rp.Post.GUID),
|
||||
Title: rp.Post.Title,
|
||||
Link: &feedhub.Link{Href: rp.PostURL},
|
||||
Content: string(rp.HTML),
|
||||
Created: rp.Post.PublishedAt,
|
||||
Updated: rp.Post.UpdatedAt,
|
||||
})
|
||||
}
|
||||
|
||||
prefix := fmt.Sprintf("/categories/%s/feed", cwc.Slug)
|
||||
|
||||
if err := b.createAtPath(ctx, prefix+".xml", func(f io.Writer) error {
|
||||
rss, err := feed.ToRss()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = io.WriteString(f, rss)
|
||||
return err
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return b.createAtPath(ctx, prefix+".json", func(f io.Writer) error {
|
||||
j, err := feed.ToJSON()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = io.WriteString(f, j)
|
||||
return err
|
||||
return b.createAtPath(ctx, rp.Path, func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNamePostSingle, rp)
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -544,7 +168,7 @@ func (b *Builder) renderTemplate(w io.Writer, name string, data interface{}) err
|
|||
|
||||
func (b *Builder) writeUploads(ctx buildContext, uploads []models.Upload) error {
|
||||
for _, u := range uploads {
|
||||
fullPath := filepath.Join(ctx.outDir, b.opts.BaseUploads, u.Slug)
|
||||
fullPath := filepath.Join(ctx.outDir, "uploads", u.Slug)
|
||||
if err := os.MkdirAll(filepath.Dir(fullPath), 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -573,39 +197,6 @@ func (b *Builder) writeUploads(ctx buildContext, uploads []models.Upload) error
|
|||
return nil
|
||||
}
|
||||
|
||||
func (b *Builder) writeStaticAssets(ctx buildContext) error {
|
||||
if b.opts.StaticFS == nil {
|
||||
return nil
|
||||
}
|
||||
return fs.WalkDir(b.opts.StaticFS, ".", func(path string, d os.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
} else if d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
fullPath := filepath.Join(ctx.outDir, b.opts.BaseStatic, path)
|
||||
if err := os.MkdirAll(filepath.Dir(fullPath), 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return func() error {
|
||||
r, err := b.opts.StaticFS.Open(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer r.Close()
|
||||
|
||||
w, err := os.Create(fullPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer w.Close()
|
||||
|
||||
if _, err := io.Copy(w, r); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}()
|
||||
})
|
||||
type buildContext struct {
|
||||
outDir string
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,8 +1,6 @@
|
|||
package sitebuilder_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"iter"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
|
@ -17,48 +15,30 @@ import (
|
|||
func TestBuilder_BuildSite(t *testing.T) {
|
||||
t.Run("build site", func(t *testing.T) {
|
||||
tmpls := fstest.MapFS{
|
||||
"posts_single.html": {Data: []byte(`{{ .HTML }}`)},
|
||||
"posts_list.html": {Data: []byte(`{{ range .Posts}}<a href="{{url_abs .Path}}">{{.Post.Title}}</a>,{{ end }}`)},
|
||||
"layout_main.html": {Data: []byte(`{{ .Body }}`)},
|
||||
"categories_list.html": {Data: []byte(`{{ range .Categories}}<a href="{{url_abs .Path}}">{{.Name}}</a>,{{ end }}`)},
|
||||
"categories_single.html": {Data: []byte(`<h2>{{.Category.Name}}</h2>`)},
|
||||
"pages_single.html": {Data: []byte(`{{ if .Page.Title }}<h2>{{ .Page.Title }}</h2>{{ end }}{{ .HTML }}`)},
|
||||
}
|
||||
|
||||
posts := []*models.Post{
|
||||
{
|
||||
Title: "Test Post",
|
||||
Slug: "/2026/02/18/test-post",
|
||||
Body: "This is a test post",
|
||||
},
|
||||
{
|
||||
Title: "Another Post",
|
||||
Slug: "/2026/02/20/another-post",
|
||||
Body: "This is **another** test post",
|
||||
},
|
||||
"posts_single.html": {Data: []byte(`{{ .HTML }}`)},
|
||||
"posts_list.html": {Data: []byte(`{{ range .Posts}}<a href="{{url_abs .Path}}">{{.Post.Title}}</a>,{{ end }}`)},
|
||||
"layout_main.html": {Data: []byte(`{{ .Body }}`)},
|
||||
}
|
||||
|
||||
site := pubmodel.Site{
|
||||
Site: models.Site{PostsPerPage: 10},
|
||||
BaseURL: "https://example.com",
|
||||
PostIter: func(ctx context.Context) iter.Seq[models.Maybe[*models.Post]] {
|
||||
return func(yield func(models.Maybe[*models.Post]) bool) {
|
||||
for _, p := range posts {
|
||||
if !yield(models.Maybe[*models.Post]{Value: p}) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
Pages: []*models.Page{
|
||||
{Title: "About", Slug: "about", Body: "About this site"},
|
||||
Posts: []*models.Post{
|
||||
{
|
||||
Title: "Test Post",
|
||||
Slug: "/2026/02/18/test-post",
|
||||
Body: "This is a test post",
|
||||
},
|
||||
{
|
||||
Title: "Another Post",
|
||||
Slug: "/2026/02/20/another-post",
|
||||
Body: "This is **another** test post",
|
||||
},
|
||||
},
|
||||
}
|
||||
wantFiles := map[string]string{
|
||||
"2026/02/18/test-post/index.html": "<p>This is a test post</p>\n",
|
||||
"2026/02/20/another-post/index.html": "<p>This is <strong>another</strong> test post</p>\n",
|
||||
"index.html": "<a href=\"https://example.com/2026/02/18/test-post\">Test Post</a>,<a href=\"https://example.com/2026/02/20/another-post\">Another Post</a>,",
|
||||
"about/index.html": "<h2>About</h2><p>About this site</p>\n",
|
||||
}
|
||||
|
||||
outDir := t.TempDir()
|
||||
|
|
@ -78,4 +58,5 @@ func TestBuilder_BuildSite(t *testing.T) {
|
|||
assert.Equal(t, content, string(fileContent))
|
||||
}
|
||||
})
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,10 +0,0 @@
|
|||
package sitebuilder
|
||||
|
||||
type buildContext struct {
|
||||
outDir string
|
||||
}
|
||||
|
||||
type feedOptions struct {
|
||||
targetNamePrefix string
|
||||
titlePrefix string
|
||||
}
|
||||
|
|
@ -1,42 +0,0 @@
|
|||
package sitebuilder
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/PuerkitoBio/goquery"
|
||||
"lmika.dev/lmika/weiro/models/pubmodel"
|
||||
)
|
||||
|
||||
type postMDProcessor func(site pubmodel.Site, dom *goquery.Document) error
|
||||
|
||||
func uploadAbsoluteURL(site pubmodel.Site, dom *goquery.Document) error {
|
||||
siteURL, err := url.Parse(site.BaseURL)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
dom.Find("img").Each(func(i int, s *goquery.Selection) {
|
||||
srcUrl := s.AttrOr("src", "")
|
||||
if site.BaseURL == "" {
|
||||
return
|
||||
} else if strings.HasPrefix(srcUrl, "http:") || strings.HasPrefix(srcUrl, "https:") {
|
||||
return
|
||||
}
|
||||
|
||||
pu, err := url.Parse(srcUrl)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
absURL := siteURL.ResolveReference(pu)
|
||||
|
||||
s.SetAttr("src", absURL.String())
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
func removeFootnoteHRs(site pubmodel.Site, dom *goquery.Document) error {
|
||||
dom.Find("div.footnotes > hr").Remove()
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1,31 +0,0 @@
|
|||
package sitebuilder
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"html/template"
|
||||
"io"
|
||||
)
|
||||
|
||||
func (b *Builder) renderPages(bctx buildContext) error {
|
||||
for _, page := range b.site.Pages {
|
||||
var md bytes.Buffer
|
||||
if err := b.mdRenderer.RenderTo(context.Background(), &md, page.Body); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
data := pageSingleData{
|
||||
commonData: commonData{Site: b.site},
|
||||
Page: page,
|
||||
HTML: template.HTML(md.String()),
|
||||
}
|
||||
|
||||
path := "/" + page.Slug
|
||||
if err := b.createAtPath(bctx, path, func(f io.Writer) error {
|
||||
return b.renderTemplate(f, tmplNamePageSingle, data)
|
||||
}); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
@ -3,7 +3,7 @@ package sitebuilder
|
|||
import (
|
||||
"html/template"
|
||||
"net/url"
|
||||
"strings"
|
||||
"path/filepath"
|
||||
"time"
|
||||
|
||||
"lmika.dev/lmika/weiro/models/pubmodel"
|
||||
|
|
@ -20,7 +20,7 @@ func templateFns(site pubmodel.Site, opts Options) template.FuncMap {
|
|||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
pu.Path = joinPath(pu.Path, basePath)
|
||||
pu.Path = filepath.Join(pu.Path, basePath)
|
||||
return pu.String(), nil
|
||||
},
|
||||
"format_date": func(date time.Time) string {
|
||||
|
|
@ -32,7 +32,3 @@ func templateFns(site pubmodel.Site, opts Options) template.FuncMap {
|
|||
},
|
||||
}
|
||||
}
|
||||
|
||||
func joinPath(basePath, path string) string {
|
||||
return strings.TrimSuffix(basePath, "/") + "/" + strings.TrimPrefix(path, "/")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,33 +20,15 @@ const (
|
|||
|
||||
// tmplNameLayoutMain is the template for the main layout (layoutMainData)
|
||||
tmplNameLayoutMain = "layout_main.html"
|
||||
|
||||
// tmplNameCategoryList is the template for the category index page
|
||||
tmplNameCategoryList = "categories_list.html"
|
||||
|
||||
// tmplNameCategorySingle is the template for a single category page
|
||||
tmplNameCategorySingle = "categories_single.html"
|
||||
|
||||
// tmplNamePageSingle is the template for a single page (pageSingleData)
|
||||
tmplNamePageSingle = "pages_single.html"
|
||||
)
|
||||
|
||||
type Options struct {
|
||||
BasePosts string // BasePosts is the base path for posts.
|
||||
BasePostList string // BasePostList is the base path for post lists.
|
||||
BaseUploads string // BaseUploads is the base path for uploads.
|
||||
BaseStatic string // BaseStatic is the base path for static assets.
|
||||
// BasePosts is the base path for posts.
|
||||
BasePosts string
|
||||
|
||||
// TemplatesFS provides the raw templates for rendering the site.
|
||||
TemplatesFS fs.FS
|
||||
|
||||
// StaticFS provides the raw assets for the site. This will be written as is
|
||||
// from the BaseStatic dir.
|
||||
StaticFS fs.FS
|
||||
|
||||
// FeedItems holds the number of posts to show in the feed.
|
||||
FeedItems int
|
||||
|
||||
RenderTZ *time.Location
|
||||
}
|
||||
|
||||
|
|
@ -56,49 +38,17 @@ type commonData struct {
|
|||
|
||||
type postSingleData struct {
|
||||
commonData
|
||||
Post *models.Post
|
||||
HTML template.HTML
|
||||
Path string
|
||||
PostURL string
|
||||
Categories []*models.Category
|
||||
Post *models.Post
|
||||
HTML template.HTML
|
||||
Path string
|
||||
}
|
||||
|
||||
type postListData struct {
|
||||
commonData
|
||||
Posts []postSingleData
|
||||
PageInfo models.PageInfo
|
||||
PrevURL string
|
||||
NextURL string
|
||||
Posts []postSingleData
|
||||
}
|
||||
|
||||
type layoutData struct {
|
||||
commonData
|
||||
Body template.HTML
|
||||
}
|
||||
|
||||
type categoryListData struct {
|
||||
commonData
|
||||
Categories []categoryListItem
|
||||
}
|
||||
|
||||
type categoryListItem struct {
|
||||
models.CategoryWithCount
|
||||
Path string
|
||||
}
|
||||
|
||||
type categorySingleData struct {
|
||||
commonData
|
||||
Category *models.Category
|
||||
DescriptionHTML template.HTML
|
||||
Posts []postSingleData
|
||||
Path string
|
||||
PageInfo models.PageInfo
|
||||
PrevURL string
|
||||
NextURL string
|
||||
}
|
||||
|
||||
type pageSingleData struct {
|
||||
commonData
|
||||
Page *models.Page
|
||||
HTML template.HTML
|
||||
}
|
||||
|
|
|
|||
94
providers/sitereader/provider.go
Normal file
94
providers/sitereader/provider.go
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
package sitereader
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"io/fs"
|
||||
"time"
|
||||
|
||||
"gopkg.in/yaml.v3"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
)
|
||||
|
||||
type Provider struct {
|
||||
fs fs.FS
|
||||
}
|
||||
|
||||
func New(fs fs.FS) *Provider {
|
||||
return &Provider{
|
||||
fs: fs,
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Provider) ReadSite() (ReadSiteModels, error) {
|
||||
posts, err := p.ListPosts()
|
||||
if err != nil {
|
||||
return ReadSiteModels{}, err
|
||||
}
|
||||
|
||||
meta := siteMeta{}
|
||||
metaBytes, err := fs.ReadFile(p.fs, "site.yaml")
|
||||
if err != nil {
|
||||
return ReadSiteModels{}, err
|
||||
}
|
||||
if err := yaml.Unmarshal(metaBytes, &meta); err != nil {
|
||||
return ReadSiteModels{}, err
|
||||
}
|
||||
|
||||
site := models.Site{
|
||||
Title: meta.Title,
|
||||
Tagline: meta.Tagline,
|
||||
}
|
||||
|
||||
return ReadSiteModels{
|
||||
Site: site,
|
||||
Posts: posts,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p *Provider) ListPosts() (posts []*models.Post, err error) {
|
||||
err = fs.WalkDir(p.fs, "posts", func(path string, d fs.DirEntry, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
} else if d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
post, err := p.ReadPost(path)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
posts = append(posts, post)
|
||||
return nil
|
||||
})
|
||||
return posts, err
|
||||
}
|
||||
|
||||
func (p *Provider) ReadPost(path string) (*models.Post, error) {
|
||||
data, err := fs.ReadFile(p.fs, path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Split front matter and content
|
||||
parts := bytes.SplitN(data, []byte("---"), 3)
|
||||
if len(parts) < 3 {
|
||||
return nil, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
var meta postMeta
|
||||
if err := yaml.Unmarshal(parts[1], &meta); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
post := models.Post{
|
||||
Slug: meta.Slug,
|
||||
Title: meta.Title,
|
||||
GUID: meta.ID,
|
||||
PublishedAt: meta.Date,
|
||||
CreatedAt: time.Now(),
|
||||
}
|
||||
|
||||
post.Body = string(bytes.TrimPrefix(parts[2], []byte("\n")))
|
||||
return &post, nil
|
||||
}
|
||||
106
providers/sitereader/provider_test.go
Normal file
106
providers/sitereader/provider_test.go
Normal file
|
|
@ -0,0 +1,106 @@
|
|||
package sitereader_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"testing/fstest"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"lmika.dev/lmika/weiro/providers/sitereader"
|
||||
)
|
||||
|
||||
func TestProvider_ReadPost(t *testing.T) {
|
||||
t.Run("with meta", func(t *testing.T) {
|
||||
testFS := fstest.MapFS{
|
||||
"site.yaml": {Data: []byte(`base_url: https://example.com`)},
|
||||
"posts/test.md": {Data: []byte(`---
|
||||
date: 2026-02-18T19:59:00Z
|
||||
title: Test Post Here
|
||||
tags: [test, example]
|
||||
---
|
||||
This is just a test post.
|
||||
`)},
|
||||
}
|
||||
|
||||
pr := sitereader.New(testFS)
|
||||
|
||||
post, err := pr.ReadPost("posts/test.md")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "Test Post Here", post.Title)
|
||||
assert.Equal(t, time.Date(2026, 2, 18, 19, 59, 0, 0, time.UTC), post.PublishedAt)
|
||||
assert.Equal(t, "This is just a test post.\n", post.Body)
|
||||
})
|
||||
|
||||
t.Run("without meta", func(t *testing.T) {
|
||||
testFS := fstest.MapFS{
|
||||
"posts/test.md": {Data: []byte(`---
|
||||
---
|
||||
This is just a test post.
|
||||
`)},
|
||||
}
|
||||
|
||||
pr := sitereader.New(testFS)
|
||||
|
||||
post, err := pr.ReadPost("posts/test.md")
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "", post.Title)
|
||||
assert.Equal(t, "This is just a test post.\n", post.Body)
|
||||
})
|
||||
}
|
||||
|
||||
func TestProvider_ListPosts(t *testing.T) {
|
||||
testFS := fstest.MapFS{
|
||||
"posts/01-post1.md": {Data: []byte(`---
|
||||
id: 111
|
||||
date: 2026-02-18T19:59:00Z
|
||||
title: Test Post Here
|
||||
tags: [test, example]
|
||||
---
|
||||
This is just a test post.
|
||||
`)},
|
||||
"posts/02-post2.md": {Data: []byte(`---
|
||||
id: 222
|
||||
---
|
||||
This is just a test post.
|
||||
`)},
|
||||
}
|
||||
|
||||
pr := sitereader.New(testFS)
|
||||
|
||||
posts, err := pr.ListPosts()
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, 2, len(posts))
|
||||
|
||||
assert.Equal(t, "111", posts[0].GUID)
|
||||
assert.Equal(t, "222", posts[1].GUID)
|
||||
}
|
||||
|
||||
func TestProvider_ReadSite(t *testing.T) {
|
||||
testFS := fstest.MapFS{
|
||||
"site.yaml": {Data: []byte(`base_url: https://example.com`)},
|
||||
"posts/01-post1.md": {Data: []byte(`---
|
||||
id: 111
|
||||
date: 2026-02-18T19:59:00Z
|
||||
title: Test Post Here
|
||||
tags: [test, example]
|
||||
---
|
||||
This is just a test post.
|
||||
`)},
|
||||
"posts/02-post2.md": {Data: []byte(`---
|
||||
id: 222
|
||||
---
|
||||
This is just a test post.
|
||||
`)},
|
||||
}
|
||||
|
||||
pr := sitereader.New(testFS)
|
||||
|
||||
sites, err := pr.ReadSite()
|
||||
assert.NoError(t, err)
|
||||
|
||||
assert.Equal(t, 2, len(sites.Posts))
|
||||
|
||||
assert.Equal(t, "111", sites.Posts[0].GUID)
|
||||
assert.Equal(t, "222", sites.Posts[1].GUID)
|
||||
}
|
||||
|
|
@ -26,51 +26,12 @@ func (p *Provider) AdoptFile(site models.Site, up models.Upload, filename string
|
|||
return err
|
||||
}
|
||||
|
||||
if err := os.Rename(filename, fullPath); err == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Can't rename, possibly because of a cross-link device issue. So copy instead
|
||||
if err := moveFile(filename, fullPath); err != nil {
|
||||
if err := os.Rename(filename, fullPath); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func moveFile(src, dst string) error {
|
||||
if err := copyFile(src, dst); err != nil {
|
||||
_ = os.Remove(dst)
|
||||
return err
|
||||
}
|
||||
_ = os.Remove(src)
|
||||
return nil
|
||||
}
|
||||
|
||||
func copyFile(src, dst string) error {
|
||||
in, err := os.Open(src)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer in.Close()
|
||||
|
||||
out, err := os.Create(dst)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
if _, err = io.Copy(out, in); err != nil {
|
||||
return err
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (p *Provider) ReplaceFile(site models.Site, up models.Upload, srcPath string) error {
|
||||
fullPath := p.uploadFileName(site, up)
|
||||
return copyFile(srcPath, fullPath)
|
||||
}
|
||||
|
||||
func (p *Provider) OpenUpload(site models.Site, up models.Upload) (io.ReadCloser, error) {
|
||||
fullPath := p.uploadFileName(site, up)
|
||||
return os.Open(fullPath)
|
||||
|
|
|
|||
|
|
@ -1,178 +0,0 @@
|
|||
package categories
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
"lmika.dev/lmika/weiro/services/publisher"
|
||||
)
|
||||
|
||||
type CreateCategoryParams struct {
|
||||
GUID string `form:"guid" json:"guid"`
|
||||
Name string `form:"name" json:"name"`
|
||||
Slug string `form:"slug" json:"slug"`
|
||||
Description string `form:"description" json:"description"`
|
||||
}
|
||||
|
||||
type Service struct {
|
||||
db *db.Provider
|
||||
publisher *publisher.Queue
|
||||
}
|
||||
|
||||
func New(db *db.Provider, publisher *publisher.Queue) *Service {
|
||||
return &Service{db: db, publisher: publisher}
|
||||
}
|
||||
|
||||
func (s *Service) ListCategories(ctx context.Context) ([]*models.Category, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
return s.db.SelectCategoriesOfSite(ctx, site.ID)
|
||||
}
|
||||
|
||||
// ListCategoriesWithCounts returns all categories for the site with published post counts.
|
||||
func (s *Service) ListCategoriesWithCounts(ctx context.Context) ([]models.CategoryWithCount, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
cats, err := s.db.SelectCategoriesOfSite(ctx, site.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
result := make([]models.CategoryWithCount, len(cats))
|
||||
for i, cat := range cats {
|
||||
count, err := s.db.CountPostsOfCategory(ctx, cat.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result[i] = models.CategoryWithCount{
|
||||
Category: *cat,
|
||||
PostCount: int(count),
|
||||
DescriptionBrief: models.BriefDescription(cat.Description),
|
||||
}
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func (s *Service) GetCategory(ctx context.Context, id int64) (*models.Category, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
cat, err := s.db.SelectCategory(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if cat.SiteID != site.ID {
|
||||
return nil, models.NotFoundError
|
||||
}
|
||||
return cat, nil
|
||||
}
|
||||
|
||||
func (s *Service) CreateCategory(ctx context.Context, params CreateCategoryParams) (*models.Category, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
slug := params.Slug
|
||||
if slug == "" {
|
||||
slug = models.GenerateCategorySlug(params.Name)
|
||||
}
|
||||
|
||||
// Check for slug collision
|
||||
if _, err := s.db.SelectCategoryBySlugAndSite(ctx, site.ID, slug); err == nil {
|
||||
return nil, models.SlugConflictError
|
||||
} else if !db.ErrorIsNoRows(err) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cat := &models.Category{
|
||||
SiteID: site.ID,
|
||||
GUID: params.GUID,
|
||||
Name: params.Name,
|
||||
Slug: slug,
|
||||
Description: params.Description,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
if cat.GUID == "" {
|
||||
cat.GUID = models.NewNanoID()
|
||||
}
|
||||
|
||||
if err := s.db.SaveCategory(ctx, cat); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s.publisher.Queue(site)
|
||||
return cat, nil
|
||||
}
|
||||
|
||||
func (s *Service) UpdateCategory(ctx context.Context, id int64, params CreateCategoryParams) (*models.Category, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
cat, err := s.db.SelectCategory(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if cat.SiteID != site.ID {
|
||||
return nil, models.NotFoundError
|
||||
}
|
||||
|
||||
slug := params.Slug
|
||||
if slug == "" {
|
||||
slug = models.GenerateCategorySlug(params.Name)
|
||||
}
|
||||
|
||||
// Check slug collision (exclude self)
|
||||
if existing, err := s.db.SelectCategoryBySlugAndSite(ctx, site.ID, slug); err == nil && existing.ID != cat.ID {
|
||||
return nil, models.SlugConflictError
|
||||
} else if err != nil && !db.ErrorIsNoRows(err) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
cat.Name = params.Name
|
||||
cat.Slug = slug
|
||||
cat.Description = params.Description
|
||||
cat.UpdatedAt = time.Now()
|
||||
|
||||
if err := s.db.SaveCategory(ctx, cat); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s.publisher.Queue(site)
|
||||
return cat, nil
|
||||
}
|
||||
|
||||
func (s *Service) DeleteCategory(ctx context.Context, id int64) error {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return models.SiteRequiredError
|
||||
}
|
||||
|
||||
cat, err := s.db.SelectCategory(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if cat.SiteID != site.ID {
|
||||
return models.NotFoundError
|
||||
}
|
||||
|
||||
if err := s.db.DeleteCategory(ctx, id); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
s.publisher.Queue(site)
|
||||
return nil
|
||||
}
|
||||
|
|
@ -1,171 +0,0 @@
|
|||
package imgedit
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"image"
|
||||
"image/color"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/disintegration/imaging"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
)
|
||||
|
||||
type imageProcessor struct {
|
||||
newParams func() any
|
||||
processImage func(ctx context.Context, srcImg image.Image, params any) (image.Image, error)
|
||||
}
|
||||
|
||||
type shadowProcessorArgs struct {
|
||||
Color string `json:"color"`
|
||||
OffsetY int `json:"offset_y,string"`
|
||||
}
|
||||
|
||||
var processors = map[string]imageProcessor{
|
||||
"shadow": {
|
||||
newParams: func() any {
|
||||
return &shadowProcessorArgs{
|
||||
Color: "#000000",
|
||||
OffsetY: 0,
|
||||
}
|
||||
},
|
||||
processImage: func(ctx context.Context, srcImg image.Image, params any) (image.Image, error) {
|
||||
p := params.(*shadowProcessorArgs)
|
||||
|
||||
shadowColor, err := parseHexColor(p.Color)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid shadow color: %w", err)
|
||||
}
|
||||
|
||||
shadow := makeBoxShadow(srcImg, shadowColor, 4, 10, p.OffsetY)
|
||||
composit := imaging.OverlayCenter(shadow, srcImg, 1.0)
|
||||
return composit, nil
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func (s *Service) reprocess(ctx context.Context, session *models.ImageEditSession) (imageSource, error) {
|
||||
var img imageSource
|
||||
|
||||
for _, p := range session.Processors {
|
||||
// Check if there's currently a cached image of this processor
|
||||
cachedImageFile := filepath.Join(s.scratchDir, session.GUID, fmt.Sprintf("%v.%v", p.VersionID, session.ImageExt))
|
||||
if s, err := os.Stat(cachedImageFile); err == nil && !s.IsDir() {
|
||||
img = fileImageSource(cachedImageFile)
|
||||
continue
|
||||
}
|
||||
|
||||
// Need to process the image
|
||||
var srcImg image.Image
|
||||
if img != nil {
|
||||
var err error
|
||||
srcImg, err = img.image()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
resImg, err := s.processImage(ctx, srcImg, p)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Cache the processed image
|
||||
if err := imaging.Save(resImg, cachedImageFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
img = imageImageSource{resImg}
|
||||
}
|
||||
|
||||
return img, nil
|
||||
}
|
||||
|
||||
func (s *Service) processImage(ctx context.Context, srcImg image.Image, processor models.ImageEditProcessor) (image.Image, error) {
|
||||
switch processor.Type {
|
||||
case "copy-upload":
|
||||
var p models.CopyUploadProps
|
||||
if err := json.Unmarshal(processor.Props, &p); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, rc, err := s.uploadService.OpenUpload(ctx, p.UploadID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
f, err := rc()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
return imaging.Decode(f)
|
||||
}
|
||||
|
||||
proc, ok := processors[processor.Type]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unknown processor type: %v", processor.Type)
|
||||
}
|
||||
|
||||
paramType := proc.newParams()
|
||||
if err := json.Unmarshal(processor.Props, paramType); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return proc.processImage(ctx, srcImg, paramType)
|
||||
}
|
||||
|
||||
type imageSource interface {
|
||||
image() (image.Image, error)
|
||||
}
|
||||
|
||||
type fileImageSource string
|
||||
|
||||
func (f fileImageSource) image() (image.Image, error) {
|
||||
return imaging.Open(string(f))
|
||||
}
|
||||
|
||||
type imageImageSource struct {
|
||||
img image.Image
|
||||
}
|
||||
|
||||
func (i imageImageSource) image() (image.Image, error) {
|
||||
return i.img, nil
|
||||
}
|
||||
|
||||
func parseHexColor(s string) (color.Color, error) {
|
||||
// Remove leading hash if present
|
||||
if len(s) > 0 && s[0] == '#' {
|
||||
s = s[1:]
|
||||
}
|
||||
|
||||
// Parse based on length
|
||||
var r, g, b, a uint8
|
||||
switch len(s) {
|
||||
case 6:
|
||||
// RGB format
|
||||
var rgb uint32
|
||||
if _, err := fmt.Sscanf(s, "%06x", &rgb); err != nil {
|
||||
return nil, fmt.Errorf("invalid hex color format: %w", err)
|
||||
}
|
||||
r = uint8((rgb >> 16) & 0xFF)
|
||||
g = uint8((rgb >> 8) & 0xFF)
|
||||
b = uint8(rgb & 0xFF)
|
||||
a = 0xFF
|
||||
case 8:
|
||||
// RGBA format
|
||||
var rgba uint32
|
||||
if _, err := fmt.Sscanf(s, "%08x", &rgba); err != nil {
|
||||
return nil, fmt.Errorf("invalid hex color format: %w", err)
|
||||
}
|
||||
r = uint8((rgba >> 24) & 0xFF)
|
||||
g = uint8((rgba >> 16) & 0xFF)
|
||||
b = uint8((rgba >> 8) & 0xFF)
|
||||
a = uint8(rgba & 0xFF)
|
||||
default:
|
||||
return nil, fmt.Errorf("invalid hex color length: expected 6 or 8 characters, got %d", len(s))
|
||||
}
|
||||
|
||||
return color.RGBA{R: r, G: g, B: b, A: a}, nil
|
||||
}
|
||||
|
|
@ -1,266 +0,0 @@
|
|||
package imgedit
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/services/uploads"
|
||||
"lmika.dev/pkg/modash/moslice"
|
||||
)
|
||||
|
||||
type Service struct {
|
||||
scratchDir string
|
||||
uploadService *uploads.Service
|
||||
sessionStore *sessionStore
|
||||
}
|
||||
|
||||
func New(
|
||||
uploadService *uploads.Service,
|
||||
scratchDir string,
|
||||
) *Service {
|
||||
return &Service{
|
||||
scratchDir: scratchDir,
|
||||
uploadService: uploadService,
|
||||
sessionStore: &sessionStore{baseDir: scratchDir},
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Service) NewSession(ctx context.Context, baseUploadID int64) (*models.ImageEditSession, error) {
|
||||
site, user, err := s.fetchSiteAndUser(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
upload, _, err := s.uploadService.OpenUpload(ctx, baseUploadID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var ext string
|
||||
switch upload.MIMEType {
|
||||
case "image/jpeg":
|
||||
ext = "jpg"
|
||||
case "image/png":
|
||||
ext = "png"
|
||||
default:
|
||||
return nil, models.UnsupportedImageFormat
|
||||
}
|
||||
|
||||
newSession := models.ImageEditSession{
|
||||
GUID: models.NewNanoID(),
|
||||
SiteID: site.ID,
|
||||
UserID: user.ID,
|
||||
BaseUploadID: baseUploadID,
|
||||
ImageExt: ext,
|
||||
CreatedAt: time.Now().UTC(),
|
||||
UpdatedAt: time.Now().UTC(),
|
||||
Processors: []models.ImageEditProcessor{
|
||||
{
|
||||
ID: models.NewNanoID(),
|
||||
Type: "copy-upload",
|
||||
Props: mustToJSON(models.CopyUploadProps{UploadID: baseUploadID}),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
newSession.RecalcVersionIDs()
|
||||
if err := s.sessionStore.save(&newSession); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := s.reprocess(ctx, &newSession); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &newSession, nil
|
||||
}
|
||||
|
||||
func (s *Service) LoadImageVersion(ctx context.Context, sessionID string, versionID string) (mimeType string, rw func() (io.ReadCloser, error), err error) {
|
||||
session, err := s.loadAndVerifySession(ctx, sessionID)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
return s.sessionStore.getImage(session, versionID+"."+session.ImageExt)
|
||||
}
|
||||
|
||||
type AddProcessorReq struct {
|
||||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
func (s *Service) AddProcessor(ctx context.Context, sessionID string, req AddProcessorReq) (*models.ImageEditSession, error) {
|
||||
session, err := s.loadAndVerifySession(ctx, sessionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
proc, ok := processors[req.Type]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unknown processor type: %v", req.Type)
|
||||
}
|
||||
|
||||
paramType := proc.newParams()
|
||||
paramBytes, err := json.Marshal(paramType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
session.Processors = append(session.Processors, models.ImageEditProcessor{
|
||||
ID: models.NewNanoID(),
|
||||
Type: req.Type,
|
||||
Props: paramBytes,
|
||||
})
|
||||
|
||||
session.RecalcVersionIDs()
|
||||
if err := s.sessionStore.save(session); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := s.reprocess(ctx, session); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return session, nil
|
||||
}
|
||||
|
||||
func (s *Service) DeleteProcessor(ctx context.Context, sessionID, processorID string) (*models.ImageEditSession, error) {
|
||||
session, err := s.loadAndVerifySession(ctx, sessionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
session.Processors = moslice.Filter(session.Processors, func(p models.ImageEditProcessor) bool { return p.ID != processorID })
|
||||
session.RecalcVersionIDs()
|
||||
if err := s.sessionStore.save(session); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := s.reprocess(ctx, session); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return session, nil
|
||||
}
|
||||
|
||||
type UpdateProcessorReq struct {
|
||||
ID string `json:"id"`
|
||||
Props json.RawMessage `json:"props"`
|
||||
}
|
||||
|
||||
func (s *Service) UpdateProcessor(ctx context.Context, sessionID string, req UpdateProcessorReq) (*models.ImageEditSession, error) {
|
||||
session, err := s.loadAndVerifySession(ctx, sessionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for i, p := range session.Processors {
|
||||
if p.ID == req.ID {
|
||||
session.Processors[i].Props = req.Props
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
session.RecalcVersionIDs()
|
||||
if err := s.sessionStore.save(session); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err := s.reprocess(ctx, session); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return session, nil
|
||||
}
|
||||
|
||||
type SaveResult struct {
|
||||
UploadID int64 `json:"upload_id"`
|
||||
}
|
||||
|
||||
func (s *Service) Save(ctx context.Context, sessionID string, mode string) (*SaveResult, error) {
|
||||
session, err := s.loadAndVerifySession(ctx, sessionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(session.Processors) == 0 {
|
||||
return nil, fmt.Errorf("no processors in session")
|
||||
}
|
||||
|
||||
lastProc := session.Processors[len(session.Processors)-1]
|
||||
finalImagePath := fmt.Sprintf("%v/%v/%v.%v", s.scratchDir, session.GUID, lastProc.VersionID, session.ImageExt)
|
||||
|
||||
var mimeType string
|
||||
switch session.ImageExt {
|
||||
case "jpg", "jpeg":
|
||||
mimeType = "image/jpeg"
|
||||
case "png":
|
||||
mimeType = "image/png"
|
||||
}
|
||||
|
||||
var uploadID int64
|
||||
switch mode {
|
||||
case "replace":
|
||||
upload, err := s.uploadService.ReplaceUploadFile(ctx, session.BaseUploadID, finalImagePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
uploadID = upload.ID
|
||||
case "copy":
|
||||
baseUpload, _, err := s.uploadService.OpenUpload(ctx, session.BaseUploadID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
upload, err := s.uploadService.CreateUploadFromFile(ctx, finalImagePath, baseUpload.Filename, mimeType)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
uploadID = upload.ID
|
||||
default:
|
||||
return nil, fmt.Errorf("unknown save mode: %v", mode)
|
||||
}
|
||||
|
||||
s.sessionStore.delete(session.GUID)
|
||||
|
||||
return &SaveResult{UploadID: uploadID}, nil
|
||||
}
|
||||
|
||||
func (s *Service) loadAndVerifySession(ctx context.Context, sessionID string) (*models.ImageEditSession, error) {
|
||||
site, user, err := s.fetchSiteAndUser(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
session, err := s.sessionStore.get(sessionID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if session.SiteID != site.ID || session.UserID != user.ID {
|
||||
return nil, models.PermissionError
|
||||
}
|
||||
return session, nil
|
||||
}
|
||||
|
||||
func (s *Service) fetchSiteAndUser(ctx context.Context) (models.Site, models.User, error) {
|
||||
user, ok := models.GetUser(ctx)
|
||||
if !ok {
|
||||
return models.Site{}, models.User{}, models.UserRequiredError
|
||||
}
|
||||
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return models.Site{}, models.User{}, models.SiteRequiredError
|
||||
}
|
||||
|
||||
if site.OwnerID != user.ID {
|
||||
return models.Site{}, models.User{}, models.PermissionError
|
||||
}
|
||||
|
||||
return site, user, nil
|
||||
}
|
||||
|
||||
func mustToJSON(a any) json.RawMessage {
|
||||
b, _ := json.Marshal(a)
|
||||
return b
|
||||
}
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
package imgedit
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
|
||||
"github.com/disintegration/imaging"
|
||||
)
|
||||
|
||||
func makeBoxShadow(maskImg image.Image, shadowColor color.Color, sigma float64, shadowMargin, offsetY int) image.Image {
|
||||
w, h := maskImg.Bounds().Dx(), maskImg.Bounds().Dy()
|
||||
cr, cg, cb, _ := shadowColor.RGBA()
|
||||
cr8, cg8, cb8 := uint8(cr>>8), uint8(cg>>8), uint8(cb>>8)
|
||||
|
||||
// New box image
|
||||
backing := image.NewNRGBA(image.Rect(0, 0, w+shadowMargin*2, h+shadowMargin*2+offsetY))
|
||||
newImg := image.NewNRGBA(image.Rect(0, 0, w+shadowMargin*2, h+shadowMargin*2+offsetY))
|
||||
for x := 0; x < w+shadowMargin*2; x++ {
|
||||
for y := 0; y < h+shadowMargin*2; y++ {
|
||||
var c = color.NRGBA{R: 255, G: 255, B: 255, A: 0}
|
||||
if x >= shadowMargin-4 && y >= shadowMargin-4 && x <= w+shadowMargin+4 && y <= h+shadowMargin+4 {
|
||||
_, _, _, a := maskImg.At(x-shadowMargin, y-shadowMargin).RGBA()
|
||||
c = color.NRGBA{R: cr8, G: cg8, B: cb8, A: uint8(a >> 8)}
|
||||
}
|
||||
backing.SetNRGBA(x, y, color.NRGBA{R: 255, G: 255, B: 255, A: 0})
|
||||
newImg.SetNRGBA(x, y+offsetY, c)
|
||||
}
|
||||
}
|
||||
|
||||
// Blur
|
||||
blurredImage := imaging.Blur(newImg, sigma)
|
||||
backing = imaging.OverlayCenter(backing, blurredImage, 0.6)
|
||||
|
||||
return backing
|
||||
}
|
||||
|
|
@ -1,70 +0,0 @@
|
|||
package imgedit
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
)
|
||||
|
||||
type sessionStore struct {
|
||||
baseDir string
|
||||
}
|
||||
|
||||
func (ss *sessionStore) save(newSession *models.ImageEditSession) error {
|
||||
sessionMeta, err := json.Marshal(newSession)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(filepath.Join(ss.baseDir, newSession.GUID), 0755); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(ss.baseDir, newSession.GUID, "session.json"), sessionMeta, 0644); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ss *sessionStore) get(guid string) (*models.ImageEditSession, error) {
|
||||
sessionDataBts, err := os.ReadFile(filepath.Join(ss.baseDir, guid, "session.json"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
sessionData := models.ImageEditSession{}
|
||||
if err := json.Unmarshal(sessionDataBts, &sessionData); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &sessionData, nil
|
||||
}
|
||||
|
||||
func (ss *sessionStore) delete(guid string) {
|
||||
os.RemoveAll(filepath.Join(ss.baseDir, guid))
|
||||
}
|
||||
|
||||
func (ss *sessionStore) getImage(session *models.ImageEditSession, imageFilename string) (string, func() (io.ReadCloser, error), error) {
|
||||
fullPath := filepath.Join(ss.baseDir, session.GUID, imageFilename)
|
||||
if s, err := os.Stat(fullPath); err != nil {
|
||||
return "", nil, err
|
||||
} else if s.IsDir() {
|
||||
return "", nil, os.ErrNotExist
|
||||
}
|
||||
|
||||
var mimeType string
|
||||
switch filepath.Ext(imageFilename) {
|
||||
case ".jpg", ".jpeg":
|
||||
mimeType = "image/jpeg"
|
||||
case ".png":
|
||||
mimeType = "image/png"
|
||||
default:
|
||||
return "", nil, models.UnsupportedImageFormat
|
||||
}
|
||||
|
||||
return mimeType, func() (io.ReadCloser, error) {
|
||||
return os.Open(fullPath)
|
||||
}, nil
|
||||
}
|
||||
54
services/import/service.go
Normal file
54
services/import/service.go
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
package _import
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
|
||||
"emperror.dev/errors"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
"lmika.dev/lmika/weiro/providers/sitereader"
|
||||
)
|
||||
|
||||
type Service struct {
|
||||
db *db.Provider
|
||||
}
|
||||
|
||||
func New(db *db.Provider) *Service {
|
||||
return &Service{
|
||||
db: db,
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Service) Import(ctx context.Context, sitePath string) (models.Site, error) {
|
||||
user, ok := models.GetUser(ctx)
|
||||
if !ok {
|
||||
return models.Site{}, models.UserRequiredError
|
||||
}
|
||||
|
||||
sr := sitereader.New(os.DirFS(sitePath))
|
||||
|
||||
readSite, err := sr.ReadSite()
|
||||
if err != nil {
|
||||
return models.Site{}, errors.Wrap(err, "failed to read site")
|
||||
}
|
||||
|
||||
site := readSite.Site
|
||||
site.OwnerID = user.ID
|
||||
|
||||
if err := s.db.SaveSite(ctx, &site); err != nil {
|
||||
return models.Site{}, errors.Wrap(err, "failed to save site")
|
||||
}
|
||||
|
||||
for _, post := range readSite.Posts {
|
||||
post.SiteID = site.ID
|
||||
if post.GUID == "" {
|
||||
post.GUID = models.NewNanoID()
|
||||
}
|
||||
if err := s.db.SavePost(ctx, post); err != nil {
|
||||
return models.Site{}, errors.Wrap(err, "failed to save post")
|
||||
}
|
||||
}
|
||||
|
||||
return site, nil
|
||||
}
|
||||
|
|
@ -1,198 +0,0 @@
|
|||
package pages
|
||||
|
||||
import (
|
||||
"context"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
"lmika.dev/lmika/weiro/services/publisher"
|
||||
)
|
||||
|
||||
type CreatePageParams struct {
|
||||
GUID string `form:"guid" json:"guid"`
|
||||
Title string `form:"title" json:"title"`
|
||||
Slug string `form:"slug" json:"slug"`
|
||||
Body string `form:"body" json:"body"`
|
||||
PageType int `form:"page_type" json:"page_type"`
|
||||
ShowInNav bool `form:"show_in_nav" json:"show_in_nav"`
|
||||
}
|
||||
|
||||
type Service struct {
|
||||
db *db.Provider
|
||||
publisher *publisher.Queue
|
||||
}
|
||||
|
||||
func New(db *db.Provider, publisher *publisher.Queue) *Service {
|
||||
return &Service{db: db, publisher: publisher}
|
||||
}
|
||||
|
||||
func (s *Service) ListPages(ctx context.Context) ([]*models.Page, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
return s.db.SelectPagesOfSite(ctx, site.ID)
|
||||
}
|
||||
|
||||
func (s *Service) GetPage(ctx context.Context, id int64) (*models.Page, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
page, err := s.db.SelectPage(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if page.SiteID != site.ID {
|
||||
return nil, models.NotFoundError
|
||||
}
|
||||
return page, nil
|
||||
}
|
||||
|
||||
func (s *Service) CreatePage(ctx context.Context, params CreatePageParams) (*models.Page, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
slug := params.Slug
|
||||
if slug == "" {
|
||||
slug = models.GeneratePageSlug(params.Title)
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(slug, "/") {
|
||||
slug = "/" + slug
|
||||
}
|
||||
|
||||
// Check slug collision
|
||||
if _, err := s.db.SelectPageBySlugAndSite(ctx, site.ID, slug); err == nil {
|
||||
return nil, models.SlugConflictError
|
||||
} else if !db.ErrorIsNoRows(err) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Determine sort order: place at end
|
||||
existingPages, err := s.db.SelectPagesOfSite(ctx, site.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sortOrder := len(existingPages)
|
||||
|
||||
page := &models.Page{
|
||||
SiteID: site.ID,
|
||||
GUID: params.GUID,
|
||||
Title: params.Title,
|
||||
Slug: slug,
|
||||
Body: params.Body,
|
||||
PageType: params.PageType,
|
||||
ShowInNav: params.ShowInNav,
|
||||
SortOrder: sortOrder,
|
||||
CreatedAt: now,
|
||||
UpdatedAt: now,
|
||||
}
|
||||
if page.GUID == "" {
|
||||
page.GUID = models.NewNanoID()
|
||||
}
|
||||
|
||||
if err := s.db.SavePage(ctx, page); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s.publisher.Queue(site)
|
||||
return page, nil
|
||||
}
|
||||
|
||||
func (s *Service) UpdatePage(ctx context.Context, id int64, params CreatePageParams) (*models.Page, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
page, err := s.db.SelectPage(ctx, id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if page.SiteID != site.ID {
|
||||
return nil, models.NotFoundError
|
||||
}
|
||||
|
||||
slug := params.Slug
|
||||
if slug == "" {
|
||||
slug = models.GeneratePageSlug(params.Title)
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(slug, "/") {
|
||||
slug = "/" + slug
|
||||
}
|
||||
|
||||
// Check slug collision (exclude self)
|
||||
if existing, err := s.db.SelectPageBySlugAndSite(ctx, site.ID, slug); err == nil && existing.ID != page.ID {
|
||||
return nil, models.SlugConflictError
|
||||
} else if err != nil && !db.ErrorIsNoRows(err) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
page.Title = params.Title
|
||||
page.Slug = slug
|
||||
page.Body = params.Body
|
||||
page.PageType = params.PageType
|
||||
page.ShowInNav = params.ShowInNav
|
||||
page.UpdatedAt = time.Now()
|
||||
|
||||
if err := s.db.SavePage(ctx, page); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s.publisher.Queue(site)
|
||||
return page, nil
|
||||
}
|
||||
|
||||
func (s *Service) DeletePage(ctx context.Context, id int64) error {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return models.SiteRequiredError
|
||||
}
|
||||
|
||||
page, err := s.db.SelectPage(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if page.SiteID != site.ID {
|
||||
return models.NotFoundError
|
||||
}
|
||||
|
||||
if err := s.db.DeletePage(ctx, id); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
s.publisher.Queue(site)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Service) ReorderPages(ctx context.Context, pageIDs []int64) error {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return models.SiteRequiredError
|
||||
}
|
||||
|
||||
// Verify all pages belong to this site
|
||||
for i, id := range pageIDs {
|
||||
page, err := s.db.SelectPage(ctx, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if page.SiteID != site.ID {
|
||||
return models.NotFoundError
|
||||
}
|
||||
if err := s.db.UpdatePageSortOrder(ctx, id, i); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
s.publisher.Queue(site)
|
||||
return nil
|
||||
}
|
||||
|
|
@ -10,16 +10,13 @@ import (
|
|||
)
|
||||
|
||||
type CreatePostParams struct {
|
||||
GUID string `form:"guid" json:"guid"`
|
||||
Title string `form:"title" json:"title"`
|
||||
Body string `form:"body" json:"body"`
|
||||
Action string `form:"action" json:"action"`
|
||||
CategoryIDs []int64 `form:"category_ids" json:"category_ids"`
|
||||
GUID string `form:"guid" json:"guid"`
|
||||
Title string `form:"title" json:"title"`
|
||||
Body string `form:"body" json:"body"`
|
||||
Action string `form:"action" json:"action"`
|
||||
}
|
||||
|
||||
func (s *Service) UpdatePost(ctx context.Context, params CreatePostParams) (*models.Post, error) {
|
||||
now := time.Now()
|
||||
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return nil, models.SiteRequiredError
|
||||
|
|
@ -32,21 +29,14 @@ func (s *Service) UpdatePost(ctx context.Context, params CreatePostParams) (*mod
|
|||
|
||||
post.Title = params.Title
|
||||
post.Body = params.Body
|
||||
post.UpdatedAt = now
|
||||
post.UpdatedAt = time.Now()
|
||||
post.Slug = post.BestSlug()
|
||||
oldState := post.State
|
||||
|
||||
switch strings.ToLower(params.Action) {
|
||||
case "publish":
|
||||
post.State = models.StatePublished
|
||||
|
||||
// Set the published at with the site timezone, and reset the slug, so that the date
|
||||
// is in the site timezone.
|
||||
renderTZ, err := time.LoadLocation(site.Timezone)
|
||||
if err != nil {
|
||||
renderTZ = time.UTC
|
||||
}
|
||||
post.PublishedAt = now.In(renderTZ)
|
||||
post.Slug = post.BestSlug()
|
||||
post.PublishedAt = time.Now()
|
||||
case "save draft":
|
||||
post.State = models.StateDraft
|
||||
post.PublishedAt = time.Time{}
|
||||
|
|
@ -54,21 +44,7 @@ func (s *Service) UpdatePost(ctx context.Context, params CreatePostParams) (*mod
|
|||
// Leave unchanged
|
||||
}
|
||||
|
||||
// Use a transaction for atomicity of post save + category reassignment
|
||||
tx, err := s.db.BeginTx(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
txDB := s.db.QueriesWithTx(tx)
|
||||
if err := txDB.SavePost(ctx, post); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := txDB.SetPostCategories(ctx, post.ID, params.CategoryIDs); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := tx.Commit(); err != nil {
|
||||
if err := s.db.SavePost(ctx, post); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
|
|
@ -101,13 +77,3 @@ func (s *Service) fetchOrCreatePost(ctx context.Context, site models.Site, param
|
|||
}
|
||||
return post, nil
|
||||
}
|
||||
|
||||
// TEMP - to move
|
||||
func (s *Service) RebuildSite(ctx context.Context) error {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return models.SiteRequiredError
|
||||
}
|
||||
s.publisher.Queue(site)
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,44 +4,20 @@ import (
|
|||
"context"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
)
|
||||
|
||||
type PostWithCategories struct {
|
||||
*models.Post
|
||||
Categories []*models.Category
|
||||
}
|
||||
|
||||
type ListPostsResult struct {
|
||||
Posts []*PostWithCategories
|
||||
TotalCount int64
|
||||
}
|
||||
|
||||
func (s *Service) ListPosts(ctx context.Context, showDeleted bool, paging db.PagingParams) (ListPostsResult, error) {
|
||||
func (s *Service) ListPosts(ctx context.Context, showDeleted bool) ([]*models.Post, error) {
|
||||
site, ok := models.GetSite(ctx)
|
||||
if !ok {
|
||||
return ListPostsResult{}, models.SiteRequiredError
|
||||
return nil, models.SiteRequiredError
|
||||
}
|
||||
|
||||
posts, err := s.db.SelectPostsOfSite(ctx, site.ID, showDeleted, paging)
|
||||
posts, err := s.db.SelectPostsOfSite(ctx, site.ID, showDeleted)
|
||||
if err != nil {
|
||||
return ListPostsResult{}, err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
count, err := s.db.CountPostsOfSite(ctx, site.ID, showDeleted)
|
||||
if err != nil {
|
||||
return ListPostsResult{}, err
|
||||
}
|
||||
|
||||
result := make([]*PostWithCategories, len(posts))
|
||||
for i, post := range posts {
|
||||
cats, err := s.db.SelectCategoriesOfPost(ctx, post.ID)
|
||||
if err != nil {
|
||||
return ListPostsResult{}, err
|
||||
}
|
||||
result[i] = &PostWithCategories{Post: post, Categories: cats}
|
||||
}
|
||||
return ListPostsResult{Posts: result, TotalCount: count}, nil
|
||||
return posts, nil
|
||||
}
|
||||
|
||||
func (s *Service) GetPost(ctx context.Context, pid int64) (*models.Post, error) {
|
||||
|
|
@ -52,7 +28,3 @@ func (s *Service) GetPost(ctx context.Context, pid int64) (*models.Post, error)
|
|||
|
||||
return post, nil
|
||||
}
|
||||
|
||||
func (s *Service) GetPostCategories(ctx context.Context, postID int64) ([]*models.Category, error) {
|
||||
return s.db.SelectCategoriesOfPost(ctx, postID)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,64 +0,0 @@
|
|||
package publisher
|
||||
|
||||
import (
|
||||
"context"
|
||||
"iter"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
)
|
||||
|
||||
// postIter returns a post iterator which returns posts in reverse chronological order.
|
||||
func (s *Publisher) publishedPostIter(ctx context.Context, site int64) iter.Seq[models.Maybe[*models.Post]] {
|
||||
return func(yield func(models.Maybe[*models.Post]) bool) {
|
||||
paging := db.PagingParams{Offset: 0, Limit: 50}
|
||||
page, err := s.db.SelectPublishedPostsOfSite(ctx, site, paging)
|
||||
if err != nil {
|
||||
yield(models.Maybe[*models.Post]{Err: err})
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
for _, post := range page {
|
||||
if post.State != models.StatePublished {
|
||||
continue
|
||||
}
|
||||
|
||||
if !yield(models.Maybe[*models.Post]{Value: post}) {
|
||||
return
|
||||
}
|
||||
}
|
||||
paging.Offset += paging.Limit
|
||||
page, err = s.db.SelectPostsOfSite(ctx, site, false, paging)
|
||||
if err != nil {
|
||||
yield(models.Maybe[*models.Post]{Err: err})
|
||||
return
|
||||
} else if len(page) == 0 {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// postIterByCategory returns a post iterator for posts in a specific category.
|
||||
func (s *Publisher) postIterByCategory(ctx context.Context, categoryID int64) iter.Seq[models.Maybe[*models.Post]] {
|
||||
return func(yield func(models.Maybe[*models.Post]) bool) {
|
||||
paging := db.PagingParams{Offset: 0, Limit: 50}
|
||||
for {
|
||||
page, err := s.db.SelectPublishedPostsOfCategory(ctx, categoryID, paging)
|
||||
if err != nil {
|
||||
yield(models.Maybe[*models.Post]{Err: err})
|
||||
return
|
||||
}
|
||||
if len(page) == 0 {
|
||||
return
|
||||
}
|
||||
for _, post := range page {
|
||||
if !yield(models.Maybe[*models.Post]{Value: post}) {
|
||||
return
|
||||
}
|
||||
}
|
||||
paging.Offset += paging.Limit
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -3,11 +3,8 @@ package publisher
|
|||
import (
|
||||
"context"
|
||||
"io"
|
||||
"io/fs"
|
||||
"iter"
|
||||
"log"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"emperror.dev/errors"
|
||||
"github.com/go-openapi/runtime"
|
||||
|
|
@ -41,56 +38,28 @@ func (p *Publisher) Publish(ctx context.Context, site models.Site) error {
|
|||
return err
|
||||
}
|
||||
|
||||
// Fetch all content of site
|
||||
posts, err := p.db.SelectPostsOfSite(ctx, site.ID, false)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Fetch all uploads of site
|
||||
uploads, err := p.db.SelectUploadsOfSite(ctx, site.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Fetch categories with counts
|
||||
cats, err := p.db.SelectCategoriesOfSite(ctx, site.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var catsWithCounts []models.CategoryWithCount
|
||||
for _, cat := range cats {
|
||||
count, err := p.db.CountPostsOfCategory(ctx, cat.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
catsWithCounts = append(catsWithCounts, models.CategoryWithCount{
|
||||
Category: *cat,
|
||||
PostCount: int(count),
|
||||
DescriptionBrief: models.BriefDescription(cat.Description),
|
||||
})
|
||||
}
|
||||
|
||||
// Fetch pages
|
||||
sitePages, err := p.db.SelectPagesOfSite(ctx, site.ID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, target := range targets {
|
||||
if !target.Enabled {
|
||||
continue
|
||||
}
|
||||
|
||||
pubSite := pubmodel.Site{
|
||||
Site: site,
|
||||
PostIter: func(ctx context.Context) iter.Seq[models.Maybe[*models.Post]] {
|
||||
return p.publishedPostIter(ctx, site.ID)
|
||||
},
|
||||
BaseURL: target.BaseURL,
|
||||
Uploads: uploads,
|
||||
Categories: catsWithCounts,
|
||||
PostIterByCategory: func(ctx context.Context, categoryID int64) iter.Seq[models.Maybe[*models.Post]] {
|
||||
return p.postIterByCategory(ctx, categoryID)
|
||||
},
|
||||
CategoriesOfPost: func(ctx context.Context, postID int64) ([]*models.Category, error) {
|
||||
return p.db.SelectCategoriesOfPost(ctx, postID)
|
||||
},
|
||||
Pages: sitePages,
|
||||
Site: site,
|
||||
Posts: posts,
|
||||
BaseURL: target.BaseURL,
|
||||
Uploads: uploads,
|
||||
OpenUpload: func(u models.Upload) (io.ReadCloser, error) {
|
||||
return p.up.OpenUpload(site, u)
|
||||
},
|
||||
|
|
@ -105,30 +74,9 @@ func (p *Publisher) Publish(ctx context.Context, site models.Site) error {
|
|||
}
|
||||
|
||||
func (p *Publisher) publishSite(ctx context.Context, pubSite pubmodel.Site, target models.SitePublishTarget) error {
|
||||
renderTZ, err := time.LoadLocation(pubSite.Timezone)
|
||||
if err != nil {
|
||||
renderTZ = time.UTC
|
||||
}
|
||||
|
||||
templateFS, err := fs.Sub(simplecss.FS, "templates")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
staticFS, err := fs.Sub(simplecss.FS, "static")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sb, err := sitebuilder.New(pubSite, sitebuilder.Options{
|
||||
BasePosts: "/posts",
|
||||
BasePostList: "/pages",
|
||||
BaseUploads: "/uploads",
|
||||
BaseStatic: "/static",
|
||||
TemplatesFS: templateFS,
|
||||
StaticFS: staticFS,
|
||||
FeedItems: 30,
|
||||
RenderTZ: renderTZ,
|
||||
BasePosts: "/posts",
|
||||
TemplatesFS: simplecss.FS,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -140,11 +88,7 @@ func (p *Publisher) publishSite(ctx context.Context, pubSite pubmodel.Site, targ
|
|||
if err := exporter.WriteSiteYAML(); err != nil {
|
||||
return err
|
||||
}
|
||||
for mp := range pubSite.PostIter(ctx) {
|
||||
p, err := mp.Get()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, p := range pubSite.Posts {
|
||||
if err := exporter.WritePost(p); err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,9 +7,6 @@ import (
|
|||
"lmika.dev/lmika/weiro/providers/db"
|
||||
"lmika.dev/lmika/weiro/providers/uploadfiles"
|
||||
"lmika.dev/lmika/weiro/services/auth"
|
||||
"lmika.dev/lmika/weiro/services/categories"
|
||||
"lmika.dev/lmika/weiro/services/imgedit"
|
||||
"lmika.dev/lmika/weiro/services/pages"
|
||||
"lmika.dev/lmika/weiro/services/posts"
|
||||
"lmika.dev/lmika/weiro/services/publisher"
|
||||
"lmika.dev/lmika/weiro/services/sites"
|
||||
|
|
@ -24,9 +21,6 @@ type Services struct {
|
|||
Posts *posts.Service
|
||||
Sites *sites.Service
|
||||
Uploads *uploads.Service
|
||||
ImageEdit *imgedit.Service
|
||||
Categories *categories.Service
|
||||
Pages *pages.Service
|
||||
}
|
||||
|
||||
func New(cfg config.Config) (*Services, error) {
|
||||
|
|
@ -43,9 +37,6 @@ func New(cfg config.Config) (*Services, error) {
|
|||
postService := posts.New(dbp, publisherQueue)
|
||||
siteService := sites.New(dbp)
|
||||
uploadService := uploads.New(dbp, ufp, filepath.Join(cfg.ScratchDir, "uploads", "pending"))
|
||||
imageEditService := imgedit.New(uploadService, filepath.Join(cfg.ScratchDir, "imageedit"))
|
||||
categoriesService := categories.New(dbp, publisherQueue)
|
||||
pagesService := pages.New(dbp, publisherQueue)
|
||||
|
||||
return &Services{
|
||||
DB: dbp,
|
||||
|
|
@ -55,9 +46,6 @@ func New(cfg config.Config) (*Services, error) {
|
|||
Posts: postService,
|
||||
Sites: siteService,
|
||||
Uploads: uploadService,
|
||||
ImageEdit: imageEditService,
|
||||
Categories: categoriesService,
|
||||
Pages: pagesService,
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ import (
|
|||
"github.com/gofiber/fiber/v3"
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
"lmika.dev/lmika/weiro/providers/db"
|
||||
"lmika.dev/pkg/modash/moslice"
|
||||
)
|
||||
|
||||
type Service struct {
|
||||
|
|
@ -26,22 +25,6 @@ func (s *Service) HasUsersAsSites(ctx context.Context) (bool, error) {
|
|||
return s.db.HasUsersAndSites(ctx)
|
||||
}
|
||||
|
||||
func (s *Service) ListSites(ctx context.Context) ([]models.Site, error) {
|
||||
user, ok := models.GetUser(ctx)
|
||||
if !ok {
|
||||
return nil, models.UserRequiredError
|
||||
}
|
||||
|
||||
sites, err := s.db.SelectSitesOwnedByUser(ctx, user.ID)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
} else if len(sites) == 0 {
|
||||
return nil, errors.New("no sites found")
|
||||
}
|
||||
|
||||
return sites, nil
|
||||
}
|
||||
|
||||
func (s *Service) BestSite(ctx context.Context, user models.User) (models.Site, error) {
|
||||
sites, err := s.db.SelectSitesOwnedByUser(ctx, user.ID)
|
||||
if err != nil {
|
||||
|
|
@ -53,20 +36,16 @@ func (s *Service) BestSite(ctx context.Context, user models.User) (models.Site,
|
|||
return sites[0], nil
|
||||
}
|
||||
|
||||
type CreateSiteParams struct {
|
||||
type FirstRunRequest struct {
|
||||
Username string `form:"username"`
|
||||
Password1 string `form:"password1"`
|
||||
Password2 string `form:"password2"`
|
||||
SiteName string `form:"siteName"`
|
||||
SiteURL string `form:"siteUrl"`
|
||||
NetlifySiteID string `form:"netlifySiteId"`
|
||||
NetlifyAPIKey string `form:"netlifyAPIToken"`
|
||||
}
|
||||
|
||||
type FirstRunRequest struct {
|
||||
CreateSiteParams
|
||||
Username string `form:"username"`
|
||||
Password1 string `form:"password1"`
|
||||
Password2 string `form:"password2"`
|
||||
}
|
||||
|
||||
func (frr FirstRunRequest) Validate() error {
|
||||
return validation.ValidateStruct(&frr,
|
||||
validation.Field(&frr.Username, validation.Required, validation.Match(models.ValidUserName)),
|
||||
|
|
@ -97,31 +76,14 @@ func (s *Service) FirstRun(ctx context.Context, req FirstRunRequest) (newUser mo
|
|||
return newUser, newSite, err
|
||||
}
|
||||
|
||||
ctx = models.WithUser(ctx, newUser)
|
||||
newSite, err = s.CreateSite(ctx, req.CreateSiteParams)
|
||||
if err != nil {
|
||||
return newUser, newSite, err
|
||||
}
|
||||
|
||||
return newUser, newSite, nil
|
||||
}
|
||||
|
||||
func (s *Service) CreateSite(ctx context.Context, req CreateSiteParams) (newSite models.Site, _ error) {
|
||||
user, ok := models.GetUser(ctx)
|
||||
if !ok {
|
||||
return newSite, models.UserRequiredError
|
||||
}
|
||||
|
||||
newSite = models.Site{
|
||||
Title: defaultIfEmpty(req.SiteName, "New Site"),
|
||||
GUID: models.NewNanoID(),
|
||||
OwnerID: user.ID,
|
||||
Timezone: "UTC",
|
||||
PostsPerPage: 10,
|
||||
Created: time.Now(),
|
||||
Title: defaultIfEmpty(req.SiteName, "New Site"),
|
||||
GUID: models.NewNanoID(),
|
||||
OwnerID: newUser.ID,
|
||||
Created: time.Now(),
|
||||
}
|
||||
if err := s.db.SaveSite(ctx, &newSite); err != nil {
|
||||
return newSite, err
|
||||
return newUser, newSite, err
|
||||
}
|
||||
|
||||
hasNetlifyConfig := req.SiteURL != "" && req.NetlifySiteID != "" && req.NetlifyAPIKey != ""
|
||||
|
|
@ -136,11 +98,11 @@ func (s *Service) CreateSite(ctx context.Context, req CreateSiteParams) (newSite
|
|||
TargetKey: req.NetlifyAPIKey,
|
||||
}
|
||||
if err := s.db.SavePublishTarget(ctx, &target); err != nil {
|
||||
return newSite, err
|
||||
return newUser, newSite, err
|
||||
}
|
||||
}
|
||||
|
||||
return newSite, nil
|
||||
return newUser, newSite, nil
|
||||
}
|
||||
|
||||
func (s *Service) GetSiteByID(ctx context.Context, siteID int64) (models.Site, error) {
|
||||
|
|
@ -164,55 +126,3 @@ func (s *Service) GetSiteByID(ctx context.Context, siteID int64) (models.Site, e
|
|||
func (s *Service) ListAllSitesWithOwners(ctx context.Context) ([]db.SiteWithOwner, error) {
|
||||
return s.db.SelectAllSitesWithOwners(ctx)
|
||||
}
|
||||
|
||||
type UpdateSiteSettingsParams struct {
|
||||
SiteID int64 `form:"siteID"`
|
||||
Name string `form:"name"`
|
||||
Tagline string `form:"tagline"`
|
||||
Timezone string `form:"timezone"`
|
||||
PostsPerPage int `form:"postsPerPage"`
|
||||
}
|
||||
|
||||
func (s *Service) UpdateSiteSettings(ctx context.Context, params UpdateSiteSettingsParams) (models.Site, error) {
|
||||
site, err := s.GetSiteByID(ctx, params.SiteID)
|
||||
if err != nil {
|
||||
return models.Site{}, err
|
||||
}
|
||||
|
||||
_, err = time.LoadLocation(params.Timezone)
|
||||
if err != nil {
|
||||
return models.Site{}, errors.Wrap(err, "invalid timezone")
|
||||
}
|
||||
|
||||
postsPerPage := params.PostsPerPage
|
||||
if postsPerPage < 1 {
|
||||
postsPerPage = 1
|
||||
} else if postsPerPage > 100 {
|
||||
postsPerPage = 100
|
||||
}
|
||||
|
||||
site.Title = params.Name
|
||||
site.Tagline = params.Tagline
|
||||
site.Timezone = params.Timezone
|
||||
site.PostsPerPage = postsPerPage
|
||||
|
||||
if err := s.db.SaveSite(ctx, &site); err != nil {
|
||||
return models.Site{}, err
|
||||
}
|
||||
|
||||
return site, nil
|
||||
}
|
||||
|
||||
func (s *Service) BestPubTarget(ctx context.Context, site models.Site) (models.SitePublishTarget, error) {
|
||||
pubTargets, err := s.db.SelectPublishTargetsOfSite(ctx, site.ID)
|
||||
if err != nil {
|
||||
return models.SitePublishTarget{}, err
|
||||
}
|
||||
|
||||
enabledPubTargets := moslice.Filter(pubTargets, func(pubTarget models.SitePublishTarget) bool { return pubTarget.Enabled })
|
||||
if len(enabledPubTargets) == 0 {
|
||||
return models.SitePublishTarget{}, errors.New("no publish targets found")
|
||||
}
|
||||
|
||||
return enabledPubTargets[0], nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,23 +0,0 @@
|
|||
package sites
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"strings"
|
||||
"sync"
|
||||
)
|
||||
|
||||
//go:embed tzones.txt
|
||||
var tzonesFS embed.FS
|
||||
|
||||
var loadZones = sync.OnceValue(func() []string {
|
||||
zones, err := tzonesFS.ReadFile("tzones.txt")
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return strings.Split(string(zones), "\n")
|
||||
})
|
||||
|
||||
func ListZones() []string {
|
||||
return loadZones()
|
||||
}
|
||||
|
|
@ -1,606 +0,0 @@
|
|||
Africa/Abidjan
|
||||
Africa/Accra
|
||||
Africa/Addis_Ababa
|
||||
Africa/Algiers
|
||||
Africa/Asmara
|
||||
Africa/Asmera
|
||||
Africa/Bamako
|
||||
Africa/Bangui
|
||||
Africa/Banjul
|
||||
Africa/Bissau
|
||||
Africa/Blantyre
|
||||
Africa/Brazzaville
|
||||
Africa/Bujumbura
|
||||
Africa/Cairo
|
||||
Africa/Casablanca
|
||||
Africa/Ceuta
|
||||
Africa/Conakry
|
||||
Africa/Dakar
|
||||
Africa/Dar_es_Salaam
|
||||
Africa/Djibouti
|
||||
Africa/Douala
|
||||
Africa/El_Aaiun
|
||||
Africa/Freetown
|
||||
Africa/Gaborone
|
||||
Africa/Harare
|
||||
Africa/Johannesburg
|
||||
Africa/Juba
|
||||
Africa/Kampala
|
||||
Africa/Khartoum
|
||||
Africa/Kigali
|
||||
Africa/Kinshasa
|
||||
Africa/Lagos
|
||||
Africa/Libreville
|
||||
Africa/Lome
|
||||
Africa/Luanda
|
||||
Africa/Lubumbashi
|
||||
Africa/Lusaka
|
||||
Africa/Malabo
|
||||
Africa/Maputo
|
||||
Africa/Maseru
|
||||
Africa/Mbabane
|
||||
Africa/Mogadishu
|
||||
Africa/Monrovia
|
||||
Africa/Nairobi
|
||||
Africa/Ndjamena
|
||||
Africa/Niamey
|
||||
Africa/Nouakchott
|
||||
Africa/Ouagadougou
|
||||
Africa/Porto-Novo
|
||||
Africa/Sao_Tome
|
||||
Africa/Timbuktu
|
||||
Africa/Tripoli
|
||||
Africa/Tunis
|
||||
Africa/Windhoek
|
||||
America/Adak
|
||||
America/Anchorage
|
||||
America/Anguilla
|
||||
America/Antigua
|
||||
America/Araguaina
|
||||
America/Argentina/Buenos_Aires
|
||||
America/Argentina/Catamarca
|
||||
America/Argentina/ComodRivadavia
|
||||
America/Argentina/Cordoba
|
||||
America/Argentina/Jujuy
|
||||
America/Argentina/La_Rioja
|
||||
America/Argentina/Mendoza
|
||||
America/Argentina/Rio_Gallegos
|
||||
America/Argentina/Salta
|
||||
America/Argentina/San_Juan
|
||||
America/Argentina/San_Luis
|
||||
America/Argentina/Tucuman
|
||||
America/Argentina/Ushuaia
|
||||
America/Aruba
|
||||
America/Asuncion
|
||||
America/Atikokan
|
||||
America/Atka
|
||||
America/Bahia
|
||||
America/Bahia_Banderas
|
||||
America/Barbados
|
||||
America/Belem
|
||||
America/Belize
|
||||
America/Blanc-Sablon
|
||||
America/Boa_Vista
|
||||
America/Bogota
|
||||
America/Boise
|
||||
America/Buenos_Aires
|
||||
America/Cambridge_Bay
|
||||
America/Campo_Grande
|
||||
America/Cancun
|
||||
America/Caracas
|
||||
America/Catamarca
|
||||
America/Cayenne
|
||||
America/Cayman
|
||||
America/Chicago
|
||||
America/Chihuahua
|
||||
America/Coral_Harbour
|
||||
America/Cordoba
|
||||
America/Costa_Rica
|
||||
America/Creston
|
||||
America/Cuiaba
|
||||
America/Curacao
|
||||
America/Danmarkshavn
|
||||
America/Dawson
|
||||
America/Dawson_Creek
|
||||
America/Denver
|
||||
America/Detroit
|
||||
America/Dominica
|
||||
America/Edmonton
|
||||
America/Eirunepe
|
||||
America/El_Salvador
|
||||
America/Ensenada
|
||||
America/Fort_Nelson
|
||||
America/Fort_Wayne
|
||||
America/Fortaleza
|
||||
America/Glace_Bay
|
||||
America/Godthab
|
||||
America/Goose_Bay
|
||||
America/Grand_Turk
|
||||
America/Grenada
|
||||
America/Guadeloupe
|
||||
America/Guatemala
|
||||
America/Guayaquil
|
||||
America/Guyana
|
||||
America/Halifax
|
||||
America/Havana
|
||||
America/Hermosillo
|
||||
America/Indiana/Indianapolis
|
||||
America/Indiana/Knox
|
||||
America/Indiana/Marengo
|
||||
America/Indiana/Petersburg
|
||||
America/Indiana/Tell_City
|
||||
America/Indiana/Vevay
|
||||
America/Indiana/Vincennes
|
||||
America/Indiana/Winamac
|
||||
America/Indianapolis
|
||||
America/Inuvik
|
||||
America/Iqaluit
|
||||
America/Jamaica
|
||||
America/Jujuy
|
||||
America/Juneau
|
||||
America/Kentucky/Louisville
|
||||
America/Kentucky/Monticello
|
||||
America/Knox_IN
|
||||
America/Kralendijk
|
||||
America/La_Paz
|
||||
America/Lima
|
||||
America/Los_Angeles
|
||||
America/Louisville
|
||||
America/Lower_Princes
|
||||
America/Maceio
|
||||
America/Managua
|
||||
America/Manaus
|
||||
America/Marigot
|
||||
America/Martinique
|
||||
America/Matamoros
|
||||
America/Mazatlan
|
||||
America/Mendoza
|
||||
America/Menominee
|
||||
America/Merida
|
||||
America/Metlakatla
|
||||
America/Mexico_City
|
||||
America/Miquelon
|
||||
America/Moncton
|
||||
America/Monterrey
|
||||
America/Montevideo
|
||||
America/Montreal
|
||||
America/Montserrat
|
||||
America/Nassau
|
||||
America/New_York
|
||||
America/Nipigon
|
||||
America/Nome
|
||||
America/Noronha
|
||||
America/North_Dakota/Beulah
|
||||
America/North_Dakota/Center
|
||||
America/North_Dakota/New_Salem
|
||||
America/Ojinaga
|
||||
America/Panama
|
||||
America/Pangnirtung
|
||||
America/Paramaribo
|
||||
America/Phoenix
|
||||
America/Port-au-Prince
|
||||
America/Port_of_Spain
|
||||
America/Porto_Acre
|
||||
America/Porto_Velho
|
||||
America/Puerto_Rico
|
||||
America/Punta_Arenas
|
||||
America/Rainy_River
|
||||
America/Rankin_Inlet
|
||||
America/Recife
|
||||
America/Regina
|
||||
America/Resolute
|
||||
America/Rio_Branco
|
||||
America/Rosario
|
||||
America/Santa_Isabel
|
||||
America/Santarem
|
||||
America/Santiago
|
||||
America/Santo_Domingo
|
||||
America/Sao_Paulo
|
||||
America/Scoresbysund
|
||||
America/Shiprock
|
||||
America/Sitka
|
||||
America/St_Barthelemy
|
||||
America/St_Johns
|
||||
America/St_Kitts
|
||||
America/St_Lucia
|
||||
America/St_Thomas
|
||||
America/St_Vincent
|
||||
America/Swift_Current
|
||||
America/Tegucigalpa
|
||||
America/Thule
|
||||
America/Thunder_Bay
|
||||
America/Tijuana
|
||||
America/Toronto
|
||||
America/Tortola
|
||||
America/Vancouver
|
||||
America/Virgin
|
||||
America/Whitehorse
|
||||
America/Winnipeg
|
||||
America/Yakutat
|
||||
America/Yellowknife
|
||||
Antarctica/Casey
|
||||
Antarctica/Davis
|
||||
Antarctica/DumontDUrville
|
||||
Antarctica/Macquarie
|
||||
Antarctica/Mawson
|
||||
Antarctica/McMurdo
|
||||
Antarctica/Palmer
|
||||
Antarctica/Rothera
|
||||
Antarctica/South_Pole
|
||||
Antarctica/Syowa
|
||||
Antarctica/Troll
|
||||
Antarctica/Vostok
|
||||
Arctic/Longyearbyen
|
||||
Asia/Aden
|
||||
Asia/Almaty
|
||||
Asia/Amman
|
||||
Asia/Anadyr
|
||||
Asia/Aqtau
|
||||
Asia/Aqtobe
|
||||
Asia/Ashgabat
|
||||
Asia/Ashkhabad
|
||||
Asia/Atyrau
|
||||
Asia/Baghdad
|
||||
Asia/Bahrain
|
||||
Asia/Baku
|
||||
Asia/Bangkok
|
||||
Asia/Barnaul
|
||||
Asia/Beirut
|
||||
Asia/Bishkek
|
||||
Asia/Brunei
|
||||
Asia/Calcutta
|
||||
Asia/Chita
|
||||
Asia/Choibalsan
|
||||
Asia/Chongqing
|
||||
Asia/Chungking
|
||||
Asia/Colombo
|
||||
Asia/Dacca
|
||||
Asia/Damascus
|
||||
Asia/Dhaka
|
||||
Asia/Dili
|
||||
Asia/Dubai
|
||||
Asia/Dushanbe
|
||||
Asia/Famagusta
|
||||
Asia/Gaza
|
||||
Asia/Harbin
|
||||
Asia/Hebron
|
||||
Asia/Ho_Chi_Minh
|
||||
Asia/Hong_Kong
|
||||
Asia/Hovd
|
||||
Asia/Irkutsk
|
||||
Asia/Istanbul
|
||||
Asia/Jakarta
|
||||
Asia/Jayapura
|
||||
Asia/Jerusalem
|
||||
Asia/Kabul
|
||||
Asia/Kamchatka
|
||||
Asia/Karachi
|
||||
Asia/Kashgar
|
||||
Asia/Kathmandu
|
||||
Asia/Katmandu
|
||||
Asia/Khandyga
|
||||
Asia/Kolkata
|
||||
Asia/Krasnoyarsk
|
||||
Asia/Kuala_Lumpur
|
||||
Asia/Kuching
|
||||
Asia/Kuwait
|
||||
Asia/Macao
|
||||
Asia/Macau
|
||||
Asia/Magadan
|
||||
Asia/Makassar
|
||||
Asia/Manila
|
||||
Asia/Muscat
|
||||
Asia/Nicosia
|
||||
Asia/Novokuznetsk
|
||||
Asia/Novosibirsk
|
||||
Asia/Omsk
|
||||
Asia/Oral
|
||||
Asia/Phnom_Penh
|
||||
Asia/Pontianak
|
||||
Asia/Pyongyang
|
||||
Asia/Qatar
|
||||
Asia/Qyzylorda
|
||||
Asia/Rangoon
|
||||
Asia/Riyadh
|
||||
Asia/Saigon
|
||||
Asia/Sakhalin
|
||||
Asia/Samarkand
|
||||
Asia/Seoul
|
||||
Asia/Shanghai
|
||||
Asia/Singapore
|
||||
Asia/Srednekolymsk
|
||||
Asia/Taipei
|
||||
Asia/Tashkent
|
||||
Asia/Tbilisi
|
||||
Asia/Tehran
|
||||
Asia/Tel_Aviv
|
||||
Asia/Thimbu
|
||||
Asia/Thimphu
|
||||
Asia/Tokyo
|
||||
Asia/Tomsk
|
||||
Asia/Ujung_Pandang
|
||||
Asia/Ulaanbaatar
|
||||
Asia/Ulan_Bator
|
||||
Asia/Urumqi
|
||||
Asia/Ust-Nera
|
||||
Asia/Vientiane
|
||||
Asia/Vladivostok
|
||||
Asia/Yakutsk
|
||||
Asia/Yangon
|
||||
Asia/Yekaterinburg
|
||||
Asia/Yerevan
|
||||
Atlantic/Azores
|
||||
Atlantic/Bermuda
|
||||
Atlantic/Canary
|
||||
Atlantic/Cape_Verde
|
||||
Atlantic/Faeroe
|
||||
Atlantic/Faroe
|
||||
Atlantic/Jan_Mayen
|
||||
Atlantic/Madeira
|
||||
Atlantic/Reykjavik
|
||||
Atlantic/South_Georgia
|
||||
Atlantic/St_Helena
|
||||
Atlantic/Stanley
|
||||
Australia/ACT
|
||||
Australia/Adelaide
|
||||
Australia/Brisbane
|
||||
Australia/Broken_Hill
|
||||
Australia/Canberra
|
||||
Australia/Currie
|
||||
Australia/Darwin
|
||||
Australia/Eucla
|
||||
Australia/Hobart
|
||||
Australia/LHI
|
||||
Australia/Lindeman
|
||||
Australia/Lord_Howe
|
||||
Australia/Melbourne
|
||||
Australia/NSW
|
||||
Australia/North
|
||||
Australia/Perth
|
||||
Australia/Queensland
|
||||
Australia/South
|
||||
Australia/Sydney
|
||||
Australia/Tasmania
|
||||
Australia/Victoria
|
||||
Australia/West
|
||||
Australia/Yancowinna
|
||||
Brazil/Acre
|
||||
Brazil/DeNoronha
|
||||
Brazil/East
|
||||
Brazil/West
|
||||
CET
|
||||
CST6CDT
|
||||
Canada/Atlantic
|
||||
Canada/Central
|
||||
Canada/Eastern
|
||||
Canada/Mountain
|
||||
Canada/Newfoundland
|
||||
Canada/Pacific
|
||||
Canada/Saskatchewan
|
||||
Canada/Yukon
|
||||
Chile/Continental
|
||||
Chile/EasterIsland
|
||||
Cuba
|
||||
EET
|
||||
EST
|
||||
EST5EDT
|
||||
Egypt
|
||||
Eire
|
||||
Etc/GMT
|
||||
Etc/GMT+0
|
||||
Etc/GMT+1
|
||||
Etc/GMT+10
|
||||
Etc/GMT+11
|
||||
Etc/GMT+12
|
||||
Etc/GMT+2
|
||||
Etc/GMT+3
|
||||
Etc/GMT+4
|
||||
Etc/GMT+5
|
||||
Etc/GMT+6
|
||||
Etc/GMT+7
|
||||
Etc/GMT+8
|
||||
Etc/GMT+9
|
||||
Etc/GMT-0
|
||||
Etc/GMT-1
|
||||
Etc/GMT-10
|
||||
Etc/GMT-11
|
||||
Etc/GMT-12
|
||||
Etc/GMT-13
|
||||
Etc/GMT-14
|
||||
Etc/GMT-2
|
||||
Etc/GMT-3
|
||||
Etc/GMT-4
|
||||
Etc/GMT-5
|
||||
Etc/GMT-6
|
||||
Etc/GMT-7
|
||||
Etc/GMT-8
|
||||
Etc/GMT-9
|
||||
Etc/GMT0
|
||||
Etc/Greenwich
|
||||
Etc/UCT
|
||||
Etc/UTC
|
||||
Etc/Universal
|
||||
Etc/Zulu
|
||||
Europe/Amsterdam
|
||||
Europe/Andorra
|
||||
Europe/Astrakhan
|
||||
Europe/Athens
|
||||
Europe/Belfast
|
||||
Europe/Belgrade
|
||||
Europe/Berlin
|
||||
Europe/Bratislava
|
||||
Europe/Brussels
|
||||
Europe/Bucharest
|
||||
Europe/Budapest
|
||||
Europe/Busingen
|
||||
Europe/Chisinau
|
||||
Europe/Copenhagen
|
||||
Europe/Dublin
|
||||
Europe/Gibraltar
|
||||
Europe/Guernsey
|
||||
Europe/Helsinki
|
||||
Europe/Isle_of_Man
|
||||
Europe/Istanbul
|
||||
Europe/Jersey
|
||||
Europe/Kaliningrad
|
||||
Europe/Kiev
|
||||
Europe/Kirov
|
||||
Europe/Lisbon
|
||||
Europe/Ljubljana
|
||||
Europe/London
|
||||
Europe/Luxembourg
|
||||
Europe/Madrid
|
||||
Europe/Malta
|
||||
Europe/Mariehamn
|
||||
Europe/Minsk
|
||||
Europe/Monaco
|
||||
Europe/Moscow
|
||||
Europe/Nicosia
|
||||
Europe/Oslo
|
||||
Europe/Paris
|
||||
Europe/Podgorica
|
||||
Europe/Prague
|
||||
Europe/Riga
|
||||
Europe/Rome
|
||||
Europe/Samara
|
||||
Europe/San_Marino
|
||||
Europe/Sarajevo
|
||||
Europe/Saratov
|
||||
Europe/Simferopol
|
||||
Europe/Skopje
|
||||
Europe/Sofia
|
||||
Europe/Stockholm
|
||||
Europe/Tallinn
|
||||
Europe/Tirane
|
||||
Europe/Tiraspol
|
||||
Europe/Ulyanovsk
|
||||
Europe/Uzhgorod
|
||||
Europe/Vaduz
|
||||
Europe/Vatican
|
||||
Europe/Vienna
|
||||
Europe/Vilnius
|
||||
Europe/Volgograd
|
||||
Europe/Warsaw
|
||||
Europe/Zagreb
|
||||
Europe/Zaporozhye
|
||||
Europe/Zurich
|
||||
Factory
|
||||
GB
|
||||
GB-Eire
|
||||
GMT
|
||||
GMT+0
|
||||
GMT-0
|
||||
GMT0
|
||||
Greenwich
|
||||
HST
|
||||
Hongkong
|
||||
Iceland
|
||||
Indian/Antananarivo
|
||||
Indian/Chagos
|
||||
Indian/Christmas
|
||||
Indian/Cocos
|
||||
Indian/Comoro
|
||||
Indian/Kerguelen
|
||||
Indian/Mahe
|
||||
Indian/Maldives
|
||||
Indian/Mauritius
|
||||
Indian/Mayotte
|
||||
Indian/Reunion
|
||||
Iran
|
||||
Israel
|
||||
Jamaica
|
||||
Japan
|
||||
Kwajalein
|
||||
Libya
|
||||
MET
|
||||
MST
|
||||
MST7MDT
|
||||
Mexico/BajaNorte
|
||||
Mexico/BajaSur
|
||||
Mexico/General
|
||||
NZ
|
||||
NZ-CHAT
|
||||
Navajo
|
||||
PRC
|
||||
PST8PDT
|
||||
Pacific/Apia
|
||||
Pacific/Auckland
|
||||
Pacific/Bougainville
|
||||
Pacific/Chatham
|
||||
Pacific/Chuuk
|
||||
Pacific/Easter
|
||||
Pacific/Efate
|
||||
Pacific/Enderbury
|
||||
Pacific/Fakaofo
|
||||
Pacific/Fiji
|
||||
Pacific/Funafuti
|
||||
Pacific/Galapagos
|
||||
Pacific/Gambier
|
||||
Pacific/Guadalcanal
|
||||
Pacific/Guam
|
||||
Pacific/Honolulu
|
||||
Pacific/Johnston
|
||||
Pacific/Kiritimati
|
||||
Pacific/Kosrae
|
||||
Pacific/Kwajalein
|
||||
Pacific/Majuro
|
||||
Pacific/Marquesas
|
||||
Pacific/Midway
|
||||
Pacific/Nauru
|
||||
Pacific/Niue
|
||||
Pacific/Norfolk
|
||||
Pacific/Noumea
|
||||
Pacific/Pago_Pago
|
||||
Pacific/Palau
|
||||
Pacific/Pitcairn
|
||||
Pacific/Pohnpei
|
||||
Pacific/Ponape
|
||||
Pacific/Port_Moresby
|
||||
Pacific/Rarotonga
|
||||
Pacific/Saipan
|
||||
Pacific/Samoa
|
||||
Pacific/Tahiti
|
||||
Pacific/Tarawa
|
||||
Pacific/Tongatapu
|
||||
Pacific/Truk
|
||||
Pacific/Wake
|
||||
Pacific/Wallis
|
||||
Pacific/Yap
|
||||
Poland
|
||||
Portugal
|
||||
ROC
|
||||
ROK
|
||||
Singapore
|
||||
SystemV/AST4
|
||||
SystemV/AST4ADT
|
||||
SystemV/CST6
|
||||
SystemV/CST6CDT
|
||||
SystemV/EST5
|
||||
SystemV/EST5EDT
|
||||
SystemV/HST10
|
||||
SystemV/MST7
|
||||
SystemV/MST7MDT
|
||||
SystemV/PST8
|
||||
SystemV/PST8PDT
|
||||
SystemV/YST9
|
||||
SystemV/YST9YDT
|
||||
Turkey
|
||||
UCT
|
||||
US/Alaska
|
||||
US/Aleutian
|
||||
US/Arizona
|
||||
US/Central
|
||||
US/East-Indiana
|
||||
US/Eastern
|
||||
US/Hawaii
|
||||
US/Indiana-Starke
|
||||
US/Michigan
|
||||
US/Mountain
|
||||
US/Pacific
|
||||
US/Pacific-New
|
||||
US/Samoa
|
||||
UTC
|
||||
Universal
|
||||
W-SU
|
||||
WET
|
||||
Zulu
|
||||
|
|
@ -6,10 +6,7 @@ import (
|
|||
"html/template"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"lmika.dev/lmika/weiro/models"
|
||||
)
|
||||
|
|
@ -70,75 +67,6 @@ func (s *Service) renderCopyTemplate(upload models.Upload) string {
|
|||
return sb.String()
|
||||
}
|
||||
|
||||
func (s *Service) ReplaceUploadFile(ctx context.Context, uploadID int64, srcPath string) (models.Upload, error) {
|
||||
site, _, err := s.fetchSiteAndUser(ctx)
|
||||
if err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
|
||||
upload, err := s.db.SelectUploadByID(ctx, uploadID)
|
||||
if err != nil {
|
||||
return models.Upload{}, err
|
||||
} else if upload.SiteID != site.ID {
|
||||
return models.Upload{}, models.NotFoundError
|
||||
}
|
||||
|
||||
if err := s.up.ReplaceFile(site, upload, srcPath); err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
|
||||
stat, err := os.Stat(srcPath)
|
||||
if err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
upload.FileSize = stat.Size()
|
||||
|
||||
if err := s.db.UpdateUploadFileSize(ctx, upload.ID, upload.FileSize); err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
|
||||
return upload, nil
|
||||
}
|
||||
|
||||
func (s *Service) CreateUploadFromFile(ctx context.Context, srcPath string, filename string, mimeType string) (models.Upload, error) {
|
||||
site, _, err := s.fetchSiteAndUser(ctx)
|
||||
if err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
|
||||
stat, err := os.Stat(srcPath)
|
||||
if err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
|
||||
newUploadGUID := models.NewNanoID()
|
||||
newTime := time.Now().UTC()
|
||||
newSlug := filepath.Join(
|
||||
fmt.Sprintf("%04d", newTime.Year()),
|
||||
fmt.Sprintf("%02d", newTime.Month()),
|
||||
newUploadGUID+filepath.Ext(filename),
|
||||
)
|
||||
|
||||
newUpload := models.Upload{
|
||||
SiteID: site.ID,
|
||||
GUID: models.NewNanoID(),
|
||||
FileSize: stat.Size(),
|
||||
MIMEType: mimeType,
|
||||
Filename: filename,
|
||||
CreatedAt: newTime,
|
||||
Slug: newSlug,
|
||||
}
|
||||
if err := s.db.SaveUpload(ctx, &newUpload); err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
|
||||
if err := s.up.AdoptFile(site, newUpload, srcPath); err != nil {
|
||||
return models.Upload{}, err
|
||||
}
|
||||
|
||||
return newUpload, nil
|
||||
}
|
||||
|
||||
func (s *Service) ListUploads(ctx context.Context) (res []UploadWithURL, _ error) {
|
||||
site, _, err := s.fetchSiteAndUser(ctx)
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -1,53 +0,0 @@
|
|||
-- Queries for categories and the post_categories join table (sqlc syntax).

-- name: SelectCategoriesOfSite :many
SELECT * FROM categories
WHERE site_id = ? ORDER BY name ASC;

-- name: SelectCategory :one
SELECT * FROM categories WHERE id = ? LIMIT 1;

-- name: SelectCategoryByGUID :one
SELECT * FROM categories WHERE guid = ? LIMIT 1;

-- name: SelectCategoryBySlugAndSite :one
SELECT * FROM categories WHERE site_id = ? AND slug = ? LIMIT 1;

-- Categories attached to a single post, alphabetical by name.
-- name: SelectCategoriesOfPost :many
SELECT c.* FROM categories c
INNER JOIN post_categories pc ON pc.category_id = c.id
WHERE pc.post_id = ?
ORDER BY c.name ASC;

-- Published (state = 0), non-deleted posts in a category, newest first,
-- paginated via LIMIT/OFFSET.
-- name: SelectPublishedPostsOfCategory :many
SELECT p.* FROM posts p
INNER JOIN post_categories pc ON pc.post_id = p.id
WHERE pc.category_id = ? AND p.state = 0 AND p.deleted_at = 0
ORDER BY p.published_at DESC
LIMIT ? OFFSET ?;

-- Total count matching SelectPublishedPostsOfCategory, for pagination.
-- name: CountPostsOfCategory :one
SELECT COUNT(*) FROM posts p
INNER JOIN post_categories pc ON pc.post_id = p.id
WHERE pc.category_id = ? AND p.state = 0 AND p.deleted_at = 0;

-- name: InsertCategory :one
INSERT INTO categories (
    site_id, guid, name, slug, description, created_at, updated_at
) VALUES (?, ?, ?, ?, ?, ?, ?)
RETURNING id;

-- name: UpdateCategory :exec
UPDATE categories SET
    name = ?,
    slug = ?,
    description = ?,
    updated_at = ?
WHERE id = ?;

-- name: DeleteCategory :exec
DELETE FROM categories WHERE id = ?;

-- OR IGNORE makes attaching an already-attached category a no-op instead of
-- a primary-key violation.
-- name: InsertPostCategory :exec
INSERT OR IGNORE INTO post_categories (post_id, category_id) VALUES (?, ?);

-- Detach every category from a post (used before re-assigning the full set).
-- NOTE(review): purpose inferred from the delete-all-by-post shape — confirm.
-- name: DeletePostCategoriesByPost :exec
DELETE FROM post_categories WHERE post_id = ?;
|
||||
|
|
@ -1,34 +0,0 @@
|
|||
-- Queries for static pages (sqlc syntax).

-- Pages of a site in their configured navigation order.
-- name: SelectPagesOfSite :many
SELECT * FROM pages
WHERE site_id = ? ORDER BY sort_order ASC;

-- name: SelectPage :one
SELECT * FROM pages WHERE id = ? LIMIT 1;

-- name: SelectPageByGUID :one
SELECT * FROM pages WHERE guid = ? LIMIT 1;

-- name: SelectPageBySlugAndSite :one
SELECT * FROM pages WHERE site_id = ? AND slug = ? LIMIT 1;

-- name: InsertPage :one
INSERT INTO pages (
    site_id, guid, title, slug, body, page_type, show_in_nav, sort_order, created_at, updated_at
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
RETURNING id;

-- sort_order is intentionally not set here; it is updated separately via
-- UpdatePageSortOrder.
-- name: UpdatePage :exec
UPDATE pages SET
    title = ?,
    slug = ?,
    body = ?,
    page_type = ?,
    show_in_nav = ?,
    updated_at = ?
WHERE id = ?;

-- name: UpdatePageSortOrder :exec
UPDATE pages SET sort_order = ? WHERE id = ?;

-- name: DeletePage :exec
DELETE FROM pages WHERE id = ?;
|
||||
|
|
@ -1,27 +1,12 @@
|
|||
-- name: CountPostsOfSite :one
|
||||
SELECT COUNT(*) FROM posts
|
||||
WHERE site_id = sqlc.arg(site_id) AND (
|
||||
CASE CAST (sqlc.arg(post_filter) AS TEXT)
|
||||
WHEN 'deleted' THEN deleted_at > 0
|
||||
ELSE deleted_at = 0
|
||||
END
|
||||
);
|
||||
|
||||
-- name: SelectPostsOfSite :many
|
||||
SELECT *
|
||||
FROM posts
|
||||
WHERE site_id = sqlc.arg(site_id) AND (
|
||||
WHERE site_id = ? AND (
|
||||
CASE CAST (sqlc.arg(post_filter) AS TEXT)
|
||||
WHEN 'deleted' THEN deleted_at > 0
|
||||
ELSE deleted_at = 0
|
||||
END
|
||||
) ORDER BY created_at DESC LIMIT sqlc.arg(limit) OFFSET sqlc.arg(offset);
|
||||
|
||||
-- name: SelectPublishedPostsOfSite :many
|
||||
SELECT *
|
||||
FROM posts
|
||||
WHERE site_id = sqlc.arg(site_id) AND state = 0 AND deleted_at = 0
|
||||
ORDER BY published_at DESC LIMIT sqlc.arg(limit) OFFSET sqlc.arg(offset);
|
||||
) ORDER BY created_at DESC LIMIT 10;
|
||||
|
||||
-- name: SelectPost :one
|
||||
SELECT * FROM posts WHERE id = ? LIMIT 1;
|
||||
|
|
|
|||
|
|
@ -13,18 +13,13 @@ INSERT INTO sites (
|
|||
guid,
|
||||
title,
|
||||
tagline,
|
||||
timezone,
|
||||
posts_per_page,
|
||||
created_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
) VALUES (?, ?, ?, ?, ?)
|
||||
RETURNING id;
|
||||
|
||||
-- name: HasUsersAndSites :one
|
||||
SELECT (SELECT COUNT(*) FROM users) > 0 AND (SELECT COUNT(*) FROM sites) > 0 AS has_users_and_sites;
|
||||
|
||||
-- name: UpdateSite :exec
|
||||
UPDATE sites SET title = ?, tagline = ?, timezone = ?, posts_per_page = ? WHERE id = ?;
|
||||
|
||||
-- name: SelectAllSitesWithOwners :many
|
||||
SELECT s.id, s.guid, s.title, s.owner_id, u.username
|
||||
FROM sites s
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ SELECT * FROM uploads WHERE id = ? LIMIT 1;
|
|||
-- name: SelectUploadBySiteIDAndSlug :one
|
||||
SELECT * FROM uploads WHERE site_id = ? AND slug = ? LIMIT 1;
|
||||
|
||||
-- name: InsertUpload :one
|
||||
-- name: InsertUpload :exec
|
||||
INSERT INTO uploads (
|
||||
site_id,
|
||||
guid,
|
||||
|
|
@ -23,8 +23,5 @@ RETURNING id;
|
|||
-- name: UpdateUpload :exec
|
||||
UPDATE uploads SET alt = ? WHERE id = ?;
|
||||
|
||||
-- name: UpdateUploadFileSize :exec
|
||||
UPDATE uploads SET file_size = ? WHERE id = ?;
|
||||
|
||||
-- name: DeleteUpload :exec
|
||||
DELETE FROM uploads WHERE id = ?;
|
||||
|
|
@ -1 +0,0 @@
|
|||
-- Add a per-site timezone; existing rows default to UTC.
ALTER TABLE sites ADD COLUMN timezone TEXT NOT NULL DEFAULT 'UTC';
|
||||
|
|
@ -1,23 +0,0 @@
|
|||
-- Categories: per-site labels attached to posts through post_categories.
CREATE TABLE categories (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    site_id INTEGER NOT NULL,
    guid TEXT NOT NULL,
    name TEXT NOT NULL,
    slug TEXT NOT NULL,
    description TEXT NOT NULL DEFAULT '',
    -- NOTE(review): integer timestamps, presumably unix epoch — confirm
    -- against the application code that writes them.
    created_at INTEGER NOT NULL,
    updated_at INTEGER NOT NULL,
    FOREIGN KEY (site_id) REFERENCES sites (id) ON DELETE CASCADE
);
CREATE INDEX idx_categories_site ON categories (site_id);
CREATE UNIQUE INDEX idx_categories_guid ON categories (guid);
-- Slugs only need to be unique within a site, not globally.
CREATE UNIQUE INDEX idx_categories_site_slug ON categories (site_id, slug);

-- Many-to-many join between posts and categories. Rows disappear
-- automatically when either side is deleted (ON DELETE CASCADE).
CREATE TABLE post_categories (
    post_id INTEGER NOT NULL,
    category_id INTEGER NOT NULL,
    PRIMARY KEY (post_id, category_id),
    FOREIGN KEY (post_id) REFERENCES posts (id) ON DELETE CASCADE,
    FOREIGN KEY (category_id) REFERENCES categories (id) ON DELETE CASCADE
);
-- The composite primary key serves lookups by post; this index serves
-- lookups by category.
CREATE INDEX idx_post_categories_category ON post_categories (category_id);
|
|
@ -1 +0,0 @@
|
|||
-- Add a configurable page size for post listings; existing rows default to 10.
ALTER TABLE sites ADD COLUMN posts_per_page INTEGER NOT NULL DEFAULT 10;
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
-- Static pages: per-site content that is not part of the post stream.
CREATE TABLE pages (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    site_id INTEGER NOT NULL,
    guid TEXT NOT NULL,
    title TEXT NOT NULL,
    slug TEXT NOT NULL,
    body TEXT NOT NULL,
    -- NOTE(review): page_type values are an application-level enum; meaning
    -- of each value is not visible here — confirm against the models package.
    page_type INTEGER NOT NULL DEFAULT 0,
    -- 0/1 flag: whether the page appears in site navigation.
    show_in_nav INTEGER NOT NULL DEFAULT 0,
    -- Position within the navigation list (see SelectPagesOfSite ordering).
    sort_order INTEGER NOT NULL DEFAULT 0,
    created_at INTEGER NOT NULL,
    updated_at INTEGER NOT NULL,
    FOREIGN KEY (site_id) REFERENCES sites (id) ON DELETE CASCADE
);
CREATE INDEX idx_pages_site ON pages (site_id);
CREATE UNIQUE INDEX idx_pages_guid ON pages (guid);
-- Slugs only need to be unique within a site.
CREATE UNIQUE INDEX idx_pages_site_slug ON pages (site_id, slug);
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue