Add "Woofer Exploration" and improve drafting
This commit is contained in:
parent
a9d5a88d0e
commit
405ad38f5d
25 changed files with 300 additions and 189 deletions
@@ -4,18 +4,24 @@ import { marked, type RendererApi } from "marked";
 import { decode as tinyDecode } from "tiny-decode";
 import type { Lang, Website } from "../../../content/config";
 import { t } from "../../../i18n";
+import { formatCopyrightedCharacters } from "../../../utils/format_copyrighted_characters";

 type ExportFormat = "bbcode" | "markdown";
+interface ExportWebsiteInfo {
+  website: string;
+  exportFormat: "bbcode" | "markdown";
+}

 const WEBSITE_LIST = [
-  ["eka", "bbcode"],
-  ["furaffinity", "bbcode"],
-  ["inkbunny", "bbcode"],
-  ["sofurry", "bbcode"],
-  ["weasyl", "markdown"],
-] as const satisfies [Website, ExportFormat][];
+  { website: "eka", exportFormat: "bbcode" },
+  { website: "furaffinity", exportFormat: "bbcode" },
+  { website: "inkbunny", exportFormat: "bbcode" },
+  { website: "sofurry", exportFormat: "bbcode" },
+  { website: "weasyl", exportFormat: "markdown" },
+] as const satisfies ExportWebsiteInfo[];

-type ExportWebsite = typeof WEBSITE_LIST extends ReadonlyArray<[infer K, ExportFormat]> ? K : never;
+type ExportWebsiteName = typeof WEBSITE_LIST extends ReadonlyArray<{ website: infer K }> ? K : never;

+//type ExportWebsiteName = typeof WEBSITE_LIST extends ReadonlyArray<[infer K, DescriptionExportFormat]> ? K : never;

 const bbcodeRenderer: RendererApi = {
   strong: (text) => `[b]${text}[/b]`,
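For readers unfamiliar with the conditional-type trick in that hunk, here is a minimal standalone sketch of how ExportWebsiteName derives a union of website names from the as const list. The two-entry list and the example Record are simplified assumptions for illustration, not the repository's actual code.

// Simplified stand-in list; the real one carries all five websites plus a `satisfies` check.
const WEBSITE_LIST = [
  { website: "eka", exportFormat: "bbcode" },
  { website: "weasyl", exportFormat: "markdown" },
] as const;

// `as const` keeps each `website` value as a literal type, so `infer K`
// collects the union of those literals: "eka" | "weasyl".
type ExportWebsiteName = typeof WEBSITE_LIST extends ReadonlyArray<{ website: infer K }> ? K : never;

// That union can then key a Record, as the per-website description object later in the file does.
const example: Record<ExportWebsiteName, string> = { eka: "…", weasyl: "…" };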
@@ -122,7 +128,7 @@ function isPreferredWebsite(user: CollectionEntry<"users">, website: Website): b
   return !preferredLink || preferredLink == website;
 }

-function getLinkForUser(user: CollectionEntry<"users">, website: ExportWebsite, anonymousFallback: string): string {
+function getLinkForUser(user: CollectionEntry<"users">, website: ExportWebsiteName, anonymousFallback: string): string {
   if (user.data.isAnonymous) {
     return anonymousFallback;
   }
@@ -207,35 +213,21 @@ export const getStaticPaths: GetStaticPaths = async () => {

 export const GET: APIRoute<Props, Params> = async ({ props: { story }, site }) => {
   const { lang } = story.data;
-  const copyrightedCharacters = await Promise.all(
-    Object.values(
-      Object.keys(story.data.copyrightedCharacters).reduce(
-        (acc, character) => {
-          const user = story.data.copyrightedCharacters[character];
-          if (!(user.id in acc)) {
-            acc[user.id] = [getEntry(user), []];
-          }
-          acc[user.id][1].push(character);
-          return acc;
-        },
-        {} as Record<string, [Promise<CollectionEntry<"users">>, string[]]>,
-      ),
-    ).map(async ([userPromise, characters]) => [await userPromise, characters] as [CollectionEntry<"users">, string[]]),
-  );
+  const copyrightedCharacters = await formatCopyrightedCharacters(story.data.copyrightedCharacters);
   const authorsList = await getEntries([story.data.authors].flat());
   const commissioner = story.data.commissioner && (await getEntry(story.data.commissioner));
   const requester = story.data.requester && (await getEntry(story.data.requester));
   const anonymousUser = await getEntry("users", "anonymous");
   const anonymousFallback = getNameForUser(anonymousUser, anonymousUser, lang);

-  const description: Record<ExportWebsite, string> = Object.fromEntries(
+  const description: Record<ExportWebsiteName, string> = Object.fromEntries(
     await Promise.all(
-      WEBSITE_LIST.map(async ([website, exportFormat]) => {
+      WEBSITE_LIST.map(async ({ website, exportFormat }) => {
         const u = (user: CollectionEntry<"users">) => getLinkForUser(user, website, anonymousFallback);
         const storyDescription = (
           [
             story.data.description,
-            `*${t(lang, "story/warnings", story.data.wordCount, story.data.contentWarning.trim())}*`,
+            `*${t(lang, "story/warnings", story.data.wordCount || "???", story.data.contentWarning.trim())}*`,
             t(
               lang,
               "export_story/writing",
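The extracted formatCopyrightedCharacters helper itself is not part of this diff; judging by the inline reduce it replaces, it presumably groups character names by their owning user and resolves each user entry once. A rough sketch of that grouping on plain data, with hypothetical names and no Astro types:

// Hypothetical input shape: character name -> reference to the owning user.
function groupCharactersByOwner(copyrighted: Record<string, { id: string }>): [string, string[]][] {
  const byOwner = new Map<string, string[]>();
  for (const [character, owner] of Object.entries(copyrighted)) {
    const characters = byOwner.get(owner.id) ?? [];
    characters.push(character);
    byOwner.set(owner.id, characters);
  }
  return [...byOwner.entries()];
}

// groupCharactersByOwner({ "Character A": { id: "user-1" }, "Character B": { id: "user-1" } })
// -> [["user-1", ["Character A", "Character B"]]]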
@@ -253,8 +245,7 @@ export const GET: APIRoute<Props, Params> = async ({ props: { story }, site }) =
           .join("\n\n")
           .replaceAll(
             /\[([^\]]+)\]\((\/[^\)]+)\)/g,
-            (_, group1, group2) =>
-              `[${group1}](${new URL(group2, site).toString()})`,
+            (_, group1, group2) => `[${group1}](${new URL(group2, site).toString()})`,
           );
         if (exportFormat === "bbcode") {
           return [
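The replaceAll call above rewrites site-relative markdown links into absolute ones before export. A standalone illustration of the same pattern, with a hypothetical site URL and input string:

const site = "https://example.com";
const input = "See [the map](/series/the-lost-of-the-marshes) for details.";

// Capture the link text and any path starting with "/", then resolve the path against the site origin.
const output = input.replaceAll(
  /\[([^\]]+)\]\((\/[^\)]+)\)/g,
  (_, label, path) => `[${label}](${new URL(path, site).toString()})`,
);
// output === "See [the map](https://example.com/series/the-lost-of-the-marshes) for details."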
@@ -29,11 +29,11 @@ const getLinkForUser = (user: CollectionEntry<"users">, lang: Lang) => {
 };

 export const GET: APIRoute = async ({ site }) => {
-  const stories = (await getCollection("stories", (story) => !story.data.isDraft))
-    .sort((a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime())
+  const stories = (await getCollection("stories", (story) => !story.data.isDraft && story.data.pubDate))
+    .sort((a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime())
     .slice(0, MAX_ITEMS);
-  const games = (await getCollection("games", (game) => !game.data.isDraft))
-    .sort((a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime())
+  const games = (await getCollection("games", (game) => !game.data.isDraft && game.data.pubDate))
+    .sort((a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime())
     .slice(0, MAX_ITEMS);
   const users = await getCollection("users");

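Most of the remaining hunks make this same change: pubDate is apparently optional now (drafts without a publication date), so the getCollection filters check it and the later sorts use non-null assertions. A small sketch, on plain data rather than Astro's collection types, of why the `!` is still needed after filtering, plus a type-guard alternative this commit does not use:

interface Entry {
  pubDate?: Date;
}

const entries: Entry[] = [{ pubDate: new Date("2024-01-01") }, {}];

// A plain boolean filter does not narrow the element type, so `pubDate`
// stays `Date | undefined` and the sort needs `!`, as in the diff.
const published = entries.filter((e) => e.pubDate);
published.sort((a, b) => b.pubDate!.getTime() - a.pubDate!.getTime());

// A type-guard filter narrows instead and avoids the assertions.
const narrowed = entries.filter((e): e is Entry & { pubDate: Date } => e.pubDate !== undefined);
narrowed.sort((a, b) => b.pubDate.getTime() - a.pubDate.getTime());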
@@ -45,7 +45,7 @@ export const GET: APIRoute = async ({ site }) => {
     await Promise.all(
       stories.map<Promise<FeedItem>>(async ({ data, slug, body }) => ({
         title: `New story! "${data.title}"`,
-        pubDate: toNoonUTCDate(data.pubDate),
+        pubDate: toNoonUTCDate(data.pubDate!),
         link: `/stories/${slug}`,
         description:
           `${t(data.lang, "story/warnings", data.wordCount, data.contentWarning.trim())} ${data.descriptionPlaintext || data.description}`
@@ -81,7 +81,7 @@ export const GET: APIRoute = async ({ site }) => {
     await Promise.all(
       games.map<Promise<FeedItem>>(async ({ data, slug, body }) => ({
         title: `New game! "${data.title}"`,
-        pubDate: toNoonUTCDate(data.pubDate),
+        pubDate: toNoonUTCDate(data.pubDate!),
         link: `/games/${slug}`,
         description:
           `${t(data.lang, "game/platforms", data.platforms)}. ${data.contentWarning} ${data.descriptionPlaintext || data.description}`
@@ -3,8 +3,8 @@ import { Image } from "astro:assets";
 import { getCollection } from "astro:content";
 import GalleryLayout from "../layouts/GalleryLayout.astro";

-const games = (await getCollection("games", (game) => !game.data.isDraft)).sort(
-  (a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime(),
+const games = (await getCollection("games", (game) => !game.data.isDraft && game.data.pubDate)).sort(
+  (a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime(),
 );
 ---

@@ -27,7 +27,7 @@ const games = (await getCollection("games", (game) => !game.data.isDraft)).sort(
           <span>{game.data.title}</span>
           <br />
           <span class="italic">
-            {game.data.pubDate.toLocaleDateString("en-US", { month: "short", day: "numeric", year: "numeric" })}
+            {game.data.pubDate!.toLocaleDateString("en-US", { month: "short", day: "numeric", year: "numeric" })}
           </span>
         </>
       </div>
@@ -13,11 +13,11 @@ interface LatestItemsEntry {
   pubDate: Date;
 }

-const stories = (await getCollection("stories", (story) => !story.data.isDraft))
-  .sort((a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime())
+const stories = (await getCollection("stories", (story) => !story.data.isDraft && story.data.pubDate))
+  .sort((a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime())
   .slice(0, MAX_ITEMS);
-const games = (await getCollection("games", (game) => !game.data.isDraft))
-  .sort((a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime())
+const games = (await getCollection("games", (game) => !game.data.isDraft && game.data.pubDate))
+  .sort((a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime())
   .slice(0, MAX_ITEMS);

 const latestItems: LatestItemsEntry[] = [
@@ -26,14 +26,14 @@ const latestItems: LatestItemsEntry[] = [
     thumbnail: story.data.thumbnail,
     href: `/stories/${story.slug}`,
     title: story.data.title,
-    pubDate: story.data.pubDate,
+    pubDate: story.data.pubDate!,
   })),
   games.map<LatestItemsEntry>((game) => ({
     type: "Game",
     thumbnail: game.data.thumbnail,
     href: `/games/${game.slug}`,
     title: game.data.title,
-    pubDate: game.data.pubDate,
+    pubDate: game.data.pubDate!,
   })),
 ]
   .flat()
@@ -5,7 +5,7 @@ The briefcase logo and any unattributed characters are copyrighted and trademark

 The Noto Sans and Noto Serif typefaces are copyrighted to the Noto Project Authors and distributed under the SIL Open Font License v1.1.

-The generic SVG icons were created by Font Awesome and are distributed under the CC BY 4.0 license.
+The generic SVG icons were created by Font Awesome and are distributed under the CC-BY-4.0 license.

 All third-party trademarks belong to their respective owners, and I'm not affiliated with any of them.
 `.trim();
@@ -10,8 +10,8 @@ type Props = {
 };

 export const getStaticPaths: GetStaticPaths = async ({ paginate }) => {
-  const stories = (await getCollection("stories", (story) => !story.data.isDraft)).sort(
-    (a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime(),
+  const stories = (await getCollection("stories", (story) => !story.data.isDraft && story.data.pubDate)).sort(
+    (a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime(),
   );
   return paginate(stories, { pageSize: 30 }) satisfies { props: Props }[];
 };
@@ -82,7 +82,7 @@ const totalPages = Math.ceil(page.total / page.size);
         <span>{story.data.title}</span>
         <br />
         <span class="italic">
-          {story.data.pubDate.toLocaleDateString("en-US", { month: "short", day: "numeric", year: "numeric" })}
+          {story.data.pubDate!.toLocaleDateString("en-US", { month: "short", day: "numeric", year: "numeric" })}
         </span>
       </div>
     </a>
@@ -5,13 +5,16 @@ import GalleryLayout from "../../layouts/GalleryLayout.astro";
 import mapImage from "../../assets/images/tlotm_map.jpg";

 const series = await getEntry("series", "the-lost-of-the-marshes");
-const stories = await getCollection("stories", (story) => !story.data.isDraft && story.data.series?.id === series.id);
+const stories = await getCollection(
+  "stories",
+  (story) => !story.data.isDraft && story.data.pubDate && story.data.series?.id === series.id,
+);
 const mainChapters = stories
   .filter((story) => story.slug.startsWith("the-lost-of-the-marshes/chapter-"))
-  .sort((a, b) => a.data.pubDate.getTime() - b.data.pubDate.getTime());
+  .sort((a, b) => a.data.pubDate!.getTime() - b.data.pubDate!.getTime());
 const bonusChapters = stories
   .filter((story) => story.slug.startsWith("the-lost-of-the-marshes/bonus-"))
-  .sort((a, b) => a.data.pubDate.getTime() - b.data.pubDate.getTime());
+  .sort((a, b) => a.data.pubDate!.getTime() - b.data.pubDate!.getTime());
 const mainChaptersWithSummaries = mainChapters.filter((story) => story.data.summary);
 ---

@@ -46,9 +49,9 @@ const mainChaptersWithSummaries = mainChapters.filter((story) => story.data.summ
         .map((story) => (
           <li class="my-2">
             <a class="text-link underline" href={`/stories/${story.slug}`}>
-              {story.data.shortTitle || story.data.title}
+              {story.data.shortTitle || story.data.title}:
             </a>
-            : <span>{story.data.summary}</span>
+            <span>{story.data.summary}</span>
           </li>
         ))
       }
@@ -16,7 +16,8 @@ type Params = {
 };

 export const getStaticPaths: GetStaticPaths = async () => {
-  const [stories, games] = await Promise.all([getCollection("stories"), getCollection("games")]);
+  const [stories, games, series] = await Promise.all([getCollection("stories"), getCollection("games"), getCollection("series")]);
+  const seriesTags = new Set(series.map((s) => s.data.name));
   const tags = new Set<string>();
   stories.forEach((story) => {
     story.data.tags.forEach((tag) => {
|
|||
});
|
||||
});
|
||||
return [...tags]
|
||||
.filter((tag) => !["The Lost of the Marshes"].includes(tag))
|
||||
.filter((tag) => !seriesTags.has(tag))
|
||||
.map((tag) => ({
|
||||
params: { slug: slug(tag) } satisfies Params,
|
||||
props: {
|
||||
tag,
|
||||
stories: stories
|
||||
.filter((story) => !story.data.isDraft && story.data.tags.includes(tag))
|
||||
.sort((a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime()),
|
||||
.filter((story) => !story.data.isDraft && story.data.pubDate && story.data.tags.includes(tag))
|
||||
.sort((a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime()),
|
||||
games: games
|
||||
.filter((game) => !game.data.isDraft && game.data.tags.includes(tag))
|
||||
.sort((a, b) => b.data.pubDate.getTime() - a.data.pubDate.getTime()),
|
||||
.filter((game) => !game.data.isDraft && game.data.pubDate && game.data.tags.includes(tag))
|
||||
.sort((a, b) => b.data.pubDate!.getTime() - a.data.pubDate!.getTime()),
|
||||
} satisfies Props,
|
||||
}));
|
||||
};
|
||||
|
|
|
|||
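The last two hunks work together: series names are collected into a Set once, and the tag pages exclude them instead of hard-coding "The Lost of the Marshes". A small sketch with hypothetical tag data:

const series = [{ data: { name: "The Lost of the Marshes" } }];
const seriesTags = new Set(series.map((s) => s.data.name));

// Tags that match a series name are dropped before generating tag pages.
const tags = new Set<string>(["The Lost of the Marshes", "adventure", "slice-of-life"]);
const tagPages = [...tags].filter((tag) => !seriesTags.has(tag));
// tagPages === ["adventure", "slice-of-life"]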