From a5ba239177fa1f2db404ad0a1ffc11680dff4754 Mon Sep 17 00:00:00 2001
From: "Paul W."
Date: Tue, 1 Apr 2025 18:36:29 -0400
Subject: [PATCH] Update links

Signed-off-by: Paul W.
---
 pages/about.tsx              | 12 +++++++++---
 public/external.json         |  2 +-
 scripts/generate-metadata.js |  7 ++++---
 3 files changed, 14 insertions(+), 7 deletions(-)

diff --git a/pages/about.tsx b/pages/about.tsx
index 5ab9214..f2bcd12 100644
--- a/pages/about.tsx
+++ b/pages/about.tsx
@@ -8,9 +8,15 @@ function AboutPage() {
     return (
-        Paul's Personal Website. I go by @LambdaPaul on GitHub and @lambda_paul on X/Twitter.
-        I also have a Gitea server at git.paulw.xyz and a Pleroma (ActivityPub/Mastodon-compatible) server at social.paulw.xyz as back-ups for my GitHub and X/Twitter.
-        Why did I create this? Why do I have the back-ups?
+        Paul's Personal Website.
+        You can find me on the following:
         The original motivation was simply to play with Next.js, as it did pretty much everything I wanted web pages to do, but that came at the cost of needless complexity. The more I use JavaScript/ECMAScript/whatever-you-want-to-call-it-script, the more convinced I am that it is not a platform worth pursuing: the more complex it gets, the less control I have over what it does, and the platform and its users seem to be okay with that sort of loss. I have instead been pivoting toward the things that impressed me and got me interested in working with computers in the first place.
         Most services/products are keen on going against what Steph Ango calls File over app, a philosophy in which you prioritize data over software and anticipate, even embrace, the eventual death of software. People instead want subscription services that barely support open formats and sometimes do not support exporting data to commonly used formats at all. The goal here is to avoid storing artifacts in locations that are not easily accessible, are not under my control, or lock me out of using my data with other software. The only reason I have not completely abandoned this site is my decision to rely on Markdown files alone; had it been built around any cloud software, I would have started over.
diff --git a/public/external.json b/public/external.json
index aac9e9c..5ea8d4f 100644
--- a/public/external.json
+++ b/public/external.json
@@ -1,4 +1,4 @@
 {
     "GitHub": "https://github.com/lambdapaul",
-    "Twitter/X": "https://x.com/lambda_paul"
+    "Twitter/X": "https://x.com/paulw_xyz"
 }
diff --git a/scripts/generate-metadata.js b/scripts/generate-metadata.js
index 7b59438..e99f830 100644
--- a/scripts/generate-metadata.js
+++ b/scripts/generate-metadata.js
@@ -2,9 +2,10 @@
 const path = require('path')
 const fs = require('fs/promises')
 const gitRef = process.env.WWW_GIT_REF ?? 'master'
+const giteaApiRepo = `https://git.paulw.xyz/api/v1/repos/xyz/www/`
 
 async function readFirstLines(filePath, lineCount = 1) {
-    const gitFileFetch = await fetch(`https://git.paulw.xyz/api/v1/repos/lambdapaul/www/raw/${filePath}?ref=${gitRef}`)
+    const gitFileFetch = await fetch(`${giteaApiRepo}raw/${filePath}?ref=${gitRef}`)
     if (!gitFileFetch.ok) return null
     const file = await gitFileFetch.text()
     const lines = file.split('\n')
@@ -30,7 +31,7 @@ async function getTitle(filePath) {
 }
 
 async function getMarkdownMetadata(dir) {
-    const dirGitInfoFetch = await fetch(`https://git.paulw.xyz/api/v1/repos/lambdapaul/www/contents/${dir}/?ref=${gitRef}`)
+    const dirGitInfoFetch = await fetch(`${giteaApiRepo}contents/${dir}/?ref=${gitRef}`)
     if (!dirGitInfoFetch.ok) return {}
 
     const commits = {}
@@ -47,7 +48,7 @@ async function getMarkdownMetadata(dir) {
         if (!(file.last_commit_sha in commits)) {
-            const lastCommitSha = await fetch(`https://git.paulw.xyz/api/v1/repos/lambdapaul/www/git/commits/${file.last_commit_sha}`)
+            const lastCommitSha = await fetch(`${giteaApiRepo}git/commits/${file.last_commit_sha}`)
             if (lastCommitSha.ok) {
                 const commitJson = await lastCommitSha.json()
                 commits[commitJson.sha] = (new Date(commitJson.created))
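
Note: the three Gitea API calls above now share the giteaApiRepo base URL, which ends in a trailing slash, so the endpoint paths (raw/, contents/, git/commits/) are appended without a leading slash. A minimal sketch of how the URLs compose; the file path, directory, and commit SHA below are placeholders for illustration, not values from the repository:

// Sketch: composing Gitea API URLs from the shared base.
// giteaApiRepo ends with '/', so endpoint paths must not start with one.
const giteaApiRepo = 'https://git.paulw.xyz/api/v1/repos/xyz/www/'
const gitRef = process.env.WWW_GIT_REF ?? 'master'

// Hypothetical inputs, for illustration only.
const filePath = 'pages/example.md'
const dir = 'pages'
const sha = 'deadbeef'

console.log(`${giteaApiRepo}raw/${filePath}?ref=${gitRef}`)   // raw file contents
console.log(`${giteaApiRepo}contents/${dir}/?ref=${gitRef}`)  // directory listing
console.log(`${giteaApiRepo}git/commits/${sha}`)              // commit metadata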