ffxiv-wiki-scripts/lib/api/mediawiki.js
ewin 436acf7299
improve killpage logic
relying on an interval causes the process to never exit, because the interval is still running in the background. this is bad for oneshot scripts like ours. instead the check works by comparing the current date to the last known date any time a post request is sent
2025-09-04 23:04:54 -04:00

283 lines
8 KiB
JavaScript

// Extremely basic API client for MediaWiki
import makeFetchCookie from 'fetch-cookie';
/**
 * Builds a `FormData` body from a plain object, skipping entries whose value
 * is `null`, `undefined`, or `false` (MediaWiki treats boolean flags as
 * present/absent, so `false` means "omit the parameter entirely").
 * Note: `0` and `''` are legitimate values and are kept — only an explicit
 * `false` or a nullish value is dropped.
 * @param {Record<string, any>} entries
 * @returns {FormData}
 */
function formDataBody (entries) {
  const data = new FormData();
  for (const [key, value] of Object.entries(entries)) {
    // `== null` intentionally matches both null and undefined;
    // `!== false` is strict so falsy-but-valid values like 0 and '' survive
    if (value != null && value !== false) {
      data.set(key, value);
    }
  }
  return data;
}
export class MediaWikiClient {
  /**
   * Creates a new client. Remember to also call `.login()`.
   * @param {string} wikiURL Target wiki's MediaWiki path (i.e. the path that
   * contains `index.php` and `api.php`) without a trailing slash. For example
   * for English Wikipedia this would be `'https://en.wikipedia.org/w'`.
   * @param {object} [options]
   * @param {string} [options.killPage] Name of a page that, if provided, will
   * be read periodically. If the page ever contains text, the process will be
   * killed. This is a safety measure allowing other editors to kill an
   * unattended script if it misbehaves.
   */
  constructor (wikiURL, {killPage} = {}) {
    this.wikiURL = wikiURL;
    this.killPage = killPage;
    // Epoch ms of the last kill-page read; 0 guarantees the first POST after
    // login triggers a check immediately.
    this.lastKillPageCheck = 0;
    this.fetch = makeFetchCookie(fetch);
  }

  /**
   * Checks the kill page if needed, killing the process if not empty.
   * Throttled so the page is read at most once every 10 seconds; called from
   * every POST so a one-shot script still exits cleanly (no background timer).
   * @returns {Promise<void>}
   */
  async tryKillPageCheck () {
    if (!this.killPage) return;
    // Skip unless at least 10 seconds have passed since the last check
    if (Date.now() < this.lastKillPageCheck + 10 * 1000) return;
    this.lastKillPageCheck = Date.now();
    const response = await this.fetch(`${this.wikiURL}/index.php?action=raw&title=${encodeURIComponent(this.killPage)}`);
    const content = await response.text();
    if (content.trim()) {
      console.error('*** Kill page is not empty; stopping ***\n');
      console.error(content);
      process.exit(1);
    }
  }

  /**
   * Makes a GET request against `index.php`.
   * @param {Record<string, string>} params Query string parameters
   * @param {RequestInit} [options] Additional fetch options
   * @returns {Promise<Response>}
   */
  fetchIndexGet (params, options = {}) {
    return this.fetch(`${this.wikiURL}/index.php?${new URLSearchParams(params)}`, {
      ...options,
      method: 'GET',
    });
  }

  /**
   * Makes a JSON GET request against `api.php`.
   * @param {Record<string, string>} params Query string parameters
   * @param {RequestInit} [options] Additional fetch options
   * @returns {Promise<any>} Parsed response body
   * @throws {Error} If the API response contains an `error` object
   */
  async fetchApiGet (params, options = {}) {
    const response = await this.fetch(`${this.wikiURL}/api.php?${new URLSearchParams({
      ...params,
      format: 'json',
    })}`, {
      ...options,
      method: 'GET',
    });
    const body = await response.json();
    if (body.error) {
      throw new Error(`[${body.error.code}] ${body.error.info}`);
    }
    return body;
  }

  /**
   * Makes a JSON POST request against `api.php`. Also runs the kill-page
   * safety check before sending, since POSTs are the requests that mutate
   * wiki state.
   * @param {Record<string, string>} params Form data body parameters
   * @param {RequestInit} [options] Additional fetch options
   * @returns {Promise<any>} Parsed response body
   * @throws {Error} If the API response contains an `error` object
   */
  async fetchApiPost (params, options = {}) {
    await this.tryKillPageCheck();
    const response = await this.fetch(`${this.wikiURL}/api.php`, {
      ...options,
      method: 'POST',
      body: formDataBody({
        ...params,
        format: 'json',
      }),
    });
    const body = await response.json();
    if (body.error) {
      throw new Error(`[${body.error.code}] ${body.error.info}`);
    }
    return body;
  }

  /**
   * Obtains a login token for authenticating.
   * @returns {Promise<string>}
   */
  async getLoginToken () {
    const body = await this.fetchApiGet({
      action: 'query',
      meta: 'tokens',
      type: 'login',
    });
    return body.query.tokens.logintoken;
  }

  /**
   * Obtains a CSRF token for making edits.
   * @returns {Promise<string>}
   */
  async getCSRFToken () {
    const body = await this.fetchApiGet({
      action: 'query',
      meta: 'tokens',
    });
    return body.query.tokens.csrftoken;
  }

  /**
   * Logs in with the given bot credentials.
   * @param {string} username
   * @param {string} password
   * @returns {Promise<void>}
   * @throws {Error} If the API reports the login explicitly failed
   */
  async login (username, password) {
    const loginToken = await this.getLoginToken();
    const body = await this.fetchApiPost({
      action: 'login',
      lgname: username,
      lgpassword: password,
      lgtoken: loginToken,
    });
    // Only an explicit 'Failed' is treated as an error; other non-success
    // results (e.g. 'NeedToken') pass through silently as before
    if (body.login.result === 'Failed') {
      throw new Error(body.login.reason);
    }
  }

  /**
   * Reads the current raw wikitext of the given page.
   * @param {string} title
   * @returns {Promise<string>}
   */
  async readPage (title) {
    const response = await this.fetchIndexGet({
      action: 'raw',
      title,
    });
    return response.text();
  }

  /**
   * Updates the named page to the given text.
   * @param {string} title
   * @param {string} text
   * @param {string} summary Edit summary
   * @param {boolean} [minor] If true, this is a minor edit
   * @returns {Promise<any>}
   */
  async editPage (title, text, summary, minor = false) {
    const csrfToken = await this.getCSRFToken();
    const body = await this.fetchApiPost({
      action: 'edit',
      title,
      text,
      summary,
      minor,
      bot: true,
      watchlist: 'nochange',
      token: csrfToken,
    });
    return body;
  }

  /**
   * Purges the given pages' caches. Requests are batched because MediaWiki
   * limits purges to 50 titles per request. Does not mutate the input array.
   * @param {string[]} titles
   * @returns {Promise<void>}
   */
  async purgePages (titles) {
    for (let i = 0; i < titles.length; i += 50) {
      await this.fetchApiPost({
        action: 'purge',
        titles: titles.slice(i, i + 50).join('|'),
      });
    }
  }

  /**
   * Moves (renames) a page.
   * @param {string} from The page's current name
   * @param {string} to The page's new name
   * @param {object} options
   * @param {string} options.reason Move reason
   * @param {boolean} options.redirect Whether to create a redirect from the
   * old name to the new name
   * @param {boolean} options.moveTalk Whether to move the page's talk page
   * from the old name to the new name
   * @param {boolean} options.moveSubpages Whether to move the page's subpages
   * from the old name to the new name
   * @returns {Promise<any>}
   */
  async movePage (from, to, {
    reason,
    redirect = true,
    moveTalk = true,
    moveSubpages = true,
  } = {}) {
    const csrfToken = await this.getCSRFToken();
    return this.fetchApiPost({
      action: 'move',
      from,
      to,
      reason,
      movetalk: moveTalk,
      movesubpages: moveSubpages,
      noredirect: !redirect,
      token: csrfToken,
    });
  }

  /**
   * Gets the list of wiki pages that belong to the given category.
   * @param {string} name Category name including the `Category:` namespace.
   * @param {number[] | '*'} namespaces Integer namespace ID(s) or the string
   * `'*'`. If namespace IDs are provided, only pages in those namespaces will
   * be returned.
   * @param {number} limit Maximum number of items to return. Must be 500 or
   * less. I'm lazy and not supporting API paging so deal with it.
   * @returns {Promise<{pageid: number; title: string}[]>}
   */
  async listCategoryPages (name, namespaces = '*', limit = 50) {
    if (Array.isArray(namespaces)) {
      namespaces = namespaces.join('|');
    }
    const body = await this.fetchApiGet({
      action: 'query',
      list: 'categorymembers',
      cmtitle: name,
      cmlimit: limit,
      cmnamespace: namespaces,
    });
    return body.query.categorymembers;
  }

  /**
   * Gets the list of a user's contributions.
   * @param {string} username Name of the user whose contribs should be fetched
   * @param {object} [options]
   * @param {number | number[] | '*'} [options.namespaces] List of namespaces from which to return results
   * @param {number} [options.limit] Maximum number of items to return
   * @param {string} [options.show] See the documentation of `ucshow` at https://www.mediawiki.org/wiki/API:Usercontribs
   * @returns {Promise<{pageid: string; revid: string; timestamp: string; title: string}[]>}
   */
  async listUserContribs (username, {namespaces = '*', limit = 50, show} = {}) {
    if (Array.isArray(namespaces)) {
      namespaces = namespaces.join('|');
    }
    const body = await this.fetchApiGet({
      action: 'query',
      list: 'usercontribs',
      ucuser: username,
      uclimit: limit,
      ucnamespace: namespaces,
      ucshow: show,
    });
    return body.query.usercontribs;
  }
}