Restructure project files
parent 107fa221c9
commit 8f07661fb2
6 changed files with 6 additions and 5 deletions
lib/api/lodestone.js (new file, 32 lines added)
@@ -0,0 +1,32 @@
// Utilities for scraping data from the Lodestone

/**
 * @see https://stackoverflow.com/a/6969486
 * @param {string} str
 * @returns {string}
 */
const regExpEscape = str => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

/**
 * Creates a regular expression that matches a link to the named item and
 * captures its EDB ID from the matched link's `href` attribute.
 * @param {string} name
 * @returns {RegExp}
 */
const itemLinkRegExp = name => new RegExp(`<a href="/lodestone/playguide/db/item/(?<id>[a-z0-9]+)[^"]+"[^>]*>(?<name>${regExpEscape(name)})</a>`, 'i');

/**
 * Gets the ID of the named item in the Eorzea Database.
 * @param {string} name
 * @returns {Promise<string | undefined>}
 */
export async function findItemEDBID (name) {
    // execute a search for the item's name
    const searchURL = `https://na.finalfantasyxiv.com/lodestone/playguide/db/item/?q=${encodeURIComponent(name.replace(/\([^)]+\)|[&-]/g, ' '))}`;
    const response = await fetch(searchURL);
    const body = await response.text();
    // find an `<a>` in the HTML response whose text exactly matches the name
    const match = body.match(itemLinkRegExp(name));
    // return the ID parsed from the URL in the `href` attribute
    return match?.groups.id;
}
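For reference, a minimal usage sketch of the new Lodestone helper (not part of this commit; it assumes an ESM entry point on Node 18+ where global `fetch` is available, and the item name is only an example):

// usage sketch (illustrative only, not part of this commit)
import { findItemEDBID } from './lib/api/lodestone.js';

const id = await findItemEDBID('Tincture of Intelligence');
if (id) {
    // the captured ID slots back into the same Lodestone item URL pattern the regexp matches
    console.log(`https://na.finalfantasyxiv.com/lodestone/playguide/db/item/${id}/`);
} else {
    console.log('No exact name match found in the search results');
}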
lib/api/mediawiki.js (new file, 188 lines added)
@@ -0,0 +1,188 @@
// Extremely basic API client for MediaWiki

import makeFetchCookie from 'fetch-cookie';

function formDataBody (entries) {
    const data = new FormData();
    for (const [key, value] of Object.entries(entries)) {
        // skip parameters that are unset or false (MediaWiki treats a flag's presence as true)
        if (value != null && value != false) {
            data.set(key, value);
        }
    }
    return data;
}

export class MediaWikiClient {
    /**
     * Creates a new client. Remember to also call `.login()`.
     * @param {string} wikiURL Target wiki's MediaWiki path (i.e. the path that
     * contains `index.php` and `api.php`) without a trailing slash. For example,
     * for English Wikipedia this would be `'https://en.wikipedia.org/w'`.
     */
    constructor (wikiURL) {
        this.wikiURL = wikiURL;
        this.fetch = makeFetchCookie(fetch);
    }

    /**
     * Makes a GET request against `index.php`.
     * @param {Record<string, string>} params Query string parameters
     * @param {RequestInit} [options] Additional fetch options
     * @returns {Promise<Response>}
     */
    fetchIndexGet (params, options = {}) {
        return this.fetch(`${this.wikiURL}/index.php?${new URLSearchParams(params)}`, {
            ...options,
            method: 'GET',
        });
    }

    /**
     * Makes a JSON GET request against `api.php`.
     * @param {Record<string, string>} params Query string parameters
     * @param {RequestInit} [options] Additional fetch options
     * @returns {Promise<any>}
     */
    async fetchApiGet (params, options = {}) {
        const response = await this.fetch(`${this.wikiURL}/api.php?${new URLSearchParams({
            ...params,
            format: 'json',
        })}`, {
            ...options,
            method: 'GET',
        });
        const body = await response.json();
        if (body.error) {
            throw new Error(`[${body.error.code}] ${body.error.info}`);
        }
        return body;
    }

    /**
     * Makes a JSON POST request against `api.php`.
     * @param {Record<string, string>} params Form data body parameters
     * @param {RequestInit} [options] Additional fetch options
     * @returns {Promise<any>}
     */
    async fetchApiPost (params, options = {}) {
        const response = await this.fetch(`${this.wikiURL}/api.php`, {
            ...options,
            method: 'POST',
            body: formDataBody({
                ...params,
                format: 'json',
            }),
        });
        const body = await response.json();
        if (body.error) {
            throw new Error(`[${body.error.code}] ${body.error.info}`);
        }
        return body;
    }

    /**
     * Obtains a login token for authenticating.
     * @returns {Promise<string>}
     */
    async getLoginToken () {
        const body = await this.fetchApiGet({
            action: 'query',
            meta: 'tokens',
            type: 'login',
        });
        return body.query.tokens.logintoken;
    }

    /**
     * Obtains a CSRF token for making edits.
     * @returns {Promise<string>}
     */
    async getCSRFToken () {
        const body = await this.fetchApiGet({
            action: 'query',
            meta: 'tokens',
        });
        return body.query.tokens.csrftoken;
    }

    /**
     * Logs in with the given bot credentials.
     * @param {string} username
     * @param {string} password
     * @returns {Promise<void>}
     */
    async login (username, password) {
        const loginToken = await this.getLoginToken();
        const body = await this.fetchApiPost({
            action: 'login',
            lgname: username,
            lgpassword: password,
            lgtoken: loginToken,
        });
        if (body.login.result === 'Failed') {
            throw new Error(body.login.reason);
        }
    }

    /**
     * Gets the current raw wikitext contents of the named page.
     * @param {string} title
     * @returns {Promise<string>}
     */
    async readPage (title) {
        const response = await this.fetchIndexGet({
            action: 'raw',
            title,
        });
        return response.text();
    }

    /**
     * Updates the named page to the given text.
     * @param {string} title
     * @param {string} text
     * @param {string} summary Edit summary
     * @param {boolean} [minor] If true, this is a minor edit
     * @returns {Promise<any>}
     */
    async editPage (title, text, summary, minor = false) {
        const csrfToken = await this.getCSRFToken();
        const body = await this.fetchApiPost({
            action: 'edit',
            title,
            text,
            summary,
            minor,
            bot: true,
            watchlist: 'nochange',
            token: csrfToken,
            format: 'json',
        });
        return body;
    }

    /**
     * Gets the list of wiki pages that belong to the given category.
     * @param {string} name Category name including the `Category:` namespace.
     * @param {number[] | '*'} namespaces Integer namespace ID(s) or the string
     * `'*'`. If namespace IDs are provided, only pages in those namespaces will
     * be returned.
     * @param {number} limit Maximum number of items to return. Must be 500 or
     * less. I'm lazy and not supporting API paging so deal with it.
     * @returns {Promise<{pageid: number; title: string}[]>}
     */
    async listCategoryPages (name, namespaces = '*', limit = 50) {
        if (Array.isArray(namespaces)) {
            namespaces = namespaces.join('|');
        }
        const body = await this.fetchApiGet({
            action: 'query',
            list: 'categorymembers',
            cmtitle: name,
            cmlimit: limit,
            cmnamespace: namespaces,
        });
        return body.query.categorymembers;
    }
}
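A hedged sketch of how the two new modules might be wired together (not part of this commit; the wiki URL, environment variable names, and category name are illustrative assumptions, and the commented-out edit call stands in for whatever update logic the bot actually performs):

// usage sketch (illustrative only, not part of this commit)
import { MediaWikiClient } from './lib/api/mediawiki.js';
import { findItemEDBID } from './lib/api/lodestone.js';

// hypothetical wiki URL and bot credentials
const wiki = new MediaWikiClient('https://wiki.example.org/w');
await wiki.login(process.env.WIKI_BOT_USERNAME, process.env.WIKI_BOT_PASSWORD);

// walk a (hypothetical) item category in the main namespace (ID 0) and look up each page's EDB ID
const pages = await wiki.listCategoryPages('Category:Items', [0], 500);
for (const {title} of pages) {
    const edbID = await findItemEDBID(title);
    if (!edbID) continue;
    const text = await wiki.readPage(title);
    // real update logic would transform `text` here before saving it back, e.g.:
    // await wiki.editPage(title, updatedText, `Add EDB ID ${edbID}`, true);
    console.log(title, edbID, text.length);
}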