Run pre-commit hooks over existing codebase

Co-Authored-By: Ben Phelps <ben@phelps.io>
shamoon 2023-10-17 23:26:55 -07:00
parent fa50bbad9c
commit 19c25713c4
387 changed files with 4785 additions and 4109 deletions

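The hunks below are the mechanical result of applying the project's pre-commit formatting hooks to every file at once rather than only to files touched by a change. The hook configuration itself is not part of this diff, so the snippet that follows is only a minimal sketch of how such a run is typically configured and invoked with the pre-commit tool; the mirrors-prettier repo and the pinned rev are illustrative assumptions, not taken from this repository.

# .pre-commit-config.yaml (hypothetical minimal sketch)
repos:
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v3.0.3 # placeholder tag; pin to whatever version the project actually uses
    hooks:
      - id: prettier # rewrites supported file types (JS, JSX, YAML, Markdown, ...) in Prettier's style

# apply every configured hook to the whole tree, not just staged files
pre-commit run --all-files

The formatting-only changes in the hunks (double quotes instead of single quotes, trailing commas, parenthesized single-parameter arrow functions, re-wrapped long lines) are consistent with Prettier's defaults.
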
View file

@@ -9,7 +9,7 @@ import {
servicesFromConfig,
servicesFromDocker,
cleanServiceGroups,
servicesFromKubernetes
servicesFromKubernetes,
} from "utils/config/service-helpers";
import { cleanWidgetGroups, widgetsFromConfig } from "utils/config/widget-helpers";
@@ -59,7 +59,7 @@ export async function bookmarksResponse() {
bookmarksArray.forEach((group) => {
if (definedLayouts) {
const layoutIndex = definedLayouts.findIndex(layout => layout === group.name);
const layoutIndex = definedLayouts.findIndex((layout) => layout === group.name);
if (layoutIndex > -1) sortedGroups[layoutIndex] = group;
else unsortedGroups.push(group);
} else {
@@ -67,7 +67,7 @@ export async function bookmarksResponse() {
}
});
return [...sortedGroups.filter(g => g), ...unsortedGroups];
return [...sortedGroups.filter((g) => g), ...unsortedGroups];
}
export async function widgetsResponse() {
@@ -126,11 +126,13 @@ export async function servicesResponse() {
}
const mergedGroupsNames = [
...new Set([
discoveredDockerServices.map((group) => group.name),
discoveredKubernetesServices.map((group) => group.name),
configuredServices.map((group) => group.name),
].flat()),
...new Set(
[
discoveredDockerServices.map((group) => group.name),
discoveredKubernetesServices.map((group) => group.name),
configuredServices.map((group) => group.name),
].flat(),
),
];
const sortedGroups = [];
@@ -138,22 +140,23 @@ export async function servicesResponse() {
const definedLayouts = initialSettings.layout ? Object.keys(initialSettings.layout) : null;
mergedGroupsNames.forEach((groupName) => {
const discoveredDockerGroup = discoveredDockerServices.find((group) => group.name === groupName) || { services: [] };
const discoveredKubernetesGroup = discoveredKubernetesServices.find((group) => group.name === groupName) || { services: [] };
const discoveredDockerGroup = discoveredDockerServices.find((group) => group.name === groupName) || {
services: [],
};
const discoveredKubernetesGroup = discoveredKubernetesServices.find((group) => group.name === groupName) || {
services: [],
};
const configuredGroup = configuredServices.find((group) => group.name === groupName) || { services: [] };
const mergedGroup = {
name: groupName,
services: [
...discoveredDockerGroup.services,
...discoveredKubernetesGroup.services,
...configuredGroup.services
].filter((service) => service)
services: [...discoveredDockerGroup.services, ...discoveredKubernetesGroup.services, ...configuredGroup.services]
.filter((service) => service)
.sort(compareServices),
};
if (definedLayouts) {
const layoutIndex = definedLayouts.findIndex(layout => layout === mergedGroup.name);
const layoutIndex = definedLayouts.findIndex((layout) => layout === mergedGroup.name);
if (layoutIndex > -1) sortedGroups[layoutIndex] = mergedGroup;
else unsortedGroups.push(mergedGroup);
} else {
@@ -161,5 +164,5 @@ export async function servicesResponse() {
}
});
return [...sortedGroups.filter(g => g), ...unsortedGroups];
return [...sortedGroups.filter((g) => g), ...unsortedGroups];
}

View file

@@ -9,22 +9,24 @@ const cacheKey = "homepageEnvironmentVariables";
const homepageVarPrefix = "HOMEPAGE_VAR_";
const homepageFilePrefix = "HOMEPAGE_FILE_";
export const CONF_DIR = process.env.HOMEPAGE_CONFIG_DIR ? process.env.HOMEPAGE_CONFIG_DIR : join(process.cwd(), "config");
export const CONF_DIR = process.env.HOMEPAGE_CONFIG_DIR
? process.env.HOMEPAGE_CONFIG_DIR
: join(process.cwd(), "config");
export default function checkAndCopyConfig(config) {
if (!existsSync(CONF_DIR)) {
mkdirSync(CONF_DIR, { recursive: true });
mkdirSync(CONF_DIR, { recursive: true });
}
const configYaml = join(CONF_DIR, config);
if (!existsSync(configYaml)) {
const configSkeleton = join(process.cwd(), "src", "skeleton", config);
try {
copyFileSync(configSkeleton, configYaml)
copyFileSync(configSkeleton, configYaml);
console.info("%s was copied to the config folder", config);
} catch (err) {
console.error("error copying config", err);
throw err;
console.error("error copying config", err);
throw err;
}
return true;
@@ -42,7 +44,9 @@ function getCachedEnvironmentVars() {
let cachedVars = cache.get(cacheKey);
if (!cachedVars) {
// initialize cache
cachedVars = Object.entries(process.env).filter(([key, ]) => key.includes(homepageVarPrefix) || key.includes(homepageFilePrefix));
cachedVars = Object.entries(process.env).filter(
([key]) => key.includes(homepageVarPrefix) || key.includes(homepageFilePrefix),
);
cache.put(cacheKey, cachedVars);
}
return cachedVars;
@@ -50,7 +54,8 @@ function getCachedEnvironmentVars() {
export function substituteEnvironmentVars(str) {
let result = str;
if (result.includes('{{')) { // crude check if we have vars to replace
if (result.includes("{{")) {
// crude check if we have vars to replace
const cachedVars = getCachedEnvironmentVars();
cachedVars.forEach(([key, value]) => {
if (key.startsWith(homepageVarPrefix)) {
@@ -77,13 +82,13 @@ export function getSettings() {
// support yaml list but old spec was object so convert to that
// see https://github.com/gethomepage/homepage/issues/1546
if (Array.isArray(initialSettings.layout)) {
const layoutItems = initialSettings.layout
initialSettings.layout = {}
layoutItems.forEach(i => {
const name = Object.keys(i)[0]
initialSettings.layout[name] = i[name]
})
const layoutItems = initialSettings.layout;
initialSettings.layout = {};
layoutItems.forEach((i) => {
const name = Object.keys(i)[0];
initialSettings.layout[name] = i[name];
});
}
}
return initialSettings
return initialSettings;
}

View file

@@ -27,16 +27,16 @@ export default function getDockerArguments(server) {
}
if (servers[server].host) {
const res ={
const res = {
conn: { host: servers[server].host },
swarm: !!servers[server].swarm,
}
};
if (servers[server].port){
if (servers[server].port) {
res.conn.port = servers[server].port;
}
if (servers[server].tls){
if (servers[server].tls) {
res.conn.ca = readFileSync(path.join(CONF_DIR, servers[server].tls.caFile));
res.conn.cert = readFileSync(path.join(CONF_DIR, servers[server].tls.certFile));
res.conn.key = readFileSync(path.join(CONF_DIR, servers[server].tls.keyFile));

View file

@@ -16,13 +16,13 @@ export default function getKubeConfig() {
const kc = new KubeConfig();
switch (config?.mode) {
case 'cluster':
case "cluster":
kc.loadFromCluster();
break;
case 'default':
case "default":
kc.loadFromDefault();
break;
case 'disabled':
case "disabled":
default:
return null;
}

View file

@@ -92,7 +92,7 @@ export async function servicesFromDocker() {
shvl.set(
constructedService,
label.replace("homepage.", ""),
substituteEnvironmentVars(containerLabels[label])
substituteEnvironmentVars(containerLabels[label]),
);
}
});
@@ -105,7 +105,7 @@ export async function servicesFromDocker() {
// a server failed, but others may succeed
return { server: serverName, services: [] };
}
})
}),
);
const mappedServiceGroups = [];
@@ -152,13 +152,13 @@ export async function checkCRD(kc, name) {
"Error checking if CRD %s exists. Make sure to add the following permission to your RBAC: %d %s %s",
name,
error.statusCode,
error.body.message
error.body.message,
);
}
return false
return false;
});
return exist
return exist;
}
export async function servicesFromKubernetes() {
@@ -195,7 +195,7 @@ export async function servicesFromKubernetes() {
"Error getting traefik ingresses from traefik.containo.us: %d %s %s",
error.statusCode,
error.body,
error.response
error.response,
);
}
@@ -211,18 +211,18 @@ export async function servicesFromKubernetes() {
"Error getting traefik ingresses from traefik.io: %d %s %s",
error.statusCode,
error.body,
error.response
error.response,
);
}
return [];
});
const traefikIngressList = [...traefikIngressListContaino?.items ?? [], ...traefikIngressListIo?.items ?? []];
const traefikIngressList = [...(traefikIngressListContaino?.items ?? []), ...(traefikIngressListIo?.items ?? [])];
if (traefikIngressList.length > 0) {
const traefikServices = traefikIngressList.filter(
(ingress) => ingress.metadata.annotations && ingress.metadata.annotations[`${ANNOTATION_BASE}/href`]
(ingress) => ingress.metadata.annotations && ingress.metadata.annotations[`${ANNOTATION_BASE}/href`],
);
ingressList.items.push(...traefikServices);
}
@@ -233,7 +233,7 @@ export async function servicesFromKubernetes() {
const services = ingressList.items
.filter(
(ingress) =>
ingress.metadata.annotations && ingress.metadata.annotations[`${ANNOTATION_BASE}/enabled`] === "true"
ingress.metadata.annotations && ingress.metadata.annotations[`${ANNOTATION_BASE}/enabled`] === "true",
)
.map((ingress) => {
let constructedService = {
@@ -266,7 +266,7 @@ export async function servicesFromKubernetes() {
shvl.set(
constructedService,
annotation.replace(`${ANNOTATION_BASE}/`, ""),
ingress.metadata.annotations[annotation]
ingress.metadata.annotations[annotation],
);
}
});

View file

@@ -27,16 +27,14 @@ SOFTWARE.
*/
export function get(object, path, def) {
return (
// Split the path into keys and reduce the object to the target value
object = path.split(/[.[\]]+/).reduce(function (obj, p) {
// Check each nested object to see if the key exists
return obj && obj[p] !== undefined ? obj[p] : undefined;
}, object)
) === undefined
// If the final value is undefined, return the default value
? def
: object; // Otherwise, return the value found
// Split the path into keys and reduce the object to the target value
return (object = path.split(/[.[\]]+/).reduce(function (obj, p) {
// Check each nested object to see if the key exists
return obj && obj[p] !== undefined ? obj[p] : undefined;
}, object)) === undefined
? // If the final value is undefined, return the default value
def
: object; // Otherwise, return the value found
}
export function set(obj, path, val) {
@@ -58,13 +56,11 @@ export function set(obj, path, val) {
const isIndex = /^\d+$/.test(keys[i + 1]);
// If current key doesn't exist, initialise it as an array or object
acc[key] = Array.isArray(acc[key])
? acc[key]
: (isIndex ? [] : acc[key] || {});
acc[key] = Array.isArray(acc[key]) ? acc[key] : isIndex ? [] : acc[key] || {};
// Return nested object for next iteration
return acc[key];
}, obj)[lastKey] = val; // Finally set the value
}, obj)[lastKey] = val; // Finally set the value
return obj;
}

View file

@@ -6,76 +6,72 @@ import yaml from "js-yaml";
import checkAndCopyConfig, { CONF_DIR, substituteEnvironmentVars } from "utils/config/config";
export async function widgetsFromConfig() {
checkAndCopyConfig("widgets.yaml");
checkAndCopyConfig("widgets.yaml");
const widgetsYaml = path.join(CONF_DIR, "widgets.yaml");
const rawFileContents = await fs.readFile(widgetsYaml, "utf8");
const fileContents = substituteEnvironmentVars(rawFileContents);
const widgets = yaml.load(fileContents);
const widgetsYaml = path.join(CONF_DIR, "widgets.yaml");
const rawFileContents = await fs.readFile(widgetsYaml, "utf8");
const fileContents = substituteEnvironmentVars(rawFileContents);
const widgets = yaml.load(fileContents);
if (!widgets) return [];
if (!widgets) return [];
// map easy to write YAML objects into easy to consume JS arrays
const widgetsArray = widgets.map((group, index) => ({
type: Object.keys(group)[0],
options: {
index,
...group[Object.keys(group)[0]]
},
}));
return widgetsArray;
// map easy to write YAML objects into easy to consume JS arrays
const widgetsArray = widgets.map((group, index) => ({
type: Object.keys(group)[0],
options: {
index,
...group[Object.keys(group)[0]],
},
}));
return widgetsArray;
}
export async function cleanWidgetGroups(widgets) {
return widgets.map((widget, index) => {
const sanitizedOptions = widget.options;
const optionKeys = Object.keys(sanitizedOptions);
// delete private options from the sanitized options
["username", "password", "key"].forEach((pO) => {
if (optionKeys.includes(pO)) {
delete sanitizedOptions[pO];
}
});
// delete url from the sanitized options if the widget is not a search or glances widgeth
if (widget.type !== "search" && widget.type !== "glances" && optionKeys.includes("url")) {
delete sanitizedOptions.url;
}
return widgets.map((widget, index) => {
const sanitizedOptions = widget.options;
const optionKeys = Object.keys(sanitizedOptions);
return {
type: widget.type,
options: {
index,
...sanitizedOptions
},
}
// delete private options from the sanitized options
["username", "password", "key"].forEach((pO) => {
if (optionKeys.includes(pO)) {
delete sanitizedOptions[pO];
}
});
// delete url from the sanitized options if the widget is not a search or glances widgeth
if (widget.type !== "search" && widget.type !== "glances" && optionKeys.includes("url")) {
delete sanitizedOptions.url;
}
return {
type: widget.type,
options: {
index,
...sanitizedOptions,
},
};
});
}
export async function getPrivateWidgetOptions(type, widgetIndex) {
const widgets = await widgetsFromConfig();
const privateOptions = widgets.map((widget) => {
const {
index,
url,
username,
password,
key
} = widget.options;
const widgets = await widgetsFromConfig();
return {
type: widget.type,
options: {
index,
url,
username,
password,
key
},
}
});
return (type !== undefined && widgetIndex !== undefined) ? privateOptions.find(o => o.type === type && o.options.index === parseInt(widgetIndex, 10))?.options : privateOptions;
const privateOptions = widgets.map((widget) => {
const { index, url, username, password, key } = widget.options;
return {
type: widget.type,
options: {
index,
url,
username,
password,
key,
},
};
});
return type !== undefined && widgetIndex !== undefined
? privateOptions.find((o) => o.type === type && o.options.index === parseInt(widgetIndex, 10))?.options
: privateOptions;
}

View file

@@ -4,11 +4,11 @@ export function parseCpu(cpuStr) {
const units = cpuStr.substring(cpuStr.length - unitLength);
if (Number.isNaN(Number(units))) {
switch (units) {
case 'n':
case "n":
return base / 1000000000;
case 'u':
case "u":
return base / 1000000;
case 'm':
case "m":
return base / 1000;
default:
return base;
@@ -19,22 +19,22 @@ export function parseCpu(cpuStr) {
}
export function parseMemory(memStr) {
const unitLength = (memStr.substring(memStr.length - 1) === 'i' ? 2 : 1);
const unitLength = memStr.substring(memStr.length - 1) === "i" ? 2 : 1;
const base = Number.parseInt(memStr, 10);
const units = memStr.substring(memStr.length - unitLength);
if (Number.isNaN(Number(units))) {
switch (units) {
case 'Ki':
case "Ki":
return base * 1000;
case 'K':
case "K":
return base * 1024;
case 'Mi':
case "Mi":
return base * 1000000;
case 'M':
case "M":
return base * 1024 * 1024;
case 'Gi':
case "Gi":
return base * 1000000000;
case 'G':
case "G":
return base * 1024 * 1024 * 1024;
default:
return base;

View file

@@ -1,12 +1,12 @@
// eslint-disable-next-line import/prefer-default-export
export const columnMap = [
"grid-cols-1 md:grid-cols-1 lg:grid-cols-1",
"grid-cols-1 md:grid-cols-1 lg:grid-cols-1",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-2",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-3",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-4",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-5",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-6",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-7",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-8",
];
"grid-cols-1 md:grid-cols-1 lg:grid-cols-1",
"grid-cols-1 md:grid-cols-1 lg:grid-cols-1",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-2",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-3",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-4",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-5",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-6",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-7",
"grid-cols-1 md:grid-cols-2 lg:grid-cols-8",
];

View file

@@ -5,7 +5,6 @@ import winston from "winston";
import checkAndCopyConfig, { getSettings, CONF_DIR } from "utils/config/config";
let winstonLogger;
function init() {
@@ -48,7 +47,7 @@ function init() {
combineMessageAndSplat(),
winston.format.timestamp(),
winston.format.colorize(),
winston.format.printf(messageFormatter)
winston.format.printf(messageFormatter),
),
handleExceptions: true,
handleRejections: true,
@@ -59,7 +58,7 @@ function init() {
winston.format.errors({ stack: true }),
combineMessageAndSplat(),
winston.format.timestamp(),
winston.format.printf(messageFormatter)
winston.format.printf(messageFormatter),
),
filename: `${logpath}/logs/homepage.log`,
handleExceptions: true,

View file

@@ -5,7 +5,7 @@ export function formatApiCall(url, args) {
return args[key] || "";
};
return url.replace(/\/+$/, "").replace(find, replace).replace(find,replace);
return url.replace(/\/+$/, "").replace(find, replace).replace(find, replace);
}
function getURLSearchParams(widget, endpoint) {
@@ -57,8 +57,8 @@ export function jsonArrayFilter(data, filter) {
export function sanitizeErrorURL(errorURL) {
// Dont display sensitive params on frontend
const url = new URL(errorURL);
["apikey", "api_key", "token", "t"].forEach(key => {
if (url.searchParams.has(key)) url.searchParams.set(key, "***")
["apikey", "api_key", "token", "t"].forEach((key) => {
if (url.searchParams.has(key)) url.searchParams.set(key, "***");
});
return url.toString();
}
}

View file

@@ -28,17 +28,12 @@ export default async function credentialedProxyHandler(req, res, map) {
headers["X-CMC_PRO_API_KEY"] = `${widget.key}`;
} else if (widget.type === "gotify") {
headers["X-gotify-Key"] = `${widget.key}`;
} else if ([
"authentik",
"cloudflared",
"ghostfolio",
"mealie",
"tailscale",
"truenas",
"pterodactyl",
].includes(widget.type))
{
headers.Authorization = `Bearer ${widget.key}`;
} else if (
["authentik", "cloudflared", "ghostfolio", "mealie", "tailscale", "truenas", "pterodactyl"].includes(
widget.type,
)
) {
headers.Authorization = `Bearer ${widget.key}`;
} else if (widget.type === "proxmox") {
headers.Authorization = `PVEAPIToken=${widget.username}=${widget.password}`;
} else if (widget.type === "proxmoxbackupserver") {
@@ -62,8 +57,7 @@ export default async function credentialedProxyHandler(req, res, map) {
} else {
headers.Authorization = `Basic ${Buffer.from(`${widget.username}:${widget.password}`).toString("base64")}`;
}
}
else if (widget.type === "azuredevops") {
} else if (widget.type === "azuredevops") {
headers.Authorization = `Basic ${Buffer.from(`$:${widget.key}`).toString("base64")}`;
} else if (widget.type === "glances") {
headers.Authorization = `Basic ${Buffer.from(`${widget.username}:${widget.password}`).toString("base64")}`;
@@ -91,10 +85,12 @@ export default async function credentialedProxyHandler(req, res, map) {
if (status >= 400) {
logger.error("HTTP Error %d calling %s", status, url.toString());
}
if (status === 200) {
if (!validateWidgetData(widget, endpoint, resultData)) {
return res.status(500).json({error: {message: "Invalid data", url: sanitizeErrorURL(url), data: resultData}});
return res
.status(500)
.json({ error: { message: "Invalid data", url: sanitizeErrorURL(url), data: resultData } });
}
if (map) resultData = map(resultData);
}

View file

@@ -19,10 +19,12 @@ export default async function genericProxyHandler(req, res, map) {
if (widget) {
// if there are more than one question marks, replace others to &
const url = new URL(formatApiCall(widgets[widget.type].api, { endpoint, ...widget }).replace(/(?<=\?.*)\?/g, '&'));
const url = new URL(
formatApiCall(widgets[widget.type].api, { endpoint, ...widget }).replace(/(?<=\?.*)\?/g, "&"),
);
const headers = req.extraHeaders ?? widget.headers ?? {};
if (widget.username && widget.password) {
headers.Authorization = `Basic ${Buffer.from(`${widget.username}:${widget.password}`).toString("base64")}`;
}
@@ -30,7 +32,7 @@ export default async function genericProxyHandler(req, res, map) {
const params = {
method: widget.method ?? req.method,
headers,
}
};
if (req.body) {
params.body = req.body;
}
@@ -38,14 +40,16 @@ export default async function genericProxyHandler(req, res, map) {
const [status, contentType, data] = await httpProxy(url, params);
let resultData = data;
if (resultData.error?.url) {
resultData.error.url = sanitizeErrorURL(url);
}
if (status === 200) {
if (!validateWidgetData(widget, endpoint, resultData)) {
return res.status(status).json({error: {message: "Invalid data", url: sanitizeErrorURL(url), data: resultData}});
return res
.status(status)
.json({ error: { message: "Invalid data", url: sanitizeErrorURL(url), data: resultData } });
}
if (map) resultData = map(resultData);
}
@@ -62,10 +66,10 @@ export default async function genericProxyHandler(req, res, map) {
status,
url.protocol,
url.hostname,
url.port ? `:${url.port}` : '',
url.pathname
url.port ? `:${url.port}` : "",
url.pathname,
);
return res.status(status).json({error: {message: "HTTP Error", url: sanitizeErrorURL(url), resultData}});
return res.status(status).json({ error: { message: "HTTP Error", url: sanitizeErrorURL(url), resultData } });
}
return res.status(status).send(resultData);

View file

@@ -11,8 +11,8 @@ const logger = createLogger("jsonrpcProxyHandler");
export async function sendJsonRpcRequest(url, method, params, username, password) {
const headers = {
"content-type": "application/json",
"accept": "application/json"
}
accept: "application/json",
};
if (username && password) {
headers.authorization = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`;
@@ -23,7 +23,7 @@ export async function sendJsonRpcRequest(url, method, params, username, password
const httpRequestParams = {
method: "POST",
headers,
body
body,
};
// eslint-disable-next-line no-unused-vars
@@ -33,7 +33,7 @@ export async function sendJsonRpcRequest(url, method, params, username, password
// in order to get access to the underlying error object in the JSON response
// you must set `result` equal to undefined
if (json.error && (json.result === null)) {
if (json.error && json.result === null) {
json.result = undefined;
}
return client.receive(json);
@@ -45,15 +45,14 @@ export async function sendJsonRpcRequest(url, method, params, username, password
try {
const response = await client.request(method, params);
return [200, "application/json", JSON.stringify(response)];
}
catch (e) {
} catch (e) {
if (e instanceof JSONRPCErrorException) {
logger.debug("Error calling JSONPRC endpoint: %s. %s", url, e.message);
return [200, "application/json", JSON.stringify({result: null, error: {code: e.code, message: e.message}})];
return [200, "application/json", JSON.stringify({ result: null, error: { code: e.code, message: e.message } })];
}
logger.warn("Error calling JSONPRC endpoint: %s. %s", url, e);
return [500, "application/json", JSON.stringify({result: null, error: {code: 2, message: e.toString()}})];
return [500, "application/json", JSON.stringify({ result: null, error: { code: 2, message: e.toString() } })];
}
}

View file

@@ -7,7 +7,8 @@ import createLogger from "utils/logger";
import widgets from "widgets/widgets";
const INFO_ENDPOINT = "{url}/webapi/query.cgi?api=SYNO.API.Info&version=1&method=query";
const AUTH_ENDPOINT = "{url}/webapi/{path}?api=SYNO.API.Auth&version={maxVersion}&method=login&account={username}&passwd={password}&session=DownloadStation&format=cookie";
const AUTH_ENDPOINT =
"{url}/webapi/{path}?api=SYNO.API.Auth&version={maxVersion}&method=login&account={username}&passwd={password}&session=DownloadStation&format=cookie";
const AUTH_API_NAME = "SYNO.API.Auth";
const proxyName = "synologyProxyHandler";
@@ -40,7 +41,7 @@ async function login(loginUrl) {
}
async function getApiInfo(serviceWidget, apiName, serviceName) {
const cacheKey = `${proxyName}__${apiName}__${serviceName}`
const cacheKey = `${proxyName}__${apiName}__${serviceName}`;
let { cgiPath, maxVersion } = cache.get(cacheKey) ?? {};
if (cgiPath && maxVersion) {
return [cgiPath, maxVersion];
@@ -56,12 +57,13 @@ async function getApiInfo(serviceWidget, apiName, serviceName) {
if (json?.data?.[apiName]) {
cgiPath = json.data[apiName].path;
maxVersion = json.data[apiName].maxVersion;
logger.debug(`Detected ${serviceWidget.type}: apiName '${apiName}', cgiPath '${cgiPath}', and maxVersion ${maxVersion}`);
logger.debug(
`Detected ${serviceWidget.type}: apiName '${apiName}', cgiPath '${cgiPath}', and maxVersion ${maxVersion}`,
);
cache.put(cacheKey, { cgiPath, maxVersion });
return [cgiPath, maxVersion];
}
}
catch {
} catch {
logger.warn(`Error ${status} obtaining ${apiName} info`);
}
}
@@ -124,7 +126,7 @@ function toError(url, synologyError) {
error.error = synologyError.message ?? "Unknown error.";
break;
}
logger.warn(`Unable to call ${url}. code: ${code}, error: ${error.error}.`)
logger.warn(`Unable to call ${url}. code: ${code}, error: ${error.error}.`);
return error;
}
@@ -144,7 +146,7 @@ export default async function synologyProxyHandler(req, res) {
const [cgiPath, maxVersion] = await getApiInfo(serviceWidget, mapping.apiName, service);
if (!cgiPath || !maxVersion) {
return res.status(400).json({ error: `Unrecognized API name: ${mapping.apiName}`})
return res.status(400).json({ error: `Unrecognized API name: ${mapping.apiName}` });
}
const url = formatApiCall(widget.api, {
@@ -152,7 +154,7 @@ export default async function synologyProxyHandler(req, res) {
apiMethod: mapping.apiMethod,
cgiPath,
maxVersion,
...serviceWidget
...serviceWidget,
});
let [status, contentType, data] = await httpProxy(url);
if (status !== 200) {

View file

@@ -25,21 +25,21 @@ function handleRequest(requestor, url, params) {
addCookieHandler(url, params);
if (params?.body) {
params.headers = params.headers ?? {};
params.headers['content-length'] = Buffer.byteLength(params.body);
params.headers["content-length"] = Buffer.byteLength(params.body);
}
const request = requestor.request(url, params, (response) => {
const data = [];
const contentEncoding = response.headers['content-encoding']?.trim().toLowerCase();
const contentEncoding = response.headers["content-encoding"]?.trim().toLowerCase();
let responseContent = response;
if (contentEncoding === 'gzip' || contentEncoding === 'deflate') {
if (contentEncoding === "gzip" || contentEncoding === "deflate") {
// https://github.com/request/request/blob/3c0cddc7c8eb60b470e9519da85896ed7ee0081e/request.js#L1018-L1025
// Be more lenient with decoding compressed responses, in case of invalid gzip responses that are still accepted
// by common browsers.
responseContent = createUnzip({
flush: zlibConstants.Z_SYNC_FLUSH,
finishFlush: zlibConstants.Z_SYNC_FLUSH
finishFlush: zlibConstants.Z_SYNC_FLUSH,
});
// zlib errors
@@ -100,14 +100,13 @@ export async function httpProxy(url, params = {}) {
try {
const [status, contentType, data, responseHeaders] = await request;
return [status, contentType, data, responseHeaders];
}
catch (err) {
} catch (err) {
logger.error(
"Error calling %s//%s%s%s...",
constructedUrl.protocol,
constructedUrl.hostname,
constructedUrl.port ? `:${constructedUrl.port}` : '',
constructedUrl.pathname
constructedUrl.port ? `:${constructedUrl.port}` : "",
constructedUrl.pathname,
);
logger.error(err);
return [500, "application/json", { error: { message: err?.message ?? "Unknown error", url, rawError: err } }, null];

View file

@@ -7,11 +7,11 @@ export default function useWidgetAPI(widget, ...options) {
if (options && options[1]?.refreshInterval) {
config.refreshInterval = options[1].refreshInterval;
}
let url = formatProxyUrl(widget, ...options)
let url = formatProxyUrl(widget, ...options);
if (options[0] === "") {
url = null
url = null;
}
const { data, error, mutate } = useSWR(url, config);
// make the data error the top-level error
return { data, error: data?.error ?? error, mutate }
return { data, error: data?.error ?? error, mutate };
}

View file

@@ -2,34 +2,38 @@
import widgets from "widgets/widgets";
export default function validateWidgetData(widget, endpoint, data) {
let valid = true;
let dataParsed = data;
let error;
let mapping;
if (Buffer.isBuffer(data)) {
try {
dataParsed = JSON.parse(data);
} catch (e) {
error = e;
valid = false;
}
let valid = true;
let dataParsed = data;
let error;
let mapping;
if (Buffer.isBuffer(data)) {
try {
dataParsed = JSON.parse(data);
} catch (e) {
error = e;
valid = false;
}
}
if (dataParsed && Object.entries(dataParsed).length) {
const mappings = widgets[widget.type]?.mappings;
if (mappings) {
mapping = Object.values(mappings).find(m => m.endpoint === endpoint);
mapping?.validate?.forEach(key => {
if (dataParsed[key] === undefined) {
valid = false;
}
});
if (dataParsed && Object.entries(dataParsed).length) {
const mappings = widgets[widget.type]?.mappings;
if (mappings) {
mapping = Object.values(mappings).find((m) => m.endpoint === endpoint);
mapping?.validate?.forEach((key) => {
if (dataParsed[key] === undefined) {
valid = false;
}
});
}
}
if (!valid) {
console.warn(`Invalid data for widget '${widget.type}' endpoint '${endpoint}':\nExpected:${mapping?.validate}\nParse error: ${error ?? "none"}\nData: ${JSON.stringify(data)}`);
}
return valid;
if (!valid) {
console.warn(
`Invalid data for widget '${widget.type}' endpoint '${endpoint}':\nExpected:${mapping?.validate}\nParse error: ${
error ?? "none"
}\nData: ${JSON.stringify(data)}`,
);
}
return valid;
}