mirror of
https://github.com/spacebarchat/server.git
synced 2026-04-01 07:05:40 +00:00
restructure to single project
This commit is contained in:
committed by
TheArcaneBrony
parent
5abd6bb7e0
commit
5e86d7ab9c
28
scripts/benchmark.js
Normal file
28
scripts/benchmark.js
Normal file
@@ -0,0 +1,28 @@
|
||||
const typeorm = require("typeorm");
const Models = require("../dist/entities");
const { PrimaryColumn } = require("typeorm");

/**
 * Returns true when an exported symbol name refers to a real entity class,
 * i.e. it is not one of the shared base classes / column helpers that must
 * not be registered with the DataSource.
 */
function shouldIncludeEntity(name) {
	return ![Models.BaseClassWithoutId, PrimaryColumn, Models.BaseClass, Models.PrimaryGeneratedColumn]
		.map((x) => x?.name)
		.includes(name);
}

/**
 * Spins up an in-memory sqlite database containing every entity class and
 * benchmarks registering 100 users.
 */
async function main() {
	console.log("starting");
	const db = new typeorm.DataSource({
		type: "sqlite",
		database: ":memory:",
		// `typeof x === "function"` keeps classes and skips non-class exports.
		// (The original `x.constructor.name == "Function"` check threw a
		// TypeError on any null/undefined export.)
		entities: Object.values(Models).filter((x) => typeof x === "function" && shouldIncludeEntity(x.name)),
		synchronize: true,
	});
	await db.initialize();
	console.log("Initialized database");

	for (let i = 0; i < 100; i++) {
		await Models.User.register({ username: "User" + i });
		console.log("registered user " + i);
	}
}

// Surface failures instead of leaving an unhandled promise rejection.
main().catch((err) => {
	console.error(err);
	process.exit(1);
});
|
||||
64
scripts/benchmark/connections.js
Normal file
64
scripts/benchmark/connections.js
Normal file
@@ -0,0 +1,64 @@
|
||||
require("dotenv").config();
const cluster = require("cluster");
// FIX: `os` was used below but never required, so os.cpus() always threw
// and the thread count silently fell back to 1.
const os = require("os");
const WebSocket = require("ws");
// Gateway endpoint and load parameters, all overridable via environment.
const endpoint = process.env.GATEWAY || "ws://localhost:3001";
const connections = Number(process.env.CONNECTIONS) || 50;
const token = process.env.TOKEN;
let cores = 1;
try {
	cores = Number(process.env.THREADS) || os.cpus().length;
} catch {
	console.log("[Bundle] Failed to get thread count! Using 1...")
}

if (!token) {
	console.error("TOKEN env var missing");
	process.exit();
}

if (cluster.isMaster) {
	// FIX: the loop previously referenced an undefined `threads` variable,
	// which crashed the master before any worker was forked.
	for (let i = 0; i < cores; i++) {
		cluster.fork();
	}

	cluster.on("exit", (worker, code, signal) => {
		console.log(`worker ${worker.process.pid} died`);
	});
} else {
	// Each worker opens `connections` concurrent gateway sockets.
	for (let i = 0; i < connections; i++) {
		connect();
	}
}

/**
 * Opens one gateway websocket, identifies with the configured token and
 * keeps heartbeating; reconnects automatically when the socket closes.
 */
function connect() {
	const client = new WebSocket(endpoint);
	client.on("message", (data) => {
		data = JSON.parse(data);

		switch (data.op) {
			case 10: // Hello: start heartbeating at the requested interval, then identify.
				client.interval = setInterval(() => {
					client.send(JSON.stringify({ op: 1 }));
				}, data.d.heartbeat_interval);

				client.send(
					JSON.stringify({
						op: 2,
						d: {
							token,
							properties: {},
						},
					})
				);

				break;
		}
	});
	client.once("close", (code, reason) => {
		clearInterval(client.interval);
		connect();
	});
	client.on("error", (err) => {
		// Ignore socket errors; the "close" handler performs reconnection.
		// console.log(err);
	});
}
|
||||
4
scripts/benchmark/index.js
Normal file
4
scripts/benchmark/index.js
Normal file
@@ -0,0 +1,4 @@
|
||||
// Entry point for the benchmark suite: load .env configuration first, then
// start the gateway-connection benchmark and the message benchmark.
require("dotenv").config();

require("./connections");
require("./messages"); // NOTE(review): scripts/benchmark/messages.js is not visible in this change set — confirm it exists
|
||||
25
scripts/benchmark/users.js
Normal file
25
scripts/benchmark/users.js
Normal file
@@ -0,0 +1,25 @@
|
||||
require("dotenv").config();
const fetch = require("node-fetch");
// Number of throwaway accounts to create and the API base URL, both
// overridable via environment variables.
const count = Number(process.env.COUNT) || 50;
const endpoint = process.env.API || "http://localhost:3001";

/**
 * Registers `count` throwaway accounts against the register endpoint.
 * Each request is awaited so failures surface here instead of becoming
 * unhandled promise rejections (the original fired them and forgot).
 */
async function main() {
	for (let i = 0; i < count; i++) {
		try {
			await fetch(`${endpoint}/api/auth/register`, {
				method: "POST",
				body: JSON.stringify({
					fingerprint: `${i}.wR8vi8lGlFBJerErO9LG5NViJFw`,
					username: `test${i}`,
					invite: null,
					consent: true,
					date_of_birth: "2000-01-01",
					gift_code_sku_id: null,
					captcha_key: null,
				}),
				headers: { "content-type": "application/json" },
			});
			console.log(i);
		} catch (err) {
			console.error(`registration ${i} failed:`, err.message);
		}
	}
}

main();
|
||||
84
scripts/build.js
Normal file
84
scripts/build.js
Normal file
@@ -0,0 +1,84 @@
|
||||
const { execSync } = require("child_process");
const path = require("path");
const fs = require("fs");
const { argv, stdout, exit } = require("process");
const { execIn, parts } = require('./utils');

// Print usage and bail out early when asked for help.
if(argv.includes("help")) {
	console.log(`Fosscord build script help:

Arguments:
	clean		Cleans up previous builds
	copyonly	Only copy source files, don't build (useful for updating assets)
	verbose		Enable verbose logging
	logerrors	Log build errors to console
	pretty-errors	Pretty-print build errors
	silent		No output to console or files.`);
	exit(0);
}

// Progress counter; the total step count varies with the chosen arguments.
let steps = 3, i = 0;
if (argv.includes("clean")) steps++;
if (argv.includes("copyonly")) steps--;

// Flags parsed straight from argv.
const verbose = argv.includes("verbose") || argv.includes("v");
const logerr = argv.includes("logerrors");
const pretty = argv.includes("pretty-errors");
const silent = argv.includes("silent");

// "silent" suppresses all console output by stubbing the log functions.
if(silent) console.error = console.log = function(){}

// Step: remove each part's previous dist/ output.
if (argv.includes("clean")) {
	console.log(`[${++i}/${steps}] Cleaning...`);
	parts.forEach((a) => {
		// NOTE(review): path is relative to the process cwd, not __dirname —
		// assumes the script is run from the repo root's scripts/ sibling.
		let d = "../" + a + "/dist";
		if (fs.existsSync(d)) {
			fs.rmSync(d, { recursive: true });
			if (verbose) console.log(`Deleted ${d}!`);
		}
	});
}

console.log(`[${++i}/${steps}] Checking if dependencies were installed correctly...`);
//exif-be-gone v1.3.0 doesnt build js, known bug
if(!fs.existsSync(path.join(__dirname, "..", "node_modules", "exif-be-gone", "index.js")))
	execIn("npm run build", path.join(__dirname, "..", "node_modules", "exif-be-gone"));


// Step: compile the TypeScript sources with the locally installed tsc.
if (!argv.includes("copyonly")) {
	console.log(`[${++i}/${steps}] Compiling src files ...`);

	let buildFlags = ''
	if(pretty) buildFlags += '--pretty '

	try {
		execSync(
			'node "' +
				path.join(__dirname, "..", "node_modules", "typescript", "lib", "tsc.js") +
				'" -p "' +
				path.join(__dirname, "..") +
				'" ' + buildFlags,
			{
				cwd: path.join(__dirname, ".."),
				shell: true,
				env: process.env,
				encoding: "utf8"
			}
		)
	} catch (error) {
		// tsc exited non-zero: optionally echo its output, then persist it to build.log.
		if(verbose || logerr) {
			error.stdout.split(/\r?\n/).forEach((line) => {
				// NOTE(review): String.replace takes no third argument; the
				// trailing `1`s are ignored (string patterns already replace
				// only the first occurrence).
				let _line = line.replace('dist/','',1);
				if(!pretty && _line.includes('.ts(')) {
					//reformat file path for easy jumping
					_line = _line.replace('(',':',1).replace(',',':',1).replace(')','',1)
				}
				console.error(_line);
			})
		}
		console.error(`Build failed! Please check build.log for info!`);
		if(!silent){
			if(pretty) fs.writeFileSync("build.log.ansi", error.stdout);
			// Strip ANSI escape sequences before writing the plain-text log.
			fs.writeFileSync("build.log", error.stdout.replaceAll(/[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, ''));
		}
	}
}
|
||||
40
scripts/db_migrations.sh
Executable file
40
scripts/db_migrations.sh
Executable file
@@ -0,0 +1,40 @@
|
||||
#!/bin/sh
# Generates TypeORM migrations for every configured database engine.
# Usage: db_migrations.sh [migration-name]

if [ ! -z "$1" ]
then
	FILENAME="$1"
	echo "Using filename: $FILENAME"
else
	read -p "Enter migration filename: " FILENAME
fi

# Temporarily move .env aside (so the app doesn't pick it up) while still
# loading its variables into this shell.
# FIX: the original sourced the file inside a (...) subshell, so the
# FC_DB_POSTGRES / FC_DB_MARIADB checks below never saw the values.
# Also use POSIX `.` instead of the bashism `source` under #!/bin/sh.
[ -f ".env" ] && {
	mv .env .env.tmp
	. ./.env.tmp
}

# $1 = connection string / database file, $2 = engine name (migrations subdir)
make_migration() {
	echo "Creating migrations for $2"
	mkdir -p "../util/src/migrations/$2"
	THREADS=1 DATABASE="$1" DB_MIGRATE=a npx typeorm-ts-node-commonjs migration:generate "../util/src/migrations/$2/$FILENAME" -d ../util/src/util/Database.ts -p
	npm run build clean logerrors pretty-errors
	THREADS=1 DATABASE="$1" DB_MIGRATE=a npm run start:bundle
}

# sqlite migrations are always generated; postgres/mariadb only when configured.
npm i sqlite3
make_migration "database.db" "sqlite"

[ -z "$FC_DB_POSTGRES" ] || (
	npm i pg
	make_migration "$FC_DB_POSTGRES" "postgres"
)

[ -z "$FC_DB_MARIADB" ] || (
	npm i mysql2
	make_migration "$FC_DB_MARIADB" "mariadb"
)

# Restore the original .env.
[ -f ".env.tmp" ] && mv .env.tmp .env
|
||||
|
||||
56
scripts/depcheck.js
Normal file
56
scripts/depcheck.js
Normal file
@@ -0,0 +1,56 @@
|
||||
const path = require("path");
const fs = require("fs");
const { env } = require("process");
const { execSync } = require("child_process");
const { argv, stdout, exit } = require("process");

const { execIn, getLines, parts } = require("./utils");

// Extra flags forwarded to `npm i` when re-resolving minor versions.
let npmi_extra_flags = "";

const resolveminor = argv.includes("resolveminor");
if (argv.includes("nobuild")) npmi_extra_flags += "--ignore-scripts ";

parts.forEach((part) => {
	const partDir = path.join(__dirname, "..", "..", part);
	console.log(`Checking updates for ${part} (${partDir})`);

	// The bundle aggregates the other parts' dependencies; sync them first.
	if (part == "bundle") {
		execIn(`npm run syncdeps`, partDir);
	}

	// Optionally wipe node_modules and reinstall so minor updates get resolved.
	if (resolveminor) {
		fs.rmSync(path.join(partDir, "node_modules"), {
			recursive: true,
			force: true,
		});
		execIn(`npm i --save --no-fund --no-audit --no-package-lock ${npmi_extra_flags}`, partDir);
	}

	// `npm outdated --json` maps package name -> { current, wanted, latest, ... }.
	// (The dead placeholder array that used to document this shape is removed.)
	const outdated = Object.entries(
		JSON.parse(execIn("npm outdated --json", partDir))
	);
	for (const [pkgname, pkginfo] of outdated) {
		if (!pkginfo.current)
			// Listed but not installed at all.
			console.log(`MISSING ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted} (latest: ${pkginfo.latest})`);
		else if (pkginfo.latest != pkginfo.wanted) {
			if (pkginfo.current != pkginfo.wanted)
				console.log(`MINOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted}`);
			console.log(`MAJOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.latest}`);
		}
		else
			console.log(`MINOR ${pkgname}: ${pkginfo.current} -> ${pkginfo.wanted}`);
	}
});
|
||||
65
scripts/depclean.js
Normal file
65
scripts/depclean.js
Normal file
@@ -0,0 +1,65 @@
|
||||
const path = require("path");
const fs = require("fs");
const { env } = require("process");
const { execSync } = require("child_process");
const { argv, stdout, exit } = require("process");

const { execIn, getLines, parts } = require('./utils');

// Packages the bundle itself always needs; reinstalled at the end.
const bundleRequired = ["@ovos-media/ts-transform-paths"];
const removeModules = argv.includes("cleanup");

// Make sure every part has a complete node_modules before depcheck runs.
parts.forEach((part) => {
	console.log(`Installing all packages for ${part}...`);
	execIn("npm i", path.join(__dirname, "..", "..", part));
});

parts.forEach((part) => {
	const partDir = path.join(__dirname, "..", "..", part);
	const distDir = path.join(partDir, "dist");
	// Dependency-tree line count before cleanup, for the summary line below.
	const start = getLines(
		execIn("npm ls --parseable --package-lock-only -a", partDir)
	);
	// Remove dist/ so depcheck only scans the sources.
	if (fs.existsSync(distDir))
		fs.rmSync(distDir, {
			recursive: true,
			force: true,
		});
	// depcheck report: unused dependencies/devDependencies plus diagnostics.
	let report;
	const dcproc = execIn("npx depcheck --json", partDir);
	// execIn normally returns a string; keep the defensive branch in case a
	// process-like object with .stdout is ever returned instead.
	if (dcproc.stdout) report = JSON.parse(dcproc.stdout);
	else report = JSON.parse(dcproc);

	// Persist the raw depcheck report for manual review.
	fs.writeFileSync(
		path.join(__dirname, "..", `depclean.out.${part}.json`),
		JSON.stringify(report, null, "\t"),
		{ encoding: "utf8" }
	);

	// Uninstall everything depcheck flagged as unused.
	let depsToRemove = report.dependencies.join(" ");
	if (depsToRemove) execIn(`npm r --save ${depsToRemove}`, partDir);

	depsToRemove = report.devDependencies.join(" ");
	if (depsToRemove) execIn(`npm r --save --dev ${depsToRemove}`, partDir);

	// With "cleanup", also drop the part's node_modules entirely.
	if (removeModules && fs.existsSync(path.join(partDir, "node_modules")))
		fs.rmSync(path.join(partDir, "node_modules"), {
			recursive: true,
			force: true,
		});
	const end = getLines(
		execIn("npm ls --parseable --package-lock-only -a", partDir)
	);
	console.log(`${part}: ${start} -> ${end} (diff: ${start - end})`);
});
console.log("Installing required packages for bundle...");

execIn(`npm i --save ${bundleRequired.join(" ")}`, path.join(__dirname, ".."));
|
||||
31
scripts/droptables.sql
Normal file
31
scripts/droptables.sql
Normal file
@@ -0,0 +1,31 @@
|
||||
-- Drops every Fosscord table so the schema can be rebuilt from scratch.
-- Join/child tables are listed before the tables they reference
-- (channels, members, guilds near the end) — presumably to satisfy
-- foreign-key constraints; confirm the ordering against the current schema.
-- `users` and `config` are intentionally kept (commented out at the bottom).
DROP TABLE applications;
DROP TABLE attachments;
DROP TABLE audit_logs;
DROP TABLE bans;
DROP TABLE connected_accounts;
DROP TABLE emojis;
DROP TABLE invites;
DROP TABLE member_roles;
DROP TABLE message_channel_mentions;
DROP TABLE message_role_mentions;
DROP TABLE message_stickers;
DROP TABLE message_user_mentions;
DROP TABLE messages;
DROP TABLE rate_limits;
DROP TABLE read_states;
DROP TABLE recipients;
DROP TABLE relationships;
DROP TABLE roles;
DROP TABLE sessions;
DROP TABLE stickers;
DROP TABLE team_members;
DROP TABLE teams;
DROP TABLE templates;
DROP TABLE voice_states;
DROP TABLE webhooks;
DROP TABLE channels;
DROP TABLE members;
DROP TABLE guilds;
DROP TABLE client_release;
-- DROP TABLE users;
-- DROP TABLE config;
|
||||
34
scripts/gen_index.js
Normal file
34
scripts/gen_index.js
Normal file
@@ -0,0 +1,34 @@
|
||||
const path = require("path");
const fs = require("fs");
const { execIn, getLines, parts } = require('./utils');

// Generates an index.ts barrel file re-exporting every .ts file and every
// subdirectory of the directory given as argv[2]; with --recursive, this
// script re-invokes itself for each subdirectory.
if (!process.argv[2] || !fs.existsSync(process.argv[2])) {
	console.log("Please pass a directory that exists!");
	process.exit(1);
}
console.log(`// ${process.argv[2]}/index.ts`)
const recurse = process.argv.includes("--recursive")

const files = fs.readdirSync(process.argv[2]).filter(x => x.endsWith('.ts') && x != 'index.ts');

let output = '';

// FIX: strip only the trailing ".ts" extension. The previous
// replaceAll('.ts', '') also mangled names containing ".ts" elsewhere.
files.forEach(x => output += `export * from "./${x.slice(0, -".ts".length)}";\n`)

// A name is treated as a directory iff it can be listed.
const dirs = fs.readdirSync(process.argv[2]).filter(x => {
	try {
		fs.readdirSync(path.join(process.argv[2], x));
		return true;
	} catch (e) {
		return false;
	}
});
dirs.forEach(x => {
	output += `export * from "./${x}/index";\n`
})
console.log(output);
fs.writeFileSync(path.join(process.argv[2], "index.ts"), output)

// Recurse into each subdirectory by spawning this same script again.
dirs.forEach(x => {
	if(recurse) console.log(execIn([process.argv[0], process.argv[1], `"${path.join(process.argv[2], x)}"`, "--recursive"].join(' '), process.cwd()))
})
|
||||
137
scripts/generate_openapi.js
Normal file
137
scripts/generate_openapi.js
Normal file
@@ -0,0 +1,137 @@
|
||||
// https://mermade.github.io/openapi-gui/#
// https://editor.swagger.io/
// Enriches assets/openapi.json in place from the generated JSON schemas and
// the route descriptions collected by the jest helpers.
const getRouteDescriptions = require("../jest/getRouteDescriptions");
const path = require("path");
const fs = require("fs");
require("missing-native-js-functions"); // provides the .unique() / .merge() helpers used below

const openapiPath = path.join(__dirname, "..", "assets", "openapi.json");
const SchemaPath = path.join(__dirname, "..", "assets", "schemas.json");
const schemas = JSON.parse(fs.readFileSync(SchemaPath, { encoding: "utf8" }));
const specification = JSON.parse(fs.readFileSync(openapiPath, { encoding: "utf8" }));

/**
 * Flattens every schema's nested `definitions` map into one dictionary,
 * registers each entry under specification.components.schemas, and rewrites
 * JSON-Schema `type: [T, "null"]` unions into OpenAPI's `nullable: true`.
 */
function combineSchemas(schemas) {
	let definitions = {};

	for (const name in schemas) {
		definitions = {
			...definitions,
			...schemas[name].definitions,
			[name]: { ...schemas[name], definitions: undefined, $schema: undefined }
		};
	}

	for (const key in definitions) {
		specification.components.schemas[key] = definitions[key];
		// These JSON-Schema keywords are not wanted in the OpenAPI output.
		delete definitions[key].additionalProperties;
		delete definitions[key].$schema;
		const definition = definitions[key];

		if (typeof definition.properties === "object") {
			for (const property of Object.values(definition.properties)) {
				if (Array.isArray(property.type)) {
					if (property.type.includes("null")) {
						property.type = property.type.find((x) => x !== "null");
						property.nullable = true;
					}
				}
			}
		}
	}

	return definitions;
}

// First path segment of a route key, used as its OpenAPI tag
// (e.g. "/guilds/:id" -> "guilds").
function getTag(key) {
	return key.match(/\/([\w-]+)/)[1];
}

/**
 * Builds/updates specification.paths from the registered API routes:
 * descriptions, request bodies, responses, path parameters and tags.
 */
function apiRoutes() {
	const routes = getRouteDescriptions();

	const tags = Array.from(routes.keys()).map((x) => getTag(x));
	// .unique() is added by missing-native-js-functions.
	specification.tags = [...specification.tags.map((x) => x.name), ...tags].unique().map((x) => ({ name: x }));

	routes.forEach((route, pathAndMethod) => {
		// Route keys look like "/guilds/:id|get"; ":param" becomes "{param}".
		const [p, method] = pathAndMethod.split("|");
		const path = p.replace(/:(\w+)/g, "{$1}");

		let obj = specification.paths[path]?.[method] || {};
		if (!obj.description) {
			const permission = route.permission ? `##### Requires the \`\`${route.permission}\`\` permission\n` : "";
			const event = route.test?.event ? `##### Fires a \`\`${route.test?.event}\`\` event\n` : "";
			obj.description = permission + event;
		}
		if (route.body) {
			// .merge() is added by missing-native-js-functions.
			obj.requestBody = {
				required: true,
				content: {
					"application/json": {
						schema: { $ref: `#/components/schemas/${route.body}` }
					}
				}
			}.merge(obj.requestBody);
		}
		if (!obj.responses) {
			obj.responses = {
				default: {
					description: "not documented"
				}
			};
		}
		if (route.test?.response) {
			const status = route.test.response.status || 200;
			let schema = {
				allOf: [
					{
						$ref: `#/components/schemas/${route.test.response.body}`
					},
					{
						example: route.test.body
					}
				]
			};
			// Without an example body, reference the schema directly.
			if (!route.test.body) schema = schema.allOf[0];

			obj.responses = {
				[status]: {
					...(route.test.response.body
						? {
								description: obj.responses[status].description || "",
								content: {
									"application/json": {
										schema: schema
									}
								}
						  }
						: {})
				}
			}.merge(obj.responses);
			delete obj.responses.default;
		}
		if (p.includes(":")) {
			// Document every ":param" as a required string path parameter.
			obj.parameters = p.match(/:\w+/g)?.map((x) => ({
				name: x.replace(":", ""),
				in: "path",
				required: true,
				schema: { type: "string" },
				description: x.replace(":", "")
			}));
		}
		obj.tags = [...(obj.tags || []), getTag(p)].unique();

		specification.paths[path] = { ...specification.paths[path], [method]: obj };
	});
}

function main() {
	combineSchemas(schemas);
	apiRoutes();

	// Point refs at components.schemas and downgrade bigint to number,
	// which OpenAPI has no native type for.
	fs.writeFileSync(
		openapiPath,
		JSON.stringify(specification, null, 4).replaceAll("#/definitions", "#/components/schemas").replaceAll("bigint", "number")
	);
}

main();
|
||||
95
scripts/generate_schema.js
Normal file
95
scripts/generate_schema.js
Normal file
@@ -0,0 +1,95 @@
|
||||
// https://mermade.github.io/openapi-gui/#
// https://editor.swagger.io/
const path = require("path");
const fs = require("fs");
const TJS = require("typescript-json-schema");
require("missing-native-js-functions");
// Output location for the combined JSON-schema bundle.
const schemaPath = path.join(__dirname, "..", "assets", "schemas.json");

// typescript-json-schema generator settings.
const settings = {
	required: true,
	ignoreErrors: true,
	excludePrivate: true,
	defaultNumberType: "integer",
	noExtraProps: true,
	defaultProps: false
};
const compilerOptions = {
	strictNullChecks: true
};
// Symbols that match the *Schema / *Response naming convention but must not
// be emitted (framework/base types and third-party symbols).
const Excluded = [
	"DefaultSchema",
	"Schema",
	"EntitySchema",
	"ServerResponse",
	"Http2ServerResponse",
	"global.Express.Response",
	"Response",
	"e.Response",
	"request.Response",
	"supertest.Response",

	// TODO: Figure out how to exclude schemas from node_modules?
	"SomeJSONSchema",
	"UncheckedPartialSchema",
	"PartialSchema",
	"UncheckedPropertiesSchema",
	"PropertiesSchema",
	"AsyncSchema",
	"AnySchema",
];
|
||||
|
||||
// Recursively visits every nested object in the generated schema tree.
// NOTE(review): this is currently a no-op traversal — it walks the structure
// but applies no transformation. It appears to be the designated hook point
// for schema post-processing; confirm before removing.
function modify(obj) {
	for (let k in obj) {
		if (typeof obj[k] === "object" && obj[k] !== null) {
			modify(obj[k]);
		}
	}
}
|
||||
|
||||
/**
 * Collects every exported *Schema / *Response symbol from src/util/schemas,
 * generates a JSON schema for each via typescript-json-schema, and writes
 * the combined map to assets/schemas.json.
 */
function main() {
	const files = [
		...walk(path.join(__dirname, "..", "src", "util", "schemas")),
	];
	const program = TJS.getProgramFromFiles(
		files,
		compilerOptions
	);
	const generator = TJS.buildGenerator(program, settings);
	if (!generator || !program) return;

	// Only symbols named like request/response schemas, minus the exclusion list.
	let schemas = generator.getUserSymbols().filter((x) => (x.endsWith("Schema") || x.endsWith("Response")) && !Excluded.includes(x));
	console.log(schemas);

	let definitions = {};

	for (const name of schemas) {
		const part = TJS.generateSchema(program, name, settings, [], generator);
		if (!part) continue;

		definitions = { ...definitions, [name]: { ...part } };
	}

	// Post-process the combined tree (see modify()).
	modify(definitions);

	fs.writeFileSync(schemaPath, JSON.stringify(definitions, null, 4));
}

main();
|
||||
|
||||
/**
 * Recursively collects the paths of all ".ts" files beneath `dir`.
 * @param {string} dir - directory to scan
 * @returns {string[]} file paths joined with "/"
 */
function walk(dir) {
	const found = [];
	for (const entry of fs.readdirSync(dir)) {
		const full = dir + "/" + entry;
		const info = fs.statSync(full);
		if (info && info.isDirectory()) {
			// Descend into subdirectories and merge their results.
			found.push(...walk(full));
		} else if (full.endsWith(".ts")) {
			found.push(full);
		}
	}
	return found;
}
|
||||
23
scripts/install.js
Normal file
23
scripts/install.js
Normal file
@@ -0,0 +1,23 @@
|
||||
const path = require("path");
const fs = require("fs");
// Sub-projects whose dependencies get merged into the bundle's package.json.
const parts = ["api", "util", "cdn", "gateway"];

const bundle = require("../package.json");

for (const part of parts) {
	// NOTE(review): resolves "../../<part>/package.json" relative to this
	// module — assumes the parts are checked out two levels up; confirm the
	// layout after the single-project restructure.
	const { devDependencies, dependencies } = require(path.join(
		"..",
		"..",
		part,
		"package.json"
	));
	// Later parts win on version conflicts (object spread, last key wins).
	bundle.devDependencies = { ...bundle.devDependencies, ...devDependencies };
	bundle.dependencies = { ...bundle.dependencies, ...dependencies };
	// The bundle vendors util directly; drop the published-package dependency.
	delete bundle.dependencies["@fosscord/util"];
}

fs.writeFileSync(
	path.join(__dirname, "..", "package.json"),
	JSON.stringify(bundle, null, "\t"),
	{ encoding: "utf8" }
);
|
||||
109
scripts/migrate_db_engine.js
Normal file
109
scripts/migrate_db_engine.js
Normal file
@@ -0,0 +1,109 @@
|
||||
// Copies all data from the current database (initDatabase) into the database
// named by the TO env var, creating the target schema via synchronize.
const { config } = require("dotenv");
config();
const { createConnection } = require("typeorm");
const { initDatabase } = require("../../dist/util/Database");
require("missing-native-js-functions"); // provides Promise.prototype.caught used at the bottom
const {
	Application,
	Attachment,
	Ban,
	Channel,
	ConfigEntity,
	ConnectedAccount,
	Emoji,
	Guild,
	Invite,
	Member,
	Message,
	ReadState,
	Recipient,
	Relationship,
	Role,
	Sticker,
	Team,
	TeamMember,
	Template,
	User,
	VoiceState,
	Webhook,
} = require("../../dist/entities/index");

/**
 * Migrates every entity table from the source database to the TO target.
 * Rows that already exist in the target are updated by id instead.
 */
async function main() {
	if (!process.env.TO) throw new Error("TO database env connection string not set");

	// manually arrange them because of foreign keys
	const entities = [
		ConfigEntity,
		User,
		Guild,
		Channel,
		Invite,
		Role,
		Ban,
		Application,
		Emoji,
		ConnectedAccount,
		Member,
		ReadState,
		Recipient,
		Relationship,
		Sticker,
		Team,
		TeamMember,
		Template,
		VoiceState,
		Webhook,
		Message,
		Attachment,
	];

	const oldDB = await initDatabase();

	// A connection-string TO selects its engine from the URL scheme
	// ("+srv" stripped for mongodb+srv); anything else is a sqlite file path.
	const type = process.env.TO.includes("://") ? process.env.TO.split(":")[0]?.replace("+srv", "") : "sqlite";
	const isSqlite = type.includes("sqlite");

	// @ts-ignore
	const newDB = await createConnection({
		type,
		url: isSqlite ? undefined : process.env.TO,
		database: isSqlite ? process.env.TO : undefined,
		entities,
		name: "new",
		synchronize: true,
	});
	// Running row counter across all entities, for progress/error messages.
	let i = 0;

	try {
		for (const entity of entities) {
			const entries = await oldDB.manager.find(entity);

			// @ts-ignore
			console.log("migrating " + entries.length + " " + entity.name + " ...");

			for (const entry of entries) {
				console.log(i++);

				try {
					await newDB.manager.insert(entity, entry);
				} catch (error) {
					// Insert failed (likely a duplicate): fall back to update-by-id.
					try {
						if (!entry.id) throw new Error("object doesn't have a unique id: " + entry);
						await newDB.manager.update(entity, { id: entry.id }, entry);
					} catch (error) {
						console.error("couldn't migrate " + i + " " + entity.name, error);
					}
				}
			}

			// @ts-ignore
			console.log("migrated " + entries.length + " " + entity.name);
		}
	} catch (error) {
		console.error(error.message);
	}

	console.log("SUCCESS migrated all data");
	await newDB.close();
}

main().caught();
|
||||
34
scripts/rights.js
Normal file
34
scripts/rights.js
Normal file
@@ -0,0 +1,34 @@
|
||||
const path = require("path");
const fs = require("fs");
const { env } = require("process");
const { execSync } = require("child_process");
const { argv, stdout, exit } = require("process");

const { execIn, getLines, parts } = require("./utils");

// Pull every `NAME: BitFlag(n)` line out of the Rights definition source,
// stripping trailing // comments.
let lines = fs.readFileSync(path.join(__dirname, "..", "..", "util", "src", "util", "Rights.ts")).toString();
let flagLines = lines
	.split("\n")
	.filter((y) => y.includes(": BitFlag("))
	.map((x) => x.split("//")[0].trim());

// Returns the right's bit as a BigInt.
// FIX: previously built the literal via eval(`${int}n`); BigInt() does the
// same without eval.
function BitFlag(position) {
	return 1n << BigInt(position);
}

// Parse each flag line with a regex instead of eval()-ing generated code.
let rights = {};
let maxRights = 0n;
flagLines.forEach((line) => {
	const match = line.match(/^(\w+)\s*:\s*BitFlag\((\d+)\)/);
	if (!match) return;
	rights[match[1]] = BitFlag(match[2]);
	maxRights += rights[match[1]];
});
//max rights...
console.log(`Maximum rights: ${maxRights}`);
//discord rights: everything except the instance-operator style rights.
let discordRights = maxRights;
discordRights -= rights.SEND_BACKDATED_EVENTS;
discordRights -= rights.MANAGE_GUILD_DIRECTORY;
discordRights -= rights.CREDITABLE;
discordRights -= rights.BYPASS_RATE_LIMITS;
discordRights -= rights.ADD_MEMBERS;
discordRights -= rights.MANAGE_RATE_LIMITS;
discordRights -= rights.OPERATOR;
console.log(`Discord-like rights: ${discordRights}`);
|
||||
3
scripts/stresstest/.gitignore
vendored
Normal file
3
scripts/stresstest/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
/node_modules
|
||||
config.json
|
||||
accounts.json
|
||||
1
scripts/stresstest/accounts.json.example
Normal file
1
scripts/stresstest/accounts.json.example
Normal file
File diff suppressed because one or more lines are too long
5
scripts/stresstest/config.json.example
Normal file
5
scripts/stresstest/config.json.example
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"url": "",
|
||||
"text-channel": "",
|
||||
"invite": ""
|
||||
}
|
||||
38
scripts/stresstest/index.js
Normal file
38
scripts/stresstest/index.js
Normal file
@@ -0,0 +1,38 @@
|
||||
const register = require("./src/register");
const login = require("./src/login/index");
const config = require("./config.json");
const figlet = require("figlet");
const sendMessage = require("./src/message/send");
const fs = require("fs");
// Print a start-up banner; banner errors are non-fatal.
figlet("Fosscord Stress Test :)", function (err, data) {
	if (err) {
		console.log("Something went wrong...");
		console.dir(err);
		return;
	}
	console.log("\x1b[32m", data);
});
// Every 5 seconds: register a fresh account, persist it, log in, send a message.
// FIX: async interval callbacks now have a rejection handler instead of
// producing unhandled promise rejections.
setInterval(() => {
	generate().catch((err) => console.error(err));
}, 1000 * 5);
// Every minute: log every stored account back in and post one message each.
setInterval(() => {
	getUsers().catch((err) => console.error(err));
}, 60 * 1000);

/** Registers a new account, appends it to accounts.json, logs in and sends a message. */
async function generate() {
	let accounts = JSON.parse(fs.readFileSync("accounts.json"));
	console.log(accounts);
	let account = await register();
	accounts.push(account);
	fs.writeFileSync("accounts.json", JSON.stringify(accounts));
	console.log(accounts.length);
	let y = await login(account);
	await sendMessage(y);
}

/** Logs in every stored account and sends one message per account. */
async function getUsers() {
	let accounts = JSON.parse(fs.readFileSync("accounts.json"));
	// FIX: the original awaited inside forEach, which ignores the awaits and
	// leaks rejections; a sequential for...of actually waits for each login.
	for (const x of accounts) {
		let y = await login(x);
		console.log(y);
		await sendMessage(y);
	}
}
|
||||
BIN
scripts/stresstest/package-lock.json
generated
Normal file
BIN
scripts/stresstest/package-lock.json
generated
Normal file
Binary file not shown.
17
scripts/stresstest/package.json
Normal file
17
scripts/stresstest/package.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"name": "stresstest",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"start": "node ."
|
||||
},
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"figlet": "^1.5.2",
|
||||
"node-fetch": "^2.6.6",
|
||||
"request": "^2.88.2"
|
||||
}
|
||||
}
|
||||
20
scripts/stresstest/src/login/index.js
Normal file
20
scripts/stresstest/src/login/index.js
Normal file
@@ -0,0 +1,20 @@
|
||||
const fetch = require("node-fetch");
const fs = require("fs");
let config = require("../../config.json");
module.exports = login;

/**
 * Logs an account in against the configured instance and returns the parsed
 * JSON response (contains the token on success).
 * @param {{email: string, password: string}} account
 */
async function login(account) {
	const payload = {
		fingerprint: "805826570869932034.wR8vi8lGlFBJerErO9LG5NViJFw",
		login: account.email,
		password: account.password
	};
	const response = await fetch(config.url + "/auth/login", {
		method: "POST",
		headers: { "Content-Type": "application/json" },
		body: JSON.stringify(payload)
	});
	console.log(response);
	const result = await response.json();
	console.log(result);
	return result;
}
|
||||
23
scripts/stresstest/src/message/send.js
Normal file
23
scripts/stresstest/src/message/send.js
Normal file
@@ -0,0 +1,23 @@
|
||||
const fetch = require("node-fetch");
const fs = require("fs");
let config = require("./../../config.json");
module.exports = sendMessage;

/**
 * Posts a short test message to the configured text channel using the given
 * account's token and returns the parsed API response.
 * @param {{token: string}} account
 */
async function sendMessage(account) {
	const payload = {
		fingerprint: "805826570869932034.wR8vi8lGlFBJerErO9LG5NViJFw",
		content: "Test",
		tts: false
	};
	const url = config.url + "/channels/" + config["text-channel"] + "/messages";
	const response = await fetch(url, {
		method: "POST",
		headers: {
			"Content-Type": "application/json",
			Authorization: account.token
		},
		body: JSON.stringify(payload)
	});
	console.log(response);
	const result = await response.json();
	console.log(result);
	return result;
}
|
||||
34
scripts/stresstest/src/register/index.js
Normal file
34
scripts/stresstest/src/register/index.js
Normal file
@@ -0,0 +1,34 @@
|
||||
const fetch = require("node-fetch");
const fs = require("fs");
let config = require("./../../config.json");
module.exports = generate;

/**
 * Registers a throwaway account with a random mailbox and password against
 * the configured instance and returns its credentials.
 * @returns {Promise<{email: string, password: string}>}
 */
async function generate() {
	// Random mailbox@stresstest.com address.
	let mail = (Math.random() + 10).toString(36).substring(2);
	mail = mail + "." + (Math.random() + 10).toString(36).substring(2) + "@stresstest.com";
	// Three random trailing chunks. FIX: substring(-7) clamps the argument
	// to 0 and returned the whole string; slice(-n) takes the intended
	// trailing characters.
	let password =
		(Math.random() * 69).toString(36).slice(-7) +
		(Math.random() * 69).toString(36).slice(-7) +
		(Math.random() * 69).toString(36).slice(-8);
	console.log(mail);
	console.log(password);
	let body = {
		fingerprint: "805826570869932034.wR8vi8lGlFBJerErO9LG5NViJFw",
		email: mail,
		username: "Fosscord Stress Test",
		password: password,
		invite: config.invite,
		consent: true,
		date_of_birth: "2000-04-04",
		gift_code_sku_id: null,
		captcha_key: null
	};
	let x = await fetch(config.url + "/auth/register", {
		method: "POST",
		headers: { "Content-Type": "application/json" },
		body: JSON.stringify(body)
	});
	console.log(x);
	x = await x.json();
	console.log(x);
	return { email: mail, password: password };
}
|
||||
9
scripts/update_schemas.js
Normal file
9
scripts/update_schemas.js
Normal file
@@ -0,0 +1,9 @@
|
||||
const path = require("path");
const fs = require("fs");
const { env } = require("process");
const { execSync } = require("child_process");
const { argv, stdout, exit } = require("process");

const { execIn, getLines, parts } = require("./utils");

// Regenerate the JSON schema bundle by running generate_schema.js inside the
// api part. NOTE(review): the cwd "../api" is relative to wherever this
// script is launched from, not to __dirname — confirm the expected cwd.
execIn("node scripts/generate_schema.js", path.join("..", "api"));
|
||||
54
scripts/utils.js
Normal file
54
scripts/utils.js
Normal file
@@ -0,0 +1,54 @@
|
||||
const path = require("path");
const fs = require("fs");
const { env } = require("process");
const { execSync } = require("child_process");
const { argv, stdout, exit } = require("process");

// The sub-projects that make up the server; several scripts iterate these.
const parts = ["api", "util", "cdn", "gateway", "bundle"];
|
||||
|
||||
/**
 * Recursively copies a file or directory tree from `src` to `dest`,
 * creating destination directories as needed. A missing source is reported
 * and skipped rather than raising.
 */
function copyRecursiveSync(src, dest) {
	if (!fs.existsSync(src)) {
		console.log(src + " doesn't exist, not copying!");
		return;
	}
	if (fs.statSync(src).isDirectory()) {
		fs.mkdirSync(dest, { recursive: true });
		for (const entry of fs.readdirSync(src)) {
			copyRecursiveSync(path.join(src, entry), path.join(dest, entry));
		}
	} else {
		fs.copyFileSync(src, dest);
	}
}
|
||||
|
||||
/**
 * Runs a shell command synchronously in `workdir` and returns its stdout as
 * a string. On failure, the failed command's captured stdout is returned
 * instead of throwing; callers inspect the output.
 */
function execIn(cmd, workdir, opts) {
	const options = {
		cwd: workdir,
		shell: true,
		env: process.env,
		encoding: "utf-8",
		...opts
	};
	try {
		return execSync(cmd, options);
	} catch (error) {
		// Deliberate best-effort: surface whatever output the command produced.
		return error.stdout;
	}
}
|
||||
|
||||
// Counts the newline-separated lines in a command's output
// (number of "\n" characters plus one, matching String.split semantics).
function getLines(output) {
	let count = 1;
	for (const ch of output) {
		if (ch === "\n") count++;
	}
	return count;
}
|
||||
|
||||
// Shared helpers used by the other scripts in this directory.
module.exports = {
	//consts
	parts,
	//functions
	copyRecursiveSync, execIn, getLines
};
|
||||
Reference in New Issue
Block a user