Enforce Prettier at the root

Jordan Eldredge 2022-03-06 23:07:33 -08:00
parent 4ce89495cb
commit 39da79ddf2
22 changed files with 112 additions and 120 deletions

@@ -41,6 +41,6 @@
"location": true
},
"rules": {
// "prettier/prettier": "error"
"prettier/prettier": "error"
}
}
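
The hunk above turns on the previously commented-out eslint-plugin-prettier rule in the root ESLint config. The surrounding config is not shown in this hunk; a minimal sketch of how the plugin and rule are typically wired together in a JSON config (the "plugins" entry is assumed from eslint-plugin-prettier's standard setup, not shown in the diff) is:

{
  "plugins": ["prettier"],
  "rules": {
    "prettier/prettier": "error"
  }
}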

@@ -25,7 +25,7 @@ jobs:
yarn workspace webamp build-library
- name: Lint
run: |
yarn workspace webamp lint
yarn lint
yarn workspace webamp type-check
- name: Run Unit Tests
run: |
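
The lint step now runs yarn lint from the repository root instead of only the webamp workspace. The root-level lint script itself is not part of this diff; assuming a conventional setup, the script that yarn lint invokes in the root package.json might look something like:

{
  "scripts": {
    "lint": "eslint . --ext .js,.ts,.tsx"
  }
}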

@@ -14,6 +14,7 @@
"@babel/preset-typescript": "^7.16.7",
"@typescript-eslint/parser": "^5.13.0",
"eslint": "^8.10.0",
"eslint-plugin-prettier": "^4.0.0",
"jest": "^27.5.1",
"prettier": "^2.3.2"
},

@@ -121,7 +121,7 @@ export default class DiscordEventHandler {
}
case "POPULAR_TWEET": {
const dest = await this.getChannel(Config.POPULAR_TWEETS_CHANNEL_ID);
const diff = (Date.now() - Number(action.date));
const diff = Date.now() - Number(action.date);
const seconds = diff / 1000;
const minutes = seconds / 60;
const hours = Math.round(minutes / 60);
@@ -135,7 +135,7 @@ export default class DiscordEventHandler {
const dest = await this.getChannel(Config.POPULAR_TWEETS_CHANNEL_ID);
const message = `🎉 Tweet Bot Milestone! Just passed ${action.bracket.toLocaleString()} Followers 🎉`;
await dest.send(message);
break
break;
}
}
}

@@ -4,7 +4,11 @@ import { addSkinFromBuffer } from "../addSkin";
import { EventHandler } from "./app";
import DiscordEventHandler from "./DiscordEventHandler";
async function* reportedUploads(): AsyncGenerator<{ skin_md5: string; id: string; filename: string; }, void, unknown> {
async function* reportedUploads(): AsyncGenerator<
{ skin_md5: string; id: string; filename: string },
void,
unknown
> {
const seen = new Set();
while (true) {
const upload = await Skins.getReportedUpload();
@@ -39,7 +43,10 @@ function timeout<T>(p: Promise<T>, duration: number): Promise<T> {
]);
}
async function processGivenUserUploads(eventHandler: EventHandler, uploads: AsyncGenerator<{ skin_md5: string; id: string; filename: string; }>) {
async function processGivenUserUploads(
eventHandler: EventHandler,
uploads: AsyncGenerator<{ skin_md5: string; id: string; filename: string }>
) {
log("Uploads to process...");
for await (const upload of uploads) {
log("Going to try: ", upload);
@@ -82,38 +89,32 @@ async function processGivenUserUploads(eventHandler: EventHandler, uploads: Asyn
}
export async function reprocessFailedUploads(handler: DiscordEventHandler) {
// eslint-disable-next-line no-inner-declarations
async function* erroredUploads(): AsyncGenerator<
{ skin_md5: string; id: string; filename: string },
void,
unknown
> {
const seen = new Set();
while (true) {
const upload = await Skins.getErroredUpload();
console.log("Found one", { upload });
if (upload == null) {
return;
}
if (seen.has(upload.id)) {
console.error(
"Saw the same upload twice. It didn't get handled?"
);
return;
}
seen.add(upload.id);
yield upload;
}
}
const uploads = erroredUploads();
// eslint-disable-next-line no-inner-declarations
async function* erroredUploads(): AsyncGenerator<
{ skin_md5: string; id: string; filename: string },
void,
unknown
> {
const seen = new Set();
while (true) {
const upload = await Skins.getErroredUpload();
console.log("Found one", { upload });
if (upload == null) {
return;
}
if (seen.has(upload.id)) {
console.error("Saw the same upload twice. It didn't get handled?");
return;
}
seen.add(upload.id);
yield upload;
}
}
const uploads = erroredUploads();
await processGivenUserUploads(
(event) => handler.handle(event),
uploads
);
await processGivenUserUploads((event) => handler.handle(event), uploads);
}
export async function processUserUploads(eventHandler: EventHandler) {
log("process user uploads");
// Ensure we only have one worker processing requests.

@@ -1,31 +1,33 @@
import { knex } from "../db";
export default class KeyValue {
static async get(key: string): Promise<any> {
const result = await knex("key_value").where({key}).first("value");
if(result == null) {
return null;
}
return JSON.parse(result.value);
static async get(key: string): Promise<any> {
const result = await knex("key_value").where({ key }).first("value");
if (result == null) {
return null;
}
return JSON.parse(result.value);
}
static async set(key: string, value: any): Promise<void> {
const {count} = (await knex("key_value").where({key}).count({count: '*'}).first() as {count: number});
if(count) {
return await KeyValue.update(key, value);
} else {
return await KeyValue.insert(key, value);
}
static async set(key: string, value: any): Promise<void> {
const { count } = (await knex("key_value")
.where({ key })
.count({ count: "*" })
.first()) as { count: number };
if (count) {
return await KeyValue.update(key, value);
} else {
return await KeyValue.insert(key, value);
}
}
static async update(key: string, value: any): Promise<void> {
const json = JSON.stringify(value);
await knex("key_value").where({key}).update({value: json});
}
static async insert(key: string, value: any): Promise<any> {
const json = JSON.stringify(value);
await knex("key_value").insert({key, value: json});
}
static async update(key: string, value: any): Promise<void> {
const json = JSON.stringify(value);
await knex("key_value").where({ key }).update({ value: json });
}
static async insert(key: string, value: any): Promise<any> {
const json = JSON.stringify(value);
await knex("key_value").insert({ key, value: json });
}
}

@@ -1,6 +1,5 @@
import * as Knex from "knex";
const DATA = `8512eb9cb19bcdcb044b6fb1f7dc3a23 17920085909318662 https://www.instagram.com/p/CZfUam9vqNT/
4b874c85014b42cf3d67cfab35e20109 17913896906185364 https://www.instagram.com/p/CZfU_gKPOsU/
bb7fd9baf2292c81436825248837acc7 18163890691172492 https://www.instagram.com/p/CZfVJwVviZJ/
@@ -17,26 +16,24 @@ eda35a53d88257c88c0dbf7ca9c4a98d 17891238578503535 https://www.instagram.com/p/C
b3239d943c5347c267a6ee3e1e81e1e5 17895571130531480 https://www.instagram.com/p/CZgWwGrgXh8/`;
export async function up(knex: Knex): Promise<any> {
const lines = DATA.split("\n");
for (const line of lines) {
const [md5, postId, url] = line.split(" ");
const lines = DATA.split("\n");
for (const line of lines) {
const [md5, postId, url] = line.split(" ");
await knex("instagram_posts").insert({
skin_md5: md5,
post_id: postId,
url: url,
});
}
await knex("instagram_posts").insert({
skin_md5: md5,
post_id: postId,
url: url,
});
}
}
export async function down(knex: Knex): Promise<any> {
const lines = DATA.split("\n");
for (const line of lines) {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const [_md5, postId, _url] = line.split(" ");
const lines = DATA.split("\n");
for (const line of lines) {
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const [_md5, postId, _url] = line.split(" ");
await knex("instagram_posts").delete().where({post_id: postId});
}
await knex("instagram_posts").delete().where({ post_id: postId });
}
}

@@ -1,18 +1,15 @@
import * as Knex from "knex";
export async function up(knex: Knex): Promise<any> {
await knex.raw(
`CREATE TABLE "key_value" (
await knex.raw(
`CREATE TABLE "key_value" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
"key" TEXT NOT NULL UNIQUE,
value TEXT NOT_NULL
);`
);
);
}
export async function down(knex: Knex): Promise<any> {
await knex.raw(`DROP TABLE "key_value"`);
await knex.raw(`DROP TABLE "key_value"`);
}

@@ -1,14 +1,9 @@
import * as Knex from "knex";
export async function up(knex: Knex): Promise<any> {
await knex.raw(
`ALTER TABLE ia_items ADD COLUMN metadata;`
);
await knex.raw(`ALTER TABLE ia_items ADD COLUMN metadata;`);
}
export async function down(knex: Knex): Promise<any> {
throw new Error("I never implemented a down migration for adding metadata.");
throw new Error("I never implemented a down migration for adding metadata.");
}

@@ -1,14 +1,11 @@
import * as Knex from "knex";
export async function up(knex: Knex): Promise<any> {
await knex.raw(
`ALTER TABLE ia_items ADD COLUMN metadata_timestamp;`
);
await knex.raw(`ALTER TABLE ia_items ADD COLUMN metadata_timestamp;`);
}
export async function down(knex: Knex): Promise<any> {
throw new Error("I never implemented a down migration for adding metadata_timestamp.");
throw new Error(
"I never implemented a down migration for adding metadata_timestamp."
);
}

@@ -139,7 +139,7 @@ function putTemp(fileName, buffer) {
ACL: "public-read",
},
(err) => {
console.log("Hello...")
console.log("Hello...");
if (err) {
rejectPromise(err);
return;

@@ -43,7 +43,7 @@ export async function checkInternetArchiveMetadata(): Promise<void> {
console.warn("No skin file", { identifier, skin_md5 });
}
if (files.length < 2) {
console.log({skin_md5, identifier, length: files.length});
console.log({ skin_md5, identifier, length: files.length });
tooFew.push(skin_md5);
continue;
console.warn("Too few files", { identifier, skin_md5 });
@@ -53,7 +53,6 @@ export async function checkInternetArchiveMetadata(): Promise<void> {
}
}
console.table({
total: results.length,
tooMany: tooMany.length,

@@ -30,7 +30,7 @@ async function getTweets(twitterClient): Promise<TweetPayload[]> {
let tweets: TweetPayload[] = [];
let callCount = 0;
while (callCount < (MAX_CALL_COUNT * 6)) {
while (callCount < MAX_CALL_COUNT * 6) {
callCount++;
const response = await twitterClient.get("statuses/user_timeline", {
max_id,
@@ -48,15 +48,15 @@ async function getTweets(twitterClient): Promise<TweetPayload[]> {
max_id = newMaxId;
if (response.data.length <= 1) {
console.warn("Page was short")
console.warn("Page was short");
return tweets;
}
if(callCount === MAX_CALL_COUNT) {
console.warn("Hit MAX (but gonna keep going)")
if (callCount === MAX_CALL_COUNT) {
console.warn("Hit MAX (but gonna keep going)");
}
}
console.warn("Hit MAX (but gonna keep going)")
console.warn("Hit MAX (but gonna keep going)");
return tweets;
}

@@ -62,14 +62,13 @@ export async function fillMissingMetadata(count: number) {
const ctx = new UserContext();
const skins = await knex("ia_items")
.where((builder) => {
builder.where("ia_items.metadata", null)
.orWhere("ia_items.metadata", "")
builder.where("ia_items.metadata", null).orWhere("ia_items.metadata", "");
})
.whereNot("ia_items.identifier", null)
.select("ia_items.skin_md5", "ia_items.identifier");
console.log(`Found ${skins.length} items to fetch metadata for`);
console.log(`Found ${skins.length} items to fetch metadata for`);
const items = skins.slice(0, count);
const items = skins.slice(0, count);
for (const { skin_md5, identifier } of items) {
const iaItem = await IaItemModel.fromIdentifier(ctx, identifier);

@@ -11,7 +11,10 @@ import UserContext from "../data/UserContext";
import TweetModel from "../data/TweetModel";
const temp = _temp.track();
export async function tweet(discordClient: Client, anything: string | null): Promise<string> {
export async function tweet(
discordClient: Client,
anything: string | null
): Promise<string> {
const ctx = new UserContext();
const tweetBotChannel = await discordClient.channels.fetch(
TWEET_BOT_CHANNEL_ID

@@ -150,7 +150,6 @@
"react/no-unescaped-entities": "error",
"use-isnan": "error",
"valid-typeof": "error",
"prettier/prettier": "error",
"import/default": "error",
"import/export": "error",
"import/first": "error",

@@ -91,7 +91,6 @@
"data-uri-to-buffer": "^2.0.0",
"eslint": "^8.10.0",
"eslint-plugin-import": "^2.18.2",
"eslint-plugin-prettier": "^3.1.0",
"eslint-plugin-react": "^7.16.0",
"eslint-plugin-react-hooks": "^4.3.0",
"file-loader": "^2.0.0",

@@ -9,12 +9,12 @@ var PRESET_VALUES = [
"hz12000",
"hz14000",
"hz16000",
"preamp"
"preamp",
];
var HEADER = "Winamp EQ library file v1.1";
module.exports = {
PRESET_VALUES: PRESET_VALUES,
HEADER: HEADER
HEADER: HEADER,
};

@@ -16,7 +16,7 @@ function creator(data) {
if (!data.presets) {
throw new Error("Eqf data is missing presets");
}
data.presets.forEach(function(preset) {
data.presets.forEach(function (preset) {
var k = 0;
for (; k < preset.name.length; k++) {
buffer.push(preset.name.charCodeAt(k));
@@ -25,7 +25,7 @@ function creator(data) {
buffer.push(0);
}
CONSTANTS.PRESET_VALUES.forEach(function(valueName) {
CONSTANTS.PRESET_VALUES.forEach(function (valueName) {
buffer.push(64 - preset[valueName]); // Adjust for inverse values
});
});

@@ -3,5 +3,5 @@ var creator = require("./creator");
module.exports = {
parser: parser,
creator: creator
creator: creator,
};

@@ -30,7 +30,7 @@ function parser(arrayBuffer) {
i = nameEnd; // Skip over any unused bytes
// Get the levels
CONSTANTS.PRESET_VALUES.forEach(function(valueName) {
CONSTANTS.PRESET_VALUES.forEach(function (valueName) {
preset[valueName] = 64 - arr[i++]; // Adjust for inverse values
});
data.presets.push(preset);

@@ -6739,9 +6739,10 @@ eslint-plugin-import@^2.18.2:
read-pkg-up "^2.0.0"
resolve "^1.12.0"
eslint-plugin-prettier@^3.1.0:
version "3.1.3"
resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.3.tgz#ae116a0fc0e598fdae48743a4430903de5b4e6ca"
eslint-plugin-prettier@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-4.0.0.tgz#8b99d1e4b8b24a762472b4567992023619cb98e0"
integrity sha512-98MqmCJ7vJodoQK359bqQWaxOE0CS8paAz/GgjaZLyex4TTk3g9HugoO89EqWCrFiOqn9EVvcoo7gZzONCWVwQ==
dependencies:
prettier-linter-helpers "^1.0.0"
@@ -7413,6 +7414,7 @@ fast-deep-equal@^3.1.3:
fast-diff@^1.1.2:
version "1.2.0"
resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03"
integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==
fast-glob@^2.0.2:
version "2.2.7"
@@ -12456,6 +12458,7 @@ preserve@^0.2.0:
prettier-linter-helpers@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b"
integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==
dependencies:
fast-diff "^1.1.2"