refactor: change to async file operations

Author: alok8bb
Date:   2022-07-10 00:47:00 +05:30
Parent: 68eebf1229
Commit: f520d16f19
4 changed files with 21 additions and 30 deletions

==== File 1 of 4 ====

@@ -1,7 +1,8 @@
 import { logError, logSuccess, projectRoot } from "../utils";
 import fs from "fs";
 import path from "path";
-import { Client, QueryResult } from "pg";
+import { Client } from "pg";
+import fsp from "fs/promises";
 
 export const tableExists = async (db: Client): Promise<boolean> => {
     const result = await db
@@ -47,33 +48,22 @@ export const populateDB = async (db: Client) => {
         )
         .catch((err) => err);
 
-    fs.readFile(
-        filepath,
-        { encoding: "utf-8" },
-        async (err: Error | null, data: string) => {
-            if (err != null) {
-                logError(err.message);
-                throw err;
-            }
-
-            let arr: Array<string> = data.split("\r\n");
-            // 1k entries per query
-            const chunkSize = 1000;
-            for (let i = 0; i < arr.length; i += chunkSize) {
-                const chunk: Array<string> = arr.slice(i, i + chunkSize);
-                const queryStr = {
-                    text: `INSERT INTO words (val) VALUES ${expand(
-                        chunk.length
-                    )}`,
-                    values: chunk,
-                };
-
-                await db.query(queryStr);
-            }
-        }
-    );
+    let data = await fsp.readFile(filepath, { encoding: "utf-8" });
+
+    let arr: string[] = data.split("\r\n");
+    // 1k entries per query
+    const chunkSize = 1000;
+    for (let i = 0; i < arr.length; i += chunkSize) {
+        const chunk: string[] = arr.slice(i, i + chunkSize);
+        const queryStr = {
+            text: `INSERT INTO words (val) VALUES ${expand(chunk.length)}`,
+            values: chunk,
+        };
+
+        await db.query(queryStr);
+    }
 
     logSuccess("Successfully populated the database!");
 };
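
Note: the expand() helper used to build the INSERT statement is not part of this
diff. A minimal sketch of what it presumably does, assuming one positional
placeholder per chunk entry (the name and behaviour are inferred from its call
site, not from this commit):

    // Hypothetical reconstruction: builds "($1), ($2), ..., ($n)" for a
    // single-column bulk INSERT whose values array is the chunk itself.
    const expand = (rowCount: number): string =>
        Array.from({ length: rowCount }, (_, i) => `($${i + 1})`).join(", ");

    // expand(3) === "($1), ($2), ($3)"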

==== File 2 of 4 ====

@@ -7,7 +7,7 @@ import { tableExists, populateDB } from "./db";
 const main = async () => {
     dotenv.config();
-    createDataDir();
+    await createDataDir();
 
     // init db
     const pool = new Pool();
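
Note: createDataDir is awaited here because it is now async (see the last file in
this commit), so the data directory exists before anything tries to write into it.
A hedged sketch of the call site for main itself, assuming it is invoked at module
top level (that part is not shown in this hunk):

    // Surface startup failures (failed mkdir, DB connection, etc.) instead of
    // leaving them as unhandled promise rejections. Assumed invocation, not from the diff.
    main().catch((err) => {
        console.error(err);
        process.exit(1);
    });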

==== File 3 of 4 ====

@@ -3,6 +3,7 @@ import { Pool, QueryResult } from "pg";
 import fs from "fs";
 import path from "path";
 import { Word } from "../utils/types";
+import fsp from "fs/promises";
 
 import { projectRoot } from "../utils";
 import { getAvailableWord, setWordTaken } from "../db/bin";
@@ -18,10 +19,9 @@ export default function BinRouter(db: Pool) {
         const word: Word | undefined = await getAvailableWord(db);
 
         if (word != undefined) {
-            fs.writeFile(
+            await fsp.writeFile(
                 path.join(projectRoot, "data", word.val + ".txt"),
-                body,
-                (err) => err
+                body
             );
 
             await setWordTaken(db, word.id);
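
Note: with the (err) => err callback gone, a rejection from fsp.writeFile now
propagates out of this async route handler. If the app is on Express 4 (unlike
Express 5), such rejections are not forwarded to the error middleware
automatically, so they need a try/catch or a small wrapper; a sketch of the usual
wrapper, assuming it is applied where the route is registered (it is not part of
this commit):

    import { RequestHandler } from "express";

    // Resolve the handler's return value and pass any rejection to next(),
    // so the errorHandler in utils can turn it into a 500 response.
    const asyncHandler =
        (fn: RequestHandler): RequestHandler =>
        (req, res, next) =>
            Promise.resolve(fn(req, res, next)).catch(next);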

==== File 4 of 4 ====

@@ -2,6 +2,7 @@ import chalk from "chalk";
 import { ErrorRequestHandler } from "express";
 import path from "path";
 import fs from "fs";
+import fsp from "fs/promises";
 
 // logs
 export const logError = (msg: string) => console.error(chalk.bold.red(msg));
@@ -16,10 +17,10 @@ export const errorHandler: ErrorRequestHandler = (err, _, res, __) => {
     return res.status(500).json({ message: "Internal server error!" });
 };
 
-export const createDataDir = () => {
+export const createDataDir = async () => {
     let dir = path.join(projectRoot, "data");
 
     if (!fs.existsSync(dir)) {
-        fs.mkdirSync(dir);
+        await fsp.mkdir(dir);
     }
 };
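
Note: the remaining synchronous call here is fs.existsSync. fsp.mkdir accepts a
{ recursive: true } option that succeeds silently when the directory already
exists, which would drop both the pre-check and the check-then-create race. A
sketch of that alternative (not part of this commit):

    export const createDataDir = async () => {
        // recursive: true creates missing parents and does not throw if the
        // directory is already there, so no existsSync pre-check is needed.
        await fsp.mkdir(path.join(projectRoot, "data"), { recursive: true });
    };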