Replace lodash with built-ins where possible to reduce bundle size (#1766)

* add tslint rule to disallow lodash.isUndefined

* add tslint rule to disallow lodash.isNull

* apply fixes
Authored by Xianny on 2019-04-10 09:36:32 -07:00, committed by GitHub.
Parent: 49d951b7be
Commit: 7423028fea
299 changed files with 1249 additions and 1038 deletions
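Before the per-file diffs, here is a minimal standalone TypeScript sketch (not taken from this commit; the variable and env-var names are illustrative) of the mechanical rewrite applied throughout: lodash existence checks are swapped for strict comparisons against the `undefined` and `null` literals, which behave identically for these checks and remove the lodash calls from the bundle. The tslint enforcement mentioned in the commit message lives in the repo's lint config and is not reproduced here.

```typescript
import * as _ from 'lodash';

// Illustrative value only; `process.env` lookups are typed `string | undefined`.
const maybeToken: string | null | undefined = process.env.EXAMPLE_TOKEN;

// Before: lodash helpers used purely for existence checks.
if (!_.isUndefined(maybeToken) && !_.isNull(maybeToken)) {
    console.log(`token is set: ${maybeToken}`);
}

// After: built-in strict equality; no lodash needed for the check,
// and TypeScript still narrows `maybeToken` to `string` inside the block.
if (maybeToken !== undefined && maybeToken !== null) {
    console.log(`token is set: ${maybeToken}`);
}
```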


@@ -64,7 +64,7 @@ async function checkCurrentVersionMatchesLatestPublishedNPMPackageAsync(
 const packageName = pkg.packageJson.name;
 const packageVersion = pkg.packageJson.version;
 const packageRegistryJsonIfExists = await npmUtils.getPackageRegistryJsonIfExistsAsync(packageName);
-if (_.isUndefined(packageRegistryJsonIfExists)) {
+if (packageRegistryJsonIfExists === undefined) {
 continue; // noop for packages not yet published to NPM
 }
 const allVersionsIncludingUnpublished = npmUtils.getPreviouslyPublishedVersions(packageRegistryJsonIfExists);
@@ -101,7 +101,7 @@ async function checkChangelogFormatAsync(updatedPublicPackages: Package[]): Prom
 const currentVersion = pkg.packageJson.version;
 if (!_.isEmpty(changelog)) {
 const lastEntry = changelog[0];
-const doesLastEntryHaveTimestamp = !_.isUndefined(lastEntry.timestamp);
+const doesLastEntryHaveTimestamp = lastEntry.timestamp !== undefined;
 if (semver.lt(lastEntry.version, currentVersion)) {
 changeLogInconsistencies.push({
 packageJsonVersion: currentVersion,
@@ -142,14 +142,14 @@ async function checkPublishRequiredSetupAsync(): Promise<void> {
 }
 // Check to see if Git personal token setup
-if (_.isUndefined(constants.githubPersonalAccessToken)) {
+if (constants.githubPersonalAccessToken === undefined) {
 throw new Error(
 'You must have a Github personal access token set to an envVar named `GITHUB_PERSONAL_ACCESS_TOKEN_0X_JS`. Add it then try again.',
 );
 }
 // Check to see if discord URL is set up
-if (_.isUndefined(constants.discordAlertWebhookUrl)) {
+if (constants.discordAlertWebhookUrl === undefined) {
 throw new Error(
 'You must have a discord webhook URL set to an envVar named `DISCORD_GITHUB_RELEASE_WEBHOOK_URL`. Add it then try again.',
 );


@@ -61,7 +61,7 @@ async function confirmAsync(message: string): Promise<void> {
 const currentVersion = pkg.packageJson.version;
 const packageName = pkg.packageJson.name;
 const nextPatchVersionIfValid = semver.inc(currentVersion, 'patch');
-if (!_.isNull(nextPatchVersionIfValid)) {
+if (nextPatchVersionIfValid !== null) {
 packageToNextVersion[packageName] = nextPatchVersionIfValid;
 } else {
 throw new Error(`Encountered invalid semver version: ${currentVersion} for package: ${packageName}`);
@@ -110,9 +110,9 @@ async function publishImagesToDockerHubAsync(allUpdatedPackages: Package[]): Pro
 for (const pkg of allUpdatedPackages) {
 const packageJSON = pkg.packageJson;
 const shouldPublishDockerImage =
-!_.isUndefined(packageJSON.config) &&
-!_.isUndefined(packageJSON.config.postpublish) &&
-!_.isUndefined(packageJSON.config.postpublish.dockerHubRepo);
+packageJSON.config !== undefined &&
+packageJSON.config.postpublish !== undefined &&
+packageJSON.config.postpublish.dockerHubRepo !== undefined;
 if (!shouldPublishDockerImage) {
 continue;
 }
@@ -143,7 +143,7 @@ function getPackagesWithDocs(allUpdatedPackages: Package[]): Package[] {
 const rootPackageJsonPath = `${constants.monorepoRootPath}/package.json`;
 const rootPackageJSON = utils.readJSONFile<PackageJSON>(rootPackageJsonPath);
 const packagesWithDocPagesStringIfExist = _.get(rootPackageJSON, 'config.packagesWithDocPages', undefined);
-if (_.isUndefined(packagesWithDocPagesStringIfExist)) {
+if (packagesWithDocPagesStringIfExist === undefined) {
 return []; // None to generate & publish
 }
 const packagesWithDocPages = packagesWithDocPagesStringIfExist.split(' ');
@@ -214,7 +214,7 @@ async function updateChangeLogsAsync(updatedPublicPackages: Package[]): Promise<
 if (shouldAddNewEntry) {
 // Create a new entry for a patch version with generic changelog entry.
 const nextPatchVersionIfValid = semver.inc(currentVersion, 'patch');
-if (_.isNull(nextPatchVersionIfValid)) {
+if (nextPatchVersionIfValid === null) {
 throw new Error(`Encountered invalid semver version: ${currentVersion} for package: ${packageName}`);
 }
 const newChangelogEntry: VersionChangelog = {
@@ -231,7 +231,7 @@ async function updateChangeLogsAsync(updatedPublicPackages: Package[]): Promise<
 } else {
 // Update existing entry with timestamp
 const lastEntry = changelog[0];
-if (_.isUndefined(lastEntry.timestamp)) {
+if (lastEntry.timestamp === undefined) {
 lastEntry.timestamp = TODAYS_TIMESTAMP;
 }
 // Check version number is correct.
@@ -269,7 +269,7 @@ async function lernaPublishAsync(packageToNextVersion: { [name: string]: string
 function updateVersionNumberIfNeeded(currentVersion: string, proposedNextVersion: string): string {
 const updatedVersionIfValid = semver.inc(currentVersion, 'patch');
-if (_.isNull(updatedVersionIfValid)) {
+if (updatedVersionIfValid === null) {
 throw new Error(`Encountered invalid semver: ${currentVersion}`);
 }
 if (proposedNextVersion === currentVersion) {


@@ -20,7 +20,7 @@ const args = yargs
 (async () => {
 const isDryRun = args.isDryRun;
 let packages;
-if (_.isUndefined(args.packages)) {
+if (args.packages === undefined) {
 const shouldIncludePrivate = false;
 packages = await utils.getPackagesToPublishAsync(shouldIncludePrivate);
 } else {


@@ -36,7 +36,7 @@ function findPackageIndex(packages: Package[], packageName: string): number {
 }
 function logIfDefined(x: any): void {
-if (!_.isUndefined(x)) {
+if (x !== undefined) {
 utils.log(x);
 }
 }
@@ -53,7 +53,7 @@ function logIfDefined(x: any): void {
 const packages = utils.getTopologicallySortedPackages(monorepoRootPath);
 const installablePackages = _.filter(
 packages,
-pkg => !pkg.packageJson.private && !_.isUndefined(pkg.packageJson.main) && pkg.packageJson.main.endsWith('.js'),
+pkg => !pkg.packageJson.private && pkg.packageJson.main !== undefined && pkg.packageJson.main.endsWith('.js'),
 );
 const CHUNK_SIZE = 15;
 const chunkedInstallablePackages = _.chunk(installablePackages, CHUNK_SIZE);


@@ -26,7 +26,7 @@ export const changelogUtils = {
 },
 getChangelogMdChange(change: Change): string {
 let line = ` * ${change.note}`;
-if (!_.isUndefined(change.pr)) {
+if (change.pr !== undefined) {
 line += ` (#${change.pr})`;
 }
 return line;
@@ -71,7 +71,7 @@ export const changelogUtils = {
 getChangelogOrCreateIfMissing(packageName: string, packageLocation: string): Changelog {
 const changelogJSONPath = path.join(packageLocation, 'CHANGELOG.json');
 let changelogJsonIfExists = changelogUtils.getChangelogJSONIfExists(changelogJSONPath);
-if (_.isUndefined(changelogJsonIfExists)) {
+if (changelogJsonIfExists === undefined) {
 // If none exists, create new, empty one.
 changelogJsonIfExists = '[]';
 fs.writeFileSync(changelogJSONPath, changelogJsonIfExists);


@@ -59,7 +59,7 @@ export class DocGenerateAndUploadUtils {
 const SUB_TYPE_PROPERTY_NAMES = ['inheritedFrom', 'overwrites', 'extendedTypes', 'implementationOf'];
 const TS_MAPPED_TYPES = ['Partial', 'Promise', 'Readonly', 'Pick', 'Record'];
 if (
-!_.isUndefined(node.type) &&
+node.type !== undefined &&
 _.isString(node.type) &&
 node.type === 'reference' &&
 !_.includes(TS_MAPPED_TYPES, node.name) &&
@@ -96,7 +96,7 @@ export class DocGenerateAndUploadUtils {
 );
 const exportPathToExportedItems: ExportPathToExportedItems = {};
 const exportPathOrder: string[] = [];
-const exportsToOmit = _.isUndefined(omitExports) ? [] : omitExports;
+const exportsToOmit = omitExports === undefined ? [] : omitExports;
 processNode(sourceFile);
@@ -104,19 +104,20 @@ export class DocGenerateAndUploadUtils {
 switch (node.kind) {
 case ts.SyntaxKind.ExportDeclaration: {
 const exportClause = (node as any).exportClause;
-if (_.isUndefined(exportClause)) {
+if (exportClause === undefined) {
 return;
 }
 const exportPath = exportClause.parent.moduleSpecifier.text;
 _.each(exportClause.elements, element => {
 const exportItem = element.name.escapedText;
 if (!_.includes(exportsToOmit, exportItem)) {
-exportPathToExportedItems[exportPath] = _.isUndefined(exportPathToExportedItems[exportPath])
-? [exportItem]
-: [...exportPathToExportedItems[exportPath], exportItem];
+exportPathToExportedItems[exportPath] =
+exportPathToExportedItems[exportPath] === undefined
+? [exportItem]
+: [...exportPathToExportedItems[exportPath], exportItem];
 }
 });
-if (!_.isUndefined(exportPathToExportedItems[exportPath])) {
+if (exportPathToExportedItems[exportPath] !== undefined) {
 exportPathOrder.push(exportPath);
 }
 break;
@@ -127,21 +128,21 @@ export class DocGenerateAndUploadUtils {
 let exportPath = './index';
 if (foundNode.parent && foundNode.parent.name) {
 const exportItem = foundNode.parent.name.escapedText;
-const isExportImportRequireStatement = !_.isUndefined(
-_.get(foundNode, 'parent.moduleReference.expression.text'),
-);
+const isExportImportRequireStatement =
+_.get(foundNode, 'parent.moduleReference.expression.text') !== undefined;
 if (isExportImportRequireStatement) {
 exportPath = foundNode.parent.moduleReference.expression.text;
 }
 if (!_.includes(exportsToOmit, exportItem)) {
-exportPathToExportedItems[exportPath] = _.isUndefined(exportPathToExportedItems[exportPath])
-? [exportItem]
-: [...exportPathToExportedItems[exportPath], exportItem];
+exportPathToExportedItems[exportPath] =
+exportPathToExportedItems[exportPath] === undefined
+? [exportItem]
+: [...exportPathToExportedItems[exportPath], exportItem];
 }
 }
 if (
 !_.includes(exportPathOrder, exportPath) &&
-!_.isUndefined(exportPathToExportedItems[exportPath])
+exportPathToExportedItems[exportPath] !== undefined
 ) {
 exportPathOrder.push(exportPath);
 }
@@ -173,7 +174,7 @@ export class DocGenerateAndUploadUtils {
 const pkg = _.find(monorepoPackages, monorepoPackage => {
 return _.includes(monorepoPackage.packageJson.name, packageName);
 });
-if (_.isUndefined(pkg)) {
+if (pkg === undefined) {
 throw new Error(`Couldn't find a package.json for ${packageName}`);
 }
 this._packageJson = pkg.packageJson;
@@ -232,7 +233,7 @@ export class DocGenerateAndUploadUtils {
 const externalExports: string[] = this._getAllExternalExports();
 _.each(externalExports, externalExport => {
 const linkIfExists = docGenConfigs.EXTERNAL_EXPORT_TO_LINK[externalExport];
-if (_.isUndefined(linkIfExists)) {
+if (linkIfExists === undefined) {
 externalExportsWithoutLinks.push(externalExport);
 return;
 }
@@ -251,9 +252,10 @@ export class DocGenerateAndUploadUtils {
 const exportPathToTypedocNames: ExportNameToTypedocNames = {};
 _.each(modifiedTypedocOutput.children, file => {
 const exportPath = this._findExportPathGivenTypedocName(file.name);
-exportPathToTypedocNames[exportPath] = _.isUndefined(exportPathToTypedocNames[exportPath])
-? [file.name]
-: [...exportPathToTypedocNames[exportPath], file.name];
+exportPathToTypedocNames[exportPath] =
+exportPathToTypedocNames[exportPath] === undefined
+? [file.name]
+: [...exportPathToTypedocNames[exportPath], file.name];
 });
 // Since we need additional metadata included in the doc JSON, we nest the TypeDoc JSON
@@ -305,7 +307,7 @@ export class DocGenerateAndUploadUtils {
 _.each(referenceNames, referenceName => {
 if (
 !_.includes(allExportedItems, referenceName) &&
-_.isUndefined(docGenConfigs.EXTERNAL_TYPE_TO_LINK[referenceName])
+docGenConfigs.EXTERNAL_TYPE_TO_LINK[referenceName] === undefined
 ) {
 missingReferences.push(referenceName);
 }
@@ -421,7 +423,7 @@ export class DocGenerateAndUploadUtils {
 const matchingSanitizedExportPathIfExists = _.find(sanitizedExportPathsSortedByLength, p => {
 return _.startsWith(typeDocNameWithoutQuotes, p);
 });
-if (_.isUndefined(matchingSanitizedExportPathIfExists)) {
+if (matchingSanitizedExportPathIfExists === undefined) {
 throw new Error(`Didn't find an exportPath for ${typeDocNameWithoutQuotes}`);
 }
 const matchingExportPath = sanitizedExportPathToExportPath[matchingSanitizedExportPathIfExists];
@@ -431,7 +433,7 @@ export class DocGenerateAndUploadUtils {
 const externalExports: string[] = [];
 _.each(this._exportPathToExportedItems, (exportedItems, exportPath) => {
 const pathIfExists = this._monoRepoPkgNameToPath[exportPath];
-if (_.isUndefined(pathIfExists) && !_.startsWith(exportPath, './')) {
+if (pathIfExists === undefined && !_.startsWith(exportPath, './')) {
 _.each(exportedItems, exportedItem => {
 externalExports.push(exportedItem);
 });
@@ -451,7 +453,7 @@ export class DocGenerateAndUploadUtils {
 }
 const pathIfExists = this._monoRepoPkgNameToPath[exportPath];
-if (_.isUndefined(pathIfExists)) {
+if (pathIfExists === undefined) {
 return; // It's an external package
 }


@@ -47,7 +47,7 @@ export async function publishReleaseNotesAsync(
 aggregateNotes += getReleaseNotesForPackage(pkg.location, pkg.packageJson.name);
 const packageAssets = _.get(pkg.packageJson, 'config.postpublish.assets');
-if (!_.isUndefined(packageAssets)) {
+if (packageAssets !== undefined) {
 assets = [...assets, ...packageAssets];
 }
 });


@@ -30,7 +30,7 @@ export const utils = {
 },
 getPackages(rootDir: string): Package[] {
 const rootPackageJson = utils.readJSONFile<PackageJSON>(`${rootDir}/package.json`);
-if (_.isUndefined(rootPackageJson.workspaces)) {
+if (rootPackageJson.workspaces === undefined) {
 throw new Error(`Did not find 'workspaces' key in root package.json`);
 }
 const packages = [];
@@ -104,7 +104,7 @@ export const utils = {
 nextVersionIfValid = semver.eq(lastEntry.version, currentVersion)
 ? semver.inc(currentVersion, 'patch')
 : lastEntry.version;
-if (_.isNull(nextVersionIfValid)) {
+if (nextVersionIfValid === null) {
 throw new Error(`Encountered invalid semver: ${currentVersion} associated with ${packageName}`);
 }
 return nextVersionIfValid;
@@ -144,7 +144,7 @@ export const utils = {
 const packageNameIfExists = _.find(packageNames, name => {
 return _.includes(tag, `${name}@`);
 });
-if (_.isUndefined(packageNameIfExists)) {
+if (packageNameIfExists === undefined) {
 return; // ignore tags not related to a package we care about.
 }
 const splitTag = tag.split(`${packageNameIfExists}@`);