@@ -465,7 +465,19 @@ namespace ts {
        const enumNumberIndexInfo = createIndexInfo(stringType, /*isReadonly*/ true);

        const globals = createSymbolTable();
-        let amalgamatedDuplicates: Map<{ firstFile: SourceFile, secondFile: SourceFile, firstFileInstances: Map<{ instances: Node[], blockScoped: boolean }>, secondFileInstances: Map<{ instances: Node[], blockScoped: boolean }> }> | undefined;
+        interface DuplicateInfoForSymbol {
+            readonly firstFileLocations: Node[];
+            readonly secondFileLocations: Node[];
+            readonly isBlockScoped: boolean;
+        }
+        interface DuplicateInfoForFiles {
+            readonly firstFile: SourceFile;
+            readonly secondFile: SourceFile;
+            /** Key is symbol name. */
+            readonly conflictingSymbols: Map<DuplicateInfoForSymbol>;
+        }
+        /** Key is "/path/to/a.ts|/path/to/b.ts". */
+        let amalgamatedDuplicates: Map<DuplicateInfoForFiles> | undefined;
        const reverseMappedCache = createMap<Type | undefined>();
        let ambientModulesCache: Symbol[] | undefined;
        /**
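
Note on the shape introduced above: Map<T> in checker.ts is the compiler's internal string-keyed map type, so amalgamatedDuplicates becomes a two-level index: a file-pair key pointing at a DuplicateInfoForFiles, whose conflictingSymbols maps each duplicated name to the locations collected from both files. A rough standalone model of that shape, using a standard ES Map and placeholder SourceFileLike/NodeLike types (illustrative names, not part of the change):

    // Sketch only: models the new amalgamatedDuplicates shape with ES collections.
    // SourceFileLike and NodeLike stand in for the compiler's SourceFile and Node.
    interface SourceFileLike { path: string; }
    interface NodeLike { pos: number; end: number; }

    interface DuplicateInfoForSymbolSketch {
        readonly firstFileLocations: NodeLike[];
        readonly secondFileLocations: NodeLike[];
        readonly isBlockScoped: boolean;
    }

    interface DuplicateInfoForFilesSketch {
        readonly firstFile: SourceFileLike;
        readonly secondFile: SourceFileLike;
        /** Key is symbol name. */
        readonly conflictingSymbols: Map<string, DuplicateInfoForSymbolSketch>;
    }

    /** Key is "/path/to/a.ts|/path/to/b.ts", with the lesser path first. */
    type AmalgamatedDuplicates = Map<string, DuplicateInfoForFilesSketch>;

    function duplicatesKey(a: SourceFileLike, b: SourceFileLike): string {
        return a.path < b.path ? `${a.path}|${b.path}` : `${b.path}|${a.path}`;
    }

Ordering the pair key by the lesser path mirrors the comparePaths(...) === Comparison.LessThan check in the next hunk, so the same two files always produce the same key regardless of which one the checker merges first.
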
@@ -886,50 +898,45 @@ namespace ts {
                    : Diagnostics.Duplicate_identifier_0;
                const sourceSymbolFile = source.declarations && getSourceFileOfNode(source.declarations[0]);
                const targetSymbolFile = target.declarations && getSourceFileOfNode(target.declarations[0]);
+                const symbolName = symbolToString(source);

                // Collect top-level duplicate identifier errors into one mapping, so we can then merge their diagnostics if there are a bunch
                if (sourceSymbolFile && targetSymbolFile && amalgamatedDuplicates && !isEitherEnum && sourceSymbolFile !== targetSymbolFile) {
                    const firstFile = comparePaths(sourceSymbolFile.path, targetSymbolFile.path) === Comparison.LessThan ? sourceSymbolFile : targetSymbolFile;
                    const secondFile = firstFile === sourceSymbolFile ? targetSymbolFile : sourceSymbolFile;
-                    const cacheKey = `${firstFile.path}|${secondFile.path}`;
-                    const existing = amalgamatedDuplicates.get(cacheKey) || { firstFile, secondFile, firstFileInstances: createMap(), secondFileInstances: createMap() };
-                    const symbolName = symbolToString(source);
-                    const firstInstanceList = existing.firstFileInstances.get(symbolName) || { instances: [], blockScoped: isEitherBlockScoped };
-                    const secondInstanceList = existing.secondFileInstances.get(symbolName) || { instances: [], blockScoped: isEitherBlockScoped };
-
-                    forEach(source.declarations, node => {
-                        const errorNode = (getExpandoInitializer(node, /*isPrototypeAssignment*/ false) ? getNameOfExpando(node) : getNameOfDeclaration(node)) || node;
-                        const targetList = sourceSymbolFile === firstFile ? firstInstanceList : secondInstanceList;
-                        targetList.instances.push(errorNode);
-                    });
-                    forEach(target.declarations, node => {
-                        const errorNode = (getExpandoInitializer(node, /*isPrototypeAssignment*/ false) ? getNameOfExpando(node) : getNameOfDeclaration(node)) || node;
-                        const targetList = targetSymbolFile === firstFile ? firstInstanceList : secondInstanceList;
-                        targetList.instances.push(errorNode);
-                    });
-
-                    existing.firstFileInstances.set(symbolName, firstInstanceList);
-                    existing.secondFileInstances.set(symbolName, secondInstanceList);
-                    amalgamatedDuplicates.set(cacheKey, existing);
-                    return target;
+                    const filesDuplicates = getOrUpdate<DuplicateInfoForFiles>(amalgamatedDuplicates, `${firstFile.path}|${secondFile.path}`, () =>
+                        ({ firstFile, secondFile, conflictingSymbols: createMap() }));
+                    const conflictingSymbolInfo = getOrUpdate<DuplicateInfoForSymbol>(filesDuplicates.conflictingSymbols, symbolName, () =>
+                        ({ isBlockScoped: isEitherBlockScoped, firstFileLocations: [], secondFileLocations: [] }));
+                    addDuplicateLocations(conflictingSymbolInfo.firstFileLocations, source);
+                    addDuplicateLocations(conflictingSymbolInfo.secondFileLocations, target);
+                }
+                else {
+                    addDuplicateDeclarationErrorsForSymbols(source, message, symbolName, target);
+                    addDuplicateDeclarationErrorsForSymbols(target, message, symbolName, source);
                }
-                const symbolName = symbolToString(source);
-                addDuplicateDeclarationErrorsForSymbols(source, message, symbolName, target);
-                addDuplicateDeclarationErrorsForSymbols(target, message, symbolName, source);
            }
            return target;
+
+            function addDuplicateLocations(locs: Node[], symbol: Symbol): void {
+                for (const decl of symbol.declarations) {
+                    pushIfUnique(locs, (getExpandoInitializer(decl, /*isPrototypeAssignment*/ false) ? getNameOfExpando(decl) : getNameOfDeclaration(decl)) || decl);
+                }
+            }
        }

        function addDuplicateDeclarationErrorsForSymbols(target: Symbol, message: DiagnosticMessage, symbolName: string, source: Symbol) {
            forEach(target.declarations, node => {
                const errorNode = (getExpandoInitializer(node, /*isPrototypeAssignment*/ false) ? getNameOfExpando(node) : getNameOfDeclaration(node)) || node;
-                addDuplicateDeclarationError(errorNode, message, symbolName, source.declarations && source.declarations[0]);
+                addDuplicateDeclarationError(errorNode, message, symbolName, source.declarations);
            });
        }

-        function addDuplicateDeclarationError(errorNode: Node, message: DiagnosticMessage, symbolName: string, relatedNode: Node | undefined) {
+        function addDuplicateDeclarationError(errorNode: Node, message: DiagnosticMessage, symbolName: string, relatedNodes: ReadonlyArray<Node> | undefined) {
            const err = lookupOrIssueError(errorNode, message, symbolName);
-            if (relatedNode && length(err.relatedInformation) < 5) {
+            for (const relatedNode of relatedNodes || emptyArray) {
+                err.relatedInformation = err.relatedInformation || [];
+                if (length(err.relatedInformation) >= 5) continue;
                addRelatedInfo(err, !length(err.relatedInformation) ? createDiagnosticForNode(relatedNode, Diagnostics._0_was_also_declared_here, symbolName) : createDiagnosticForNode(relatedNode, Diagnostics.and_here));
            }
        }
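
The rewritten merge path above leans on two small utilities that live elsewhere in the compiler and are not shown in this diff: getOrUpdate (look up a map entry, creating it on first use) and pushIfUnique (append an element only if it is not already present). Because the internal Map<T> is string-keyed, getOrUpdate in the hunk takes a single type argument. A minimal standalone sketch of the behavior being relied on, written against standard ES collections (the real implementations may differ in signature and detail):

    // Sketch of the helpers this hunk assumes; not the compiler's actual definitions.
    function getOrUpdate<K, V>(map: Map<K, V>, key: K, createValue: () => V): V {
        const existing = map.get(key);
        if (existing !== undefined) {
            return existing;           // reuse the entry created by an earlier merge
        }
        const created = createValue(); // first conflict for this key: create the record
        map.set(key, created);
        return created;
    }

    function pushIfUnique<T>(array: T[], toAdd: T): boolean {
        if (array.indexOf(toAdd) !== -1) {
            return false;              // already recorded; avoid duplicate related spans
        }
        array.push(toAdd);
        return true;
    }

Using pushIfUnique inside addDuplicateLocations is what keeps repeated merges of the same pair of symbols from recording the same declaration node more than once.
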
@@ -28901,38 +28908,33 @@ namespace ts {
                }
            }

-            amalgamatedDuplicates.forEach(({ firstFile, secondFile, firstFileInstances, secondFileInstances }) => {
-                const conflictingKeys = arrayFrom(firstFileInstances.keys());
+            amalgamatedDuplicates.forEach(({ firstFile, secondFile, conflictingSymbols }) => {
                // If not many things conflict, issue individual errors
-                if (conflictingKeys.length < 8) {
-                    addErrorsForDuplicates(firstFileInstances, secondFileInstances);
-                    addErrorsForDuplicates(secondFileInstances, firstFileInstances);
-                    return;
+                if (conflictingSymbols.size < 8) {
+                    conflictingSymbols.forEach(({ isBlockScoped, firstFileLocations, secondFileLocations }, symbolName) => {
+                        const message = isBlockScoped ? Diagnostics.Cannot_redeclare_block_scoped_variable_0 : Diagnostics.Duplicate_identifier_0;
+                        for (const node of firstFileLocations) {
+                            addDuplicateDeclarationError(node, message, symbolName, secondFileLocations);
+                        }
+                        for (const node of secondFileLocations) {
+                            addDuplicateDeclarationError(node, message, symbolName, firstFileLocations);
+                        }
+                    });
+                }
+                else {
+                    // Otherwise issue top-level error since the files appear very identical in terms of what they contain
+                    const list = arrayFrom(conflictingSymbols.keys()).join(", ");
+                    diagnostics.add(addRelatedInfo(
+                        createDiagnosticForNode(firstFile, Diagnostics.Definitions_of_the_following_identifiers_conflict_with_those_in_another_file_Colon_0, list),
+                        createDiagnosticForNode(secondFile, Diagnostics.Conflicts_are_in_this_file)
+                    ));
+                    diagnostics.add(addRelatedInfo(
+                        createDiagnosticForNode(secondFile, Diagnostics.Definitions_of_the_following_identifiers_conflict_with_those_in_another_file_Colon_0, list),
+                        createDiagnosticForNode(firstFile, Diagnostics.Conflicts_are_in_this_file)
+                    ));
                }
-                // Otheriwse issue top-level error since the files appear very identical in terms of what they appear
-                const list = conflictingKeys.join(", ");
-                diagnostics.add(addRelatedInfo(
-                    createDiagnosticForNode(firstFile, Diagnostics.Definitions_of_the_following_identifiers_conflict_with_those_in_another_file_Colon_0, list),
-                    createDiagnosticForNode(secondFile, Diagnostics.Conflicts_are_in_this_file)
-                ));
-                diagnostics.add(addRelatedInfo(
-                    createDiagnosticForNode(secondFile, Diagnostics.Definitions_of_the_following_identifiers_conflict_with_those_in_another_file_Colon_0, list),
-                    createDiagnosticForNode(firstFile, Diagnostics.Conflicts_are_in_this_file)
-                ));
            });
            amalgamatedDuplicates = undefined;
-
-            function addErrorsForDuplicates(secondFileInstances: Map<{ instances: Node[]; blockScoped: boolean; }>, firstFileInstances: Map<{ instances: Node[]; blockScoped: boolean; }>) {
-                secondFileInstances.forEach((locations, symbolName) => {
-                    const firstFileEquivalent = firstFileInstances.get(symbolName)!;
-                    const message = locations.blockScoped
-                        ? Diagnostics.Cannot_redeclare_block_scoped_variable_0
-                        : Diagnostics.Duplicate_identifier_0;
-                    locations.instances.forEach(node => {
-                        addDuplicateDeclarationError(node, message, symbolName, firstFileEquivalent.instances[0]);
-                    });
-                });
-            }
        }

        function checkExternalEmitHelpers(location: Node, helpers: ExternalEmitHelpers) {
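
Taken together, the loop above implements a simple reporting policy per conflicting file pair: with fewer than 8 conflicting names, every declaration site still gets its own error, and addDuplicateDeclarationError attaches up to 5 related locations from the other file; with 8 or more, each file instead gets one summary diagnostic listing all of the conflicting names. A condensed, self-contained sketch of that threshold logic, with plain strings standing in for real diagnostics (names and message text are illustrative only):

    // Illustrative sketch of the "< 8 conflicts => individual errors, otherwise one
    // summary per file" policy from the hunk above; not compiler code.
    interface ConflictLocations {
        readonly firstFileLocations: string[];
        readonly secondFileLocations: string[];
    }

    function reportConflicts(
        firstFile: string,
        secondFile: string,
        conflictingSymbols: Map<string, ConflictLocations>,
    ): string[] {
        const messages: string[] = [];
        if (conflictingSymbols.size < 8) {
            conflictingSymbols.forEach(({ firstFileLocations, secondFileLocations }, symbolName) => {
                for (const loc of [...firstFileLocations, ...secondFileLocations]) {
                    messages.push(`${loc}: Duplicate identifier '${symbolName}'.`);
                }
            });
        }
        else {
            const list = Array.from(conflictingSymbols.keys()).join(", ");
            messages.push(`${firstFile}: Definitions of the following identifiers conflict with those in another file: ${list}`);
            messages.push(`${secondFile}: Definitions of the following identifiers conflict with those in another file: ${list}`);
        }
        return messages;
    }

Past the 8-name cutoff, the comment in the diff treats the two files as essentially mirror images of each other, so per-declaration errors would mostly be noise; the summary plus the Conflicts_are_in_this_file related span points at both files instead.
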