Removing unused imports from generated module declarations
parent 51d135aab2
commit 3044e035bc
@@ -7,8 +7,8 @@ source ../scripts/resolve_commands.sh
 function generate_declaration() {
     echo "Generating declarations for project $1 ($2)"
 
-    if [[ -e "${2}" ]]; then
-        rm "${2}"; _exit_code=$?
+    if [[ -d "${2}" ]]; then
+        rm -r "${2}"; _exit_code=$?
         if [[ $_exit_code -ne 0 ]]; then
             echo "Failed to remove old declaration file ($2): $_exit_code!"
             echo "This could be critical later!"
@@ -23,12 +23,12 @@ function generate_declaration() {
 }
 
 #Generate the loader definitions first
-app_declaration="declarations/exports_app.d.ts"
-loader_declaration_app="declarations/exports_loader_app.d.ts"
-loader_declaration_certaccept="declarations/exports_loader_certaccept.d.ts"
+app_declaration="../declarations/shared-app/"
+loader_declaration_app="../declarations/loader/"
+# loader_declaration_certaccept="declarations/exports_loader_certaccept.d.ts"
 
 generate_declaration dtsconfig_app.json ${app_declaration}
 generate_declaration dtsconfig_loader_app.json ${loader_declaration_app}
-generate_declaration dtsconfig_loader_certaccept.json ${loader_declaration_certaccept}
+# generate_declaration dtsconfig_loader_certaccept.json ${loader_declaration_certaccept}
 
 exit 0
@@ -6,7 +6,7 @@ import * as moment from "moment";
 
 let global_modal: PokeModal;
 
-interface ServerEntry {
+export interface ServerEntry {
     source: ConnectionHandler;
     add_message(invoker: PokeInvoker, message: string);
 }
@@ -6,6 +6,6 @@
         "workers/**/*.ts"
     ],
     "base_directory": "shared/js/",
-    "target_directory": "../declarations/shared-app",
+    "target_directory": "../../declarations/shared-app",
     "modular": true
 }
@@ -452,6 +452,6 @@ generators[SyntaxKind.ImportDeclaration] = (settings, stack, node: ts.ImportDecl
         node.decorators,
         node.modifiers,
         node.importClause,
-        ts.createStringLiteral(specifier.text + ".d")
+        ts.createStringLiteral(specifier.text)
     );
 };
@@ -0,0 +1,15 @@
+import * as ts from "typescript";
+import {SyntaxKind} from "typescript";
+
+let has_export;
+const visit = (node: ts.Node) => has_export = has_export || (node.modifiers || [] as any).filter(e => e.kind === SyntaxKind.ExportKeyword).length !== 0 || ts.forEachChild(node, visit);
+
+export function fix_declare_global(nodes: ts.Node[]) : ts.Node[] {
+    has_export = false;
+
+    // nodes.forEach(visit); /* for a "deep" check */
+    nodes.forEach(e => has_export = has_export || (e.modifiers || [] as any).filter(e => e.kind === SyntaxKind.ExportKeyword).length !== 0);
+    if(has_export) return nodes;
+
+    return [];
+}
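For reference, a quick illustration of the contract of fix_declare_global as defined above: if at least one top-level statement carries an export modifier, the node list passes through unchanged, otherwise everything is dropped. This is only a sketch; the sample source text below is invented for the demonstration.

    // Illustrative use of the new helper (names taken from the diff above);
    // the sample file content is made up for demonstration.
    import * as ts from "typescript";
    import {fix_declare_global} from "./declare_fixup";

    const sf = ts.createSourceFile("sample.ts",
        `interface Internal { a: number; }\nexport interface Public { b: string; }`,
        ts.ScriptTarget.ES2018, true);

    // At least one top-level `export` modifier -> the nodes are kept.
    console.log(fix_declare_global([...sf.statements]).length); // 2

    // No export at all -> the whole list is dropped (returns []).
    console.log(fix_declare_global([sf.statements[0]]).length); // 0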
@@ -0,0 +1,248 @@
+import * as ts from "typescript";
+import {SyntaxKind} from "typescript";
+
+interface RequiredType {
+    identifier: string;
+}
+
+class ImportsParserData {
+    readonly source_file: ts.SourceFile;
+    required_type: RequiredType[];
+    depth: number;
+
+    constructor(sf: ts.SourceFile) {
+        this.source_file = sf;
+        this.required_type = [];
+        this.depth = 0;
+    }
+
+    has_type(name: string) {
+        return this.required_type.findIndex(e => e.identifier === name) !== -1;
+    }
+}
+
+export function remove_unused(source_file: ts.SourceFile, nodes: ts.Node[]) : ts.Node[] {
+    const data = new ImportsParserData(source_file);
+
+    for(const node of nodes)
+        gather_required_types(node, data);
+
+    //console.log(data.required_type);
+    const result2d = nodes.map(e => ts.transform(e, [ctx => node => eliminate_imports(node, ctx, data)])).map(e => e.transformed);
+    const result = [];
+    for(const entry of result2d)
+        result.push(...entry);
+    return result;
+}
+
+function eliminate_imports(node: ts.Node, ctx: ts.TransformationContext, data: ImportsParserData) : ts.Node | undefined {
+    switch (node.kind) {
+        case SyntaxKind.ImportDeclaration:
+            const import_decl = node as ts.ImportDeclaration;
+            const clause = import_decl.importClause;
+            if(!clause.namedBindings) return node;
+
+            let new_binding;
+            if(clause.namedBindings.kind === SyntaxKind.NamedImports) {
+                const bindings = clause.namedBindings as ts.NamedImports;
+                const elements = bindings.elements.filter(e => data.has_type(e.name.text));
+                if(!elements.length) return ts.createIdentifier("");
+
+                new_binding = ts.createNamedImports(elements);
+            } else if(clause.namedBindings.kind === SyntaxKind.NamespaceImport) {
+                const binding = clause.namedBindings as ts.NamespaceImport;
+                if(!data.has_type(binding.name.text))
+                    return ts.createIdentifier("");
+                new_binding = binding;
+            } else
+                throw "unknown named binding";
+
+            return ts.createImportDeclaration(import_decl.decorators, import_decl.modifiers, new_binding, import_decl.moduleSpecifier);
+        default:
+            return ts.visitEachChild(node, e => eliminate_imports(e, ctx, data), ctx);
+    }
+}
+
+const import_parsers: {[key: number]:(node: ts.Node, data: ImportsParserData) => void} = {};
+function gather_required_types(node: ts.Node, data: ImportsParserData) {
+    if(!node) return;
+    //console.log("%d %s", data.depth, SyntaxKind[node.kind]);
+
+    if(import_parsers[node.kind]) {
+        import_parsers[node.kind](node, data);
+        return;
+    }
+
+    data.depth++;
+    node.forEachChild(e => gather_required_types(e, data));
+    data.depth--;
+}
+
+
+import_parsers[SyntaxKind.Parameter] = (node: ts.ParameterDeclaration, data) => {
+    if(!node.type) return;
+
+    analyze_type_node(node.type, data);
+};
+
+import_parsers[SyntaxKind.TypeAliasDeclaration] = (node: ts.TypeAliasDeclaration, data) => {
+    (node.typeParameters || []).forEach(e => gather_required_types(e, data));
+    if(node.type) analyze_type_node(node.type, data);
+    if(node.decorators) node.decorators.forEach(e => analyze_type_node(e.expression, data));
+};
+
+
+import_parsers[SyntaxKind.HeritageClause] = (node: ts.HeritageClause, data) => {
+    const heritage = node as ts.HeritageClause;
+    for(const type of heritage.types)
+        analyze_type_node(type, data);
+};
+
+import_parsers[SyntaxKind.TypeParameter] = (node: ts.TypeParameterDeclaration, data) => {
+    if(node.constraint) analyze_type_node(node.constraint, data);
+    if(node.default) analyze_type_node(node.default, data);
+};
+
+import_parsers[SyntaxKind.FunctionDeclaration] = (node: ts.FunctionDeclaration, data) => {
+    if(node.type)
+        analyze_type_node(node.type, data);
+    (node.typeParameters || []).forEach(e => gather_required_types(e, data));
+    for(const param of node.parameters)
+        gather_required_types(param, data);
+};
+
+import_parsers[SyntaxKind.MethodSignature] = (node: ts.MethodSignature, data) => {
+    if(node.type)
+        analyze_type_node(node.type, data);
+    (node.typeParameters || []).forEach(e => gather_required_types(e, data));
+    for(const param of node.parameters)
+        gather_required_types(param, data);
+};
+
+import_parsers[SyntaxKind.ClassDeclaration] = (node: ts.ClassDeclaration, data) => {
+    for(const e of node.heritageClauses || [])
+        gather_required_types(e, data);
+
+    for(const e of node.typeParameters || [])
+        gather_required_types(e, data);
+
+    for(const e of node.members || [])
+        gather_required_types(e, data);
+};
+
+import_parsers[SyntaxKind.PropertySignature] = (node: ts.PropertySignature, data) => {
+    analyze_type_node(node.type, data);
+};
+
+import_parsers[SyntaxKind.PropertyDeclaration] = (node: ts.PropertyDeclaration, data) => {
+    analyze_type_node(node.type, data);
+};
+
+import_parsers[SyntaxKind.MethodDeclaration] = (node: ts.MethodDeclaration, data) => {
+    for(const e of node.parameters || [])
+        gather_required_types(e, data);
+    for(const e of node.typeParameters || [])
+        gather_required_types(e, data);
+    analyze_type_node(node.type, data);
+};
+
+function analyze_type_node(node: ts.TypeNode | ts.LeftHandSideExpression, data: ImportsParserData) {
+    if(!node) return;
+
+    //console.log("T: %s", SyntaxKind[node.kind]);
+    switch (node.kind) {
+        case SyntaxKind.AnyKeyword:
+        case SyntaxKind.VoidKeyword:
+        case SyntaxKind.ThisType:
+        case SyntaxKind.ThisKeyword:
+        case SyntaxKind.BooleanKeyword:
+        case SyntaxKind.StringKeyword:
+        case SyntaxKind.StringLiteral:
+        case SyntaxKind.LiteralType:
+        case SyntaxKind.NumberKeyword:
+        case SyntaxKind.ObjectKeyword:
+        case SyntaxKind.NullKeyword:
+        case SyntaxKind.NeverKeyword:
+        case SyntaxKind.UndefinedKeyword:
+            /* no special export type */
+            break;
+
+        case SyntaxKind.UnionType:
+            const union = node as ts.UnionTypeNode;
+            union.types.forEach(e => analyze_type_node(e, data));
+            break;
+
+        case SyntaxKind.IntersectionType:
+            const intersection = node as ts.IntersectionTypeNode;
+            intersection.types.forEach(e => analyze_type_node(e, data));
+            break;
+
+        case SyntaxKind.TypeReference:
+            const ref = node as ts.TypeReferenceNode;
+            if(ref.typeName.kind === SyntaxKind.Identifier) {
+                data.required_type.push({
+                    identifier: ref.typeName.text
+                });
+            } else if(ref.typeName.kind === SyntaxKind.QualifiedName) {
+                let left: ts.Identifier | ts.QualifiedName = ref.typeName.left;
+                while(left.kind !== SyntaxKind.Identifier)
+                    left = left.left;
+                data.required_type.push({
+                    identifier: left.text
+                });
+            } else
+                throw "invalid type name";
+            for(const e of ref.typeArguments || [])
+                analyze_type_node(e, data);
+            break;
+
+        case SyntaxKind.Identifier:
+            data.required_type.push({
+                identifier: (node as ts.Identifier).text
+            });
+            break;
+
+        case SyntaxKind.TypeLiteral:
+            const lit = node as ts.TypeLiteralNode;
+            for(const member of lit.members)
+                gather_required_types(member, data);
+            break;
+
+        case SyntaxKind.ArrayType:
+            const array = node as ts.ArrayTypeNode;
+            analyze_type_node(array.elementType, data);
+            break;
+
+        case SyntaxKind.FunctionType:
+            const fn = node as ts.FunctionTypeNode;
+            for(const param of fn.parameters || [])
+                gather_required_types(param, data);
+
+            for(const type of fn.typeParameters || [])
+                gather_required_types(type, data);
+            break;
+
+        case SyntaxKind.TypeOperator:
+            const to = node as ts.TypeOperatorNode;
+            analyze_type_node(to.type, data);
+            break;
+
+        case SyntaxKind.ExpressionWithTypeArguments:
+            analyze_type_node((node as ts.ExpressionWithTypeArguments).expression, data);
+            break;
+
+        case SyntaxKind.IndexedAccessType:
+            const ia = node as ts.IndexedAccessTypeNode;
+            analyze_type_node(ia.indexType, data);
+            analyze_type_node(ia.objectType, data);
+            break;
+
+        case SyntaxKind.ParenthesizedType:
+            const parenthesized = node as ts.ParenthesizedTypeNode;
+            analyze_type_node(parenthesized.type, data);
+            break;
+
+        default:
+            throw "Unknown type " + SyntaxKind[node.kind] + ". Extend me :)";
+    }
+}
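For context, here is a condensed, standalone sketch of the idea behind the new import_organizer pass shown above: one pass collects every identifier that appears in a type reference, a second pass flags import bindings that are never referenced. The helper name findUnusedImports and the sample file are illustrative only (not part of the commit), and this sketch merely reports unused bindings rather than rewriting the AST.

    // Condensed sketch of the unused-import detection approach; names below
    // (findUnusedImports, example.d.ts) are illustrative, not from the commit.
    import * as ts from "typescript";

    function findUnusedImports(source: ts.SourceFile): string[] {
        const referenced = new Set<string>();

        // Pass 1: remember every identifier used as a type reference.
        const collect = (node: ts.Node) => {
            if (ts.isTypeReferenceNode(node)) {
                let name: ts.EntityName = node.typeName;
                while (ts.isQualifiedName(name)) name = name.left; // a.b.C -> a
                referenced.add((name as ts.Identifier).text);
            }
            ts.forEachChild(node, collect);
        };
        collect(source);

        // Pass 2: any named or namespace import binding that is never
        // referenced is a candidate for removal.
        const unused: string[] = [];
        for (const stmt of source.statements) {
            if (!ts.isImportDeclaration(stmt) || !stmt.importClause?.namedBindings) continue;
            const bindings = stmt.importClause.namedBindings;
            if (ts.isNamedImports(bindings)) {
                bindings.elements.forEach(e => { if (!referenced.has(e.name.text)) unused.push(e.name.text); });
            } else if (!referenced.has(bindings.name.text)) {
                unused.push(bindings.name.text);
            }
        }
        return unused;
    }

    // Usage (illustrative):
    const sf = ts.createSourceFile("example.d.ts",
        `import * as a from "./module_a";\nexport declare const x: number;`,
        ts.ScriptTarget.ES2018, true);
    console.log(findUnusedImports(sf)); // -> [ "a" ]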
@@ -5,6 +5,8 @@ import * as glob from "glob";
 import * as path from "path";
 import * as mkdirp from "mkdirp";
 import {removeSync} from "fs-extra";
+import * as import_organizer from "./import_organizer";
+import * as declare_fixup from "./declare_fixup";
 
 let source_files: string[] = [];
 let exclude_files: string[] = [];
@@ -110,10 +112,12 @@ source_files.forEach(file => {
     );
 
     console.log("Compile %s (%s)", _file, relpath);
-    const result = decl.print(source, decl.generate(source, {
+    const decl_nodes = decl.generate(source, {
        remove_private: false,
        module_mode: module_mode
-    }));
+    });
+
+    const result = decl.print(source, declare_fixup.fix_declare_global(import_organizer.remove_unused(source, decl_nodes)));
 
     let fpath = path.join(base_path, target_directory, relpath);
     fpath = fpath.substr(0, fpath.lastIndexOf(".")) + ".d.ts";
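Taken together, the two new passes change an emitted declaration file roughly as follows (illustrative before/after; module_b and Unused are invented names, module_a and TestClass come from the test fixtures in this commit):

    // Generated declaration before the new passes (illustrative):
    import * as a from "./module_a";
    import {Unused} from "./module_b";   // never referenced below
    export declare class C extends a.TestClass {
    }

    // After import_organizer.remove_unused + declare_fixup.fix_declare_global:
    import * as a from "./module_a";     // kept: a.TestClass is referenced
    export declare class C extends a.TestClass {
    }
    // A file with no exported top-level statement at all would yield no output nodes.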
@@ -1,8 +0,0 @@
-export declare class TestClass {
-    public say_hi();
-}
-export declare function say_hello_a();
-export declare namespace X {
-    export class Y {
-    }
-}
@@ -1,5 +0,0 @@
-import * as module_a from "./module_a.d";
-export declare class C extends module_a.TestClass {
-}
-export declare const say_a;
-export declare function say_b();
@@ -1,8 +0,0 @@
-import * as module_a from "./module_a.d";
-/* CLASS COMMENT!*/
-export declare class C extends module_a.TestClass {
-}
-/* Say a comment */
-export declare const say_a;
-/* Say b comment */
-export declare function say_b();
@@ -0,0 +1,8 @@
+import * as a from "./module_a";
+
+export declare class PokeModal {
+    //private source_map: a.TestClass[];
+    private _awaiters_unique_ids: {
+        [unique_id: string]: ((resolved: a.TestClass) => any)[];
+    };
+}
@@ -0,0 +1,6 @@
+declare global {
+    interface X {
+
+    }
+}
+export = {};
@@ -78,14 +78,6 @@ module.exports = {
                     };
                 }
             }
-            /*
-            {
-                "transform": "../../tools/trgen/ttsc_transformer.js",
-                "type": "program",
-                "target_file": "../generated/messages_script.json",
-                "verbose": true
-            }
-            */
         }
     ]
 },