mirror of https://github.com/sveltejs/svelte
Refactored preprocess for readability (#5763)
parent
a49375696f
commit
abf11bb02b
@ -0,0 +1,88 @@
|
||||
import { decode as decode_mappings } from 'sourcemap-codec';
|
||||
import { Processed } from './types';
|
||||
|
||||
/**
|
||||
* Import decoded sourcemap from mozilla/source-map/SourceMapGenerator
|
||||
* Forked from source-map/lib/source-map-generator.js
|
||||
* from methods _serializeMappings and toJSON.
|
||||
* We cannot use source-map.d.ts types, because we access hidden properties.
|
||||
*/
|
||||
function decoded_sourcemap_from_generator(generator: any) {
|
||||
let previous_generated_line = 1;
|
||||
const converted_mappings = [[]];
|
||||
let result_line;
|
||||
let result_segment;
|
||||
let mapping;
|
||||
|
||||
const source_idx = generator._sources.toArray()
|
||||
.reduce((acc, val, idx) => (acc[val] = idx, acc), {});
|
||||
|
||||
const name_idx = generator._names.toArray()
|
||||
.reduce((acc, val, idx) => (acc[val] = idx, acc), {});
|
||||
|
||||
const mappings = generator._mappings.toArray();
|
||||
result_line = converted_mappings[0];
|
||||
|
||||
for (let i = 0, len = mappings.length; i < len; i++) {
|
||||
mapping = mappings[i];
|
||||
|
||||
if (mapping.generatedLine > previous_generated_line) {
|
||||
while (mapping.generatedLine > previous_generated_line) {
|
||||
converted_mappings.push([]);
|
||||
previous_generated_line++;
|
||||
}
|
||||
result_line = converted_mappings[mapping.generatedLine - 1]; // line is one-based
|
||||
} else if (i > 0) {
|
||||
const previous_mapping = mappings[i - 1];
|
||||
if (
|
||||
// sorted by selectivity
|
||||
mapping.generatedColumn === previous_mapping.generatedColumn &&
|
||||
mapping.originalColumn === previous_mapping.originalColumn &&
|
||||
mapping.name === previous_mapping.name &&
|
||||
mapping.generatedLine === previous_mapping.generatedLine &&
|
||||
mapping.originalLine === previous_mapping.originalLine &&
|
||||
mapping.source === previous_mapping.source
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
result_line.push([mapping.generatedColumn]);
|
||||
result_segment = result_line[result_line.length - 1];
|
||||
|
||||
if (mapping.source != null) {
|
||||
result_segment.push(...[
|
||||
source_idx[mapping.source],
|
||||
mapping.originalLine - 1, // line is one-based
|
||||
mapping.originalColumn
|
||||
]);
|
||||
if (mapping.name != null) {
|
||||
result_segment.push(name_idx[mapping.name]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const map = {
|
||||
version: generator._version,
|
||||
sources: generator._sources.toArray(),
|
||||
names: generator._names.toArray(),
|
||||
mappings: converted_mappings
|
||||
};
|
||||
if (generator._file != null) {
|
||||
(map as any).file = generator._file;
|
||||
}
|
||||
// not needed: map.sourcesContent and map.sourceRoot
|
||||
return map;
|
||||
}
|
||||
|
||||
export function decode_map(processed: Processed) {
|
||||
let decoded_map = typeof processed.map === 'string' ? JSON.parse(processed.map) : processed.map;
|
||||
if (typeof(decoded_map.mappings) === 'string') {
|
||||
decoded_map.mappings = decode_mappings(decoded_map.mappings);
|
||||
}
|
||||
if ((decoded_map as any)._mappings && decoded_map.constructor.name === 'SourceMapGenerator') {
|
||||
// import decoded sourcemap from mozilla/source-map/SourceMapGenerator
|
||||
decoded_map = decoded_sourcemap_from_generator(decoded_map);
|
||||
}
|
||||
|
||||
return decoded_map;
|
||||
}
|
@ -1,327 +1,230 @@
|
||||
import { RawSourceMap, DecodedSourceMap } from '@ampproject/remapping/dist/types/types';
|
||||
import { decode as decode_mappings } from 'sourcemap-codec';
|
||||
import { getLocator } from 'locate-character';
|
||||
import {
|
||||
StringWithSourcemap,
|
||||
sourcemap_add_offset,
|
||||
combine_sourcemaps,
|
||||
parse_attached_sourcemap
|
||||
} from '../utils/string_with_sourcemap';
|
||||
|
||||
export interface Processed {
|
||||
code: string;
|
||||
map?: string | object; // we are opaque with the type here to avoid dependency on the remapping module for our public types.
|
||||
import { MappedCode, SourceLocation, parse_attached_sourcemap, sourcemap_add_offset, combine_sourcemaps } from '../utils/mapped_code';
|
||||
import { decode_map } from './decode_sourcemap';
|
||||
import { replace_in_code, slice_source } from './replace_in_code';
|
||||
import { MarkupPreprocessor, Source, Preprocessor, PreprocessorGroup, Processed } from './types';
|
||||
|
||||
// Partial update to the working source between preprocessor runs.
interface SourceUpdate {
	string?: string;           // new source code, replacing the previous code
	map?: DecodedSourceMap;    // sourcemap describing this transformation step
	dependencies?: string[];   // additional files the preprocessor read
}

// A set of preprocessors, one per part of the component they handle.
export interface PreprocessorGroup {
	markup?: (options: {
		content: string;
		filename: string;
	}) => Processed | Promise<Processed>;
	style?: Preprocessor;
	script?: Preprocessor;
}

// Preprocessor for the content of a single <script> or <style> tag.
export type Preprocessor = (options: {
	content: string;
	attributes: Record<string, string | boolean>;
	filename?: string;
}) => Processed | Promise<Processed>;
|
||||
|
||||
function parse_attributes(str: string) {
|
||||
const attrs = {};
|
||||
str.split(/\s+/).filter(Boolean).forEach(attr => {
|
||||
const p = attr.indexOf('=');
|
||||
if (p === -1) {
|
||||
attrs[attr] = true;
|
||||
} else {
|
||||
attrs[attr.slice(0, p)] = '\'"'.includes(attr[p + 1]) ?
|
||||
attr.slice(p + 2, -1) :
|
||||
attr.slice(p + 1);
|
||||
}
|
||||
});
|
||||
return attrs;
|
||||
}
|
||||
|
||||
function get_file_basename(filename: string) {
|
||||
return filename.split(/[/\\]/).pop();
|
||||
}
|
||||
|
||||
// One pending substitution: replaces `length` characters of the
// source starting at `offset` with already-mapped code.
interface Replacement {
	offset: number;
	length: number;
	replacement: StringWithSourcemap;
}
|
||||
|
||||
async function replace_async(
|
||||
file_basename: string,
|
||||
source: string,
|
||||
get_location: ReturnType<typeof getLocator>,
|
||||
re: RegExp,
|
||||
func: (...any) => Promise<StringWithSourcemap>
|
||||
): Promise<StringWithSourcemap> {
|
||||
const replacements: Array<Promise<Replacement>> = [];
|
||||
source.replace(re, (...args) => {
|
||||
replacements.push(
|
||||
func(...args).then(
|
||||
res =>
|
||||
({
|
||||
offset: args[args.length - 2],
|
||||
length: args[0].length,
|
||||
replacement: res
|
||||
}) as Replacement
|
||||
)
|
||||
);
|
||||
return '';
|
||||
});
|
||||
const out = new StringWithSourcemap();
|
||||
let last_end = 0;
|
||||
for (const { offset, length, replacement } of await Promise.all(
|
||||
replacements
|
||||
)) {
|
||||
// content = unchanged source characters before the replaced segment
|
||||
const content = StringWithSourcemap.from_source(
|
||||
file_basename, source.slice(last_end, offset), get_location(last_end));
|
||||
out.concat(content).concat(replacement);
|
||||
last_end = offset + length;
|
||||
}
|
||||
// final_content = unchanged source characters after last replaced segment
|
||||
const final_content = StringWithSourcemap.from_source(
|
||||
file_basename, source.slice(last_end), get_location(last_end));
|
||||
return out.concat(final_content);
|
||||
}
|
||||
|
||||
/**
|
||||
* Import decoded sourcemap from mozilla/source-map/SourceMapGenerator
|
||||
* Forked from source-map/lib/source-map-generator.js
|
||||
* from methods _serializeMappings and toJSON.
|
||||
* We cannot use source-map.d.ts types, because we access hidden properties.
|
||||
* Represents intermediate states of the preprocessing.
|
||||
*/
|
||||
function decoded_sourcemap_from_generator(generator: any) {
|
||||
let previous_generated_line = 1;
|
||||
const converted_mappings = [[]];
|
||||
let result_line;
|
||||
let result_segment;
|
||||
let mapping;
|
||||
|
||||
const source_idx = generator._sources.toArray()
|
||||
.reduce((acc, val, idx) => (acc[val] = idx, acc), {});
|
||||
|
||||
const name_idx = generator._names.toArray()
|
||||
.reduce((acc, val, idx) => (acc[val] = idx, acc), {});
|
||||
class PreprocessResult implements Source {
|
||||
// sourcemap_list is sorted in reverse order from last map (index 0) to first map (index -1)
|
||||
// so we use sourcemap_list.unshift() to add new maps
|
||||
// https://github.com/ampproject/remapping#multiple-transformations-of-a-file
|
||||
sourcemap_list: Array<DecodedSourceMap | RawSourceMap> = [];
|
||||
dependencies: string[] = [];
|
||||
file_basename: string;
|
||||
|
||||
const mappings = generator._mappings.toArray();
|
||||
result_line = converted_mappings[0];
|
||||
get_location: ReturnType<typeof getLocator>;
|
||||
|
||||
for (let i = 0, len = mappings.length; i < len; i++) {
|
||||
mapping = mappings[i];
|
||||
constructor(public source: string, public filename: string) {
|
||||
this.update_source({ string: source });
|
||||
|
||||
if (mapping.generatedLine > previous_generated_line) {
|
||||
while (mapping.generatedLine > previous_generated_line) {
|
||||
converted_mappings.push([]);
|
||||
previous_generated_line++;
|
||||
}
|
||||
result_line = converted_mappings[mapping.generatedLine - 1]; // line is one-based
|
||||
} else if (i > 0) {
|
||||
const previous_mapping = mappings[i - 1];
|
||||
if (
|
||||
// sorted by selectivity
|
||||
mapping.generatedColumn === previous_mapping.generatedColumn &&
|
||||
mapping.originalColumn === previous_mapping.originalColumn &&
|
||||
mapping.name === previous_mapping.name &&
|
||||
mapping.generatedLine === previous_mapping.generatedLine &&
|
||||
mapping.originalLine === previous_mapping.originalLine &&
|
||||
mapping.source === previous_mapping.source
|
||||
) {
|
||||
continue;
|
||||
// preprocess source must be relative to itself or equal null
|
||||
this.file_basename = filename == null ? null : get_file_basename(filename);
|
||||
}
|
||||
|
||||
update_source({ string: source, map, dependencies }: SourceUpdate) {
|
||||
if (source != null) {
|
||||
this.source = source;
|
||||
this.get_location = getLocator(source);
|
||||
}
|
||||
result_line.push([mapping.generatedColumn]);
|
||||
result_segment = result_line[result_line.length - 1];
|
||||
|
||||
if (mapping.source != null) {
|
||||
result_segment.push(...[
|
||||
source_idx[mapping.source],
|
||||
mapping.originalLine - 1, // line is one-based
|
||||
mapping.originalColumn
|
||||
]);
|
||||
if (mapping.name != null) {
|
||||
result_segment.push(name_idx[mapping.name]);
|
||||
|
||||
if (map) {
|
||||
this.sourcemap_list.unshift(map);
|
||||
}
|
||||
|
||||
if (dependencies) {
|
||||
this.dependencies.push(...dependencies);
|
||||
}
|
||||
}
|
||||
|
||||
const map = {
|
||||
version: generator._version,
|
||||
sources: generator._sources.toArray(),
|
||||
names: generator._names.toArray(),
|
||||
mappings: converted_mappings
|
||||
to_processed(): Processed {
|
||||
// Combine all the source maps for each preprocessor function into one
|
||||
const map: RawSourceMap = combine_sourcemaps(this.file_basename, this.sourcemap_list);
|
||||
|
||||
return {
|
||||
// TODO return separated output, in future version where svelte.compile supports it:
|
||||
// style: { code: styleCode, map: styleMap },
|
||||
// script { code: scriptCode, map: scriptMap },
|
||||
// markup { code: markupCode, map: markupMap },
|
||||
|
||||
code: this.source,
|
||||
dependencies: [...new Set(this.dependencies)],
|
||||
map: map as object,
|
||||
toString: () => this.source
|
||||
};
|
||||
if (generator._file != null) {
|
||||
(map as any).file = generator._file;
|
||||
}
|
||||
// not needed: map.sourcesContent and map.sourceRoot
|
||||
return map;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a preprocessor output and its leading prefix and trailing suffix into StringWithSourceMap
|
||||
* Convert preprocessor output for the tag content into MappedCode
|
||||
*/
|
||||
function get_replacement(
|
||||
file_basename: string,
|
||||
offset: number,
|
||||
get_location: ReturnType<typeof getLocator>,
|
||||
original: string,
|
||||
processed: Processed,
|
||||
prefix: string,
|
||||
suffix: string,
|
||||
tag_name: 'script' | 'style'
|
||||
): StringWithSourcemap {
|
||||
|
||||
// Convert the unchanged prefix and suffix to StringWithSourcemap
|
||||
const prefix_with_map = StringWithSourcemap.from_source(
|
||||
file_basename, prefix, get_location(offset));
|
||||
const suffix_with_map = StringWithSourcemap.from_source(
|
||||
file_basename, suffix, get_location(offset + prefix.length + original.length));
|
||||
|
||||
parse_attached_sourcemap(processed, tag_name);
|
||||
|
||||
// Convert the preprocessed code and its sourcemap to a StringWithSourcemap
|
||||
function processed_content_to_code(processed: Processed, location: SourceLocation, file_basename: string): MappedCode {
|
||||
// Convert the preprocessed code and its sourcemap to a MappedCode
|
||||
let decoded_map: DecodedSourceMap;
|
||||
if (processed.map) {
|
||||
decoded_map = typeof processed.map === 'string' ? JSON.parse(processed.map) : processed.map;
|
||||
if (typeof(decoded_map.mappings) === 'string') {
|
||||
decoded_map.mappings = decode_mappings(decoded_map.mappings);
|
||||
}
|
||||
if ((decoded_map as any)._mappings && decoded_map.constructor.name === 'SourceMapGenerator') {
|
||||
// import decoded sourcemap from mozilla/source-map/SourceMapGenerator
|
||||
decoded_map = decoded_sourcemap_from_generator(decoded_map);
|
||||
}
|
||||
decoded_map = decode_map(processed);
|
||||
|
||||
// offset only segments pointing at original component source
|
||||
const source_index = decoded_map.sources.indexOf(file_basename);
|
||||
if (source_index !== -1) {
|
||||
sourcemap_add_offset(decoded_map, get_location(offset + prefix.length), source_index);
|
||||
sourcemap_add_offset(decoded_map, location, source_index);
|
||||
}
|
||||
}
|
||||
const processed_with_map = StringWithSourcemap.from_processed(processed.code, decoded_map);
|
||||
|
||||
// Surround the processed code with the prefix and suffix, retaining valid sourcemappings
|
||||
return prefix_with_map.concat(processed_with_map).concat(suffix_with_map);
|
||||
return MappedCode.from_processed(processed.code, decoded_map);
|
||||
}
|
||||
|
||||
export default async function preprocess(
|
||||
source: string,
|
||||
preprocessor: PreprocessorGroup | PreprocessorGroup[],
|
||||
options?: { filename?: string }
|
||||
) {
|
||||
// @ts-ignore todo: doublecheck
|
||||
const filename = (options && options.filename) || preprocessor.filename; // legacy
|
||||
const dependencies = [];
|
||||
/**
|
||||
* Given the whole tag including content, return a `MappedCode`
|
||||
* representing the tag content replaced with `processed`.
|
||||
*/
|
||||
function processed_tag_to_code(
|
||||
processed: Processed,
|
||||
tag_name: 'style' | 'script',
|
||||
attributes: string,
|
||||
source: Source
|
||||
): MappedCode {
|
||||
const { file_basename, get_location } = source;
|
||||
|
||||
// preprocess source must be relative to itself or equal null
|
||||
const file_basename = filename == null ? null : get_file_basename(filename);
|
||||
const build_mapped_code = (code: string, offset: number) =>
|
||||
MappedCode.from_source(slice_source(code, offset, source));
|
||||
|
||||
const preprocessors = preprocessor
|
||||
? Array.isArray(preprocessor) ? preprocessor : [preprocessor]
|
||||
: [];
|
||||
const tag_open = `<${tag_name}${attributes || ''}>`;
|
||||
const tag_close = `</${tag_name}>`;
|
||||
|
||||
const markup = preprocessors.map(p => p.markup).filter(Boolean);
|
||||
const script = preprocessors.map(p => p.script).filter(Boolean);
|
||||
const style = preprocessors.map(p => p.style).filter(Boolean);
|
||||
const tag_open_code = build_mapped_code(tag_open, 0);
|
||||
const tag_close_code = build_mapped_code(tag_close, tag_open.length + source.source.length);
|
||||
|
||||
// sourcemap_list is sorted in reverse order from last map (index 0) to first map (index -1)
|
||||
// so we use sourcemap_list.unshift() to add new maps
|
||||
// https://github.com/ampproject/remapping#multiple-transformations-of-a-file
|
||||
const sourcemap_list: Array<DecodedSourceMap | RawSourceMap> = [];
|
||||
parse_attached_sourcemap(processed, tag_name);
|
||||
|
||||
// TODO keep track: what preprocessor generated what sourcemap? to make debugging easier = detect low-resolution sourcemaps in fn combine_mappings
|
||||
const content_code = processed_content_to_code(processed, get_location(tag_open.length), file_basename);
|
||||
|
||||
for (const fn of markup) {
|
||||
return tag_open_code.concat(content_code).concat(tag_close_code);
|
||||
}
|
||||
|
||||
function parse_tag_attributes(str: string) {
|
||||
// note: won't work with attribute values containing spaces.
|
||||
return str
|
||||
.split(/\s+/)
|
||||
.filter(Boolean)
|
||||
.reduce((attrs, attr) => {
|
||||
const i = attr.indexOf('=');
|
||||
const [key, value] = i > 0 ? [attr.slice(0, i), attr.slice(i+1)] : [attr];
|
||||
const [, unquoted] = (value && value.match(/^['"](.*)['"]$/)) || [];
|
||||
|
||||
return { ...attrs, [key]: unquoted ?? value ?? true };
|
||||
}, {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the updates required to process all instances of the specified tag.
|
||||
*/
|
||||
async function process_tag(
|
||||
tag_name: 'style' | 'script',
|
||||
preprocessor: Preprocessor,
|
||||
source: Source
|
||||
): Promise<SourceUpdate> {
|
||||
const { filename } = source;
|
||||
const tag_regex =
|
||||
tag_name === 'style'
|
||||
? /<!--[^]*?-->|<style(\s[^]*?)?(?:>([^]*?)<\/style>|\/>)/gi
|
||||
: /<!--[^]*?-->|<script(\s[^]*?)?(?:>([^]*?)<\/script>|\/>)/gi;
|
||||
|
||||
// run markup preprocessor
|
||||
const processed = await fn({
|
||||
content: source,
|
||||
const dependencies: string[] = [];
|
||||
|
||||
async function process_single_tag(
|
||||
tag_with_content: string,
|
||||
attributes = '',
|
||||
content = '',
|
||||
tag_offset: number
|
||||
): Promise<MappedCode> {
|
||||
const no_change = () => MappedCode.from_source(slice_source(tag_with_content, tag_offset, source));
|
||||
|
||||
if (!attributes && !content) return no_change();
|
||||
|
||||
const processed = await preprocessor({
|
||||
content: content || '',
|
||||
attributes: parse_tag_attributes(attributes || ''),
|
||||
filename
|
||||
});
|
||||
|
||||
if (!processed) continue;
|
||||
|
||||
if (!processed) return no_change();
|
||||
if (processed.dependencies) dependencies.push(...processed.dependencies);
|
||||
source = processed.code;
|
||||
if (processed.map) {
|
||||
sourcemap_list.unshift(
|
||||
typeof(processed.map) === 'string'
|
||||
? JSON.parse(processed.map)
|
||||
: processed.map
|
||||
);
|
||||
}
|
||||
if (!processed.map && processed.code === content) return no_change();
|
||||
|
||||
return processed_tag_to_code(processed, tag_name, attributes, slice_source(content, tag_offset, source));
|
||||
}
|
||||
|
||||
async function preprocess_tag_content(tag_name: 'style' | 'script', preprocessor: Preprocessor) {
|
||||
const get_location = getLocator(source);
|
||||
const tag_regex = tag_name === 'style'
|
||||
? /<!--[^]*?-->|<style(\s[^]*?)?(?:>([^]*?)<\/style>|\/>)/gi
|
||||
: /<!--[^]*?-->|<script(\s[^]*?)?(?:>([^]*?)<\/script>|\/>)/gi;
|
||||
const { string, map } = await replace_in_code(tag_regex, process_single_tag, source);
|
||||
|
||||
const res = await replace_async(
|
||||
file_basename,
|
||||
source,
|
||||
get_location,
|
||||
tag_regex,
|
||||
async (match, attributes = '', content = '', offset) => {
|
||||
const no_change = () => StringWithSourcemap.from_source(
|
||||
file_basename, match, get_location(offset));
|
||||
if (!attributes && !content) {
|
||||
return no_change();
|
||||
}
|
||||
attributes = attributes || '';
|
||||
content = content || '';
|
||||
return { string, map, dependencies };
|
||||
}
|
||||
|
||||
// run script preprocessor
|
||||
const processed = await preprocessor({
|
||||
content,
|
||||
attributes: parse_attributes(attributes),
|
||||
async function process_markup(filename: string, process: MarkupPreprocessor, source: Source) {
|
||||
const processed = await process({
|
||||
content: source.source,
|
||||
filename
|
||||
});
|
||||
if (processed && processed.dependencies) {
|
||||
dependencies.push(...processed.dependencies);
|
||||
}
|
||||
if (!processed || !processed.map && processed.code === content) {
|
||||
return no_change();
|
||||
}
|
||||
return get_replacement(file_basename, offset, get_location, content, processed, `<${tag_name}${attributes}>`, `</${tag_name}>`, tag_name);
|
||||
}
|
||||
);
|
||||
source = res.string;
|
||||
sourcemap_list.unshift(res.map);
|
||||
}
|
||||
|
||||
for (const fn of script) {
|
||||
await preprocess_tag_content('script', fn);
|
||||
if (processed) {
|
||||
return {
|
||||
string: processed.code,
|
||||
map: processed.map
|
||||
? // TODO: can we use decode_sourcemap?
|
||||
typeof processed.map === 'string'
|
||||
? JSON.parse(processed.map)
|
||||
: processed.map
|
||||
: undefined,
|
||||
dependencies: processed.dependencies
|
||||
};
|
||||
} else {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
for (const fn of style) {
|
||||
await preprocess_tag_content('style', fn);
|
||||
}
|
||||
export default async function preprocess(
|
||||
source: string,
|
||||
preprocessor: PreprocessorGroup | PreprocessorGroup[],
|
||||
options?: { filename?: string }
|
||||
): Promise<Processed> {
|
||||
// @ts-ignore todo: doublecheck
|
||||
const filename = (options && options.filename) || preprocessor.filename; // legacy
|
||||
|
||||
// Combine all the source maps for each preprocessor function into one
|
||||
const map: RawSourceMap = combine_sourcemaps(
|
||||
file_basename,
|
||||
sourcemap_list
|
||||
);
|
||||
const preprocessors = preprocessor ? (Array.isArray(preprocessor) ? preprocessor : [preprocessor]) : [];
|
||||
|
||||
return {
|
||||
// TODO return separated output, in future version where svelte.compile supports it:
|
||||
// style: { code: styleCode, map: styleMap },
|
||||
// script { code: scriptCode, map: scriptMap },
|
||||
// markup { code: markupCode, map: markupMap },
|
||||
const markup = preprocessors.map(p => p.markup).filter(Boolean);
|
||||
const script = preprocessors.map(p => p.script).filter(Boolean);
|
||||
const style = preprocessors.map(p => p.style).filter(Boolean);
|
||||
|
||||
const result = new PreprocessResult(source, filename);
|
||||
|
||||
code: source,
|
||||
dependencies: [...new Set(dependencies)],
|
||||
map: (map as object),
|
||||
toString() {
|
||||
return source;
|
||||
// TODO keep track: what preprocessor generated what sourcemap?
|
||||
// to make debugging easier = detect low-resolution sourcemaps in fn combine_mappings
|
||||
|
||||
for (const process of markup) {
|
||||
result.update_source(await process_markup(filename, process, result));
|
||||
}
|
||||
};
|
||||
|
||||
for (const process of script) {
|
||||
result.update_source(await process_tag('script', process, result));
|
||||
}
|
||||
|
||||
for (const preprocess of style) {
|
||||
result.update_source(await process_tag('style', preprocess, result));
|
||||
}
|
||||
|
||||
return result.to_processed();
|
||||
}
|
||||
|
@ -0,0 +1,75 @@
|
||||
import { MappedCode } from '../utils/mapped_code';
|
||||
import { Source } from './types';
|
||||
|
||||
// One pending substitution: replaces `length` characters of the
// source starting at `offset` with already-mapped code.
interface Replacement {
	offset: number;
	length: number;
	replacement: MappedCode;
}
|
||||
|
||||
export function slice_source(
|
||||
code_slice: string,
|
||||
offset: number,
|
||||
{ file_basename, filename, get_location }: Source
|
||||
): Source {
|
||||
return {
|
||||
source: code_slice,
|
||||
get_location: (index: number) => get_location(index + offset),
|
||||
file_basename,
|
||||
filename
|
||||
};
|
||||
}
|
||||
|
||||
function calculate_replacements(
|
||||
re: RegExp,
|
||||
get_replacement: (...match: any[]) => Promise<MappedCode>,
|
||||
source: string
|
||||
) {
|
||||
const replacements: Array<Promise<Replacement>> = [];
|
||||
|
||||
source.replace(re, (...match) => {
|
||||
replacements.push(
|
||||
get_replacement(...match).then(
|
||||
replacement => {
|
||||
const matched_string = match[0];
|
||||
const offset = match[match.length-2];
|
||||
|
||||
return ({ offset, length: matched_string.length, replacement });
|
||||
}
|
||||
)
|
||||
);
|
||||
return '';
|
||||
});
|
||||
|
||||
return Promise.all(replacements);
|
||||
}
|
||||
|
||||
function perform_replacements(
|
||||
replacements: Replacement[],
|
||||
source: Source
|
||||
): MappedCode {
|
||||
const out = new MappedCode();
|
||||
let last_end = 0;
|
||||
|
||||
for (const { offset, length, replacement } of replacements) {
|
||||
const unchanged_prefix = MappedCode.from_source(
|
||||
slice_source(source.source.slice(last_end, offset), last_end, source)
|
||||
);
|
||||
out.concat(unchanged_prefix).concat(replacement);
|
||||
last_end = offset + length;
|
||||
}
|
||||
|
||||
const unchanged_suffix = MappedCode.from_source(slice_source(source.source.slice(last_end), last_end, source));
|
||||
|
||||
return out.concat(unchanged_suffix);
|
||||
}
|
||||
|
||||
export async function replace_in_code(
|
||||
regex: RegExp,
|
||||
get_replacement: (...match: any[]) => Promise<MappedCode>,
|
||||
location: Source
|
||||
): Promise<MappedCode> {
|
||||
const replacements = await calculate_replacements(regex, get_replacement, location.source);
|
||||
|
||||
return perform_replacements(replacements, location);
|
||||
}
|
@ -0,0 +1,32 @@
|
||||
import { Location } from 'locate-character';
|
||||
|
||||
// A unit of code being preprocessed, together with enough context to
// produce correct sourcemap locations for it.
export interface Source {
	source: string;                              // the code itself
	get_location: (search: number) => Location;  // character index in `source` -> Location
	file_basename: string;                       // filename without directory components
	filename: string;                            // filename as provided by the caller
}

// Result of a single preprocessor run.
export interface Processed {
	code: string;
	map?: string | object; // we are opaque with the type here to avoid dependency on the remapping module for our public types.
	dependencies?: string[];
	toString?: () => string;
}

// Preprocessor operating on the whole markup of a component.
export type MarkupPreprocessor = (options: {
	content: string;
	filename: string;
}) => Processed | Promise<Processed>;

// Preprocessor operating on the content of a single <script> or <style> tag.
export type Preprocessor = (options: {
	content: string;
	attributes: Record<string, string | boolean>;
	filename?: string;
}) => Processed | Promise<Processed>;

// A set of preprocessors, grouped by the part of the component they handle.
export interface PreprocessorGroup {
	markup?: MarkupPreprocessor;
	style?: Preprocessor;
	script?: Preprocessor;
}
|
Loading…
Reference in new issue