66 changes: 39 additions & 27 deletions src/concat_sourcemap_builder.rs
@@ -1,14 +1,19 @@
use std::sync::Arc;

use crate::{SourceMap, Token, token::TokenChunk};
#[cfg(test)]
use crate::Token;
use crate::{
SourceMap,
token::{TokenChunk, Tokens},
};

/// `ConcatSourceMapBuilder` is a helper for concatenating sourcemaps.
#[derive(Debug, Default)]
pub struct ConcatSourceMapBuilder {
pub(crate) names: Vec<Arc<str>>,
pub(crate) sources: Vec<Arc<str>>,
pub(crate) source_contents: Vec<Option<Arc<str>>>,
pub(crate) tokens: Vec<Token>,
pub(crate) tokens: Tokens,
/// `token_chunks` is used to encode tokens to VLQ mappings in parallel.
pub(crate) token_chunks: Vec<TokenChunk>,
pub(crate) token_chunk_prev_source_id: u32,
@@ -32,7 +37,7 @@ impl ConcatSourceMapBuilder {
names: Vec::with_capacity(names_len),
sources: Vec::with_capacity(sources_len),
source_contents: Vec::with_capacity(sources_len),
tokens: Vec::with_capacity(tokens_len),
tokens: Tokens::with_capacity(tokens_len),
token_chunks: Vec::with_capacity(token_chunks_len),
token_chunk_prev_source_id: 0,
token_chunk_prev_name_id: 0,
@@ -120,23 +125,24 @@ impl ConcatSourceMapBuilder {

// Extend `tokens`.
self.tokens.reserve(sourcemap.tokens.len());
let tokens = sourcemap.get_tokens().map(|token| {
Token::new(
for token in sourcemap.get_tokens() {
let source_id = token.get_source_id().map(|x| {
self.token_chunk_prev_source_id = x + source_offset;
self.token_chunk_prev_source_id
});
let name_id = token.get_name_id().map(|x| {
self.token_chunk_prev_name_id = x + name_offset;
self.token_chunk_prev_name_id
});
self.tokens.push_raw(
token.get_dst_line() + line_offset,
token.get_dst_col(),
token.get_src_line(),
token.get_src_col(),
token.get_source_id().map(|x| {
self.token_chunk_prev_source_id = x + source_offset;
self.token_chunk_prev_source_id
}),
token.get_name_id().map(|x| {
self.token_chunk_prev_name_id = x + name_offset;
self.token_chunk_prev_name_id
}),
)
});
self.tokens.extend(tokens);
source_id,
name_id,
);
}
}

pub fn into_sourcemap(self) -> SourceMap {
@@ -146,7 +152,7 @@ impl ConcatSourceMapBuilder {
None,
self.sources,
self.source_contents,
self.tokens.into_boxed_slice(),
self.tokens,
Some(self.token_chunks),
)
}
@@ -173,53 +179,59 @@ fn run_test<F>(create_builder: F)
where
F: Fn(&[(&SourceMap, u32)]) -> ConcatSourceMapBuilder,
{
let mut tokens1 = Tokens::new();
tokens1.push(Token::new(1, 1, 1, 1, Some(0), Some(0)));
let sm1 = SourceMap::new(
None,
vec!["foo".into(), "foo2".into()],
None,
vec!["foo.js".into()],
vec![],
vec![Token::new(1, 1, 1, 1, Some(0), Some(0))].into_boxed_slice(),
tokens1,
None,
);
let mut tokens2 = Tokens::new();
tokens2.push(Token::new(1, 1, 1, 1, Some(0), Some(0)));
let sm2 = SourceMap::new(
None,
vec!["bar".into()],
None,
vec!["bar.js".into()],
vec![],
vec![Token::new(1, 1, 1, 1, Some(0), Some(0))].into_boxed_slice(),
tokens2,
None,
);
let mut tokens3 = Tokens::new();
tokens3.push(Token::new(1, 2, 2, 2, Some(0), Some(0)));
let sm3 = SourceMap::new(
None,
vec!["abc".into()],
None,
vec!["abc.js".into()],
vec![],
vec![Token::new(1, 2, 2, 2, Some(0), Some(0))].into_boxed_slice(),
tokens3,
None,
);

let builder = create_builder(&[(&sm1, 0), (&sm2, 2), (&sm3, 2)]);

let mut expected_tokens = Tokens::new();
expected_tokens.push(Token::new(1, 1, 1, 1, Some(0), Some(0)));
expected_tokens.push(Token::new(3, 1, 1, 1, Some(1), Some(2)));
expected_tokens.push(Token::new(3, 2, 2, 2, Some(2), Some(3)));

let sm = SourceMap::new(
None,
vec!["foo".into(), "foo2".into(), "bar".into(), "abc".into()],
None,
vec!["foo.js".into(), "bar.js".into(), "abc.js".into()],
vec![],
vec![
Token::new(1, 1, 1, 1, Some(0), Some(0)),
Token::new(3, 1, 1, 1, Some(1), Some(2)),
Token::new(3, 2, 2, 2, Some(2), Some(3)),
]
.into_boxed_slice(),
expected_tokens.clone(),
None,
);
let concat_sm = builder.into_sourcemap();

assert_eq!(concat_sm.tokens, sm.tokens);
assert_eq!(concat_sm.tokens, expected_tokens);
assert_eq!(concat_sm.sources, sm.sources);
assert_eq!(concat_sm.names, sm.names);
assert_eq!(
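The definition of `Tokens` itself lives in `src/token.rs`, which is not part of the hunks shown here. Judging from the calls the changed code relies on (`new`, `with_capacity`, `reserve`, `len`, `push`, `push_raw`, `get`, plus `Clone`/`PartialEq` in the test), it reads like a struct-of-arrays replacement for the old `Box<[Token]>`. Below is a minimal sketch of such a type, assuming `Option<u32>` source/name ids and purely illustrative field names; the real definition may differ.

```rust
// Purely illustrative: the real `Tokens` is defined in src/token.rs (outside
// this diff). This sketch only mirrors the API the changed code calls, using
// the `Token` constructor/getters visible elsewhere in the diff.
#[derive(Debug, Default, Clone, PartialEq)]
pub struct Tokens {
    dst_lines: Vec<u32>,
    dst_cols: Vec<u32>,
    src_lines: Vec<u32>,
    src_cols: Vec<u32>,
    source_ids: Vec<Option<u32>>,
    name_ids: Vec<Option<u32>>,
}

impl Tokens {
    pub fn new() -> Self {
        Self::default()
    }

    pub fn with_capacity(capacity: usize) -> Self {
        let mut tokens = Self::default();
        tokens.reserve(capacity);
        tokens
    }

    pub fn len(&self) -> usize {
        self.dst_lines.len()
    }

    pub fn reserve(&mut self, additional: usize) {
        self.dst_lines.reserve(additional);
        self.dst_cols.reserve(additional);
        self.src_lines.reserve(additional);
        self.src_cols.reserve(additional);
        self.source_ids.reserve(additional);
        self.name_ids.reserve(additional);
    }

    /// Append one token from its raw fields, without building a `Token` first.
    pub fn push_raw(
        &mut self,
        dst_line: u32,
        dst_col: u32,
        src_line: u32,
        src_col: u32,
        source_id: Option<u32>,
        name_id: Option<u32>,
    ) {
        self.dst_lines.push(dst_line);
        self.dst_cols.push(dst_col);
        self.src_lines.push(src_line);
        self.src_cols.push(src_col);
        self.source_ids.push(source_id);
        self.name_ids.push(name_id);
    }

    /// Append an already-constructed `Token`.
    pub fn push(&mut self, token: Token) {
        self.push_raw(
            token.get_dst_line(),
            token.get_dst_col(),
            token.get_src_line(),
            token.get_src_col(),
            token.get_source_id(),
            token.get_name_id(),
        );
    }

    /// Materialize the token at `index` by value; a struct-of-arrays layout
    /// has no contiguous `Token` to hand out by reference.
    pub fn get(&self, index: usize) -> Option<Token> {
        (index < self.len()).then(|| {
            Token::new(
                self.dst_lines[index],
                self.dst_cols[index],
                self.src_lines[index],
                self.src_cols[index],
                self.source_ids[index],
                self.name_ids[index],
            )
        })
    }
}
```

A plausible motivation for the columnar layout is that the builder can append raw fields via `push_raw` without constructing an intermediate `Token`, and the per-field vectors are friendlier to the CPU cache during encoding.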
8 changes: 4 additions & 4 deletions src/decode.rs
@@ -3,7 +3,7 @@
use std::sync::Arc;

use crate::error::{Error, Result};
use crate::token::INVALID_ID;
use crate::token::{INVALID_ID, Tokens};
use crate::{SourceMap, Token};

/// See <https://github.com/tc39/source-map/blob/1930e58ffabefe54038f7455759042c6e3dd590e/source-map-rev3.md>.
@@ -44,7 +44,7 @@ pub fn decode(json: JSONSourceMap) -> Result<SourceMap> {
.sources_content
.map(|content| content.into_iter().map(|c| c.map(Arc::from)).collect())
.unwrap_or_default(),
tokens: tokens.into_boxed_slice(),
tokens,
token_chunks: None,
x_google_ignore_list: json.x_google_ignore_list,
debug_id: json.debug_id,
@@ -55,8 +55,8 @@ pub fn decode_from_string(value: &str) -> Result<SourceMap> {
decode(serde_json::from_str(value)?)
}

fn decode_mapping(mapping: &str, names_len: usize, sources_len: usize) -> Result<Vec<Token>> {
let mut tokens = vec![];
fn decode_mapping(mapping: &str, names_len: usize, sources_len: usize) -> Result<Tokens> {
let mut tokens = Tokens::new();

let mut dst_col;
let mut src_id = 0;
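The body of the VLQ decode loop falls outside the visible hunks, so the actual push site is not shown. Since `decode_mapping` now returns `Tokens` and still imports `INVALID_ID`, the per-segment push presumably moves from building a `Token` to something along these lines (hedged sketch; the local names follow the declarations visible above, and `dst_line`/`src_line`/`src_col`/`name_id` are assumed to be tracked the same way as before):

```rust
// Hypothetical, not part of the visible diff: push one decoded segment into
// the Tokens container, translating the INVALID_ID sentinel into Option.
tokens.push_raw(
    dst_line,
    dst_col,
    src_line,
    src_col,
    (src_id != INVALID_ID).then_some(src_id),
    (name_id != INVALID_ID).then_some(name_id),
);
```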
18 changes: 11 additions & 7 deletions src/encode.rs
@@ -3,7 +3,10 @@
use json_escape_simd::{escape_into, escape_into_generic};

use crate::JSONSourceMap;
use crate::{SourceMap, Token, token::TokenChunk};
use crate::{
SourceMap,
token::{TokenChunk, Tokens},
};

pub fn encode(sourcemap: &SourceMap) -> JSONSourceMap {
JSONSourceMap {
@@ -181,7 +184,7 @@ fn serialize_sourcemap_mappings(sm: &SourceMap, output: &mut String) {
// Max length of a single VLQ encoding
const MAX_VLQ_BYTES: usize = 7;

fn serialize_mappings(tokens: &[Token], token_chunk: &TokenChunk, output: &mut String) {
fn serialize_mappings(tokens: &Tokens, token_chunk: &TokenChunk, output: &mut String) {
let TokenChunk {
start,
end,
@@ -193,9 +196,10 @@ fn serialize_mappings(tokens: &Tokens, token_chunk: &TokenChunk, output: &mut S
mut prev_source_id,
} = *token_chunk;

let mut prev_token = if start == 0 { None } else { Some(&tokens[start as usize - 1]) };
let mut prev_token = if start == 0 { None } else { tokens.get(start as usize - 1) };

for token in &tokens[start as usize..end as usize] {
for idx in start as usize..end as usize {
let token = tokens.get(idx).unwrap();
// Max length of a single VLQ encoding is 7 bytes. Max number of calls to `encode_vlq_diff` is 5.
// Also need 1 byte for each line number difference, or 1 byte if no line num difference.
// Reserve this amount of capacity in `rv` early, so can skip bounds checks in code below.
Expand All @@ -211,8 +215,8 @@ fn serialize_mappings(tokens: &[Token], token_chunk: &TokenChunk, output: &mut S
unsafe { push_bytes_unchecked(output, b';', num_line_breaks) };
prev_dst_col = 0;
prev_dst_line += num_line_breaks;
} else if let Some(prev_token) = prev_token {
if prev_token == token {
} else if let Some(prev_token) = prev_token.as_ref() {
if *prev_token == token {
continue;
}
output.reserve(MAX_TOTAL_VLQ_BYTES + 1);
Expand Down Expand Up @@ -458,7 +462,7 @@ fn test_encode_escape_string() {
None,
vec!["\0".into()],
vec![Some("emoji-👀-\0".into())],
vec![].into_boxed_slice(),
Tokens::new(),
None,
);
sm.set_x_google_ignore_list(vec![0]);
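The iteration in `serialize_mappings` above also changes shape for a structural reason: a struct-of-arrays container has no contiguous `Token` values to borrow, so the old slice iteration (`&tokens[start as usize..end as usize]`) is no longer possible and the code indexes with `tokens.get(idx)`, which returns an owned `Token` (per the sketch after the builder diff). The same windowed iteration pattern in isolation, with the helper name being illustrative only:

```rust
// Illustrative helper (not in the crate): visit the half-open window
// [start, end) of a Tokens container, materializing each Token by value.
fn for_each_in_chunk(tokens: &Tokens, start: u32, end: u32, mut visit: impl FnMut(Token)) {
    for idx in start as usize..end as usize {
        // `get` only returns None if the chunk range is out of bounds.
        if let Some(token) = tokens.get(idx) {
            visit(token);
        }
    }
}
```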
2 changes: 1 addition & 1 deletion src/lib.rs
@@ -16,4 +16,4 @@ pub use error::Error;
pub use sourcemap::SourceMap;
pub use sourcemap_builder::SourceMapBuilder;
pub use sourcemap_visualizer::SourcemapVisualizer;
pub use token::{SourceViewToken, Token, TokenChunk};
pub use token::{SourceViewToken, Token, TokenChunk, Tokens};