Reformat some long lines and macros.

Author: Dan Gohman
Date: 2019-11-08 15:44:07 -08:00
Commit: e8f08193fc (parent 9a4992f601)

8 changed files with 84 additions and 49 deletions

@@ -265,7 +265,11 @@ impl WorkerThread {
let errno_val = errno::errno().0;
if errno_val != 0 {
warn!("Failed to lower worker thread priority. It might affect application performance. errno: {}", errno_val);
warn!(
"Failed to lower worker thread priority. It might affect application performance. \
errno: {}",
errno_val
);
} else {
debug!("New nice value of worker thread: {}", current_nice);
}
@@ -333,12 +337,9 @@ impl WorkerThread {
.ok()
})
.and_then(|cache_bytes| {
zstd::encode_all(
&cache_bytes[..],
opt_compr_lvl,
)
.map_err(|err| warn!("Failed to compress cached code: {}", err))
.ok()
zstd::encode_all(&cache_bytes[..], opt_compr_lvl)
.map_err(|err| warn!("Failed to compress cached code: {}", err))
.ok()
})
.and_then(|recompressed_cache_bytes| {
fs::write(&lock_path, &recompressed_cache_bytes)
@@ -379,21 +380,31 @@ impl WorkerThread {
// the cache file and the stats file (they are not updated together atomically)
// Possible solution is to use directories per cache entry, but it complicates the system
// and is not worth it.
debug!("DETECTED task did more than once (or race with new file): recompression of {}. \
Note: if optimized compression level setting has changed in the meantine, \
the stats file might contain inconsistent compression level due to race.", path.display());
}
else {
debug!(
"DETECTED task did more than once (or race with new file): \
recompression of {}. Note: if optimized compression level setting \
has changed in the meantine, the stats file might contain \
inconsistent compression level due to race.",
path.display()
);
} else {
new_stats.compression_level = opt_compr_lvl;
let _ = write_stats_file(stats_path.as_ref(), &new_stats);
}
if new_stats.usages < stats.usages {
debug!("DETECTED lower usage count (new file or race with counter increasing): file {}", path.display());
debug!(
"DETECTED lower usage count (new file or race with counter \
increasing): file {}",
path.display()
);
}
}
else {
debug!("Can't read stats file again to update compression level (it might got cleaned up): file {}", stats_path.display());
} else {
debug!(
"Can't read stats file again to update compression level (it might got \
cleaned up): file {}",
stats_path.display()
);
}
});
@@ -690,11 +701,15 @@ impl WorkerThread {
add_unrecognized_and!(
[file: stats_path],
unwrap_or!(
mod_metadata.modified(),
add_unrecognized_and!([file: stats_path, file: mod_path], continue),
"Failed to get mtime, deleting BOTH module cache and stats files",
mod_path
)
mod_metadata.modified(),
add_unrecognized_and!(
[file: stats_path, file: mod_path],
continue
),
"Failed to get mtime, deleting BOTH module cache and stats \
files",
mod_path
)
),
"Failed to get metadata/mtime, deleting the file",
stats_path
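For reference, the wrapping applied in the hunks above relies on Rust's backslash line continuation inside string literals: a trailing `\` consumes the newline and any leading whitespace on the following line, so a message split across source lines still formats as a single line at runtime. A minimal standalone sketch of the pattern (the values below are illustrative, not taken from this diff):

    fn main() {
        let errno_val = 1; // illustrative value, not from the diff
        // The trailing `\` strips the newline and the indentation that follows,
        // so the two source lines produce one continuous message.
        let msg = format!(
            "Failed to lower worker thread priority. It might affect application performance. \
             errno: {}",
            errno_val
        );
        assert_eq!(
            msg,
            "Failed to lower worker thread priority. It might affect application performance. errno: 1"
        );
        println!("{}", msg);
    }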

@@ -41,11 +41,11 @@ pub fn link_module(
if signature != *import_signature {
// TODO: If the difference is in the calling convention,
// we could emit a wrapper function to fix it up.
return Err(LinkError(
format!("{}/{}: incompatible import type: exported function with signature {} incompatible with function import with signature {}",
module_name, field,
signature, import_signature)
));
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported function with signature {} \
incompatible with function import with signature {}",
module_name, field, signature, import_signature
)));
}
dependencies.insert(unsafe { InstanceHandle::from_vmctx(vmctx) });
function_imports.push(VMFunctionImport {
@@ -81,7 +81,8 @@ pub fn link_module(
let import_table = &module.table_plans[index];
if !is_table_compatible(&table, import_table) {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported table incompatible with table import",
"{}/{}: incompatible import type: exported table incompatible with \
table import",
module_name, field,
)));
}
@@ -119,7 +120,8 @@ pub fn link_module(
let import_memory = &module.memory_plans[index];
if !is_memory_compatible(&memory, import_memory) {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported memory incompatible with memory import",
"{}/{}: incompatible import type: exported memory incompatible with \
memory import",
module_name, field
)));
}
@@ -167,7 +169,8 @@ pub fn link_module(
Some(export_value) => match export_value {
Export::Table { .. } | Export::Memory { .. } | Export::Function { .. } => {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported global incompatible with global import",
"{}/{}: incompatible import type: exported global incompatible with \
global import",
module_name, field
)));
}
@@ -179,7 +182,8 @@ pub fn link_module(
let imported_global = module.globals[index];
if !is_global_compatible(&global, &imported_global) {
return Err(LinkError(format!(
"{}/{}: incompatible import type: exported global incompatible with global import",
"{}/{}: incompatible import type: exported global incompatible with \
global import",
module_name, field
)));
}

@@ -486,7 +486,11 @@ where
if block.calling_convention.is_some() {
let new_cc = block.calling_convention.clone();
assert!(cc.is_none() || cc == new_cc, "Can't pass different params to different elements of `br_table` yet");
assert!(
cc.is_none() || cc == new_cc,
"Can't pass different params to different elements of `br_table` \
yet"
);
cc = new_cc;
}
@@ -500,22 +504,22 @@ where
.to_drop
.as_ref()
.map(|t| t.clone().count())
.unwrap_or_default() as u32
.unwrap_or_default() as u32,
);
}
let cc = cc.map(|cc| {
match cc {
let cc = cc
.map(|cc| match cc {
Left(cc) => Left(ctx.serialize_block_args(&cc, max_params)),
Right(cc) => Right(cc),
}
}).unwrap_or_else(||
if max_num_callers.map(|callers| callers <= 1).unwrap_or(false) {
Right(ctx.virtual_calling_convention())
} else {
Left(ctx.serialize_args(max_params))
}
);
})
.unwrap_or_else(|| {
if max_num_callers.map(|callers| callers <= 1).unwrap_or(false) {
Right(ctx.virtual_calling_convention())
} else {
Left(ctx.serialize_args(max_params))
}
});
for target in targets.iter().chain(std::iter::once(&default)).unique() {
let block = blocks.get_mut(&target.target).unwrap();

@@ -181,7 +181,7 @@ pub(crate) fn enc_slice_of_wasi32_uintptr(
}
macro_rules! dec_enc_scalar {
( $ty:ident, $dec_byref:ident, $enc_byref:ident) => {
($ty:ident, $dec_byref:ident, $enc_byref:ident) => {
pub(crate) fn $dec_byref(memory: &mut [u8], ptr: wasi32::uintptr_t) -> Result<wasi::$ty> {
dec_int_byref::<wasi::$ty>(memory, ptr)
}