Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .ci/pytorch/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -216,11 +216,13 @@ fi
# if you're not careful. Check this if you made some changes and the
# ASAN test is not working
if [[ "$BUILD_ENVIRONMENT" == *asan* ]]; then
export ASAN_OPTIONS=detect_leaks=0:symbolize=1:detect_stack_use_after_return=true:strict_init_order=true:detect_odr_violation=1:detect_container_overflow=0:check_initialization_order=true:debug=true
export ASAN_OPTIONS=detect_leaks=1:symbolize=1:detect_stack_use_after_return=true:strict_init_order=true:detect_odr_violation=1:detect_container_overflow=0:check_initialization_order=true:debug=true:fast_unwind_on_malloc=1:print_suppressions=0
if [[ "$BUILD_ENVIRONMENT" == *cuda* ]]; then
export ASAN_OPTIONS="${ASAN_OPTIONS}:protect_shadow_gap=0"
fi
export UBSAN_OPTIONS=print_stacktrace=1:suppressions=$PWD/ubsan.supp
# Suppress some hard to solve indirect leaks
export LSAN_OPTIONS="suppressions=$PWD/lsan.supp"
export PYTORCH_TEST_WITH_ASAN=1
export PYTORCH_TEST_WITH_UBSAN=1
# TODO: Figure out how to avoid hard-coding these paths
Expand Down
31 changes: 31 additions & 0 deletions lsan.supp
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
leak:pybind11::cpp_function
leak:PyMem_RawMalloc
leak:unicode_resize
leak:PyObject_Malloc
leak:PyByteArray_Resize
leak:numpy
leak:list_append
leak:unicodeobject
leak:obmalloc
leak:gcmodule
leak:listobject
leak:bytesobject
leak:PyThread_allocate_lock
leak:sccache
leak:rustc-1.61.0
leak:gcc/x86_64-linux-gnu/11
leak:x86_64-linux-gnu-gcc-11
leak:libbfd
leak:x86_64-linux-gnu-ld.bfd
leak:git
leak:libio
leak:unknown module
leak:g++
leak:conda-linux-gnu-ld
leak:crypto
leak:torch::detail::(anonymous namespace)::get_set_cached_attr
leak:torch::jit::tensorexpr::TensorExprKernel::preAllocIntermediateBufs
leak:optree
leak:python
leak:torch::tensors::initialize_aten_types
leak:libclang_rt
2 changes: 1 addition & 1 deletion torch/csrc/Module.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@ static PyObject* THPModule_initExtension(
END_HANDLE_TH_ERRORS
}

// The idea behind these two functions is to make it easy to test if we are
// The idea behind these functions is to make it easy to test if we are
// built with ASAN: they're designed not to crash if ASAN is not enabled, but
to trigger ASAN if it is enabled. This lets us run a "canary" test which
// checks if our build environment is misconfigured.
Expand Down
4 changes: 1 addition & 3 deletions torch/csrc/autograd/VariableTypeManual.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -456,7 +456,7 @@ static Tensor detach(c10::DispatchKeySet ks, const Tensor& self) {
// NB: we can't make detach() a normal view operator because the codegen
// generates allow_tensor_metadata_change = True for them. In the future we
// should have an option for this in the codegen.
auto result = as_view(
return as_view(
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Was that a leak?!

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Previous run reported possible leaks here

/* base */ self,
/* output */ out,
/* is_bw_differentiable */ false,
Expand All @@ -465,8 +465,6 @@ static Tensor detach(c10::DispatchKeySet ks, const Tensor& self) {
/* rev_view_func */ nullptr,
/* creation_meta */ CreationMeta::DEFAULT,
/*allow_tensor_metadata_change=*/false);

return result;
}

static Tensor _fw_primal(
Expand Down
4 changes: 2 additions & 2 deletions torch/csrc/autograd/variable.h
Original file line number Diff line number Diff line change
Expand Up @@ -844,7 +844,7 @@ inline Variable make_variable_non_differentiable_view(
/*version_counter=*/impl::version_counter(base),
/*allow_tensor_metadata_change=*/allow_tensor_metadata_change);
data_impl_copy->set_autograd_meta(nullptr);
return Variable(data_impl_copy);
return Variable(std::move(data_impl_copy));
}
return Variable();
}
Expand Down Expand Up @@ -903,7 +903,7 @@ inline Variable make_variable(
/*allow_tensor_metadata_change=*/allow_tensor_metadata_change);
data_impl_copy->set_autograd_meta(std::make_unique<AutogradMeta>(
data_impl_copy.get(), false, std::move(gradient_edge)));
return Variable(data_impl_copy);
return Variable(std::move(data_impl_copy));
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Same, are these leaks?!

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Previous run reported possible leaks here; these should be false positives. Nevertheless, the move avoids one copy operation.

}
return Variable();
}
Expand Down
Loading