
This PR implements structured, tooling-friendly optimization remarks with zero cost unless enabled. It adds: - `RemarkEngine` collects finalized remarks within `MLIRContext`. - `MLIRRemarkStreamerBase` abstract class streams them to a backend. - Backends: `MLIRLLVMRemarkStreamer` (bridges to llvm::remarks → YAML/Bitstream) or your own custom streamer. - Optional mirroring to DiagnosticEngine (printAsEmitRemarks + categories). - Off by default; no behavior change unless enabled. Thread-safe; ordering best-effort. ## Overview ``` Passes (reportOptimization*) │ ▼ +-------------------+ | RemarkEngine | collects +-------------------+ │ │ │ mirror │ stream ▼ ▼ emitRemark MLIRRemarkStreamerBase (abstract) │ ├── MLIRLLVMRemarkStreamer → llvm::remarks → YAML | Bitstream └── CustomStreamer → your sink ``` ## Enable the remark engine and plug in LLVM's remark streamer ``` // Enable once per MLIRContext. This uses `MLIRLLVMRemarkStreamer` mlir::remark::enableOptimizationRemarksToFile( ctx, path, llvm::remarks::Format::YAML, cats); ``` ## API to emit remarks ``` // Emit from a pass remark::passed(loc, categoryVectorizer, myPassname1) << "vectorized loop"; remark::missed(loc, categoryUnroll, "MyPass") << remark::reason("not profitable at this size") // Creates structured reason arg << remark::suggest("increase unroll factor to >=4"); // Creates structured suggestion arg remark::passed(loc, categoryVectorizer, myPassname1) << "vectorized loop" << remark::metric("tripCount", 128); // Create structured metric on-the-fly ```
70 lines
2.2 KiB
C++
70 lines
2.2 KiB
C++
#include "mlir/Remark/RemarkStreamer.h"
|
|
#include "mlir/IR/MLIRContext.h"
|
|
#include "mlir/IR/Remarks.h"
|
|
|
|
#include "llvm/Remarks/RemarkSerializer.h"
|
|
#include "llvm/Remarks/RemarkStreamer.h"
|
|
#include "llvm/Support/Error.h"
|
|
#include "llvm/Support/FileSystem.h"
|
|
#include "llvm/Support/ToolOutputFile.h"
|
|
|
|
namespace mlir::remark::detail {
|
|
|
|
FailureOr<std::unique_ptr<MLIRRemarkStreamerBase>>
|
|
LLVMRemarkStreamer::createToFile(llvm::StringRef path,
|
|
llvm::remarks::Format fmt) {
|
|
std::error_code ec;
|
|
// Use error_code ctor; YAML is text. (Bitstream also works fine here.)
|
|
auto f =
|
|
std::make_unique<llvm::ToolOutputFile>(path, ec, llvm::sys::fs::OF_Text);
|
|
if (ec)
|
|
return failure();
|
|
|
|
auto serOr = llvm::remarks::createRemarkSerializer(
|
|
fmt, llvm::remarks::SerializerMode::Separate, f->os());
|
|
if (!serOr) {
|
|
llvm::consumeError(serOr.takeError());
|
|
return failure();
|
|
}
|
|
|
|
auto rs =
|
|
std::make_unique<llvm::remarks::RemarkStreamer>(std::move(*serOr), path);
|
|
|
|
auto impl = std::unique_ptr<LLVMRemarkStreamer>(new LLVMRemarkStreamer());
|
|
impl->remarkStreamer = std::move(rs);
|
|
impl->file = std::move(f);
|
|
return std::unique_ptr<MLIRRemarkStreamerBase>(std::move(impl));
|
|
}
|
|
|
|
void LLVMRemarkStreamer::streamOptimizationRemark(const Remark &remark) {
|
|
if (!remarkStreamer->matchesFilter(remark.getCategoryName()))
|
|
return;
|
|
|
|
// First, convert the diagnostic to a remark.
|
|
llvm::remarks::Remark r = remark.generateRemark();
|
|
// Then, emit the remark through the serializer.
|
|
remarkStreamer->getSerializer().emit(r);
|
|
}
|
|
|
|
LLVMRemarkStreamer::~LLVMRemarkStreamer() {
|
|
if (file && remarkStreamer)
|
|
file->keep();
|
|
}
|
|
} // namespace mlir::remark::detail
|
|
|
|
namespace mlir::remark {
|
|
LogicalResult enableOptimizationRemarksWithLLVMStreamer(
|
|
MLIRContext &ctx, StringRef path, llvm::remarks::Format fmt,
|
|
const RemarkCategories &cat, bool printAsEmitRemarks) {
|
|
|
|
FailureOr<std::unique_ptr<detail::MLIRRemarkStreamerBase>> sOr =
|
|
detail::LLVMRemarkStreamer::createToFile(path, fmt);
|
|
if (failed(sOr))
|
|
return failure();
|
|
|
|
return remark::enableOptimizationRemarks(ctx, std::move(*sOr), cat,
|
|
printAsEmitRemarks);
|
|
}
|
|
|
|
} // namespace mlir::remark
|