
Commit a1844ec

Rollup merge of rust-lang#143708 - epage:pretty, r=compiler-errors
fix: Include frontmatter in `-Zunpretty` output

In the implementation (rust-lang#140035), this was left as an open question for the tracking issue (rust-lang#136889). My assumption is that this behavior should be carried over. The test was carried over from rust-lang#137193, which was superseded by rust-lang#140035. Thankfully, either way, `-Zunpretty` is unstable, so we can always change this even if we stabilize frontmatter.
2 parents 0a64bfd + 24a4ad5 commit a1844ec
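For context, frontmatter (tracking issue rust-lang#136889) is a metadata block at the very top of a Rust source file, primarily meant for cargo script manifests. A hypothetical file using it would look roughly like this (syntax as proposed at the time of this commit; details may change before stabilization):

```rust
#!/usr/bin/env cargo
---
[dependencies]
regex = "1"
---

fn main() {
    println!("hello from a cargo script");
}
```

With this commit, `-Zunpretty` output includes the `---` block instead of dropping it.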

File tree

6 files changed: +17 −20 lines

clippy_lints/src/undocumented_unsafe_blocks.rs

Lines changed: 2 additions & 2 deletions
@@ -9,7 +9,7 @@ use clippy_utils::visitors::{Descend, for_each_expr};
 use hir::HirId;
 use rustc_hir as hir;
 use rustc_hir::{Block, BlockCheckMode, ItemKind, Node, UnsafeSource};
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::{LateContext, LateLintPass, LintContext};
 use rustc_session::impl_lint_pass;
 use rustc_span::{BytePos, Pos, RelativeBytePos, Span, SyntaxContext};
@@ -746,7 +746,7 @@ fn text_has_safety_comment(src: &str, line_starts: &[RelativeBytePos], start_pos
     loop {
         if line.starts_with("/*") {
             let src = &src[line_start..line_starts.last().unwrap().to_usize()];
-            let mut tokens = tokenize(src);
+            let mut tokens = tokenize(src, FrontmatterAllowed::No);
             return (src[..tokens.next().unwrap().len as usize]
                 .to_ascii_uppercase()
                 .contains("SAFETY:")
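The mechanical change repeated across all six files is the same: `rustc_lexer::tokenize` now takes a second argument saying whether a frontmatter block may appear at the start of the input. These clippy call sites lex mid-file snippets rather than whole files, so they all pass `FrontmatterAllowed::No`. A minimal sketch of the updated API, assuming `FrontmatterAllowed` is a two-variant enum (`Yes`/`No`) as the diff suggests:

```rust
use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};

/// Count comment tokens in a source snippet.
fn count_comments(src: &str) -> usize {
    // A mid-file snippet can never begin with frontmatter, so lex with
    // FrontmatterAllowed::No; lexing a whole file would pass ::Yes.
    tokenize(src, FrontmatterAllowed::No)
        .filter(|t| matches!(t.kind, TokenKind::LineComment { .. } | TokenKind::BlockComment { .. }))
        .count()
}
```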

clippy_lints/src/utils/format_args_collector.rs

Lines changed: 2 additions & 2 deletions
@@ -3,7 +3,7 @@ use clippy_utils::source::SpanRangeExt;
 use itertools::Itertools;
 use rustc_ast::{Crate, Expr, ExprKind, FormatArgs};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::{EarlyContext, EarlyLintPass};
 use rustc_session::impl_lint_pass;
 use rustc_span::{Span, hygiene};
@@ -82,7 +82,7 @@ fn has_span_from_proc_macro(cx: &EarlyContext<'_>, args: &FormatArgs) -> bool {
         .all(|sp| {
             sp.check_source_text(cx, |src| {
                 // text should be either `, name` or `, name =`
-                let mut iter = tokenize(src).filter(|t| {
+                let mut iter = tokenize(src, FrontmatterAllowed::No).filter(|t| {
                     !matches!(
                         t.kind,
                         TokenKind::LineComment { .. } | TokenKind::BlockComment { .. } | TokenKind::Whitespace

clippy_utils/src/consts.rs

Lines changed: 4 additions & 8 deletions
@@ -15,7 +15,7 @@ use rustc_hir::def::{DefKind, Res};
 use rustc_hir::{
     BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, PatExpr, PatExprKind, QPath, UnOp,
 };
-use rustc_lexer::tokenize;
+use rustc_lexer::{FrontmatterAllowed, tokenize};
 use rustc_lint::LateContext;
 use rustc_middle::mir::ConstValue;
 use rustc_middle::mir::interpret::{Scalar, alloc_range};
@@ -304,9 +304,7 @@ pub fn lit_to_mir_constant<'tcx>(lit: &LitKind, ty: Option<Ty<'tcx>>) -> Constan
     match *lit {
         LitKind::Str(ref is, _) => Constant::Str(is.to_string()),
         LitKind::Byte(b) => Constant::Int(u128::from(b)),
-        LitKind::ByteStr(ref s, _) | LitKind::CStr(ref s, _) => {
-            Constant::Binary(s.as_byte_str().to_vec())
-        }
+        LitKind::ByteStr(ref s, _) | LitKind::CStr(ref s, _) => Constant::Binary(s.as_byte_str().to_vec()),
         LitKind::Char(c) => Constant::Char(c),
         LitKind::Int(n, _) => Constant::Int(n.get()),
         LitKind::Float(ref is, LitFloatType::Suffixed(fty)) => match fty {
@@ -568,9 +566,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> {
             } else {
                 match &lit.node {
                     LitKind::Str(is, _) => Some(is.is_empty()),
-                    LitKind::ByteStr(s, _) | LitKind::CStr(s, _) => {
-                        Some(s.as_byte_str().is_empty())
-                    }
+                    LitKind::ByteStr(s, _) | LitKind::CStr(s, _) => Some(s.as_byte_str().is_empty()),
                     _ => None,
                 }
             }
@@ -715,7 +711,7 @@ impl<'tcx> ConstEvalCtxt<'tcx> {
             && let Some(src) = src.as_str()
         {
             use rustc_lexer::TokenKind::{BlockComment, LineComment, OpenBrace, Semi, Whitespace};
-            if !tokenize(src)
+            if !tokenize(src, FrontmatterAllowed::No)
                 .map(|t| t.kind)
                 .filter(|t| !matches!(t, Whitespace | LineComment { .. } | BlockComment { .. } | Semi))
                 .eq([OpenBrace])

clippy_utils/src/hir_utils.rs

Lines changed: 2 additions & 2 deletions
@@ -12,7 +12,7 @@ use rustc_hir::{
     Pat, PatExpr, PatExprKind, PatField, PatKind, Path, PathSegment, PrimTy, QPath, Stmt, StmtKind, StructTailExpr,
     TraitBoundModifiers, Ty, TyKind, TyPat, TyPatKind,
 };
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::LateContext;
 use rustc_middle::ty::TypeckResults;
 use rustc_span::{BytePos, ExpnKind, MacroKind, Symbol, SyntaxContext, sym};
@@ -686,7 +686,7 @@ fn reduce_exprkind<'hir>(cx: &LateContext<'_>, kind: &'hir ExprKind<'hir>) -> &'
         // `{}` => `()`
         ([], None)
             if block.span.check_source_text(cx, |src| {
-                tokenize(src)
+                tokenize(src, FrontmatterAllowed::No)
                     .map(|t| t.kind)
                     .filter(|t| {
                         !matches!(

clippy_utils/src/lib.rs

Lines changed: 3 additions & 3 deletions
@@ -106,7 +106,7 @@ use rustc_hir::{
     Param, Pat, PatExpr, PatExprKind, PatKind, Path, PathSegment, QPath, Stmt, StmtKind, TraitFn, TraitItem,
     TraitItemKind, TraitRef, TyKind, UnOp, def,
 };
-use rustc_lexer::{TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, TokenKind, tokenize};
 use rustc_lint::{LateContext, Level, Lint, LintContext};
 use rustc_middle::hir::nested_filter;
 use rustc_middle::hir::place::PlaceBase;
@@ -2764,7 +2764,7 @@ pub fn expr_use_ctxt<'tcx>(cx: &LateContext<'tcx>, e: &Expr<'tcx>) -> ExprUseCtx
 /// Tokenizes the input while keeping the text associated with each token.
 pub fn tokenize_with_text(s: &str) -> impl Iterator<Item = (TokenKind, &str, InnerSpan)> {
     let mut pos = 0;
-    tokenize(s).map(move |t| {
+    tokenize(s, FrontmatterAllowed::No).map(move |t| {
         let end = pos + t.len;
         let range = pos as usize..end as usize;
         let inner = InnerSpan::new(range.start, range.end);
@@ -2779,7 +2779,7 @@ pub fn span_contains_comment(sm: &SourceMap, span: Span) -> bool {
     let Ok(snippet) = sm.span_to_snippet(span) else {
         return false;
     };
-    return tokenize(&snippet).any(|token| {
+    return tokenize(&snippet, FrontmatterAllowed::No).any(|token| {
         matches!(
             token.kind,
             TokenKind::BlockComment { .. } | TokenKind::LineComment { .. }
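`tokenize_with_text` is public clippy_utils API, so its callers are unaffected; only the internal lexer call changes. A hypothetical usage, showing the `(kind, text, span)` triples it yields:

```rust
// Hypothetical caller; tokenize_with_text is defined in clippy_utils.
for (kind, text, span) in tokenize_with_text("let x = 1; // note") {
    println!("{kind:?} {text:?} at {}..{}", span.start, span.end);
}
```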

clippy_utils/src/source.rs

Lines changed: 4 additions & 3 deletions
@@ -7,7 +7,7 @@ use std::sync::Arc;
 use rustc_ast::{LitKind, StrStyle};
 use rustc_errors::Applicability;
 use rustc_hir::{BlockCheckMode, Expr, ExprKind, UnsafeSource};
-use rustc_lexer::{LiteralKind, TokenKind, tokenize};
+use rustc_lexer::{FrontmatterAllowed, LiteralKind, TokenKind, tokenize};
 use rustc_lint::{EarlyContext, LateContext};
 use rustc_middle::ty::TyCtxt;
 use rustc_session::Session;
@@ -277,7 +277,7 @@ fn map_range(
 }

 fn ends_with_line_comment_or_broken(text: &str) -> bool {
-    let Some(last) = tokenize(text).last() else {
+    let Some(last) = tokenize(text, FrontmatterAllowed::No).last() else {
         return false;
     };
     match last.kind {
@@ -310,7 +310,8 @@ fn with_leading_whitespace_inner(lines: &[RelativeBytePos], src: &str, range: Ra
         && ends_with_line_comment_or_broken(&start[prev_start..])
         && let next_line = lines.partition_point(|&pos| pos.to_usize() < range.end)
         && let next_start = lines.get(next_line).map_or(src.len(), |&x| x.to_usize())
-        && tokenize(src.get(range.end..next_start)?).any(|t| !matches!(t.kind, TokenKind::Whitespace))
+        && tokenize(src.get(range.end..next_start)?, FrontmatterAllowed::No)
+            .any(|t| !matches!(t.kind, TokenKind::Whitespace))
     {
         Some(range.start)
     } else {
