use std::iter;
use std::ops::ControlFlow;

use rustc_abi::{BackendRepr, TagEncoding, VariantIdx, Variants, WrappingRange};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::DiagMessage;
use rustc_hir::intravisit::VisitorExt;
use rustc_hir::{AmbigArg, Expr, ExprKind, HirId, LangItem};
use rustc_middle::bug;
use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
use rustc_middle::ty::{
    self, Adt, AdtKind, GenericArgsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
    TypeVisitableExt,
};
use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
use rustc_span::def_id::LocalDefId;
use rustc_span::{Span, Symbol, sym};
use tracing::debug;
use {rustc_ast as ast, rustc_hir as hir};

mod improper_ctypes;

use crate::lints::{
    AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
    AmbiguousWidePointerComparisonsAddrSuggestion, AmbiguousWidePointerComparisonsCastSuggestion,
    AmbiguousWidePointerComparisonsExpectSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
    AtomicOrderingStore, ImproperCTypes, InvalidAtomicOrderingDiag, InvalidNanComparisons,
    InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
    UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons, UsesPowerAlignment,
    VariantSizeDifferencesDiag,
};
use crate::{LateContext, LateLintPass, LintContext, fluent_generated as fluent};

mod literal;

use literal::{int_ty_range, lint_literal, uint_ty_range};

declare_lint! {
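    // NOTE: illustrative doc sketch (not the upstream lint documentation); the example is one
    // plausible way to trigger the lint.
    /// Detects comparisons made useless by limits of the types involved.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust
    /// fn is_in_range(x: u8) -> bool {
    ///     x >= 0 // a `u8` can never be negative, so this is always `true`
    /// }
    /// ```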
    UNUSED_COMPARISONS,
    Warn,
    "comparisons made useless by limits of the types involved"
}

declare_lint! {
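    // NOTE: illustrative doc sketch (not the upstream lint documentation).
    /// Detects literals that are out of range for their type.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust,compile_fail
    /// let x: u8 = 256; // `u8` only holds 0..=255, so this literal overflows
    /// ```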
    OVERFLOWING_LITERALS,
    Deny,
    "literal out of range for its type"
}

declare_lint! {
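    // NOTE: illustrative doc sketch (not the upstream lint documentation); `Message` is a
    // made-up example type.
    /// Detects enums with widely varying variant sizes (allow-by-default).
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust
    /// #[warn(variant_size_differences)]
    /// enum Message {
    ///     Small(u8),
    ///     Large([u8; 1024]), // much larger than every other variant
    /// }
    /// ```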
    VARIANT_SIZE_DIFFERENCES,
    Allow,
    "detects enums with widely varying variant sizes"
}

declare_lint! {
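    // NOTE: illustrative doc sketch (not the upstream lint documentation).
    /// Detects comparisons against NaN, which never behave as intended: NaN is not equal to
    /// anything, including itself.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust
    /// let x = 0.0_f64;
    /// let _ = x == f64::NAN; // always `false`; use `x.is_nan()` instead
    /// ```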
    INVALID_NAN_COMPARISONS,
    Warn,
    "detects invalid floating point NaN comparisons"
}

declare_lint! {
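    // NOTE: illustrative doc sketch (not the upstream lint documentation).
    /// Detects comparisons of wide (fat) pointers, where it is ambiguous whether the address,
    /// the metadata (vtable or length), or both are being compared.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust
    /// let a: &[u8] = &[1, 2, 3];
    /// let b: &[u8] = &[1, 2, 3];
    /// let _ = a as *const [u8] == b as *const [u8]; // compares address *and* length metadata
    /// ```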
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    Warn,
    "detects ambiguous wide pointer comparisons"
}

declare_lint! {
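    // NOTE: illustrative doc sketch (not the upstream lint documentation).
    /// Detects comparisons of function pointers, whose results are unpredictable: the same
    /// function may have several addresses (e.g. across codegen units), and different
    /// functions may share one.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust
    /// fn a() {}
    /// fn b() {}
    /// let _ = a as fn() == b as fn(); // unpredictable function pointer comparison
    /// ```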
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
    Warn,
    "detects unpredictable function pointer comparisons",
    report_in_external_macro
}

#[derive(Copy, Clone, Default)]
pub(crate) struct TypeLimits {
    /// Id of the last visited negated expression
    negated_expr_id: Option<hir::HirId>,
    /// Span of the last visited negated expression
    negated_expr_span: Option<Span>,
}
209
210impl_lint_pass!(TypeLimits => [
211 UNUSED_COMPARISONS,
212 OVERFLOWING_LITERALS,
213 INVALID_NAN_COMPARISONS,
214 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
215 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS
216]);
217
218impl TypeLimits {
219 pub(crate) fn new() -> TypeLimits {
220 TypeLimits { negated_expr_id: None, negated_expr_span: None }
221 }
222}
223
224fn lint_nan<'tcx>(
225 cx: &LateContext<'tcx>,
226 e: &'tcx hir::Expr<'tcx>,
227 binop: hir::BinOpKind,
228 l: &'tcx hir::Expr<'tcx>,
229 r: &'tcx hir::Expr<'tcx>,
230) {
231 fn is_nan(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
232 let expr = expr.peel_blocks().peel_borrows();
233 match expr.kind {
234 ExprKind::Path(qpath) => {
235 let Some(def_id) = cx.typeck_results().qpath_res(&qpath, expr.hir_id).opt_def_id()
236 else {
237 return false;
238 };
239
240 matches!(
241 cx.tcx.get_diagnostic_name(def_id),
242 Some(sym::f16_nan | sym::f32_nan | sym::f64_nan | sym::f128_nan)
243 )
244 }
245 _ => false,
246 }
247 }
248
249 fn eq_ne(
250 e: &hir::Expr<'_>,
251 l: &hir::Expr<'_>,
252 r: &hir::Expr<'_>,
253 f: impl FnOnce(Span, Span) -> InvalidNanComparisonsSuggestion,
254 ) -> InvalidNanComparisons {
255 let suggestion = if let Some(l_span) = l.span.find_ancestor_inside(e.span)
256 && let Some(r_span) = r.span.find_ancestor_inside(e.span)
257 {
258 f(l_span, r_span)
259 } else {
260 InvalidNanComparisonsSuggestion::Spanless
261 };
262
263 InvalidNanComparisons::EqNe { suggestion }
264 }
265
266 let lint = match binop {
267 hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, l) => {
268 eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
269 nan_plus_binop: l_span.until(r_span),
270 float: r_span.shrink_to_hi(),
271 neg: (binop == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()),
272 })
273 }
274 hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, r) => {
275 eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
276 nan_plus_binop: l_span.shrink_to_hi().to(r_span),
277 float: l_span.shrink_to_hi(),
278 neg: (binop == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()),
279 })
280 }
281 hir::BinOpKind::Lt | hir::BinOpKind::Le | hir::BinOpKind::Gt | hir::BinOpKind::Ge
282 if is_nan(cx, l) || is_nan(cx, r) =>
283 {
284 InvalidNanComparisons::LtLeGtGe
285 }
286 _ => return,
287 };
288
289 cx.emit_span_lint(INVALID_NAN_COMPARISONS, e.span, lint);
290}
291
292#[derive(Debug, PartialEq, Copy, Clone)]
293enum ComparisonOp {
294 BinOp(hir::BinOpKind),
295 Other,
296}
297
298fn lint_wide_pointer<'tcx>(
299 cx: &LateContext<'tcx>,
300 e: &'tcx hir::Expr<'tcx>,
301 cmpop: ComparisonOp,
302 l: &'tcx hir::Expr<'tcx>,
303 r: &'tcx hir::Expr<'tcx>,
304) {
    let ptr_unsized = |mut ty: Ty<'tcx>| -> Option<(
        /* number of dereferences needed to reach the pointer */ usize,
        /* method-call modifiers to append (e.g. `.as_ptr()`) */ String,
        /* whether the pointee is `dyn Trait` */ bool,
    )> {
310 let mut refs = 0;
311 while let ty::Ref(_, inner_ty, _) = ty.kind() {
314 ty = *inner_ty;
315 refs += 1;
316 }
317
318 let mut modifiers = String::new();
320 ty = match ty.kind() {
321 ty::RawPtr(ty, _) => *ty,
322 ty::Adt(def, args) if cx.tcx.is_diagnostic_item(sym::NonNull, def.did()) => {
323 modifiers.push_str(".as_ptr()");
324 args.type_at(0)
325 }
326 _ => return None,
327 };
328
329 (!ty.is_sized(cx.tcx, cx.typing_env()))
330 .then(|| (refs, modifiers, matches!(ty.kind(), ty::Dynamic(_, _, ty::Dyn))))
331 };
332
333 let l = l.peel_borrows();
335 let r = r.peel_borrows();
336
337 let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else {
338 return;
339 };
340 let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else {
341 return;
342 };
343
344 let Some((l_ty_refs, l_modifiers, l_inner_ty_is_dyn)) = ptr_unsized(l_ty) else {
345 return;
346 };
347 let Some((r_ty_refs, r_modifiers, r_inner_ty_is_dyn)) = ptr_unsized(r_ty) else {
348 return;
349 };
350
351 let (Some(l_span), Some(r_span)) =
352 (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
353 else {
354 return cx.emit_span_lint(
355 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
356 e.span,
357 AmbiguousWidePointerComparisons::Spanless,
358 );
359 };
360
361 let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
362 let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
363 let is_dyn_comparison = l_inner_ty_is_dyn && r_inner_ty_is_dyn;
364 let via_method_call = matches!(&e.kind, ExprKind::MethodCall(..) | ExprKind::Call(..));
365
366 let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
367 let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
368 let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());
369
370 let deref_left = &*"*".repeat(l_ty_refs);
371 let deref_right = &*"*".repeat(r_ty_refs);
372
373 let l_modifiers = &*l_modifiers;
374 let r_modifiers = &*r_modifiers;
375
376 cx.emit_span_lint(
377 AMBIGUOUS_WIDE_POINTER_COMPARISONS,
378 e.span,
379 if is_eq_ne {
380 AmbiguousWidePointerComparisons::SpanfulEq {
381 addr_metadata_suggestion: (!is_dyn_comparison).then(|| {
382 AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
383 ne,
384 deref_left,
385 deref_right,
386 l_modifiers,
387 r_modifiers,
388 left,
389 middle,
390 right,
391 }
392 }),
393 addr_suggestion: AmbiguousWidePointerComparisonsAddrSuggestion {
394 ne,
395 deref_left,
396 deref_right,
397 l_modifiers,
398 r_modifiers,
399 left,
400 middle,
401 right,
402 },
403 }
404 } else {
405 AmbiguousWidePointerComparisons::SpanfulCmp {
406 cast_suggestion: AmbiguousWidePointerComparisonsCastSuggestion {
407 deref_left,
408 deref_right,
409 l_modifiers,
410 r_modifiers,
411 paren_left: if l_ty_refs != 0 { ")" } else { "" },
412 paren_right: if r_ty_refs != 0 { ")" } else { "" },
413 left_before: (l_ty_refs != 0).then_some(l_span.shrink_to_lo()),
414 left_after: l_span.shrink_to_hi(),
415 right_before: (r_ty_refs != 0).then_some(r_span.shrink_to_lo()),
416 right_after: r_span.shrink_to_hi(),
417 },
418 expect_suggestion: AmbiguousWidePointerComparisonsExpectSuggestion {
419 paren_left: if via_method_call { "" } else { "(" },
420 paren_right: if via_method_call { "" } else { ")" },
421 before: e.span.shrink_to_lo(),
422 after: e.span.shrink_to_hi(),
423 },
424 }
425 },
426 );
427}
428
429fn lint_fn_pointer<'tcx>(
430 cx: &LateContext<'tcx>,
431 e: &'tcx hir::Expr<'tcx>,
432 cmpop: ComparisonOp,
433 l: &'tcx hir::Expr<'tcx>,
434 r: &'tcx hir::Expr<'tcx>,
435) {
436 let peel_refs = |mut ty: Ty<'tcx>| -> (Ty<'tcx>, usize) {
437 let mut refs = 0;
438
439 while let ty::Ref(_, inner_ty, _) = ty.kind() {
440 ty = *inner_ty;
441 refs += 1;
442 }
443
444 (ty, refs)
445 };
446
447 let l = l.peel_borrows();
449 let r = r.peel_borrows();
450
451 let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else { return };
452 let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else { return };
453
454 let (l_ty, l_ty_refs) = peel_refs(l_ty);
457 let (r_ty, r_ty_refs) = peel_refs(r_ty);
458
    if l_ty.is_fn() && r_ty.is_fn() {
        // both operands are function items or function pointers: fall through to the
        // suggestion logic below
460 } else if let ty::Adt(l_def, l_args) = l_ty.kind()
462 && let ty::Adt(r_def, r_args) = r_ty.kind()
463 && cx.tcx.is_lang_item(l_def.did(), LangItem::Option)
464 && cx.tcx.is_lang_item(r_def.did(), LangItem::Option)
465 && let Some(l_some_arg) = l_args.get(0)
466 && let Some(r_some_arg) = r_args.get(0)
467 && l_some_arg.expect_ty().is_fn()
468 && r_some_arg.expect_ty().is_fn()
469 {
470 return cx.emit_span_lint(
472 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
473 e.span,
474 UnpredictableFunctionPointerComparisons::Warn,
475 );
476 } else {
477 return;
479 }
480
481 let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
484
485 if !is_eq_ne {
486 return cx.emit_span_lint(
488 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
489 e.span,
490 UnpredictableFunctionPointerComparisons::Warn,
491 );
492 }
493
494 let (Some(l_span), Some(r_span)) =
495 (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
496 else {
497 return cx.emit_span_lint(
499 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
500 e.span,
501 UnpredictableFunctionPointerComparisons::Warn,
502 );
503 };
504
505 let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
506
507 let deref_left = &*"*".repeat(l_ty_refs);
509 let deref_right = &*"*".repeat(r_ty_refs);
510
511 let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
512 let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
513 let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());
514
    let sugg =
        // If the right operand is not yet a function pointer (i.e. it is a `FnDef`), include
        // its signature so the suggestion can spell out an explicit cast.
        if !r_ty.is_fn_ptr() {
519 let fn_sig = r_ty.fn_sig(cx.tcx);
520
521 UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEqWithCast {
522 ne,
523 fn_sig,
524 deref_left,
525 deref_right,
526 left,
527 middle,
528 right,
529 }
530 } else {
531 UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEq {
532 ne,
533 deref_left,
534 deref_right,
535 left,
536 middle,
537 right,
538 }
539 };
540
541 cx.emit_span_lint(
542 UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
543 e.span,
544 UnpredictableFunctionPointerComparisons::Suggestion { sugg },
545 );
546}
547
548impl<'tcx> LateLintPass<'tcx> for TypeLimits {
549 fn check_lit(&mut self, cx: &LateContext<'tcx>, hir_id: HirId, lit: hir::Lit, negated: bool) {
550 if negated {
551 self.negated_expr_id = Some(hir_id);
552 self.negated_expr_span = Some(lit.span);
553 }
554 lint_literal(cx, self, hir_id, lit.span, &lit, negated);
555 }
556
557 fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) {
558 match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
                // Propagate the negation, unless this negation is itself negated.
                if self.negated_expr_id != Some(e.hir_id) {
562 self.negated_expr_id = Some(expr.hir_id);
563 self.negated_expr_span = Some(e.span);
564 }
565 }
566 hir::ExprKind::Binary(binop, ref l, ref r) => {
567 if is_comparison(binop.node) {
568 if !check_limits(cx, binop.node, l, r) {
569 cx.emit_span_lint(UNUSED_COMPARISONS, e.span, UnusedComparisons);
570 } else {
571 lint_nan(cx, e, binop.node, l, r);
572 let cmpop = ComparisonOp::BinOp(binop.node);
573 lint_wide_pointer(cx, e, cmpop, l, r);
574 lint_fn_pointer(cx, e, cmpop, l, r);
575 }
576 }
577 }
578 hir::ExprKind::Call(path, [l, r])
579 if let ExprKind::Path(ref qpath) = path.kind
580 && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
581 && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
582 && let Some(cmpop) = diag_item_cmpop(diag_item) =>
583 {
584 lint_wide_pointer(cx, e, cmpop, l, r);
585 lint_fn_pointer(cx, e, cmpop, l, r);
586 }
587 hir::ExprKind::MethodCall(_, l, [r], _)
588 if let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
589 && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
590 && let Some(cmpop) = diag_item_cmpop(diag_item) =>
591 {
592 lint_wide_pointer(cx, e, cmpop, l, r);
593 lint_fn_pointer(cx, e, cmpop, l, r);
594 }
595 _ => {}
596 };
597
598 fn is_valid<T: PartialOrd>(binop: hir::BinOpKind, v: T, min: T, max: T) -> bool {
599 match binop {
600 hir::BinOpKind::Lt => v > min && v <= max,
601 hir::BinOpKind::Le => v >= min && v < max,
602 hir::BinOpKind::Gt => v >= min && v < max,
603 hir::BinOpKind::Ge => v > min && v <= max,
604 hir::BinOpKind::Eq | hir::BinOpKind::Ne => v >= min && v <= max,
605 _ => bug!(),
606 }
607 }
608
609 fn rev_binop(binop: hir::BinOpKind) -> hir::BinOpKind {
610 match binop {
611 hir::BinOpKind::Lt => hir::BinOpKind::Gt,
612 hir::BinOpKind::Le => hir::BinOpKind::Ge,
613 hir::BinOpKind::Gt => hir::BinOpKind::Lt,
614 hir::BinOpKind::Ge => hir::BinOpKind::Le,
615 _ => binop,
616 }
617 }
618
619 fn check_limits(
620 cx: &LateContext<'_>,
621 binop: hir::BinOpKind,
622 l: &hir::Expr<'_>,
623 r: &hir::Expr<'_>,
624 ) -> bool {
625 let (lit, expr, swap) = match (&l.kind, &r.kind) {
626 (&hir::ExprKind::Lit(_), _) => (l, r, true),
627 (_, &hir::ExprKind::Lit(_)) => (r, l, false),
628 _ => return true,
629 };
630 let norm_binop = if swap { rev_binop(binop) } else { binop };
633 match *cx.typeck_results().node_type(expr.hir_id).kind() {
634 ty::Int(int_ty) => {
635 let (min, max) = int_ty_range(int_ty);
636 let lit_val: i128 = match lit.kind {
637 hir::ExprKind::Lit(li) => match li.node {
638 ast::LitKind::Int(
639 v,
640 ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed,
641 ) => v.get() as i128,
642 _ => return true,
643 },
644 _ => bug!(),
645 };
646 is_valid(norm_binop, lit_val, min, max)
647 }
648 ty::Uint(uint_ty) => {
649 let (min, max): (u128, u128) = uint_ty_range(uint_ty);
650 let lit_val: u128 = match lit.kind {
651 hir::ExprKind::Lit(li) => match li.node {
652 ast::LitKind::Int(v, _) => v.get(),
653 _ => return true,
654 },
655 _ => bug!(),
656 };
657 is_valid(norm_binop, lit_val, min, max)
658 }
659 _ => true,
660 }
661 }
662
663 fn is_comparison(binop: hir::BinOpKind) -> bool {
664 matches!(
665 binop,
666 hir::BinOpKind::Eq
667 | hir::BinOpKind::Lt
668 | hir::BinOpKind::Le
669 | hir::BinOpKind::Ne
670 | hir::BinOpKind::Ge
671 | hir::BinOpKind::Gt
672 )
673 }
674
675 fn diag_item_cmpop(diag_item: Symbol) -> Option<ComparisonOp> {
676 Some(match diag_item {
677 sym::cmp_ord_max => ComparisonOp::Other,
678 sym::cmp_ord_min => ComparisonOp::Other,
679 sym::ord_cmp_method => ComparisonOp::Other,
680 sym::cmp_partialeq_eq => ComparisonOp::BinOp(hir::BinOpKind::Eq),
681 sym::cmp_partialeq_ne => ComparisonOp::BinOp(hir::BinOpKind::Ne),
682 sym::cmp_partialord_cmp => ComparisonOp::Other,
683 sym::cmp_partialord_ge => ComparisonOp::BinOp(hir::BinOpKind::Ge),
684 sym::cmp_partialord_gt => ComparisonOp::BinOp(hir::BinOpKind::Gt),
685 sym::cmp_partialord_le => ComparisonOp::BinOp(hir::BinOpKind::Le),
686 sym::cmp_partialord_lt => ComparisonOp::BinOp(hir::BinOpKind::Lt),
687 _ => return None,
688 })
689 }
690 }
}

declare_lint! {
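    // NOTE: illustrative doc sketch (not the upstream lint documentation); `takes_str` is a
    // made-up declaration.
    /// Detects use of types that are not FFI-safe in `extern` block declarations.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust
    /// unsafe extern "C" {
    ///     fn takes_str(s: &str); // `&str` has no stable C representation
    /// }
    /// ```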
    IMPROPER_CTYPES,
    Warn,
    "proper use of libc types in foreign modules"
}

declare_lint_pass!(ImproperCTypesDeclarations => [IMPROPER_CTYPES]);

declare_lint! {
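    // NOTE: illustrative doc sketch (not the upstream lint documentation); `callback` is a
    // made-up function.
    /// Detects use of types that are not FFI-safe in `extern "C"` function definitions
    /// written in Rust.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust
    /// pub extern "C" fn callback(slice: &[u8]) -> usize { // `&[u8]` is not FFI-safe
    ///     slice.len()
    /// }
    /// ```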
    IMPROPER_CTYPES_DEFINITIONS,
    Warn,
    "proper use of libc types in foreign item definitions"
}

declare_lint! {
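    // NOTE: illustrative doc sketch (not the upstream lint documentation); `Mixed` is a
    // made-up type, and the lint only fires when compiling for AIX.
    /// Detects `repr(C)` structs whose Rust layout may not match the AIX C ABI because of
    /// the power alignment rule.
    ///
    /// ### Example (illustrative, only lints when targeting AIX)
    ///
    /// ```rust
    /// #[repr(C)]
    /// pub struct Mixed {
    ///     a: i32,
    ///     b: f64, // an `f64` after the first field may be aligned differently by the AIX C ABI
    /// }
    /// ```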
    USES_POWER_ALIGNMENT,
    Warn,
    "detects `repr(C)` structs that do not follow the power alignment rule on AIX"
}

declare_lint_pass!(ImproperCTypesDefinitions => [IMPROPER_CTYPES_DEFINITIONS, USES_POWER_ALIGNMENT]);
805
806#[derive(Clone, Copy)]
807pub(crate) enum CItemKind {
808 Declaration,
809 Definition,
810}
811
812struct ImproperCTypesVisitor<'a, 'tcx> {
813 cx: &'a LateContext<'tcx>,
814 mode: CItemKind,
}

struct CTypesVisitorState<'tcx> {
    /// Types already visited, used to break cycles and avoid repeated work.
    cache: FxHashSet<Ty<'tcx>>,
    /// The original type being checked, before any recursion into fields or pointees.
    base_ty: Ty<'tcx>,
}
824
825enum FfiResult<'tcx> {
826 FfiSafe,
827 FfiPhantom(Ty<'tcx>),
828 FfiUnsafe { ty: Ty<'tcx>, reason: DiagMessage, help: Option<DiagMessage> },
829}
830
831pub(crate) fn nonnull_optimization_guaranteed<'tcx>(
832 tcx: TyCtxt<'tcx>,
833 def: ty::AdtDef<'tcx>,
834) -> bool {
835 tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
}

/// Returns the field of a `repr(transparent)` variant that is not a 1-ZST, if any
/// (a transparent type has at most one such field).
pub(crate) fn transparent_newtype_field<'a, 'tcx>(
841 tcx: TyCtxt<'tcx>,
842 variant: &'a ty::VariantDef,
843) -> Option<&'a ty::FieldDef> {
844 let typing_env = ty::TypingEnv::non_body_analysis(tcx, variant.def_id);
845 variant.fields.iter().find(|field| {
846 let field_ty = tcx.type_of(field.did).instantiate_identity();
847 let is_1zst =
848 tcx.layout_of(typing_env.as_query_input(field_ty)).is_ok_and(|layout| layout.is_1zst());
849 !is_1zst
850 })
}

/// Is this type known never to hold the "null" (all-zero) value, e.g. `&T`, `fn()`, or
/// `NonNull<T>`, so that it can serve as the non-null payload of an `Option`-like enum?
fn ty_is_known_nonnull<'tcx>(
855 tcx: TyCtxt<'tcx>,
856 typing_env: ty::TypingEnv<'tcx>,
857 ty: Ty<'tcx>,
858 mode: CItemKind,
859) -> bool {
860 let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);
861
862 match ty.kind() {
863 ty::FnPtr(..) => true,
864 ty::Ref(..) => true,
865 ty::Adt(def, _) if def.is_box() && matches!(mode, CItemKind::Definition) => true,
866 ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => {
867 let marked_non_null = nonnull_optimization_guaranteed(tcx, *def);
868
869 if marked_non_null {
870 return true;
871 }
872
873 if def.is_unsafe_cell() || def.is_unsafe_pinned() {
875 return false;
876 }
877
878 def.variants()
879 .iter()
880 .filter_map(|variant| transparent_newtype_field(tcx, variant))
881 .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args), mode))
882 }
883 ty::Pat(base, pat) => {
884 ty_is_known_nonnull(tcx, typing_env, *base, mode)
885 || pat_ty_is_known_nonnull(tcx, typing_env, *pat)
886 }
887 _ => false,
888 }
889}
890
891fn pat_ty_is_known_nonnull<'tcx>(
892 tcx: TyCtxt<'tcx>,
893 typing_env: ty::TypingEnv<'tcx>,
894 pat: ty::Pattern<'tcx>,
895) -> bool {
896 Option::unwrap_or_default(
897 try {
898 match *pat {
899 ty::PatternKind::Range { start, end } => {
900 let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
901 let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;

                    // Known nonnull if the range starts above zero and does not wrap back
                    // around through zero.
                    start > 0 && end >= start
906 }
907 ty::PatternKind::Or(patterns) => {
908 patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
909 }
910 }
911 },
912 )
}

/// Given a type known to be non-null, returns the "nullable" type that an `Option`-like
/// enum wrapping it may be treated as for FFI purposes (e.g. `&T` becomes `*const T`),
/// recursing through `repr(transparent)` newtypes and pattern types.
fn get_nullable_type<'tcx>(
918 tcx: TyCtxt<'tcx>,
919 typing_env: ty::TypingEnv<'tcx>,
920 ty: Ty<'tcx>,
921) -> Option<Ty<'tcx>> {
922 let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);
923
924 Some(match *ty.kind() {
925 ty::Adt(field_def, field_args) => {
926 let inner_field_ty = {
927 let mut first_non_zst_ty =
928 field_def.variants().iter().filter_map(|v| transparent_newtype_field(tcx, v));
929 debug_assert_eq!(
930 first_non_zst_ty.clone().count(),
931 1,
932 "Wrong number of fields for transparent type"
933 );
934 first_non_zst_ty
935 .next_back()
936 .expect("No non-zst fields in transparent type.")
937 .ty(tcx, field_args)
938 };
939 return get_nullable_type(tcx, typing_env, inner_field_ty);
940 }
941 ty::Pat(base, ..) => return get_nullable_type(tcx, typing_env, base),
942 ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => ty,
943 ty::Ref(_region, ty, mutbl) => Ty::new_ptr(tcx, ty, mutbl),
946 ty::FnPtr(..) => ty,
949 ref unhandled => {
952 debug!(
953 "get_nullable_type: Unhandled scalar kind: {:?} while checking {:?}",
954 unhandled, ty
955 );
956 return None;
957 }
958 })
}

/// Returns `true` for 1-ZSTs that cannot disturb the niche optimization of an `Option`-like
/// enum: empty structs, empty enums and the unit tuple, excluding `#[non_exhaustive]` types.
fn is_niche_optimization_candidate<'tcx>(
966 tcx: TyCtxt<'tcx>,
967 typing_env: ty::TypingEnv<'tcx>,
968 ty: Ty<'tcx>,
969) -> bool {
970 if tcx.layout_of(typing_env.as_query_input(ty)).is_ok_and(|layout| !layout.is_1zst()) {
971 return false;
972 }
973
974 match ty.kind() {
975 ty::Adt(ty_def, _) => {
976 let non_exhaustive = ty_def.is_variant_list_non_exhaustive();
977 let empty = (ty_def.is_struct() && ty_def.all_fields().next().is_none())
978 || (ty_def.is_enum() && ty_def.variants().is_empty());
979
980 !non_exhaustive && empty
981 }
982 ty::Tuple(tys) => tys.is_empty(),
983 _ => false,
984 }
}

/// Checks whether an `Option`-like enum qualifies for the "nullable pointer optimization":
/// if it does, returns the FFI-safe type the enum may be treated as, otherwise `None`.
pub(crate) fn repr_nullable_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
    ckind: CItemKind,
) -> Option<Ty<'tcx>> {
    debug!("repr_nullable_ptr(tcx, ty = {:?})", ty);
998 match ty.kind() {
999 ty::Adt(ty_def, args) => {
1000 let field_ty = match &ty_def.variants().raw[..] {
1001 [var_one, var_two] => match (&var_one.fields.raw[..], &var_two.fields.raw[..]) {
1002 ([], [field]) | ([field], []) => field.ty(tcx, args),
1003 ([field1], [field2]) => {
1004 let ty1 = field1.ty(tcx, args);
1005 let ty2 = field2.ty(tcx, args);
1006
1007 if is_niche_optimization_candidate(tcx, typing_env, ty1) {
1008 ty2
1009 } else if is_niche_optimization_candidate(tcx, typing_env, ty2) {
1010 ty1
1011 } else {
1012 return None;
1013 }
1014 }
1015 _ => return None,
1016 },
1017 _ => return None,
1018 };
1019
1020 if !ty_is_known_nonnull(tcx, typing_env, field_ty, ckind) {
1021 return None;
1022 }
1023
            // The nonnull optimization should make the enum exactly the same size as its
            // nonnull payload; anything else means the optimization was not applied.
            let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, typing_env).ok();
1028 if !compute_size_skeleton(ty)?.same_size(compute_size_skeleton(field_ty)?) {
1029 bug!("improper_ctypes: Option nonnull optimization not applied?");
1030 }
1031
1032 let field_ty_layout = tcx.layout_of(typing_env.as_query_input(field_ty));
1034 if field_ty_layout.is_err() && !field_ty.has_non_region_param() {
1035 bug!("should be able to compute the layout of non-polymorphic type");
1036 }
1037
1038 let field_ty_abi = &field_ty_layout.ok()?.backend_repr;
1039 if let BackendRepr::Scalar(field_ty_scalar) = field_ty_abi {
1040 match field_ty_scalar.valid_range(&tcx) {
1041 WrappingRange { start: 0, end }
1042 if end == field_ty_scalar.size(&tcx).unsigned_int_max() - 1 =>
1043 {
1044 return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
1045 }
1046 WrappingRange { start: 1, .. } => {
1047 return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
1048 }
1049 WrappingRange { start, end } => {
1050 unreachable!("Unhandled start and end range: ({}, {})", start, end)
1051 }
1052 };
1053 }
1054 None
1055 }
1056 ty::Pat(base, pat) => get_nullable_type_from_pat(tcx, typing_env, *base, *pat),
1057 _ => None,
1058 }
1059}
1060
1061fn get_nullable_type_from_pat<'tcx>(
1062 tcx: TyCtxt<'tcx>,
1063 typing_env: ty::TypingEnv<'tcx>,
1064 base: Ty<'tcx>,
1065 pat: ty::Pattern<'tcx>,
1066) -> Option<Ty<'tcx>> {
1067 match *pat {
1068 ty::PatternKind::Range { .. } => get_nullable_type(tcx, typing_env, base),
1069 ty::PatternKind::Or(patterns) => {
1070 let first = get_nullable_type_from_pat(tcx, typing_env, base, patterns[0])?;
1071 for &pat in &patterns[1..] {
1072 assert_eq!(first, get_nullable_type_from_pat(tcx, typing_env, base, pat)?);
1073 }
1074 Some(first)
1075 }
1076 }
1077}
1078
1079impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
1080 fn check_for_array_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
1082 if let ty::Array(..) = ty.kind() {
1083 self.emit_ffi_unsafe_type_lint(
1084 ty,
1085 sp,
1086 fluent::lint_improper_ctypes_array_reason,
1087 Some(fluent::lint_improper_ctypes_array_help),
1088 );
1089 true
1090 } else {
1091 false
1092 }
1093 }
1094
1095 fn check_field_type_for_ffi(
1097 &self,
1098 acc: &mut CTypesVisitorState<'tcx>,
1099 field: &ty::FieldDef,
1100 args: GenericArgsRef<'tcx>,
1101 ) -> FfiResult<'tcx> {
1102 let field_ty = field.ty(self.cx.tcx, args);
1103 let field_ty = self
1104 .cx
1105 .tcx
1106 .try_normalize_erasing_regions(self.cx.typing_env(), field_ty)
1107 .unwrap_or(field_ty);
1108 self.check_type_for_ffi(acc, field_ty)
1109 }
1110
1111 fn check_variant_for_ffi(
1113 &self,
1114 acc: &mut CTypesVisitorState<'tcx>,
1115 ty: Ty<'tcx>,
1116 def: ty::AdtDef<'tcx>,
1117 variant: &ty::VariantDef,
1118 args: GenericArgsRef<'tcx>,
1119 ) -> FfiResult<'tcx> {
1120 use FfiResult::*;
1121 let transparent_with_all_zst_fields = if def.repr().transparent() {
1122 if let Some(field) = transparent_newtype_field(self.cx.tcx, variant) {
1123 match self.check_field_type_for_ffi(acc, field, args) {
1125 FfiUnsafe { ty, .. } if ty.is_unit() => (),
1126 r => return r,
1127 }
1128
1129 false
            } else {
                // Transparent newtype whose fields are all 1-ZSTs: reported below as an
                // FFI-unsafe zero-sized struct.
                true
            }
1135 } else {
1136 false
1137 };
1138
1139 let mut all_phantom = !variant.fields.is_empty();
1141 for field in &variant.fields {
1142 all_phantom &= match self.check_field_type_for_ffi(acc, field, args) {
1143 FfiSafe => false,
1144 FfiUnsafe { ty, .. } if ty.is_unit() => false,
1146 FfiPhantom(..) => true,
1147 r @ FfiUnsafe { .. } => return r,
1148 }
1149 }
1150
1151 if all_phantom {
1152 FfiPhantom(ty)
1153 } else if transparent_with_all_zst_fields {
1154 FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_struct_zst, help: None }
1155 } else {
1156 FfiSafe
1157 }
    }

    /// Checks whether `ty` is "FFI-safe": whether it has a defined representation that
    /// foreign code using the declared ABI can rely on.
    fn check_type_for_ffi(
1163 &self,
1164 acc: &mut CTypesVisitorState<'tcx>,
1165 ty: Ty<'tcx>,
1166 ) -> FfiResult<'tcx> {
1167 use FfiResult::*;
1168
1169 let tcx = self.cx.tcx;
1170
        // Protect against infinite recursion for recursive types (e.g. `struct S(*mut S);`)
        // and avoid re-checking types that were already visited.
        if !acc.cache.insert(ty) {
            return FfiSafe;
        }
1178
1179 match *ty.kind() {
1180 ty::Adt(def, args) => {
1181 if let Some(boxed) = ty.boxed_ty()
1182 && matches!(self.mode, CItemKind::Definition)
1183 {
1184 if boxed.is_sized(tcx, self.cx.typing_env()) {
1185 return FfiSafe;
1186 } else {
1187 return FfiUnsafe {
1188 ty,
1189 reason: fluent::lint_improper_ctypes_box,
1190 help: None,
1191 };
1192 }
1193 }
1194 if def.is_phantom_data() {
1195 return FfiPhantom(ty);
1196 }
1197 match def.adt_kind() {
1198 AdtKind::Struct | AdtKind::Union => {
1199 if let Some(sym::cstring_type | sym::cstr_type) =
1200 tcx.get_diagnostic_name(def.did())
1201 && !acc.base_ty.is_mutable_ptr()
1202 {
1203 return FfiUnsafe {
1204 ty,
1205 reason: fluent::lint_improper_ctypes_cstr_reason,
1206 help: Some(fluent::lint_improper_ctypes_cstr_help),
1207 };
1208 }
1209
1210 if !def.repr().c() && !def.repr().transparent() {
1211 return FfiUnsafe {
1212 ty,
1213 reason: if def.is_struct() {
1214 fluent::lint_improper_ctypes_struct_layout_reason
1215 } else {
1216 fluent::lint_improper_ctypes_union_layout_reason
1217 },
1218 help: if def.is_struct() {
1219 Some(fluent::lint_improper_ctypes_struct_layout_help)
1220 } else {
1221 Some(fluent::lint_improper_ctypes_union_layout_help)
1222 },
1223 };
1224 }
1225
1226 if def.non_enum_variant().field_list_has_applicable_non_exhaustive() {
1227 return FfiUnsafe {
1228 ty,
1229 reason: if def.is_struct() {
1230 fluent::lint_improper_ctypes_struct_non_exhaustive
1231 } else {
1232 fluent::lint_improper_ctypes_union_non_exhaustive
1233 },
1234 help: None,
1235 };
1236 }
1237
1238 if def.non_enum_variant().fields.is_empty() {
1239 return FfiUnsafe {
1240 ty,
1241 reason: if def.is_struct() {
1242 fluent::lint_improper_ctypes_struct_fieldless_reason
1243 } else {
1244 fluent::lint_improper_ctypes_union_fieldless_reason
1245 },
1246 help: if def.is_struct() {
1247 Some(fluent::lint_improper_ctypes_struct_fieldless_help)
1248 } else {
1249 Some(fluent::lint_improper_ctypes_union_fieldless_help)
1250 },
1251 };
1252 }
1253
1254 self.check_variant_for_ffi(acc, ty, def, def.non_enum_variant(), args)
1255 }
1256 AdtKind::Enum => {
1257 if def.variants().is_empty() {
1258 return FfiSafe;
1260 }
1261 if !def.repr().c() && !def.repr().transparent() && def.repr().int.is_none()
1264 {
1265 if let Some(ty) =
1267 repr_nullable_ptr(self.cx.tcx, self.cx.typing_env(), ty, self.mode)
1268 {
1269 return self.check_type_for_ffi(acc, ty);
1270 }
1271
1272 return FfiUnsafe {
1273 ty,
1274 reason: fluent::lint_improper_ctypes_enum_repr_reason,
1275 help: Some(fluent::lint_improper_ctypes_enum_repr_help),
1276 };
1277 }
1278
1279 use improper_ctypes::check_non_exhaustive_variant;
1280
1281 let non_exhaustive = def.variant_list_has_applicable_non_exhaustive();
1282 let ret = def.variants().iter().try_for_each(|variant| {
1284 check_non_exhaustive_variant(non_exhaustive, variant)
1285 .map_break(|reason| FfiUnsafe { ty, reason, help: None })?;
1286
1287 match self.check_variant_for_ffi(acc, ty, def, variant, args) {
1288 FfiSafe => ControlFlow::Continue(()),
1289 r => ControlFlow::Break(r),
1290 }
1291 });
1292 if let ControlFlow::Break(result) = ret {
1293 return result;
1294 }
1295
1296 FfiSafe
1297 }
1298 }
1299 }
1300
1301 ty::Char => FfiUnsafe {
1302 ty,
1303 reason: fluent::lint_improper_ctypes_char_reason,
1304 help: Some(fluent::lint_improper_ctypes_char_help),
1305 },
1306
1307 ty::Pat(base, ..) => self.check_type_for_ffi(acc, base),
1310
1311 ty::Bool | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Never => FfiSafe,
1313
1314 ty::Slice(_) => FfiUnsafe {
1315 ty,
1316 reason: fluent::lint_improper_ctypes_slice_reason,
1317 help: Some(fluent::lint_improper_ctypes_slice_help),
1318 },
1319
1320 ty::Dynamic(..) => {
1321 FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_dyn, help: None }
1322 }
1323
1324 ty::Str => FfiUnsafe {
1325 ty,
1326 reason: fluent::lint_improper_ctypes_str_reason,
1327 help: Some(fluent::lint_improper_ctypes_str_help),
1328 },
1329
1330 ty::Tuple(..) => FfiUnsafe {
1331 ty,
1332 reason: fluent::lint_improper_ctypes_tuple_reason,
1333 help: Some(fluent::lint_improper_ctypes_tuple_help),
1334 },
1335
1336 ty::RawPtr(ty, _) | ty::Ref(_, ty, _)
1337 if {
1338 matches!(self.mode, CItemKind::Definition)
1339 && ty.is_sized(self.cx.tcx, self.cx.typing_env())
1340 } =>
1341 {
1342 FfiSafe
1343 }
1344
1345 ty::RawPtr(ty, _)
1346 if match ty.kind() {
1347 ty::Tuple(tuple) => tuple.is_empty(),
1348 _ => false,
1349 } =>
1350 {
1351 FfiSafe
1352 }
1353
1354 ty::RawPtr(ty, _) | ty::Ref(_, ty, _) => self.check_type_for_ffi(acc, ty),
1355
1356 ty::Array(inner_ty, _) => self.check_type_for_ffi(acc, inner_ty),
1357
1358 ty::FnPtr(sig_tys, hdr) => {
1359 let sig = sig_tys.with(hdr);
1360 if sig.abi().is_rustic_abi() {
1361 return FfiUnsafe {
1362 ty,
1363 reason: fluent::lint_improper_ctypes_fnptr_reason,
1364 help: Some(fluent::lint_improper_ctypes_fnptr_help),
1365 };
1366 }
1367
1368 let sig = tcx.instantiate_bound_regions_with_erased(sig);
1369 for arg in sig.inputs() {
1370 match self.check_type_for_ffi(acc, *arg) {
1371 FfiSafe => {}
1372 r => return r,
1373 }
1374 }
1375
1376 let ret_ty = sig.output();
1377 if ret_ty.is_unit() {
1378 return FfiSafe;
1379 }
1380
1381 self.check_type_for_ffi(acc, ret_ty)
1382 }
1383
1384 ty::Foreign(..) => FfiSafe,
1385
1386 ty::Alias(ty::Opaque, ..) => {
1389 FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_opaque, help: None }
1390 }
1391
1392 ty::Param(..) | ty::Alias(ty::Projection | ty::Inherent, ..)
1395 if matches!(self.mode, CItemKind::Definition) =>
1396 {
1397 FfiSafe
1398 }
1399
1400 ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),
1401
1402 ty::Param(..)
1403 | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..)
1404 | ty::Infer(..)
1405 | ty::Bound(..)
1406 | ty::Error(_)
1407 | ty::Closure(..)
1408 | ty::CoroutineClosure(..)
1409 | ty::Coroutine(..)
1410 | ty::CoroutineWitness(..)
1411 | ty::Placeholder(..)
1412 | ty::FnDef(..) => bug!("unexpected type in foreign function: {:?}", ty),
1413 }
1414 }
1415
1416 fn emit_ffi_unsafe_type_lint(
1417 &mut self,
1418 ty: Ty<'tcx>,
1419 sp: Span,
1420 note: DiagMessage,
1421 help: Option<DiagMessage>,
1422 ) {
1423 let lint = match self.mode {
1424 CItemKind::Declaration => IMPROPER_CTYPES,
1425 CItemKind::Definition => IMPROPER_CTYPES_DEFINITIONS,
1426 };
1427 let desc = match self.mode {
1428 CItemKind::Declaration => "block",
1429 CItemKind::Definition => "fn",
1430 };
1431 let span_note = if let ty::Adt(def, _) = ty.kind()
1432 && let Some(sp) = self.cx.tcx.hir_span_if_local(def.did())
1433 {
1434 Some(sp)
1435 } else {
1436 None
1437 };
1438 self.cx.emit_span_lint(
1439 lint,
1440 sp,
1441 ImproperCTypes { ty, desc, label: sp, help, note, span_note },
1442 );
1443 }
1444
1445 fn check_for_opaque_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
1446 struct ProhibitOpaqueTypes;
1447 impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for ProhibitOpaqueTypes {
1448 type Result = ControlFlow<Ty<'tcx>>;
1449
1450 fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
1451 if !ty.has_opaque_types() {
1452 return ControlFlow::Continue(());
1453 }
1454
1455 if let ty::Alias(ty::Opaque, ..) = ty.kind() {
1456 ControlFlow::Break(ty)
1457 } else {
1458 ty.super_visit_with(self)
1459 }
1460 }
1461 }
1462
1463 if let Some(ty) = self
1464 .cx
1465 .tcx
1466 .try_normalize_erasing_regions(self.cx.typing_env(), ty)
1467 .unwrap_or(ty)
1468 .visit_with(&mut ProhibitOpaqueTypes)
1469 .break_value()
1470 {
1471 self.emit_ffi_unsafe_type_lint(ty, sp, fluent::lint_improper_ctypes_opaque, None);
1472 true
1473 } else {
1474 false
1475 }
1476 }
1477
1478 fn check_type_for_ffi_and_report_errors(
1479 &mut self,
1480 sp: Span,
1481 ty: Ty<'tcx>,
1482 is_static: bool,
1483 is_return_type: bool,
1484 ) {
1485 if self.check_for_opaque_ty(sp, ty) {
1486 return;
1488 }
1489
1490 let ty = self.cx.tcx.try_normalize_erasing_regions(self.cx.typing_env(), ty).unwrap_or(ty);
1491
1492 if !is_static && self.check_for_array_ty(sp, ty) {
1496 return;
1497 }
1498
1499 if is_return_type && ty.is_unit() {
1503 return;
1504 }
1505
1506 let mut acc = CTypesVisitorState { cache: FxHashSet::default(), base_ty: ty };
1507 match self.check_type_for_ffi(&mut acc, ty) {
1508 FfiResult::FfiSafe => {}
1509 FfiResult::FfiPhantom(ty) => {
1510 self.emit_ffi_unsafe_type_lint(
1511 ty,
1512 sp,
1513 fluent::lint_improper_ctypes_only_phantomdata,
1514 None,
1515 );
1516 }
1517 FfiResult::FfiUnsafe { ty, reason, help } => {
1518 self.emit_ffi_unsafe_type_lint(ty, sp, reason, help);
1519 }
1520 }
    }

    /// For a function whose own ABI is a rustic one, only the fn-pointer types with an
    /// external ABI that appear nested in the signature need checking.
    fn check_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
1528 let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
1529 let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);
1530
1531 for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
1532 for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(input_hir, *input_ty) {
1533 self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, false);
1534 }
1535 }
1536
1537 if let hir::FnRetTy::Return(ret_hir) = decl.output {
1538 for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(ret_hir, sig.output()) {
1539 self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, true);
1540 }
1541 }
    }

    /// For a function with an external ABI, check every argument type and the return type.
    fn check_foreign_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
1546 let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
1547 let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);
1548
1549 for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
1550 self.check_type_for_ffi_and_report_errors(input_hir.span, *input_ty, false, false);
1551 }
1552
1553 if let hir::FnRetTy::Return(ret_hir) = decl.output {
1554 self.check_type_for_ffi_and_report_errors(ret_hir.span, sig.output(), false, true);
1555 }
1556 }
1557
1558 fn check_foreign_static(&mut self, id: hir::OwnerId, span: Span) {
1559 let ty = self.cx.tcx.type_of(id).instantiate_identity();
1560 self.check_type_for_ffi_and_report_errors(span, ty, true, false);
    }

    /// Collects every fn-pointer type with an external ABI nested inside `ty` (e.g. the
    /// `extern "C" fn()` inside `Option<extern "C" fn()>`), paired with the span of the
    /// corresponding HIR type.
    fn find_fn_ptr_ty_with_external_abi(
1567 &self,
1568 hir_ty: &hir::Ty<'tcx>,
1569 ty: Ty<'tcx>,
1570 ) -> Vec<(Ty<'tcx>, Span)> {
1571 struct FnPtrFinder<'tcx> {
1572 spans: Vec<Span>,
1573 tys: Vec<Ty<'tcx>>,
1574 }
1575
1576 impl<'tcx> hir::intravisit::Visitor<'_> for FnPtrFinder<'tcx> {
1577 fn visit_ty(&mut self, ty: &'_ hir::Ty<'_, AmbigArg>) {
1578 debug!(?ty);
1579 if let hir::TyKind::FnPtr(hir::FnPtrTy { abi, .. }) = ty.kind
1580 && !abi.is_rustic_abi()
1581 {
1582 self.spans.push(ty.span);
1583 }
1584
1585 hir::intravisit::walk_ty(self, ty)
1586 }
1587 }
1588
1589 impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for FnPtrFinder<'tcx> {
1590 type Result = ();
1591
1592 fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
1593 if let ty::FnPtr(_, hdr) = ty.kind()
1594 && !hdr.abi.is_rustic_abi()
1595 {
1596 self.tys.push(ty);
1597 }
1598
1599 ty.super_visit_with(self)
1600 }
1601 }
1602
1603 let mut visitor = FnPtrFinder { spans: Vec::new(), tys: Vec::new() };
1604 ty.visit_with(&mut visitor);
1605 visitor.visit_ty_unambig(hir_ty);
1606
1607 iter::zip(visitor.tys.drain(..), visitor.spans.drain(..)).collect()
1608 }
1609}
1610
1611impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDeclarations {
1612 fn check_foreign_item(&mut self, cx: &LateContext<'tcx>, it: &hir::ForeignItem<'tcx>) {
1613 let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Declaration };
1614 let abi = cx.tcx.hir_get_foreign_abi(it.hir_id());
1615
1616 match it.kind {
1617 hir::ForeignItemKind::Fn(sig, _, _) => {
1618 if abi.is_rustic_abi() {
1619 vis.check_fn(it.owner_id.def_id, sig.decl)
1620 } else {
1621 vis.check_foreign_fn(it.owner_id.def_id, sig.decl);
1622 }
1623 }
1624 hir::ForeignItemKind::Static(ty, _, _) if !abi.is_rustic_abi() => {
1625 vis.check_foreign_static(it.owner_id, ty.span);
1626 }
1627 hir::ForeignItemKind::Static(..) | hir::ForeignItemKind::Type => (),
1628 }
1629 }
1630}
1631
1632impl ImproperCTypesDefinitions {
1633 fn check_ty_maybe_containing_foreign_fnptr<'tcx>(
1634 &mut self,
1635 cx: &LateContext<'tcx>,
1636 hir_ty: &'tcx hir::Ty<'_>,
1637 ty: Ty<'tcx>,
1638 ) {
1639 let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
1640 for (fn_ptr_ty, span) in vis.find_fn_ptr_ty_with_external_abi(hir_ty, ty) {
1641 vis.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, true, false);
1642 }
1643 }
1644
1645 fn check_arg_for_power_alignment<'tcx>(
1646 &mut self,
1647 cx: &LateContext<'tcx>,
1648 ty: Ty<'tcx>,
1649 ) -> bool {
        assert!(cx.tcx.sess.target.os == "aix");
        // The power alignment rule only applies to floating-point members wider than
        // 4 bytes (e.g. `f64`), or to aggregates that recursively contain one.
        if ty.is_floating_point() && ty.primitive_size(cx.tcx).bytes() > 4 {
1658 return true;
1659 } else if let Adt(adt_def, _) = ty.kind()
1660 && adt_def.is_struct()
1661 && adt_def.repr().c()
1662 && !adt_def.repr().packed()
1663 && adt_def.repr().align.is_none()
1664 {
1665 let struct_variant = adt_def.variant(VariantIdx::ZERO);
1666 for struct_field in &struct_variant.fields {
1670 let field_ty = cx.tcx.type_of(struct_field.did).instantiate_identity();
1671 if self.check_arg_for_power_alignment(cx, field_ty) {
1672 return true;
1673 }
1674 }
1675 }
1676 return false;
1677 }
1678
1679 fn check_struct_for_power_alignment<'tcx>(
1680 &mut self,
1681 cx: &LateContext<'tcx>,
1682 item: &'tcx hir::Item<'tcx>,
1683 ) {
1684 let adt_def = cx.tcx.adt_def(item.owner_id.to_def_id());
1685 if adt_def.repr().c()
1688 && !adt_def.repr().packed()
1689 && adt_def.repr().align.is_none()
1690 && cx.tcx.sess.target.os == "aix"
            && adt_def.all_fields().next().is_some()
        {
            let struct_variant_data = item.expect_struct().2;
            // Only fields after the first one can be laid out differently by the AIX power
            // alignment rule, so the first field is skipped.
            for field_def in struct_variant_data.fields().iter().skip(1) {
1695 let def_id = field_def.def_id;
1699 let ty = cx.tcx.type_of(def_id).instantiate_identity();
1700 if self.check_arg_for_power_alignment(cx, ty) {
1701 cx.emit_span_lint(USES_POWER_ALIGNMENT, field_def.span, UsesPowerAlignment);
1702 }
1703 }
1704 }
1705 }
1706}
1707
1708impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDefinitions {
1716 fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
1717 match item.kind {
1718 hir::ItemKind::Static(_, _, ty, _)
1719 | hir::ItemKind::Const(_, _, ty, _)
1720 | hir::ItemKind::TyAlias(_, _, ty) => {
1721 self.check_ty_maybe_containing_foreign_fnptr(
1722 cx,
1723 ty,
1724 cx.tcx.type_of(item.owner_id).instantiate_identity(),
1725 );
1726 }
1727 hir::ItemKind::Fn { .. } => {}
1729 hir::ItemKind::Struct(..) => {
1732 self.check_struct_for_power_alignment(cx, item);
1733 }
1734 hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) => {}
1736 hir::ItemKind::Impl(..)
1738 | hir::ItemKind::TraitAlias(..)
1739 | hir::ItemKind::Trait(..)
1740 | hir::ItemKind::GlobalAsm { .. }
1741 | hir::ItemKind::ForeignMod { .. }
1742 | hir::ItemKind::Mod(..)
1743 | hir::ItemKind::Macro(..)
1744 | hir::ItemKind::Use(..)
1745 | hir::ItemKind::ExternCrate(..) => {}
1746 }
1747 }
1748
1749 fn check_field_def(&mut self, cx: &LateContext<'tcx>, field: &'tcx hir::FieldDef<'tcx>) {
1750 self.check_ty_maybe_containing_foreign_fnptr(
1751 cx,
1752 field.ty,
1753 cx.tcx.type_of(field.def_id).instantiate_identity(),
1754 );
1755 }
1756
1757 fn check_fn(
1758 &mut self,
1759 cx: &LateContext<'tcx>,
1760 kind: hir::intravisit::FnKind<'tcx>,
1761 decl: &'tcx hir::FnDecl<'_>,
1762 _: &'tcx hir::Body<'_>,
1763 _: Span,
1764 id: LocalDefId,
1765 ) {
1766 use hir::intravisit::FnKind;
1767
1768 let abi = match kind {
1769 FnKind::ItemFn(_, _, header, ..) => header.abi,
1770 FnKind::Method(_, sig, ..) => sig.header.abi,
1771 _ => return,
1772 };
1773
1774 let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
1775 if abi.is_rustic_abi() {
1776 vis.check_fn(id, decl);
1777 } else {
1778 vis.check_foreign_fn(id, decl);
1779 }
1780 }
1781}
1782
1783declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]);
1784
1785impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
1786 fn check_item(&mut self, cx: &LateContext<'_>, it: &hir::Item<'_>) {
1787 if let hir::ItemKind::Enum(_, _, ref enum_definition) = it.kind {
1788 let t = cx.tcx.type_of(it.owner_id).instantiate_identity();
1789 let ty = cx.tcx.erase_regions(t);
1790 let Ok(layout) = cx.layout_of(ty) else { return };
1791 let Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, variants, .. } =
1792 &layout.variants
1793 else {
1794 return;
1795 };
1796
1797 let tag_size = tag.size(&cx.tcx).bytes();
1798
1799 debug!(
1800 "enum `{}` is {} bytes large with layout:\n{:#?}",
1801 t,
1802 layout.size.bytes(),
1803 layout
1804 );
1805
1806 let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
1807 .map(|(variant, variant_layout)| {
1808 let bytes = variant_layout.size.bytes().saturating_sub(tag_size);
1810
1811 debug!("- variant `{}` is {} bytes large", variant.ident, bytes);
1812 bytes
1813 })
1814 .enumerate()
1815 .fold((0, 0, 0), |(l, s, li), (idx, size)| {
1816 if size > l {
1817 (size, l, idx)
1818 } else if size > s {
1819 (l, size, li)
1820 } else {
1821 (l, s, li)
1822 }
1823 });
1824
1825 if largest > slargest * 3 && slargest > 0 {
1828 cx.emit_span_lint(
1829 VARIANT_SIZE_DIFFERENCES,
1830 enum_definition.variants[largest_index].span,
1831 VariantSizeDifferencesDiag { largest },
1832 );
1833 }
1834 }
1835 }
}

declare_lint! {
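    // NOTE: illustrative doc sketch (not the upstream lint documentation).
    /// Detects atomic orderings that are never valid for the operation they are passed to,
    /// e.g. `Ordering::Release` on a load or `Ordering::Relaxed` on a fence.
    ///
    /// ### Example (illustrative)
    ///
    /// ```rust,compile_fail
    /// use std::sync::atomic::{AtomicUsize, Ordering};
    ///
    /// let a = AtomicUsize::new(0);
    /// let _ = a.load(Ordering::Release); // `Release` is not a valid load ordering
    /// ```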
    INVALID_ATOMIC_ORDERING,
    Deny,
    "usage of invalid atomic ordering in atomic operations and memory fences"
}

declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);
1881
1882impl InvalidAtomicOrdering {
1883 fn inherent_atomic_method_call<'hir>(
1884 cx: &LateContext<'_>,
1885 expr: &Expr<'hir>,
1886 recognized_names: &[Symbol], ) -> Option<(Symbol, &'hir [Expr<'hir>])> {
1888 const ATOMIC_TYPES: &[Symbol] = &[
1889 sym::AtomicBool,
1890 sym::AtomicPtr,
1891 sym::AtomicUsize,
1892 sym::AtomicU8,
1893 sym::AtomicU16,
1894 sym::AtomicU32,
1895 sym::AtomicU64,
1896 sym::AtomicU128,
1897 sym::AtomicIsize,
1898 sym::AtomicI8,
1899 sym::AtomicI16,
1900 sym::AtomicI32,
1901 sym::AtomicI64,
1902 sym::AtomicI128,
1903 ];
1904 if let ExprKind::MethodCall(method_path, _, args, _) = &expr.kind
1905 && recognized_names.contains(&method_path.ident.name)
1906 && let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
1907 && let Some(impl_did) = cx.tcx.impl_of_method(m_def_id)
1908 && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
            // Only inherent impls (the methods on the atomic types themselves) are of
            // interest here, not trait impls.
            && cx.tcx.trait_id_of_impl(impl_did).is_none()
1911 && let parent = cx.tcx.parent(adt.did())
1912 && cx.tcx.is_diagnostic_item(sym::atomic_mod, parent)
1913 && ATOMIC_TYPES.contains(&cx.tcx.item_name(adt.did()))
1914 {
1915 return Some((method_path.ident.name, args));
1916 }
1917 None
1918 }
1919
1920 fn match_ordering(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<Symbol> {
1921 let ExprKind::Path(ref ord_qpath) = ord_arg.kind else { return None };
1922 let did = cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()?;
1923 let tcx = cx.tcx;
1924 let atomic_ordering = tcx.get_diagnostic_item(sym::Ordering);
1925 let name = tcx.item_name(did);
1926 let parent = tcx.parent(did);
1927 [sym::Relaxed, sym::Release, sym::Acquire, sym::AcqRel, sym::SeqCst].into_iter().find(
1928 |&ordering| {
1929 name == ordering
                    && (Some(parent) == atomic_ordering
                        // `did` may resolve to the variant's constructor rather than the
                        // variant itself, in which case the enum is the grandparent.
                        || tcx.opt_parent(parent) == atomic_ordering)
1933 },
1934 )
1935 }
1936
1937 fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
1938 if let Some((method, args)) =
1939 Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store])
1940 && let Some((ordering_arg, invalid_ordering)) = match method {
1941 sym::load => Some((&args[0], sym::Release)),
1942 sym::store => Some((&args[1], sym::Acquire)),
1943 _ => None,
1944 }
1945 && let Some(ordering) = Self::match_ordering(cx, ordering_arg)
1946 && (ordering == invalid_ordering || ordering == sym::AcqRel)
1947 {
1948 if method == sym::load {
1949 cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingLoad);
1950 } else {
1951 cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingStore);
1952 };
1953 }
1954 }
1955
1956 fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
1957 if let ExprKind::Call(func, args) = expr.kind
1958 && let ExprKind::Path(ref func_qpath) = func.kind
1959 && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
1960 && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::fence | sym::compiler_fence))
1961 && Self::match_ordering(cx, &args[0]) == Some(sym::Relaxed)
1962 {
1963 cx.emit_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, AtomicOrderingFence);
1964 }
1965 }
1966
1967 fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
1968 let Some((method, args)) = Self::inherent_atomic_method_call(
1969 cx,
1970 expr,
1971 &[sym::fetch_update, sym::compare_exchange, sym::compare_exchange_weak],
1972 ) else {
1973 return;
1974 };
1975
1976 let fail_order_arg = match method {
1977 sym::fetch_update => &args[1],
1978 sym::compare_exchange | sym::compare_exchange_weak => &args[3],
1979 _ => return,
1980 };
1981
1982 let Some(fail_ordering) = Self::match_ordering(cx, fail_order_arg) else { return };
1983
1984 if matches!(fail_ordering, sym::Release | sym::AcqRel) {
1985 cx.emit_span_lint(
1986 INVALID_ATOMIC_ORDERING,
1987 fail_order_arg.span,
1988 InvalidAtomicOrderingDiag { method, fail_order_arg_span: fail_order_arg.span },
1989 );
1990 }
1991 }
1992}
1993
1994impl<'tcx> LateLintPass<'tcx> for InvalidAtomicOrdering {
1995 fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
1996 Self::check_atomic_load_store(cx, expr);
1997 Self::check_memory_fence(cx, expr);
1998 Self::check_atomic_compare_exchange(cx, expr);
1999 }
2000}