use std::iter;
use std::ops::ControlFlow;

use rustc_abi::{BackendRepr, TagEncoding, VariantIdx, Variants, WrappingRange};
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::DiagMessage;
use rustc_hir::intravisit::VisitorExt;
use rustc_hir::{AmbigArg, Expr, ExprKind, HirId, LangItem};
use rustc_middle::bug;
use rustc_middle::ty::layout::{LayoutOf, SizeSkeleton};
use rustc_middle::ty::{
    self, Adt, AdtKind, GenericArgsRef, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable,
    TypeVisitableExt,
};
use rustc_session::{declare_lint, declare_lint_pass, impl_lint_pass};
use rustc_span::def_id::LocalDefId;
use rustc_span::{Span, Symbol, sym};
use tracing::debug;
use {rustc_ast as ast, rustc_hir as hir};

mod improper_ctypes;

use crate::lints::{
    AmbiguousWidePointerComparisons, AmbiguousWidePointerComparisonsAddrMetadataSuggestion,
    AmbiguousWidePointerComparisonsAddrSuggestion, AmbiguousWidePointerComparisonsCastSuggestion,
    AmbiguousWidePointerComparisonsExpectSuggestion, AtomicOrderingFence, AtomicOrderingLoad,
    AtomicOrderingStore, ImproperCTypes, InvalidAtomicOrderingDiag, InvalidNanComparisons,
    InvalidNanComparisonsSuggestion, UnpredictableFunctionPointerComparisons,
    UnpredictableFunctionPointerComparisonsSuggestion, UnusedComparisons, UsesPowerAlignment,
    VariantSizeDifferencesDiag,
};
use crate::{LateContext, LateLintPass, LintContext, fluent_generated as fluent};

mod literal;

use literal::{int_ty_range, lint_literal, uint_ty_range};

declare_lint! {
    UNUSED_COMPARISONS,
    Warn,
    "comparisons made useless by limits of the types involved"
}

declare_lint! {
    OVERFLOWING_LITERALS,
    Deny,
    "literal out of range for its type"
}

declare_lint! {
    VARIANT_SIZE_DIFFERENCES,
    Allow,
    "detects enums with widely varying variant sizes"
}

declare_lint! {
    INVALID_NAN_COMPARISONS,
    Warn,
    "detects invalid floating point NaN comparisons"
}

declare_lint! {
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    Warn,
    "detects ambiguous wide pointer comparisons"
}

declare_lint! {
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
    Warn,
    "detects unpredictable function pointer comparisons",
    report_in_external_macro
}

#[derive(Copy, Clone, Default)]
pub(crate) struct TypeLimits {
    negated_expr_id: Option<hir::HirId>,
    negated_expr_span: Option<Span>,
}

impl_lint_pass!(TypeLimits => [
    UNUSED_COMPARISONS,
    OVERFLOWING_LITERALS,
    INVALID_NAN_COMPARISONS,
    AMBIGUOUS_WIDE_POINTER_COMPARISONS,
    UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS
]);

impl TypeLimits {
    pub(crate) fn new() -> TypeLimits {
        TypeLimits { negated_expr_id: None, negated_expr_span: None }
    }
}

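/// Lints comparisons against a floating-point NaN constant, which never behave the way the
/// operator suggests: `x == f32::NAN` is always false and `x != f32::NAN` is always true.
/// A minimal, illustrative sketch (not from a test suite) of code this fires on:
///
/// ```ignore (illustrative)
/// let x = 1.0_f32;
/// let _ = x == f32::NAN; // always false; the lint suggests `x.is_nan()` instead
/// ```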
fn lint_nan<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    binop: hir::BinOpKind,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    fn is_nan(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
        let expr = expr.peel_blocks().peel_borrows();
        match expr.kind {
            ExprKind::Path(qpath) => {
                let Some(def_id) = cx.typeck_results().qpath_res(&qpath, expr.hir_id).opt_def_id()
                else {
                    return false;
                };

                matches!(
                    cx.tcx.get_diagnostic_name(def_id),
                    Some(sym::f16_nan | sym::f32_nan | sym::f64_nan | sym::f128_nan)
                )
            }
            _ => false,
        }
    }

    fn eq_ne(
        e: &hir::Expr<'_>,
        l: &hir::Expr<'_>,
        r: &hir::Expr<'_>,
        f: impl FnOnce(Span, Span) -> InvalidNanComparisonsSuggestion,
    ) -> InvalidNanComparisons {
        let suggestion = if let Some(l_span) = l.span.find_ancestor_inside(e.span)
            && let Some(r_span) = r.span.find_ancestor_inside(e.span)
        {
            f(l_span, r_span)
        } else {
            InvalidNanComparisonsSuggestion::Spanless
        };

        InvalidNanComparisons::EqNe { suggestion }
    }

    let lint = match binop {
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, l) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.until(r_span),
                float: r_span.shrink_to_hi(),
                neg: (binop == hir::BinOpKind::Ne).then(|| r_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Eq | hir::BinOpKind::Ne if is_nan(cx, r) => {
            eq_ne(e, l, r, |l_span, r_span| InvalidNanComparisonsSuggestion::Spanful {
                nan_plus_binop: l_span.shrink_to_hi().to(r_span),
                float: l_span.shrink_to_hi(),
                neg: (binop == hir::BinOpKind::Ne).then(|| l_span.shrink_to_lo()),
            })
        }
        hir::BinOpKind::Lt | hir::BinOpKind::Le | hir::BinOpKind::Gt | hir::BinOpKind::Ge
            if is_nan(cx, l) || is_nan(cx, r) =>
        {
            InvalidNanComparisons::LtLeGtGe
        }
        _ => return,
    };

    cx.emit_span_lint(INVALID_NAN_COMPARISONS, e.span, lint);
}

#[derive(Debug, PartialEq, Copy, Clone)]
enum ComparisonOp {
    BinOp(hir::BinOpKind),
    Other,
}

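/// Lints `==`/`!=`/ordering comparisons between wide pointers (pointers to unsized types such
/// as `dyn Trait` or slices), where both the address *and* the metadata (vtable pointer or
/// length) are compared, which is rarely what the user intends. An illustrative sketch of code
/// this fires on:
///
/// ```ignore (illustrative)
/// let a: *const dyn std::fmt::Debug = &1u8;
/// let b: *const dyn std::fmt::Debug = &1u8;
/// let _ = a == b; // compares the data pointer and the vtable pointer
/// ```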
fn lint_wide_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    let ptr_unsized = |mut ty: Ty<'tcx>| -> Option<(
        usize,
        String,
        bool,
    )> {
        let mut refs = 0;
        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        let mut modifiers = String::new();
        ty = match ty.kind() {
            ty::RawPtr(ty, _) => *ty,
            ty::Adt(def, args) if cx.tcx.is_diagnostic_item(sym::NonNull, def.did()) => {
                modifiers.push_str(".as_ptr()");
                args.type_at(0)
            }
            _ => return None,
        };

        (!ty.is_sized(cx.tcx, cx.typing_env()))
            .then(|| (refs, modifiers, matches!(ty.kind(), ty::Dynamic(_, _, ty::Dyn))))
    };

    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else {
        return;
    };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else {
        return;
    };

    let Some((l_ty_refs, l_modifiers, l_inner_ty_is_dyn)) = ptr_unsized(l_ty) else {
        return;
    };
    let Some((r_ty_refs, r_modifiers, r_inner_ty_is_dyn)) = ptr_unsized(r_ty) else {
        return;
    };

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            AMBIGUOUS_WIDE_POINTER_COMPARISONS,
            e.span,
            AmbiguousWidePointerComparisons::Spanless,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };
    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));
    let is_dyn_comparison = l_inner_ty_is_dyn && r_inner_ty_is_dyn;
    let via_method_call = matches!(&e.kind, ExprKind::MethodCall(..) | ExprKind::Call(..));

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let l_modifiers = &*l_modifiers;
    let r_modifiers = &*r_modifiers;

    cx.emit_span_lint(
        AMBIGUOUS_WIDE_POINTER_COMPARISONS,
        e.span,
        if is_eq_ne {
            AmbiguousWidePointerComparisons::SpanfulEq {
                addr_metadata_suggestion: (!is_dyn_comparison).then(|| {
                    AmbiguousWidePointerComparisonsAddrMetadataSuggestion {
                        ne,
                        deref_left,
                        deref_right,
                        l_modifiers,
                        r_modifiers,
                        left,
                        middle,
                        right,
                    }
                }),
                addr_suggestion: AmbiguousWidePointerComparisonsAddrSuggestion {
                    ne,
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    left,
                    middle,
                    right,
                },
            }
        } else {
            AmbiguousWidePointerComparisons::SpanfulCmp {
                cast_suggestion: AmbiguousWidePointerComparisonsCastSuggestion {
                    deref_left,
                    deref_right,
                    l_modifiers,
                    r_modifiers,
                    paren_left: if l_ty_refs != 0 { ")" } else { "" },
                    paren_right: if r_ty_refs != 0 { ")" } else { "" },
                    left_before: (l_ty_refs != 0).then_some(l_span.shrink_to_lo()),
                    left_after: l_span.shrink_to_hi(),
                    right_before: (r_ty_refs != 0).then_some(r_span.shrink_to_lo()),
                    right_after: r_span.shrink_to_hi(),
                },
                expect_suggestion: AmbiguousWidePointerComparisonsExpectSuggestion {
                    paren_left: if via_method_call { "" } else { "(" },
                    paren_right: if via_method_call { "" } else { ")" },
                    before: e.span.shrink_to_lo(),
                    after: e.span.shrink_to_hi(),
                },
            }
        },
    );
}

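/// Lints comparisons of function pointers (including `Option<fn()>`), whose results are
/// unpredictable: the "same" function can end up with several addresses after codegen, and
/// distinct but identical functions can be merged. An illustrative sketch of code this fires on:
///
/// ```ignore (illustrative)
/// fn f() {}
/// let (a, b) = (f as fn(), f as fn());
/// let _ = a == b; // not guaranteed to be true
/// ```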
fn lint_fn_pointer<'tcx>(
    cx: &LateContext<'tcx>,
    e: &'tcx hir::Expr<'tcx>,
    cmpop: ComparisonOp,
    l: &'tcx hir::Expr<'tcx>,
    r: &'tcx hir::Expr<'tcx>,
) {
    let peel_refs = |mut ty: Ty<'tcx>| -> (Ty<'tcx>, usize) {
        let mut refs = 0;

        while let ty::Ref(_, inner_ty, _) = ty.kind() {
            ty = *inner_ty;
            refs += 1;
        }

        (ty, refs)
    };

    let l = l.peel_borrows();
    let r = r.peel_borrows();

    let Some(l_ty) = cx.typeck_results().expr_ty_opt(l) else { return };
    let Some(r_ty) = cx.typeck_results().expr_ty_opt(r) else { return };

    let (l_ty, l_ty_refs) = peel_refs(l_ty);
    let (r_ty, r_ty_refs) = peel_refs(r_ty);

    if l_ty.is_fn() && r_ty.is_fn() {
        // Both operands are plain function pointers: handled by the suggestion logic below.
    } else if let ty::Adt(l_def, l_args) = l_ty.kind()
        && let ty::Adt(r_def, r_args) = r_ty.kind()
        && cx.tcx.is_lang_item(l_def.did(), LangItem::Option)
        && cx.tcx.is_lang_item(r_def.did(), LangItem::Option)
        && let Some(l_some_arg) = l_args.get(0)
        && let Some(r_some_arg) = r_args.get(0)
        && l_some_arg.expect_ty().is_fn()
        && r_some_arg.expect_ty().is_fn()
    {
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    } else {
        return;
    }

    let is_eq_ne = matches!(cmpop, ComparisonOp::BinOp(hir::BinOpKind::Eq | hir::BinOpKind::Ne));

    if !is_eq_ne {
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    }

    let (Some(l_span), Some(r_span)) =
        (l.span.find_ancestor_inside(e.span), r.span.find_ancestor_inside(e.span))
    else {
        return cx.emit_span_lint(
            UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
            e.span,
            UnpredictableFunctionPointerComparisons::Warn,
        );
    };

    let ne = if cmpop == ComparisonOp::BinOp(hir::BinOpKind::Ne) { "!" } else { "" };

    let deref_left = &*"*".repeat(l_ty_refs);
    let deref_right = &*"*".repeat(r_ty_refs);

    let left = e.span.shrink_to_lo().until(l_span.shrink_to_lo());
    let middle = l_span.shrink_to_hi().until(r_span.shrink_to_lo());
    let right = r_span.shrink_to_hi().until(e.span.shrink_to_hi());

    let sugg =
        if !r_ty.is_fn_ptr() {
            let fn_sig = r_ty.fn_sig(cx.tcx);

            UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEqWithCast {
                ne,
                fn_sig,
                deref_left,
                deref_right,
                left,
                middle,
                right,
            }
        } else {
            UnpredictableFunctionPointerComparisonsSuggestion::FnAddrEq {
                ne,
                deref_left,
                deref_right,
                left,
                middle,
                right,
            }
        };

    cx.emit_span_lint(
        UNPREDICTABLE_FUNCTION_POINTER_COMPARISONS,
        e.span,
        UnpredictableFunctionPointerComparisons::Suggestion { sugg },
    );
}

impl<'tcx> LateLintPass<'tcx> for TypeLimits {
    fn check_lit(&mut self, cx: &LateContext<'tcx>, hir_id: HirId, lit: hir::Lit, negated: bool) {
        if negated {
            self.negated_expr_id = Some(hir_id);
            self.negated_expr_span = Some(lit.span);
        }
        lint_literal(cx, self, hir_id, lit.span, &lit, negated);
    }

    fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx hir::Expr<'tcx>) {
        match e.kind {
            hir::ExprKind::Unary(hir::UnOp::Neg, expr) => {
                if self.negated_expr_id != Some(e.hir_id) {
                    self.negated_expr_id = Some(expr.hir_id);
                    self.negated_expr_span = Some(e.span);
                }
            }
            hir::ExprKind::Binary(binop, ref l, ref r) => {
                if is_comparison(binop.node) {
                    if !check_limits(cx, binop.node, l, r) {
                        cx.emit_span_lint(UNUSED_COMPARISONS, e.span, UnusedComparisons);
                    } else {
                        lint_nan(cx, e, binop.node, l, r);
                        let cmpop = ComparisonOp::BinOp(binop.node);
                        lint_wide_pointer(cx, e, cmpop, l, r);
                        lint_fn_pointer(cx, e, cmpop, l, r);
                    }
                }
            }
            hir::ExprKind::Call(path, [l, r])
                if let ExprKind::Path(ref qpath) = path.kind
                    && let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            hir::ExprKind::MethodCall(_, l, [r], _)
                if let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
                    && let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
                    && let Some(cmpop) = diag_item_cmpop(diag_item) =>
            {
                lint_wide_pointer(cx, e, cmpop, l, r);
                lint_fn_pointer(cx, e, cmpop, l, r);
            }
            _ => {}
        };

        fn is_valid<T: PartialOrd>(binop: hir::BinOpKind, v: T, min: T, max: T) -> bool {
            match binop {
                hir::BinOpKind::Lt => v > min && v <= max,
                hir::BinOpKind::Le => v >= min && v < max,
                hir::BinOpKind::Gt => v >= min && v < max,
                hir::BinOpKind::Ge => v > min && v <= max,
                hir::BinOpKind::Eq | hir::BinOpKind::Ne => v >= min && v <= max,
                _ => bug!(),
            }
        }

        fn rev_binop(binop: hir::BinOpKind) -> hir::BinOpKind {
            match binop {
                hir::BinOpKind::Lt => hir::BinOpKind::Gt,
                hir::BinOpKind::Le => hir::BinOpKind::Ge,
                hir::BinOpKind::Gt => hir::BinOpKind::Lt,
                hir::BinOpKind::Ge => hir::BinOpKind::Le,
                _ => binop,
            }
        }

        fn check_limits(
            cx: &LateContext<'_>,
            binop: hir::BinOpKind,
            l: &hir::Expr<'_>,
            r: &hir::Expr<'_>,
        ) -> bool {
            let (lit, expr, swap) = match (&l.kind, &r.kind) {
                (&hir::ExprKind::Lit(_), _) => (l, r, true),
                (_, &hir::ExprKind::Lit(_)) => (r, l, false),
                _ => return true,
            };
            let norm_binop = if swap { rev_binop(binop) } else { binop };
            match *cx.typeck_results().node_type(expr.hir_id).kind() {
                ty::Int(int_ty) => {
                    let (min, max) = int_ty_range(int_ty);
                    let lit_val: i128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(
                                v,
                                ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed,
                            ) => v.get() as i128,
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                ty::Uint(uint_ty) => {
                    let (min, max): (u128, u128) = uint_ty_range(uint_ty);
                    let lit_val: u128 = match lit.kind {
                        hir::ExprKind::Lit(li) => match li.node {
                            ast::LitKind::Int(v, _) => v.get(),
                            _ => return true,
                        },
                        _ => bug!(),
                    };
                    is_valid(norm_binop, lit_val, min, max)
                }
                _ => true,
            }
        }

        fn is_comparison(binop: hir::BinOpKind) -> bool {
            matches!(
                binop,
                hir::BinOpKind::Eq
                    | hir::BinOpKind::Lt
                    | hir::BinOpKind::Le
                    | hir::BinOpKind::Ne
                    | hir::BinOpKind::Ge
                    | hir::BinOpKind::Gt
            )
        }

        fn diag_item_cmpop(diag_item: Symbol) -> Option<ComparisonOp> {
            Some(match diag_item {
                sym::cmp_ord_max => ComparisonOp::Other,
                sym::cmp_ord_min => ComparisonOp::Other,
                sym::ord_cmp_method => ComparisonOp::Other,
                sym::cmp_partialeq_eq => ComparisonOp::BinOp(hir::BinOpKind::Eq),
                sym::cmp_partialeq_ne => ComparisonOp::BinOp(hir::BinOpKind::Ne),
                sym::cmp_partialord_cmp => ComparisonOp::Other,
                sym::cmp_partialord_ge => ComparisonOp::BinOp(hir::BinOpKind::Ge),
                sym::cmp_partialord_gt => ComparisonOp::BinOp(hir::BinOpKind::Gt),
                sym::cmp_partialord_le => ComparisonOp::BinOp(hir::BinOpKind::Le),
                sym::cmp_partialord_lt => ComparisonOp::BinOp(hir::BinOpKind::Lt),
                _ => return None,
            })
        }
    }
}

declare_lint! {
    IMPROPER_CTYPES,
    Warn,
    "proper use of libc types in foreign modules"
}

declare_lint_pass!(ImproperCTypesDeclarations => [IMPROPER_CTYPES]);

declare_lint! {
    IMPROPER_CTYPES_DEFINITIONS,
    Warn,
    "proper use of libc types in foreign item definitions"
}

declare_lint! {
    USES_POWER_ALIGNMENT,
    Warn,
    "Structs do not follow the power alignment rule under repr(C)"
}

declare_lint_pass!(ImproperCTypesDefinitions => [IMPROPER_CTYPES_DEFINITIONS, USES_POWER_ALIGNMENT]);

#[derive(Clone, Copy)]
pub(crate) enum CItemKind {
    Declaration,
    Definition,
}

struct ImproperCTypesVisitor<'a, 'tcx> {
    cx: &'a LateContext<'tcx>,
    mode: CItemKind,
}

struct CTypesVisitorState<'tcx> {
    cache: FxHashSet<Ty<'tcx>>,
    base_ty: Ty<'tcx>,
}

enum FfiResult<'tcx> {
    FfiSafe,
    FfiPhantom(Ty<'tcx>),
    FfiUnsafe { ty: Ty<'tcx>, reason: DiagMessage, help: Option<DiagMessage> },
}

pub(crate) fn nonnull_optimization_guaranteed<'tcx>(
    tcx: TyCtxt<'tcx>,
    def: ty::AdtDef<'tcx>,
) -> bool {
    tcx.has_attr(def.did(), sym::rustc_nonnull_optimization_guaranteed)
}

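/// For a `#[repr(transparent)]` variant, returns the single field that is not a 1-ZST,
/// i.e. the field that determines the type's layout, if there is one.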
pub(crate) fn transparent_newtype_field<'a, 'tcx>(
    tcx: TyCtxt<'tcx>,
    variant: &'a ty::VariantDef,
) -> Option<&'a ty::FieldDef> {
    let typing_env = ty::TypingEnv::non_body_analysis(tcx, variant.def_id);
    variant.fields.iter().find(|field| {
        let field_ty = tcx.type_of(field.did).instantiate_identity();
        let is_1zst =
            tcx.layout_of(typing_env.as_query_input(field_ty)).is_ok_and(|layout| layout.is_1zst());
        !is_1zst
    })
}

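/// Determines whether a type is guaranteed never to be null: references, function pointers,
/// `Box` (in definitions), transparent wrappers around such types, and pattern types whose
/// ranges exclude zero. This drives the nullable-pointer-optimization checks below.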
fn ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
    mode: CItemKind,
) -> bool {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    match ty.kind() {
        ty::FnPtr(..) => true,
        ty::Ref(..) => true,
        ty::Adt(def, _) if def.is_box() && matches!(mode, CItemKind::Definition) => true,
        ty::Adt(def, args) if def.repr().transparent() && !def.is_union() => {
            let marked_non_null = nonnull_optimization_guaranteed(tcx, *def);

            if marked_non_null {
                return true;
            }

            if def.is_unsafe_cell() || def.is_unsafe_pinned() {
                return false;
            }

            def.variants()
                .iter()
                .filter_map(|variant| transparent_newtype_field(tcx, variant))
                .any(|field| ty_is_known_nonnull(tcx, typing_env, field.ty(tcx, args), mode))
        }
        ty::Pat(base, pat) => {
            ty_is_known_nonnull(tcx, typing_env, *base, mode)
                || pat_ty_is_known_nonnull(tcx, typing_env, *pat)
        }
        _ => false,
    }
}

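/// Determines whether the pattern of a pattern type rules out the all-zeroes value:
/// either a range pattern that starts above zero, or an or-pattern all of whose alternatives do.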
fn pat_ty_is_known_nonnull<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    pat: ty::Pattern<'tcx>,
) -> bool {
    Option::unwrap_or_default(
        try {
            match *pat {
                ty::PatternKind::Range { start, end } => {
                    let start = start.try_to_value()?.try_to_bits(tcx, typing_env)?;
                    let end = end.try_to_value()?.try_to_bits(tcx, typing_env)?;

                    start > 0 && end >= start
                }
                ty::PatternKind::Or(patterns) => {
                    patterns.iter().all(|pat| pat_ty_is_known_nonnull(tcx, typing_env, pat))
                }
            }
        },
    )
}

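/// Given a type known to be non-null, returns the "nullable" type it is a restriction of:
/// references become raw pointers of the same mutability, transparent newtypes and pattern
/// types recurse into their underlying type, and integer, raw-pointer and function-pointer
/// types are returned unchanged.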
fn get_nullable_type<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> Option<Ty<'tcx>> {
    let ty = tcx.try_normalize_erasing_regions(typing_env, ty).unwrap_or(ty);

    Some(match *ty.kind() {
        ty::Adt(field_def, field_args) => {
            let inner_field_ty = {
                let mut first_non_zst_ty =
                    field_def.variants().iter().filter_map(|v| transparent_newtype_field(tcx, v));
                debug_assert_eq!(
                    first_non_zst_ty.clone().count(),
                    1,
                    "Wrong number of fields for transparent type"
                );
                first_non_zst_ty
                    .next_back()
                    .expect("No non-zst fields in transparent type.")
                    .ty(tcx, field_args)
            };
            return get_nullable_type(tcx, typing_env, inner_field_ty);
        }
        ty::Pat(base, ..) => return get_nullable_type(tcx, typing_env, base),
        ty::Int(_) | ty::Uint(_) | ty::RawPtr(..) => ty,
        ty::Ref(_region, ty, mutbl) => Ty::new_ptr(tcx, ty, mutbl),
        ty::FnPtr(..) => ty,
        ref unhandled => {
            debug!(
                "get_nullable_type: Unhandled scalar kind: {:?} while checking {:?}",
                unhandled, ty
            );
            return None;
        }
    })
}

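/// Checks whether a type is a 1-ZST that may accompany the pointer-like payload of a
/// two-variant enum without disturbing the niche optimization: the unit tuple, or an empty,
/// non-`#[non_exhaustive]` struct or enum.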
fn is_niche_optimization_candidate<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
) -> bool {
    if tcx.layout_of(typing_env.as_query_input(ty)).is_ok_and(|layout| !layout.is_1zst()) {
        return false;
    }

    match ty.kind() {
        ty::Adt(ty_def, _) => {
            let non_exhaustive = ty_def.is_variant_list_non_exhaustive();
            let empty = (ty_def.is_struct() && ty_def.all_fields().next().is_none())
                || (ty_def.is_enum() && ty_def.variants().is_empty());

            !non_exhaustive && empty
        }
        ty::Tuple(tys) => tys.is_empty(),
        _ => false,
    }
}

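/// Checks whether an enum qualifies for the nullable-pointer ("null pointer optimization")
/// layout and, if so, returns the FFI-safe pointer type it is represented as. An illustrative
/// sketch of the kind of type this accepts (assuming `T: Sized`):
///
/// ```ignore (illustrative)
/// // Laid out as a single, possibly-null pointer, so it may cross an `extern "C"` boundary.
/// type NullablePtr<T> = Option<std::ptr::NonNull<T>>;
/// ```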
pub(crate) fn repr_nullable_ptr<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    ty: Ty<'tcx>,
    ckind: CItemKind,
) -> Option<Ty<'tcx>> {
    debug!("is_repr_nullable_ptr(tcx, ty = {:?})", ty);
    match ty.kind() {
        ty::Adt(ty_def, args) => {
            let field_ty = match &ty_def.variants().raw[..] {
                [var_one, var_two] => match (&var_one.fields.raw[..], &var_two.fields.raw[..]) {
                    ([], [field]) | ([field], []) => field.ty(tcx, args),
                    ([field1], [field2]) => {
                        let ty1 = field1.ty(tcx, args);
                        let ty2 = field2.ty(tcx, args);

                        if is_niche_optimization_candidate(tcx, typing_env, ty1) {
                            ty2
                        } else if is_niche_optimization_candidate(tcx, typing_env, ty2) {
                            ty1
                        } else {
                            return None;
                        }
                    }
                    _ => return None,
                },
                _ => return None,
            };

            if !ty_is_known_nonnull(tcx, typing_env, field_ty, ckind) {
                return None;
            }

            let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, typing_env).ok();
            if !compute_size_skeleton(ty)?.same_size(compute_size_skeleton(field_ty)?) {
                bug!("improper_ctypes: Option nonnull optimization not applied?");
            }

            let field_ty_layout = tcx.layout_of(typing_env.as_query_input(field_ty));
            if field_ty_layout.is_err() && !field_ty.has_non_region_param() {
                bug!("should be able to compute the layout of non-polymorphic type");
            }

            let field_ty_abi = &field_ty_layout.ok()?.backend_repr;
            if let BackendRepr::Scalar(field_ty_scalar) = field_ty_abi {
                match field_ty_scalar.valid_range(&tcx) {
                    WrappingRange { start: 0, end }
                        if end == field_ty_scalar.size(&tcx).unsigned_int_max() - 1 =>
                    {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start: 1, .. } => {
                        return Some(get_nullable_type(tcx, typing_env, field_ty).unwrap());
                    }
                    WrappingRange { start, end } => {
                        unreachable!("Unhandled start and end range: ({}, {})", start, end)
                    }
                };
            }
            None
        }
        ty::Pat(base, pat) => get_nullable_type_from_pat(tcx, typing_env, *base, *pat),
        _ => None,
    }
}

fn get_nullable_type_from_pat<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    base: Ty<'tcx>,
    pat: ty::Pattern<'tcx>,
) -> Option<Ty<'tcx>> {
    match *pat {
        ty::PatternKind::Range { .. } => get_nullable_type(tcx, typing_env, base),
        ty::PatternKind::Or(patterns) => {
            let first = get_nullable_type_from_pat(tcx, typing_env, base, patterns[0])?;
            for &pat in &patterns[1..] {
                assert_eq!(first, get_nullable_type_from_pat(tcx, typing_env, base, pat)?);
            }
            Some(first)
        }
    }
}

impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
    fn check_for_array_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
        if let ty::Array(..) = ty.kind() {
            self.emit_ffi_unsafe_type_lint(
                ty,
                sp,
                fluent::lint_improper_ctypes_array_reason,
                Some(fluent::lint_improper_ctypes_array_help),
            );
            true
        } else {
            false
        }
    }

    fn check_field_type_for_ffi(
        &self,
        acc: &mut CTypesVisitorState<'tcx>,
        field: &ty::FieldDef,
        args: GenericArgsRef<'tcx>,
    ) -> FfiResult<'tcx> {
        let field_ty = field.ty(self.cx.tcx, args);
        let field_ty = self
            .cx
            .tcx
            .try_normalize_erasing_regions(self.cx.typing_env(), field_ty)
            .unwrap_or(field_ty);
        self.check_type_for_ffi(acc, field_ty)
    }

    fn check_variant_for_ffi(
        &self,
        acc: &mut CTypesVisitorState<'tcx>,
        ty: Ty<'tcx>,
        def: ty::AdtDef<'tcx>,
        variant: &ty::VariantDef,
        args: GenericArgsRef<'tcx>,
    ) -> FfiResult<'tcx> {
        use FfiResult::*;
        let transparent_with_all_zst_fields = if def.repr().transparent() {
            if let Some(field) = transparent_newtype_field(self.cx.tcx, variant) {
                match self.check_field_type_for_ffi(acc, field, args) {
                    FfiUnsafe { ty, .. } if ty.is_unit() => (),
                    r => return r,
                }

                false
            } else {
                true
            }
        } else {
            false
        };

        let mut all_phantom = !variant.fields.is_empty();
        for field in &variant.fields {
            all_phantom &= match self.check_field_type_for_ffi(acc, field, args) {
                FfiSafe => false,
                FfiUnsafe { ty, .. } if ty.is_unit() => false,
                FfiPhantom(..) => true,
                r @ FfiUnsafe { .. } => return r,
            }
        }

        if all_phantom {
            FfiPhantom(ty)
        } else if transparent_with_all_zst_fields {
            FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_struct_zst, help: None }
        } else {
            FfiSafe
        }
    }

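    /// Checks whether a type is "FFI-safe", returning `FfiSafe`, `FfiPhantom` for
    /// `PhantomData`-only types, or `FfiUnsafe` with a reason (and optional help message)
    /// explaining why the type should not cross an `extern` boundary.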
    fn check_type_for_ffi(
        &self,
        acc: &mut CTypesVisitorState<'tcx>,
        ty: Ty<'tcx>,
    ) -> FfiResult<'tcx> {
        use FfiResult::*;

        let tcx = self.cx.tcx;

        if !acc.cache.insert(ty) {
            return FfiSafe;
        }

        match *ty.kind() {
            ty::Adt(def, args) => {
                if let Some(boxed) = ty.boxed_ty()
                    && matches!(self.mode, CItemKind::Definition)
                {
                    if boxed.is_sized(tcx, self.cx.typing_env()) {
                        return FfiSafe;
                    } else {
                        return FfiUnsafe {
                            ty,
                            reason: fluent::lint_improper_ctypes_box,
                            help: None,
                        };
                    }
                }
                if def.is_phantom_data() {
                    return FfiPhantom(ty);
                }
                match def.adt_kind() {
                    AdtKind::Struct | AdtKind::Union => {
                        if let Some(sym::cstring_type | sym::cstr_type) =
                            tcx.get_diagnostic_name(def.did())
                            && !acc.base_ty.is_mutable_ptr()
                        {
                            return FfiUnsafe {
                                ty,
                                reason: fluent::lint_improper_ctypes_cstr_reason,
                                help: Some(fluent::lint_improper_ctypes_cstr_help),
                            };
                        }

                        if !def.repr().c() && !def.repr().transparent() {
                            return FfiUnsafe {
                                ty,
                                reason: if def.is_struct() {
                                    fluent::lint_improper_ctypes_struct_layout_reason
                                } else {
                                    fluent::lint_improper_ctypes_union_layout_reason
                                },
                                help: if def.is_struct() {
                                    Some(fluent::lint_improper_ctypes_struct_layout_help)
                                } else {
                                    Some(fluent::lint_improper_ctypes_union_layout_help)
                                },
                            };
                        }

                        if def.non_enum_variant().field_list_has_applicable_non_exhaustive() {
                            return FfiUnsafe {
                                ty,
                                reason: if def.is_struct() {
                                    fluent::lint_improper_ctypes_struct_non_exhaustive
                                } else {
                                    fluent::lint_improper_ctypes_union_non_exhaustive
                                },
                                help: None,
                            };
                        }

                        if def.non_enum_variant().fields.is_empty() {
                            return FfiUnsafe {
                                ty,
                                reason: if def.is_struct() {
                                    fluent::lint_improper_ctypes_struct_fieldless_reason
                                } else {
                                    fluent::lint_improper_ctypes_union_fieldless_reason
                                },
                                help: if def.is_struct() {
                                    Some(fluent::lint_improper_ctypes_struct_fieldless_help)
                                } else {
                                    Some(fluent::lint_improper_ctypes_union_fieldless_help)
                                },
                            };
                        }

                        self.check_variant_for_ffi(acc, ty, def, def.non_enum_variant(), args)
                    }
                    AdtKind::Enum => {
                        if def.variants().is_empty() {
                            return FfiSafe;
                        }
                        if !def.repr().c() && !def.repr().transparent() && def.repr().int.is_none()
                        {
                            if let Some(ty) =
                                repr_nullable_ptr(self.cx.tcx, self.cx.typing_env(), ty, self.mode)
                            {
                                return self.check_type_for_ffi(acc, ty);
                            }

                            return FfiUnsafe {
                                ty,
                                reason: fluent::lint_improper_ctypes_enum_repr_reason,
                                help: Some(fluent::lint_improper_ctypes_enum_repr_help),
                            };
                        }

                        use improper_ctypes::check_non_exhaustive_variant;

                        let non_exhaustive = def.variant_list_has_applicable_non_exhaustive();
                        let ret = def.variants().iter().try_for_each(|variant| {
                            check_non_exhaustive_variant(non_exhaustive, variant)
                                .map_break(|reason| FfiUnsafe { ty, reason, help: None })?;

                            match self.check_variant_for_ffi(acc, ty, def, variant, args) {
                                FfiSafe => ControlFlow::Continue(()),
                                r => ControlFlow::Break(r),
                            }
                        });
                        if let ControlFlow::Break(result) = ret {
                            return result;
                        }

                        FfiSafe
                    }
                }
            }

            ty::Char => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_char_reason,
                help: Some(fluent::lint_improper_ctypes_char_help),
            },

            ty::Pat(base, ..) => self.check_type_for_ffi(acc, base),

            ty::Bool | ty::Int(..) | ty::Uint(..) | ty::Float(..) | ty::Never => FfiSafe,

            ty::Slice(_) => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_slice_reason,
                help: Some(fluent::lint_improper_ctypes_slice_help),
            },

            ty::Dynamic(..) => {
                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_dyn, help: None }
            }

            ty::Str => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_str_reason,
                help: Some(fluent::lint_improper_ctypes_str_help),
            },

            ty::Tuple(..) => FfiUnsafe {
                ty,
                reason: fluent::lint_improper_ctypes_tuple_reason,
                help: Some(fluent::lint_improper_ctypes_tuple_help),
            },

            ty::RawPtr(ty, _) | ty::Ref(_, ty, _)
                if {
                    matches!(self.mode, CItemKind::Definition)
                        && ty.is_sized(self.cx.tcx, self.cx.typing_env())
                } =>
            {
                FfiSafe
            }

            ty::RawPtr(ty, _)
                if match ty.kind() {
                    ty::Tuple(tuple) => tuple.is_empty(),
                    _ => false,
                } =>
            {
                FfiSafe
            }

            ty::RawPtr(ty, _) | ty::Ref(_, ty, _) => self.check_type_for_ffi(acc, ty),

            ty::Array(inner_ty, _) => self.check_type_for_ffi(acc, inner_ty),

            ty::FnPtr(sig_tys, hdr) => {
                let sig = sig_tys.with(hdr);
                if sig.abi().is_rustic_abi() {
                    return FfiUnsafe {
                        ty,
                        reason: fluent::lint_improper_ctypes_fnptr_reason,
                        help: Some(fluent::lint_improper_ctypes_fnptr_help),
                    };
                }

                let sig = tcx.instantiate_bound_regions_with_erased(sig);
                for arg in sig.inputs() {
                    match self.check_type_for_ffi(acc, *arg) {
                        FfiSafe => {}
                        r => return r,
                    }
                }

                let ret_ty = sig.output();
                if ret_ty.is_unit() {
                    return FfiSafe;
                }

                self.check_type_for_ffi(acc, ret_ty)
            }

            ty::Foreign(..) => FfiSafe,

            ty::Alias(ty::Opaque, ..) => {
                FfiUnsafe { ty, reason: fluent::lint_improper_ctypes_opaque, help: None }
            }

            ty::Param(..) | ty::Alias(ty::Projection | ty::Inherent, ..)
                if matches!(self.mode, CItemKind::Definition) =>
            {
                FfiSafe
            }

            ty::UnsafeBinder(_) => todo!("FIXME(unsafe_binder)"),

            ty::Param(..)
            | ty::Alias(ty::Projection | ty::Inherent | ty::Free, ..)
            | ty::Infer(..)
            | ty::Bound(..)
            | ty::Error(_)
            | ty::Closure(..)
            | ty::CoroutineClosure(..)
            | ty::Coroutine(..)
            | ty::CoroutineWitness(..)
            | ty::Placeholder(..)
            | ty::FnDef(..) => bug!("unexpected type in foreign function: {:?}", ty),
        }
    }

    fn emit_ffi_unsafe_type_lint(
        &mut self,
        ty: Ty<'tcx>,
        sp: Span,
        note: DiagMessage,
        help: Option<DiagMessage>,
    ) {
        let lint = match self.mode {
            CItemKind::Declaration => IMPROPER_CTYPES,
            CItemKind::Definition => IMPROPER_CTYPES_DEFINITIONS,
        };
        let desc = match self.mode {
            CItemKind::Declaration => "block",
            CItemKind::Definition => "fn",
        };
        let span_note = if let ty::Adt(def, _) = ty.kind()
            && let Some(sp) = self.cx.tcx.hir_span_if_local(def.did())
        {
            Some(sp)
        } else {
            None
        };
        self.cx.emit_span_lint(
            lint,
            sp,
            ImproperCTypes { ty, desc, label: sp, help, note, span_note },
        );
    }

    fn check_for_opaque_ty(&mut self, sp: Span, ty: Ty<'tcx>) -> bool {
        struct ProhibitOpaqueTypes;
        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for ProhibitOpaqueTypes {
            type Result = ControlFlow<Ty<'tcx>>;

            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
                if !ty.has_opaque_types() {
                    return ControlFlow::Continue(());
                }

                if let ty::Alias(ty::Opaque, ..) = ty.kind() {
                    ControlFlow::Break(ty)
                } else {
                    ty.super_visit_with(self)
                }
            }
        }

        if let Some(ty) = self
            .cx
            .tcx
            .try_normalize_erasing_regions(self.cx.typing_env(), ty)
            .unwrap_or(ty)
            .visit_with(&mut ProhibitOpaqueTypes)
            .break_value()
        {
            self.emit_ffi_unsafe_type_lint(ty, sp, fluent::lint_improper_ctypes_opaque, None);
            true
        } else {
            false
        }
    }

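    /// Normalizes the type, runs the FFI-safety checks, and emits the appropriate
    /// `improper_ctypes`/`improper_ctypes_definitions` lint for anything found to be unsafe.
    /// Arrays are rejected up front (except in statics), and unit return types are accepted.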
    fn check_type_for_ffi_and_report_errors(
        &mut self,
        sp: Span,
        ty: Ty<'tcx>,
        is_static: bool,
        is_return_type: bool,
    ) {
        if self.check_for_opaque_ty(sp, ty) {
            return;
        }

        let ty = self.cx.tcx.try_normalize_erasing_regions(self.cx.typing_env(), ty).unwrap_or(ty);

        if !is_static && self.check_for_array_ty(sp, ty) {
            return;
        }

        if is_return_type && ty.is_unit() {
            return;
        }

        let mut acc = CTypesVisitorState { cache: FxHashSet::default(), base_ty: ty };
        match self.check_type_for_ffi(&mut acc, ty) {
            FfiResult::FfiSafe => {}
            FfiResult::FfiPhantom(ty) => {
                self.emit_ffi_unsafe_type_lint(
                    ty,
                    sp,
                    fluent::lint_improper_ctypes_only_phantomdata,
                    None,
                );
            }
            FfiResult::FfiUnsafe { ty, reason, help } => {
                self.emit_ffi_unsafe_type_lint(ty, sp, reason, help);
            }
        }
    }

    fn check_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
        let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
        let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);

        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
            for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(input_hir, *input_ty) {
                self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, false);
            }
        }

        if let hir::FnRetTy::Return(ret_hir) = decl.output {
            for (fn_ptr_ty, span) in self.find_fn_ptr_ty_with_external_abi(ret_hir, sig.output()) {
                self.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, false, true);
            }
        }
    }

    fn check_foreign_fn(&mut self, def_id: LocalDefId, decl: &'tcx hir::FnDecl<'_>) {
        let sig = self.cx.tcx.fn_sig(def_id).instantiate_identity();
        let sig = self.cx.tcx.instantiate_bound_regions_with_erased(sig);

        for (input_ty, input_hir) in iter::zip(sig.inputs(), decl.inputs) {
            self.check_type_for_ffi_and_report_errors(input_hir.span, *input_ty, false, false);
        }

        if let hir::FnRetTy::Return(ret_hir) = decl.output {
            self.check_type_for_ffi_and_report_errors(ret_hir.span, sig.output(), false, true);
        }
    }

    fn check_foreign_static(&mut self, id: hir::OwnerId, span: Span) {
        let ty = self.cx.tcx.type_of(id).instantiate_identity();
        self.check_type_for_ffi_and_report_errors(span, ty, true, false);
    }

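    /// Collects every function-pointer type with a non-rustic ABI nested inside `ty`, paired
    /// with the HIR span it comes from, so that e.g. a field or alias of type `extern "C" fn()`
    /// inside an otherwise ordinary Rust item is still checked for FFI-safety.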
    fn find_fn_ptr_ty_with_external_abi(
        &self,
        hir_ty: &hir::Ty<'tcx>,
        ty: Ty<'tcx>,
    ) -> Vec<(Ty<'tcx>, Span)> {
        struct FnPtrFinder<'tcx> {
            spans: Vec<Span>,
            tys: Vec<Ty<'tcx>>,
        }

        impl<'tcx> hir::intravisit::Visitor<'_> for FnPtrFinder<'tcx> {
            fn visit_ty(&mut self, ty: &'_ hir::Ty<'_, AmbigArg>) {
                debug!(?ty);
                if let hir::TyKind::FnPtr(hir::FnPtrTy { abi, .. }) = ty.kind
                    && !abi.is_rustic_abi()
                {
                    self.spans.push(ty.span);
                }

                hir::intravisit::walk_ty(self, ty)
            }
        }

        impl<'tcx> ty::TypeVisitor<TyCtxt<'tcx>> for FnPtrFinder<'tcx> {
            type Result = ();

            fn visit_ty(&mut self, ty: Ty<'tcx>) -> Self::Result {
                if let ty::FnPtr(_, hdr) = ty.kind()
                    && !hdr.abi.is_rustic_abi()
                {
                    self.tys.push(ty);
                }

                ty.super_visit_with(self)
            }
        }

        let mut visitor = FnPtrFinder { spans: Vec::new(), tys: Vec::new() };
        ty.visit_with(&mut visitor);
        visitor.visit_ty_unambig(hir_ty);

        iter::zip(visitor.tys.drain(..), visitor.spans.drain(..)).collect()
    }
}

impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDeclarations {
    fn check_foreign_item(&mut self, cx: &LateContext<'tcx>, it: &hir::ForeignItem<'tcx>) {
        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Declaration };
        let abi = cx.tcx.hir_get_foreign_abi(it.hir_id());

        match it.kind {
            hir::ForeignItemKind::Fn(sig, _, _) => {
                if abi.is_rustic_abi() {
                    vis.check_fn(it.owner_id.def_id, sig.decl)
                } else {
                    vis.check_foreign_fn(it.owner_id.def_id, sig.decl);
                }
            }
            hir::ForeignItemKind::Static(ty, _, _) if !abi.is_rustic_abi() => {
                vis.check_foreign_static(it.owner_id, ty.span);
            }
            hir::ForeignItemKind::Static(..) | hir::ForeignItemKind::Type => (),
        }
    }
}

impl ImproperCTypesDefinitions {
    fn check_ty_maybe_containing_foreign_fnptr<'tcx>(
        &mut self,
        cx: &LateContext<'tcx>,
        hir_ty: &'tcx hir::Ty<'_>,
        ty: Ty<'tcx>,
    ) {
        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
        for (fn_ptr_ty, span) in vis.find_fn_ptr_ty_with_external_abi(hir_ty, ty) {
            vis.check_type_for_ffi_and_report_errors(span, fn_ptr_ty, true, false);
        }
    }

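    /// Returns `true` if `ty` is (or, recursively through `repr(C)` struct fields, contains) a
    /// floating-point type wider than four bytes, i.e. a member that the AIX "power" alignment
    /// rule treats differently from its usual natural alignment.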
    fn check_arg_for_power_alignment<'tcx>(
        &mut self,
        cx: &LateContext<'tcx>,
        ty: Ty<'tcx>,
    ) -> bool {
        assert!(cx.tcx.sess.target.os == "aix");
        if ty.is_floating_point() && ty.primitive_size(cx.tcx).bytes() > 4 {
            return true;
        } else if let Adt(adt_def, _) = ty.kind()
            && adt_def.is_struct()
            && adt_def.repr().c()
            && !adt_def.repr().packed()
            && adt_def.repr().align.is_none()
        {
            let struct_variant = adt_def.variant(VariantIdx::ZERO);
            for struct_field in &struct_variant.fields {
                let field_ty = cx.tcx.type_of(struct_field.did).instantiate_identity();
                if self.check_arg_for_power_alignment(cx, field_ty) {
                    return true;
                }
            }
        }
        return false;
    }

    fn check_struct_for_power_alignment<'tcx>(
        &mut self,
        cx: &LateContext<'tcx>,
        item: &'tcx hir::Item<'tcx>,
    ) {
        let adt_def = cx.tcx.adt_def(item.owner_id.to_def_id());
        if adt_def.repr().c()
            && !adt_def.repr().packed()
            && adt_def.repr().align.is_none()
            && cx.tcx.sess.target.os == "aix"
            && !adt_def.all_fields().next().is_none()
        {
            let struct_variant_data = item.expect_struct().2;
            for field_def in struct_variant_data.fields().iter().skip(1) {
                let def_id = field_def.def_id;
                let ty = cx.tcx.type_of(def_id).instantiate_identity();
                if self.check_arg_for_power_alignment(cx, ty) {
                    cx.emit_span_lint(USES_POWER_ALIGNMENT, field_def.span, UsesPowerAlignment);
                }
            }
        }
    }
}

impl<'tcx> LateLintPass<'tcx> for ImproperCTypesDefinitions {
    fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx hir::Item<'tcx>) {
        match item.kind {
            hir::ItemKind::Static(_, _, ty, _)
            | hir::ItemKind::Const(_, _, ty, _)
            | hir::ItemKind::TyAlias(_, _, ty) => {
                self.check_ty_maybe_containing_foreign_fnptr(
                    cx,
                    ty,
                    cx.tcx.type_of(item.owner_id).instantiate_identity(),
                );
            }
            hir::ItemKind::Fn { .. } => {}
            hir::ItemKind::Struct(..) => {
                self.check_struct_for_power_alignment(cx, item);
            }
            hir::ItemKind::Union(..) | hir::ItemKind::Enum(..) => {}
            hir::ItemKind::Impl(..)
            | hir::ItemKind::TraitAlias(..)
            | hir::ItemKind::Trait(..)
            | hir::ItemKind::GlobalAsm { .. }
            | hir::ItemKind::ForeignMod { .. }
            | hir::ItemKind::Mod(..)
            | hir::ItemKind::Macro(..)
            | hir::ItemKind::Use(..)
            | hir::ItemKind::ExternCrate(..) => {}
        }
    }

    fn check_field_def(&mut self, cx: &LateContext<'tcx>, field: &'tcx hir::FieldDef<'tcx>) {
        self.check_ty_maybe_containing_foreign_fnptr(
            cx,
            field.ty,
            cx.tcx.type_of(field.def_id).instantiate_identity(),
        );
    }

    fn check_fn(
        &mut self,
        cx: &LateContext<'tcx>,
        kind: hir::intravisit::FnKind<'tcx>,
        decl: &'tcx hir::FnDecl<'_>,
        _: &'tcx hir::Body<'_>,
        _: Span,
        id: LocalDefId,
    ) {
        use hir::intravisit::FnKind;

        let abi = match kind {
            FnKind::ItemFn(_, _, header, ..) => header.abi,
            FnKind::Method(_, sig, ..) => sig.header.abi,
            _ => return,
        };

        let mut vis = ImproperCTypesVisitor { cx, mode: CItemKind::Definition };
        if abi.is_rustic_abi() {
            vis.check_fn(id, decl);
        } else {
            vis.check_foreign_fn(id, decl);
        }
    }
}

declare_lint_pass!(VariantSizeDifferences => [VARIANT_SIZE_DIFFERENCES]);

impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
    fn check_item(&mut self, cx: &LateContext<'_>, it: &hir::Item<'_>) {
        if let hir::ItemKind::Enum(_, _, ref enum_definition) = it.kind {
            let t = cx.tcx.type_of(it.owner_id).instantiate_identity();
            let ty = cx.tcx.erase_regions(t);
            let Ok(layout) = cx.layout_of(ty) else { return };
            let Variants::Multiple { tag_encoding: TagEncoding::Direct, tag, variants, .. } =
                &layout.variants
            else {
                return;
            };

            let tag_size = tag.size(&cx.tcx).bytes();

            debug!(
                "enum `{}` is {} bytes large with layout:\n{:#?}",
                t,
                layout.size.bytes(),
                layout
            );

            let (largest, slargest, largest_index) = iter::zip(enum_definition.variants, variants)
                .map(|(variant, variant_layout)| {
                    let bytes = variant_layout.size.bytes().saturating_sub(tag_size);

                    debug!("- variant `{}` is {} bytes large", variant.ident, bytes);
                    bytes
                })
                .enumerate()
                .fold((0, 0, 0), |(l, s, li), (idx, size)| {
                    if size > l {
                        (size, l, idx)
                    } else if size > s {
                        (l, size, li)
                    } else {
                        (l, s, li)
                    }
                });

            if largest > slargest * 3 && slargest > 0 {
                cx.emit_span_lint(
                    VARIANT_SIZE_DIFFERENCES,
                    enum_definition.variants[largest_index].span,
                    VariantSizeDifferencesDiag { largest },
                );
            }
        }
    }
}

declare_lint! {
    INVALID_ATOMIC_ORDERING,
    Deny,
    "usage of invalid atomic ordering in atomic operations and memory fences"
}

declare_lint_pass!(InvalidAtomicOrdering => [INVALID_ATOMIC_ORDERING]);

impl InvalidAtomicOrdering {
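    /// If `expr` is a call to one of the listed inherent methods on a standard atomic type
    /// (`AtomicBool`, `AtomicUsize`, ...), returns the method name and its arguments.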
    fn inherent_atomic_method_call<'hir>(
        cx: &LateContext<'_>,
        expr: &Expr<'hir>,
        recognized_names: &[Symbol],
    ) -> Option<(Symbol, &'hir [Expr<'hir>])> {
        const ATOMIC_TYPES: &[Symbol] = &[
            sym::AtomicBool,
            sym::AtomicPtr,
            sym::AtomicUsize,
            sym::AtomicU8,
            sym::AtomicU16,
            sym::AtomicU32,
            sym::AtomicU64,
            sym::AtomicU128,
            sym::AtomicIsize,
            sym::AtomicI8,
            sym::AtomicI16,
            sym::AtomicI32,
            sym::AtomicI64,
            sym::AtomicI128,
        ];
        if let ExprKind::MethodCall(method_path, _, args, _) = &expr.kind
            && recognized_names.contains(&method_path.ident.name)
            && let Some(m_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
            && let Some(impl_did) = cx.tcx.impl_of_method(m_def_id)
            && let Some(adt) = cx.tcx.type_of(impl_did).instantiate_identity().ty_adt_def()
            // only consider inherent impls, not trait impls such as extension traits
            && cx.tcx.trait_id_of_impl(impl_did).is_none()
            && let parent = cx.tcx.parent(adt.did())
            && cx.tcx.is_diagnostic_item(sym::atomic_mod, parent)
            && ATOMIC_TYPES.contains(&cx.tcx.item_name(adt.did()))
        {
            return Some((method_path.ident.name, args));
        }
        None
    }

    fn match_ordering(cx: &LateContext<'_>, ord_arg: &Expr<'_>) -> Option<Symbol> {
        let ExprKind::Path(ref ord_qpath) = ord_arg.kind else { return None };
        let did = cx.qpath_res(ord_qpath, ord_arg.hir_id).opt_def_id()?;
        let tcx = cx.tcx;
        let atomic_ordering = tcx.get_diagnostic_item(sym::Ordering);
        let name = tcx.item_name(did);
        let parent = tcx.parent(did);
        [sym::Relaxed, sym::Release, sym::Acquire, sym::AcqRel, sym::SeqCst].into_iter().find(
            |&ordering| {
                name == ordering
                    && (Some(parent) == atomic_ordering
                        || tcx.opt_parent(parent) == atomic_ordering)
            },
        )
    }

    fn check_atomic_load_store(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let Some((method, args)) =
            Self::inherent_atomic_method_call(cx, expr, &[sym::load, sym::store])
            && let Some((ordering_arg, invalid_ordering)) = match method {
                sym::load => Some((&args[0], sym::Release)),
                sym::store => Some((&args[1], sym::Acquire)),
                _ => None,
            }
            && let Some(ordering) = Self::match_ordering(cx, ordering_arg)
            && (ordering == invalid_ordering || ordering == sym::AcqRel)
        {
            if method == sym::load {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingLoad);
            } else {
                cx.emit_span_lint(INVALID_ATOMIC_ORDERING, ordering_arg.span, AtomicOrderingStore);
            };
        }
    }

    fn check_memory_fence(cx: &LateContext<'_>, expr: &Expr<'_>) {
        if let ExprKind::Call(func, args) = expr.kind
            && let ExprKind::Path(ref func_qpath) = func.kind
            && let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id()
            && matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::fence | sym::compiler_fence))
            && Self::match_ordering(cx, &args[0]) == Some(sym::Relaxed)
        {
            cx.emit_span_lint(INVALID_ATOMIC_ORDERING, args[0].span, AtomicOrderingFence);
        }
    }

    fn check_atomic_compare_exchange(cx: &LateContext<'_>, expr: &Expr<'_>) {
        let Some((method, args)) = Self::inherent_atomic_method_call(
            cx,
            expr,
            &[sym::fetch_update, sym::compare_exchange, sym::compare_exchange_weak],
        ) else {
            return;
        };

        let fail_order_arg = match method {
            sym::fetch_update => &args[1],
            sym::compare_exchange | sym::compare_exchange_weak => &args[3],
            _ => return,
        };

        let Some(fail_ordering) = Self::match_ordering(cx, fail_order_arg) else { return };

        if matches!(fail_ordering, sym::Release | sym::AcqRel) {
            cx.emit_span_lint(
                INVALID_ATOMIC_ORDERING,
                fail_order_arg.span,
                InvalidAtomicOrderingDiag { method, fail_order_arg_span: fail_order_arg.span },
            );
        }
    }
}

impl<'tcx> LateLintPass<'tcx> for InvalidAtomicOrdering {
    fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
        Self::check_atomic_load_store(cx, expr);
        Self::check_memory_fence(cx, expr);
        Self::check_atomic_compare_exchange(cx, expr);
    }
}