diff --git a/src/code_info_builder/classlike_scanner.rs b/src/code_info_builder/classlike_scanner.rs
index 813ee95a..a993183a 100644
--- a/src/code_info_builder/classlike_scanner.rs
+++ b/src/code_info_builder/classlike_scanner.rs
@@ -578,6 +578,9 @@ pub(crate) fn scan(
             .get(&(user_attribute.name.0.start_offset() as u32))
             .unwrap();
 
+        signature_hash =
+            signature_hash.wrapping_add(xxhash_rust::xxh3::xxh3_64(&name.0.to_le_bytes()));
+
         if name == StrId::CODEGEN {
             storage.generated = true;
         }
@@ -657,6 +660,7 @@ pub(crate) fn scan(
             &m.name,
             &m.tparams,
             &m.params,
+            &functionlike_storage.attributes,
             &m.ret,
             all_uses
                 .symbol_member_uses
diff --git a/src/code_info_builder/functionlike_scanner.rs b/src/code_info_builder/functionlike_scanner.rs
index 578f22ec..ecdf43a5 100644
--- a/src/code_info_builder/functionlike_scanner.rs
+++ b/src/code_info_builder/functionlike_scanner.rs
@@ -331,13 +331,15 @@ pub(crate) fn get_functionlike(
     }
 
     for user_attribute in user_attributes {
-        let name = *resolved_names
+        let attribute_name = *resolved_names
             .get(&(user_attribute.name.0.start_offset() as u32))
             .unwrap();
 
-        functionlike_info.attributes.push(AttributeInfo { name });
+        functionlike_info.attributes.push(AttributeInfo {
+            name: attribute_name,
+        });
 
-        match name {
+        match attribute_name {
             StrId::HAKANA_SECURITY_ANALYSIS_SOURCE => {
                 let mut source_types = vec![];
 
@@ -558,37 +560,35 @@ pub(crate) fn adjust_location_from_comments(
     suppressed_issues: &mut Vec<(IssueKind, HPos)>,
     all_custom_issues: &FxHashSet<String>,
 ) {
-    if !comments.is_empty() {
-        for (comment_pos, comment) in comments.iter().rev() {
-            let (start, end) = comment_pos.to_start_and_end_lnum_bol_offset();
-            let (start_line, _, start_offset) = start;
-            let (end_line, _, _) = end;
-
-            if meta_start.start_line as usize == (end_line + 1)
-                || meta_start.start_line as usize == (end_line + 2)
-            {
-                match comment {
-                    Comment::CmtLine(_) => {
-                        meta_start.start_line = start_line as u32;
-                        meta_start.start_offset = start_offset as u32;
+    for (comment_pos, comment) in comments.iter().rev() {
+        let (start, end) = comment_pos.to_start_and_end_lnum_bol_offset();
+        let (start_line, _, start_offset) = start;
+        let (end_line, _, _) = end;
+
+        if meta_start.start_line as usize == (end_line + 1)
+            || meta_start.start_line as usize == (end_line + 2)
+        {
+            match comment {
+                Comment::CmtLine(_) => {
+                    meta_start.start_line = start_line as u32;
+                    meta_start.start_offset = start_offset as u32;
+                }
+                Comment::CmtBlock(text) => {
+                    let trimmed_text = if let Some(trimmed_text) = text.strip_prefix('*') {
+                        trimmed_text.trim()
+                    } else {
+                        text.trim()
+                    };
+
+                    if let Some(Ok(issue_kind)) =
+                        get_issue_from_comment(trimmed_text, all_custom_issues)
+                    {
+                        let comment_pos = HPos::new(comment_pos, file_source.file_path);
+                        suppressed_issues.push((issue_kind, comment_pos));
                     }
-                    Comment::CmtBlock(text) => {
-                        let trimmed_text = if let Some(trimmed_text) = text.strip_prefix('*') {
-                            trimmed_text.trim()
-                        } else {
-                            text.trim()
-                        };
-
-                        if let Some(Ok(issue_kind)) =
-                            get_issue_from_comment(trimmed_text, all_custom_issues)
-                        {
-                            let comment_pos = HPos::new(comment_pos, file_source.file_path);
-                            suppressed_issues.push((issue_kind, comment_pos));
-                        }
-                        meta_start.start_line = start_line as u32;
-                        meta_start.start_offset = start_offset as u32;
-                    }
+                    meta_start.start_line = start_line as u32;
+                    meta_start.start_offset = start_offset as u32;
                 }
             }
         }
     }
diff --git a/src/code_info_builder/lib.rs b/src/code_info_builder/lib.rs
index cb0bb50e..a1556a14 100644
--- a/src/code_info_builder/lib.rs
+++ b/src/code_info_builder/lib.rs
@@ -16,8 +16,9 @@ use hakana_reflection_info::{
 use hakana_reflection_info::{FileSource, GenericParent};
 use hakana_str::{StrId, ThreadedInterner};
 use hakana_type::{get_bool, get_int, get_mixed_any, get_string};
+use naming_special_names_rust::user_attributes;
 use no_pos_hash::{position_insensitive_hash, Hasher};
-use oxidized::ast::{FunParam, Tparam, TypeHint};
+use oxidized::ast::{FunParam, Tparam, TypeHint, UserAttribute};
 use oxidized::ast_defs::Id;
 use oxidized::{
     aast,
@@ -507,6 +508,7 @@ impl<'ast> Visitor<'ast> for Scanner<'_> {
             &f.name,
             &f.tparams,
             &f.fun.params,
+            &functionlike_storage.attributes,
             &f.fun.ret,
             self.uses.symbol_uses.get(&name).unwrap_or(&vec![]),
         );
@@ -652,7 +654,7 @@ impl<'a> Scanner<'a> {
             self.user_defined,
         );
 
-        functionlike_storage.is_production_code = self.file_source.is_production_code;
+        functionlike_storage.is_production_code &= self.file_source.is_production_code;
 
         if matches!(
             name,
@@ -756,6 +758,7 @@ fn get_function_hashes(
     name: &Id,
    tparams: &[Tparam],
     params: &[FunParam],
+    user_attributes: &[AttributeInfo],
     ret: &TypeHint,
     uses: &Vec<(StrId, StrId)>,
 ) -> (u64, u64) {
@@ -783,10 +786,15 @@
         signature_end = ret_hint.0.end_offset();
     }
 
-    let signature_hash = xxhash_rust::xxh3::xxh3_64(
+    let mut signature_hash = xxhash_rust::xxh3::xxh3_64(
         file_contents[def_location.start_offset as usize..signature_end].as_bytes(),
     );
 
+    for attribute in user_attributes {
+        signature_hash = signature_hash
+            .wrapping_add(xxhash_rust::xxh3::xxh3_64(&attribute.name.0.to_le_bytes()));
+    }
+
     (
         signature_hash,
         xxhash_rust::xxh3::xxh3_64(
diff --git a/src/file_scanner_analyzer/unused_symbols.rs b/src/file_scanner_analyzer/unused_symbols.rs
index 9ef2b316..0d93d862 100644
--- a/src/file_scanner_analyzer/unused_symbols.rs
+++ b/src/file_scanner_analyzer/unused_symbols.rs
@@ -231,6 +231,7 @@ pub(crate) fn find_unused_definitions(
         let mut classlike_only_used_in_tests = false;
 
         if classlike_info.is_production_code
+            && classlike_name != &StrId::HAKANA_TEST_ONLY
            && !referenced_symbols_and_members_in_production
                 .contains(&(*classlike_name, StrId::EMPTY))
         {
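
Note (not part of the patch): below is a minimal, self-contained sketch of the signature-hash idea in the diff, assuming the xxhash-rust crate with its xxh3 feature, which the patch already uses. StrId and AttributeInfo here are simplified stand-ins for the Hakana types; the point being illustrated is that folding each attribute's interned name into the hash with wrapping_add means adding, removing, or renaming an attribute changes a function's signature hash (presumably so that previously cached analysis for that signature is invalidated), while the combination stays overflow-safe and order-insensitive.

// Sketch only: simplified stand-ins for hakana's interned string id and attribute record.
struct StrId(u32);
struct AttributeInfo {
    name: StrId,
}

// Combine the hash of the signature text with the hash of each attribute name,
// mirroring the wrapping_add pattern used in get_function_hashes above.
fn signature_hash_with_attributes(signature_text: &[u8], attributes: &[AttributeInfo]) -> u64 {
    let mut hash = xxhash_rust::xxh3::xxh3_64(signature_text);
    for attribute in attributes {
        // wrapping_add never panics on overflow, and addition is order-insensitive,
        // so only the set of attribute names matters, not their order.
        hash = hash.wrapping_add(xxhash_rust::xxh3::xxh3_64(&attribute.name.0.to_le_bytes()));
    }
    hash
}

fn main() {
    let signature = b"function foo(): void";
    let attrs = [AttributeInfo { name: StrId(7) }, AttributeInfo { name: StrId(42) }];

    let with_attrs = signature_hash_with_attributes(signature, &attrs);
    let without_attrs = signature_hash_with_attributes(signature, &[]);

    // Adding or removing an attribute changes the signature hash.
    assert_ne!(with_attrs, without_attrs);
    println!("{with_attrs:016x} vs {without_attrs:016x}");
}

To build the sketch, a dependency such as xxhash-rust = { version = "0.8", features = ["xxh3"] } is needed in Cargo.toml.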