diff --git a/hhvm-patch.diff b/hhvm-patch.diff
index 15582989..79b0cf26 100644
--- a/hhvm-patch.diff
+++ b/hhvm-patch.diff
@@ -177,10 +177,10 @@ index e7f91b56e96..7be4f159ecf 100644
  pub struct Names {
 diff --git a/hphp/hack/src/oxidized/aast_visitor/node_impl_gen.rs b/hphp/hack/src/oxidized/aast_visitor/node_impl_gen.rs
-index 384622926b5..54fa1671824 100644
+index cfc3b81042c..e3e7a5b8fc0 100644
 --- a/hphp/hack/src/oxidized/aast_visitor/node_impl_gen.rs
 +++ b/hphp/hack/src/oxidized/aast_visitor/node_impl_gen.rs
-@@ -2237,7 +2237,6 @@ impl Node for Typedef {
+@@ -2247,7 +2247,6 @@ impl Node for Typedef {
      self.file_attributes.accept(c, v)?;
      self.mode.accept(c, v)?;
      self.vis.accept(c, v)?;
@@ -189,10 +189,10 @@ index 384622926b5..54fa1671824 100644
      self.emit_id.accept(c, v)?;
      self.is_ctx.accept(c, v)?;
 diff --git a/hphp/hack/src/oxidized/aast_visitor/node_mut_impl_gen.rs b/hphp/hack/src/oxidized/aast_visitor/node_mut_impl_gen.rs
-index c6da9b503e4..64a8c2caf9a 100644
+index 2df37947608..1943892a3e3 100644
 --- a/hphp/hack/src/oxidized/aast_visitor/node_mut_impl_gen.rs
 +++ b/hphp/hack/src/oxidized/aast_visitor/node_mut_impl_gen.rs
-@@ -2237,7 +2237,6 @@ impl NodeMut for Typedef {
+@@ -2247,7 +2247,6 @@ impl NodeMut for Typedef {
      self.file_attributes.accept(c, v)?;
      self.mode.accept(c, v)?;
      self.vis.accept(c, v)?;
@@ -200,32 +200,6 @@ index c6da9b503e4..64a8c2caf9a 100644
      self.span.accept(c, v)?;
      self.emit_id.accept(c, v)?;
      self.is_ctx.accept(c, v)?;
-diff --git a/hphp/hack/src/oxidized/lib.rs b/hphp/hack/src/oxidized/lib.rs
-index 6b89bf88bb8..00242f8bd3d 100644
---- a/hphp/hack/src/oxidized/lib.rs
-+++ b/hphp/hack/src/oxidized/lib.rs
-@@ -3,7 +3,7 @@
- // This source code is licensed under the MIT license found in the
- // LICENSE file in the "hack" directory of this source tree.
- #![feature(box_patterns)]
--#![feature(drain_filter)]
-+#![feature(extract_if)]
-
- #[macro_use]
- extern crate rust_to_ocaml_attr;
-diff --git a/hphp/hack/src/oxidized/manual/custom_error_config_impl.rs b/hphp/hack/src/oxidized/manual/custom_error_config_impl.rs
-index 30d98a66d71..d6914543ba2 100644
---- a/hphp/hack/src/oxidized/manual/custom_error_config_impl.rs
-+++ b/hphp/hack/src/oxidized/manual/custom_error_config_impl.rs
-@@ -39,7 +39,7 @@ use crate::validation_err::ValidationErr;
- impl CustomErrorConfig {
-     pub fn new(mut errors: Vec) -> Self {
-         let invalid = errors
--            .drain_filter(|e| {
-+            .extract_if(|e| {
-                 let mut env = ValidationEnv::default();
-                 !e.validate(&mut env)
-             })
 diff --git a/hphp/hack/src/oxidized_by_ref/Cargo.toml b/hphp/hack/src/oxidized_by_ref/Cargo.toml
 index de0a509ccff..4b7bd0f2947 100644
 --- a/hphp/hack/src/oxidized_by_ref/Cargo.toml
 +++ b/hphp/hack/src/oxidized_by_ref/Cargo.toml
@@ -406,9 +380,18 @@ index 33ba4ae40cf..bebe6e2e909 100644
  }
  }
 diff --git a/hphp/hack/src/parser/aast_parser.rs b/hphp/hack/src/parser/aast_parser.rs
-index 01b39e624aa..8d04ce3b9ca 100644
+index c0bba7f63f8..93d79e22298 100644
 --- a/hphp/hack/src/parser/aast_parser.rs
 +++ b/hphp/hack/src/parser/aast_parser.rs
+@@ -5,7 +5,7 @@
+ // LICENSE file in the "hack" directory of this source tree.
+ + use std::sync::Arc; +-use std::time::Instant; ++ + + use bumpalo::Bump; + use hash::HashSet; @@ -85,11 +85,11 @@ impl<'src> AastParser { indexed_source_text: &'src IndexedSourceText<'src>, default_unstable_features: HashSet, @@ -443,7 +426,7 @@ index 01b39e624aa..8d04ce3b9ca 100644 match language { Language::Hack => {} _ => return Err(Error::NotAHackFile()), -@@ -166,14 +166,14 @@ impl<'src> AastParser { +@@ -165,14 +165,14 @@ impl<'src> AastParser { ); stack_limit::reset(); let ret = lower(&mut lowerer_env, tree.root()); @@ -460,7 +443,7 @@ index 01b39e624aa..8d04ce3b9ca 100644 stack_limit::reset(); let syntax_errors = Self::check_syntax_error( env, -@@ -186,7 +186,7 @@ impl<'src> AastParser { +@@ -185,7 +185,7 @@ impl<'src> AastParser { let lowerer_parsing_errors = lowerer_env.parsing_errors().to_vec(); let errors = lowerer_env.hh_errors().to_vec(); let lint_errors = lowerer_env.lint_errors().to_vec(); @@ -469,7 +452,7 @@ index 01b39e624aa..8d04ce3b9ca 100644 Ok(ParserResult { file_mode: mode, -@@ -198,9 +198,9 @@ impl<'src> AastParser { +@@ -197,9 +197,9 @@ impl<'src> AastParser { lint_errors, profile: ParserProfile { lower_peak, @@ -482,7 +465,7 @@ index 01b39e624aa..8d04ce3b9ca 100644 error_peak, arena_bytes: arena.allocated_bytes() as u64, ..Default::default() -@@ -334,7 +334,7 @@ impl<'src> AastParser { +@@ -333,7 +333,7 @@ impl<'src> AastParser { disable_hh_ignore_error: env.parser_options.po_disable_hh_ignore_error, allowed_decl_fixme_codes: &env.parser_options.po_allowed_decl_fixme_codes, }; @@ -505,10 +488,10 @@ index 8db2f3a3271..cadcc02dbbf 100644 } _ => { diff --git a/hphp/hack/src/parser/lowerer/lowerer.rs b/hphp/hack/src/parser/lowerer/lowerer.rs -index 2635de421ad..cc70f07f0e8 100644 +index 978ee9dd0b7..1bf7e45930d 100644 --- a/hphp/hack/src/parser/lowerer/lowerer.rs +++ b/hphp/hack/src/parser/lowerer/lowerer.rs -@@ -3648,12 +3648,12 @@ fn p_markup<'a>(node: S<'a>, env: &mut Env<'a>) -> Result { +@@ -3491,12 +3491,12 @@ fn p_markup<'a>(node: S<'a>, env: &mut Env<'a>) -> Result { let markup_hashbang = &c.hashbang; let markup_suffix = &c.suffix; let pos = p_pos(node, env); diff --git a/src/analyzer/function_analysis_data.rs b/src/analyzer/function_analysis_data.rs index 9d770674..87dd0942 100644 --- a/src/analyzer/function_analysis_data.rs +++ b/src/analyzer/function_analysis_data.rs @@ -27,6 +27,7 @@ pub struct FunctionAnalysisData { pub closures: FxHashMap, pub closure_spans: Vec<(u32, u32)>, pub replacements: BTreeMap<(u32, u32), Replacement>, + pub insertions: BTreeMap>, pub current_stmt_offset: Option, pub expr_fixme_positions: FxHashMap<(u32, u32), StmtStart>, pub symbol_references: SymbolReferences, @@ -64,6 +65,7 @@ impl FunctionAnalysisData { if_true_assertions: FxHashMap::default(), if_false_assertions: FxHashMap::default(), replacements: BTreeMap::new(), + insertions: BTreeMap::new(), current_stmt_offset, hh_fixmes: file_source.hh_fixmes.clone(), symbol_references: SymbolReferences::new(), @@ -558,6 +560,13 @@ impl FunctionAnalysisData { self.replacements.insert(offsets, replacement); true } + + pub fn insert_at(&mut self, insertion_point: u32, replacement: String) { + self.insertions + .entry(insertion_point) + .or_insert_with(Vec::new) + .push(replacement); + } } fn get_hakana_fixmes_and_ignores( diff --git a/src/analyzer/functionlike_analyzer.rs b/src/analyzer/functionlike_analyzer.rs index 714ed047..e2b71655 100644 --- a/src/analyzer/functionlike_analyzer.rs +++ b/src/analyzer/functionlike_analyzer.rs @@ -745,6 +745,12 @@ impl<'a> FunctionLikeAnalyzer<'a> 
{
             .extend(analysis_data.replacements);
     }
 
+    if !analysis_data.insertions.is_empty() {
+        parent_analysis_data
+            .insertions
+            .extend(analysis_data.insertions);
+    }
+
     for issue in analysis_data.issues_to_emit {
         parent_analysis_data.maybe_add_issue(
             issue,
@@ -1400,6 +1406,14 @@ pub(crate) fn update_analysis_result_with_tast(
             .extend(analysis_data.replacements);
     }
 
+    if !analysis_data.insertions.is_empty() {
+        analysis_result
+            .insertions
+            .entry(*file_path)
+            .or_insert_with(BTreeMap::new)
+            .extend(analysis_data.insertions);
+    }
+
     let mut issues_to_emit = analysis_data.issues_to_emit;
 
     issues_to_emit.sort_by(|a, b| a.pos.start_offset.partial_cmp(&b.pos.start_offset).unwrap());
diff --git a/src/analyzer/stmt/control_analyzer.rs b/src/analyzer/stmt/control_analyzer.rs
index adcb6eb8..68e35e1b 100644
--- a/src/analyzer/stmt/control_analyzer.rs
+++ b/src/analyzer/stmt/control_analyzer.rs
@@ -415,25 +415,32 @@ pub(crate) fn get_control_actions(
                 control_actions.retain(|action| *action != ControlAction::None);
             }
             aast::Stmt_::Block(block_stmts) => {
-                let mut block_actions = get_control_actions(
+                if handle_block(
                     codebase,
                     interner,
                     resolved_names,
-                    &block_stmts.0,
+                    &block_stmts.1,
                     analysis_data,
-                    break_context.clone(),
+                    &break_context,
                     return_is_exit,
-                );
-
-                if !block_actions.contains(&ControlAction::None) {
-                    control_actions.extend(block_actions);
-                    control_actions.retain(|action| *action != ControlAction::None);
-
+                    &mut control_actions,
+                ) {
+                    return control_actions;
+                }
+            }
+            aast::Stmt_::Concurrent(block_stmts) => {
+                if handle_block(
+                    codebase,
+                    interner,
+                    resolved_names,
+                    &block_stmts,
+                    analysis_data,
+                    &break_context,
+                    return_is_exit,
+                    &mut control_actions,
+                ) {
                     return control_actions;
                 }
-
-                block_actions.retain(|action| *action != ControlAction::None);
-                control_actions.extend(block_actions);
             }
             aast::Stmt_::Awaitall(boxed) => {
                 let mut block_actions = get_control_actions(
@@ -475,6 +482,39 @@ pub(crate) fn get_control_actions(
     control_actions
 }
 
+fn handle_block(
+    codebase: &CodebaseInfo,
+    interner: &Interner,
+    resolved_names: &FxHashMap,
+    block_stmts: &aast::Block<(), ()>,
+    analysis_data: Option<&FunctionAnalysisData>,
+    break_context: &Vec,
+    return_is_exit: bool,
+    control_actions: &mut FxHashSet,
+) -> bool {
+    let mut block_actions = get_control_actions(
+        codebase,
+        interner,
+        resolved_names,
+        &block_stmts.0,
+        analysis_data,
+        break_context.clone(),
+        return_is_exit,
+    );
+
+    if !block_actions.contains(&ControlAction::None) {
+        control_actions.extend(block_actions);
+        control_actions.retain(|action| *action != ControlAction::None);
+
+        return true;
+    }
+
+    block_actions.retain(|action| *action != ControlAction::None);
+    control_actions.extend(block_actions);
+
+    false
+}
+
 fn handle_call(
     call_expr: &CallExpr,
     resolved_names: &FxHashMap,
diff --git a/src/analyzer/stmt_analyzer.rs b/src/analyzer/stmt_analyzer.rs
index 21e702fb..2d09d210 100644
--- a/src/analyzer/stmt_analyzer.rs
+++ b/src/analyzer/stmt_analyzer.rs
@@ -207,7 +207,7 @@ pub(crate) fn analyze(
             }
         }
         aast::Stmt_::Block(boxed) => {
-            for boxed_stmt in boxed {
+            for boxed_stmt in &boxed.1 {
                 analyze(
                     statements_analyzer,
                     boxed_stmt,
@@ -232,7 +232,18 @@ pub(crate) fn analyze(
             );
             return Err(AnalysisError::UserError);
         }
-        aast::Stmt_::DeclareLocal(_) => {}
+        aast::Stmt_::DeclareLocal(_) => {},
+        aast::Stmt_::Concurrent(boxed) => {
+            for boxed_stmt in &boxed.0 {
+                analyze(
+                    statements_analyzer,
+                    boxed_stmt,
+                    analysis_data,
+                    context,
+                    loop_scope,
+                )?;
+            }
+        },
     }
 
     context.cond_referenced_var_ids = FxHashSet::default();
@@ -399,7 +410,7 @@ fn detect_unused_statement_expressions(
 
 fn analyze_awaitall(
     boxed: (
-        &Vec<(Option, aast::Expr<(), ()>)>,
+        &Vec<(oxidized::tast::Lid, aast::Expr<(), ()>)>,
         &Vec>,
     ),
     statements_analyzer: &StatementsAnalyzer,
@@ -413,45 +424,43 @@ fn analyze_awaitall(
     for (assignment_id, expr) in boxed.0 {
         expression_analyzer::analyze(statements_analyzer, expr, analysis_data, context, &mut None)?;
 
-        if let Some(assignment_id) = assignment_id {
-            let mut assignment_type = None;
-
-            if let Some(t) = analysis_data.get_expr_type(expr.pos()) {
-                let parent_nodes = t.parent_nodes.clone();
-                if t.is_single() {
-                    let inner = t.get_single();
-                    if let TAtomic::TNamedObject {
-                        name: STR_AWAITABLE,
-                        type_params: Some(type_params),
-                        ..
-                    } = inner
-                    {
-                        let mut new = type_params.get(0).unwrap().clone();
+        let mut assignment_type = None;
+
+        if let Some(t) = analysis_data.get_expr_type(expr.pos()) {
+            let parent_nodes = t.parent_nodes.clone();
+            if t.is_single() {
+                let inner = t.get_single();
+                if let TAtomic::TNamedObject {
+                    name: STR_AWAITABLE,
+                    type_params: Some(type_params),
+                    ..
+                } = inner
+                {
+                    let mut new = type_params.get(0).unwrap().clone();
 
-                        new.parent_nodes = parent_nodes;
-                        assignment_type = Some(new)
-                    }
+                    new.parent_nodes = parent_nodes;
+                    assignment_type = Some(new)
                 }
             }
+        }
 
-            assignment_analyzer::analyze(
-                statements_analyzer,
-                (
-                    &ast_defs::Bop::Eq(None),
-                    &aast::Expr(
-                        (),
-                        assignment_id.0.clone(),
-                        aast::Expr_::Lvar(Box::new(assignment_id.clone())),
-                    ),
-                    None,
+        assignment_analyzer::analyze(
+            statements_analyzer,
+            (
+                &ast_defs::Bop::Eq(None),
+                &aast::Expr(
+                    (),
+                    assignment_id.0.clone(),
+                    aast::Expr_::Lvar(Box::new(assignment_id.clone())),
                 ),
-                &stmt.0,
-                assignment_type.as_ref(),
-                analysis_data,
-                context,
-                false,
-            )?;
-        }
+            None,
+            ),
+            &stmt.0,
+            assignment_type.as_ref(),
+            analysis_data,
+            context,
+            false,
+        )?;
     }
 
     for stmt in boxed.1 {
diff --git a/src/cli/lib.rs b/src/cli/lib.rs
index 3b6bd0b8..d69cda55 100644
--- a/src/cli/lib.rs
+++ b/src/cli/lib.rs
@@ -642,8 +642,12 @@ fn do_fix(
         None,
     );
 
-    if let Ok((analysis_result, successfull_run_data)) = result {
-        update_files(analysis_result, &root_dir, &successfull_run_data.interner);
+    if let Ok((mut analysis_result, successfull_run_data)) = result {
+        update_files(
+            &mut analysis_result,
+            &root_dir,
+            &successfull_run_data.interner,
+        );
     }
 }
 
@@ -690,8 +694,12 @@ fn do_remove_unused_fixmes(
         None,
     );
 
-    if let Ok((analysis_result, successful_run_data)) = result {
-        update_files(analysis_result, root_dir, &successful_run_data.interner);
+    if let Ok((mut analysis_result, successful_run_data)) = result {
+        update_files(
+            &mut analysis_result,
+            root_dir,
+            &successful_run_data.interner,
+        );
     }
 }
 
@@ -757,8 +765,12 @@ fn do_add_fixmes(
         None,
     );
 
-    if let Ok((analysis_result, successful_run_data)) = result {
-        update_files(analysis_result, root_dir, &successful_run_data.interner);
+    if let Ok((mut analysis_result, successful_run_data)) = result {
+        update_files(
+            &mut analysis_result,
+            root_dir,
+            &successful_run_data.interner,
+        );
     }
 }
 
@@ -837,8 +849,12 @@ fn do_migrate(
         None,
     );
 
-    if let Ok((analysis_result, successful_run_data)) = result {
-        update_files(analysis_result, root_dir, &successful_run_data.interner);
+    if let Ok((mut analysis_result, successful_run_data)) = result {
+        update_files(
+            &mut analysis_result,
+            root_dir,
+            &successful_run_data.interner,
+        );
     }
 }
 
@@ -1209,69 +1225,98 @@ fn write_output_files(
     }
 }
 
-fn update_files(analysis_result: AnalysisResult, root_dir: &String, interner: &Interner) {
-    for (relative_path, replacements) in analysis_result
+fn update_files(analysis_result: &mut AnalysisResult, root_dir: &String, interner: &Interner) {
+    let mut replacement_and_insertion_keys = analysis_result
         .replacements
         .iter()
-        .map(|(k, v)| (k.get_relative_path(interner, root_dir), v))
+        .map(|(k, _)| *k)
+        .collect::>();
+    replacement_and_insertion_keys.extend(analysis_result.insertions.iter().map(|(k, _)| *k));
+
+    for (relative_path, original_path) in replacement_and_insertion_keys
+        .into_iter()
+        .map(|v| (v.get_relative_path(interner, root_dir), v))
         .collect::>()
     {
         println!("updating {}", relative_path);
-
         let file_path = format!("{}/{}", root_dir, relative_path);
-
         let file_contents = fs::read_to_string(&file_path).unwrap();
         let mut file = File::create(&file_path).unwrap();
 
-        file.write_all(replace_contents(file_contents, replacements).as_bytes())
+        let replacements = analysis_result
+            .replacements
+            .remove(&original_path)
+            .unwrap_or_else(BTreeMap::default);
+
+        let insertions = analysis_result
+            .insertions
+            .remove(&original_path)
+            .unwrap_or_else(BTreeMap::default);
+
+        file.write_all(replace_contents(file_contents, replacements, insertions).as_bytes())
             .unwrap_or_else(|_| panic!("Could not write file {}", &file_path));
     }
 }
 
 fn replace_contents(
     mut file_contents: String,
-    replacements: &BTreeMap<(u32, u32), Replacement>,
+    replacements: BTreeMap<(u32, u32), Replacement>,
+    insertions: BTreeMap>,
 ) -> String {
-    for ((mut start, mut end), replacement) in replacements.iter().rev() {
-        match replacement {
-            Replacement::Remove => {
-                file_contents = file_contents[..start as usize].to_string()
-                    + &*file_contents[end as usize..].to_string();
-            }
-            Replacement::TrimPrecedingWhitespace(beg_of_line) => {
-                let potential_whitespace =
-                    file_contents[(*beg_of_line as usize)..start as usize].to_string();
-                if potential_whitespace.trim() == "" {
-                    start = *beg_of_line as u32;
-
-                    if beg_of_line > &0
-                        && &file_contents[((*beg_of_line as usize) - 1)..start as usize] == "\n"
-                    {
-                        start -= 1;
-                    }
-                }
+    let mut replacements = replacements
+        .into_iter()
+        .map(|(k, v)| (k, vec![v]))
+        .collect::>();
+
+    for (offset, insertion) in insertions {
+        replacements
+            .entry((offset, offset))
+            .or_insert_with(Vec::new)
+            .extend(insertion.into_iter().map(|s| Replacement::Substitute(s)));
+    }
 
-                if &file_contents[end as usize..end as usize + 1] == "," {
-                    end += 1;
+    for ((mut start, mut end), replacements) in replacements.iter().rev() {
+        for replacement in replacements {
+            match replacement {
+                Replacement::Remove => {
+                    file_contents = file_contents[..start as usize].to_string()
+                        + &*file_contents[end as usize..].to_string();
                 }
+                Replacement::TrimPrecedingWhitespace(beg_of_line) => {
+                    let potential_whitespace =
+                        file_contents[(*beg_of_line as usize)..start as usize].to_string();
+                    if potential_whitespace.trim() == "" {
+                        start = *beg_of_line as u32;
+
+                        if beg_of_line > &0
+                            && &file_contents[((*beg_of_line as usize) - 1)..start as usize] == "\n"
+                        {
+                            start -= 1;
+                        }
+                    }
 
-                file_contents = file_contents[..start as usize].to_string()
-                    + &*file_contents[end as usize..].to_string();
-            }
-            Replacement::TrimTrailingWhitespace(end_of_line) => {
-                let potential_whitespace =
-                    file_contents[end as usize..(*end_of_line as usize)].to_string();
+                    if &file_contents[end as usize..end as usize + 1] == "," {
+                        end += 1;
+                    }
 
-                if potential_whitespace.trim() == "" {
-                    end = *end_of_line as u32;
+                    file_contents = file_contents[..start as usize].to_string()
+                        + &*file_contents[end as usize..].to_string();
                 }
+                Replacement::TrimTrailingWhitespace(end_of_line) => {
+                    let potential_whitespace =
+                        file_contents[end as usize..(*end_of_line as usize)].to_string();
 
-                file_contents = file_contents[..start as usize].to_string()
-                    + &*file_contents[end as usize..].to_string();
-            }
-            Replacement::Substitute(string) => {
-                file_contents = file_contents[..start as usize].to_string()
-                    + string
-                    + &*file_contents[end as usize..].to_string();
+                    if potential_whitespace.trim() == "" {
+                        end = *end_of_line as u32;
+                    }
+
+                    file_contents = file_contents[..start as usize].to_string()
+                        + &*file_contents[end as usize..].to_string();
+                }
+                Replacement::Substitute(string) => {
+                    file_contents = file_contents[..start as usize].to_string()
+                        + string
+                        + &*file_contents[end as usize..].to_string();
+                }
             }
         }
     }
diff --git a/src/cli/test_runners/test_runner.rs b/src/cli/test_runners/test_runner.rs
index 5c50c5b7..a43d0a23 100644
--- a/src/cli/test_runners/test_runner.rs
+++ b/src/cli/test_runners/test_runner.rs
@@ -12,6 +12,7 @@ use rand::seq::SliceRandom;
 use rand::SeedableRng;
 use rustc_hash::FxHashMap;
 use rustc_hash::FxHashSet;
+use std::collections::BTreeMap;
 use std::env;
 use std::fs;
 use std::io;
@@ -164,13 +165,16 @@ impl TestRunner {
         }
 
         if dir.contains("/migrations/") {
-            let migration_name = dir_parts.get(1).unwrap().to_string();
             let replacements_path = dir.clone() + "/replacements.txt";
             let replacements = fs::read_to_string(replacements_path).unwrap().to_string();
 
             analysis_config.migration_symbols = replacements
                 .lines()
-                .map(|v| (migration_name.clone(), v.to_string()))
+                .map(|v| {
+                    let mut parts = v.split(",").collect::>();
+                    let first_part = parts.remove(0);
+                    return (first_part.to_string(), parts.join(","));
+                })
                 .collect();
         } else if dir.contains("/fix/") {
             let issue_name = dir_parts.get(1).unwrap().to_string();
@@ -272,18 +276,28 @@ impl TestRunner {
         let input_contents = fs::read_to_string(&input_file).unwrap();
         let expected_output_contents = fs::read_to_string(&output_file).unwrap();
 
-        let result = result.unwrap();
+        let mut result = result.unwrap();
 
         *total_time_in_analysis += result.0.time_in_analysis;
 
         let input_file_path = FilePath(result.1.interner.get(&input_file).unwrap());
 
-        let output_contents =
-            if let Some(file_replacements) = result.0.replacements.get(&input_file_path) {
-                crate::replace_contents(input_contents, file_replacements)
-            } else {
-                input_contents
-            };
+        let replacements = result
+            .0
+            .replacements
+            .remove(&input_file_path)
+            .unwrap_or(BTreeMap::default());
+        let insertions = result
+            .0
+            .insertions
+            .remove(&input_file_path)
+            .unwrap_or(BTreeMap::default());
+
+        let output_contents = if !replacements.is_empty() || !insertions.is_empty() {
+            crate::replace_contents(input_contents, replacements, insertions)
+        } else {
+            input_contents
+        };
 
         return if output_contents == expected_output_contents {
             (".".to_string(), Some(result.1), Some(result.0))
diff --git a/src/code_info/analysis_result.rs b/src/code_info/analysis_result.rs
index e6b49a0b..4a80dbd8 100644
--- a/src/code_info/analysis_result.rs
+++ b/src/code_info/analysis_result.rs
@@ -24,6 +24,7 @@ pub struct AnalysisResult {
     pub emitted_issues: FxHashMap>,
     pub emitted_definition_issues: FxHashMap>,
     pub replacements: FxHashMap>,
+    pub insertions: FxHashMap>>,
     pub mixed_source_counts: FxHashMap>,
     pub program_dataflow_graph: DataFlowGraph,
     pub symbol_references: SymbolReferences,
@@ -40,6 +41,7 @@ impl AnalysisResult {
             emitted_issues: FxHashMap::default(),
             emitted_definition_issues: FxHashMap::default(),
             replacements: FxHashMap::default(),
+            insertions: FxHashMap::default(),
             mixed_source_counts: FxHashMap::default(),
             program_dataflow_graph: DataFlowGraph::new(program_dataflow_graph_kind),
             issue_counts: FxHashMap::default(),
@@ -56,6 +58,7 @@ impl AnalysisResult {
                 .extend(issues);
         }
         self.replacements.extend(other.replacements);
+        self.insertions.extend(other.insertions);
         for (id, c) in other.mixed_source_counts {
             self.mixed_source_counts
                 .entry(id)
diff --git a/third-party/hhvm b/third-party/hhvm
index 9bbefb18..c46470d9 160000
--- a/third-party/hhvm
+++ b/third-party/hhvm
@@ -1 +1 @@
-Subproject commit 9bbefb18098d8480d15bd6d3e6fd07a8b2e2a716
+Subproject commit c46470d9e78fa32604115c59ed91aa55b7b101e7