diff --git a/sapp/json_diagnostics.py b/sapp/json_diagnostics.py
index cafd7dcb..3ba92fac 100644
--- a/sapp/json_diagnostics.py
+++ b/sapp/json_diagnostics.py
@@ -128,7 +128,7 @@ def entries(self, search: str, pretty_print: bool = False) -> List[Dict[str, Any
 
         entries = []
         for callable_name, entry_location in entry_locations.items():
-            for (file_id, offset) in entry_location:
+            for file_id, offset in entry_location:
                 path = lookup_table.file_index[file_id]
                 errors = parser.get_json_from_file_offset(path, offset)
 
diff --git a/sapp/pipeline/model_generator.py b/sapp/pipeline/model_generator.py
index 5740f970..6d748389 100644
--- a/sapp/pipeline/model_generator.py
+++ b/sapp/pipeline/model_generator.py
@@ -141,7 +141,7 @@ def _get_minimum_trace_length(
     ) -> int:
         length = None
         for entry in entries:
-            for (_leaf, depth) in entry.leaves:
+            for _leaf, depth in entry.leaves:
                 if length is None or length > depth:
                     length = depth
         if length is not None:
@@ -443,7 +443,7 @@ def _generate_raw_trace_frame(
 
         leaf_records = []
         leaf_mapping_ids: Set[LeafMapping] = set()
-        for (leaf, depth) in leaves:
+        for leaf, depth in leaves:
             leaf_record = self._get_shared_text(leaf_kind, leaf)
             caller_leaf_id = self.graph.get_transform_normalized_caller_kind_id(
                 leaf_record
@@ -477,7 +477,7 @@ def _generate_raw_trace_frame(
             reachability=FrameReachability.UNREACHABLE,
         )
 
-        for (leaf_record, depth) in leaf_records:
+        for leaf_record, depth in leaf_records:
             self.graph.add_trace_frame_leaf_assoc(trace_frame, leaf_record, depth)
 
         # Note that the "graph._trace_frame_leaf_assoc" table is really associated with
diff --git a/sapp/sarif.py b/sapp/sarif.py
index de132fe7..8a601fa0 100644
--- a/sapp/sarif.py
+++ b/sapp/sarif.py
@@ -64,9 +64,9 @@ def __init__(
             driver_json["informationUri"] = "https://github.com/facebook/pyre-check/"
         elif self.tool == "mariana-trench":
             driver_json["name"] = "Mariana Trench"
-            driver_json[
-                "informationUri"
-            ] = "https://github.com/facebook/mariana-trench/"
+            driver_json["informationUri"] = (
+                "https://github.com/facebook/mariana-trench/"
+            )
         else:
             raise NotImplementedError
 
diff --git a/sapp/tests/fake_object_generator.py b/sapp/tests/fake_object_generator.py
index 535be331..d340aea1 100644
--- a/sapp/tests/fake_object_generator.py
+++ b/sapp/tests/fake_object_generator.py
@@ -120,7 +120,7 @@ def precondition(
         )
         if self.graph:
             self.graph.add_trace_frame(trace_frame)
-            for (leaf, depth) in leaves:
+            for leaf, depth in leaves:
                 # pyre-fixme[16]: `Optional` has no attribute
                 #  `add_trace_frame_leaf_assoc`.
                 self.graph.add_trace_frame_leaf_assoc(trace_frame, leaf, depth)
@@ -171,7 +171,7 @@ def postcondition(
         )
         if self.graph:
             self.graph.add_trace_frame(trace_frame)
-            for (leaf, depth) in leaves:
+            for leaf, depth in leaves:
                 # pyre-fixme[16]: `Optional` has no attribute
                 #  `add_trace_frame_leaf_assoc`.
                 self.graph.add_trace_frame_leaf_assoc(trace_frame, leaf, depth)
diff --git a/sapp/trace_graph.py b/sapp/trace_graph.py
index 85e98acf..c3dc50b8 100644
--- a/sapp/trace_graph.py
+++ b/sapp/trace_graph.py
@@ -67,34 +67,34 @@ def __init__(self) -> None:
 
         self._trace_frames: Dict[int, TraceFrame] = {}
         self._shared_texts: Dict[int, SharedText] = {}
-        self._shared_text_lookup: (
-            DefaultDict[SharedTextKind, Dict[str, int]]
-        ) = defaultdict(dict)
+        self._shared_text_lookup: DefaultDict[SharedTextKind, Dict[str, int]] = (
+            defaultdict(dict)
+        )
 
         self._trace_frame_leaf_assoc: DefaultDict[int, LeafIDToDepthMap] = defaultdict(
             lambda: {}
         )
 
-        self._trace_frame_issue_instance_assoc: DefaultDict[
-            int, Set[int]
-        ] = defaultdict(set)
-        self._issue_instance_trace_frame_assoc: DefaultDict[
-            int, Set[int]
-        ] = defaultdict(set)
-
-        self._trace_frame_annotation_trace_frame_assoc: DefaultDict[
-            int, Set[int]
-        ] = defaultdict(set)
-        self._trace_frame_trace_frame_annotation_assoc: DefaultDict[
-            int, Set[int]
-        ] = defaultdict(set)
-
-        self._issue_instance_shared_text_assoc: DefaultDict[
-            int, Set[int]
-        ] = defaultdict(set)
-        self._shared_text_issue_instance_assoc: DefaultDict[
-            int, Set[int]
-        ] = defaultdict(set)
+        self._trace_frame_issue_instance_assoc: DefaultDict[int, Set[int]] = (
+            defaultdict(set)
+        )
+        self._issue_instance_trace_frame_assoc: DefaultDict[int, Set[int]] = (
+            defaultdict(set)
+        )
+
+        self._trace_frame_annotation_trace_frame_assoc: DefaultDict[int, Set[int]] = (
+            defaultdict(set)
+        )
+        self._trace_frame_trace_frame_annotation_assoc: DefaultDict[int, Set[int]] = (
+            defaultdict(set)
+        )
+
+        self._issue_instance_shared_text_assoc: DefaultDict[int, Set[int]] = (
+            defaultdict(set)
+        )
+        self._shared_text_issue_instance_assoc: DefaultDict[int, Set[int]] = (
+            defaultdict(set)
+        )
 
         self._issue_instance_fix_info: Dict[int, IssueInstanceFixInfo] = {}
 
diff --git a/sapp/trimmed_trace_graph.py b/sapp/trimmed_trace_graph.py
index 67770656..40b02aef 100644
--- a/sapp/trimmed_trace_graph.py
+++ b/sapp/trimmed_trace_graph.py
@@ -365,7 +365,6 @@ def _next_interval(self, interval: Interval, next_frame: TraceFrame) -> Interval
     def _recompute_trace_length_association(
         self, visited: Visited, initial_frame_ids: Set[int], leaf_kind: SharedTextKind
    ) -> int:
-
        """Walks the traces starting at the initial frames with the initial
        corresponding kinds to recompute and store the minimum trace length from each
        reachable frame to the corresponding leaf."""
@@ -685,7 +684,7 @@ def _populate_issues_from_affected_conditions(
 
             # Conditions that call this may have originated from other issues,
             # keep searching for parent conditions leading to this one.
-            for (next_frame, frame_leaves) in self._get_predecessor_frames(
+            for next_frame, frame_leaves in self._get_predecessor_frames(
                 graph, leaves, condition
             ):
                 if len(frame_leaves) > 0:
@@ -786,7 +785,7 @@ def _add_trace_frame(self, graph: TraceGraph, trace_frame: TraceFrame) -> None:
         self._populate_shared_text(graph, trace_frame.filename_id)
         self._populate_shared_text(graph, trace_frame.caller_id)
         self._populate_shared_text(graph, trace_frame.callee_id)
-        for (leaf_id, depth) in graph._trace_frame_leaf_assoc[trace_frame_id].items():
+        for leaf_id, depth in graph._trace_frame_leaf_assoc[trace_frame_id].items():
             leaf = graph._shared_texts[leaf_id]
             if leaf_id not in self._shared_texts:
                 self.add_shared_text(leaf)
diff --git a/sapp/ui/filter_predicates.py b/sapp/ui/filter_predicates.py
index 42015bf8..19f8c93f 100644
--- a/sapp/ui/filter_predicates.py
+++ b/sapp/ui/filter_predicates.py
@@ -27,8 +27,7 @@ class Predicate(ABC):
 
 class QueryPredicate(Predicate):
     @abstractmethod
-    def apply(self, query: Query[_Q]) -> Query[_Q]:
-        ...
+    def apply(self, query: Query[_Q]) -> Query[_Q]: ...
 
 
 class InRange(Generic[_T], QueryPredicate):
@@ -80,8 +79,7 @@ def apply(self, query: Query[_Q]) -> Query[_Q]:
 
 class IssuePredicate(Predicate):
     @abstractmethod
-    def apply(self, issues: List[IssueQueryResult]) -> List[IssueQueryResult]:
-        ...
+    def apply(self, issues: List[IssueQueryResult]) -> List[IssueQueryResult]: ...
 
 
 class HasAll(IssuePredicate):
diff --git a/sapp/ui/interactive.py b/sapp/ui/interactive.py
index ddec5ac5..1076a373 100644
--- a/sapp/ui/interactive.py
+++ b/sapp/ui/interactive.py
@@ -774,7 +774,7 @@ def leaves(
             query = queries.leaves(
                 session=session, kind=kind, run_id=self._current_run_id
             )
-            for (_, name) in query:
+            for _, name in query:
                 leaves[name] += 1
 
         results: Iterable[Tuple[str, int]]
@@ -1211,9 +1211,9 @@ def _group_trace_frames(
         self, trace_frames: Iterable[TraceFrameQueryResult], limit: int
     ) -> Dict[Tuple[str, str], List[TraceFrameQueryResult]]:
         """Buckets together trace frames that have the same caller:caller_port."""
-        caller_buckets: DefaultDict[
-            Tuple[str, str], List[TraceFrameQueryResult]
-        ] = defaultdict(list)
+        caller_buckets: DefaultDict[Tuple[str, str], List[TraceFrameQueryResult]] = (
+            defaultdict(list)
+        )
         for trace_frame in itertools.islice(trace_frames, limit):
             caller_buckets[(trace_frame.caller, trace_frame.caller_port)].append(
                 trace_frame
diff --git a/sapp/ui/query_result.py b/sapp/ui/query_result.py
index 85a55e7c..63368913 100644
--- a/sapp/ui/query_result.py
+++ b/sapp/ui/query_result.py
@@ -60,21 +60,31 @@ def from_record(record: Any) -> "IssueQueryResult":
             is_new_issue=record.is_new_issue,
             min_trace_length_to_sources=record.min_trace_length_to_sources,
             min_trace_length_to_sinks=record.min_trace_length_to_sinks,
-            features=frozenset(record.concatenated_features.split(","))
-            if record.concatenated_features
-            else frozenset(),
-            source_names=frozenset(record.concatenated_source_names.split(","))
-            if record.concatenated_source_names
-            else frozenset(),
-            source_kinds=frozenset(record.concatenated_source_kinds.split(","))
-            if record.concatenated_source_kinds
-            else frozenset(),
-            sink_names=frozenset(record.concatenated_sink_names.split(","))
-            if record.concatenated_sink_names
-            else frozenset(),
-            sink_kinds=frozenset(record.concatenated_sink_kinds.split(","))
-            if record.concatenated_sink_kinds
-            else frozenset(),
+            features=(
+                frozenset(record.concatenated_features.split(","))
+                if record.concatenated_features
+                else frozenset()
+            ),
+            source_names=(
+                frozenset(record.concatenated_source_names.split(","))
+                if record.concatenated_source_names
+                else frozenset()
+            ),
+            source_kinds=(
+                frozenset(record.concatenated_source_kinds.split(","))
+                if record.concatenated_source_kinds
+                else frozenset()
+            ),
+            sink_names=(
+                frozenset(record.concatenated_sink_names.split(","))
+                if record.concatenated_sink_names
+                else frozenset()
+            ),
+            sink_kinds=(
+                frozenset(record.concatenated_sink_kinds.split(","))
+                if record.concatenated_sink_kinds
+                else frozenset()
+            ),
             similar_issues=set(),
             run_id=record.run_id,
         )