Commit

Merge pull request #1628 from mandiant/feat/flake8-simplify
introduce flake8-simplify
williballenthin committed Jul 12, 2023
2 parents a35bf4c + 65e8300 commit 4e02e36
Showing 28 changed files with 114 additions and 131 deletions.
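
Most of the hunks below are mechanical rewrites suggested by the flake8-simplify (SIM) checks: dropping the redundant None default from dict.get(), replacing negated equality tests with !=, delegating generators with "yield from", and collapsing search loops into any(). A minimal illustrative sketch of these recurring patterns, using invented names rather than capa code:

    def simplified(d: dict, items: list) -> bool:
        # dict.get() already defaults to None
        # before: value = d.get("key", None)
        value = d.get("key")

        # prefer != over a negated equality test
        # before: if not value == 1:
        if value != 1:
            pass

        # collapse a search loop into any()
        # before:
        #   for item in items:
        #       if item > 3:
        #           return True
        #   return False
        return any(item > 3 for item in items)

    def generate(items):
        # delegate to the underlying iterable instead of re-yielding each element
        # before:
        #   for item in items:
        #       yield item
        yield from items
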
11 changes: 10 additions & 1 deletion .github/flake8.ini
@@ -13,7 +13,16 @@ extend-ignore =
# B010 Do not call setattr with a constant attribute value
B010,
# G200 Logging statement uses exception in arguments
G200
G200,
# SIM102 Use a single if-statement instead of nested if-statements
# doesn't provide a space for commenting or logical separation of conditions
SIM102,
# SIM114 Use logical or and a single body
# makes logic trees too complex
SIM114,
# SIM117 Use 'with Foo, Bar:' instead of multiple with statements
# makes lines too long
SIM117


per-file-ignores =
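
The three newly ignored checks are opted out for readability, as the comments in the hunk above explain. A hypothetical sketch (invented names, not capa code) of what each would otherwise flag:

    def example(seg, path_a, path_b):
        interesting = False

        # SIM102 wants these collapsed into a single if-statement, but the
        # nested form leaves room to comment each condition separately.
        if seg is not None:
            # only external segments are interesting here
            if seg.is_external:
                interesting = True

        # SIM114 wants branches with identical bodies merged with "or",
        # which can make the decision tree harder to follow.
        if seg is None:
            kind = "unknown"
        elif not interesting:
            kind = "unknown"  # same body, intentionally kept as its own branch
        else:
            kind = "external"

        # SIM117 wants a single "with open(...) as a, open(...) as b:" statement,
        # which quickly exceeds the line-length limit with realistic expressions.
        with open(path_a, "rb") as a:
            with open(path_b, "rb") as b:
                return kind, a.read(), b.read()
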
2 changes: 1 addition & 1 deletion capa/features/extractors/binja/basicblock.py
@@ -130,7 +130,7 @@ def is_mov_imm_to_stack(il: MediumLevelILInstruction) -> bool:
if il.src.operation != MediumLevelILOperation.MLIL_CONST:
return False

if not il.dest.source_type == VariableSourceType.StackVariableSourceType:
if il.dest.source_type != VariableSourceType.StackVariableSourceType:
return False

return True
4 changes: 1 addition & 3 deletions capa/features/extractors/binja/extractor.py
@@ -53,9 +53,7 @@ def get_basic_blocks(self, fh: FunctionHandle) -> Iterator[BBHandle]:
mlil_lookup[mlil_bb.source_block.start] = mlil_bb

for bb in f.basic_blocks:
mlil_bb = None
if bb.start in mlil_lookup:
mlil_bb = mlil_lookup[bb.start]
mlil_bb = mlil_lookup.get(bb.start)

yield BBHandle(address=AbsoluteVirtualAddress(bb.start), inner=(bb, mlil_bb))

15 changes: 5 additions & 10 deletions capa/features/extractors/binja/insn.py
@@ -155,8 +155,7 @@ def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)

for result in results:
yield result
yield from results


def extract_insn_bytes_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
@@ -318,8 +317,7 @@ def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)

for result in results:
yield result
yield from results


def is_nzxor_stack_cookie(f: Function, bb: BinjaBasicBlock, llil: LowLevelILInstruction) -> bool:
@@ -375,8 +373,7 @@ def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)

for result in results:
yield result
yield from results


def extract_insn_mnemonic_features(
@@ -438,8 +435,7 @@ def llil_checker(il: LowLevelILInstruction, parent: LowLevelILOperation, index:
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)

for result in results:
yield result
yield from results


def extract_insn_segment_access_features(
@@ -466,8 +462,7 @@ def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)

for result in results:
yield result
yield from results


def extract_insn_cross_section_cflow(
12 changes: 6 additions & 6 deletions capa/features/extractors/dnfile/extractor.py
@@ -53,19 +53,19 @@ def __init__(self, pe: dnfile.dnPE):
self.types[type_.token] = type_

def get_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.imports.get(token, None)
return self.imports.get(token)

def get_native_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.native_imports.get(token, None)
return self.native_imports.get(token)

def get_method(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.methods.get(token, None)
return self.methods.get(token)

def get_field(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.fields.get(token, None)
return self.fields.get(token)

def get_type(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.types.get(token, None)
return self.types.get(token)


class DnfileFeatureExtractor(FeatureExtractor):
@@ -120,7 +120,7 @@ def get_functions(self) -> Iterator[FunctionHandle]:
address: DNTokenAddress = DNTokenAddress(insn.operand.value)

# record call to destination method; note: we only consider MethodDef methods for destinations
dest: Optional[FunctionHandle] = methods.get(address, None)
dest: Optional[FunctionHandle] = methods.get(address)
if dest is not None:
dest.ctx["calls_to"].add(fh.address)

4 changes: 2 additions & 2 deletions capa/features/extractors/dnfile/helpers.py
@@ -52,7 +52,7 @@ def resolve_dotnet_token(pe: dnfile.dnPE, token: Token) -> Union[dnfile.base.MDT
return InvalidToken(token.value)
return user_string

table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table, None)
table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table)
if table is None:
# table index is not valid
return InvalidToken(token.value)
@@ -204,7 +204,7 @@ def get_dotnet_managed_methods(pe: dnfile.dnPE) -> Iterator[DnType]:
continue

token: int = calculate_dotnet_token_value(method.table.number, method.row_index)
access: Optional[str] = accessor_map.get(token, None)
access: Optional[str] = accessor_map.get(token)

method_name: str = method.row.Name
if method_name.startswith(("get_", "set_")):
2 changes: 1 addition & 1 deletion capa/features/extractors/dnfile/types.py
@@ -9,7 +9,7 @@
from typing import Optional


class DnType(object):
class DnType:
def __init__(self, token: int, class_: str, namespace: str = "", member: str = "", access: Optional[str] = None):
self.token: int = token
self.access: Optional[str] = access
3 changes: 1 addition & 2 deletions capa/features/extractors/elf.py
@@ -706,8 +706,7 @@ def get_symbols(self) -> Iterator[Symbol]:
return a tuple: (name, value, size, info, other, shndx)
for each symbol contained in the symbol table
"""
for symbol in self.symbols:
yield symbol
yield from self.symbols

@classmethod
def from_Elf(cls, ElfBinary) -> Optional["SymTab"]:
28 changes: 11 additions & 17 deletions capa/features/extractors/ida/helpers.py
@@ -122,7 +122,7 @@ def get_file_externs() -> Dict[int, Tuple[str, str, int]]:
externs = {}

for seg in get_segments(skip_header_segments=True):
if not (seg.type == ida_segment.SEG_XTRN):
if seg.type != ida_segment.SEG_XTRN:
continue

for ea in idautils.Functions(seg.start_ea, seg.end_ea):
@@ -275,20 +275,18 @@ def is_op_offset(insn: idaapi.insn_t, op: idaapi.op_t) -> bool:

def is_sp_modified(insn: idaapi.insn_t) -> bool:
"""determine if instruction modifies SP, ESP, RSP"""
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
if op.reg == idautils.procregs.sp.reg and is_op_write(insn, op):
# register is stack and written
return True
return False
return any(
op.reg == idautils.procregs.sp.reg and is_op_write(insn, op)
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
)


def is_bp_modified(insn: idaapi.insn_t) -> bool:
"""check if instruction modifies BP, EBP, RBP"""
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
if op.reg == idautils.procregs.bp.reg and is_op_write(insn, op):
# register is base and written
return True
return False
return any(
op.reg == idautils.procregs.bp.reg and is_op_write(insn, op)
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
)


def is_frame_register(reg: int) -> bool:
Expand Down Expand Up @@ -334,10 +332,7 @@ def mask_op_val(op: idaapi.op_t) -> int:

def is_function_recursive(f: idaapi.func_t) -> bool:
"""check if function is recursive"""
for ref in idautils.CodeRefsTo(f.start_ea, True):
if f.contains(ref):
return True
return False
return any(f.contains(ref) for ref in idautils.CodeRefsTo(f.start_ea, True))


def is_basic_block_tight_loop(bb: idaapi.BasicBlock) -> bool:
@@ -386,8 +381,7 @@ def find_data_reference_from_insn(insn: idaapi.insn_t, max_depth: int = 10) -> i
def get_function_blocks(f: idaapi.func_t) -> Iterator[idaapi.BasicBlock]:
"""yield basic blocks contained in specified function"""
# leverage idaapi.FC_NOEXT flag to ignore useless external blocks referenced by the function
for block in idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT)):
yield block
yield from idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT))


def is_basic_block_return(bb: idaapi.BasicBlock) -> bool:
4 changes: 2 additions & 2 deletions capa/features/extractors/ida/insn.py
@@ -216,7 +216,7 @@ def extract_insn_offset_features(

p_info = capa.features.extractors.ida.helpers.get_op_phrase_info(op)

op_off = p_info.get("offset", None)
op_off = p_info.get("offset")
if op_off is None:
continue

@@ -447,7 +447,7 @@ def extract_insn_cross_section_cflow(
insn: idaapi.insn_t = ih.inner

for ref in idautils.CodeRefsFrom(insn.ea, False):
if ref in get_imports(fh.ctx).keys():
if ref in get_imports(fh.ctx):
# ignore API calls
continue
if not idaapi.getseg(ref):
5 changes: 2 additions & 3 deletions capa/features/extractors/strings.py
@@ -9,6 +9,7 @@
# See the License for the specific language governing permissions and limitations under the License.

import re
import contextlib
from collections import namedtuple

ASCII_BYTE = r" !\"#\$%&\'\(\)\*\+,-\./0123456789:;<=>\?@ABCDEFGHIJKLMNOPQRSTUVWXYZ\[\]\^_`abcdefghijklmnopqrstuvwxyz\{\|\}\\\~\t".encode(
@@ -81,7 +82,5 @@ def extract_unicode_strings(buf, n=4):
reg = b"((?:[%s]\x00){%d,})" % (ASCII_BYTE, n)
r = re.compile(reg)
for match in r.finditer(buf):
try:
with contextlib.suppress(UnicodeDecodeError):
yield String(match.group().decode("utf-16"), match.start())
except UnicodeDecodeError:
pass
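
The strings.py change above swaps a try/except/pass around the UTF-16 decode for contextlib.suppress, the standard-library helper that runs its block and silently discards the named exception. A small standalone sketch of the equivalence (toy function, not capa code):

    import contextlib

    def decode_utf16(buf: bytes):
        # before:
        #   try:
        #       return buf.decode("utf-16")
        #   except UnicodeDecodeError:
        #       pass
        with contextlib.suppress(UnicodeDecodeError):
            return buf.decode("utf-16")
        return None
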
4 changes: 2 additions & 2 deletions capa/features/extractors/viv/indirect_calls.py
@@ -7,7 +7,7 @@
# See the License for the specific language governing permissions and limitations under the License.

import collections
from typing import Set, List, Deque, Tuple, Union, Optional
from typing import Set, List, Deque, Tuple, Optional

import envi
import vivisect.const
@@ -71,7 +71,7 @@ class NotFoundError(Exception):
pass


def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Union[int, None]]:
def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Optional[int]]:
"""
scan backwards from the given address looking for assignments to the given register.
if a constant, return that value.
4 changes: 1 addition & 3 deletions capa/features/extractors/viv/insn.py
@@ -410,9 +410,7 @@ def extract_insn_obfs_call_plus_5_characteristic_features(f, bb, ih: InsnHandle)
if insn.va + 5 == insn.opers[0].getOperValue(insn):
yield Characteristic("call $+5"), ih.address

if isinstance(insn.opers[0], envi.archs.i386.disasm.i386ImmMemOper) or isinstance(
insn.opers[0], envi.archs.amd64.disasm.Amd64RipRelOper
):
if isinstance(insn.opers[0], (envi.archs.i386.disasm.i386ImmMemOper, envi.archs.amd64.disasm.Amd64RipRelOper)):
if insn.va + 5 == insn.opers[0].getOperAddr(insn):
yield Characteristic("call $+5"), ih.address

4 changes: 2 additions & 2 deletions capa/ida/plugin/cache.py
@@ -197,11 +197,11 @@ def find_file_capabilities(self, ruleset: RuleSet) -> Tuple[FeatureSet, MatchRes
return features, matches

def _get_cached_func_node(self, fh: FunctionHandle) -> Optional[CapaRuleGenFeatureCacheNode]:
f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address, None)
f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address)
if f_node is None:
# function is not in our cache, do extraction now
self._find_function_and_below_features(fh)
f_node = self.func_nodes.get(fh.address, None)
f_node = self.func_nodes.get(fh.address)
return f_node

def get_all_function_features(self, fh: FunctionHandle) -> FeatureSet:
8 changes: 4 additions & 4 deletions capa/ida/plugin/form.py
@@ -1204,19 +1204,19 @@ def update_rule_status(self, rule_text: str):
self.set_rulegen_status(f"Failed to create function rule matches from rule set ({e})")
return

if rule.scope == capa.rules.Scope.FUNCTION and rule.name in func_matches.keys():
if rule.scope == capa.rules.Scope.FUNCTION and rule.name in func_matches:
is_match = True
elif rule.scope == capa.rules.Scope.BASIC_BLOCK and rule.name in bb_matches.keys():
elif rule.scope == capa.rules.Scope.BASIC_BLOCK and rule.name in bb_matches:
is_match = True
elif rule.scope == capa.rules.Scope.INSTRUCTION and rule.name in insn_matches.keys():
elif rule.scope == capa.rules.Scope.INSTRUCTION and rule.name in insn_matches:
is_match = True
elif rule.scope == capa.rules.Scope.FILE:
try:
_, file_matches = self.rulegen_feature_cache.find_file_capabilities(ruleset)
except Exception as e:
self.set_rulegen_status(f"Failed to create file rule matches from rule set ({e})")
return
if rule.name in file_matches.keys():
if rule.name in file_matches:
is_match = True
else:
is_match = False
2 changes: 1 addition & 1 deletion capa/ida/plugin/hooks.py
@@ -30,7 +30,7 @@ def preprocess_action(self, name):
@retval must be 0
"""
self.process_action_handle = self.process_action_hooks.get(name, None)
self.process_action_handle = self.process_action_hooks.get(name)

if self.process_action_handle:
self.process_action_handle(self.process_action_meta)
3 changes: 1 addition & 2 deletions capa/ida/plugin/item.py
@@ -130,8 +130,7 @@ def setData(self, column: int, value: str):

def children(self) -> Iterator["CapaExplorerDataItem"]:
"""yield children"""
for child in self._children:
yield child
yield from self._children

def removeChildren(self):
"""remove children"""
2 changes: 1 addition & 1 deletion capa/ida/plugin/model.py
@@ -628,7 +628,7 @@ def render_capa_doc_feature(
matched_rule_source = ""

# check if match is a matched rule
matched_rule = doc.rules.get(feature.match, None)
matched_rule = doc.rules.get(feature.match)
if matched_rule is not None:
matched_rule_source = matched_rule.source

3 changes: 1 addition & 2 deletions capa/ida/plugin/view.py
@@ -1224,8 +1224,7 @@ def load_function_context_menu_actions(self, data):
yield self.new_action(*action)

# add default actions
for action in self.load_default_context_menu_actions(data):
yield action
yield from self.load_default_context_menu_actions(data)

def load_default_context_menu(self, pos, item, model_index):
"""create default custom context menu
7 changes: 3 additions & 4 deletions capa/main.py
@@ -327,10 +327,9 @@ def pbar(s, *args, **kwargs):


def has_rule_with_namespace(rules: RuleSet, capabilities: MatchResults, namespace: str) -> bool:
for rule_name in capabilities.keys():
if rules.rules[rule_name].meta.get("namespace", "").startswith(namespace):
return True
return False
return any(
rules.rules[rule_name].meta.get("namespace", "").startswith(namespace) for rule_name in capabilities.keys()
)


def is_internal_rule(rule: Rule) -> bool:
5 changes: 3 additions & 2 deletions capa/render/vverbose.py
@@ -6,7 +6,7 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.

from typing import Dict, Iterable
from typing import Dict, Iterable, Optional

import tabulate

@@ -129,6 +129,7 @@ def render_feature(ostream, match: rd.Match, feature: frzf.Feature, indent=0):
ostream.write(" " * indent)

key = feature.type
value: Optional[str]
if isinstance(feature, frzf.BasicBlockFeature):
# i don't think it makes sense to have standalone basic block features.
# we don't parse them from rules, only things like: `count(basic block) > 1`
@@ -140,7 +141,7 @@ def render_feature(ostream, match: rd.Match, feature: frzf.Feature, indent=0):
value = feature.class_
else:
# convert attributes to dictionary using aliased names, if applicable
value = feature.dict(by_alias=True).get(key, None)
value = feature.dict(by_alias=True).get(key)

if value is None:
raise ValueError(f"{key} contains None")
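
The vverbose.py hunk also declares value: Optional[str] before the branches that assign it, presumably so a type checker sees one declared type for every assignment and the later "if value is None" guard narrows it to str. A toy sketch of the idea (invented feature shape, not capa's model):

    from typing import Optional

    def render_value(feature: dict) -> str:
        # declare the type once; every branch below must satisfy it,
        # and the None check afterwards narrows it to str
        value: Optional[str]
        if "class" in feature:
            value = feature["class"]
        else:
            value = feature.get("value")

        if value is None:
            raise ValueError("feature has no renderable value")
        return value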