Updated pylintrc to version 3.2 #4909

Merged (1 commit, Oct 6, 2024)
Changes from all commits:
.pylintrc (24 changes: 18 additions & 6 deletions)

@@ -1,4 +1,4 @@
-# Pylint 3.0.x configuration file
+# Pylint 3.2.x configuration file
 #
 # This file is generated by l2tdevtools update-dependencies.py, any dependency
 # related changes should be made in dependencies.ini.
@@ -29,6 +29,7 @@ clear-cache-post-run=no
 # A comma-separated list of package or module names from where C extensions may
 # be loaded. Extensions are loading into the active Python interpreter and may
 # run arbitrary code.
+# extension-pkg-allow-list=
 extension-pkg-allow-list=pybde,pycaes,pycreg,pyesedb,pyevt,pyevtx,pyewf,pyfcrypto,pyfsapfs,pyfsext,pyfsfat,pyfshfs,pyfsntfs,pyfsxfs,pyfvde,pyfwnt,pyfwsi,pylnk,pyluksde,pymodi,pymsiecf,pyolecf,pyphdi,pyqcow,pyregf,pyscca,pysigscan,pysmdev,pysmraw,pytsk3,pyvhdi,pyvmdk,pyvsapm,pyvsgpt,pyvshadow,pyvslvm,xattr,yara,zstd

 # A comma-separated list of package or module names from where C extensions may
@@ -63,10 +64,11 @@ ignore-paths=
 # Emacs file locks
 ignore-patterns=^\.#

-# List of module names for which member attributes should not be checked
-# (useful for modules/projects where namespaces are manipulated during runtime
-# and thus existing member attributes cannot be deduced by static analysis). It
-# supports qualified module names, as well as Unix pattern matching.
+# List of module names for which member attributes should not be checked and
+# will not be imported (useful for modules/projects where namespaces are
+# manipulated during runtime and thus existing member attributes cannot be
+# deduced by static analysis). It supports qualified module names, as well as
+# Unix pattern matching.
 ignored-modules=

 # Python code to execute, usually for sys.path manipulation such as
@@ -85,11 +87,16 @@ limit-inference-results=100

 # List of plugins (as comma separated values of python module names) to load,
 # usually to register additional checkers.
+# load-plugins=
 load-plugins=pylint.extensions.docparams

 # Pickle collected data for later comparisons.
 persistent=yes

+# Resolve imports to .pyi stubs if available. May reduce no-member messages and
+# increase not-an-iterable messages.
+prefer-stubs=no
+
 # Minimum Python version to use for version dependent checks. Will default to
 # the version used to run pylint.
 py-version=3.12
@@ -440,7 +447,6 @@ confidence=HIGH,
 # --enable=similarities". If you want to run only the classes checker, but have
 # no Warning level messages displayed, use "--disable=all --enable=classes
 # --disable=W".
-
 disable=assignment-from-none,
         bad-inline-option,
         consider-using-f-string,
@@ -478,6 +484,7 @@ disable=assignment-from-none,
 # either give multiple identifier separated by comma (,) or put this option
 # multiple time (only on the command line, not in the configuration file where
 # it should appear only once). See also the "--disable" option for examples.
+# enable=
 enable=c-extension-no-member


@@ -510,6 +517,11 @@ max-nested-blocks=5
 # printed.
 never-returning-functions=sys.exit,argparse.parse_error

+# Let 'consider-using-join' be raised when the separator to join on would be
+# non-empty (resulting in expected fixes of the type: ``"- " + " -
+# ".join(items)``)
+suggest-join-with-non-empty-separator=yes
+

 [REPORTS]
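Two of the options above are new in pylint 3.2: prefer-stubs (left at its
default of no here) and suggest-join-with-non-empty-separator. The latter
extends the consider-using-join refactoring check to loops that accumulate a
string with a non-empty separator. A minimal sketch (hypothetical code, not
from this PR) of what the option affects:

  items = ['one', 'two', 'three']

  # With suggest-join-with-non-empty-separator=yes, consider-using-join is
  # also raised for this accumulation loop:
  result = ''
  for item in items:
    result += ' - ' + item

  # The suggested rewrite, equivalent for a non-empty list:
  result = ' - ' + ' - '.join(items)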
plaso/analysis/hash_tagging.py (2 changes: 2 additions & 0 deletions)

@@ -106,6 +106,8 @@ def _MakeRequestAndDecodeJSON(self, url, method, **kwargs):
     if method_upper not in ('GET', 'POST'):
       raise ValueError('Method {0:s} is not supported')

+    response = None
+
     try:
       if method_upper == 'GET':
         response = requests.get(url, timeout=self._REQUEST_TIMEOUT, **kwargs)
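This hunk, and the matching None pre-assignments in image_export_tool.py,
bsm.py, ganalytics.py, srum.py, java_idx.py and spotlight_storedb.py below,
address the possibly-used-before-assignment check introduced in pylint 3.2.
The guard above already raises for anything other than GET or POST, but
static analysis does not track that, so the name is bound up front instead. A
minimal standalone sketch (hypothetical names) of the warning and the fix:

  def fetch(method):
    # Without this pre-assignment, pylint 3.2 reports
    # possibly-used-before-assignment at the return statement, because
    # 'response' is only bound if one of the branches executes.
    response = None

    if method == 'GET':
      response = 'got'
    elif method == 'POST':
      response = 'posted'

    return response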
plaso/cli/image_export_tool.py (3 changes: 3 additions & 0 deletions)

@@ -218,6 +218,9 @@ def _ExtractDataStream(
     if not destination_path.endswith(os.path.sep):
       destination_path = destination_path + os.path.sep

+    # TODO: refactor
+    path = None
+
     target_path = os.path.join(target_directory, target_filename)
     if target_path.startswith(destination_path):
       path = target_path[len(destination_path):]
plaso/cli/tool_options.py (3 changes: 1 addition & 2 deletions)

@@ -53,8 +53,7 @@ def ListAnalysisPlugins(self):

     column_width = 10
     for name, _, _ in analysis_plugin_info:
-      if len(name) > column_width:
-        column_width = len(name)
+      column_width = max(column_width, len(name))

     table_view = views.ViewsFactory.GetTableView(
         self._views_format_type, column_names=['Name', 'Description'],
plaso/cli/tools.py (3 changes: 1 addition & 2 deletions)

@@ -334,8 +334,7 @@ def ListTimeZones(self):
     """Lists the time zones."""
     max_length = 0
     for time_zone_name in pytz.all_timezones:
-      if len(time_zone_name) > max_length:
-        max_length = len(time_zone_name)
+      max_length = max(max_length, len(time_zone_name))

     utc_date_time = datetime.datetime.utcnow()

plaso/cli/views.py (4 changes: 1 addition & 3 deletions)

@@ -164,9 +164,7 @@ def AddRow(self, values):
     """
     super(CLITableView, self).AddRow(values)

-    value_length = len(values[0])
-    if value_length > self._column_width:
-      self._column_width = value_length
+    self._column_width = max(self._column_width, len(values[0]))

   def Write(self, output_writer):
     """Writes the table to the output writer.
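This change and the two before it (plaso/cli/tool_options.py and
plaso/cli/tools.py) replace the same compare-and-assign idiom with the max()
builtin, the rewrite that pylint's consider-using-max-builtin check suggests.
A small sketch (hypothetical values) showing the two forms are equivalent:

  column_width = 10
  for name in ('short', 'a considerably longer name'):
    # Before:
    #   if len(name) > column_width:
    #     column_width = len(name)
    # After, keeping the larger of the running maximum and the new length:
    column_width = max(column_width, len(name))

  assert column_width == len('a considerably longer name')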
plaso/engine/extractors.py (20 changes: 7 additions & 13 deletions)

@@ -391,14 +391,12 @@ def _ExtractPathSpecsFromDirectory(self, file_entry, depth=0):
       if sub_file_entry.IsDirectory():
         sub_directories.append(sub_file_entry)

-      for path_spec in self._ExtractPathSpecsFromFile(sub_file_entry):
-        yield path_spec
+      yield from self._ExtractPathSpecsFromFile(sub_file_entry)

     for sub_file_entry in sub_directories:
       try:
-        for path_spec in self._ExtractPathSpecsFromDirectory(
-            sub_file_entry, depth=depth + 1):
-          yield path_spec
+        yield from self._ExtractPathSpecsFromDirectory(
+            sub_file_entry, depth=depth + 1)

       except (
           IOError, dfvfs_errors.AccessError, dfvfs_errors.BackEndError,
@@ -463,15 +461,12 @@ def _ExtractPathSpecsFromFileSystem(
     if find_specs:
       searcher = file_system_searcher.FileSystemSearcher(
           file_system, path_spec)
-      for extracted_path_spec in searcher.Find(find_specs=find_specs):
-        yield extracted_path_spec
+      yield from searcher.Find(find_specs=find_specs)

     elif recurse_file_system:
       file_entry = file_system.GetFileEntryByPathSpec(path_spec)
       if file_entry:
-        for extracted_path_spec in self._ExtractPathSpecsFromDirectory(
-            file_entry):
-          yield extracted_path_spec
+        yield from self._ExtractPathSpecsFromDirectory(file_entry)

     else:
       yield path_spec
@@ -535,8 +530,7 @@ def ExtractPathSpecs(
         yield path_spec

       else:
-        for extracted_path_spec in self._ExtractPathSpecsFromFileSystem(
+        yield from self._ExtractPathSpecsFromFileSystem(
             path_spec, find_specs=find_specs,
             recurse_file_system=recurse_file_system,
-            resolver_context=resolver_context):
-          yield extracted_path_spec
+            resolver_context=resolver_context)
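The extractors.py hunks, together with the similar changes in
yaml_timeliner_file.py, yaml_formatters_file.py, bufferlib.py,
merge_helpers.py and plist_plugins/interface.py below, replace re-yield loops
with PEP 380 generator delegation. For plain iteration the two forms are
equivalent; yield from additionally forwards send() and throw() to the
subgenerator and propagates its return value, which the loop form discards. A
self-contained sketch:

  def _numbers():
    yield from (1, 2, 3)

  def loop_form():
    for value in _numbers():
      yield value

  def delegating_form():
    yield from _numbers()

  assert list(loop_form()) == list(delegating_form()) == [1, 2, 3]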
plaso/engine/processing_status.py (1 change: 1 addition & 0 deletions)

@@ -340,6 +340,7 @@ def _UpdateProcessStatus(
     process_status.pid = pid
     process_status.status = status

+    # pylint: disable=consider-using-min-builtin
     if used_memory > 0:
       process_status.used_memory = used_memory

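Here the pylint suggestion is suppressed rather than applied:
consider-using-min-builtin pattern-matches the guarded assignment, but a
min()-style rewrite would not preserve behavior, since the intent is to keep
the previously recorded value whenever the new sample is not positive. A
small sketch (hypothetical values) of the difference:

  def update(previous, used_memory):
    value = previous
    # pylint: disable=consider-using-min-builtin
    if used_memory > 0:
      value = used_memory
    return value

  assert update(42, 100) == 100  # a positive sample replaces the value
  assert update(42, -1) == 42    # a non-positive sample keeps the old value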
plaso/engine/yaml_timeliner_file.py (3 changes: 1 addition & 2 deletions)

@@ -117,5 +117,4 @@ def ReadFromFile(self, path):
       TimelinerDefinition: a timeliner definition.
     """
     with open(path, 'r', encoding='utf-8') as file_object:
-      for yaml_definition in self._ReadFromFileObject(file_object):
-        yield yaml_definition
+      yield from self._ReadFromFileObject(file_object)
plaso/formatters/yaml_formatters_file.py (5 changes: 3 additions & 2 deletions)

@@ -232,6 +232,8 @@ def _ReadFormatterDefinition(self, formatter_definition_values):
           'Invalid event formatter definition: {0:s} missing source.'.format(
               data_type))

+    formatter = None
+
     if formatter_type == 'basic':
       formatter = interface.BasicEventFormatter(
           data_type=data_type, format_string=message,
@@ -286,5 +288,4 @@ def ReadFromFile(self, path):
       EventFormatter: an event formatter.
     """
     with open(path, 'r', encoding='utf-8') as file_object:
-      for yaml_definition in self._ReadFromFileObject(file_object):
-        yield yaml_definition
+      yield from self._ReadFromFileObject(file_object)
plaso/lib/bufferlib.py (3 changes: 1 addition & 2 deletions)

@@ -55,8 +55,7 @@ def Clear(self):

   def Flush(self):
     """Returns a generator for all items and clear the buffer."""
-    for item in self:
-      yield item
+    yield from self
     self.Clear()

   def GetCurrent(self):
plaso/multi_process/merge_helpers.py (4 changes: 1 addition & 3 deletions)

@@ -43,9 +43,7 @@ def _GetAttributeContainers(self, task_storage_reader):
       AttributeContainer: attribute container.
     """
     for container_type in self._CONTAINER_TYPES:
-      for container in task_storage_reader.GetAttributeContainers(
-          container_type):
-        yield container
+      yield from task_storage_reader.GetAttributeContainers(container_type)

     self.fully_merged = True

plaso/parsers/bsm.py (4 changes: 4 additions & 0 deletions)

@@ -438,10 +438,14 @@ def _FormatInAddrExToken(self, token_data):
       dict[str, str]: token values.
     """
     protocol = self._NETWORK_PROTOCOLS.get(token_data.net_type, 'UNKNOWN')
+
     if token_data.net_type == 4:
       ip_address = self._FormatPackedIPv6Address(token_data.ip_address[:4])
     elif token_data.net_type == 16:
       ip_address = self._FormatPackedIPv6Address(token_data.ip_address)
+    else:
+      ip_address = None
+
     return {
         'protocols': protocol,
         'net_type': token_data.net_type,
plaso/parsers/cookie_plugins/ganalytics.py (16 changes: 16 additions & 0 deletions)

@@ -177,6 +177,11 @@ def _ParseCookieData(

       number_of_sessions = self._ParseIntegerValue(fields[5])

+    else:
+      domain_hash = None
+      number_of_sessions = None
+      visitor_identifier = None
+
     event_data = GoogleAnalyticsUtmaEventData()
     event_data.cookie_name = self.COOKIE_NAME
     event_data.domain_hash = domain_hash
@@ -245,6 +250,11 @@ def _ParseCookieData(
       else:
         date_time = self._ParsePosixTime(fields[3])

+    else:
+      date_time = None
+      domain_hash = None
+      number_of_pages_viewed = None
+
     event_data = GoogleAnalyticsUtmbEventData()
     event_data.cookie_name = self.COOKIE_NAME
     event_data.domain_hash = domain_hash
@@ -366,6 +376,12 @@ def _ParseCookieData(
         key, _, value = variable.partition('=')
         extra_attributes[key] = urlparse.unquote(value)

+    else:
+      date_time = None
+      domain_hash = None
+      number_of_sessions = None
+      number_of_sources = None
+
     event_data = GoogleAnalyticsUtmzEventData()
     event_data.cookie_name = self.COOKIE_NAME
     event_data.domain_hash = domain_hash
plaso/parsers/esedb_plugins/srum.py (2 changes: 2 additions & 0 deletions)

@@ -238,6 +238,8 @@ def _ConvertValueBinaryDataToFloatingPointValue(self, value):
       floating_point_map = self._GetDataTypeMap('float32le')
     elif value_length == 8:
       floating_point_map = self._GetDataTypeMap('float64le')
+    else:
+      floating_point_map = None

     try:
       return self._ReadStructureFromByteStream(value, 0, floating_point_map)
plaso/parsers/java_idx.py (4 changes: 4 additions & 0 deletions)

@@ -100,6 +100,8 @@ def ParseFileObject(self, parser_mediator, file_object):
       section1_map = self._GetDataTypeMap('java_idx_603_section1')
     elif file_header.format_version == 605:
       section1_map = self._GetDataTypeMap('java_idx_605_section1')
+    else:
+      section1_map = None

     try:
       section1, data_size = self._ReadStructureFromFileObject(
@@ -116,6 +118,8 @@
     elif file_header.format_version in (603, 604, 605):
       file_offset = 128
       section2_map = self._GetDataTypeMap('java_idx_603_section2')
+    else:
+      section2_map = None

     try:
       section2, data_size = self._ReadStructureFromFileObject(
plaso/parsers/plist_plugins/interface.py (12 changes: 5 additions & 7 deletions)

@@ -803,9 +803,8 @@ def _RecurseKey(self, plist_item, depth=15, key_path=''):

     elif isinstance(plist_item, (list, tuple)):
       for sub_plist_item in plist_item:
-        for subkey_values in self._RecurseKey(
-            sub_plist_item, depth=depth - 1, key_path=key_path):
-          yield subkey_values
+        yield from self._RecurseKey(
+            sub_plist_item, depth=depth - 1, key_path=key_path)

     elif hasattr(plist_item, 'items'):
       for subkey_name, value in plist_item.items():
@@ -818,10 +817,9 @@

         for sub_plist_item in value:
           if isinstance(sub_plist_item, dict):
-            subkey_path = '{0:s}/{1:s}'.format(key_path, subkey_name)
-            for subkey_values in self._RecurseKey(
-                sub_plist_item, depth=depth - 1, key_path=subkey_path):
-              yield subkey_values
+            subkey_path = '/'.join([key_path, subkey_name])
+            yield from self._RecurseKey(
+                sub_plist_item, depth=depth - 1, key_path=subkey_path)

   # pylint: disable=arguments-differ
   @abc.abstractmethod
plaso/parsers/spotlight_storedb.py (12 changes: 7 additions & 5 deletions)

@@ -1097,6 +1097,9 @@ def _ReadMetadataAttributePageValues(
       data_type_map = self._GetDataTypeMap(
          'spotlight_store_db_property_value21')

+    else:
+      data_type_map = None
+
     page_data_offset = 12
     page_data_size = page_header.used_page_size - 20
     page_value_index = 0
@@ -1242,12 +1245,11 @@ def _ReadMetadataAttributeStreamsMap(
     stream_values = self._ReadStreamsMap(parent_file_entry, streams_map_number)

     if streams_map_number == 1:
-      data_type_map = self._GetDataTypeMap(
-          'spotlight_metadata_attribute_type')
-
+      data_type_map = self._GetDataTypeMap('spotlight_metadata_attribute_type')
     elif streams_map_number == 2:
-      data_type_map = self._GetDataTypeMap(
-          'spotlight_metadata_attribute_value')
+      data_type_map = self._GetDataTypeMap('spotlight_metadata_attribute_value')
+    else:
+      data_type_map = None

     for index, stream_value in enumerate(stream_values):
       if index == 0:
plaso/parsers/text_plugins/syslog.py (5 changes: 2 additions & 3 deletions)

@@ -264,15 +264,14 @@ def _ParseSshdMessageBody(self, body):
     key = keys[0]
     structure = structure[0]

-    if key not in ('failed_connection', 'login', 'opened_connection'):
-      return None
-
     if key == 'failed_connection':
       event_data = SyslogSSHFailedConnectionEventData()
     elif key == 'login':
       event_data = SyslogSSHLoginEventData()
     elif key == 'opened_connection':
       event_data = SyslogSSHOpenedConnectionEventData()
+    else:
+      return None

     event_data.authentication_method = structure.get(
         'authentication_method', None)
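Unlike the parsers that pre-assign None, this hunk satisfies the same
pylint 3.2 check by restructuring the control flow: the membership guard
becomes the else branch of the dispatch chain, so every path either binds
event_data or returns. A minimal sketch (hypothetical event values) of the
restructured shape:

  def make_event_data(key):
    if key == 'failed_connection':
      event_data = {'type': 'failed_connection'}
    elif key == 'login':
      event_data = {'type': 'login'}
    elif key == 'opened_connection':
      event_data = {'type': 'opened_connection'}
    else:
      return None
    return event_data

  assert make_event_data('login') == {'type': 'login'}
  assert make_event_data('unknown') is None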
plaso/parsers/windefender_history.py (4 changes: 2 additions & 2 deletions)

@@ -121,8 +121,8 @@ def _ReadThreatTrackingData(self, threat_tracking_data, file_offset):
     else:
       header = self._ReadThreatTrackingHeader(threat_tracking_data)

-      values_data_offset = header.header_size + 4
-      values_data_end_offset = header.total_data_size
+    values_data_offset = header.header_size + 4
+    values_data_end_offset = header.total_data_size

     while values_data_offset < values_data_end_offset:
       threat_value, data_size = self._ReadThreatTrackingValue(