chore(python): format and lint with ruff
jbms committed Oct 5, 2023
1 parent 28a66ca commit 6848afb
Showing 117 changed files with 5,116 additions and 3,705 deletions.
2 changes: 1 addition & 1 deletion .editorconfig
@@ -7,8 +7,8 @@ end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
max_line_length = 100

[*.py]
indent_style = space
indent_size = 4
max_line_length = 88
1 change: 0 additions & 1 deletion .github/workflows/build.yml
@@ -56,7 +56,6 @@ jobs:
strategy:
matrix:
python-version:
- '3.8'
- '3.9'
- '3.10'
- '3.11'
23 changes: 9 additions & 14 deletions cors_webserver.py
@@ -23,38 +23,33 @@
can connect to the web server.
"""

from __future__ import print_function, absolute_import

import argparse
import os
import sys

try:
# Python3 and Python2 with future package.
from http.server import SimpleHTTPRequestHandler, HTTPServer
except ImportError:
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from http.server import HTTPServer, SimpleHTTPRequestHandler


class RequestHandler(SimpleHTTPRequestHandler):
def end_headers(self):
self.send_header('Access-Control-Allow-Origin', '*')
self.send_header("Access-Control-Allow-Origin", "*")
SimpleHTTPRequestHandler.end_headers(self)


class Server(HTTPServer):
protocol_version = 'HTTP/1.1'
protocol_version = "HTTP/1.1"

def __init__(self, server_address):
HTTPServer.__init__(self, server_address, RequestHandler)


if __name__ == '__main__':
if __name__ == "__main__":
ap = argparse.ArgumentParser()
ap.add_argument('-p', '--port', type=int, default=9000, help='TCP port to listen on')
ap.add_argument('-a', '--bind', default='127.0.0.1', help='Bind address')
ap.add_argument('-d', '--directory', default='.', help='Directory to serve')
ap.add_argument(
"-p", "--port", type=int, default=9000, help="TCP port to listen on"
)
ap.add_argument("-a", "--bind", default="127.0.0.1", help="Bind address")
ap.add_argument("-d", "--directory", default=".", help="Directory to serve")

args = ap.parse_args()
os.chdir(args.directory)
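The reformatted cors_webserver.py still adds the Access-Control-Allow-Origin: * header to every response via RequestHandler.end_headers. A minimal sketch for verifying that behavior against a locally running instance, assuming the default bind address and port from the argparse defaults above; the use of urllib.request here is an illustration, not part of the commit:

import urllib.request

# Any path works; SimpleHTTPRequestHandler serves the chosen directory, and
# the point of interest is the header added in RequestHandler.end_headers.
with urllib.request.urlopen("http://127.0.0.1:9000/") as response:
    assert response.status == 200
    assert response.headers["Access-Control-Allow-Origin"] == "*"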
21 changes: 21 additions & 0 deletions noxfile.py
@@ -0,0 +1,21 @@
import nox

nox.options.reuse_existing_virtualenvs = True


@nox.session
def lint(session):
session.install("-r", "python/requirements-lint.txt")
session.run("ruff", "check", ".")


@nox.session
def format(session):
session.install("-r", "python/requirements-lint.txt")
session.run("ruff", "format", ".")


@nox.session
def mypy(session):
session.install("-r", "python/requirements-mypy.txt")
session.run("mypy", ".")
50 changes: 49 additions & 1 deletion pyproject.toml
@@ -1,2 +1,50 @@
[build-system]
requires = ["setuptools>=64", "wheel", "setuptools_scm", "oldest-supported-numpy"]
requires = [
"setuptools>=64",
"wheel",
"setuptools_scm",
"oldest-supported-numpy",
]

[tool.ruff]
target-version = "py39"
select = [
"E", # pycodestyle
"W", # pycodestyle
"F", # pyflakes
"I", # isort
"UP", # pyupgrade
"NPY", # numpy
]
ignore = [
"E501", # Line length regulated by ruff format
]

[tool.mypy]
show_error_codes = true
pretty = true
exclude = [
"noxfile\\.py",
"^src/",
"/guide_video_recorder/",
"^docs/",
]

[[tool.mypy.overrides]]
module = [
"apitools",
"apitools.*",
"numcodecs",
"google",
"google.*",
"zarr",
"zarrita",
"tensorstore",
"dask",
"dask.*",
"scipy",
"scipy.*",
"cloudvolume",
"trio",
]
ignore_missing_imports = true
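The [tool.ruff] table targets Python 3.9 and enables the pycodestyle (E/W), pyflakes (F), isort (I), pyupgrade (UP), and NumPy (NPY) rule groups, while E501 is ignored because line length is left to ruff format. An illustrative sketch, not taken from the repository, of the kind of code these settings flag:

# Illustrative only; each comment names the rule that would flag the line
# under the configuration above.
from __future__ import print_function  # UP010: unnecessary __future__ import
import sys  # F401: imported but unused
import os

print('cwd is %s' % os.getcwd())  # UP031: printf-style formatting; ruff format also normalizes the quotes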
2 changes: 1 addition & 1 deletion python/build_tools/cibuildwheel.sh
@@ -1,7 +1,7 @@
#!/bin/bash

export CIBW_ARCHS_MACOS="x86_64 arm64"
export CIBW_SKIP="cp27-* cp36-* pp* *_i686 *-win32"
export CIBW_SKIP="cp27-* cp36-* cp37-* cp38-* pp* *_i686 *-win32"
export CIBW_TEST_EXTRAS="test"
export CIBW_TEST_COMMAND="python -m pytest {project}/python/tests -vv -s --skip-browser-tests"
export CIBW_MANYLINUX_X86_64_IMAGE=manylinux2014
42 changes: 29 additions & 13 deletions python/copy_openmesh_deps.py
@@ -7,27 +7,43 @@
import sys

script_dir = os.path.dirname(sys.argv[0])
src_dir = os.path.join(script_dir, 'ext/src')
dest_openmesh_dir = os.path.join(script_dir, 'ext/third_party/openmesh/OpenMesh')
src_dir = os.path.join(script_dir, "ext/src")
dest_openmesh_dir = os.path.join(script_dir, "ext/third_party/openmesh/OpenMesh")

ap = argparse.ArgumentParser()
ap.add_argument('openmesh_directory', help='Path to OpenMesh root directory')
ap.add_argument("openmesh_directory", help="Path to OpenMesh root directory")
args = ap.parse_args()

openmesh_dir = os.path.abspath(args.openmesh_directory)

deps = subprocess.check_output(
['gcc', '-pthread', '-I', os.path.join(openmesh_dir, 'src'), '-c', 'openmesh_dependencies.cc',
'on_demand_object_mesh_generator.cc', '-MM', '-MF', '/dev/stdout',
'-fopenmp', '-std=c++11'], cwd=src_dir).split()
deps = [x[len(openmesh_dir)+1:] for x in deps if x.startswith(openmesh_dir + '/')] + ['LICENSE', 'VERSION']
[
"gcc",
"-pthread",
"-I",
os.path.join(openmesh_dir, "src"),
"-c",
"openmesh_dependencies.cc",
"on_demand_object_mesh_generator.cc",
"-MM",
"-MF",
"/dev/stdout",
"-fopenmp",
"-std=c++11",
],
text=True,
cwd=src_dir,
).split()
deps = [
x[len(openmesh_dir) + 1 :] for x in deps if x.startswith(openmesh_dir + "/")
] + ["LICENSE", "VERSION"]

if os.path.exists(dest_openmesh_dir):
shutil.rmtree(dest_openmesh_dir)
shutil.rmtree(dest_openmesh_dir)

for dep in deps:
dest_path = os.path.join(dest_openmesh_dir, dep)
dest_dir = os.path.dirname(dest_path)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
shutil.copyfile(os.path.join(openmesh_dir, dep), dest_path)
dest_path = os.path.join(dest_openmesh_dir, dep)
dest_dir = os.path.dirname(dest_path)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
shutil.copyfile(os.path.join(openmesh_dir, dep), dest_path)
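Besides the mechanical reformatting, the rewritten check_output call now passes text=True, so the gcc -MM output is decoded to str and the later startswith comparison against the str prefix openmesh_dir + "/" works; without it, .split() yields bytes items. A minimal sketch of that difference; the echoed path is an arbitrary placeholder and assumes a POSIX echo, not part of the script above:

import subprocess

# Without text=True the output is bytes, so prefix tests against a str fail.
raw = subprocess.check_output(["echo", "/tmp/OpenMesh/src/Core.hh"])
assert isinstance(raw, bytes)

# With text=True the output is decoded, matching the str handling above.
decoded = subprocess.check_output(["echo", "/tmp/OpenMesh/src/Core.hh"], text=True)
assert isinstance(decoded.split()[0], str)
assert decoded.split()[0].startswith("/tmp/OpenMesh" + "/")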
121 changes: 74 additions & 47 deletions python/examples/agglomeration_split_tool_csv_to_sqlite.py
@@ -1,56 +1,83 @@
import pandas
import argparse
import sqlite3

import neuroglancer.equivalence_map
import argparse
import numpy as np
import pandas


def load_edges2(path, include_agglo_id=False):
edges = []
dtype = {'segment_a': np.uint64, 'segment_b': np.uint64, 'score': np.float64, 'x': np.int64, 'y': np.int64, 'z': np.int64}
if include_agglo_id:
dtype['agglo_id'] = np.uint64
df = pandas.read_csv(path, sep=',', dtype=dtype)
return df
dtype = {
"segment_a": np.uint64,
"segment_b": np.uint64,
"score": np.float64,
"x": np.int64,
"y": np.int64,
"z": np.int64,
}
if include_agglo_id:
dtype["agglo_id"] = np.uint64
df = pandas.read_csv(path, sep=",", dtype=dtype)
return df


def write_db(edges_csv_path, output_path, include_agglo_id=False):
print("Loading edges")
edges = load_edges2(edges_csv_path, include_agglo_id=include_agglo_id)

all_eqs = neuroglancer.equivalence_map.EquivalenceMap()
print("Creating equivalence map for agglomeration")
for a, b in edges[["segment_a", "segment_b"]].values:
all_eqs.union(a, b)

conn = sqlite3.connect(output_path)
c = conn.cursor()

c.execute("CREATE TABLE supervoxels (supervoxel_id INTEGER, agglo_id INTEGER)")
c.execute(
"CREATE INDEX supervoxels_by_supervoxel_id_index ON supervoxels (supervoxel_id)"
)
c.execute("CREATE INDEX supervoxels_by_agglo_id_index ON supervoxels (agglo_id)")
c.execute(
"CREATE TABLE edges (agglo_id INTEGER, segment_a INTEGER, segment_b INTEGER, score REAL, x INTEGER, y INTEGER, z INTEGER)"
)
c.execute("CREATE INDEX edges_by_agglo_id_index ON edges (agglo_id)")

print("Writing supervoxels table")
c.executemany(
"INSERT INTO supervoxels VALUES (?,?)",
((int(x), int(all_eqs[x])) for x in all_eqs),
)

print("Writing edges table")
c.executemany(
"INSERT INTO edges VALUES (?, ?, ?, ?, ?, ?, ?)",
(
(
int(all_eqs[segment_a]),
int(segment_a),
int(segment_b),
float(score),
int(x),
int(y),
int(z),
)
for (segment_a, segment_b), score, (x, y, z) in zip(
edges[["segment_a", "segment_b"]].values,
edges["score"].values,
edges[["x", "y", "z"]].values,
)
),
)
print("Committing")
conn.commit()
conn.close()


if __name__ == "__main__":
ap = argparse.ArgumentParser()
ap.add_argument("csv", help="Path to CSV file specifying edges.")
ap.add_argument("db", help="Output path to sqlite3 db.")
args = ap.parse_args()

print('Loading edges')
edges = load_edges2(edges_csv_path, include_agglo_id=include_agglo_id)

all_eqs = neuroglancer.equivalence_map.EquivalenceMap()
print('Creating equivalence map for agglomeration')
for a, b in edges[['segment_a', 'segment_b']].values:
all_eqs.union(a, b)

conn = sqlite3.connect(output_path)
c = conn.cursor()

c.execute('CREATE TABLE supervoxels (supervoxel_id INTEGER, agglo_id INTEGER)')
c.execute('CREATE INDEX supervoxels_by_supervoxel_id_index ON supervoxels (supervoxel_id)')
c.execute('CREATE INDEX supervoxels_by_agglo_id_index ON supervoxels (agglo_id)')
c.execute('CREATE TABLE edges (agglo_id INTEGER, segment_a INTEGER, segment_b INTEGER, score REAL, x INTEGER, y INTEGER, z INTEGER)')
c.execute('CREATE INDEX edges_by_agglo_id_index ON edges (agglo_id)')

print('Writing supervoxels table')
c.executemany('INSERT INTO supervoxels VALUES (?,?)',
((int(x), int(all_eqs[x])) for x in all_eqs.keys()))

print('Writing edges table')
c.executemany(
'INSERT INTO edges VALUES (?, ?, ?, ?, ?, ?, ?)',
((int(all_eqs[segment_a]), int(segment_a), int(segment_b), float(score), int(x), int(y), int(z))
for (segment_a, segment_b), score,
(x, y, z) in zip(edges[['segment_a', 'segment_b']].values, edges['score']
.values, edges[['x', 'y', 'z']].values)))
print('Committing')
conn.commit()
conn.close()

if __name__ == '__main__':
ap = argparse.ArgumentParser()
ap.add_argument('csv', help='Path to CSV file specifying edges.')
ap.add_argument('db', help='Output path to sqlite3 db.')
args = ap.parse_args()

write_db(args.csv, args.db)
write_db(args.csv, args.db)
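The rewritten script produces the same database layout as before: a supervoxels table mapping each supervoxel_id to the agglo_id of its connected component (computed with neuroglancer.equivalence_map.EquivalenceMap over all edges) and an indexed edges table keyed by agglo_id. A minimal read-back sketch for a database produced by write_db; the filename and agglomeration id are placeholders:

import sqlite3

conn = sqlite3.connect("edges.db")  # placeholder path
c = conn.cursor()

# All supervoxels belonging to one agglomerated component.
supervoxels = [row[0] for row in c.execute(
    "SELECT supervoxel_id FROM supervoxels WHERE agglo_id = ?", (42,))]

# Edges within that component, ordered by score.
component_edges = c.execute(
    "SELECT segment_a, segment_b, score, x, y, z FROM edges "
    "WHERE agglo_id = ? ORDER BY score", (42,)).fetchall()
conn.close()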