Merge pull request #864 from ipython/pre-commit-ci-update-config
[pre-commit.ci] pre-commit autoupdate
minrk authored Feb 8, 2024
2 parents ae1191b + 95dbcbb commit 04d8a4f
Showing 68 changed files with 105 additions and 29 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -15,7 +15,7 @@ repos:
- id: isort
exclude: setupbase.py
- repo: https://github.com/psf/black
rev: 23.12.1
rev: 24.1.1
hooks:
- id: black
exclude: setupbase.py
@@ -24,7 +24,7 @@ repos:
hooks:
- id: prettier
- repo: https://github.com/PyCQA/flake8
rev: "6.1.0"
rev: "7.0.0"
hooks:
- id: flake8
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -35,7 +35,7 @@ repos:
- id: check-executables-have-shebangs
- id: requirements-txt-fixer
- repo: https://github.com/pre-commit/mirrors-eslint
rev: v9.0.0-alpha.0
rev: v9.0.0-alpha.2
hooks:
- id: eslint
files: \.[jt]sx?$ # *.js, *.jsx, *.ts and *.tsx
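Nearly all of the Python churn in the files below comes from the black bump (23.12.1 to 24.1.1) rather than from hand edits: black's 2024 stable style enforces a blank line after a module docstring and parenthesizes conditional expressions that have to be split across lines. The following is a minimal, hypothetical sketch of both patterns; the describe() helper, the log file name, and the flag are made-up stand-ins for the Popen(...) calls reformatted in benchmarks/cluster_start.py:

"""Example module: black 24.x now enforces a blank line after this docstring."""

import sys


def describe(stream=sys.stdout, label="default"):
    """Report which stream a hypothetical worker would log to."""
    name = getattr(stream, "name", repr(stream))
    print(f"{label}: logging to {name}", file=stream)


log_output_to_file = False

# black >= 24.1 wraps a conditional (ternary) expression in its own
# parentheses when the enclosing call has to split over several lines,
# which is the reformatting applied to the Popen(...) calls below.
describe(
    stream=(
        open("ipcontroller_output.log", "a+")
        if log_output_to_file
        else sys.stdout
    ),
    label="ipcontroller",
)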
32 changes: 20 additions & 12 deletions benchmarks/cluster_start.py
@@ -19,12 +19,16 @@ def start_cluster(depth, number_of_engines, path='', log_output_to_file=False):
ps = [
Popen(
ipcontroller_cmd.split(),
stdout=open('ipcontroller_output.log', 'a+')
if log_output_to_file
else sys.stdout,
stderr=open('ipcontroller_error_output.log', 'a+')
if log_output_to_file
else sys.stdout,
stdout=(
open('ipcontroller_output.log', 'a+')
if log_output_to_file
else sys.stdout
),
stderr=(
open('ipcontroller_error_output.log', 'a+')
if log_output_to_file
else sys.stdout
),
stdin=sys.stdin,
)
]
@@ -35,12 +39,16 @@ def start_cluster(depth, number_of_engines, path='', log_output_to_file=False):
ps.append(
Popen(
ipengine_cmd.split(),
stdout=open('ipengine_output.log', 'a+')
if log_output_to_file
else sys.stdout,
stderr=open('ipengine_error_output.log', 'a+')
if log_output_to_file
else sys.stdout,
stdout=(
open('ipengine_output.log', 'a+')
if log_output_to_file
else sys.stdout
),
stderr=(
open('ipengine_error_output.log', 'a+')
if log_output_to_file
else sys.stdout
),
stdin=sys.stdin,
)
)
14 changes: 9 additions & 5 deletions benchmarks/gcloud_setup.py
@@ -50,11 +50,15 @@ def delete_all_instances():
def gcloud_run(*args, block=True):
cmd = ["gcloud", "compute"] + list(args)
print(f'$ {" ".join(cmd)}')
check_call(
cmd
# stdout=open(get_gcloud_log_file_name(instance_name) + ".log", "a+"),
# stderr=open(f"{get_gcloud_log_file_name(instance_name)}_error.out", "a+"),
) if block else Popen(cmd)
(
check_call(
cmd
# stdout=open(get_gcloud_log_file_name(instance_name) + ".log", "a+"),
# stderr=open(f"{get_gcloud_log_file_name(instance_name)}_error.out", "a+"),
)
if block
else Popen(cmd)
)


def copy_files_to_instance(instance_name, *file_names, directory="~"):
1 change: 1 addition & 0 deletions docs/source/examples/customresults.py
@@ -9,6 +9,7 @@
-------
* MinRK
"""

import random

import ipyparallel as ipp
1 change: 1 addition & 0 deletions docs/source/examples/dagdeps.py
@@ -6,6 +6,7 @@
-------
* MinRK
"""

from random import randint

import networkx as nx
1 change: 1 addition & 0 deletions docs/source/examples/fetchparse.py
@@ -10,6 +10,7 @@
ipcluster start -n 4
"""

import sys
import time

1 change: 1 addition & 0 deletions docs/source/examples/interengine/bintree.py
@@ -6,6 +6,7 @@
Provides parallel [all]reduce functionality
"""

import re
import socket
from functools import reduce
1 change: 1 addition & 0 deletions docs/source/examples/iopubwatcher.py
@@ -13,6 +13,7 @@
-------
* MinRK
"""

import json
import sys

1 change: 1 addition & 0 deletions docs/source/examples/itermapresult.py
@@ -17,6 +17,7 @@
-------
* MinRK
"""

import time

import ipyparallel as ipp
1 change: 1 addition & 0 deletions docs/source/examples/nwmerge.py
@@ -1,5 +1,6 @@
"""Example showing how to merge multiple remote data streams.
"""

# Slightly modified version of:
# https://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/511509
import heapq
1 change: 1 addition & 0 deletions docs/source/examples/phistogram.py
@@ -1,4 +1,5 @@
"""Parallel histogram function"""

from ipyparallel import Reference


1 change: 1 addition & 0 deletions docs/source/examples/pi/parallelpi.py
@@ -15,6 +15,7 @@
and the files used will be downloaded if they are not in the working directory
of the IPython engines.
"""

from timeit import default_timer as clock

from matplotlib import pyplot as plt
1 change: 1 addition & 0 deletions docs/source/examples/pi/pidigits.py
@@ -14,6 +14,7 @@
If the digits of pi are truly random, these frequencies
should be equal.
"""

import os
from urllib.request import urlretrieve

1 change: 1 addition & 0 deletions hatch_build.py
@@ -1,4 +1,5 @@
"""Custom build script for hatch backend"""

import glob
import os
import subprocess
1 change: 1 addition & 0 deletions ipyparallel/__init__.py
@@ -1,4 +1,5 @@
"""The IPython ZMQ-based parallel computing interface."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
# export return_when constants
1 change: 1 addition & 0 deletions ipyparallel/_async.py
@@ -1,4 +1,5 @@
"""Async utilities"""

import asyncio
import concurrent.futures
import inspect
1 change: 1 addition & 0 deletions ipyparallel/apps/baseapp.py
@@ -1,6 +1,7 @@
"""
The Base Application class for ipyparallel apps
"""

import logging
import os
import re
1 change: 1 addition & 0 deletions ipyparallel/apps/launcher.py
@@ -1,4 +1,5 @@
"""Deprecated import for ipyparallel.cluster.launcher"""

import warnings

from ipyparallel.cluster.launcher import * # noqa
1 change: 1 addition & 0 deletions ipyparallel/apps/logwatcher.py
@@ -1,6 +1,7 @@
"""
A logger object that consolidates messages incoming from ipcluster processes.
"""

import logging

import zmq
1 change: 1 addition & 0 deletions ipyparallel/client/_joblib.py
@@ -1,4 +1,5 @@
"""joblib parallel backend for IPython Parallel"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from joblib.parallel import AutoBatchingMixin, ParallelBackendBase
1 change: 1 addition & 0 deletions ipyparallel/client/asyncresult.py
@@ -1,4 +1,5 @@
"""AsyncResult objects for the client"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import concurrent.futures
1 change: 1 addition & 0 deletions ipyparallel/client/client.py
@@ -1,4 +1,5 @@
"""A semi-synchronous Client for IPython parallel"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
1 change: 1 addition & 0 deletions ipyparallel/client/futures.py
@@ -1,4 +1,5 @@
"""Future-related utils"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
1 change: 1 addition & 0 deletions ipyparallel/client/magics.py
@@ -25,6 +25,7 @@
{CONFIG_DOC}
"""

import time
from contextlib import contextmanager

1 change: 1 addition & 0 deletions ipyparallel/client/map.py
@@ -3,6 +3,7 @@
Scattering consists of partitioning a sequence and sending the various
pieces to individual nodes in a cluster.
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
1 change: 1 addition & 0 deletions ipyparallel/client/remotefunction.py
@@ -1,4 +1,5 @@
"""Remote Functions and decorators for Views."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
1 change: 1 addition & 0 deletions ipyparallel/client/view.py
@@ -1,4 +1,5 @@
"""Views of remote engines."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import builtins
1 change: 1 addition & 0 deletions ipyparallel/cluster/_winhpcjob.py
@@ -2,6 +2,7 @@
Job and task components for writing .xml files that the Windows HPC Server
2008 can use to start jobs.
"""

import os
import re
import uuid
1 change: 1 addition & 0 deletions ipyparallel/cluster/cluster.py
@@ -4,6 +4,7 @@
starts/stops/polls controllers, engines, etc.
"""

import asyncio
import atexit
import glob
1 change: 1 addition & 0 deletions ipyparallel/cluster/launcher.py
@@ -1,4 +1,5 @@
"""Facilities for launching IPython Parallel processes asynchronously."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import asyncio
12 changes: 6 additions & 6 deletions ipyparallel/controller/app.py
@@ -923,9 +923,9 @@ def init_hub(self):
# save to new json config files
base = {
'key': self.session.key.decode('ascii'),
'curve_serverkey': self.curve_publickey.decode("ascii")
if self.enable_curve
else None,
'curve_serverkey': (
self.curve_publickey.decode("ascii") if self.enable_curve else None
),
'location': self.location,
'pack': self.session.packer,
'unpack': self.session.unpacker,
@@ -982,9 +982,9 @@ def get_python_scheduler_args(
'mon_addr': monitor_url,
'not_addr': disambiguate_url(self.client_url('notification')),
'reg_addr': disambiguate_url(self.client_url('registration')),
'identity': identity
if identity is not None
else bytes(scheduler_name, 'utf8'),
'identity': (
identity if identity is not None else bytes(scheduler_name, 'utf8')
),
'logname': logname,
'loglevel': self.log_level,
'log_url': self.log_url,
1 change: 1 addition & 0 deletions ipyparallel/controller/dependency.py
@@ -1,5 +1,6 @@
"""Dependency utilities
"""

from types import ModuleType

from ipyparallel.client.asyncresult import AsyncResult
1 change: 1 addition & 0 deletions ipyparallel/controller/dictdb.py
@@ -32,6 +32,7 @@
$lt,$gt,$lte,$gte,$ne,$in,$nin,$all,$mod,$exists
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import copy
1 change: 1 addition & 0 deletions ipyparallel/controller/hub.py
@@ -3,6 +3,7 @@
This is the master object that handles connections from engines and clients,
and monitors traffic through the various queues.
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import inspect
1 change: 1 addition & 0 deletions ipyparallel/controller/scheduler.py
@@ -4,6 +4,7 @@
nor does it check msg_id DAG dependencies. For those, a slightly slower
Python Scheduler exists.
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import logging
1 change: 1 addition & 0 deletions ipyparallel/controller/sqlitedb.py
@@ -1,4 +1,5 @@
"""A TaskRecord backend using sqlite3"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import json
1 change: 1 addition & 0 deletions ipyparallel/engine/datapub.py
@@ -1,4 +1,5 @@
"""Publishing native (typically pickled) objects."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from ipykernel.jsonutil import json_clean
1 change: 1 addition & 0 deletions ipyparallel/engine/kernel.py
@@ -1,4 +1,5 @@
"""IPython kernel for parallel computing"""

import asyncio
import inspect
import sys
1 change: 1 addition & 0 deletions ipyparallel/engine/nanny.py
@@ -13,6 +13,7 @@
only engine *sets*
- Cluster API can efficiently signal all engines via mpiexec
"""

import asyncio
import logging
import os
1 change: 1 addition & 0 deletions ipyparallel/error.py
@@ -5,6 +5,7 @@
.. inheritance-diagram:: ipyparallel.error
:parts: 3
"""

import builtins
import sys
import traceback
1 change: 1 addition & 0 deletions ipyparallel/joblib.py
@@ -16,6 +16,7 @@
.. versionadded:: 5.1
"""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from joblib.parallel import register_parallel_backend
1 change: 1 addition & 0 deletions ipyparallel/nbextension/__init__.py
@@ -1,4 +1,5 @@
"""Jupyter server extension(s)"""

import warnings


1 change: 1 addition & 0 deletions ipyparallel/nbextension/base.py
@@ -1,4 +1,5 @@
"""Place to put the base Handler"""

import warnings
from functools import lru_cache
