diff --git a/Inst_eddies/Analysis/eddy_dump.py b/Inst_eddies/Analysis/eddy_dump.py
index 238184824f9f76dee5e2acfc3e2803e83961d1cb..bca355615243bc675ee8045bd861ce8ce2a9c55e 100755
--- a/Inst_eddies/Analysis/eddy_dump.py
+++ b/Inst_eddies/Analysis/eddy_dump.py
@@ -16,21 +16,21 @@ import shapefile
 import util_eddies
 
 parser = argparse.ArgumentParser()
-parser.add_argument("directory", metavar = "SHPC-directory")
-parser.add_argument("orientation", choices = ["Anticyclones", "Cyclones"])
+parser.add_argument("directory", metavar="SHPC-directory")
+parser.add_argument("orientation", choices=["Anticyclones", "Cyclones"])
 group = parser.add_mutually_exclusive_group()
-group.add_argument("-i", "--ishape", type = int)
-group.add_argument("-n", "--node", type = int)
+group.add_argument("-i", "--ishape", type=int)
+group.add_argument("-n", "--node", type=int)
 args = parser.parse_args()
-SHPC = util_eddies.SHPC_class(args.directory, def_orient = args.orientation)
+SHPC = util_eddies.SHPC_class(args.directory, def_orient=args.orientation)
 
 if args.ishape:
     ishape = args.ishape
 else:
     if args.node:
-        with open("e_overestim.txt") as f: e_overestim = int(f.read())
-        date, eddy_index = util_eddies.node_to_date_eddy(args.node,
-                                                          e_overestim)
+        with open("e_overestim.txt") as f:
+            e_overestim = int(f.read())
+        date, eddy_index = util_eddies.node_to_date_eddy(args.node, e_overestim)
     else:
         reply = input("date, eddy_index = ? ").split(",")
         date = int(reply[0])
@@ -45,8 +45,9 @@ for layer in ["extremum", "outermost_contour", "max_speed_contour", "centroid"]:
     print()
 
     try:
-        shapeRec = SHPC.get_reader(0, args.orientation, layer)\
-                       .shapeRecord(ishape)
+        shapeRec = SHPC.get_reader(0, args.orientation, layer).shapeRecord(
+            ishape
+        )
     except AttributeError:
         pass
     else:
diff --git a/Overlap/report_graph.py b/Overlap/report_graph.py
index 49d1925c0505e0efef649c45eeb66b730fd2f8ad..a5ddb666c77b6e43703a8365e5ad177150fc6974 100755
--- a/Overlap/report_graph.py
+++ b/Overlap/report_graph.py
@@ -8,13 +8,14 @@ import os
 import json
 import util_eddies
 
-def read_eddy_graph(edgelist, shpc_dir = None, orientation = "Cyclones"):
+
+def read_eddy_graph(edgelist, shpc_dir=None, orientation="Cyclones"):
     if os.access(edgelist, os.R_OK):
-        G = nx.read_edgelist(edgelist, create_using = nx.DiGraph,
-                             nodetype = int)
+        G = nx.read_edgelist(edgelist, create_using=nx.DiGraph, nodetype=int)
         dir_edgelist = path.dirname(edgelist)
         e_overestim_file = path.join(dir_edgelist, "e_overestim.txt")
-        with open(e_overestim_file) as f: G.graph["e_overestim"] = int(f.read())
+        with open(e_overestim_file) as f:
+            G.graph["e_overestim"] = int(f.read())
 
         if shpc_dir is not None:
             # Read and set attributes of eddies:
@@ -26,28 +27,39 @@ def read_eddy_graph(edgelist, shpc_dir = None, orientation = "Cyclones"):
 
     return G
 
+
 def set_attribute(G, SHPC, orientation):
     reader = SHPC.get_reader(0, orientation, "extremum")
 
     if G.number_of_nodes() <= SHPC.get_n_shapes(0, orientation) / 100:
         for n in G:
-            date_index, eddy_index = util_eddies.node_to_date_eddy(n,
-                                                       G.graph["e_overestim"])
+            date_index, eddy_index = util_eddies.node_to_date_eddy(
+                n, G.graph["e_overestim"]
+            )
             ishape = SHPC.comp_ishape(date_index, eddy_index, 0, orientation)
             shape_rec = reader.shapeRecord(ishape)
-            G.add_node(n, coordinates = tuple(shape_rec.shape.points[0]),
-                       speed = shape_rec.record.speed,
-                       ssh = shape_rec.record.ssh)
+            G.add_node(
+                n,
+                coordinates=tuple(shape_rec.shape.points[0]),
+                speed=shape_rec.record.speed,
+                ssh=shape_rec.record.ssh,
+            )
     else:
         for shape_rec in reader:
-            n = util_eddies.date_eddy_to_node(shape_rec.record.date,
-                                  shape_rec.record.eddy_index,
-                                  G.graph["e_overestim"])
+            n = util_eddies.date_eddy_to_node(
+                shape_rec.record.date,
+                shape_rec.record.eddy_index,
+                G.graph["e_overestim"],
+            )
 
             if n in G:
-                G.add_node(n, coordinates = tuple(shape_rec.shape.points[0]),
-                           speed = shape_rec.record.speed,
-                           ssh = shape_rec.record.ssh)
+                G.add_node(
+                    n,
+                    coordinates=tuple(shape_rec.shape.points[0]),
+                    speed=shape_rec.record.speed,
+                    ssh=shape_rec.record.ssh,
+                )
+
 
 def partition_graph_date(A, e_overestim):
     """Add subgraphs in pygraphviz graph, one subgraph for each date. We
@@ -59,7 +71,9 @@ def partition_graph_date(A, e_overestim):
     my_subgraphs = {}
 
     for n in A.iternodes():
-        date_index = util_eddies.node_to_date_eddy(int(n), e_overestim, only_date = True)
+        date_index = util_eddies.node_to_date_eddy(
+            int(n), e_overestim, only_date=True
+        )
 
         if date_index in my_subgraphs:
             my_subgraphs[date_index].append(n)
@@ -67,7 +81,8 @@ def partition_graph_date(A, e_overestim):
             my_subgraphs[date_index] = [n]
 
     for date, s in my_subgraphs.items():
-        A.add_subgraph(s, rank = "same")
+        A.add_subgraph(s, rank="same")
+
 
 def add_component(G: nx.DiGraph, source: int):
     """Add the weakly-connected component containing node source.
@@ -89,7 +104,9 @@ def add_component(G: nx.DiGraph, source: int):
         else:
             G.graph["component_list"] = [component]
 
-        for n in component: G.nodes[n]["component"] = component
+        for n in component:
+            G.nodes[n]["component"] = component
+
 
 if __name__ == "__main__":
     import argparse
@@ -97,13 +114,18 @@ if __name__ == "__main__":
     # Parse arguments:
     parser = argparse.ArgumentParser()
     parser.add_argument("edgelist_file")
-    parser.add_argument("--gv", help = "Create Graphviz file",
-                        action = "store_true")
-    parser.add_argument("-n", "--node",
-                        help = "Select component containing node", type = int)
-    parser.add_argument("-m", "--merging",
-                        help = "Report merging and splitting events",
-                        action = "store_true")
+    parser.add_argument(
+        "--gv", help="Create Graphviz file", action="store_true"
+    )
+    parser.add_argument(
+        "-n", "--node", help="Select component containing node", type=int
+    )
+    parser.add_argument(
+        "-m",
+        "--merging",
+        help="Report merging and splitting events",
+        action="store_true",
+    )
     args = parser.parse_args()
 
     G = read_eddy_graph(args.edgelist_file)
@@ -116,23 +138,29 @@ if __name__ == "__main__":
 
     len_H = len(H)
     print("Number of nodes:", len_H)
-    print("(This should be the number of eddies minus the number of isolated "
-          "nodes.)")
+    print(
+        "(This should be the number of eddies minus the number of isolated "
+        "nodes.)"
+    )
     print("Number of edges:", H.number_of_edges())
-    print("Number of weakly connected components:",
-          nx.number_weakly_connected_components(H))
+    print(
+        "Number of weakly connected components:",
+        nx.number_weakly_connected_components(H),
+    )
     n1 = len_H
     n2 = len_H
 
     if len_H >= 1:
         # First and last dates:
         node_index = min(G.nodes)
-        my_date = util_eddies.node_to_date_eddy(node_index, G.graph["e_overestim"],
-                                    only_date = True)
+        my_date = util_eddies.node_to_date_eddy(
+            node_index, G.graph["e_overestim"], only_date=True
+        )
         print("First date:", my_date)
         node_index = max(G.nodes)
-        my_date = util_eddies.node_to_date_eddy(node_index, G.graph["e_overestim"],
-                                    only_date = True)
+        my_date = util_eddies.node_to_date_eddy(
+            node_index, G.graph["e_overestim"], only_date=True
+        )
         print("Last date:", my_date)
 
     if args.merging:
@@ -141,7 +169,8 @@ if __name__ == "__main__":
     for n, d in H.in_degree():
         if d >= 1:
             n2 -= 1
-            if args.merging and d >= 2: print(list(H.predecessors(n)), "->", n)
+            if args.merging and d >= 2:
+                print(list(H.predecessors(n)), "->", n)
 
     if args.merging:
         print("---")
@@ -150,7 +179,8 @@ if __name__ == "__main__":
     for n, d in H.out_degree():
         if d >= 1:
             n1 -= 1
-            if args.merging and d >= 2: print(n, "->", list(H.successors(n)))
+            if args.merging and d >= 2:
+                print(n, "->", list(H.successors(n)))
 
     if args.merging:
         print("---")
diff --git a/Trajectories/draw_segments.py b/Trajectories/draw_segments.py
index fc4701f54f97877eb02448c82dd9eb211000db7d..1be68917e4c60938c127ec90fd4039702762bc01 100755
--- a/Trajectories/draw_segments.py
+++ b/Trajectories/draw_segments.py
@@ -19,15 +19,17 @@ import report_graph
 import util_eddies
 
 parser = argparse.ArgumentParser()
-parser.add_argument("input_file", help = "Graph-tool file")
-parser.add_argument("output_file", help = "Graphviz file")
-parser.add_argument("-t",
-                    help = "trajectories as lists of segments, JSON file, to "
-                    "label each segment with trajectory")
+parser.add_argument("input_file", help="Graph-tool file")
+parser.add_argument("output_file", help="Graphviz file")
+parser.add_argument(
+    "-t",
+    help="trajectories as lists of segments, JSON file, to "
+    "label each segment with trajectory",
+)
 args = parser.parse_args()
 
 g_in = graph_tool.load_graph(args.input_file)
-g_out = pgv.AGraph(directed = True)
+g_out = pgv.AGraph(directed=True)
 
 for v in g_in.vertices():
     g_out.add_node(g_in.vp.name[v])
@@ -35,18 +37,34 @@ for v in g_in.vertices():
 report_graph.partition_graph_date(g_out, g_in.gp.e_overestim)
 
 for e in g_in.edges():
-    source  = e.source()
+    source = e.source()
     target = e.target()
     try:
-        g_out.add_edge(g_in.vp.name[source], g_in.vp.name[target],
-                       label = f"{g_in.ep.cost_function[e]:.2g}")
+        g_out.add_edge(
+            g_in.vp.name[source],
+            g_in.vp.name[target],
+            label=f"{g_in.ep.cost_function[e]:.2g}",
+        )
     except AttributeError:
         g_out.add_edge(g_in.vp.name[source], g_in.vp.name[target])
 
 if args.t:
-    with open(args.t) as f: traj_segm = json.load(f)
-    colors = itertools.cycle(['blue', 'orange', 'green', 'red', 'purple',
-                              'brown', 'pink', 'gray', 'olive', 'cyan'])
+    with open(args.t) as f:
+        traj_segm = json.load(f)
+    colors = itertools.cycle(
+        [
+            "blue",
+            "orange",
+            "green",
+            "red",
+            "purple",
+            "brown",
+            "pink",
+            "gray",
+            "olive",
+            "cyan",
+        ]
+    )
 
     for i, segment_list in enumerate(traj_segm):
         color = next(colors)
@@ -57,17 +75,17 @@ if args.t:
             except KeyError:
                 break
             else:
-                date_index = util_eddies.node_to_date_eddy(segment,
-                                                            g_in.gp.e_overestim,
-                                                            only_date = True)
+                date_index = util_eddies.node_to_date_eddy(
+                    segment, g_in.gp.e_overestim, only_date=True
+                )
                 n.attr["label"] = f"{segment} in {i}\n({date_index})"
                 n.attr["color"] = color
 else:
     for n in g_out.iternodes():
         segment = int(n)
-        date_index = util_eddies.node_to_date_eddy(segment,
-                                                    g_in.gp.e_overestim,
-                                                    only_date = True)
+        date_index = util_eddies.node_to_date_eddy(
+            segment, g_in.gp.e_overestim, only_date=True
+        )
         n.attr["label"] = f"{segment}\n({date_index})"
 
 g_out.write(args.output_file)
diff --git a/Trajectories/filter_traj.py b/Trajectories/filter_traj.py
index 354ed35791906fdfc4b723e8d24bcfac1cf61cb7..7fcbdb198df64887c9e6ab2a3da28a7cdec0f2d2 100755
--- a/Trajectories/filter_traj.py
+++ b/Trajectories/filter_traj.py
@@ -9,15 +9,19 @@ if len(sys.argv) != 4:
     sys.exit("Required arguments: last-date input-file output-file")
 
 last_date = int(sys.argv[1])
-with open(sys.argv[2]) as f: expanded_traj = json.load(f)
+with open(sys.argv[2]) as f:
+    expanded_traj = json.load(f)
 last_node = util_eddies.date_eddy_to_node(
-    last_date, expanded_traj["e_overestim"], expanded_traj["e_overestim"])
+    last_date, expanded_traj["e_overestim"], expanded_traj["e_overestim"]
+)
 filt_traj = []
 
 for t in expanded_traj["traj"]:
     ip = bisect.bisect(t, last_node)
-    if ip != 0: filt_traj.append(t[:ip])
+    if ip != 0:
+        filt_traj.append(t[:ip])
 
 with open(sys.argv[3], "w") as f:
-    json.dump({"e_overestim": expanded_traj["e_overestim"], "traj": filt_traj},
-              f)
+    json.dump(
+        {"e_overestim": expanded_traj["e_overestim"], "traj": filt_traj}, f
+    )
diff --git a/Trajectories/plot_traj.py b/Trajectories/plot_traj.py
index 76368d949911e17e8ace3544fa69742bbdbea89b..85842cc102ea0e516d9e28d2d0eba19cbf857ebc 100755
--- a/Trajectories/plot_traj.py
+++ b/Trajectories/plot_traj.py
@@ -4,6 +4,7 @@ import util_eddies
 import random
 import numpy as np
 
+
 def get_extr_coord(traj, e_overestim, SHPC, orientation):
     """Get the coordinates of the extrema of instantaneous eddies along a
     given trajectory.
@@ -14,42 +15,53 @@ def get_extr_coord(traj, e_overestim, SHPC, orientation):
     y = []
 
     for node in traj:
-        date_index, eddy_index = util_eddies.node_to_date_eddy(node,
-                                                                e_overestim)
+        date_index, eddy_index = util_eddies.node_to_date_eddy(
+            node, e_overestim
+        )
         i_slice = SHPC.get_slice(date_index)
         ishape = SHPC.comp_ishape(date_index, eddy_index, i_slice, orientation)
-        shape = SHPC.get_reader(i_slice, orientation, layer = "extremum")\
-                    .shape(ishape)
+        shape = SHPC.get_reader(i_slice, orientation, layer="extremum").shape(
+            ishape
+        )
         x.append(shape.points[0][0])
         y.append(shape.points[0][1])
 
     return x, y
 
-def plot_single_traj(traj, e_overestim, SHPC, orientation, ax, src_crs,
-                     annotate_flag, color):
+
+def plot_single_traj(
+    traj, e_overestim, SHPC, orientation, ax, src_crs, annotate_flag, color
+):
     x, y = get_extr_coord(traj, e_overestim, SHPC, orientation)
-    ax.plot(x, y, color, linewidth = 0.5, transform = src_crs)
-    ax.plot(x[0], y[0], marker = "s", markersize = 2, color = "green",
-            transform = src_crs)
+    ax.plot(x, y, color, linewidth=0.5, transform=src_crs)
+    ax.plot(
+        x[0], y[0], marker="s", markersize=2, color="green", transform=src_crs
+    )
 
     if annotate_flag:
-        ax.annotate(str(traj[0]),
-                    ax.projection.transform_point(x[0], y[0], src_crs),
-                    xytext = (3 * random.random(), 3 * random.random()),
-                    textcoords = "offset points")
+        ax.annotate(
+            str(traj[0]),
+            ax.projection.transform_point(x[0], y[0], src_crs),
+            xytext=(3 * random.random(), 3 * random.random()),
+            textcoords="offset points",
+        )
+
 
 def get_duration(expanded_traj):
     duration_list = []
 
     for traj in expanded_traj["traj"]:
         init_date = util_eddies.node_to_date_eddy(
-            traj[0], expanded_traj["e_overestim"], only_date = True)
+            traj[0], expanded_traj["e_overestim"], only_date=True
+        )
         final_date = util_eddies.node_to_date_eddy(
-            traj[- 1], expanded_traj["e_overestim"], only_date = True)
+            traj[-1], expanded_traj["e_overestim"], only_date=True
+        )
         duration_list.append(final_date - init_date)
 
     return np.array(duration_list)
 
+
 if __name__ == "__main__":
     import util_eddies
     import matplotlib.pyplot as plt
@@ -59,30 +71,48 @@ if __name__ == "__main__":
     import cartopy.feature as cfeature
 
     parser = argparse.ArgumentParser()
-    parser.add_argument("expanded_traj", help = "JSon file")
-    parser.add_argument("SHPC", help = "directory")
-    parser.add_argument("--save", metavar = "format",
-                        help = "Save file to specified format")
-    parser.add_argument("--annotate", action = "store_true", help = "annotate "
-                        "the first point of trajectory with node number")
-    parser.add_argument("--min_duration", type = int, default = 1,
-                        help = "minimum duration of plotted trajectories (in "
-                        "time steps), >= 1")
+    parser.add_argument("expanded_traj", help="JSon file")
+    parser.add_argument("SHPC", help="directory")
+    parser.add_argument(
+        "--save", metavar="format", help="Save file to specified format"
+    )
+    parser.add_argument(
+        "--annotate",
+        action="store_true",
+        help="annotate " "the first point of trajectory with node number",
+    )
+    parser.add_argument(
+        "--min_duration",
+        type=int,
+        default=1,
+        help="minimum duration of plotted trajectories (in "
+        "time steps), >= 1",
+    )
     args = parser.parse_args()
 
-    with open(args.expanded_traj) as f: expanded_traj = json.load(f)
+    with open(args.expanded_traj) as f:
+        expanded_traj = json.load(f)
     print("Number of trajectories:", len(expanded_traj["traj"]))
-    SHPC = util_eddies.SHPC_class(args.SHPC, def_orient = expanded_traj["orientation"])
+    SHPC = util_eddies.SHPC_class(
+        args.SHPC, def_orient=expanded_traj["orientation"]
+    )
     src_crs = ccrs.Geodetic()
-    projection = ccrs.Mercator(central_longitude = 110)
-    fig, ax = plt.subplots(subplot_kw = {"projection": projection})
+    projection = ccrs.Mercator(central_longitude=110)
+    fig, ax = plt.subplots(subplot_kw={"projection": projection})
     random.seed(0)
 
     if args.min_duration == 1:
         for traj in expanded_traj["traj"]:
             plot_single_traj(
-                traj, expanded_traj["e_overestim"], SHPC, expanded_traj["orientation"], ax,
-                src_crs, args.annotate, color = "red")
+                traj,
+                expanded_traj["e_overestim"],
+                SHPC,
+                expanded_traj["orientation"],
+                ax,
+                src_crs,
+                args.annotate,
+                color="red",
+            )
     else:
         # args.min_duration > 1
         n_long_traj = 0
@@ -92,16 +122,25 @@ if __name__ == "__main__":
             if duration >= args.min_duration:
                 n_long_traj += 1
                 plot_single_traj(
-                    traj, expanded_traj["e_overestim"], SHPC, expanded_traj["orientation"],
-                    ax, src_crs, args.annotate, color = "red")
+                    traj,
+                    expanded_traj["e_overestim"],
+                    SHPC,
+                    expanded_traj["orientation"],
+                    ax,
+                    src_crs,
+                    args.annotate,
+                    color="red",
+                )
 
         print("Number of trajectories with sufficient duration:", n_long_traj)
-        ax.set_title(rf"lifetime $\geq$ {args.min_duration} time steps"
-                     f"\nnumber of trajectories: {n_long_traj}")
-
-    ax.set_title(expanded_traj["orientation"], loc = "left")
-    ax.add_feature(cfeature.LAND, edgecolor = "black")
-    ax.gridlines(draw_labels = True)
+        ax.set_title(
+            rf"lifetime $\geq$ {args.min_duration} time steps"
+            f"\nnumber of trajectories: {n_long_traj}"
+        )
+
+    ax.set_title(expanded_traj["orientation"], loc="left")
+    ax.add_feature(cfeature.LAND, edgecolor="black")
+    ax.gridlines(draw_labels=True)
 
     if args.save:
         plt.savefig(f"plot_traj.{args.save}")
diff --git a/Trajectories/trajectories.py b/Trajectories/trajectories.py
index bfc2a1e8a64898d17d1b843b5aef1d06962fbd39..35e7cf5578198827a5a1ff5dca67806292f8ebb2 100755
--- a/Trajectories/trajectories.py
+++ b/Trajectories/trajectories.py
@@ -20,6 +20,7 @@ import util_eddies
 import numpy as np
 import time
 
+
 def new_traj(ind_traj, traj_prop, n, traj_vert_ind):
     """Assign new trajectory to vertex index n."""
 
@@ -28,9 +29,11 @@ def new_traj(ind_traj, traj_prop, n, traj_vert_ind):
     traj_vert_ind.append([n])
     return ind_traj
 
+
 t0 = time.perf_counter()
 timings_file = open("timings.txt", "w")
-if len(sys.argv) != 2: sys.exit("Required argument: input-graph")
+if len(sys.argv) != 2:
+    sys.exit("Required argument: input-graph")
 g = graph_tool.load_graph(sys.argv[1])
 print("Input graph:")
 print("Number of vertices:", g.num_vertices())
@@ -38,22 +41,22 @@ print("Number of edges:", g.num_edges())
 print("Internal properties:")
 g.list_properties()
 t1 = time.perf_counter()
-timings_file.write(f'Loading done in {t1 - t0:.0f} s\n')
+timings_file.write(f"Loading done in {t1 - t0:.0f} s\n")
 t0 = t1
 
 if "cost_function" in g.edge_properties:
-    closest_succ = g.new_vertex_property('int')
+    closest_succ = g.new_vertex_property("int")
     # Index of the closest direct successor of a node: tail of the
     # out-edge with smallest cost. - 1 means there is no successor.
 
     # Set closest_succ for all nodes:
     for n in g.iter_vertices():
-        all_cost = g.get_out_edges(n, eprops = [g.ep.cost_function])
+        all_cost = g.get_out_edges(n, eprops=[g.ep.cost_function])
         # numpy array with dtype float64
 
         if all_cost.size == 0:
             # n has no successor
-            closest_succ[n] = - 1
+            closest_succ[n] = -1
         else:
             i_min = all_cost[:, 2].argmin()
             closest_succ[n] = all_cost[i_min, 1]
@@ -62,14 +65,14 @@ else:
     print("No cost values in the input file.")
 
 t1 = time.perf_counter()
-timings_file.write(f'closest_succ defined in {t1 - t0:.0f} s\n')
+timings_file.write(f"closest_succ defined in {t1 - t0:.0f} s\n")
 t0 = t1
-traj_prop = g.new_vertex_property('int', val = - 1)
+traj_prop = g.new_vertex_property("int", val=-1)
 # Trajectory index to which a segment belongs.  - 1 means the segment
 # does not belong yet to any trajectory. The trajectory index is also
 # the index in lists traj_vert_ind, traj_segm and expanded_traj.
 
-ind_traj = - 1 # initialize index of trajectory
+ind_traj = -1  # initialize index of trajectory
 
 traj_vert_ind = []
 # An element of traj_vert_ind is a trajectory, represented by a list
@@ -89,8 +92,13 @@ for n in topology.topological_sort(g):
     if in_deg_prop[n] == 2:
         pred = g.get_in_neighbors(n)
 
-        if in_deg_prop[pred[0]] == 1 == in_deg_prop[pred[1]] \
-           == out_deg_prop[pred[0]] == out_deg_prop[pred[1]]:
+        if (
+            in_deg_prop[pred[0]]
+            == 1
+            == in_deg_prop[pred[1]]
+            == out_deg_prop[pred[0]]
+            == out_deg_prop[pred[1]]
+        ):
             n1 = g.get_in_neighbors(pred[0])[0]
             n2 = g.get_in_neighbors(pred[1])[0]
 
@@ -102,18 +110,21 @@ for n in topology.topological_sort(g):
                 # splitting is the date of the last eddy in segment
                 # n1.
                 date_merging = util_eddies.node_to_date_eddy(
-                    g.vp.name[n], g.gp.e_overestim, only_date = True)
+                    g.vp.name[n], g.gp.e_overestim, only_date=True
+                )
                 date_splitting = util_eddies.node_to_date_eddy(
-                    g.vp.inst_eddies[n1][- 1], g.gp.e_overestim,
-                    only_date = True)
+                    g.vp.inst_eddies[n1][-1], g.gp.e_overestim, only_date=True
+                )
 
                 if date_merging - date_splitting <= 6:
                     # We have a phantom pattern. Get the vertex on the
                     # side of the shortest path:
-                    if g.ep.cost_function[(n1, pred[0])] \
-                       + g.ep.cost_function[(pred[0], n)] \
-                       < g.ep.cost_function[(n1, pred[1])] \
-                       + g.ep.cost_function[(pred[1], n)]:
+                    if (
+                        g.ep.cost_function[(n1, pred[0])]
+                        + g.ep.cost_function[(pred[0], n)]
+                        < g.ep.cost_function[(n1, pred[1])]
+                        + g.ep.cost_function[(pred[1], n)]
+                    ):
                         v_short = pred[0]
                     else:
                         v_short = pred[1]
@@ -123,22 +134,27 @@ for n in topology.topological_sort(g):
                         # according to closest successor, that is:
                         # swap the trajectories of pred[0] and
                         # pred[1]:
-                        traj_prop[pred[0]], traj_prop[pred[1]] \
-                            = traj_prop[pred[1]], traj_prop[pred[0]]
-                        traj_vert_ind[traj_prop[pred[0]]][- 1], \
-                            traj_vert_ind[traj_prop[pred[1]]][- 1] \
-                            = traj_vert_ind[traj_prop[pred[1]]][- 1], \
-                            traj_vert_ind[traj_prop[pred[0]]][- 1]
+                        traj_prop[pred[0]], traj_prop[pred[1]] = (
+                            traj_prop[pred[1]],
+                            traj_prop[pred[0]],
+                        )
+                        (
+                            traj_vert_ind[traj_prop[pred[0]]][-1],
+                            traj_vert_ind[traj_prop[pred[1]]][-1],
+                        ) = (
+                            traj_vert_ind[traj_prop[pred[1]]][-1],
+                            traj_vert_ind[traj_prop[pred[0]]][-1],
+                        )
 
                     traj_prop[n] = traj_prop[n1]
                     traj_vert_ind[traj_prop[n1]].append(n)
 
-    if traj_prop[n] == - 1:
+    if traj_prop[n] == -1:
         if in_deg_prop[n] >= 1:
             # Find the index of the closest direct predecessor of a node:
             # head of the in-edge with smallest cost.
 
-            all_cost = g.get_in_edges(n, eprops = [g.ep.cost_function])
+            all_cost = g.get_in_edges(n, eprops=[g.ep.cost_function])
             # numpy array with dtype float64
 
             i_min = all_cost[:, 2].argmin()
@@ -160,7 +176,7 @@ for n in topology.topological_sort(g):
     # traj_prop[n] != - 1
 
 t1 = time.perf_counter()
-timings_file.write(f'traj_prop defined in {t1 - t0:.0f} s\n')
+timings_file.write(f"traj_prop defined in {t1 - t0:.0f} s\n")
 t0 = t1
 print("Number of trajectories: ", len(traj_vert_ind))
 
@@ -177,8 +193,8 @@ expanded_traj = []
 
 for t in traj_vert_ind:
     # t is a trajectory as a list of vertex indices.
-    list_segm = [] # list of segments in trajectory t
-    list_eddies_traj = [] # list of eddies in trajectory t
+    list_segm = []  # list of segments in trajectory t
+    list_eddies_traj = []  # list of eddies in trajectory t
 
     for n in t:
         list_segm.append(g.vp.name[n])
@@ -192,22 +208,28 @@ for t in traj_vert_ind:
     expanded_traj.append(list_eddies_traj)
 
 t1 = time.perf_counter()
-timings_file.write(f'traj_segm and expanded_traj defined in {t1 - t0:.0f} s\n')
+timings_file.write(f"traj_segm and expanded_traj defined in {t1 - t0:.0f} s\n")
 t0 = t1
 
 # Save:
 
 with open("traj_segm.json", "w") as outfile:
-    json.dump(traj_segm, outfile, separators = (',', ':'))
+    json.dump(traj_segm, outfile, separators=(",", ":"))
 
 print("Created traj_segm.json.")
 
 with open("expanded_traj.json", "w") as outfile:
-    json.dump({"orientation": g.gp.orientation,
-               "e_overestim": g.gp.e_overestim, "traj": expanded_traj},
-              outfile, separators = (',', ':'))
+    json.dump(
+        {
+            "orientation": g.gp.orientation,
+            "e_overestim": g.gp.e_overestim,
+            "traj": expanded_traj,
+        },
+        outfile,
+        separators=(",", ":"),
+    )
 
 print("Created expanded_traj.json.")
 t1 = time.perf_counter()
-timings_file.write(f'Saving done in {t1 - t0:.0f} s\n')
-print('Done')
+timings_file.write(f"Saving done in {t1 - t0:.0f} s\n")
+print("Done")