diff --git a/examples/formats/NEXRAD_Level_2_File.py b/examples/formats/NEXRAD_Level_2_File.py
index 81b3cb1ed1f..b3415780998 100644
--- a/examples/formats/NEXRAD_Level_2_File.py
+++ b/examples/formats/NEXRAD_Level_2_File.py
@@ -73,7 +73,8 @@
 fig = plt.figure(figsize=(15, 8))
 add_metpy_logo(fig, 190, 85, size='large')
 
-for var_data, var_range, ax_rect in zip((ref, rho), (ref_range, rho_range), spec):
+for var_data, var_range, ax_rect in zip((ref, rho), (ref_range, rho_range), spec,
+                                        strict=False):
     # Turn into an array, then mask
     data = np.ma.array(var_data)
     data[np.isnan(data)] = np.ma.masked
diff --git a/examples/formats/NEXRAD_Level_3_File.py b/examples/formats/NEXRAD_Level_3_File.py
index ab16620ce01..9b5e7998124 100644
--- a/examples/formats/NEXRAD_Level_3_File.py
+++ b/examples/formats/NEXRAD_Level_3_File.py
@@ -26,7 +26,7 @@
 ctables = (('NWSStormClearReflectivity', -20, 0.5),  # dBZ
            ('NWS8bitVel', -100, 1.0))  # m/s
 
-for v, ctable, ax_rect in zip(('N0Q', 'N0U'), ctables, spec):
+for v, ctable, ax_rect in zip(('N0Q', 'N0U'), ctables, spec, strict=False):
     # Open the file
     name = get_test_data(f'nids/KOUN_SDUS54_{v}TLX_201305202016', as_file_obj=False)
     f = Level3File(name)
diff --git a/examples/gridding/Inverse_Distance_Verification.py b/examples/gridding/Inverse_Distance_Verification.py
index d17d5f274b7..43d98262df2 100644
--- a/examples/gridding/Inverse_Distance_Verification.py
+++ b/examples/gridding/Inverse_Distance_Verification.py
@@ -61,10 +61,10 @@ def draw_circle(ax, x, y, r, m, label):
 #
 # The variable ``indices`` represents the index of each matched coordinate within the
 # cKDTree's ``data`` list.
-grid_points = np.array(list(zip(sim_gridx, sim_gridy)))
+grid_points = np.array(list(zip(sim_gridx, sim_gridy, strict=False)))
 
 radius = 40
-obs_tree = cKDTree(list(zip(xp, yp)))
+obs_tree = cKDTree(list(zip(xp, yp, strict=False)))
 indices = obs_tree.query_ball_point(grid_points, r=radius)
 
 ###########################################
@@ -83,7 +83,7 @@ def draw_circle(ax, x, y, r, m, label):
 barnes_dist = dist_2(sim_gridx[1], sim_gridy[1], x2, y2)
 barnes_obs = zp[indices[1]]
 
-kappa = calc_kappa(average_spacing(list(zip(xp, yp))))
+kappa = calc_kappa(average_spacing(list(zip(xp, yp, strict=False))))
 
 barnes_val = barnes_point(barnes_dist, barnes_obs, kappa)
@@ -121,7 +121,7 @@ def draw_circle(ax, x, y, r, m, label):
 mx, my = obs_tree.data[indices[0]].T
 mz = zp[indices[0]]
 
-for x, y, z in zip(mx, my, mz):
+for x, y, z in zip(mx, my, mz, strict=False):
     d = np.sqrt((sim_gridx[0] - x)**2 + (y - sim_gridy[0])**2)
 
     ax.plot([sim_gridx[0], x], [sim_gridy[0], y], '--')
@@ -160,7 +160,7 @@ def draw_circle(ax, x, y, r, m, label):
 mx, my = obs_tree.data[indices[1]].T
 mz = zp[indices[1]]
 
-for x, y, z in zip(mx, my, mz):
+for x, y, z in zip(mx, my, mz, strict=False):
     d = np.sqrt((sim_gridx[1] - x)**2 + (y - sim_gridy[1])**2)
 
     ax.plot([sim_gridx[1], x], [sim_gridy[1], y], '--')
diff --git a/examples/gridding/Natural_Neighbor_Verification.py b/examples/gridding/Natural_Neighbor_Verification.py
index b63b2bcd76e..b07f430af8b 100644
--- a/examples/gridding/Natural_Neighbor_Verification.py
+++ b/examples/gridding/Natural_Neighbor_Verification.py
@@ -93,7 +93,8 @@
 ax.set_title('Triangulation of observations and test grid cell '
              'natural neighbor interpolation values')
 
-members, circumcenters = geometry.find_natural_neighbors(tri, list(zip(sim_gridx, sim_gridy)))
+members, circumcenters = geometry.find_natural_neighbors(tri, list(zip(sim_gridx, sim_gridy,
+                                                                       strict=False)))
 
 val = natural_neighbor_point(xp, yp, zp, (sim_gridx[0], sim_gridy[0]), tri, members[0],
                              circumcenters)
@@ -164,7 +165,7 @@ def draw_circle(ax, x, y, r, m, label):
 # spatial data structure that we use here simply to show areal ratios.
 # Notice that the two natural neighbor triangle circumcenters are also vertices
 # in the Voronoi plot (green dots), and the observations are in the polygons (blue dots).
-vort = Voronoi(list(zip(xp, yp)))
+vort = Voronoi(list(zip(xp, yp, strict=False)))
 
 fig, ax = plt.subplots(1, 1, figsize=(15, 10))
 ax.ishold = lambda: True  # Work-around for Matplotlib 3.0.0 incompatibility
@@ -175,7 +176,7 @@ def draw_circle(ax, x, y, r, m, label):
 x_0 = xp[nn_ind]
 y_0 = yp[nn_ind]
 
-for x, y, z in zip(x_0, y_0, z_0):
+for x, y, z in zip(x_0, y_0, z_0, strict=False):
     ax.annotate(f'{x}, {y}: {z:.3f} F', xy=(x, y))
 
 ax.plot(sim_gridx[0], sim_gridy[0], 'k+', markersize=10)
diff --git a/examples/plots/Plotting_Surface_Analysis.py b/examples/plots/Plotting_Surface_Analysis.py
index 076ddcbf4c0..6b4f5fdc7fd 100644
--- a/examples/plots/Plotting_Surface_Analysis.py
+++ b/examples/plots/Plotting_Surface_Analysis.py
@@ -44,7 +44,7 @@ def plot_bulletin(ax, data):
     # Handle H/L points using MetPy's StationPlot class
     for field in ('HIGH', 'LOW'):
         rows = data[data.feature == field]
-        x, y = zip(*((pt.x, pt.y) for pt in rows.geometry))
+        x, y = zip(*((pt.x, pt.y) for pt in rows.geometry), strict=False)
         sp = StationPlot(ax, x, y, transform=ccrs.PlateCarree(), clip_on=True)
         sp.plot_text('C', [field[0]] * len(x), **complete_style[field])
         sp.plot_parameter('S', rows.strength, **complete_style[field])
diff --git a/examples/plots/US_Counties.py b/examples/plots/US_Counties.py
index 64830071e3b..2adad06d5a9 100644
--- a/examples/plots/US_Counties.py
+++ b/examples/plots/US_Counties.py
@@ -22,6 +22,6 @@
 ax2 = fig.add_subplot(1, 3, 2, projection=proj)
 ax3 = fig.add_subplot(1, 3, 3, projection=proj)
 
-for scale, axis in zip(['20m', '5m', '500k'], [ax1, ax2, ax3]):
+for scale, axis in zip(['20m', '5m', '500k'], [ax1, ax2, ax3], strict=False):
     axis.set_extent([270.25, 270.9, 38.15, 38.75], ccrs.Geodetic())
     axis.add_feature(USCOUNTIES.with_scale(scale))
diff --git a/src/metpy/calc/basic.py b/src/metpy/calc/basic.py
index a290e3df12e..99388a6beef 100644
--- a/src/metpy/calc/basic.py
+++ b/src/metpy/calc/basic.py
@@ -1130,7 +1130,7 @@ def zoom_xarray(input_field, zoom, output=None, order=3, mode='constant', cval=0
     if not np.iterable(zoom):
         zoom = tuple(zoom for _ in input_field.dims)
     zoomed_dim_coords = {}
-    for dim_name, dim_zoom in zip(input_field.dims, zoom):
+    for dim_name, dim_zoom in zip(input_field.dims, zoom, strict=False):
         if dim_name in input_field.coords:
             zoomed_dim_coords[dim_name] = scipy_zoom(
                 input_field[dim_name].data, dim_zoom, order=order, mode=mode, cval=cval,
diff --git a/src/metpy/calc/kinematics.py b/src/metpy/calc/kinematics.py
index fb80b3b18ee..73f8d7a64c4 100644
--- a/src/metpy/calc/kinematics.py
+++ b/src/metpy/calc/kinematics.py
@@ -464,7 +464,7 @@ def advection(
     return -sum(
         wind * gradient
-        for wind, gradient in zip(wind_vector.values(), gradient_vector)
+        for wind, gradient in zip(wind_vector.values(), gradient_vector, strict=False)
     )
diff --git a/src/metpy/calc/thermo.py b/src/metpy/calc/thermo.py
index b87a037e056..983e1315474 100644
--- a/src/metpy/calc/thermo.py
+++ b/src/metpy/calc/thermo.py
@@ -781,7 +781,7 @@ def _wide_option(intersect_type, p_list, t_list, pressure, parcel_temperature_pr
         lfc_p_list, _ = find_intersections(pressure, parcel_temperature_profile, temperature,
                                            direction='increasing', log_x=True)
-        diff = [lfc_p.m - el_p.m for lfc_p, el_p in zip(lfc_p_list, el_p_list)]
+        diff = [lfc_p.m - el_p.m for lfc_p, el_p in zip(lfc_p_list, el_p_list, strict=False)]
         return (p_list[np.where(diff == np.max(diff))][0],
                 t_list[np.where(diff == np.max(diff))][0])
diff --git a/src/metpy/interpolate/grid.py b/src/metpy/interpolate/grid.py
index d3153f13357..df7d07d17b6 100644
--- a/src/metpy/interpolate/grid.py
+++ b/src/metpy/interpolate/grid.py
@@ -162,7 +162,7 @@ def natural_neighbor_to_grid(xp, yp, variable, grid_x, grid_y):
     """
     # Handle grid-to-points conversion, and use function from `interpolation`
-    points_obs = list(zip(xp, yp))
+    points_obs = list(zip(xp, yp, strict=False))
     points_grid = generate_grid_coords(grid_x, grid_y)
     img = natural_neighbor_to_points(points_obs, variable, points_grid)
     return img.reshape(grid_x.shape)
@@ -214,7 +214,7 @@ def inverse_distance_to_grid(xp, yp, variable, grid_x, grid_y, r, gamma=None, ka
     """
     # Handle grid-to-points conversion, and use function from `interpolation`
-    points_obs = list(zip(xp, yp))
+    points_obs = list(zip(xp, yp, strict=False))
     points_grid = generate_grid_coords(grid_x, grid_y)
     img = inverse_distance_to_points(points_obs, variable, points_grid, r, gamma=gamma,
                                      kappa=kappa, min_neighbors=min_neighbors, kind=kind)
@@ -296,7 +296,7 @@ def interpolate_to_grid(x, y, z, interp_type='linear', hres=50000,
     grid_x, grid_y = generate_grid(hres, boundary_coords)
 
     # Handle grid-to-points conversion, and use function from `interpolation`
-    points_obs = np.array(list(zip(x, y)))
+    points_obs = np.array(list(zip(x, y, strict=False)))
     points_grid = generate_grid_coords(grid_x, grid_y)
     img = interpolate_to_points(points_obs, z, points_grid, interp_type=interp_type,
                                 minimum_neighbors=minimum_neighbors, gamma=gamma,
diff --git a/src/metpy/interpolate/points.py b/src/metpy/interpolate/points.py
index e33f7e19cb2..216c0545291 100644
--- a/src/metpy/interpolate/points.py
+++ b/src/metpy/interpolate/points.py
@@ -43,7 +43,7 @@ def cressman_point(sq_dist, values, radius):
     weights = tools.cressman_weights(sq_dist, radius)
     total_weights = np.sum(weights)
 
-    return sum(v * (w / total_weights) for (w, v) in zip(weights, values))
+    return sum(v * (w / total_weights) for (w, v) in zip(weights, values, strict=False))
 
 
 def barnes_point(sq_dist, values, kappa, gamma=None):
@@ -82,7 +82,7 @@ def barnes_point(sq_dist, values, kappa, gamma=None):
     weights = tools.barnes_weights(sq_dist, kappa, gamma)
     total_weights = np.sum(weights)
 
-    return sum(v * (w / total_weights) for (w, v) in zip(weights, values))
+    return sum(v * (w / total_weights) for (w, v) in zip(weights, values, strict=False))
 
 
 def natural_neighbor_point(xp, yp, variable, grid_loc, tri, neighbors, circumcenters):
@@ -271,7 +271,7 @@ def inverse_distance_to_points(points, values, xi, r, gamma=None, kappa=None, mi
     img = np.asarray([interp_func(geometry.dist_2(*grid, *obs_tree.data[matches].T),
                                   values[matches]) if len(matches) >= min_neighbors else np.nan
-                      for matches, grid in zip(indices, xi)])
+                      for matches, grid in zip(indices, xi, strict=False)])
 
     if org_units:
         img = units.Quantity(img, org_units)
diff --git a/src/metpy/interpolate/tools.py b/src/metpy/interpolate/tools.py
index ccbfea29d88..673dd2772a3 100644
--- a/src/metpy/interpolate/tools.py
+++ b/src/metpy/interpolate/tools.py
@@ -133,7 +133,7 @@ def remove_repeat_coordinates(x, y, z):
     coords = []
     variable = []
 
-    for (x_, y_, t_) in zip(x, y, z):
+    for (x_, y_, t_) in zip(x, y, z, strict=False):
        if (x_, y_) not in coords:
            coords.append((x_, y_))
            variable.append(t_)
diff --git a/src/metpy/io/_tools.py b/src/metpy/io/_tools.py
index 174566cc615..c79eba680c2 100644
--- a/src/metpy/io/_tools.py
+++ b/src/metpy/io/_tools.py
@@ -64,7 +64,7 @@ def __init__(self, info, prefmt='', tuple_name=None):
         """Initialize the NamedStruct."""
         if tuple_name is None:
             tuple_name = 'NamedStruct'
-        names, fmts = zip(*info)
+        names, fmts = zip(*info, strict=False)
         self.converters = {}
         conv_off = 0
         for ind, i in enumerate(info):
@@ -118,7 +118,7 @@ class DictStruct:
     def __init__(self, info, prefmt=''):
         """Initialize the DictStruct."""
-        names, formats = zip(*info)
+        names, formats = zip(*info, strict=False)
 
         # Remove empty names
         self._names = [n for n in names if n]
@@ -131,7 +131,7 @@ def size(self):
         return self._struct.size
 
     def _create(self, items):
-        return dict(zip(self._names, items))
+        return dict(zip(self._names, items, strict=False))
 
     def unpack(self, s):
         """Parse bytes and return a dict."""
@@ -151,7 +151,7 @@ def __init__(self, *args, **kwargs):
         self.val_map = dict(enumerate(args))
 
         # Invert the kwargs dict so that we can map from value to name
-        self.val_map.update(zip(kwargs.values(), kwargs.keys()))
+        self.val_map.update(zip(kwargs.values(), kwargs.keys(), strict=False))
 
     def __call__(self, val):
         """Map an integer to the string representation."""
diff --git a/src/metpy/io/gempak.py b/src/metpy/io/gempak.py
index 5280087ce91..8eefa9591ed 100644
--- a/src/metpy/io/gempak.py
+++ b/src/metpy/io/gempak.py
@@ -629,8 +629,9 @@ def __init__(self, file):
             fkey_prod = product(['header_name', 'header_length', 'header_type'],
                                 range(1, self.prod_desc.file_headers + 1))
             fkey_names = ['{}{}'.format(*x) for x in fkey_prod]
-            fkey_info = list(zip(fkey_names, np.repeat(('4s', 'i', 'i'),
-                                                       self.prod_desc.file_headers)))
+            fkey_info = list(zip(fkey_names,
+                                 np.repeat(('4s', 'i', 'i'), self.prod_desc.file_headers),
+                                 strict=False))
             self.file_keys_format = NamedStruct(fkey_info, self.prefmt, 'FileKeys')
 
             self._buffer.jump_to(self._start, _word_to_position(self.prod_desc.file_keys_ptr))
@@ -1897,7 +1898,7 @@ def _merge_sounding(self, parts):
 
         if num_man_levels >= 1:
             for mp, mt, mz in zip(parts['TTAA']['PRES'], parts['TTAA']['TEMP'],
-                                  parts['TTAA']['HGHT']):
+                                  parts['TTAA']['HGHT'], strict=False):
                 if self.prod_desc.missing_float not in [
                     mp,
                     mt,
diff --git a/src/metpy/io/gini.py b/src/metpy/io/gini.py
index f870ceb9e6f..c86d4b208b2 100644
--- a/src/metpy/io/gini.py
+++ b/src/metpy/io/gini.py
@@ -49,7 +49,7 @@ def _scaled_int(s):
 
 def _name_lookup(names):
     r"""Create an io helper to convert an integer to a named value."""
-    mapper = dict(zip(range(len(names)), names))
+    mapper = dict(zip(range(len(names)), names, strict=False))
 
     def lookup(val):
         return mapper.get(val, 'UnknownValue')
diff --git a/src/metpy/io/nexrad.py b/src/metpy/io/nexrad.py
index 8125b327110..ee0041f0356 100644
--- a/src/metpy/io/nexrad.py
+++ b/src/metpy/io/nexrad.py
@@ -518,7 +518,7 @@ def _decode_msg15(self, msg_hdr):
                 num_rng = data[offset]
                 codes = data[offset + 1:offset + 1 + 2 * num_rng:2]
                 ends = data[offset + 2:offset + 2 + 2 * num_rng:2]
-                az_data.append(list(zip(ends, codes)))
+                az_data.append(list(zip(ends, codes, strict=False)))
                 offset += 2 * num_rng + 1
             self.clutter_filter_map['data'].append(az_data)
@@ -1963,7 +1963,7 @@ def _unpack_packet_radial_data(self, code, in_sym_block):
             rads.append((start_az, end_az,
                          self._unpack_rle_data(
                              self._buffer.read_binary(2 * rad.num_hwords))))
-        start, end, vals = zip(*rads)
+        start, end, vals = zip(*rads, strict=False)
         return {'start_az': list(start), 'end_az': list(end), 'data': list(vals),
                 'center': (hdr.i_center * self.pos_scale(in_sym_block),
                            hdr.j_center * self.pos_scale(in_sym_block)),
@@ -1984,7 +1984,7 @@ def _unpack_packet_digital_radial(self, code, in_sym_block):
             start_az = rad.start_angle * 0.1
             end_az = start_az + rad.angle_delta * 0.1
             rads.append((start_az, end_az, self._buffer.read_binary(rad.num_bytes)))
-        start, end, vals = zip(*rads)
+        start, end, vals = zip(*rads, strict=False)
         return {'start_az': list(start), 'end_az': list(end), 'data': list(vals),
                 'center': (hdr.i_center * self.pos_scale(in_sym_block),
                            hdr.j_center * self.pos_scale(in_sym_block)),
@@ -2148,7 +2148,7 @@ def _unpack_packet_digital_precipitation(self, code, in_sym_block):
                 row = self._unpack_rle_data(row_bytes)
             else:
                 row = []
-                for run, level in zip(row_bytes[::2], row_bytes[1::2]):
+                for run, level in zip(row_bytes[::2], row_bytes[1::2], strict=False):
                     row.extend([level] * run)
             assert len(row) == lfm_boxes
             rows.append(row)
@@ -2164,7 +2164,7 @@ def _unpack_packet_linked_vector(self, code, in_sym_block):
             value = None
         scale = self.pos_scale(in_sym_block)
         pos = [b * scale for b in self._buffer.read_binary(num_bytes / 2, '>h')]
-        vectors = list(zip(pos[::2], pos[1::2]))
+        vectors = list(zip(pos[::2], pos[1::2], strict=False))
         return {'vectors': vectors, 'color': value}
 
     def _unpack_packet_vector(self, code, in_sym_block):
@@ -2176,7 +2176,7 @@ def _unpack_packet_vector(self, code, in_sym_block):
             value = None
         scale = self.pos_scale(in_sym_block)
         pos = [p * scale for p in self._buffer.read_binary(num_bytes / 2, '>h')]
-        vectors = list(zip(pos[::4], pos[1::4], pos[2::4], pos[3::4]))
+        vectors = list(zip(pos[::4], pos[1::4], pos[2::4], pos[3::4], strict=False))
         return {'vectors': vectors, 'color': value}
 
     def _unpack_packet_contour_color(self, code, in_sym_block):
@@ -2196,7 +2196,7 @@ def _unpack_packet_linked_contour(self, code, in_sym_block):
         vectors = [(startx, starty)]
         num_bytes = self._buffer.read_int(2, 'big', signed=False)
         pos = [b * scale for b in self._buffer.read_binary(num_bytes / 2, '>h')]
-        vectors.extend(zip(pos[::2], pos[1::2]))
+        vectors.extend(zip(pos[::2], pos[1::2], strict=False))
         return {'vectors': vectors}
 
     def _unpack_packet_wind_barbs(self, code, in_sym_block):
diff --git a/src/metpy/plots/_mpl.py b/src/metpy/plots/_mpl.py
index fba2ba80663..6d96ae7b56e 100644
--- a/src/metpy/plots/_mpl.py
+++ b/src/metpy/plots/_mpl.py
@@ -196,7 +196,7 @@ def draw(self, renderer):
 
         angle = self.get_rotation()
 
-        for (posx, posy), t in zip(pts, self.text):
+        for (posx, posy), t in zip(pts, self.text, strict=False):
             # Skip empty strings--not only is this a performance gain, but it fixes
             # rendering with path effects below.
             if not t:
diff --git a/src/metpy/plots/declarative.py b/src/metpy/plots/declarative.py
index aba41c40771..274eac1ed9d 100644
--- a/src/metpy/plots/declarative.py
+++ b/src/metpy/plots/declarative.py
@@ -1379,7 +1379,7 @@ def _build(self):
         # The order here needs to match the order of the tuple
         if self.arrowkey is not None:
             key_kwargs = {'U': 100, 'X': 0.85, 'Y': 1.02, 'labelpos': 'E', 'label': ''}
-            for name, val in zip(key_kwargs, self.arrowkey):
+            for name, val in zip(key_kwargs, self.arrowkey, strict=False):
                 if val is not None:
                     key_kwargs[name] = val
             self.parent.ax.quiverkey(self.handle, labelcolor=self.color, **key_kwargs)
diff --git a/src/metpy/plots/patheffects.py b/src/metpy/plots/patheffects.py
index af1baeb0e8c..b4e9e3f424a 100644
--- a/src/metpy/plots/patheffects.py
+++ b/src/metpy/plots/patheffects.py
@@ -130,7 +130,7 @@ def draw_path(self, renderer, gc, path, affine, rgbFace=None):  # noqa: N803
         line_shift = renderer.points_to_pixels(gc.get_linewidth()) / 2
 
         # Loop over all the markers to draw
-        for ind, marker_offset in zip(segment_indices, marker_offsets):
+        for ind, marker_offset in zip(segment_indices, marker_offsets, strict=False):
             sym_trans = self._get_symbol_transform(renderer, marker_offset, line_shift,
                                                    angles[ind], starts[ind])
             renderer.draw_path(gc0, self._symbol, sym_trans,
@@ -218,12 +218,12 @@ def draw_path(self, renderer, gc, path, affine, rgbFace=None):  # noqa: N803
         line_shift = renderer.points_to_pixels(gc.get_linewidth()) / 2
 
         # Loop over all the segments to draw
-        for start_path, end_path in zip(segment_starts, segment_ends):
+        for start_path, end_path in zip(segment_starts, segment_ends, strict=False):
             renderer.draw_path(gc0, mpath.Path(starts[start_path:end_path]),
                                mtransforms.Affine2D(), None)
 
         # Loop over all the markers to draw
-        for ind, marker_offset in zip(segment_indices, marker_offsets):
+        for ind, marker_offset in zip(segment_indices, marker_offsets, strict=False):
             sym_trans = self._get_symbol_transform(renderer, marker_offset, line_shift,
                                                    angles[ind], starts[ind])
@@ -311,12 +311,12 @@ def draw_path(self, renderer, gc, path, affine, rgbFace=None):  # noqa: N803
         line_shift = renderer.points_to_pixels(gc.get_linewidth()) / 2
 
         # Loop over all the segments to draw
-        for start_path, end_path in zip(segment_starts, segment_ends):
+        for start_path, end_path in zip(segment_starts, segment_ends, strict=False):
             renderer.draw_path(gc0, mpath.Path(starts[start_path:end_path]),
                                mtransforms.Affine2D(), None)
 
         # Loop over all the markers to draw
-        for ind, marker_offset in zip(segment_indices[::2], marker_offsets[::2]):
+        for ind, marker_offset in zip(segment_indices[::2], marker_offsets[::2], strict=False):
             sym_trans = self._get_symbol_transform(renderer, marker_offset, line_shift,
                                                    angles[ind], starts[ind])
@@ -906,7 +906,8 @@ def draw_path(self, renderer, gc, path, affine, rgbFace=None):  # noqa: N803
         # Loop over all the markers to draw
         for ind, start_path, end_path, marker_offset in zip(segment_indices, start_path_inds,
-                                                            end_path_inds, marker_offsets):
+                                                            end_path_inds, marker_offsets,
+                                                            strict=False):
             sym_trans = self._get_symbol_transform(renderer, marker_offset, line_shift,
                                                    angles[ind], starts[ind])
             gc = next(self._gc_cycle)
@@ -970,7 +971,8 @@ def draw_path(self, renderer, gc, path, affine, rgbFace=None):  # noqa: N803
         # Loop over all the markers to draw
         for ind, start_path, end_path, marker_offset in zip(segment_indices, segment_starts,
-                                                            segment_ends, marker_offsets):
+                                                            segment_ends, marker_offsets,
+                                                            strict=False):
             sym_trans = self._get_symbol_transform(renderer, marker_offset, line_shift,
                                                    angles[ind], starts[ind])
             gc = next(self._gc_cycle)
@@ -1040,7 +1042,7 @@ def draw_path(self, renderer, gc, path, affine, rgbFace=None):  # noqa: N803
         line_shift = renderer.points_to_pixels(gc.get_linewidth()) / 2
 
         # Loop over all the markers to draw
-        for ind, marker_offset in zip(segment_indices[::2], marker_offsets[::2]):
+        for ind, marker_offset in zip(segment_indices[::2], marker_offsets[::2], strict=False):
             sym_trans = self._get_symbol_transform(renderer, marker_offset, line_shift,
                                                    angles[ind], starts[ind])
             gc = next(self._gc_cycle)
@@ -1053,7 +1055,7 @@ def draw_path(self, renderer, gc, path, affine, rgbFace=None):  # noqa: N803
         for start_path, mid_path, end_path in zip(segment_starts,
                                                   segment_indices,
-                                                  segment_ends):
+                                                  segment_ends, strict=False):
             color1, color2 = next(self._segment_cycle)
 
             gcx = self._override_gc(renderer, gc, foreground=mcolors.to_rgb(color1))
diff --git a/src/metpy/units.py b/src/metpy/units.py
index 0ed468e990b..1328c772251 100644
--- a/src/metpy/units.py
+++ b/src/metpy/units.py
@@ -391,7 +391,8 @@ def wrapper(*args, **kwargs):
             # Wrap output
             if multiple_output:
                 wrapped_result = []
-                for this_result, this_output_control in zip(result, output_control):
+                for this_result, this_output_control in zip(result, output_control,
+                                                            strict=False):
                     q = units.Quantity(this_result, this_output_control[0])
                     if this_output_control[1] is not None:
                         q = q.to(this_output_control[1])
diff --git a/src/metpy/xarray.py b/src/metpy/xarray.py
index d96e61872d9..b6650b4caa2 100644
--- a/src/metpy/xarray.py
+++ b/src/metpy/xarray.py
@@ -622,7 +622,7 @@ def expand(self, key):
         """Parse key using xarray utils to ensure we have dimension names."""
         if not is_dict_like(key):
             labels = expanded_indexer(key, self.data_array.ndim)
-            key = dict(zip(self.data_array.dims, labels))
+            key = dict(zip(self.data_array.dims, labels, strict=False))
         return key
 
     def __getitem__(self, key):
@@ -1336,7 +1336,7 @@ def cast_variables(arg, arg_name):
             wrapping = _wrap_output_like_not_matching_units
 
         if isinstance(match, tuple):
-            return tuple(wrapping(*args) for args in zip(result, match))
+            return tuple(wrapping(*args) for args in zip(result, match, strict=False))
         else:
             return wrapping(result, match)
     return wrapper
diff --git a/tests/calc/test_calc_tools.py b/tests/calc/test_calc_tools.py
index 6f92f03d385..c54acb21a0b 100644
--- a/tests/calc/test_calc_tools.py
+++ b/tests/calc/test_calc_tools.py
@@ -796,7 +796,7 @@ def test_gradient_2d(deriv_2d_data):
                               [-3, -1, 4],
                               [-3, -1, 4],
                               [-3, -1, 4]]))
-    for r, t in zip(res, truth):
+    for r, t in zip(res, truth, strict=False):
         assert_array_almost_equal(r, t, 5)
@@ -804,7 +804,7 @@ def test_gradient_4d(deriv_4d_data):
     """Test gradient with 4D arrays."""
     res = gradient(deriv_4d_data, deltas=(1, 1, 1, 1))
     truth = tuple(factor * np.ones_like(deriv_4d_data) for factor in (48., 16., 4., 1.))
-    for r, t in zip(res, truth):
+    for r, t in zip(res, truth, strict=False):
         assert_array_almost_equal(r, t, 8)
@@ -820,7 +820,7 @@ def test_gradient_restricted_axes(deriv_2d_data):
                               [[-3], [-1], [4]],
                               [[-3], [-1], [4]],
                               [[-3], [-1], [4]]]))
-    for r, t in zip(res, truth):
+    for r, t in zip(res, truth, strict=False):
         assert_array_almost_equal(r, t, 5)
@@ -1009,7 +1009,7 @@ def test_3d_gradient_3d_data_no_axes(deriv_4d_data):
     test = deriv_4d_data[0]
     res = gradient(test, deltas=(1, 1, 1))
     truth = tuple(factor * np.ones_like(test) for factor in (16., 4., 1.))
-    for r, t in zip(res, truth):
+    for r, t in zip(res, truth, strict=False):
         assert_array_almost_equal(r, t, 8)
@@ -1033,7 +1033,7 @@ def test_2d_gradient_4d_data_2_axes_3_deltas(deriv_4d_data):
     """Test 2D gradient of 4D data with 2 axes and 3 deltas."""
     res = gradient(deriv_4d_data, deltas=(1, 1, 1), axes=(-2, -1))
     truth = tuple(factor * np.ones_like(deriv_4d_data) for factor in (4., 1.))
-    for r, t in zip(res, truth):
+    for r, t in zip(res, truth, strict=False):
         assert_array_almost_equal(r, t, 8)
@@ -1041,7 +1041,7 @@ def test_2d_gradient_4d_data_2_axes_2_deltas(deriv_4d_data):
    """Test 2D gradient of 4D data with 2 axes and 2 deltas."""
     res = gradient(deriv_4d_data, deltas=(1, 1), axes=(0, 1))
     truth = tuple(factor * np.ones_like(deriv_4d_data) for factor in (48., 16.))
-    for r, t in zip(res, truth):
+    for r, t in zip(res, truth, strict=False):
         assert_array_almost_equal(r, t, 8)
diff --git a/tests/interpolate/test_geometry.py b/tests/interpolate/test_geometry.py
index 0ca0bc4ff87..2300686b61d 100644
--- a/tests/interpolate/test_geometry.py
+++ b/tests/interpolate/test_geometry.py
@@ -27,7 +27,7 @@ def test_get_points_within_r():
     radius = 5
 
-    matches = get_points_within_r(center, list(zip(x, y)), radius).T
+    matches = get_points_within_r(center, list(zip(x, y, strict=False)), radius).T
 
     truth = [[1, 1], [2, 2], [3, 3], [4, 4], [5, 5]]
@@ -44,7 +44,7 @@ def test_get_point_count_within_r():
     radius = 5
 
-    count = get_point_count_within_r([center1, center2], list(zip(x, y)), radius)
+    count = get_point_count_within_r([center1, center2], list(zip(x, y, strict=False)), radius)
 
     truth = np.array([5, 2])
diff --git a/tests/interpolate/test_points.py b/tests/interpolate/test_points.py
index beb0ac49069..608938f4d60 100644
--- a/tests/interpolate/test_points.py
+++ b/tests/interpolate/test_points.py
@@ -44,13 +44,13 @@ def test_nn_point(test_data):
     r"""Test find natural neighbors for a point interpolation function."""
     xp, yp, z = test_data
 
-    tri = Delaunay(list(zip(xp, yp)))
+    tri = Delaunay(list(zip(xp, yp, strict=False)))
 
     sim_gridx = [30]
     sim_gridy = [30]
 
     members, tri_info = find_natural_neighbors(tri,
-                                               list(zip(sim_gridx, sim_gridy)))
+                                               list(zip(sim_gridx, sim_gridy, strict=False)))
 
     val = natural_neighbor_point(xp, yp, z, (sim_gridx[0], sim_gridy[0]), tri,
                                  members[0], tri_info)
@@ -66,7 +66,7 @@ def test_cressman_point(test_data):
     r = 40
 
-    obs_tree = cKDTree(list(zip(xp, yp)))
+    obs_tree = cKDTree(list(zip(xp, yp, strict=False)))
 
     indices = obs_tree.query_ball_point([30, 30], r=r)
@@ -86,7 +86,7 @@ def test_barnes_point(test_data):
     r = 40
 
-    obs_tree = cKDTree(list(zip(xp, yp)))
+    obs_tree = cKDTree(list(zip(xp, yp, strict=False)))
 
     indices = obs_tree.query_ball_point([60, 60], r=r)
diff --git a/tests/plots/test_cartopy_utils.py b/tests/plots/test_cartopy_utils.py
index 1cc864a3683..074cfdc55ec 100644
--- a/tests/plots/test_cartopy_utils.py
+++ b/tests/plots/test_cartopy_utils.py
@@ -31,7 +31,7 @@ def test_us_county_scales(ccrs):
     ax2 = fig.add_subplot(1, 3, 2, projection=proj)
     ax3 = fig.add_subplot(1, 3, 3, projection=proj)
 
-    for scale, axis in zip(['20m', '5m', '500k'], [ax1, ax2, ax3]):
+    for scale, axis in zip(['20m', '5m', '500k'], [ax1, ax2, ax3], strict=False):
         axis.set_extent([270.25, 270.9, 38.15, 38.75], ccrs.Geodetic())
         axis.add_feature(mpplots.USCOUNTIES.with_scale(scale))
     return fig
@@ -60,7 +60,7 @@ def test_us_states_scales(ccrs):
     ax2 = fig.add_subplot(1, 3, 2, projection=proj)
     ax3 = fig.add_subplot(1, 3, 3, projection=proj)
 
-    for scale, axis in zip(['20m', '5m', '500k'], [ax1, ax2, ax3]):
+    for scale, axis in zip(['20m', '5m', '500k'], [ax1, ax2, ax3], strict=False):
         axis.set_extent([270, 280, 28, 39], ccrs.Geodetic())
         axis.add_feature(mpplots.USSTATES.with_scale(scale))
     return fig
diff --git a/tests/plots/test_patheffects.py b/tests/plots/test_patheffects.py
index d1af81a8b35..ef6d3e88830 100644
--- a/tests/plots/test_patheffects.py
+++ b/tests/plots/test_patheffects.py
@@ -123,7 +123,7 @@ def test_scalloped_stroke_closed():
          -0.212, -0.482, -0.722, -0.462, -0.172]
     y = [1.264, 0.784, -0.076, -0.846, -1.126, -1.246,
         -1.006, 0.234, 0.754, 1.264]
-    verts = np.array([[x, y] for x, y in zip(x, y)])
+    verts = np.array([[x, y] for x, y in zip(x, y, strict=False)])
     codes = np.repeat(mpath.Path.LINETO, len(x))
     codes[0] = mpath.Path.MOVETO
     codes[-1] = mpath.Path.CLOSEPOLY
@@ -150,7 +150,7 @@ def test_scalloped_stroke_segment():
     # test data
     x = np.arange(9)
     y = np.concatenate([np.arange(5), np.arange(3, -1, -1)])
-    verts = np.array([[x, y] for x, y in zip(x, y)])
+    verts = np.array([[x, y] for x, y in zip(x, y, strict=False)])
    codes = np.repeat(mpath.Path.LINETO, len(x))
    codes[0] = mpath.Path.MOVETO