0
Follow
2
View

How to make this loop parallel and faster?

dinsam6 (registered member)
2023-01-25 11:00

Here is a rough outline of something you can try.

def main():
    """Build one work item per image file and process them in parallel.

    Each work item is a (file path, class-dictionary info) tuple. The
    items are fanned out to a process pool running handle_one_image,
    and the returned image windows / labels are appended to the
    module-level image_data and image_label lists.
    """
    # One (path, class info) tuple per image file, across all classes.
    tasks = [
        (img_path + cl + "/" + img_file, class_dictionary[cl])
        for cl in file_images
        for img_file in data[cl]
    ]

    with multiprocessing.Pool(processes=30) as pool:   # tune worker count as needed
        image_counter = 0
        # starmap invokes handle_one_image(path, info) per task and
        # returns the results in submission order.
        for images, info in pool.starmap(handle_one_image, tasks):
            # `images` is the list of windows extracted from one file;
            # `info` is the class info we passed through unchanged.
            for img in images:
                image_counter += 1
                image_data.append(img)
                image_label.append(info)

def handle_one_image(path, info):
    """Load one image file and return its preprocessed sliding windows.

    Parameters
    ----------
    path : str
        Full path of the image file to load.
    info :
        Class-dictionary info for this image's class; passed through
        unchanged so the caller can pair each window with its label.

    Returns
    -------
    tuple
        (windows, info) where windows is a (possibly empty) list of
        preprocessed window arrays. An empty list signals that the
        image was too small to yield any windows.
    """
    # NOTE(review): the Keras API is image.load_img(path) — confirm that
    # image.img is the intended loader here.
    im = image.img(path)
    im = image.img_to_array(im)
    # Guard clause: images smaller than the window size yield no windows.
    if im.shape[0] < SIZE or im.shape[1] < SIZE:
        return [], info
    windows = sliding_window(im, STRIDE, SIZE)
    # Keep only windows with at least 3 channels, preprocessed for the model.
    return [preprocess_input(w) for w in windows if w.shape[2] >= 3], info

About the Author

Question Info

Publish Time
2023-01-25 11:00
Update Time
2023-01-25 11:00