如何使用多个地理过程实现多处理?

时间:2013-06-10 17:56:53

标签: python multiprocessing raster geo arcpy

我对Python比较陌生,想尝试一下多处理。我有一个在IDLE或ArcMap工具箱脚本中运行良好的脚本。在仔细阅读这些论坛和docs.python之后,我尝试把这个可用的脚本改写成多处理程序。但是,这个论坛上类似的多处理示例中,没有一个像我需要的那样处理数据,所以我希望有人能指出可行的做法。

基本上,脚本会遍历一个高程栅格列表(ERDAS IMG格式),提取低于阈值的单元格,最后将它们合并在一起。我目前在命令提示符下运行脚本,因为在其他环境中运行时要么会打开新窗口,要么会直接崩溃。该脚本表面上看似工作正常,但它似乎在等待各个工作进程完全结束之前就进入了最终的合并步骤。

我看过几个例子,很少有人在工作函数中有多个进程。这些都不是arcpy geoprocesses。所以我想我的问题基本上是1)我应该使用pool.apply_async以外的东西,比如pool.map还是pool.apply? 2)我是否正确地将最终多边形的路径返回到resultList?

任何批评都是受欢迎的,非常感谢。提前谢谢。

# Import modules
import arcpy, os, math
from arcpy import env
from arcpy.sa import *
import multiprocessing
import time

# Check out licenses
arcpy.CheckOutExtension("spatial")

# Define functions
def worker_bee(inputRaster, scratch, addNum):
    """Extract below-threshold areas from one elevation raster tile.

    Replaces the -9999 NoData placeholder, low-pass filters the DEM,
    selects cells no more than ``addNum`` units above the tile minimum,
    converts them to polygons, and cleans the result (dissolve, area
    filter, eliminate holes, simplify, smooth).

    Args:
        inputRaster: full path to one ERDAS IMG elevation tile.
        scratch: scratch folder shared by all workers; each worker makes
            its own subfolder here so parallel runs never collide.
        addNum: elevation units added to the tile minimum to form the
            extraction threshold (string or number).

    Returns:
        Path to the smoothed output shapefile (ABL_<tile>.shp in scratch).
    """
    baseName = os.path.splitext(os.path.basename(inputRaster))[0]
    # Tile number is the last underscore-separated token of the file name.
    nameNumber = baseName.split("_")[-1]

    # Per-tile scratch subfolder keeps intermediate shapefiles isolated.
    subFolder = os.path.join(scratch, nameNumber + "_output")
    if not os.path.exists(subFolder):
        os.makedirs(subFolder)
    arcpy.env.workspace = subFolder
    arcpy.env.overwriteOutput = True
    arcpy.env.extent = "MAXOF"

    # Local variables
    expression = "Shape_Area >= 100"
    poly1 = os.path.join(subFolder, "poly1.shp")
    poly2 = os.path.join(subFolder, "poly2.shp")
    poly3 = os.path.join(subFolder, "poly3.shp")
    poly4 = os.path.join(subFolder, "poly4.shp")
    poly5 = os.path.join(subFolder, "poly5.shp")
    poly6 = os.path.join(subFolder, "poly6.shp")
    outName = os.path.join(scratch, "ABL_" + nameNumber + ".shp")

    #### Perform calculations ###
    # Map Algebra: replace the -9999 NoData placeholder with 9999 so it
    # can never be picked up as the raster minimum below.
    inRasterCon = Con(inputRaster, 9999, inputRaster, "Value = -9999")
    # Filter DEM to smooth out low outliers
    filterOut = Filter(inRasterCon, "LOW", "DATA")
    # Determine raster MINIMUM value and calculate threshold
    filterMin = arcpy.GetRasterProperties_management(filterOut, "MINIMUM").getOutput(0)
    threshold = float(filterMin) + float(addNum)
    # Map Algebra: keep only cells at or under the threshold
    outCon = Con(filterOut <= threshold, 1, "")
    arcpy.RasterToPolygon_conversion(outCon, poly1, "SIMPLIFY", "Value")
    # Dissolve parts
    arcpy.Dissolve_management(poly1, poly2, "", "", "SINGLE_PART", "DISSOLVE_LINES")
    # Select parts larger than 100 sq m
    arcpy.Select_analysis(poly2, poly3, expression)
    # Eliminate contained holes.
    # BUG FIX: the original read from poly4, which no previous step ever
    # created, so the chain broke here; the eliminate step must consume
    # the selection output (poly3).
    arcpy.EliminatePolygonPart_management(poly3, poly4, "PERCENT", "0 SquareMeters", "10", "CONTAINED_ONLY")
    # Select parts larger than 100 sq m again after hole removal
    arcpy.Select_analysis(poly4, poly5, expression)
    # Simplify Polygon
    arcpy.SimplifyPolygon_cartography(poly5, poly6, "BEND_SIMPLIFY", "3 Meters", "3000 SquareMeters", "RESOLVE_ERRORS", "KEEP_COLLAPSED_POINTS")
    # Smooth Polygon; final output lands in the shared scratch folder
    outShape = arcpy.SmoothPolygon_cartography(poly6, outName, "PAEK", "3 Meters", "FIXED_ENDPOINT", "FLAG_ERRORS").getOutput(0)
    ### Calculations complete ###

    # Delete scratch subfolder (intermediates only; outName lives in scratch)
    arcpy.Delete_management(subFolder)

    print("Completed " + outShape + "...")
    return outShape

# Paths of the per-tile output shapefiles, appended by the pool
# callback in the parent process as each worker finishes.
resultList = []


def log_result(path):
    # Callback for Pool.apply_async: records one worker's return value
    # (the worker's output shapefile path) for the final merge.
    resultList.append(path)

if __name__ == "__main__":
    # Driver: fan each raster tile out to a worker process, collect the
    # per-tile shapefile paths via the pool callback, then merge them.
    arcpy.env.overwriteOutput = True
    # Read in parameters
    inFolder = raw_input("Input Folder: ")  # arcpy.GetParameterAsText(0)
    addElev = raw_input("Number of elevation units to add to minimum: ")

    # Create scratch folder workspace shared by all workers
    scratchFolder = os.path.join(inFolder, "scratch")
    if not os.path.exists(scratchFolder):
        os.makedirs(scratchFolder)

    # Output name encodes the fractional part of addElev, e.g. 1.5 -> "5"
    dec_num = str(float(addElev) - int(float(addElev)))[1:]
    outNameNum = dec_num.replace(".", "")
    outMerge = os.path.join(inFolder, "ABL_" + outNameNum + ".shp")

    # Print core usage
    cores = multiprocessing.cpu_count()
    print("Using " + str(cores) + " cores...")

    # Start timing
    start = time.clock()

    # List input tiles (full paths)
    arcpy.env.workspace = inFolder
    tileList = [os.path.join(inFolder, tile)
                for tile in arcpy.ListRasters("*", "IMG")]

    # Create a Pool of subprocesses, one per core
    pool = multiprocessing.Pool(cores)

    print("Adding jobs to multiprocessing pool...")
    for tile in tileList:
        # Add the job asynchronously; log_result runs in this process and
        # appends the worker's output path to resultList. NOTE: if a
        # worker raises, the callback never fires and the tile is simply
        # missing from the merge.
        pool.apply_async(worker_bee, (tile, scratchFolder, addElev),
                         callback=log_result)
    pool.close()
    pool.join()  # block until every worker has finished before merging

    # Merge the temporary outputs
    print("Merging temporary outputs into shapefile " + outMerge + "...")
    arcpy.Merge_management(resultList, outMerge)

    # Clean up temporary data
    print("Deleting temporary data ...")
    # BUG FIX: original iterated over undefined name `results`;
    # the collected paths live in resultList.
    for result in resultList:
        try:
            arcpy.Delete_management(result)
        except:
            pass

    # Stop timing and report duration
    end = time.clock()
    duration = end - start
    hours, remainder = divmod(duration, 3600)
    minutes, seconds = divmod(remainder, 60)
    # BUG FIX: original concatenated str with the floats from divmod,
    # which raises TypeError; format them instead.
    print("Completed in %dhrs %dmin %dsec" % (hours, minutes, seconds))

0 个答案:

没有答案