我正在参考这里的一个示例,它很好地演示了如何将 PostGIS 中的空间数据导入 R。
该示例本身运行正常,但当我尝试检索较大的数据集(超过 1000 行)时,速度明显变慢。我猜测原因可能是每次循环都要重新分配内存。
有没有办法在进入循环之前预先设定 spTemp 的大小?完整代码如下,供参考:
library(RPostgreSQL)
library(rgeos)
library(sp)
# Load data from the PostGIS server
conn <- dbConnect(
  dbDriver("PostgreSQL"), dbname = dbname, host = host, port = 5432,
  user = user, password = password
)
strSQL <- "
SELECT gid, ST_AsText(geom) AS wkt_geometry, attr1, attr2[, ...]
FROM geo_layer"
dfTemp <- dbGetQuery(conn, strSQL)
# Release the connection as soon as the data is local
dbDisconnect(conn)
row.names(dfTemp) <- dfTemp$gid

# Create spatial polygons
# To set the PROJ4 string, enter the EPSG SRID and uncomment the
# following two lines:
# EPSG <- make_EPSG()
# p4s <- EPSG[which(EPSG$code == SRID), "prj4"]

# Parse each WKT geometry into its own Spatial* object, collecting them
# in a list, then combine everything with a SINGLE rbind() call.
# The original pattern -- spTemp <- rbind(spTemp, readWKT(...)) inside a
# loop -- is O(n^2): every iteration copies the whole accumulated object,
# which is why performance collapsed beyond ~1000 rows. Building a list
# and merging once via do.call() is the O(n) fix.
lstTemp <- lapply(seq_len(nrow(dfTemp)), function(i) {
  readWKT(dfTemp$wkt_geometry[i], dfTemp$gid[i])
  # If the PROJ4 string has been set, use the following instead
  # readWKT(dfTemp$wkt_geometry[i], dfTemp$gid[i], p4s)
})
spTemp <- do.call(rbind, lstTemp)

# Create SpatialPolygonsDataFrame, drop WKT field from attributes
spdfFinal <- SpatialPolygonsDataFrame(spTemp, dfTemp[-2])