Python arcpy.Exists() Examples
The following are 30 code examples of arcpy.Exists().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module arcpy, or try the search function.
Example #1
Source File: mapmatcher.py From mapmatching with MIT License | 7 votes |
def getNetworkGraph(segments, segmentlengths):
    """Build a networkx graph from the network file, including segment
    lengths taken from arcpy.

    Selects the largest connected component of the network (to prevent
    errors from routing between unconnected parts).

    segments       -- network dataset name, relative to arcpy.env.workspace
    segmentlengths -- dict mapping OBJECTID -> segment length
    Returns the largest connected subgraph, or None if the file is missing.
    """
    # Generate the full network path so GDAL is able to read the file.
    path = str(os.path.join(arcpy.env.workspace, segments))
    print(path)  # print() form works on both Python 2 and 3
    if arcpy.Exists(path):
        g = nx.read_shp(path)
        # Select the largest connected component of the graph.
        sg = list(nx.connected_component_subgraphs(g.to_undirected()))[0]
        print("graph size (excluding unconnected parts): " + str(len(g)))
        # Append each road segment's length as an edge attribute.
        for n0, n1 in sg.edges():
            oid = sg[n0][n1]["OBJECTID"]
            sg[n0][n1]['length'] = segmentlengths[oid]
        return sg
    else:
        print("network file not found on path: " + path)
Example #2
Source File: arcapi_test.py From arcapi with GNU Lesser General Public License v3.0 | 6 votes |
def testdlt(self):
    """ap.dlt() should return True for datasets it deletes and False
    for a nonexistent path."""
    est = []
    wc = '"OBJECTID" < 11'
    lr = arcpy.management.MakeFeatureLayer(self.t_fc, "lr", wc).getOutput(0)
    # TODO: test for deleting layers won't pass even though ap.dlt works
    #print lr
    #print arcpy.Exists(lr)
    tempfc = 'in_memory\\tmp'
    if arcpy.Exists(tempfc):
        arcpy.Delete_management(tempfc)
    tmpfc = arcpy.CopyFeatures_management(lr, tempfc).getOutput(0)
    tempshp = arcpy.CreateScratchName('tmp.dbf', workspace='c:\\temp').replace('.dbf', '.shp')
    fc = arcpy.CopyFeatures_management(tmpfc, tempshp).getOutput(0)
    ap.dlt(lr)
    est.append(ap.dlt(tmpfc))
    est.append(ap.dlt(fc))
    est.append(ap.dlt('this does not exist'))
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(est, [True, True, False])
Example #3
Source File: dataprep.py From utilities-solution-data-automation with Apache License 2.0 | 6 votes |
def _CheckCreateGDBProcess(self):
    """Create (or re-create) the local file geodatabase at self.end_db.

    If self.overWrite equals "YES", an existing GDB is deleted first.
    Returns True on success, False on any failure.
    """
    try:
        # If user param is to overwrite the GDB, delete it first.
        if self.overWrite.upper() == "YES":
            if arcpy.Exists(self.end_db):  # truthiness instead of '== True'
                arcpy.Delete_management(self.end_db)
                self.overWrite = None
                print("Deleted previous GDB {0}".format(self.end_db))
        # If the local gdb doesn't exist, create it using the path and
        # name given in the end_db string.
        if not arcpy.Exists(self.end_db):  # 'not' instead of '== False'
            if self.end_db.rfind("\\") != -1:
                lastSlash = self.end_db.rfind("\\")
            else:
                lastSlash = self.end_db.rfind("/")
            arcpy.CreateFileGDB_management(self.end_db[:lastSlash],
                                           self.end_db[lastSlash + 1:])
            self.overWrite = None
            print("Created geodatabase {0}".format(self.end_db[lastSlash + 1:]))
        else:
            self.overWrite = None
            #print "Geodatabase already exists"
        return True
    except Exception:
        print("Unexpected error create geodatabase: {0}".format(sys.exc_info()[0]))
        return False
Example #4
Source File: arcapi_test.py From arcapi with GNU Lesser General Public License v3.0 | 6 votes |
def testadd_fields_from_table(self):
    """ap.add_fields_from_table() should copy the listed fields onto the
    target featureclass."""
    fc = os.path.join(self.testing_gdb, 'Illinois')
    copy = fc + '_copy'
    if arcpy.Exists(copy):
        arcpy.Delete_management(copy)
    arcpy.CopyFeatures_management(fc, copy)
    flds = ['POP1990', 'POP2000']
    # Original chained 'tab = fc = ...' silently rebound fc as well;
    # only tab is needed here.
    tab = os.path.join(self.testing_gdb, 'Illinois_county_info')
    ap.add_fields_from_table(copy, tab, flds)
    est = [f.name for f in arcpy.ListFields(copy)]
    try:
        arcpy.Delete_management(copy)
    except Exception:
        # Best-effort cleanup; a leftover copy does not affect the assertions.
        pass
    for f in flds:
        self.assertTrue(f in est)
Example #5
Source File: arcapi_test.py From arcapi with GNU Lesser General Public License v3.0 | 6 votes |
def testjoin_using_dict(self):
    """ap.join_using_dict() should append the joined fields to the target
    featureclass."""
    if arcpy.Exists(r'in_memory\copy'):
        arcpy.Delete_management(r'in_memory\copy')
    fc = os.path.join(self.testing_gdb, 'Illinois')
    copy = fc + '_copy'
    if arcpy.Exists(copy):
        arcpy.Delete_management(copy)
    arcpy.CopyFeatures_management(fc, copy)
    flds = ['POP1990', 'POP2000']
    # Original chained 'tab = fc = ...' silently rebound fc as well;
    # only tab is needed here.
    tab = os.path.join(self.testing_gdb, 'Illinois_county_info')
    ap.join_using_dict(copy, 'CNTY_FIPS', tab, 'CNTY_FIPS', flds)
    est = [f.name for f in arcpy.ListFields(copy)]
    try:
        arcpy.Delete_management(copy)
    except Exception:
        # Best-effort cleanup; a leftover copy does not affect the assertions.
        pass
    for f in flds:
        self.assertTrue(f in est)
Example #6
Source File: arcapi_test.py From arcapi with GNU Lesser General Public License v3.0 | 6 votes |
def testconcatenate_fields(self):
    """ap.concatenate_fields() should build the expected combined string."""
    if arcpy.Exists(r'in_memory\copy'):
        arcpy.Delete_management(r'in_memory\copy')
    fc = os.path.join(self.testing_gdb, 'Illinois')
    copy = fc + '_copy'
    if arcpy.Exists(copy):
        arcpy.Delete_management(copy)
    arcpy.CopyFeatures_management(fc, copy)
    ap.concatenate_fields(copy, 'FULL', 75, ['NAME', 'STATE_NAME'], ' County, ')
    obs = 'Jo Daviess County, Illinois'
    with arcpy.da.SearchCursor(copy, 'FULL') as rows:
        # next(rows) works on Python 2 and 3; rows.next() is Py2-only.
        est = next(rows)[0]
    try:
        arcpy.Delete_management(copy)
    except Exception:
        # Best-effort cleanup; a leftover copy does not affect the assertion.
        pass
    self.assertEqual(est, obs)
Example #7
Source File: describe_reporter.py From sample-gp-tools with Apache License 2.0 | 6 votes |
def generate_report(verbose_mode, property_list, user_files):
    """
    Generates the report containing each file and its associated
    Describe-object attributes. Report is a dictionary and can be
    useful for other scripts.

    verbose_mode  -- when True, record attribute-lookup failures in the report
    property_list -- Describe classes to report (keys into `properties`)
    user_files    -- paths/datasets to describe
    """
    report_results = {}
    report_file = os.path.join(os.getcwd(), u'Describe Report.txt')
    # 'with' guarantees the report file is closed even if a lookup fails
    # (the original left the handle open).
    with open(report_file, 'wt') as report_path:
        for f in user_files:
            if arcpy.Exists(f):
                desc_dict = od()
                desc = arcpy.Describe(f)
                for d_class in sorted(property_list):
                    desc_dict[d_class] = {}
                    for p in properties[d_class]:
                        try:
                            # getattr replaces eval(): same attribute lookup
                            # without executing constructed code.
                            desc_dict[d_class][p] = getattr(desc, p)
                        except AttributeError:
                            if verbose_mode:
                                desc_dict[d_class][p] = 'ATTRIBUTE ERROR: Method not found'
                report_results[f] = desc_dict
            else:
                report_results[f] = 'FILE NOT FOUND'
        pprint(report_results, report_path, width=400)
Example #8
Source File: mapmatcher.py From mapmatching with MIT License | 6 votes |
def getSegmentInfo(segments):
    """Build dictionaries for looking up endpoints and lengths of network
    segments (needed only because the networkx graph identifies edges by
    their end nodes).

    Returns (endpoints, segmentlengths), both keyed by OBJECTID, or None
    if the segment dataset does not exist. Also (re)creates the
    'segments_lyr' feature layer for fast spatial search.
    """
    if arcpy.Exists(segments):
        cursor = arcpy.da.SearchCursor(segments, ["OBJECTID", "SHAPE@"])
        endpoints = {}
        segmentlengths = {}
        for row in cursor:
            endpoints[row[0]] = ((row[1].firstPoint.X, row[1].firstPoint.Y),
                                 (row[1].lastPoint.X, row[1].lastPoint.Y))
            segmentlengths[row[0]] = row[1].length
        # Release the cursor's dataset lock. (The original also did
        # 'del row', which raises NameError on an empty dataset.)
        del cursor
        print("Number of segments: " + str(len(endpoints)))
        # Prepare segment layer for fast search.
        arcpy.Delete_management('segments_lyr')
        arcpy.MakeFeatureLayer_management(segments, 'segments_lyr')
        return (endpoints, segmentlengths)
    else:
        print("segment file does not exist!")
Example #9
Source File: mapmatcher.py From mapmatching with MIT License | 6 votes |
def exportPath(opt, trackname):
    """Export the list of segments into a shapefile — a subset of the
    loaded segment file, including all attributes.

    opt       -- list of OBJECTIDs of the matched path segments
    trackname -- track file name; its basename prefixes the output name
    """
    start_time = time.time()
    opt = getUniqueList(opt)
    qr = '"OBJECTID" IN ' + str(tuple(opt))
    outname = (os.path.splitext(os.path.basename(trackname))[0][:9]) + '_pth'
    arcpy.SelectLayerByAttribute_management('segments_lyr', "NEW_SELECTION", qr)
    try:
        if arcpy.Exists(outname):
            arcpy.Delete_management(outname)
        arcpy.FeatureClassToFeatureClass_conversion('segments_lyr', arcpy.env.workspace, outname)
        print("--- export: %s seconds ---" % (time.time() - start_time))
    # ExecuteError must be listed before Exception: it subclasses
    # Exception, so the original ordering made this branch unreachable.
    except arcpy.ExecuteError:
        arcpy.AddError(arcpy.GetMessages(2))
    except Exception:
        e = sys.exc_info()[1]
        print(e.args[0])
        # If using this code within a script tool, AddError can be used to
        # return messages back to the tool. If not, AddError has no effect.
        arcpy.AddError(e.args[0])
        arcpy.AddError(arcpy.env.workspace)
        arcpy.AddError(outname)
        #raise arcpy.ExecuteError
    except:
        # Any non-Exception errors are caught here. The assignment below
        # was commented out in the original, so print(e...) raised NameError.
        e = sys.exc_info()[1]
        print(e.args[0])
        arcpy.AddError(e.args[0])
        arcpy.AddError(arcpy.env.workspace)
        arcpy.AddError(outname)
Example #10
Source File: mapmatcher.py From mapmatching with MIT License | 6 votes |
def getSegmentInfo(segments):
    """Build dictionaries for looking up endpoints and lengths of network
    segments (needed only because the networkx graph identifies edges by
    their end nodes).

    Returns (endpoints, segmentlengths), both keyed by OBJECTID, or None
    if the segment dataset does not exist. Also (re)creates the
    'segments_lyr' feature layer for fast spatial search.
    """
    if arcpy.Exists(segments):
        cursor = arcpy.da.SearchCursor(segments, ["OBJECTID", "SHAPE@"])
        endpoints = {}
        segmentlengths = {}
        for row in cursor:
            endpoints[row[0]] = ((row[1].firstPoint.X, row[1].firstPoint.Y),
                                 (row[1].lastPoint.X, row[1].lastPoint.Y))
            segmentlengths[row[0]] = row[1].length
        # Release the cursor's dataset lock. (The original also did
        # 'del row', which raises NameError on an empty dataset.)
        del cursor
        print("Number of segments: " + str(len(endpoints)))
        # Prepare segment layer for fast search.
        arcpy.Delete_management('segments_lyr')
        arcpy.MakeFeatureLayer_management(segments, 'segments_lyr')
        return (endpoints, segmentlengths)
    else:
        print("segment file does not exist!")
Example #11
Source File: mapmatcher.py From mapmatching with MIT License | 6 votes |
def getNetworkGraph(segments, segmentlengths):
    """Build a networkx graph from the network file, including segment
    lengths taken from arcpy.

    Selects the largest connected component of the network (to prevent
    errors from routing between unconnected parts).

    segments       -- network dataset name, relative to arcpy.env.workspace
    segmentlengths -- dict mapping OBJECTID -> segment length
    Returns the largest connected subgraph, or None if the file is missing.
    """
    # Generate the full network path so GDAL is able to read the file.
    path = str(os.path.join(arcpy.env.workspace, segments))
    print(path)  # print() form works on both Python 2 and 3
    if arcpy.Exists(path):
        g = nx.read_shp(path)
        # Select the largest connected component of the graph.
        sg = list(nx.connected_component_subgraphs(g.to_undirected()))[0]
        print("graph size (excluding unconnected parts): " + str(len(g)))
        # Append each road segment's length as an edge attribute.
        for n0, n1 in sg.edges():
            oid = sg[n0][n1]["OBJECTID"]
            sg[n0][n1]['length'] = segmentlengths[oid]
        return sg
    else:
        print("network file not found on path: " + path)
Example #12
Source File: SSURGO_CheckgSSURGO.py From geo-pit with GNU General Public License v2.0 | 6 votes |
def CheckFeatureClasses(theWS):
    """Verify that every required gSSURGO featureclass exists in theWS.

    Returns True when all are present, False otherwise (or on error).
    """
    try:
        PrintMsg(" \n\tChecking for existence of featureclasses", 0)
        env.workspace = theWS
        required = ['MUPOLYGON', 'FEATLINE', 'FEATPOINT', 'MULINE', 'SAPOLYGON', 'MUPOINT']
        # Collect whichever featureclasses are absent from the workspace.
        missingFC = [fc for fc in required if not arcpy.Exists(fc)]
        if missingFC:
            PrintMsg("\t" + os.path.basename(theWS) +
                     " is missing the following gSSURGO featureclasses: " +
                     ", ".join(missingFC), 2)
            return False
        return True
    except:
        errorMsg()
        return False
Example #13
Source File: mapmatcher.py From mapmatching with MIT License | 6 votes |
def exportPath(opt, trackname):
    """Export the list of segments into a shapefile — a subset of the
    loaded segment file, including all attributes.

    opt       -- list of OBJECTIDs of the matched path segments
    trackname -- track file name; its basename prefixes the output name
    """
    start_time = time.time()
    opt = getUniqueList(opt)
    qr = '"OBJECTID" IN ' + str(tuple(opt))
    outname = (os.path.splitext(os.path.basename(trackname))[0][:9]) + '_pth'
    arcpy.SelectLayerByAttribute_management('segments_lyr', "NEW_SELECTION", qr)
    try:
        if arcpy.Exists(outname):
            arcpy.Delete_management(outname)
        arcpy.FeatureClassToFeatureClass_conversion('segments_lyr', arcpy.env.workspace, outname)
        print("--- export: %s seconds ---" % (time.time() - start_time))
    # ExecuteError must be listed before Exception: it subclasses
    # Exception, so the original ordering made this branch unreachable.
    except arcpy.ExecuteError:
        arcpy.AddError(arcpy.GetMessages(2))
    except Exception:
        e = sys.exc_info()[1]
        print(e.args[0])
        # If using this code within a script tool, AddError can be used to
        # return messages back to the tool. If not, AddError has no effect.
        arcpy.AddError(e.args[0])
        arcpy.AddError(arcpy.env.workspace)
        arcpy.AddError(outname)
        #raise arcpy.ExecuteError
    except:
        # Any non-Exception errors are caught here. The assignment below
        # was commented out in the original, so print(e...) raised NameError.
        e = sys.exc_info()[1]
        print(e.args[0])
        arcpy.AddError(e.args[0])
        arcpy.AddError(arcpy.env.workspace)
        arcpy.AddError(outname)
Example #14
Source File: ToolValidator.py From public-transit-tools with Apache License 2.0 | 6 votes |
def populate_restrictions_and_impedances(param_ND, param_restrictions, param_impedances):
    """Populate the restriction and impedance attribute parameters with
    filter lists based on the chosen network dataset."""
    # Guard clauses: do nothing unless a valid network dataset was chosen.
    if not param_ND.altered:
        return
    inNADataset = param_ND.value
    if not arcpy.Exists(inNADataset):
        return
    atts = arcpy.Describe(inNADataset).attributes
    restrictions = []
    impedances = []
    # Walk the network attributes, collecting restriction names and
    # cost-attribute display strings (name plus units).
    for att in atts:
        usage = att.usageType
        if usage == "Restriction":
            restrictions.append(att.name)
        elif usage == "Cost":
            impedances.append(att.name + " (Units: " + att.units + ")")
    # Put the sorted value lists into the GUI fields.
    param_restrictions.filter.list = sorted(restrictions)
    param_impedances.filter.list = sorted(impedances)
Example #15
Source File: SSURGO_CheckgSSURGO2.py From geo-pit with GNU General Public License v2.0 | 6 votes |
def CheckFeatureClasses(theWS):
    """Verify that every required gSSURGO featureclass exists in theWS.

    Returns True when all are present, False otherwise (or on error).
    """
    try:
        PrintMsg(" \n\tChecking for existence of featureclasses", 0)
        env.workspace = theWS
        required = ['MUPOLYGON', 'FEATLINE', 'FEATPOINT', 'MULINE', 'SAPOLYGON', 'MUPOINT']
        # Collect whichever featureclasses are absent from the workspace.
        missingFC = [fc for fc in required if not arcpy.Exists(fc)]
        if missingFC:
            PrintMsg("\t" + os.path.basename(theWS) +
                     " is missing the following gSSURGO featureclasses: " +
                     ", ".join(missingFC), 2)
            return False
        return True
    except:
        errorMsg()
        return False
Example #16
Source File: SSURGO_Slope_Range_Inventory.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def FindField(layer, chkField):
    """Check a table or featureclass to see if the specified field exists.

    If a (possibly fully qualified) matching name is found, return that
    name; otherwise return "". Set the workspace before calling FindField.
    """
    try:
        if arcpy.Exists(layer):
            theDesc = arcpy.Describe(layer)
            # The original pre-assigned theField = theFields[0] — dead code.
            for theField in theDesc.fields:
                # ParseFieldName splits a fully qualified name into
                # "database, owner, table, field"; keep the last component,
                # e.g. "(null), (null), (null), MUKEY" -> "MUKEY".
                parseList = arcpy.ParseFieldName(theField.name)
                theFieldname = parseList.split(",")[-1].strip()
                if theFieldname.upper() == chkField.upper():
                    return theField.name
            return ""
        else:
            AddMsgAndPrint("\tInput layer not found", 0)
            return ""
    except:
        errorMsg()
        return ""
Example #17
Source File: Mapunit_Geodata_Breakdown_Description.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def FindField(layer, chkField):
    """Check a table or featureclass to see if the specified field exists.

    If a (possibly fully qualified) matching name is found, return that
    name; otherwise return False. Set the workspace before calling
    FindField.
    """
    try:
        if arcpy.Exists(layer):
            theDesc = arcpy.Describe(layer)
            # The original pre-assigned theField = theFields[0] — dead code.
            for theField in theDesc.fields:
                # ParseFieldName splits a fully qualified name into
                # "database, owner, table, field"; keep the last component,
                # e.g. "(null), (null), (null), MUKEY" -> "MUKEY".
                parseList = arcpy.ParseFieldName(theField.name)
                theFieldname = parseList.split(",")[-1].strip()
                if theFieldname.upper() == chkField.upper():
                    return theField.name
            return False
        else:
            AddMsgAndPrint("\tInput layer not found", 0)
            return False
    except:
        errorMsg()
        return False
Example #18
Source File: Tabulate_Components_By_Mukey.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def FindField(layer, chkField):
    """Check a table or featureclass to see if the specified field exists.

    If a (possibly fully qualified) matching name is found, return that
    name; otherwise return "". Set the workspace before calling FindField.
    """
    try:
        if arcpy.Exists(layer):
            theDesc = arcpy.Describe(layer)
            # The original pre-assigned theField = theFields[0] — dead code.
            for theField in theDesc.fields:
                # ParseFieldName splits a fully qualified name into
                # "database, owner, table, field"; keep the last component,
                # e.g. "(null), (null), (null), MUKEY" -> "MUKEY".
                parseList = arcpy.ParseFieldName(theField.name)
                theFieldname = parseList.split(",")[-1].strip()
                if theFieldname.upper() == chkField.upper():
                    return theField.name
            return ""
        else:
            AddMsgAndPrint("\tInput layer not found", 0)
            return ""
    except:
        errorMsg()
        return ""
Example #19
Source File: Select_Mapunits_by_Project.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def FindField(ssurgoInput, chkField):
    """Check a table or featureclass to see if the specified field exists.

    If a (possibly fully qualified) matching name is found, return that
    name; otherwise return "". Set the workspace before calling FindField.
    """
    try:
        if arcpy.Exists(ssurgoInput):
            theDesc = arcpy.Describe(ssurgoInput)
            # The original pre-assigned theField = theFields[0] — dead code.
            for theField in theDesc.fields:
                # ParseFieldName splits a fully qualified name into
                # "database, owner, table, field"; keep the last component,
                # e.g. "(null), (null), (null), MUKEY" -> "MUKEY".
                parseList = arcpy.ParseFieldName(theField.name)
                theFieldname = parseList.split(",")[-1].strip()
                if theFieldname.upper() == chkField.upper():
                    return theField.name
            return ""
        else:
            AddMsgAndPrint("\tInput layer not found", 0)
            return ""
    except:
        errorMsg()
        return ""
Example #20
Source File: SSURGO_MergeDatabasesByMap.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def GetTemplateDate(newDB, areaSym):
    """Get the SAVEREST date from a previously existing Template database.

    Used to compare with the date from the WSS dataset: if the existing
    database is the same age or newer, it is kept and the WSS version
    skipped. Returns the date string (or 0 when the database or its
    SACATALOG table is missing, or on error).
    """
    try:
        if not arcpy.Exists(newDB):
            return 0
        saCatalog = os.path.join(newDB, "SACATALOG")
        dbDate = 0
        if arcpy.Exists(saCatalog):
            with arcpy.da.SearchCursor(saCatalog, ("SAVEREST"),
                                       "[AREASYMBOL] = '" + areaSym + "'") as srcCursor:
                for rec in srcCursor:
                    # Keep the date portion of the SAVEREST timestamp.
                    dbDate = str(rec[0]).split(" ")[0]
            # (The original 'del saCatalog; del newDB' only unbound local
            # names and had no effect — removed.)
            return dbDate
        else:
            # Unable to open SACATALOG in the existing dataset; returning 0
            # makes the existing dataset get overwritten by a new WSS download.
            return 0
    except:
        errorMsg()
        return 0
Example #21
Source File: Area_Geodata_Breakdown_Description.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def FindField(layer, chkField):
    """Check a table or featureclass to see if the specified field exists.

    If a (possibly fully qualified) matching name is found, return that
    name; otherwise return "". Set the workspace before calling FindField.
    """
    try:
        if arcpy.Exists(layer):
            theDesc = arcpy.Describe(layer)
            # The original pre-assigned theField = theFields[0] — dead code.
            for theField in theDesc.fields:
                # ParseFieldName splits a fully qualified name into
                # "database, owner, table, field"; keep the last component,
                # e.g. "(null), (null), (null), MUKEY" -> "MUKEY".
                parseList = arcpy.ParseFieldName(theField.name)
                theFieldname = parseList.split(",")[-1].strip()
                if theFieldname.upper() == chkField.upper():
                    return theField.name
            return ""
        else:
            AddMsgAndPrint("\tInput layer not found", 0)
            return ""
    except:
        errorMsg()
        return ""
Example #22
Source File: SSURGO_Convert_to_Geodatabase.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def GetTemplateDate(newDB, areaSym):
    """Get the SAVEREST date from a previously existing Template database.

    Used to compare with the date from the WSS dataset: if the existing
    database is the same age or newer, it is kept and the WSS version
    skipped. Returns the date string (or 0 when the database or its
    SACATALOG table is missing, or on error).
    """
    try:
        if not arcpy.Exists(newDB):
            return 0
        saCatalog = os.path.join(newDB, "SACATALOG")
        dbDate = 0
        if arcpy.Exists(saCatalog):
            with arcpy.da.SearchCursor(saCatalog, ("SAVEREST"),
                                       "AREASYMBOL = '" + areaSym + "'") as srcCursor:
                for rec in srcCursor:
                    # Keep the date portion of the SAVEREST timestamp.
                    dbDate = str(rec[0]).split(" ")[0]
            # (The original 'del saCatalog; del newDB' only unbound local
            # names and had no effect — removed.)
            return dbDate
        else:
            # Unable to open SACATALOG in the existing dataset; returning 0
            # makes the existing dataset get overwritten by a new WSS download.
            return 0
    except:
        errorMsg()
        return 0
Example #23
Source File: SSURGO_Convert_to_Geodatabase.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def GetTableList(outputWS):
    """Query the mdstattabs table for the list of input text files
    (tabular) and output tables.

    Assumes MDSTATTABS is already present and populated in the output
    geodatabase per the XML Workspace Document. All static 'mdstat'
    tables and the spatial featureclasses are skipped. Returns [] on
    error.
    """
    try:
        tblList = list()
        mdTbl = os.path.join(outputWS, "mdstattabs")
        # raise X(msg) / except X as e replace the Python-2-only
        # 'raise X, msg' / 'except X, e' forms.
        if not arcpy.Exists(outputWS):
            raise MyError("Missing output geodatabase: " + outputWS)
        if not arcpy.Exists(mdTbl):
            raise MyError("Missing mdstattabs table in output geodatabase")
        else:
            # Got the mdstattabs table; build the list of physical table names.
            #mdFields = ('tabphyname','iefilename')
            mdFields = ('tabphyname')
            with arcpy.da.SearchCursor(mdTbl, mdFields) as srcCursor:
                for rec in srcCursor:
                    tblName = rec[0]
                    if not tblName.startswith('mdstat') and not tblName in ('mupolygon', 'muline', 'mupoint', 'featline', 'featpoint', 'sapolygon'):
                        tblList.append(rec[0])
            #PrintMsg(" \nTables to import: " + ", ".join(tblList), 0)
            return tblList
    except MyError as e:
        PrintMsg(str(e), 2)
        return []
Example #24
Source File: SSURGO_gSSURGO_byState.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def GetFolders(inputFolder, valList, bRequired, theTile):
    """Get a list of matching SSURGO dataset folders under inputFolder,
    assuming the 'soil_<areasymbol>' naming convention.

    Each candidate must contain a 'spatial' subfolder with a
    soilmu_a_<AREASYMBOL>.shp shapefile and match an AREASYMBOL from
    valList. Raises MyError (caught here, returning []) when required
    datasets are missing and bRequired is set.
    """
    try:
        env.workspace = inputFolder
        surveyList = list()
        missingList = list()
        # (Removed an unused 'folderList = arcpy.ListWorkspaces(...)' local.)
        # Check each subfolder to make sure it is a valid SSURGO dataset.
        # Validation: 'soil_' prefix, contains a spatial folder and a
        # soilmu_a shapefile, and matches an AREASYMBOL from the legend table.
        for shpAS in valList:
            # This should be one of the target SSURGO dataset folders;
            # add it to the choice list if its shapefile exists.
            subFolder = "soil_" + shpAS.lower()
            shpName = "soilmu_a_" + shpAS + ".shp"
            shpPath = os.path.join(os.path.join(inputFolder, os.path.join(subFolder, "spatial")), shpName)
            if arcpy.Exists(shpPath):
                surveyList.append(os.path.basename(subFolder))
            else:
                # Missing soil polygon shapefile for a required SSURGO dataset.
                missingList.append(shpAS)
        if len(missingList) > 0 and bRequired:
            # raise X(msg) replaces the Python-2-only 'raise X, msg'.
            raise MyError("Failed to find one or more required SSURGO datasets for " + theTile + ": " + ", ".join(missingList))
        return surveyList
    except MyError as err:
        PrintMsg(str(err), 2)
        return []
Example #25
Source File: SSURGO_BatchDownload.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def GetTabularDate(newFolder):
    """Return the SAVEREST date from tabular/sacatlog.txt as integer YYYYMMDD.

    Used to compare with the date from the WSS dataset; if the existing
    database is the same age or newer it is kept and the WSS version
    skipped. The source string looks like '12/05/2013 23:44:00'.
    Returns 0 when the file is missing or unreadable.
    """
    try:
        tabDate = 0
        # Find the text file in the tabular folder and read SAVEREST from it.
        saCatalog = os.path.join(newFolder, r"tabular\sacatlog.txt")
        if arcpy.Exists(saCatalog):
            # 'with' guarantees the handle is closed even on error
            # (the original used open/readline/close).
            with open(saCatalog, "r") as fh:
                rec = fh.readline()
            # SAVEREST is index 3 in the pipe-delimited record,
            # e.g. 9/23/2014 6:49:27
            vals = rec.split("|")
            recDate = vals[3]
            wssDate = "%m/%d/%Y %H:%M:%S"  # SAVEREST format in the text file
            intDate = "%Y%m%d"             # YYYYMMDD format for comparison
            dateObj = datetime.strptime(recDate, wssDate)
            tabDate = int(dateObj.strftime(intDate))
        else:
            PrintMsg(" \nUnable to find file: " + saCatalog, 1)
        return tabDate
    except:
        errorMsg()
        return tabDate
Example #26
Source File: SSURGO_BatchDownload.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def GetTemplateDate(newDB):
    """Get the SAVEREST date from a previously existing Template database
    as integer YYYYMMDD, for comparison with the filename-imbedded date.

    The da cursor returns a datetime.datetime (e.g. 2014-12-01 15:22:08).
    Returns 0 when no date can be determined.
    NOTE(review): relies on a module-level `areaSym` global — confirm it
    is defined before this function is called.
    """
    try:
        #if not arcpy.Exists(newDB):  # Check for existence before calling this function
        #    return 0
        saCatalog = os.path.join(newDB, "SACATALOG")
        dbDate = 0
        if arcpy.Exists(saCatalog):
            dateObj = None
            with arcpy.da.SearchCursor(saCatalog, ("SAVEREST"), "[AREASYMBOL] = '" + areaSym + "'") as srcCursor:
                for rec in srcCursor:
                    # Keep the (last) SAVEREST datetime seen.
                    #dateObj = int(rec[0].strftime('%Y%m%d'))
                    dateObj = rec[0]
            if dateObj is None:
                return 0
            intDate = "%Y%m%d"  # YYYYMMDD format for comparison
            dbDate = int(dateObj.strftime(intDate))
        else:
            # raise X(msg) replaces the Python-2-only 'raise X, msg'.
            raise MyError("SACATALOG table in Template database not found")
        # (The original 'del saCatalog; del newDB' only unbound local
        # names and had no effect — removed.)
        return dbDate
    except MyError as e:
        PrintMsg(str(e), 2)
        return 0
Example #27
Source File: SSURGO_CheckgSSURGO.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def MapunitCount(theWS, uniqueValues):
    """Return the number of distinct mapunits (MUKEY values) in this
    survey, read from the MUPOLYGON featureclass.

    Returns 0 when MUPOLYGON is missing or on error.
    """
    try:
        env.workspace = theWS
        # (Removed an unused 'muTbl' local from the original.)
        muPoly = os.path.join(theWS, "MUPOLYGON")
        if arcpy.Exists(muPoly):
            # PrintMsg("\tGetting mapunit count...", 0)
            # A set of MUKEYs gives the distinct count directly — the
            # original built a list, a set, and a sorted list just for len().
            mukeys = {row[0] for row in arcpy.da.SearchCursor(muPoly, ['MUKEY'])}
            return len(mukeys)
        else:
            # Unable to find the MUPOLYGON featureclass.
            PrintMsg("\tMUPOLYGON featureclass not found in " + os.path.basename(theWS), 2)
            return 0
    except:
        errorMsg()
        return 0
Example #28
Source File: SSURGO_CheckgSSURGO.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def CheckTables(theWS):
    """Verify that each metadata and SDV attribute table is present.

    (Record counts and the SACATALOG contents are checked elsewhere.)
    Returns True when all tables exist, False otherwise (or on error).
    """
    try:
        PrintMsg(" \n\t\tChecking for existence of metadata and SDV attribute tables")
        env.workspace = theWS
        required = ['mdstatdomdet', 'mdstatdommas', 'mdstatidxdet', 'mdstatidxmas',
                    'mdstatrshipdet', 'mdstatrshipmas', 'mdstattabcols', 'mdstattabs',
                    'sdvalgorithm', 'sdvattribute', 'sdvfolder', 'sdvfolderattribute']
        # Collect whichever tables are absent from the workspace.
        missingTbl = [tbl for tbl in required if not arcpy.Exists(tbl)]
        if missingTbl:
            PrintMsg("\t" + os.path.basename(theWS) +
                     " is missing the following gSSURGO attribute tables: " +
                     ", ".join(missingTbl), 2)
            return False
        return True
    except:
        errorMsg()
        return False
Example #29
Source File: SSURGO_CheckgSSURGO2.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def MapunitCount(theWS, uniqueValues):
    """Return the number of distinct mapunits (MUKEY values) in this
    survey, read from the MUPOLYGON featureclass.

    Returns 0 when MUPOLYGON is missing or on error.
    """
    try:
        env.workspace = theWS
        # (Removed an unused 'muTbl' local from the original.)
        muPoly = os.path.join(theWS, "MUPOLYGON")
        if arcpy.Exists(muPoly):
            # PrintMsg("\tGetting mapunit count...", 0)
            # A set of MUKEYs gives the distinct count directly — the
            # original built a list, a set, and a sorted list just for len().
            mukeys = {row[0] for row in arcpy.da.SearchCursor(muPoly, ['MUKEY'])}
            return len(mukeys)
        else:
            # Unable to find the MUPOLYGON featureclass.
            PrintMsg("\tMUPOLYGON featureclass not found in " + os.path.basename(theWS), 2)
            return 0
    except:
        errorMsg()
        return 0
Example #30
Source File: SSURGO_CheckgSSURGO2.py From geo-pit with GNU General Public License v2.0 | 5 votes |
def CheckCatalog(theWS):
    """Verify at least one survey is populated in the SACATALOG table.

    Returns True when the catalog lists one or more surveys, False
    otherwise (or on error).
    """
    try:
        env.workspace = theWS
        saTbl = os.path.join(theWS, "sacatalog")
        if arcpy.Exists(saTbl):
            # Parse every AREASYMBOL from the catalog. If the geospatial
            # naming convention isn't followed, this will not work.
            surveyList = list()
            with arcpy.da.SearchCursor(saTbl, ("AREASYMBOL")) as srcCursor:
                for rec in srcCursor:
                    surveyList.append(rec[0])
            if len(surveyList) == 0:
                PrintMsg("\t" + os.path.basename(theWS) + "\\SACATALOG table contains no surveys", 2)
                return False
            else:
                PrintMsg(os.path.basename(theWS) + " contains " + str(len(surveyList)) + " soil surveys", 0)
                # Fix: the success path previously fell through and
                # returned None, which reads as False in truthiness checks
                # while the failure paths explicitly return False.
                return True
        else:
            # Unable to open the SACATALOG table in the existing dataset.
            PrintMsg("\tSACATALOG table not found in " + os.path.basename(theWS), 2)
            return False
    except:
        errorMsg()
        return False