Trouble with FeatureClassToGeodatabase_conversion in arcpy, using arcpy.da.Walk - Python

With this code, I am trying to read all files in a directory and all its subdirectories. I have a separate list of file names; if the search finds files in the directories that are on that list, I want to copy those feature classes to another location. When the code gets to FeatureClassToGeodatabase, I keep getting an error that the input features data type is not supported or does not exist. I wasn't sure whether I needed to capture the path as well as the file name, so I created a couple of lists to hold them separately, but I'm stuck here:
import arcpy
import os

workspace = r'F:\SF_HMP - transferred to Ydrive'
output_loc = r'C:\temp\temp.gdb'
mssng_files = r'F:\SF_HMP - transferred to Ydrive\Maps\broken_links_missing_files.txt'

files_to_find = []
layers_list = []
layers_path = []

with open(mssng_files) as filelist:
    for line in filelist:
        files_to_find.append(line.strip())

for dirpath, dirnames, filenames in arcpy.da.Walk(workspace, datatype="FeatureClass"):
    for filename in filenames:
        layers_list.append(filename)
        layers_path.append(os.path.join(dirpath, filename))

for lyr in layers_list:
    if lyr in files_to_find:
        arcpy.FeatureClassToGeodatabase_conversion(lyr, output_loc)

I realized I needed to specify the workspace for each file to be copied over. I also repeated the code to search for and copy over rasters and tables:
import arcpy, os, easygui, sys

mssng_files = r'L:\SF_HMP - transferred to Ydrive\Maps\broken_links_missing_files.txt'
wkspc = easygui.enterbox("Enter workspace path:", title='Search for Files')
output_loc = easygui.enterbox("Output location:", title='Copy Files')

files_to_find = []
with open(mssng_files) as filelist:
    for line in filelist:
        files_to_find.append(line.strip())

for dirpath, dirnames, filenames in arcpy.da.Walk(wkspc, datatype='FeatureClass'):
    for filename in filenames:
        if filename in files_to_find:
            ws_l = os.path.join(dirpath, filename)
            arcpy.env.workspace = ws_l
            arcpy.FeatureClassToGeodatabase_conversion(ws_l, output_loc)

for dirpath, dirnames, filenames in arcpy.da.Walk(wkspc, datatype='RasterDataset'):
    for filename in filenames:
        if filename in files_to_find:
            ws_r = os.path.join(dirpath, filename)
            arcpy.env.workspace = ws_r
            arcpy.RasterToGeodatabase_conversion(ws_r, output_loc)

for dirpath, dirnames, filenames in arcpy.da.Walk(wkspc, datatype='Table'):
    for filename in filenames:
        if filename in files_to_find:
            ws_t = os.path.join(dirpath, filename)
            arcpy.env.workspace = ws_t
            arcpy.TableToGeodatabase_conversion(ws_t, output_loc)
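For reference, a more compact sketch of the same idea (untested, and only using the names already defined above): because arcpy.da.Walk yields workspace paths and dataset names, the joined catalog path can be passed straight to each conversion tool without setting arcpy.env.workspace. The dictionary mapping datatypes to tools is my own illustration, not part of the original script.

import arcpy
import os

# Assumes wkspc, output_loc and files_to_find are set up exactly as in the script above
tools = {
    'FeatureClass': arcpy.FeatureClassToGeodatabase_conversion,
    'RasterDataset': arcpy.RasterToGeodatabase_conversion,
    'Table': arcpy.TableToGeodatabase_conversion,
}

for datatype, tool in tools.items():
    for dirpath, dirnames, filenames in arcpy.da.Walk(wkspc, datatype=datatype):
        for filename in filenames:
            if filename in files_to_find:
                # Full catalog path to the dataset, e.g. a shapefile on disk or a feature class in a gdb
                tool(os.path.join(dirpath, filename), output_loc)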

Related

Python - copy specific file from subfolder to destination, get filename from text file

I want my script to read a list of names from a text file, search for those files in a selected folder and its subfolders, and then copy them to another selected folder. My script runs without error, but nothing is copied.
My script:
import os
import os.path
import shutil

textFile = ("D:\\Test\\list.txt")
sourceFolder = ("D:\\Test")
destinationFolder = ("D:\\")

filesToFind = []
with open(textFile, "r") as tx:
    for row in tx:
        filesToFind.append(row.strip())

for root, dirs, filename in os.walk(sourceFolder):
    if filename in filesToFind:
        f = os.path.join(root, filename)
        shutil.copy(f, destinationFolder)
I haven't tested it, but I think this will work. Change this:
for root, dirs, filename in os.walk(sourceFolder):
    if filename in filesToFind:
        f = os.path.join(root, filename)
        shutil.copy(f, destinationFolder)
To this:
for root, dirs, filenames in os.walk(sourceFolder):
    for filename in filenames:
        if filename in filesToFind:
            f = os.path.join(root, filename)
            shutil.copy(f, destinationFolder)
# Same approach using glob - more efficient, and tested.
# One more feature added: it reports when a listed file is not found.
import os
import os.path
import shutil
import glob

textFile = ("D:\\Test\\list.txt")
sourceFolder = ("D:\\Test")
destinationFolder = ("D:\\")

f = open(textFile, "r").readlines()
for i in f:
    ListFile = glob.glob(os.path.join(sourceFolder, "**", i.strip()), recursive=True)
    if len(ListFile):
        print(ListFile[0], destinationFolder, os.path.basename(ListFile[0]))
        destinationfile = os.path.join(destinationFolder, os.path.basename(ListFile[0]))
        shutil.copyfile(ListFile[0], destinationfile)
    else:
        print(i, "-File not found")
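The same idea reads a bit more compactly with pathlib (a sketch of my own, not part of the original answer; it assumes Python 3.6+ so that rglob and path-like arguments to shutil are available):

import shutil
from pathlib import Path

textFile = Path("D:/Test/list.txt")
sourceFolder = Path("D:/Test")
destinationFolder = Path("D:/")

for name in textFile.read_text().splitlines():
    name = name.strip()
    if not name:
        continue
    matches = list(sourceFolder.rglob(name))   # recursive search for the exact file name
    if matches:
        shutil.copyfile(matches[0], destinationFolder / matches[0].name)
    else:
        print(name, "- File not found")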

Python - How do I open multiple .txt files in a folder and add characters to each .txt file?

There are text files of various names in the folder 'a'. I want to read all of these text files and add the letter 'b' to each text file. What should I do?
cwd = os.getcwd()
input_dir = os.path.join(cwd, "my .txt files dir")
sorts = sorted(glob(input_dir), key=lambda x: (len(x), x))
for f in sorts:
    f = open(input_dir, 'a')
    data = "add text"
    f.write(data)
    f.close()
Append data to each file:
- first: get all the files in folder 'a'.
- second: keep only those with the .txt extension.
- third: open each one and do something ('append', or 'rewrite').
Demo:
import os

# your .txt files dir
path = 'a'
# the data you want to append
appendData = 'b'

fileNames = list(os.walk(path))[0][2]
fileNames.sort(key=len)
fileNums = len(fileNames)

# your dst file extension
fileExt = '.txt'
# # Extract extension from filename
# fileExt = os.path.splitext(fileNames[0])[1]

for fileName in fileNames:
    if fileName.endswith(fileExt):
        fileFullPath = os.path.join(path, fileName)
        with open(fileFullPath, 'a') as f:
            f.write(appendData)
Like the others said, this is an easy question that could easily be found on Google. Anyway, here's how to do it:
from os import listdir
from os.path import isfile, isdir, join

files = [file for file in listdir("files") if isfile(join("files", file))]
directories = [directory for directory in listdir("files") if isdir(join("files", directory))]
print(files)

for file_name in files:
    try:
        file = open("files/" + file_name, "a")
        file.write("b")
        file.close()
    except IOError as err:
        print("Could not open file because : ", err)
Replace "file" with the directory where your files are or the path to that directory like "directory0/directory1/directory_with_files"
Avoid opening files with:
f = open(input_dir, 'a')
f.close()
Use this instead:
with open(input_dir, 'a') as inputFile:
    # do something with inputFile
Also, what you want is:
import os
import glob  # we will use this module to match only .txt files

path = 'your/path'
for filename in glob.glob(os.path.join(path, '*.txt')):
    with open(filename, 'a') as inputFile:
        inputFile.write('b')

Read the paths from the text file

I wrote a script which scans the shared folders and gets the age of every file in those folders, along with some other file metadata.
my_paths = []
for p in ['\\\\data\\hold\\app1', '\\\\data\\hold\\app2']:
    for dirpath, dirnames, filenames in os.walk(p):
        my_paths.append(filenames)
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            if last_modified(full_path) < threshold:
                string_output = 'Age :' + age_file(full_path) + '\n' + 'File Size : ' + file_size(full_path) + '\n'
                print(string_output)
                #create(string_output);
Instead of hard-coding all the shares in the script, I want it to read them from a text file that I can update.
text.txt
\\data\hold\app1
\\data\hold\app2
Revamped code:
my_paths = []
for line in open(r'd:\mydata\text.txt').readlines():
    my_paths.append(line.strip())
    print my_paths
    for dirpath, dirnames, filenames in os.walk(line):
        my_paths.append(filenames)
        for filename in filenames:
            full_path = os.path.join(dirpath, filename)
            if last_modified(full_path) < threshold:
                string_output = 'Age :' + age_file(full_path) + '\n' + 'File Size : ' + file_size(full_path) + '\n'
                print(string_output)
                #create(string_output);
my_paths comes back with the data I expect, like ['\\data\hold\app1', '\\data\hold\app2'], but somehow the rest is not working.
You simply forgot to strip the line you read in the loop:
...
for dirpath, dirnames, filenames in os.walk(line.strip()):
...
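Putting that fix into the revamped script, the whole loop would look something like this (a sketch only; last_modified, age_file, file_size and threshold are the helpers and settings from the original script and are assumed to be defined elsewhere):

import os

my_paths = []
with open(r'd:\mydata\text.txt') as path_file:
    for line in path_file:
        share = line.strip()          # strip the newline before walking
        if not share:
            continue                  # skip blank lines in text.txt
        my_paths.append(share)
        for dirpath, dirnames, filenames in os.walk(share):
            for filename in filenames:
                full_path = os.path.join(dirpath, filename)
                # last_modified, age_file, file_size and threshold come from the original script
                if last_modified(full_path) < threshold:
                    string_output = 'Age :' + age_file(full_path) + '\n' + 'File Size : ' + file_size(full_path) + '\n'
                    print(string_output)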

Getting the paths of each file in a directory into an array in Python

I'm trying to put the path of every file under the Data folder into an array files[]. My current code only goes one level deep: it reads files in the immediate subfolders of Data, but it does not reach the subfolders of those subfolders without writing yet another loop. What I want is a loop that descends to any depth inside Data, so I can collect all of the file paths.
For example this is what I get:
['Data/DataReader.py', 'Data/DataReader - Copy.py', 'Data/Dat/DataReader.py', 'Data/fge/er.txt']
This is what I want but it can still go into deeper folders:
['Data/DataReader.py', 'Data/DataReader - Copy.py', 'Data/Dat/DataReader.py', 'Data/fge/er.txt', 'Data/fge/Folder/dummy.png', 'Data/fge/Folder/AnotherFolder/data.dat']
This is my current code; what would I need to add or change?
import os
from os import walk

files = []
folders = []
for (dirname, dirpath, filename) in walk('Data'):
    folders.extend(dirpath)
    files.extend(filename)
    break

filecount = 0
for i in files:
    i = 'Data/' + i
    files[filecount] = i
    filecount += 1

foldercount = 0
for i in folders:
    i = 'Data/' + i
    folders[foldercount] = i
    foldercount += 1

subfolders = []
subf_files = []
for i in folders:
    for (dirname, dirpath, filename) in walk(i):
        subfolders.extend(dirpath)
        subf_files.extend(filename)
        break
    subf_files_count = 0
    for a in subf_files:
        a = i + '/' + a
        files = files
        files.append(a)
        print files
    subf_files = []

print files
print folders
Thanks a lot!
I don't understand what you are trying to do, especially why you break out of the walk after the first element:
import os

files = []
folders = []
for (path, dirnames, filenames) in os.walk('Data'):
    folders.extend(os.path.join(path, name) for name in dirnames)
    files.extend(os.path.join(path, name) for name in filenames)

print files
print folders
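Worth noting (my addition, not part of the original answer): os.walk already descends to any depth on its own, so no break or manual second pass is needed. If forward slashes like the question's expected output are wanted regardless of platform, the joined paths from the block above can be normalised afterwards:

files = [p.replace(os.sep, '/') for p in files]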

return text file path

I want to return the path of a file if it is found by the program, but I want it to continue looping (or recursing) until all files have been checked.
def findAll(fname, path):
    for item in os.listdir(path):
        n = os.path.join(path, item)
        try:
            findAll(n, fname)
        except:
            if item == fname:
                print(os.idontknow(item))
So I'm having trouble with getting the path; right now I have
os.idontknow(item)
as a placeholder.
Input is:
findAll('fileA.txt', 'testpath')
The output is:
['testpath\\fileA.txt', 'testpath\\folder1\\folder11\\fileA.txt', 'testpath\\folder2\\fileA.txt']
Per my comment above, here is an example that will start at the current directory and search through all sub-directories, looking for files matching fname:
import os

# path is your starting point - everything under it will be searched
path = os.getcwd()
fname = 'file1.txt'

my_files = []
# Start iterating, and any time we see a file that matches fname,
# add it to our list
for root, dirs, files in os.walk(path):
    for name in files:
        if name == fname:
            # root here is the directory that contains the file
            my_files.append(os.path.join(root, name))

print my_files
Or as a function (more appropriate for your case :) ):
import os

def findAll(fname, start_dir=os.getcwd()):
    my_files = []
    for root, dirs, files in os.walk(start_dir):
        for name in files:
            if name == fname:
                my_files.append(os.path.join(root, name))
    return my_files

print findAll('file1.txt')
print findAll('file1.txt', '/some/other/starting/directory')
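One caveat worth noting (my addition, not part of the original answer): a default like start_dir=os.getcwd() is evaluated once, when the function is defined, so the starting directory is frozen at that moment rather than looked up on each call. A common variant that resolves it at call time:

import os

def findAll(fname, start_dir=None):
    # Resolve the default at call time instead of at definition time
    if start_dir is None:
        start_dir = os.getcwd()
    my_files = []
    for root, dirs, files in os.walk(start_dir):
        for name in files:
            if name == fname:
                my_files.append(os.path.join(root, name))
    return my_files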
Something like this, maybe?
import os

path = "path/to/your/dir"
for (path, dirs, files) in os.walk(path):
    print files
