I have several .json files with the same structure in the same directory. I would like to create a single csv file with values from certain keys of each json file.
Looping through one single file, everything works. Here is a snapshot of the script:
import json, os
import csv

input_file = open('JSON/test.json')
json_array = json.load(input_file)
object_list = []

for obj in json_array:
    for item in obj['objects']:
        object_details = {"_system_object_id": None, "preview_url": None, "original_download_url": None, "original_url": None}
        object_details['_system_object_id'] = item['_system_object_id']
        try:
            object_details['preview_url'] = item['do']['do_digitalobject'][0]['versions']['preview']['url']
        except:
            print("not found")
        try:
            object_details['original_download_url'] = item['do']['do_digitalobject'][0]['versions']['original']['download_url']
        except:
            print("not found")
        try:
            object_details['original_url'] = item['do']['do_digitalobject'][0]['versions']['original']['url']
        except:
            print("not found")
        #object_details['type'] = item['type']
        object_list.append(object_details)

print(object_list)
How can I process all the .json files in the folder?
Thanks
Using listdir, you can find all the .json files in the specified directory, then iterate over them and apply your logic:
from os import listdir
from os.path import isfile, join
import json

dir_path = "full/path/to/dir"
object_list = []

# collect the full paths of all .json files in dir_path
all_json_files = [join(dir_path, f) for f in listdir(dir_path) if isfile(join(dir_path, f)) and f.endswith(".json")]

# iterate over the paths and apply your logic
for file_path in all_json_files:
    with open(file_path) as input_file:
        json_array = json.load(input_file)
        for obj in json_array:
            # your business logic goes here (build object_details as in your snippet)
            object_list.append(object_details)

print(object_list)
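Since the end goal is a single csv file, once object_list has been filled across all files you can write it out with csv.DictWriter. A minimal sketch, assuming the four keys from your object_details dicts; "output.csv" is a placeholder path:

import csv

fieldnames = ["_system_object_id", "preview_url", "original_download_url", "original_url"]

# "output.csv" is a placeholder output path
with open("output.csv", "w", newline="") as csv_file:
    writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
    writer.writeheader()           # header row with the key names
    writer.writerows(object_list)  # one row per collected dict

Values that stayed None (your "not found" cases) are written as empty cells, so every row keeps the same columns.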