Use F5 REST API Python SDK to update an external datagroup
I have working code using the F5 Python REST SDK that can upload a txt file and create an external datagroup from it.
But I cannot figure out how to use the API to update that datagroup file without deleting the datagroup and recreating it.
All of the examples I can find (e.g. https://devcentral.f5.com/s/question/0D51T00006i7j1ySAA/upload-programatically-external-data-group-file) do the following:
- create a datagroup file object from a sourcepath
- create a datagroup object from the datagroup file object
- display the created object's attributes
Then they delete the datagroup object and datagroup file object so that they can run again.
I cannot find a way to reload the data from an updated data file into an existing datagroup object.
I have also seen code that can walk a datagroup and update items in it one by one, but I'd really like to use a file update model.
Any ideas? Code sample follows:
from f5.bigip import ManagementRoot
# Connect to the BIG-IP management interface. Replace the FIXME placeholders
# with the device address and credentials before running; every function below
# reads this module-level `mgmt` session object.
mgmt = ManagementRoot("IP-FIXME", "USER-FIXME", "PASSWD-FIXME")
def showDatagroups():
    """Print every external datagroup on the BIG-IP, with its records if any."""
    print("\n\n**** Showing Datagroups")
    groups = mgmt.tm.ltm.data_group.externals.get_collection()
    for idx, dg in enumerate(groups):
        print("\n{}: {}".format(idx, dg.raw))
        print("\n{}: {}".format(idx, dg.name))
        # External datagroups only carry a `records` attribute when the
        # server returned one, so probe before reading it.
        if not hasattr(dg, 'records'):
            print("\nObject {} has no records".format(dg.name))
            continue
        print("\n{}: {}".format(idx, dg.records))
        for record in dg.records:
            print("\nrec: {}".format(record))
def showDatagroupFiles():
    """Print the raw attributes of every datagroup file object on the BIG-IP."""
    print("\n\n**** Showing DatagroupFiles")
    dg_files = mgmt.tm.sys.file.data_groups.get_collection()
    for idx, dg_file in enumerate(dg_files):
        print('\n{}: {}'.format(idx, dg_file.raw))
def uploadFile(f):
    """Upload local file *f* to the BIG-IP's REST uploads folder.

    The upload itself works, but NOTE(review): the uploaded file could not be
    referenced directly by the datagroup-file create call — creation below
    pulls from an HTTP sourcePath instead.
    """
    print("\n\n**** Uploading datagroup file {}".format(f))
    mgmt.shared.file_transfer.uploads.upload_file(f)
def createDatagroupFile(source_path, dgname, dataType):
    """Create a sys/file datagroup file named *dgname* from *source_path*.

    Skips creation when a file with that name already exists on the device.

    Fixes: the original log lines referenced the module globals ``fname`` and
    ``sourcePath`` instead of the ``source_path`` parameter, so the function
    only worked (and logged misleading paths) when run from this script's
    ``__main__`` block.

    :param source_path: URL the BIG-IP can fetch the raw file from
                        (passed as the REST ``sourcePath`` property).
    :param dgname: name for the new datagroup file object.
    :param dataType: record type, e.g. 'string', 'ip', or 'integer'.
    """
    print("\n\n**** Creating datagroup file {}, name {}, type {}".format(
        source_path, dgname, dataType))
    # Check for an existing file object first.
    dgFiles = mgmt.tm.sys.file.data_groups.get_collection()
    if any(f.name == dgname for f in dgFiles):
        print("File {} already exists".format(dgname))
        # Here's where I would just update the file if I could
    else:
        print("Creating DG File {} from fname {}".format(dgname, source_path))
        mgmt.tm.sys.file.data_groups.data_group.create(
            sourcePath=source_path, name=dgname, type=dataType)
def createDatagroupFromFile(name, file):
    """Create an ltm external datagroup *name* backed by datagroup file *file*.

    Skips creation when a datagroup with that name already exists.

    Fixes: the original existence check queried
    ``mgmt.tm.sys.file.data_groups`` — the datagroup *file* collection — when
    the object being created lives in ``mgmt.tm.ltm.data_group.externals``.
    A datagroup sharing a name with no file (or vice versa) would have been
    mis-detected. The check now queries the same collection it creates into.

    :param name: name of the external datagroup object.
    :param file: name of an existing sys/file datagroup file.
    """
    print("\n\n**** Creating datagroup {} from file {}".format(name, file))
    # Check the ltm external-datagroup collection (not the file collection).
    dgs = mgmt.tm.ltm.data_group.externals.get_collection()
    if any(dg.name == name for dg in dgs):
        print("Datagroup {} already exists".format(name))
        # Here's where I would just update the datagroup if I could
    else:
        mgmt.tm.ltm.data_group.externals.external.create(
            name=name, externalFileName=file)
if __name__ == "__main__":
    # Local copy of the raw datagroup file.
    fname = './dg_test5.txt'
    # The raw file is also served over HTTP because the create call's
    # sourcePath must be a URL the BIG-IP itself can fetch — per the author,
    # this was the only way the create worked.
    sourcePath = 'http://127.0.0.1:8000/dg_test5.txt'
    dgfilename = 'dg_test5.txt'
    dgname = 'dg_test5'

    uploadFile(fname)
    createDatagroupFile(sourcePath, dgfilename, 'string')
    createDatagroupFromFile(dgname, dgfilename)
    showDatagroups()
    showDatagroupFiles()