# migrate — Firefly app storage-values migration script
  1. #!/usr/bin/python3
  2. import json
  3. import os
  4. import sys
  5. def storage_migrate(storage):
  6. delete_keys = []
  7. if storage['type'] == 'hostPath':
  8. # Check if the key exists, if not we have already migrated
  9. if not storage.get('hostPath'):
  10. return storage
  11. storage['hostPathConfig'] = {'hostPath': storage['hostPath']}
  12. delete_keys.append('hostPath')
  13. elif storage['type'] == 'ixVolume':
  14. # Fixes an invalid key carried from values.yaml
  15. storage.pop('hostPath', None)
  16. # Check if the key exists, if not we have already migrated
  17. if not storage.get('datasetName'):
  18. return storage
  19. storage['ixVolumeConfig'] = {'datasetName': storage['datasetName']}
  20. delete_keys.append('datasetName')
  21. for key in delete_keys:
  22. storage.pop(key, None)
  23. return storage
  24. def migrate(values):
  25. storage_key = 'fireflyStorage'
  26. storages = ['uploads', 'pgData', 'pgBackup']
  27. for storage in storages:
  28. check_val = values.get(storage_key, {}).get(storage, {})
  29. if not isinstance(check_val, dict) or not check_val:
  30. raise Exception(f'Storage section {storage} is malformed')
  31. values[storage_key][storage] = storage_migrate(check_val)
  32. return values
  33. if __name__ == '__main__':
  34. if len(sys.argv) != 2:
  35. exit(1)
  36. if os.path.exists(sys.argv[1]):
  37. with open(sys.argv[1], 'r') as f:
  38. print(json.dumps(migrate(json.loads(f.read()))))