API Example: Organization Backup
This script backs up an entire organization. The backup is a .zip file that is downloaded locally. An api_key cannot be used here because access to backup content is restricted to user credentials only.
# GroveStreams.com Python 3.7 Example
# Demonstrates getting a session token and backing up an organization
# to a local file.
#
# License:
# Copyright 2019 GroveStreams LLC.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import sys


def checkResponse(response):
    """Raise an Exception unless *response* is an HTTP 200/201 success.

    The exception message includes the HTTP reason phrase (when the server
    supplied one) and the decoded response body to aid debugging.
    """
    if response.status_code not in (200, 201):
        body = response.content.decode('utf-8')
        if response.reason is not None:
            raise Exception("HTTP Failure Reason: " + response.reason
                            + " body: " + body)
        raise Exception("HTTP Failure Body: " + body)


def main():
    """Log in to GroveStreams and stream an organization backup to disk.

    Backup content rights are restricted to user credentials, so a user
    login (not an api_key) is required to obtain the session token.
    """
    # requests is third-party; imported here so the rest of the module
    # can be imported even where requests is not installed.
    import requests

    # CHANGE THESE!!!!!!
    userId = "brucelee@acme.com"
    userPwd = "wingchung"
    org = 'af5da88b-1487-3873-a7c4-5ea69d867f3a'

    # This is a Linux path. Change it for Windows — note the ':' produced
    # by %H:%M is not a legal character in Windows file names.
    backupFile = ("/home/gs/gs_backup_"
                  + datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
                  + ".zip")

    # Login and request a user session token.
    url = "https://grovestreams.com/api/login"
    data = {"email": userId, "password": userPwd}
    response = requests.post(url, json=data, timeout=60)
    checkResponse(response)

    json_response = response.json()
    if not json_response["success"]:
        raise Exception(str(json_response["message"]))
    session = json_response["sessionUid"]

    # Download the backup file. Let requests build/encode the query string,
    # and stream the body in chunks so the whole .zip is never in memory.
    url = "https://grovestreams.com/api/organization/backup"
    params = {
        "org": org,
        "time": "",
        "exportAll": "true",
        "compDir": "",
        "contentDir": "",
        "session": session,
    }
    chunk_size = 100000
    response = requests.get(url, params=params, stream=True, timeout=60)
    checkResponse(response)
    with open(backupFile, "wb") as fd:
        for chunk in response.iter_content(chunk_size):
            fd.write(chunk)


if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        # Report the failure on stderr and exit non-zero so shell/cron
        # callers can detect it (the original exited 0 even on failure).
        print(str(e), file=sys.stderr)
        sys.exit(1)