#!/usr/bin/python3

import argparse
import boto3
import grove.fs
import os
import subprocess
import sys
from botocore.errorfactory import ClientError

def main():
  """Build a deploy tarball from a source directory, optionally uploading it to S3.

  Runs the source directory's ./build script with STAGING_DIR and TMP_DIR
  pointing into the build directory, reads build.version (required) and
  build.name (optional) from the staging area, archives the remaining staged
  files, and — when --upload-bucket is given — uploads the tarball unless an
  artifact with the same key already exists.

  Raises:
    Exception: if the source path is missing, or the build path already
      exists and -R was not passed, or no files remain to archive.
    subprocess.CalledProcessError: if ./build or tar exits non-zero.
  """
  parser = argparse.ArgumentParser(description='Build a deploy tarball.')
  parser.add_argument('source_path', metavar='source-directory', type=str)
  parser.add_argument('build_path', metavar='build-directory', type=str)
  parser.add_argument('--upload-bucket', metavar='bucket', help='upload to this S3 bucket', type=str)
  parser.add_argument('--upload-prefix', metavar='prefix', help='upload to this prefix in the S3 bucket', type=str, default="tgz/")
  parser.add_argument('-R', '--remove-existing-build-directory', action='store_true', default=False)
  args = parser.parse_args()

  if not os.path.exists(args.source_path):
    raise Exception("source path does not exist: " + args.source_path)

  if os.path.exists(args.build_path):
    if args.remove_existing_build_directory:
      grove.fs.rmr(args.build_path)
    else:
      raise Exception("build path already exists: " + args.build_path)

  # Prepare staging, temp directories
  stage_path = os.path.abspath(os.path.join(args.build_path, "stage"))
  tmp_path = os.path.abspath(os.path.join(args.build_path, "tmp"))
  grove.fs.mkdirs(stage_path)
  grove.fs.mkdirs(tmp_path)

  # Run build script. Pass an augmented *copy* of the environment rather than
  # mutating os.environ, so this process's own environment stays clean.
  build_env = dict(os.environ, STAGING_DIR=stage_path, TMP_DIR=tmp_path)
  subprocess.check_call(["./build"], env=build_env, cwd=args.source_path)

  # Read build.version (required: the build script must produce it).
  build_version_path = os.path.join(stage_path, "build.version")
  build_version = grove.fs.read_file(build_version_path).rstrip()

  # Read build.name, if it exists. Otherwise just guess from the source dir.
  build_name_path = os.path.join(stage_path, "build.name")
  if os.path.exists(build_name_path):
    build_name = grove.fs.read_file(build_name_path).rstrip()
  else:
    build_name = os.path.basename(args.source_path)

  # Determine list of files to include in the tarball: everything staged
  # except dotfiles and the build metadata files themselves.
  files_to_include = [f for f in os.listdir(stage_path) if (not f.startswith('.')) and f != 'build.name' and f != 'build.version']

  if files_to_include:
    print("including files: " + ", ".join(files_to_include))
  else:
    raise Exception("no files to include in the archive, aborting")

  # Create tarball
  tarball_name = build_name + "-" + build_version + ".tar.gz"
  tarball_path = os.path.abspath(os.path.join(args.build_path, tarball_name))
  subprocess.check_call(["tar", "-C", stage_path, "-czf", tarball_path] + files_to_include)

  # Upload, if asked
  if args.upload_bucket is not None:
    s3 = boto3.client('s3')
    key_string = args.upload_prefix + tarball_name
    try:
      s3.head_object(Bucket=args.upload_bucket, Key=key_string)
      print("skipping upload, artifact already present at s3://{}/{}".format(args.upload_bucket, key_string))
    except ClientError as err:
      # head_object raises ClientError for *any* failure. Only a missing key
      # (404 / NoSuchKey) means "not yet uploaded"; anything else (403,
      # throttling, bad bucket) is a real error and must not be masked by
      # silently attempting an upload.
      if err.response.get("Error", {}).get("Code") not in ("404", "NoSuchKey"):
        raise
      print("uploading to s3://{}/{}".format(args.upload_bucket, key_string))
      s3.upload_file(tarball_path, args.upload_bucket, key_string)

def upload_callback(done, total):
  """Print S3 upload progress as an integer percentage.

  NOTE(review): currently unused by main(). boto3's upload_file Callback is
  invoked with a single bytes-transferred increment, so wiring this in would
  need an adapter tracking cumulative progress — confirm before connecting.

  Args:
    done: bytes uploaded so far.
    total: total bytes to upload; a zero total reports 0% instead of raising
      ZeroDivisionError.
  """
  # Floor division: Python 3 true division would print e.g. "33.3333333333%".
  percent = (done * 100) // total if total else 0
  print("uploaded {}/{} bytes ({}%)".format(done, total, percent))

# Only run when executed as a script, not when imported as a module.
if __name__ == "__main__":
  main()
  sys.exit(0)
