From 540fa578421fea6ebd2c261e9440ae2fcbda11cb Mon Sep 17 00:00:00 2001
From: Joseph Atkins-Turkish
Date: Mon, 11 Apr 2016 14:25:40 -0700
Subject: [PATCH] Remove support for AWS_ENABLED==False

---
 app.json                |  1 -
 cloudpebble/settings.py |  1 -
 ide/api/project.py      |  1 +
 ide/api/resource.py     | 16 +++------
 ide/models/build.py     | 52 ++++++-----------------------
 ide/models/meta.py      |  1 +
 ide/models/s3file.py    | 56 ++++----------------------------
 ide/models/textfile.py  |  1 -
 ide/tasks/archive.py    | 15 +++------
 utils/s3.py             | 72 +++++++++++++++--------------------------
 10 files changed, 53 insertions(+), 163 deletions(-)

diff --git a/app.json b/app.json
index f30bf30e..c83791d8 100644
--- a/app.json
+++ b/app.json
@@ -7,7 +7,6 @@
     "AWS_ACCESS_KEY_ID": {
       "required": true
     },
-    "AWS_ENABLED": "yes",
     "AWS_S3_BUILDS_BUCKET": "builds-staging.cloudpebble.net",
     "AWS_S3_EXPORT_BUCKET": "export-staging.cloudpebble.net",
     "AWS_S3_SOURCE_BUCKET": "source-staging.cloudpebble.net",
diff --git a/cloudpebble/settings.py b/cloudpebble/settings.py
index fc0fa8e1..870dfbca 100644
--- a/cloudpebble/settings.py
+++ b/cloudpebble/settings.py
@@ -341,7 +341,6 @@
 MAILCHIMP_API_KEY = _environ.get('MAILCHIMP_API_KEY', None)
 MAILCHIMP_LIST_ID = _environ.get('MAILCHIMP_LIST_ID', None)
 
-AWS_ENABLED = 'AWS_ENABLED' in _environ
 AWS_ACCESS_KEY_ID = _environ.get('AWS_ACCESS_KEY_ID', None)
 AWS_SECRET_ACCESS_KEY = _environ.get('AWS_SECRET_ACCESS_KEY', None)
 
diff --git a/ide/api/project.py b/ide/api/project.py
index e0a24c39..b6588ae2 100644
--- a/ide/api/project.py
+++ b/ide/api/project.py
@@ -263,6 +263,7 @@ def save_project_dependencies(request, project_id):
     else:
         send_td_event('cloudpebble_save_project_settings', request=request, project=project)
 
+
 @require_POST
 @login_required
 @json_view
diff --git a/ide/api/resource.py b/ide/api/resource.py
index b7b7dc78..f6a39f1d 100644
--- a/ide/api/resource.py
+++ b/ide/api/resource.py
@@ -1,5 +1,4 @@
 import json
-from django.conf import settings
 from django.contrib.auth.decorators import login_required
 from django.db import transaction, IntegrityError
 from django.http import HttpResponse, HttpResponseRedirect
@@ -238,13 +237,8 @@ def show_resource(request, project_id, resource_id, variant):
     }
     content_disposition = "attachment; filename=\"%s\"" % resource.file_name
     content_type = content_types[resource.kind]
-    if settings.AWS_ENABLED:
-        headers = {
-            'response-content-disposition': content_disposition,
-            'Content-Type': content_type
-        }
-        return HttpResponseRedirect(s3.get_signed_url('source', variant.s3_path, headers=headers))
-    else:
-        response = HttpResponse(open(variant.local_filename), content_type=content_type)
-        response['Content-Disposition'] = content_disposition
-        return response
+    headers = {
+        'response-content-disposition': content_disposition,
+        'Content-Type': content_type
+    }
+    return HttpResponseRedirect(s3.get_signed_url('source', variant.s3_path, headers=headers))
\ No newline at end of file
diff --git a/ide/models/build.py b/ide/models/build.py
index a93bd3da..463cc8ea 100644
--- a/ide/models/build.py
+++ b/ide/models/build.py
@@ -1,8 +1,5 @@
 import uuid
 import json
-import shutil
-import os
-import os.path
 from django.conf import settings
 from django.db import models
 from ide.models.project import Project
@@ -42,19 +39,10 @@ class BuildResult(IdeModel):
     finished = models.DateTimeField(blank=True, null=True)
 
     def _get_dir(self):
-        if settings.AWS_ENABLED:
-            return '%s/' % self.uuid
-        else:
-            path = '%s%s/%s/%s/' % (settings.MEDIA_ROOT, self.uuid[0], self.uuid[1], self.uuid)
-            if not os.path.exists(path):
-                os.makedirs(path)
-            return path
+        return '%s/' % self.uuid
 
     def get_url(self):
-        if settings.AWS_ENABLED:
-            return "%s%s/" % (settings.MEDIA_URL, self.uuid)
-        else:
-            return '%s%s/%s/%s/' % (settings.MEDIA_URL, self.uuid[0], self.uuid[1], self.uuid)
+        return "%s%s/" % (settings.MEDIA_URL, self.uuid)
 
     @property
     def pbw(self):
@@ -88,46 +76,24 @@ def get_debug_info_filename(self, platform, kind):
         return self._get_dir() + self.DEBUG_INFO_MAP[platform][kind]
 
     def save_build_log(self, text):
-        if not settings.AWS_ENABLED:
-            with open(self.build_log, 'w') as f:
-                f.write(text)
-        else:
-            s3.save_file('builds', self.build_log, text, public=True, content_type='text/plain')
+        s3.save_file('builds', self.build_log, text, public=True, content_type='text/plain')
 
     def read_build_log(self):
-        if not settings.AWS_ENABLED:
-            with open(self.build_log, 'r') as f:
-                return f.read()
-        else:
-            return s3.read_file('builds', self.build_log)
+        return s3.read_file('builds', self.build_log)
 
     def save_debug_info(self, json_info, platform, kind):
         text = json.dumps(json_info)
-        if not settings.AWS_ENABLED:
-            with open(self.get_debug_info_filename(platform, kind), 'w') as f:
-                f.write(text)
-        else:
-            s3.save_file('builds', self.get_debug_info_filename(platform, kind), text, public=True, content_type='application/json')
+        s3.save_file('builds', self.get_debug_info_filename(platform, kind), text, public=True, content_type='application/json')
 
     def save_package(self, package_path):
-        if not settings.AWS_ENABLED:
-            shutil.move(package_path, self.package)
-        else:
-            filename = '%s.tar.gz' % self.project.app_short_name.replace('/', '-')
-            s3.upload_file('builds', self.package, package_path, public=True, download_filename=filename, content_type='application/gzip')
+        filename = '%s.tar.gz' % self.project.app_short_name.replace('/', '-')
+        s3.upload_file('builds', self.package, package_path, public=True, download_filename=filename, content_type='application/gzip')
 
     def save_pbw(self, pbw_path):
-        if not settings.AWS_ENABLED:
-            shutil.move(pbw_path, self.pbw)
-        else:
-            s3.upload_file('builds', self.pbw, pbw_path, public=True, download_filename='%s.pbw' % self.project.app_short_name.replace('/','-'))
+        s3.upload_file('builds', self.pbw, pbw_path, public=True, download_filename='%s.pbw' % self.project.app_short_name.replace('/','-'))
 
     def save_simplyjs(self, javascript):
-        if not settings.AWS_ENABLED:
-            with open(self.simplyjs, 'w') as f:
-                f.write(javascript)
-        else:
-            s3.save_file('builds', self.simplyjs, javascript, public=True, content_type='text/javascript')
+        s3.save_file('builds', self.simplyjs, javascript, public=True, content_type='text/javascript')
 
     def get_sizes(self):
         sizes = {}
diff --git a/ide/models/meta.py b/ide/models/meta.py
index bf27a098..4762ee30 100644
--- a/ide/models/meta.py
+++ b/ide/models/meta.py
@@ -2,6 +2,7 @@
 from django.db.models.signals import pre_save
 from django.dispatch import receiver
 
+
 class IdeModel(models.Model):
     class Meta:
         abstract = True
diff --git a/ide/models/s3file.py b/ide/models/s3file.py
index 3e282ea0..0d66bfba 100644
--- a/ide/models/s3file.py
+++ b/ide/models/s3file.py
@@ -1,5 +1,3 @@
-import shutil
-import os
 import logging
 
 from django.utils.translation import ugettext as _
@@ -18,7 +16,6 @@ class S3File(IdeModel):
     bucket_name = 'source'
     folder = None
     project = None
-    _create_local_if_not_exists = False
 
     @property
     def padded_id(self):
@@ -37,41 +34,11 @@ def s3_id(self):
     def s3_path(self):
         return '%s/%s' % (self.folder, self.s3_id)
 
-    def _get_contents_local(self):
-        try:
-            return open(self.local_filename).read()
-        except IOError:
-            if self._create_local_if_not_exists:
-                return ''
-            else:
-                raise
-
-    def _save_string_local(self, string):
-        if not os.path.exists(os.path.dirname(self.local_filename)):
-            os.makedirs(os.path.dirname(self.local_filename))
-        with open(self.local_filename, 'wb') as out:
-            out.write(string)
-
-    def _copy_to_path_local(self, path):
-        try:
-            shutil.copy(self.local_filename, path)
-        except IOError as err:
-            if err.errno == 2 and self._crete_local_if_not_exists:
-                open(path, 'w').close()  # create the file if it's missing.
-            else:
-                raise
-
     def get_contents(self):
-        if not settings.AWS_ENABLED:
-            return self._get_contents_local()
-        else:
-            return s3.read_file(self.bucket_name, self.s3_path)
+        return s3.read_file(self.bucket_name, self.s3_path)
 
     def save_string(self, string):
-        if not settings.AWS_ENABLED:
-            self._save_string_local(string)
-        else:
-            s3.save_file(self.bucket_name, self.s3_path, string)
+        s3.save_file(self.bucket_name, self.s3_path, string)
         if self.project:
             self.project.last_modified = now()
             self.project.save()
@@ -85,10 +52,7 @@ def save_text(self, content):
         self.save_string(content.encode('utf-8'))
 
     def copy_to_path(self, path):
-        if not settings.AWS_ENABLED:
-            self._copy_to_path_local(path)
-        else:
-            s3.read_file_to_filesystem(self.bucket_name, self.s3_path, path)
+        s3.read_file_to_filesystem(self.bucket_name, self.s3_path, path)
 
     class Meta(IdeModel.Meta):
         abstract = True
@@ -97,13 +61,7 @@ class Meta(IdeModel.Meta):
 @receiver(post_delete)
 def delete_file(sender, instance, **kwargs):
     if issubclass(sender, S3File):
-        if settings.AWS_ENABLED:
-            try:
-                s3.delete_file(sender.bucket_name, instance.s3_path)
-            except:
-                logger.exception("Failed to delete S3 file")
-        else:
-            try:
-                os.unlink(instance.local_filename)
-            except OSError:
-                pass
+        try:
+            s3.delete_file(sender.bucket_name, instance.s3_path)
+        except:
+            logger.exception("Failed to delete S3 file")
diff --git a/ide/models/textfile.py b/ide/models/textfile.py
index 58f0b9fd..17a684f9 100644
--- a/ide/models/textfile.py
+++ b/ide/models/textfile.py
@@ -10,7 +10,6 @@ class TextFile(S3File):
     """ TextFile adds support to S3File for last-modified timestamps and code folding """
     last_modified = models.DateTimeField(blank=True, null=True, auto_now=True)
     folded_lines = models.TextField(default="[]")
-    _create_local_if_not_exists = True
 
     def was_modified_since(self, expected_modification_time):
         if isinstance(expected_modification_time, int):
diff --git a/ide/tasks/archive.py b/ide/tasks/archive.py
index d562b7ee..29a1c34d 100644
--- a/ide/tasks/archive.py
+++ b/ide/tasks/archive.py
@@ -61,16 +61,9 @@ def create_archive(project_id):
 
     send_td_event('cloudpebble_export_project', project=project)
 
-    if not settings.AWS_ENABLED:
-        outfile = '%s%s/%s.zip' % (settings.EXPORT_DIRECTORY, u, prefix)
-        os.makedirs(os.path.dirname(outfile), 0755)
-        shutil.copy(filename, outfile)
-        os.chmod(outfile, 0644)
-        return '%s%s/%s.zip' % (settings.EXPORT_ROOT, u, prefix)
-    else:
-        outfile = '%s/%s.zip' % (u, prefix)
-        s3.upload_file('export', outfile, filename, public=True, content_type='application/zip')
-        return '%s%s' % (settings.EXPORT_ROOT, outfile)
+    outfile = '%s/%s.zip' % (u, prefix)
+    s3.upload_file('export', outfile, filename, public=True, content_type='application/zip')
+    return '%s%s' % (settings.EXPORT_ROOT, outfile)
 
 
 @task(acks_late=True)
@@ -309,4 +302,4 @@ def make_valid_filename(zip_entry):
                 'reason': str(e)
             }
         }, user=project.owner)
-        raise
+        raise
\ No newline at end of file
diff --git a/utils/s3.py b/utils/s3.py
index f4e82310..842c2b7b 100644
--- a/utils/s3.py
+++ b/utils/s3.py
@@ -27,37 +27,33 @@ def __init__(self):
         self.s3 = None
 
     def configure(self):
-        if settings.AWS_ENABLED:
-            if settings.AWS_S3_FAKE_S3 is None:
-                # The host must be manually specified in Python 2.7.9+ due to
-                # https://github.com/boto/boto/issues/2836 this bug in boto with .s in
-                # bucket names.
-                host = settings.AWS_S3_HOST if settings.AWS_S3_HOST else NoHostProvided
-
-                self.s3 = boto.connect_s3(
-                    settings.AWS_ACCESS_KEY_ID,
-                    settings.AWS_SECRET_ACCESS_KEY,
-                    host=host,
-                    calling_format=OrdinaryCallingFormat()
-                )
-            else:
-                host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2]
-                port = int(port)
-                self.s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port,
-                                          host=host, calling_format=OrdinaryCallingFormat())
-                _ensure_bucket_exists(self.s3, settings.AWS_S3_SOURCE_BUCKET)
-                _ensure_bucket_exists(self.s3, settings.AWS_S3_EXPORT_BUCKET)
-                _ensure_bucket_exists(self.s3, settings.AWS_S3_BUILDS_BUCKET)
-
-            self.buckets = {
-                'source': self.s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET),
-                'export': self.s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET),
-                'builds': self.s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET),
-            }
-            self.configured = True
+        if settings.AWS_S3_FAKE_S3 is None:
+            # The host must be manually specified in Python 2.7.9+ due to
+            # https://github.com/boto/boto/issues/2836 this bug in boto with .s in
+            # bucket names.
+            host = settings.AWS_S3_HOST if settings.AWS_S3_HOST else NoHostProvided
+
+            self.s3 = boto.connect_s3(
+                settings.AWS_ACCESS_KEY_ID,
+                settings.AWS_SECRET_ACCESS_KEY,
+                host=host,
+                calling_format=OrdinaryCallingFormat()
+            )
         else:
-            self.s3 = None
-            self.buckets = None
+            host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2]
+            port = int(port)
+            self.s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port,
+                                      host=host, calling_format=OrdinaryCallingFormat())
+            _ensure_bucket_exists(self.s3, settings.AWS_S3_SOURCE_BUCKET)
+            _ensure_bucket_exists(self.s3, settings.AWS_S3_EXPORT_BUCKET)
+            _ensure_bucket_exists(self.s3, settings.AWS_S3_BUILDS_BUCKET)
+
+        self.buckets = {
+            'source': self.s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET),
+            'export': self.s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET),
+            'builds': self.s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET),
+        }
+        self.configured = True
 
     def __getitem__(self, item):
         if settings.TESTING:
@@ -70,38 +66,24 @@ def __getitem__(self, item):
 _buckets = BucketHolder()
 
 
-def _requires_aws(fn):
-    if settings.AWS_ENABLED:
-        return fn
-    else:
-        def complain(*args, **kwargs):
-            raise Exception("AWS_ENABLED must be True to call %s" % fn.__name__)
-
-        return complain
-
-
-@_requires_aws
 def read_file(bucket_name, path):
     bucket = _buckets[bucket_name]
     key = bucket.get_key(path)
     return key.get_contents_as_string()
 
 
-@_requires_aws
 def read_file_to_filesystem(bucket_name, path, destination):
     bucket = _buckets[bucket_name]
    key = bucket.get_key(path)
     key.get_contents_to_filename(destination)
 
 
-@_requires_aws
 def delete_file(bucket_name, path):
     bucket = _buckets[bucket_name]
     key = bucket.get_key(path)
     key.delete()
 
 
-@_requires_aws
 def save_file(bucket_name, path, value, public=False, content_type='application/octet-stream'):
     bucket = _buckets[bucket_name]
     key = Key(bucket)
@@ -115,7 +97,6 @@ def save_file(bucket_name, path, value, public=False, content_type='application/
     key.set_contents_from_string(value, policy=policy, headers={'Content-Type': content_type})
 
 
-@_requires_aws
 def upload_file(bucket_name, dest_path, src_path, public=False, content_type='application/octet-stream',
                 download_filename=None):
     bucket = _buckets[bucket_name]
@@ -137,7 +118,6 @@ def upload_file(bucket_name, dest_path, src_path, public=False, content_type='ap
     key.set_contents_from_filename(src_path, policy=policy, headers=headers)
 
 
-@_requires_aws
 def get_signed_url(bucket_name, path, headers=None):
     bucket = _buckets[bucket_name]
     key = bucket.get_key(path)
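
Note: with the AWS_ENABLED==False paths gone, every helper in utils/s3.py talks to an S3 endpoint unconditionally, and local development relies on the AWS_S3_FAKE_S3 branch that configure() keeps. A minimal round-trip sketch of that branch in boto 2, assuming a fakes3-style server on localhost:4567 and an illustrative bucket name (neither value comes from this patch):

    import boto
    from boto.s3.connection import OrdinaryCallingFormat
    from boto.s3.key import Key

    # Mirror the AWS_S3_FAKE_S3 branch of BucketHolder.configure():
    # dummy credentials, plain HTTP, explicit host/port, path-style buckets.
    conn = boto.connect_s3("key_id", "secret_key", is_secure=False, port=4567,
                           host='localhost', calling_format=OrdinaryCallingFormat())

    # Rough equivalent of _ensure_bucket_exists() for a single bucket.
    bucket = conn.lookup('source-dev') or conn.create_bucket('source-dev')

    # Same call pattern as s3.save_file() / s3.read_file() above.
    key = Key(bucket, 'sources/42/main.c')
    key.set_contents_from_string('int main(void) { return 0; }',
                                 headers={'Content-Type': 'text/plain'})
    print(key.get_contents_as_string())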