diff --git a/.gitignore b/.gitignore index 9c17deae..22143d07 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.pyc *.mo *.bak +*.pem user_data/ dev.db .DS_Store @@ -12,3 +13,4 @@ src/ .env/ .idea/ bower_components +staticfiles diff --git a/Dockerfile b/Dockerfile index ef54ecc7..679c6247 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ FROM python:2.7.11 MAINTAINER Katharine Berry -ENV NPM_CONFIG_LOGLEVEL=info NODE_VERSION=4.2.3 DJANGO_VERSION=1.6 +ENV NPM_CONFIG_LOGLEVEL=info NODE_VERSION=4.2.3 DJANGO_VERSION=1.9.7 # Node stuff. @@ -41,6 +41,17 @@ EXPOSE 8000 # CloudPebble stuff RUN npm install -g bower && echo '{"allow_root": true}' > ~/.bowerrc +ADD package.json /tmp/package.json +RUN cd /tmp && npm install +RUN mkdir -p /opt/npm && cp -a /tmp/node_modules /opt/npm/ +ENV NODE_MODULES_PATH /opt/npm/node_modules + +ADD package.json /tmp/package.json +RUN cd /tmp && npm install +RUN mkdir -p /opt/npm && cp -a /tmp/node_modules /opt/npm/ +ENV NODE_MODULES_PATH /opt/npm/node_modules + + # Grab the toolchain RUN curl -o /tmp/arm-cs-tools.tar https://cloudpebble-vagrant.s3.amazonaws.com/arm-cs-tools-stripped.tar && \ tar -xf /tmp/arm-cs-tools.tar -C / && rm /tmp/arm-cs-tools.tar @@ -55,8 +66,8 @@ RUN mkdir /sdk2 && \ curl -L "https://s3.amazonaws.com/assets.getpebble.com/sdk3/sdk-core/sdk-core-${SDK_TWO_VERSION}.tar.bz2" | \ tar --strip-components=1 -xj -C /sdk2 -ENV SDK_THREE_CHANNEL=beta -ENV SDK_THREE_VERSION=4.0-beta16 +ENV SDK_THREE_CHANNEL=release +ENV SDK_THREE_VERSION=4.0.1 # Install SDK 3 RUN mkdir /sdk3 && \ @@ -69,6 +80,9 @@ WORKDIR /code # Bower is awful. RUN rm -rf bower_components && cd /tmp && python /code/manage.py bower install && mv bower_components /code/ +ENV PYTHONUNBUFFERED 1 + RUN python manage.py compilemessages +RUN python manage.py collectstatic --noinput CMD ["sh", "docker_start.sh"] diff --git a/Procfile b/Procfile index 6a91e975..201b7b72 100644 --- a/Procfile +++ b/Procfile @@ -1,2 +1,2 @@ web: newrelic-admin run-program gunicorn -c gunicorn.py cloudpebble.wsgi -celery: newrelic-admin run-program python manage.py celeryd -E -l info +celery: newrelic-admin run-program celery worker -A cloudpebble -E -l info diff --git a/README.md b/README.md index 961b45e0..6e1cad10 100644 --- a/README.md +++ b/README.md @@ -17,6 +17,9 @@ appropriate values there. Setting environment variables also works. Note that you won't be able to set up integration with certain Pebble systems (e.g. Pebble SSO). This shouldn't usually matter; whenever these are used, an alternative route is provided and should be invoked in its absence. +In order to run locally with `DEBUG=False` set, add the setting `STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage'` +to `cloudpebble/settings_local.py`. + Contributing ------------ diff --git a/app.json b/app.json index f30bf30e..5dd217ab 100644 --- a/app.json +++ b/app.json @@ -16,7 +16,6 @@ "required": true }, "BROKER_POOL_LIMIT": "1", - "BUILDPACK_URL": "https://github.com/pebble/cloudpebble-buildpack", "COMPLETION_CERTS": "/app/completion-certs.crt", "ENABLE_KEEN": "yes", "FROM_EMAIL": "noreply@cloudpebble.net", diff --git a/bin/post_compile b/bin/post_compile index 5b44c74c..950370f6 100644 --- a/bin/post_compile +++ b/bin/post_compile @@ -35,8 +35,7 @@ rm /tmp/*.tar.* # Make sure the database is up to date. echo "Performing database migration." 
-python manage.py syncdb --noinput -python manage.py migrate +python manage.py migrate --fake-initial echo "Compiling gettext files" python manage.py compilemessages diff --git a/bootstrap.sh b/bootstrap.sh index fdb4b37c..9fa8afb3 100644 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -32,6 +32,14 @@ ln -s /vagrant/user_data/export /var/www/export # Fix broken pip in 14.04 easy_install pip +# Allow pip to access pebblehq packages +sudo -u vagrant mkdir .pip +cat << 'EOF' > /home/vagrant/.pip/pip.conf +[global] +trusted-host = pypi.hq.getpebble.com +index-url = http://pypi.hq.getpebble.com/simple/ +EOF + # CloudPebble python requirements. pip install -r /vagrant/requirements.txt @@ -40,7 +48,6 @@ easy_install requests==2.7.0 # Make sure we have a useful database and our JS dependencies. pushd /vagrant - sudo -u vagrant python manage.py syncdb --noinput sudo -u vagrant python manage.py migrate sudo -u vagrant python manage.py bower install popd @@ -105,6 +112,14 @@ pushd /pypkjs deactivate popd + +mkdir pebble-test +pushd pebble-test + git clone git@github.com:pebble/pebblesdk-test.git . + # must first have access to pebble.loghash + pip install -r requirements.txt +popd + sudo -u vagrant mkdir qemu-controller pushd qemu-controller git clone https://github.com/pebble/cloudpebble-qemu-controller.git . @@ -169,7 +184,8 @@ console log script export PATH="$PATH:/home/vagrant/arm-cs-tools/bin:/home/vagrant/sdk2/bin" export DEBUG=yes - exec /usr/bin/python manage.py celery worker --autoreload --loglevel=info --no-color + # The following line is untested after move to Django 1.9 + exec celery worker --autoreload --loglevel=info --no-color end script EOF @@ -221,6 +237,8 @@ script export PKJS_BIN=/pypkjs/phonesim.py export PKJS_VIRTUALENV=/pypkjs/.env export QEMU_IMAGE_ROOT=/home/vagrant/qemu-tintin-images + export PEBBLE_TEST_BIN=/home/vagrant/pebble-test/runner.py + export PEBBLE_LOGHASH_DICT=/home/vagrant/loghash/loghash_dict.json export DEBUG=yes export QCON_PORT=8003 exec /usr/bin/python controller.py diff --git a/cloudpebble/__init__.py b/cloudpebble/__init__.py index e69de29b..b64e43e8 100644 --- a/cloudpebble/__init__.py +++ b/cloudpebble/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import + +# This will make sure the app is always imported when +# Django starts so that shared_task will use this app. +from .celery import app as celery_app diff --git a/cloudpebble/celery.py b/cloudpebble/celery.py new file mode 100644 index 00000000..c3f85a5d --- /dev/null +++ b/cloudpebble/celery.py @@ -0,0 +1,22 @@ +from __future__ import absolute_import + +import os + +from celery import Celery + +# set the default Django settings module for the 'celery' program. +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cloudpebble.settings') + +from django.conf import settings # noqa + +app = Celery('cloudpebble') + +# Using a string here means the worker will not have to +# pickle the object when using Windows. 
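Since `autodiscover_tasks()` is pointed at `INSTALLED_APPS`, the worker started with `celery -A cloudpebble worker` picks up any `tasks.py` module inside an installed app. A minimal sketch of such a task, using a hypothetical module and function name purely to illustrate the discovery mechanism (not code from this change):

```python
# e.g. ide/tasks/example.py -- hypothetical module, shown only to illustrate discovery
from __future__ import absolute_import

from celery import shared_task


@shared_task
def echo_project(project_id):
    # shared_task binds to the Celery app imported in cloudpebble/__init__.py,
    # so the same function can be .delay()'d from the web process and executed
    # by the worker started via the Procfile/docker_start.sh commands above.
    return project_id
```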
+app.config_from_object('django.conf:settings') +app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) + + +@app.task(bind=True) +def debug_task(self): + print('Request: {0!r}'.format(self.request)) diff --git a/cloudpebble/compressors.py b/cloudpebble/compressors.py new file mode 100644 index 00000000..22e16c0b --- /dev/null +++ b/cloudpebble/compressors.py @@ -0,0 +1,53 @@ +from __future__ import unicode_literals + +import codecs +import tempfile + +from django.contrib.staticfiles.storage import staticfiles_storage + +from pipeline.conf import settings +from pipeline.compressors import SubProcessCompressor +from pipeline.utils import source_map_re, path_depth + + +class ConcatenatingUglifyJSCompressor(SubProcessCompressor): + def compress_js(self, js): + command = [settings.UGLIFYJS_BINARY, settings.UGLIFYJS_ARGUMENTS] + if self.verbose: + command.append(' --verbose') + return self.execute_command(command, js) + + def concatenate_files(self, paths, out_path): + args = [settings.CONCATENATOR_BINARY] + args += [staticfiles_storage.path(p) for p in paths] + args += ['-o', out_path] + args += ['-d', staticfiles_storage.base_location] + self.execute_command(args) + + def compress_js_with_source_map(self, paths): + concatenated_js_file = tempfile.NamedTemporaryFile() + source_map_file = tempfile.NamedTemporaryFile() + try: + self.concatenate_files(paths, concatenated_js_file.name) + args = [settings.UGLIFYJS_BINARY] + args += [concatenated_js_file.name] + args += ["--in-source-map", concatenated_js_file.name + ".map"] + args += ["--source-map", source_map_file.name] + args += ["--source-map-root", staticfiles_storage.base_url] + args += ["--prefix", "%s" % path_depth(staticfiles_storage.base_location)] + args += settings.UGLIFYJS_ARGUMENTS + if self.verbose: + args.append('--verbose') + + js = self.execute_command(args) + + with codecs.open(source_map_file.name, encoding='utf-8') as f: + source_map = f.read() + + # Strip out existing source map comment (it will be re-added with packaging) + js = source_map_re.sub('', js) + + return js, source_map + finally: + concatenated_js_file.close() + source_map_file.close() diff --git a/cloudpebble/settings.py b/cloudpebble/settings.py index fc0fa8e1..4473c63e 100644 --- a/cloudpebble/settings.py +++ b/cloudpebble/settings.py @@ -12,27 +12,27 @@ VERBOSE = DEBUG or (_environ.get('VERBOSE', '') != '') TESTING = 'test' in sys.argv TRAVIS = 'TRAVIS' in _environ and os.environ["TRAVIS"] == "true" -TEMPLATE_DEBUG = DEBUG + +BASE_DIR = os.path.dirname(os.path.dirname(__file__)) ADMINS = ( ('Administrator', 'example@example.com'), ) +MANAGERS = ADMINS DEFAULT_FROM_EMAIL = _environ.get('FROM_EMAIL', 'CloudPebble ') SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') -MANAGERS = ADMINS - if TRAVIS: DATABASES = { 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': 'travisci', - 'USER': 'postgres', + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + 'NAME': 'travisci', + 'USER': 'postgres', 'PASSWORD': '', - 'HOST': 'localhost', - 'PORT': '', + 'HOST': 'localhost', + 'PORT': '', } } elif 'DATABASE_URL' not in _environ: @@ -54,18 +54,6 @@ LANGUAGE_COOKIE_NAME = 'cloudpebble_language' -TEMPLATE_CONTEXT_PROCESSORS = ( - "django.contrib.auth.context_processors.auth", - "django.core.context_processors.debug", - "django.core.context_processors.i18n", - "django.core.context_processors.media", - "django.core.context_processors.static", - "django.core.context_processors.tz", - "django.contrib.messages.context_processors.messages", - 
"social.apps.django_app.context_processors.backends", - "social.apps.django_app.context_processors.login_redirect", -) - # Hosts/domain names that are valid for this site; required if DEBUG is False # See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts ALLOWED_HOSTS = ['*'] @@ -120,29 +108,46 @@ # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/var/www/example.com/static/" -STATIC_ROOT = 'staticfiles' +STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') # URL prefix for static files. # Example: "http://example.com/static/", "http://static.example.com/" STATIC_URL = '/static/' -PUBLIC_URL = _environ.get('PUBLIC_URL', 'http://localhost:8000/') # This default is completely useless. +PUBLIC_URL = _environ.get('PUBLIC_URL', 'http://localhost:8000/') # This default is completely useless. -# Additional locations of static files -STATICFILES_DIRS = ( - # Put strings here, like "/home/html/static" or "C:/www/django/static". - # Always use forward slashes, even on Windows. - # Don't forget to use absolute paths, not relative paths. -) +NODE_MODULES_PATH = _environ.get('NODE_MODULES_PATH', os.path.join(os.getcwd(), 'node_modules')) -# List of finder classes that know how to find static files in -# various locations. -STATICFILES_FINDERS = ( - 'django.contrib.staticfiles.finders.FileSystemFinder', - 'django.contrib.staticfiles.finders.AppDirectoriesFinder', - 'djangobower.finders.BowerFinder', -# 'django.contrib.staticfiles.finders.DefaultStorageFinder', -) + +def _node_bin(name): + return os.path.join(NODE_MODULES_PATH, '.bin', name) + + +if DEBUG or TESTING: + # Additional locations of static files + STATICFILES_DIRS = ( + # This is used instead of django-bower's finder, because django-pipeline + # is actually better than django-bower at filtering out unneeded static + # files. + os.path.join(os.path.dirname(__file__), '..', 'bower_components'), + ) + STATICFILES_FINDERS = ( + 'django.contrib.staticfiles.finders.FileSystemFinder', + 'django.contrib.staticfiles.finders.AppDirectoriesFinder', + 'pipeline.finders.CachedFileFinder', + 'pipeline.finders.PipelineFinder', + ) + STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage' + +else: + STATICFILES_FINDERS = ( + 'django.contrib.staticfiles.finders.FileSystemFinder', + 'django.contrib.staticfiles.finders.AppDirectoriesFinder', + 'djangobower.finders.BowerFinder', + 'pipeline.finders.CachedFileFinder', + 'pipeline.finders.PipelineFinder', + ) + STATICFILES_STORAGE = 'cloudpebble.storage.CompressedManifestPipelineStorage' BOWER_INSTALLED_APPS = ( 'https://github.com/krisk/Fuse.git#2ec2f2c40059e135cabf2b01c8c3f96f808b8809', @@ -156,23 +161,39 @@ 'codemirror#4.2.0', 'bluebird#3.3.4', 'kanaka/noVNC#v0.5', + 'react#15.0.1', + 'classnames' ) +BOWER_PATH = _environ.get('BOWER_PATH', _node_bin('bower')) + # Make this unique, and don't share it with anybody. SECRET_KEY = _environ.get('SECRET_KEY', 'y_!-!-i!_txo$v5j(@c7m4uk^jyg)l4bf*0yqrztmax)l2027j') -# List of callables that know how to import templates from various sources. 
-TEMPLATE_LOADERS = ( - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', -# 'django.template.loaders.eggs.Loader', -) - -if not DEBUG: - STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.CachedStaticFilesStorage' - +TEMPLATES = [ + { + 'BACKEND': 'django.template.backends.django.DjangoTemplates', + 'DIRS': [], + 'APP_DIRS': True, + 'OPTIONS': { + 'context_processors': [ + 'django.contrib.auth.context_processors.auth', + 'django.template.context_processors.debug', + 'django.template.context_processors.i18n', + 'django.template.context_processors.media', + 'django.template.context_processors.static', + 'django.template.context_processors.tz', + 'django.template.context_processors.request', + 'django.contrib.messages.context_processors.messages', + "social.apps.django_app.context_processors.backends", + "social.apps.django_app.context_processors.login_redirect", + ] + } + } +] MIDDLEWARE_CLASSES = ( + 'whitenoise.middleware.WhiteNoiseMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.locale.LocaleMiddleware', 'django.middleware.common.CommonMiddleware', @@ -184,7 +205,7 @@ ) AUTHENTICATION_BACKENDS = ( - 'auth.pebble.PebbleOAuth2', + 'site_auth.pebble.PebbleOAuth2', 'django.contrib.auth.backends.ModelBackend', ) @@ -192,11 +213,11 @@ 'social.pipeline.social_auth.social_details', 'social.pipeline.social_auth.social_uid', 'social.pipeline.social_auth.auth_allowed', - 'auth.pebble.merge_user', # formerly social.pipeline.social_auth.social_user + 'site_auth.pebble.merge_user', # formerly social.pipeline.social_auth.social_user 'social.pipeline.user.get_username', 'social.pipeline.user.create_user', 'social.pipeline.social_auth.associate_user', - 'auth.pebble.clear_old_login', + 'site_auth.pebble.clear_old_login', 'social.pipeline.social_auth.load_extra_data', 'social.pipeline.user.user_details' ) @@ -220,12 +241,6 @@ # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'cloudpebble.wsgi.application' -TEMPLATE_DIRS = ( - # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates". - # Always use forward slashes, even on Windows. - # Don't forget to use absolute paths, not relative paths. -) - INSTALLED_APPS = ( 'django.contrib.auth', 'django.contrib.contenttypes', @@ -234,20 +249,156 @@ 'django.contrib.messages', 'django.contrib.staticfiles', # Uncomment the next line to enable the admin: - #'django.contrib.admin', + # 'django.contrib.admin', # Uncomment the next line to enable admin documentation: # 'django.contrib.admindocs', 'social.apps.django_app.default', 'ide', - 'auth', + 'site_auth', + 'pipeline', 'root', 'qr', - 'south', - 'djcelery', + 'orchestrator_proxy', 'registration', 'djangobower', ) +# Note: due to the way that django-pipeline currently works, babel is executed with a cwd of the file being compiled. +# This means that we must reference the babel presets by their absolute location. +# This is arguably a bug and it might be worth making a PR on django-pipeline to fix this (or at least, fixing it in +# Pebble's fork). +BABEL_PRESETS = [ + 'babel-preset-stage-2', + 'babel-preset-react', + 'babel-preset-es2015', +] + +TEST_BENCH_USERS = _environ.get('TEST_BENCH_USERS', 'test@test.test').split(",") + +# Configuration for django-pipeline, used to concatenate and compress JS and CSS sources and +# output source-maps. 
+PIPELINE = { + 'COMPILERS': ('pipeline.compilers.es6.ES6Compiler',), + 'BABEL_BINARY': _node_bin('babel'), + 'BABEL_ARGUMENTS': '--presets {}'.format(",".join(os.path.join(NODE_MODULES_PATH, p) for p in BABEL_PRESETS)), + 'BABEL_EXTENSION': '.jsx', + 'OUTPUT_SOURCEMAPS': True, + 'JS_COMPRESSOR': 'cloudpebble.compressors.ConcatenatingUglifyJSCompressor', + 'CSS_COMPRESSOR': 'pipeline.compressors.cleancss.CleanCSSCompressor', + 'CLEANCSS_BINARY': _node_bin('cleancss'), + 'UGLIFYJS_BINARY': _node_bin('uglifyjs'), + 'CONCATENATOR_BINARY': _node_bin('source-map-concat'), + 'DISABLE_WRAPPER': True, + 'VERBOSE': DEBUG, + 'STYLESHEETS': { + 'codemirror': { + 'source_filenames': ( + 'CodeMirror/addon/hint/show-hint.css', + 'CodeMirror/addon/dialog/dialog.css', + 'CodeMirror/lib/codemirror.css', + 'CodeMirror/theme/monokai.css', + 'CodeMirror/theme/eclipse.css', + 'CodeMirror/theme/blackboard.css', + 'CodeMirror/theme/solarized.css', + 'CodeMirror/addon/fold/foldgutter.css', + ), + 'output_filename': 'build/codemirror.css' + }, + 'textext': { + 'source_filenames': ( + 'jquery-textext/src/css/textext.core.css', + 'jquery-textext/src/css/textext.plugin.tags.css', + 'jquery-textext/src/css/textext.plugin.autocomplete.css', + 'jquery-textext/src/css/textext.plugin.focus.css', + 'jquery-textext/src/css/textext.plugin.prompt.css', + 'jquery-textext/src/css/textext.plugin.arrow.css', + ), + 'output_filename': 'build/textext.css' + }, + 'ide': { + 'source_filenames': ( + 'ide/css/ide.css', + 'ide/css/ib.css', + 'ide/css/codemirror-default.css', + ), + 'output_filename': 'build/ide.css' + }, + 'base': { + 'source_filenames': ( + 'common/fonts/fonts.css', + 'common/css/progress.css', + 'common/css/common.css', + 'ide/css/base.css', + ), + 'output_filename': 'build/base.css' + } + }, + 'JAVASCRIPT': { + 'ide': { + 'source_filenames': ( + 'ide/js/cloudpebble.js', + 'ide/js/editor.js', + 'ide/js/ib/ib.js', + 'ide/js/ib/registry.js', + 'ide/js/*.js', + 'ide/js/ib/*.js', + 'ide/js/libpebble/*.js' + ), + 'output_filename': 'build/ide.js', + }, + 'jsx': { + 'source_filenames': ( + 'ide/js/monkey/*.jsx', + ), + 'output_filename': 'build/jsx.js', + }, + 'lib': { + 'source_filenames': ( + 'react/react.js', + 'react/react-dom.js', + 'classnames/index.js', + 'CodeMirror/lib/codemirror.js', + 'CodeMirror/addon/dialog/dialog.js', + 'CodeMirror/addon/search/searchcursor.js', + 'CodeMirror/addon/search/search.js', + 'CodeMirror/addon/edit/matchbrackets.js', + 'CodeMirror/addon/edit/closebrackets.js', + 'CodeMirror/addon/comment/comment.js', + 'CodeMirror/addon/fold/foldgutter.js', + 'CodeMirror/addon/fold/foldcode.js', + 'CodeMirror/addon/fold/brace-fold.js', + 'CodeMirror/addon/fold/comment-fold.js', + 'CodeMirror/addon/runmode/runmode.js', + 'ide/external/codemirror.hint.js', + 'fuse.js/src/fuse.js', + 'CodeMirror/mode/clike/clike.js', + 'CodeMirror/mode/javascript/javascript.js', + 'CodeMirror/keymap/emacs.js', + 'CodeMirror/keymap/vim.js', + 'ide/external/uuid.js', + 'jshint/dist/jshint.js', + 'html.sortable/dist/html.sortable.min.js', + 'text-encoding/lib/encoding.js', + 'noVNC/include/util.js', + 'jquery-textext/src/js/*.js', + ), + 'output_filename': 'build/textext.js', + }, + 'base': { + 'source_filenames': ( + 'jquery/dist/jquery.min.js', + 'common/js/modal.js', + 'bluebird/js/browser/bluebird.js', + 'underscore/underscore-min.js', + 'backbone/backbone-min.js', + 'common/js/whats_new.js', + 'common/js/ajax.js' + ), + 'output_filename': 'build/base.js', + } + } +} + # This logging config prints: # INFO logs from 
django # INFO or DEBUG logs from 'ide', depending on whether DEBUG=True @@ -358,17 +509,24 @@ YCM_URLS = _environ.get('YCM_URLS', 'http://localhost:8002/').split(',') COMPLETION_CERTS = _environ.get('COMPLETION_CERTS', os.getcwd() + '/completion-certs.crt') +# If the key for pbltest.io is stored in the environment, read it into a local file. +# The key will be deleted from the environment later. +PBLTEST_CERT_LOCATION = _environ.get('PBLTEST_CERT_LOCATION', None) +_cert = _environ.get('PBLTEST_CERT', None) +if not PBLTEST_CERT_LOCATION and _cert: + PBLTEST_CERT_LOCATION = os.path.join(os.getcwd(), 'pbltest-cert.pem') + with open(PBLTEST_CERT_LOCATION, 'w') as f: + f.write(_cert) + QEMU_URLS = _environ.get('QEMU_URLS', 'http://qemu/').split(',') QEMU_LAUNCH_AUTH_HEADER = _environ.get('QEMU_LAUNCH_AUTH_HEADER', 'secret') -QEMU_LAUNCH_TIMEOUT = int(_environ.get('QEMU_LAUNCH_TIMEOUT', 15)) +QEMU_LAUNCH_TIMEOUT = int(_environ.get('QEMU_LAUNCH_TIMEOUT', 20)) PHONE_SHORTURL = _environ.get('PHONE_SHORTURL', 'cpbl.io') +ORCHESTRATOR_URL = _environ.get('ORCHESTRATOR_URL', 'https://pbltest.io') WAF_NODE_PATH = _environ.get('WAF_NODE_PATH', None) -import djcelery -djcelery.setup_loader() - # import local settings try: from settings_local import * diff --git a/cloudpebble/storage.py b/cloudpebble/storage.py new file mode 100644 index 00000000..8be2b884 --- /dev/null +++ b/cloudpebble/storage.py @@ -0,0 +1,8 @@ +from pipeline.storage import PipelineMixin, PipelineStorage +from whitenoise.storage import CompressedManifestStaticFilesStorage, HelpfulExceptionMixin, CompressedStaticFilesMixin + +class CompressedManifestPipelineStorage(PipelineMixin, CompressedManifestStaticFilesStorage): + pass + +class CompressedPipelineStorage(HelpfulExceptionMixin, CompressedStaticFilesMixin, PipelineStorage): + pass diff --git a/cloudpebble/urls.py b/cloudpebble/urls.py index 0539ce6c..532f8458 100644 --- a/cloudpebble/urls.py +++ b/cloudpebble/urls.py @@ -1,12 +1,10 @@ -from django.conf.urls import patterns, include, url -from django.conf.urls.static import static -from django.conf import settings +from django.conf.urls import include, url # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() -urlpatterns = patterns('', +urlpatterns = [ # Examples: # url(r'^$', 'cloudpebble.views.home', name='home'), # url(r'^cloudpebble/', include('cloudpebble.foo.urls')), @@ -15,11 +13,12 @@ # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: - #url(r'^admin/', include(admin.site.urls)), - url(r'^ide/', include('ide.urls', namespace='ide')), - url(r'^accounts/', include('auth.urls')), # Namespacing this breaks things. - url(r'^qr/', include('qr.urls', namespace='qr')), - url(r'^$', include('root.urls', namespace='root')), - url(r'', include('social.apps.django_app.urls', namespace='social')), - url(r'^i18n/', include('django.conf.urls.i18n')) -) + # url(r'^admin/', include(admin.site.urls)), + url(r'^ide/', include('ide.urls', namespace='ide')), + url(r'^accounts/', include('site_auth.urls')), # Namespacing this breaks things. 
+ url(r'^qr/', include('qr.urls', namespace='qr')), + url(r'^', include('root.urls', namespace='root')), + url(r'', include('social.apps.django_app.urls', namespace='social')), + url(r'^i18n/', include('django.conf.urls.i18n')), + url(r'^orchestrator/', include('orchestrator_proxy.urls', namespace='orchestrator')), +] diff --git a/cloudpebble/wsgi.py b/cloudpebble/wsgi.py index 79c44ca2..75e8ea0b 100644 --- a/cloudpebble/wsgi.py +++ b/cloudpebble/wsgi.py @@ -1,33 +1,16 @@ """ WSGI config for cloudpebble project. -This module contains the WSGI application used by Django's development server -and any production WSGI deployments. It should expose a module-level variable -named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover -this application via the ``WSGI_APPLICATION`` setting. - -Usually you will have the standard Django WSGI application here, but it also -might make sense to replace the whole Django WSGI application with a custom one -that later delegates to the Django one. For example, you could introduce WSGI -middleware here, or combine a Django application with an application of another -framework. +It exposes the WSGI callable as a module-level variable named ``application``. +For more information on this file, see +https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/ """ -import os -# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks -# if running multiple sites in the same mod_wsgi process. To fix this, use -# mod_wsgi daemon mode with each site in its own daemon process, or use -# os.environ["DJANGO_SETTINGS_MODULE"] = "cloudpebble.settings" -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cloudpebble.settings") +import os -# This application object is used by any WSGI server configured to use this -# file. This includes Django's development server, if the WSGI_APPLICATION -# setting points here. from django.core.wsgi import get_wsgi_application -from dj_static import Cling -application = Cling(get_wsgi_application()) -# Apply WSGI middleware here. -# from helloworld.wsgi import HelloWorldApplication -# application = HelloWorldApplication(application) +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cloudpebble.settings") + +application = get_wsgi_application() diff --git a/docker_start.sh b/docker_start.sh index 4a293d16..01bfa123 100644 --- a/docker_start.sh +++ b/docker_start.sh @@ -3,13 +3,12 @@ sleep 1 if [ ! -z "$RUN_WEB" ]; then # Make sure the database is up to date. echo "Performing database migration." - python manage.py syncdb --noinput - python manage.py migrate + python manage.py migrate --fake-initial python manage.py runserver 0.0.0.0:$PORT elif [ ! -z "$RUN_CELERY" ]; then sleep 2 - C_FORCE_ROOT=true python manage.py celery worker --autoreload --loglevel=info + C_FORCE_ROOT=true celery -A cloudpebble worker --autoreload --loglevel=info else echo "Doing nothing!" 
exit 1 diff --git a/ide/api/__init__.py b/ide/api/__init__.py index b2538ed2..767c7890 100644 --- a/ide/api/__init__.py +++ b/ide/api/__init__.py @@ -10,16 +10,26 @@ __author__ = 'katharine' -def json_response(response=None): +def json_response(response=None, success=True): if response is None: response = {} - response["success"] = True + if success is not None: + response["success"] = success return HttpResponse(json.dumps(response), content_type="application/json") -def json_failure(error): - return HttpResponse(json.dumps({"success": False, "error": error}), content_type="application/json") +def json_failure(error, status=200): + if isinstance(error, basestring): + message = error + elif hasattr(error, 'messages'): + message = ", ".join(error.messages) + elif hasattr(error, 'message'): + message = error.message + else: + message = "Internal server error" + + return HttpResponse(json.dumps({"success": False, "error": message}), content_type="application/json", status=status) @login_required diff --git a/ide/api/monkey.py b/ide/api/monkey.py new file mode 100644 index 00000000..aa99c428 --- /dev/null +++ b/ide/api/monkey.py @@ -0,0 +1,402 @@ +import logging +import urllib +import os +from functools import wraps + +from django.conf import settings +from django.contrib.auth.decorators import login_required +from django.core.exceptions import PermissionDenied +from django.core.urlresolvers import reverse +from django.db import transaction +from django.http import HttpResponse, Http404 +from django.shortcuts import get_object_or_404 +from django.utils import timezone +from django.views.decorators.cache import cache_control +from django.views.decorators.csrf import csrf_exempt +from django.views.decorators.http import last_modified +from django.views.decorators.http import require_POST, require_safe + +import ide.tasks.monkey as tasks +from ide.models import ScreenshotSet, ScreenshotFile, TestFile +from ide.models.monkey import TestSession, TestRun, TestCode, TestLog +from ide.models.project import Project +from utils import orchestrator +from utils.bundle import TestBundle +from utils.jsonview import json_view +from utils.redis_helper import redis_client + +logger = logging.getLogger(__name__) + +__author__ = 'joe' + + +def _filtered_max(*args): + """ Find the maximum of all arguments, completely ignoring any values of None """ + filtered = [a for a in args if a] + return max(filtered) if len(filtered) > 0 else None + + +def serialise_run(run, link_test=True, link_session=True, include_subscribe_url=False): + """ Prepare a TestRun for representation in JSON + + :param run: TestRun to represent + :param link_test: if True, adds in data from the TestRun's test + :param link_session: if True, adds in data from the TestRun's session + :return: A dict full of info from the TestRun object + """ + result = { + 'id': run.id, + 'name': run.name, + 'logs': reverse('ide:get_test_run_log', args=[run.session.project.id, run.id]) if run.has_log else None, + 'date_added': str(run.session.date_added), + 'artefacts': run.artefacts, + 'platform': run.platform + } + + if link_test and run.has_test: + result['test'] = { + 'id': run.test.id, + 'name': run.test.file_name + } + + if link_session: + result['session_id'] = run.session.id + if run.code is not None: + result['code'] = run.code + result['date_added'] = str(run.session.date_added) + if run.date_completed is not None: + result['date_completed'] = str(run.date_completed) + if include_subscribe_url and run.code == TestCode.PENDING: + url = 
redis_client.get('qemu_subscribe_url_for_run_{}'.format(run.id)) + if url: + result['subscribe_url'] = url + return result + + +def serialise_session(session, include_runs=False): + """ Prepare a TestSession for representation in JSON + + :param session: TestSession to represent + :param include_runs: if True, includes a list of serialised test runs + :return: A dict full of info from the TestSession object + """ + runs = TestRun.objects.filter(session=session) + + pendings = runs.filter(code=TestCode.PENDING) + passes = runs.filter(code=TestCode.PASSED) + fails = runs.filter(code__lt=0) + status = TestCode.PENDING if pendings.count() > 0 else (TestCode.FAILED if fails.count() > 0 else TestCode.PASSED) + result = { + 'id': session.id, + 'date_added': str(session.date_added), + 'passes': passes.count(), + 'fails': fails.count(), + 'run_count': runs.count(), + 'status': status, + 'kind': session.kind + } + if session.date_completed is not None: + result['date_completed'] = str(session.date_completed) + if include_runs: + result['runs'] = [serialise_run(run, link_session=False, link_test=True) for run in runs] + return result + + +def testbench_privilages_required(f): + """ Decorator for 404ing any test-bench API requests from unauthorized users. """ + + @wraps(f) + def _wrapped(request, *args, **kwargs): + if not request.user.is_testbench_user: + # As far as non-authorized clients are concerned, this test point should not exist at all. + raise Http404 + return f(request, *args, **kwargs) + + return _wrapped + + +def get_latest_run_date_for_sessions(sessions): + """ Given a list of sessions, find the latest date_completed of all of their runs + :param sessions: List or queryset of TestSessions + """ + try: + return TestRun.objects.filter(session__in=sessions).exclude(date_completed__isnull=True).latest( + "date_completed").date_completed + except TestRun.DoesNotExist: + return None + + +def get_latest_session_date(sessions): + """ Given a list of sessions, find the latest time that any of them or their runs were added/completed + :param sessions: List or queryset of TestSessions + """ + try: + latest_completed = sessions.exclude(date_completed__isnull=True).latest("date_completed").date_completed + except TestSession.DoesNotExist: + latest_completed = None + try: + latest_added = sessions.latest("date_added").date_added + except TestSession.DoesNotExist: + latest_added = None + latest_run_completed = get_latest_run_date_for_sessions(sessions) + return _filtered_max(latest_added, latest_completed, latest_run_completed) + + +@testbench_privilages_required +def get_test_session_latest(request, project_id, session_id): + """ Get the last-modified date for a get_test_session request """ + project = get_object_or_404(Project, pk=project_id, owner=request.user) + session = get_object_or_404(TestSession, pk=session_id, project=project) + return _filtered_max(session.date_completed, session.date_added, get_latest_run_date_for_sessions([session])) + + +# GET /project//test_sessions/ +@testbench_privilages_required +@last_modified(get_test_session_latest) +@cache_control(must_revalidate=True, max_age=1) +@require_safe +@login_required +@json_view +def get_test_session(request, project_id, session_id): + """ Fetch a single test session by its ID """ + project = get_object_or_404(Project, pk=project_id, owner=request.user) + session = get_object_or_404(TestSession, pk=session_id, project=project) + return {"data": serialise_session(session)} + + +@testbench_privilages_required +def 
get_sessions_for_get_sessions_request(request, project_id): + """ Get the sessions relevant to a get_sessions request """ + project = get_object_or_404(Project, pk=project_id, owner=request.user) + session_id = request.GET.get('id', None) + kwargs = {'project': project} + if session_id is not None: + kwargs['id'] = session_id + return TestSession.objects.filter(**kwargs) + + +@testbench_privilages_required +def get_test_run_latest(request, project_id, run_id): + project = get_object_or_404(Project, pk=project_id, owner=request.user) + run = get_object_or_404(TestRun, pk=run_id, session__project=project) + return _filtered_max(run.session.date_added, run.date_completed) + + +# GET /project//test_runs/ +@testbench_privilages_required +@last_modified(get_test_run_latest) +@cache_control(must_revalidate=True, max_age=1) +@require_safe +@login_required +@json_view +def get_test_run(request, project_id, run_id): + """ Fetch a single test run """ + project = get_object_or_404(Project, pk=project_id, owner=request.user) + run = get_object_or_404(TestRun, pk=run_id, session__project=project) + return {"data": serialise_run(run, include_subscribe_url=True)} + + +def get_test_sessions_latest(request, project_id): + """ Get the last-modified date for a get_test_sessions request """ + sessions = get_sessions_for_get_sessions_request(request, project_id) + return get_latest_session_date(sessions) + + +# GET /project//test_sessions +@testbench_privilages_required +@last_modified(get_test_sessions_latest) +@cache_control(must_revalidate=True, max_age=1) +@require_safe +@login_required +@json_view +def get_test_sessions(request, project_id): + """ Fetch all test sessions for a project, optionally filtering by ID """ + sessions = get_sessions_for_get_sessions_request(request, project_id) + return {"data": [serialise_session(session) for session in sessions]} + + +def get_test_runs_for_get_test_runs_request(project_id, request): + """ Get the runs relevant to a get_test_runs request """ + project = get_object_or_404(Project, pk=project_id, owner=request.user) + test_id = request.GET.get('test', None) + session_id = request.GET.get('session', None) + run_id = request.GET.get('id', None) + kwargs = {'session__project': project} + if test_id is not None: + kwargs['test__id'] = test_id + if session_id is not None: + kwargs['session__id'] = session_id + if run_id is not None: + kwargs['id'] = run_id + runs = TestRun.objects.filter(**kwargs) + return runs + + +@testbench_privilages_required +def get_test_runs_latest(request, project_id): + """ Get the last-modified date for a get_test_runs request """ + runs = get_test_runs_for_get_test_runs_request(project_id, request) + try: + latest_run_completed = runs.exclude(date_completed__isnull=True).latest("date_completed").date_completed + except TestRun.DoesNotExist: + latest_run_completed = None + latest_session = get_latest_session_date(TestSession.objects.filter(runs__in=runs)) + return _filtered_max(latest_run_completed, latest_session) + + +# GET /project//test_runs?test=&session= +@testbench_privilages_required +@cache_control(must_revalidate=True, max_age=1) +@last_modified(get_test_runs_latest) +@require_safe +@login_required +@json_view +def get_test_runs(request, project_id): + """ Fetch a list of test runs, optionally filtering by test ID or session ID """ + runs = get_test_runs_for_get_test_runs_request(project_id, request) + return {"data": [serialise_run(run, link_test=True, link_session=True) for run in runs]} + + +@testbench_privilages_required 
+@require_safe +@login_required +def get_test_run_log(request, project_id, run_id): + """ Download the log file for a test run """ + # TODO: catch errors + project = get_object_or_404(Project, pk=project_id, owner=request.user) + run = get_object_or_404(TestRun, pk=run_id, session__project=project) + log = get_object_or_404(TestLog, test_run=run) + contents = log.get_contents() + return HttpResponse(contents, content_type="text/plain") + + +@testbench_privilages_required +@require_POST +@login_required +@json_view +def run_qemu_test(request, project_id, test_id): + """ Request an interactive QEMU test session """ + # Load request parameters and database objects + project = get_object_or_404(Project, pk=project_id, owner=request.user) + token = request.POST['token'] + host = request.POST['host'] + emu = request.POST['emu'] + platform = request.POST['platform'] + update = request.POST.get('update', False) + # Get QEMU server which corresponds to the requested host + server = next(x for x in set(settings.QEMU_URLS) if host in x) + subscribe_url = server + 'qemu/%s/test/subscribe' % urllib.quote_plus(emu) + + # Create the session and make the run and callback URL. + # This will fail if the session has more than one run + session = TestSession.setup_session(project, [test_id], [platform], 'live') + run = session.runs.get() + callback_url = session.make_callback_url(request, token=token) + + # Start the task in celery + task = tasks.start_qemu_test.delay(session.id, callback_url, emu, server, token, update) + + # Record the subscribe URL in redis so it can be fetched after a page reload + redis_client.set('qemu_subscribe_url_for_run_{}'.format(run.id), subscribe_url, ex=1200) + + return { + 'run_id': run.id, + 'session_id': session.id, + 'subscribe_url': subscribe_url, + 'task_id': task.task_id + } + + +@require_POST +@csrf_exempt +@json_view +def notify_test_session(request, project_id, session_id): + """ Callback from interactive test session. Sets the code/log/date for a test session's runs, + and uses the qemu launch token to ensure that only the cloudpebble-qemu-controller can call it. + @csrf_exempt is needed to prevent the qemu-controller from being blocked by Django's CSRF Prevention.""" + + # TODO: deal with invalid input/situations (e.g. notified twice) + project = get_object_or_404(Project, pk=int(project_id)) + session = get_object_or_404(TestSession, pk=int(session_id), project=project) + token = request.POST.get('token', None) + if not token: + token = request.GET['token'] + if token != settings.QEMU_LAUNCH_AUTH_HEADER: + logging.warn("Rejecting test result, posted token %s doesn't match %s", token, settings.QEMU_LAUNCH_AUTH_HEADER) + raise PermissionDenied + + orch_id = request.POST.get('id', None) + + # The procedure depends on whether orchestrator or qemu-controller are notifying us. 
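For the non-orchestrator branch below, the qemu-controller is expected to POST its result back to the URL produced by `TestSession.make_callback_url()`. A rough sketch of such a callback, shown only to illustrate the fields this view reads; the URL and file name are placeholders, not part of this change:

```python
# Illustrative only: the form fields consumed by notify_test_session()
# when a live QEMU run (rather than orchestrator) reports its result.
import requests

callback_url = '...'  # value returned by session.make_callback_url(request, token=...)

requests.post(callback_url, data={
    'token': 'secret',             # must match settings.QEMU_LAUNCH_AUTH_HEADER
    'status': 'passed',            # 'passed' or 'failed'; anything else maps to TestCode.ERROR
    'log': 'captured test log',
    'uploads_platform': 'basalt',  # platform tag for any uploaded screenshots
}, files=[('uploads[]', ('screenshot.png', open('screenshot.png', 'rb'), 'image/png'))])
```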
+ if orch_id: + # GET /api/jobs/ + job_info = orchestrator.get_job_info(orch_id) + tasks.notify_orchestrator_session.delay(session.id, job_info) + else: + uploaded_files = request.FILES.getlist('uploads[]') + platform = request.POST.get('uploads_platform', None) + log = request.POST['log'] + status = request.POST['status'] + notify_qemu_session(session, platform, status, log, uploaded_files) + + +def notify_qemu_session(session, platform, status, log, uploaded_files): + date_completed = timezone.now() + if status == 'passed': + result = TestCode.PASSED + elif status == 'failed': + result = TestCode.FAILED + else: + result = TestCode.ERROR + # Non-orchestrator notifications should only be for sessions with single runs + run = TestRun.objects.get(session=session) + with transaction.atomic(): + session.date_completed = date_completed + run.code = result + run.log = log + run.date_completed = date_completed + session.save() + run.save() + if uploaded_files: + test = run.test + for posted_file in uploaded_files: + if posted_file.content_type != "image/png": + raise ValueError("Screenshots must be PNG files") + name = os.path.splitext(os.path.basename(posted_file.name))[0] + screenshot_set, did_create_set = ScreenshotSet.objects.get_or_create(test=test, name=name) + screenshot_file, did_create_file = ScreenshotFile.objects.get_or_create(screenshot_set=screenshot_set, + platform=platform) + screenshot_file.save() + screenshot_file.save_file(posted_file, file_size=posted_file.size) + + +@testbench_privilages_required +@require_safe +@login_required +def download_tests(request, project_id): + """ Download all the tests for a project as a ZIP file. """ + project = get_object_or_404(Project, pk=project_id, owner=request.user) + with TestBundle(project=project).open(frame_tests=settings.DEBUG) as f: + return HttpResponse(f.read(), content_type='application/zip') + + +# POST /project//test_sessions +@testbench_privilages_required +@require_POST +@login_required +@json_view +def post_test_session(request, project_id): + # TODO: run as celery task? 
+ project = get_object_or_404(Project, pk=project_id, owner=request.user) + test_ids = [int(test) for test in request.POST.get('tests', "").split(",") if test] or None + platforms = project.last_built_platforms + session = TestSession.setup_session(project, test_ids, platforms, 'batch') + callback_url = session.make_callback_url(request, settings.QEMU_LAUNCH_AUTH_HEADER) + + task = tasks.start_orchestrator_test.delay(session.id, callback_url) + return { + "session": serialise_session(session, include_runs=True), + "task_id": task.id + } + +# TODO: Analytics diff --git a/ide/api/phone.py b/ide/api/phone.py index 61c6b50b..a7d8547d 100644 --- a/ide/api/phone.py +++ b/ide/api/phone.py @@ -1,7 +1,7 @@ import uuid +import json from django.conf import settings from django.contrib.auth.decorators import login_required -from django.utils import simplejson as json, simplejson from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_safe, require_POST from django.utils.translation import ugettext as _ diff --git a/ide/api/project.py b/ide/api/project.py index e0a24c39..019a9f89 100644 --- a/ide/api/project.py +++ b/ide/api/project.py @@ -11,6 +11,7 @@ from ide.models.build import BuildResult from ide.models.project import Project, TemplateProject from ide.models.files import SourceFile, ResourceFile +from ide.models.monkey import TestFile from ide.tasks.archive import create_archive, do_import_archive from ide.tasks.build import run_compile from ide.tasks.gist import import_gist @@ -28,6 +29,7 @@ def project_info(request, project_id): project = get_object_or_404(Project, pk=project_id, owner=request.user) source_files = SourceFile.objects.filter(project=project).order_by('file_name') resources = ResourceFile.objects.filter(project=project).order_by('file_name') + test_files = TestFile.objects.filter(project=project).order_by('file_name') return { 'type': project.project_type, 'name': project.name, @@ -50,6 +52,12 @@ def project_info(request, project_id): 'app_platforms': project.app_platforms, 'app_modern_multi_js': project.app_modern_multi_js, 'menu_icon': project.menu_icon.id if project.menu_icon else None, + 'test_files': [{ + 'name': f.file_name, + 'id': f.id, + 'target': f.target, + 'lastModified': time.mktime(f.last_modified.utctimetuple()) + } for f in test_files], 'source_files': [{ 'name': f.file_name, 'id': f.id, diff --git a/ide/api/screenshots.py b/ide/api/screenshots.py new file mode 100644 index 00000000..0d410e56 --- /dev/null +++ b/ide/api/screenshots.py @@ -0,0 +1,149 @@ +import json +from django.conf import settings +from django.contrib.auth.decorators import login_required +from django.shortcuts import get_object_or_404 +from django.http import StreamingHttpResponse, HttpResponseRedirect +from django.db import transaction +from django.views.decorators.http import require_POST, require_safe +from django.core.urlresolvers import reverse +from utils.td_helper import send_td_event + +from ide.api import json_failure, json_response +from ide.models.project import Project +from ide.models.monkey import TestFile, ScreenshotSet, ScreenshotFile +import utils.s3 as s3 +from utils.jsonview import json_view, BadRequest + +__author__ = 'joe' + + +def make_screenshot_dict(screenshot_set, project_id): + return { + "name": screenshot_set.name, + "id": screenshot_set.id, + "files": dict([(screenshot_file.platform, { + "id": screenshot_file.id, + # "src": "project/%s/screenshot/%s" % (project_id, screenshot_file.id) + "src": 
reverse('ide:show_screenshot', kwargs={ + 'project_id': project_id, + 'test_id': screenshot_set.test.id, + 'screenshot_id': screenshot_set.id, + 'platform_name': screenshot_file.platform + }) + }) for screenshot_file in screenshot_set.files.all()]) + } + + +@require_safe +@login_required +@json_view +def load_screenshots(request, project_id, test_id): + project = get_object_or_404(Project, pk=project_id, owner=request.user) + test = get_object_or_404(TestFile, pk=test_id) + screenshots = test.screenshot_sets.all() + + send_td_event('cloudpebble_load_screenshots', data={'data': { + 'test': test.id + }}, project=project, request=request) + + return {"screenshots": [make_screenshot_dict(screenshot, project_id) for screenshot in screenshots]} + + +@require_POST +@login_required +@json_view +def sync_screenshots(request, project_id, test_id): + """ Synchronise screenshots between the client and server + Takes a JSON parameter 'screenshots'. An example: + [ + { + "name": "one", + "id": 2 + "files": { + "chalk": {"id": 5}, + "basalt": {"id": 2} + }, + }, { + "name": "two", + "files": {"basalt": {"uploadId": 0}} + } + ] + - Chalk and basalt screenshots for one.png are kept + - A new screenshot for "two.png" is uploaded, using the first uploaded file + - Any other screenshots are deleted. + """ + + project = get_object_or_404(Project, pk=project_id, owner=request.user) + screenshot_data = json.loads(request.POST['screenshots']) + uploaded_files = request.FILES.getlist('files[]') + + test = get_object_or_404(TestFile, pk=test_id) + current_screenshot_sets = test.screenshot_sets.all() + # Get the list of screenshot set IDs + deleted_screenshot_set_ids = [shot.id for shot in current_screenshot_sets] + try: + with transaction.atomic(): + # Go through uploaded screenshot sets + for screenshot_set_info in screenshot_data: + # If uploaded screenshot set has an ID, mark it as not deleted and update it + screenshot_set_id = screenshot_set_info.get('id', None) + if screenshot_set_id: + deleted_screenshot_set_ids.remove(screenshot_set_id) + screenshot_set = get_object_or_404(ScreenshotSet, pk=screenshot_set_id) + # Set a new name + screenshot_set.name = screenshot_set_info['name'] + # Delete any removed or replaced screenshot files + for screenshot_file in screenshot_set.files.all(): + if screenshot_file.platform not in screenshot_set_info['files']: + screenshot_file.delete() + elif screenshot_set_info['files'][screenshot_file.platform].get('uploadId', None) is not None: + screenshot_file.delete() + else: + # Create a new ScreenshotSet if no ID was given + screenshot_set = ScreenshotSet.objects.create(test=test, name=screenshot_set_info['name']) + screenshot_set.save() + + # Add new uploads + for platform, upload_info in screenshot_set_info['files'].iteritems(): + uploadId = upload_info.get('uploadId', None) + if isinstance(uploadId, int): + screenshot_file, did_create = ScreenshotFile.objects.get_or_create(screenshot_set=screenshot_set, platform=platform) + posted_file = uploaded_files[uploadId] + if posted_file.content_type != "image/png": + raise ValueError("Screenshots must be PNG files") + screenshot_file.save() + screenshot_file.save_file(posted_file, file_size=posted_file.size) + + screenshot_set.save() + + # Delete all screenshot sets missing from POST request + for screenshot in current_screenshot_sets: + if screenshot.id in deleted_screenshot_set_ids: + screenshot.delete() + except (FloatingPointError, ValueError) as e: + # I can't remember which FloatingPointError this was intended to catch. 
+ raise BadRequest(str(e)) + + current_screenshot_sets = ScreenshotSet.objects.filter(test=test) + return {"screenshots": [make_screenshot_dict(screenshot, project_id) for screenshot in current_screenshot_sets]} + + +@require_safe +@login_required +def show_screenshot(request, project_id, test_id, screenshot_id, platform_name): + screenshot_set = get_object_or_404(ScreenshotSet, pk=screenshot_id, test__project__owner=request.user) + screenshot_file = get_object_or_404(ScreenshotFile, platform=platform_name, screenshot_set=screenshot_set) + file_name = screenshot_set.name + ".png" + content_type = 'image/png' + content_disposition = "attachment; filename=\"%s\"" % file_name + + if settings.AWS_ENABLED: + headers = { + 'response-content-disposition': content_disposition, + 'Content-Type': content_type + } + return HttpResponseRedirect(s3.get_signed_url('source', screenshot_file.s3_path, headers=headers)) + else: + response = StreamingHttpResponse(open(screenshot_file.local_filename), content_type=content_type) + response['Content-Disposition'] = content_disposition + return response diff --git a/ide/api/source.py b/ide/api/source.py index 0c6e313e..428f92a1 100644 --- a/ide/api/source.py +++ b/ide/api/source.py @@ -2,13 +2,15 @@ import time import json from django.contrib.auth.decorators import login_required -from django.db import IntegrityError +from django.db import IntegrityError, transaction from django.shortcuts import get_object_or_404 from django.views.decorators.csrf import csrf_protect from django.views.decorators.http import require_POST, require_safe from django.utils.translation import ugettext as _ +from django.core.exceptions import ValidationError from ide.models.project import Project from ide.models.files import SourceFile +from ide.models.monkey import TestFile from utils.td_helper import send_td_event from utils.jsonview import json_view, BadRequest @@ -21,21 +23,21 @@ def create_source_file(request, project_id): project = get_object_or_404(Project, pk=project_id, owner=request.user) try: - f = SourceFile.objects.create(project=project, - file_name=request.POST['name'], - target=request.POST.get('target', 'app')) - f.save_text(request.POST.get('content', '')) - - except IntegrityError as e: + with transaction.atomic(): + f = SourceFile.objects.create(project=project, + file_name=request.POST['name'], + target=request.POST.get('target', 'app')) + f.save_text(request.POST.get('content', '')) + except (IntegrityError, ValidationError) as e: raise BadRequest(str(e)) - - send_td_event('cloudpebble_create_file', data={ - 'data': { - 'filename': request.POST['name'], - 'kind': 'source', - 'target': f.target - } - }, request=request, project=project) + else: + send_td_event('cloudpebble_create_file', data={ + 'data': { + 'filename': request.POST['name'], + 'kind': 'source', + 'target': f.target, + } + }, request=request, project=project) return { 'file': { @@ -47,13 +49,51 @@ def create_source_file(request, project_id): } +@require_POST +@login_required +@json_view +def create_test_file(request, project_id): + project = get_object_or_404(Project, pk=project_id, owner=request.user) + try: + f = TestFile.objects.create(project=project, + file_name=request.POST['name']) + f.save_text(request.POST.get('content', '')) + except IntegrityError as e: + raise BadRequest(str(e)) + else: + send_td_event('cloudpebble_create_file', data={ + 'data': { + 'filename': request.POST['name'], + 'kind': 'test' + } + }, project=project, request=request) + + return { + 'file': { + 'id': f.id, + 'name': 
f.file_name, + 'target': f.target, + 'file_path': f.project_path + } + } + + +def get_source_file(kind, pk, project): + if kind == 'source': + return get_object_or_404(SourceFile, pk=pk, project=project) + elif kind == 'tests': + return get_object_or_404(TestFile, pk=pk, project=project) + else: + raise ValueError('Invalid source kind %s' % kind) + + @require_safe @csrf_protect @login_required @json_view -def load_source_file(request, project_id, file_id): +def load_source_file(request, project_id, kind, file_id): project = get_object_or_404(Project, pk=project_id, owner=request.user) - source_file = get_object_or_404(SourceFile, pk=file_id, project=project) + source_file = get_source_file(kind, pk=file_id, project=project) content = source_file.get_contents() @@ -65,7 +105,7 @@ def load_source_file(request, project_id, file_id): send_td_event('cloudpebble_open_file', data={ 'data': { 'filename': source_file.file_name, - 'kind': 'source' + 'kind': kind } }, request=request, project=project) @@ -76,13 +116,36 @@ def load_source_file(request, project_id, file_id): } +@require_safe +@login_required +@json_view +def get_test_list(request, project_id): + project = get_object_or_404(Project, pk=project_id, owner=request.user) + objects = TestFile.objects.filter(project=project) + + send_td_event('cloudpebble_list_source', data={ + 'data': { + 'kind': 'tests' + } + }, project=project, request=request) + + return { + "tests": [{ + "modified": time.mktime(test.last_modified.utctimetuple()), + "id": test.id, + "name": test.file_name, + "last_code": test.latest_code + } for test in objects] + } + + @require_safe @csrf_protect @login_required @json_view -def source_file_is_safe(request, project_id, file_id): +def source_file_is_safe(request, project_id, kind, file_id): project = get_object_or_404(Project, pk=project_id, owner=request.user) - source_file = get_object_or_404(SourceFile, pk=file_id, project=project) + source_file = get_source_file(kind, pk=file_id, project=project) client_modified = datetime.datetime.fromtimestamp(int(request.GET['modified'])) server_modified = source_file.last_modified.replace(tzinfo=None, microsecond=0) is_safe = client_modified >= server_modified @@ -92,16 +155,16 @@ def source_file_is_safe(request, project_id, file_id): @require_POST @login_required @json_view -def rename_source_file(request, project_id, file_id): +def rename_source_file(request, project_id, kind, file_id): project = get_object_or_404(Project, pk=project_id, owner=request.user) - source_file = get_object_or_404(SourceFile, pk=file_id, project=project) + source_file = get_source_file(kind, pk=file_id, project=project) old_filename = source_file.file_name if source_file.file_name != request.POST['old_name']: send_td_event('cloudpebble_rename_abort_unsafe', data={ 'data': { 'filename': source_file.file_name, - 'kind': 'source' + 'kind': kind } }, request=request, project=project) raise BadRequest(_("Could not rename, file has been renamed already.")) @@ -109,7 +172,7 @@ def rename_source_file(request, project_id, file_id): send_td_event('cloudpebble_rename_abort_unsafe', data={ 'data': { 'filename': source_file.file_name, - 'kind': 'source', + 'kind': kind, 'modified': time.mktime(source_file.last_modified.utctimetuple()), } }, request=request, project=project) @@ -121,7 +184,7 @@ def rename_source_file(request, project_id, file_id): 'data': { 'old_filename': old_filename, 'new_filename': source_file.file_name, - 'kind': 'source' + 'kind': kind } }, request=request, project=project) return {'modified': 
time.mktime(source_file.last_modified.utctimetuple()), 'file_path': source_file.project_path} @@ -130,14 +193,15 @@ def rename_source_file(request, project_id, file_id): @require_POST @login_required @json_view -def save_source_file(request, project_id, file_id): +def save_source_file(request, project_id, kind, file_id): project = get_object_or_404(Project, pk=project_id, owner=request.user) - source_file = get_object_or_404(SourceFile, pk=file_id, project=project) + source_file = get_source_file(kind, pk=file_id, project=project) + if source_file.was_modified_since(int(request.POST['modified'])): send_td_event('cloudpebble_save_abort_unsafe', data={ 'data': { 'filename': source_file.file_name, - 'kind': 'source' + 'kind': kind } }, request=request, project=project) raise Exception(_("Could not save: file has been modified since last save.")) @@ -147,7 +211,7 @@ def save_source_file(request, project_id, file_id): send_td_event('cloudpebble_save_file', data={ 'data': { 'filename': source_file.file_name, - 'kind': 'source' + 'kind': kind } }, request=request, project=project) @@ -157,15 +221,15 @@ def save_source_file(request, project_id, file_id): @require_POST @login_required @json_view -def delete_source_file(request, project_id, file_id): +def delete_source_file(request, project_id, kind, file_id): project = get_object_or_404(Project, pk=project_id, owner=request.user) - source_file = get_object_or_404(SourceFile, pk=file_id, project=project) + source_file = get_source_file(kind, pk=file_id, project=project) source_file.delete() send_td_event('cloudpebble_delete_file', data={ 'data': { 'filename': source_file.file_name, - 'kind': 'source' + 'kind': kind } }, request=request, project=project) diff --git a/ide/api/ycm.py b/ide/api/ycm.py index f0a17f86..4ed643e4 100644 --- a/ide/api/ycm.py +++ b/ide/api/ycm.py @@ -1,13 +1,14 @@ import json import logging import random - +from urlparse import urlparse import requests + +from django.utils.translation import ugettext as _ from django.conf import settings from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404 from django.views.decorators.http import require_POST -from urlparse import urlparse from ide.models.project import Project from utils.jsonview import json_view diff --git a/ide/migrations/0001_initial.py b/ide/migrations/0001_initial.py index d5e6ec78..6f07fe99 100644 --- a/ide/migrations/0001_initial.py +++ b/ide/migrations/0001_initial.py @@ -1,191 +1,240 @@ # -*- coding: utf-8 -*- -import datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db import models - - -class Migration(SchemaMigration): - - def forwards(self, orm): - # Adding model 'Project' - db.create_table(u'ide_project', ( - (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('owner', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), - ('name', self.gf('django.db.models.fields.CharField')(max_length=50)), - ('last_modified', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), - ('version_def_name', self.gf('django.db.models.fields.CharField')(default='APP_RESOURCES', max_length=50)), - )) - db.send_create_signal(u'ide', ['Project']) - - # Adding unique constraint on 'Project', fields ['owner', 'name'] - db.create_unique(u'ide_project', ['owner_id', 'name']) - - # Adding model 'TemplateProject' - db.create_table(u'ide_templateproject', ( - (u'project_ptr', 
self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ide.Project'], unique=True, primary_key=True)), - ('template_kind', self.gf('django.db.models.fields.IntegerField')(db_index=True)), - )) - db.send_create_signal(u'ide', ['TemplateProject']) - - # Adding model 'BuildResult' - db.create_table(u'ide_buildresult', ( - (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='builds', to=orm['ide.Project'])), - ('uuid', self.gf('django.db.models.fields.CharField')(default='8277f892d4d84a69ba21c3989a02c61c', max_length=32)), - ('state', self.gf('django.db.models.fields.IntegerField')(default=1)), - ('started', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)), - ('finished', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)), - )) - db.send_create_signal(u'ide', ['BuildResult']) - - # Adding model 'ResourceFile' - db.create_table(u'ide_resourcefile', ( - (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='resources', to=orm['ide.Project'])), - ('file_name', self.gf('django.db.models.fields.CharField')(max_length=100)), - ('kind', self.gf('django.db.models.fields.CharField')(max_length=9)), - )) - db.send_create_signal(u'ide', ['ResourceFile']) - - # Adding unique constraint on 'ResourceFile', fields ['project', 'file_name'] - db.create_unique(u'ide_resourcefile', ['project_id', 'file_name']) - - # Adding model 'ResourceIdentifier' - db.create_table(u'ide_resourceidentifier', ( - (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('resource_file', self.gf('django.db.models.fields.related.ForeignKey')(related_name='identifiers', to=orm['ide.ResourceFile'])), - ('resource_id', self.gf('django.db.models.fields.CharField')(max_length=100)), - ('character_regex', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)), - )) - db.send_create_signal(u'ide', ['ResourceIdentifier']) - - # Adding unique constraint on 'ResourceIdentifier', fields ['resource_file', 'resource_id'] - db.create_unique(u'ide_resourceidentifier', ['resource_file_id', 'resource_id']) - - # Adding model 'SourceFile' - db.create_table(u'ide_sourcefile', ( - (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='source_files', to=orm['ide.Project'])), - ('file_name', self.gf('django.db.models.fields.CharField')(max_length=100)), - )) - db.send_create_signal(u'ide', ['SourceFile']) - - # Adding unique constraint on 'SourceFile', fields ['project', 'file_name'] - db.create_unique(u'ide_sourcefile', ['project_id', 'file_name']) - - - def backwards(self, orm): - # Removing unique constraint on 'SourceFile', fields ['project', 'file_name'] - db.delete_unique(u'ide_sourcefile', ['project_id', 'file_name']) - - # Removing unique constraint on 'ResourceIdentifier', fields ['resource_file', 'resource_id'] - db.delete_unique(u'ide_resourceidentifier', ['resource_file_id', 'resource_id']) - - # Removing unique constraint on 'ResourceFile', fields ['project', 'file_name'] - db.delete_unique(u'ide_resourcefile', ['project_id', 'file_name']) - - # Removing unique constraint on 'Project', fields ['owner', 'name'] - db.delete_unique(u'ide_project', ['owner_id', 'name']) - - # Deleting model 'Project' - 
db.delete_table(u'ide_project') - - # Deleting model 'TemplateProject' - db.delete_table(u'ide_templateproject') - - # Deleting model 'BuildResult' - db.delete_table(u'ide_buildresult') - - # Deleting model 'ResourceFile' - db.delete_table(u'ide_resourcefile') - - # Deleting model 'ResourceIdentifier' - db.delete_table(u'ide_resourceidentifier') - - # Deleting model 'SourceFile' - db.delete_table(u'ide_sourcefile') - - - models = { - u'auth.group': { - 'Meta': {'object_name': 'Group'}, - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), - 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) - }, - u'auth.permission': { - 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, - 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) - }, - u'auth.user': { - 'Meta': {'object_name': 'User'}, - 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), - 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), - 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), - 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) - }, - u'contenttypes.contenttype': { - 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, - 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) - }, - u'ide.buildresult': { - 'Meta': {'object_name': 'BuildResult'}, - 'finished': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'builds'", 'to': 
u"orm['ide.Project']"}), - 'started': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}), - 'state': ('django.db.models.fields.IntegerField', [], {'default': '1'}), - 'uuid': ('django.db.models.fields.CharField', [], {'default': "'7d2901ebedec4f708e706c6424a71e73'", 'max_length': '32'}) - }, - u'ide.project': { - 'Meta': {'unique_together': "(('owner', 'name'),)", 'object_name': 'Project'}, - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), - 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), - 'version_def_name': ('django.db.models.fields.CharField', [], {'default': "'APP_RESOURCES'", 'max_length': '50'}) - }, - u'ide.resourcefile': { - 'Meta': {'unique_together': "(('project', 'file_name'),)", 'object_name': 'ResourceFile'}, - 'file_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'kind': ('django.db.models.fields.CharField', [], {'max_length': '9'}), - 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resources'", 'to': u"orm['ide.Project']"}) - }, - u'ide.resourceidentifier': { - 'Meta': {'unique_together': "(('resource_file', 'resource_id'),)", 'object_name': 'ResourceIdentifier'}, - 'character_regex': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'resource_file': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'identifiers'", 'to': u"orm['ide.ResourceFile']"}), - 'resource_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}) - }, - u'ide.sourcefile': { - 'Meta': {'unique_together': "(('project', 'file_name'),)", 'object_name': 'SourceFile'}, - 'file_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'source_files'", 'to': u"orm['ide.Project']"}) - }, - u'ide.templateproject': { - 'Meta': {'object_name': 'TemplateProject', '_ormbases': [u'ide.Project']}, - u'project_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['ide.Project']", 'unique': 'True', 'primary_key': 'True'}), - 'template_kind': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}) - } - } - - complete_apps = ['ide'] \ No newline at end of file +# Generated by Django 1.9.7 on 2016-09-07 00:05 +from __future__ import unicode_literals + +from django.conf import settings +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion +import ide.models.build +import ide.models.dependency +import ide.models.project +import ide.utils +import ide.utils.whatsnew + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('auth', '0007_alter_validators_add_error_messages'), + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ] + + operations = [ + migrations.CreateModel( + name='BuildResult', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('uuid', 
models.CharField(default=ide.models.build.make_uuid, max_length=36, validators=[django.core.validators.RegexValidator(b'^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$', message='Invalid UUID.')])), + ('state', models.IntegerField(choices=[(1, 'Pending'), (2, 'Failed'), (3, 'Succeeded')], default=1)), + ('started', models.DateTimeField(auto_now_add=True, db_index=True)), + ('finished', models.DateTimeField(blank=True, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='BuildSize', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('platform', models.CharField(max_length=20)), + ('total_size', models.IntegerField(blank=True, null=True)), + ('binary_size', models.IntegerField(blank=True, null=True)), + ('resource_size', models.IntegerField(blank=True, null=True)), + ('worker_size', models.IntegerField(blank=True, null=True)), + ('build', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sizes', to='ide.BuildResult')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Dependency', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=100)), + ('version', models.CharField(max_length=2000, validators=[ide.models.dependency.validate_dependency_version])), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Project', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=50)), + ('last_modified', models.DateTimeField(auto_now_add=True)), + ('project_type', models.CharField(choices=[(b'native', 'Pebble C SDK'), (b'simplyjs', 'Simply.js'), (b'pebblejs', 'Pebble.js (beta)'), (b'package', 'Pebble Package'), (b'rocky', 'Rocky.js')], default=b'native', max_length=10)), + ('sdk_version', models.CharField(choices=[(b'2', 'SDK 2 (obsolete)'), (b'3', 'SDK 4 beta')], default=b'2', max_length=6)), + ('app_uuid', models.CharField(blank=True, default=ide.utils.generate_half_uuid, max_length=36, null=True, validators=[django.core.validators.RegexValidator(b'^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-4[0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$', message='Invalid UUID.')])), + ('app_company_name', models.CharField(blank=True, max_length=100, null=True)), + ('app_short_name', models.CharField(blank=True, max_length=100, null=True)), + ('app_long_name', models.CharField(blank=True, max_length=100, null=True)), + ('app_version_label', models.CharField(blank=True, default=b'1.0', max_length=40, null=True, validators=[ide.models.project.version_validator])), + ('app_is_watchface', models.BooleanField(default=False)), + ('app_is_hidden', models.BooleanField(default=False)), + ('app_is_shown_on_communication', models.BooleanField(default=False)), + ('app_capabilities', models.CharField(blank=True, max_length=255, null=True)), + ('app_keys', models.TextField(default=b'{}')), + ('app_jshint', models.BooleanField(default=True)), + ('app_platforms', models.TextField(blank=True, max_length=255, null=True)), + ('app_modern_multi_js', models.BooleanField(default=True)), + ('app_keywords', models.TextField(default=b'[]')), + ('optimisation', models.CharField(choices=[(b'0', b'None'), (b'1', b'Limited'), (b's', b'Prefer smaller'), (b'2', b'Prefer faster'), (b'3', b'Aggressive (faster, bigger)')], 
default=b's', max_length=1)), + ('github_repo', models.CharField(blank=True, max_length=100, null=True)), + ('github_branch', models.CharField(blank=True, max_length=100, null=True)), + ('github_last_sync', models.DateTimeField(blank=True, null=True)), + ('github_last_commit', models.CharField(blank=True, max_length=40, null=True)), + ('github_hook_uuid', models.CharField(blank=True, max_length=36, null=True)), + ('github_hook_build', models.BooleanField(default=False)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ResourceFile', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('file_name', models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(b'^[/a-zA-Z0-9_(). -]+$', message='Invalid filename.')])), + ('kind', models.CharField(choices=[(b'raw', 'Binary blob'), (b'bitmap', 'Bitmap Image'), (b'png', '1-bit PNG'), (b'png-trans', '1-bit PNG with transparency'), (b'font', 'True-Type Font'), (b'pbi', '1-bit Pebble image')], max_length=9)), + ('is_menu_icon', models.BooleanField(default=False)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ResourceIdentifier', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('resource_id', models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(b'^\\w+$', message='Invalid resource ID.')])), + ('character_regex', models.CharField(blank=True, max_length=100, null=True)), + ('tracking', models.IntegerField(blank=True, null=True)), + ('compatibility', models.CharField(blank=True, max_length=10, null=True)), + ('target_platforms', models.CharField(blank=True, default=None, max_length=100, null=True)), + ('memory_format', models.CharField(blank=True, choices=[(b'Smallest', 'Smallest'), (b'SmallestPalette', 'Smallest Palette'), (b'1Bit', '1-bit'), (b'8Bit', '8-bit'), (b'1BitPalette', '1-bit Palette'), (b'2BitPalette', '2-bit Palette'), (b'4BitPalette', '4-bit Palette')], max_length=15, null=True)), + ('storage_format', models.CharField(blank=True, choices=[(b'pbi', '1 bit Pebble Image'), (b'png', 'PNG')], max_length=3, null=True)), + ('space_optimisation', models.CharField(blank=True, choices=[(b'storage', 'Storage'), (b'memory', 'Memory')], max_length=7, null=True)), + ('resource_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='identifiers', to='ide.ResourceFile')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ResourceVariant', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('tags', models.CommaSeparatedIntegerField(blank=True, max_length=50)), + ('is_legacy', models.BooleanField(default=False)), + ('resource_file', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='variants', to='ide.ResourceFile')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='SourceFile', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('last_modified', models.DateTimeField(auto_now=True, null=True)), + ('folded_lines', models.TextField(default=b'[]')), + ('file_name', models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(b'^[/a-zA-Z0-9_.-]+\\.(c|h|js|json)$', message='Invalid file name.')])), + ('target', models.CharField(choices=[(b'app', 'App'), 
(b'pkjs', 'PebbleKit JS'), (b'worker', 'Worker'), (b'public', 'Public Header File'), (b'common', 'Shared JS')], default=b'app', max_length=10)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='UserGithub', + fields=[ + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='github', serialize=False, to=settings.AUTH_USER_MODEL)), + ('token', models.CharField(blank=True, max_length=50, null=True)), + ('nonce', models.CharField(blank=True, max_length=36, null=True)), + ('username', models.CharField(blank=True, max_length=50, null=True)), + ('avatar', models.CharField(blank=True, max_length=255, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='UserSettings', + fields=[ + ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)), + ('autocomplete', models.IntegerField(choices=[(1, 'As-you-type'), (2, 'When pressing Ctrl-Space'), (3, 'Never')], default=1, verbose_name='Autocompletion')), + ('keybinds', models.CharField(choices=[(b'default', 'Standard'), (b'vim', 'vim-like'), (b'emacs', 'emacs-like')], default=b'default', max_length=20, verbose_name='Keybinds')), + ('theme', models.CharField(choices=[(b'cloudpebble', b'CloudPebble'), (b'monokai', b'Monokai (Sublime Text)'), (b'blackboard', b'Blackboard (TextMate)'), (b'eclipse', b'Eclipse'), (b'solarized light', b'Solarized (light)'), (b'solarized dark', b'Solarized (dark)')], default=b'cloudpebble', max_length=50, verbose_name='Theme')), + ('use_spaces', models.BooleanField(choices=[(True, 'Using spaces'), (False, 'Using tabs')], default=True, verbose_name='Indents')), + ('tab_width', models.PositiveSmallIntegerField(default=2, verbose_name='Tab width')), + ('accepted_terms', models.BooleanField(default=True)), + ('whats_new', models.PositiveIntegerField(default=ide.utils.whatsnew.count_things)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TemplateProject', + fields=[ + ('project_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='ide.Project')), + ('template_kind', models.IntegerField(choices=[(1, 'Template'), (2, 'SDK Demo')], db_index=True)), + ], + options={ + 'abstract': False, + }, + bases=('ide.project',), + ), + migrations.AddField( + model_name='sourcefile', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='source_files', to='ide.Project'), + ), + migrations.AddField( + model_name='resourcefile', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='resources', to='ide.Project'), + ), + migrations.AddField( + model_name='project', + name='owner', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), + ), + migrations.AddField( + model_name='project', + name='project_dependencies', + field=models.ManyToManyField(to='ide.Project'), + ), + migrations.AddField( + model_name='dependency', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='dependencies', to='ide.Project'), + ), + migrations.AddField( + model_name='buildresult', + name='project', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='builds', to='ide.Project'), + ), + migrations.AlterUniqueTogether( + 
name='sourcefile', + unique_together=set([('project', 'file_name', 'target')]), + ), + migrations.AlterUniqueTogether( + name='resourcevariant', + unique_together=set([('resource_file', 'tags')]), + ), + migrations.AlterUniqueTogether( + name='resourcefile', + unique_together=set([('project', 'file_name')]), + ), + migrations.AlterUniqueTogether( + name='dependency', + unique_together=set([('project', 'name')]), + ), + ] diff --git a/ide/migrations/0002_setup_test_bench.py b/ide/migrations/0002_setup_test_bench.py new file mode 100644 index 00000000..cb8f4f31 --- /dev/null +++ b/ide/migrations/0002_setup_test_bench.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Generated by Django 1.9.7 on 2016-08-05 18:03 +from __future__ import unicode_literals + +import django.core.validators +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('ide', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='Artefact', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('log_name', models.CharField(max_length=100)), + ('link_name', models.CharField(max_length=100)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ScreenshotFile', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('platform', models.CharField(choices=[(b'aplite', b'Aplite'), (b'basalt', b'Basalt'), (b'chalk', b'Chalk')], max_length=10)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ScreenshotSet', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(b'^[/a-zA-Z0-9_-]+$')])), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TestFile', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('last_modified', models.DateTimeField(auto_now=True, null=True)), + ('folded_lines', models.TextField(default=b'[]')), + ('file_name', models.CharField(max_length=100, validators=[django.core.validators.RegexValidator(b'^[/a-zA-Z0-9_-]+$')])), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='test_files', to='ide.Project')), + ], + options={ + 'ordering': ['file_name'], + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TestLog', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TestRun', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('platform', models.CharField(choices=[(b'aplite', b'Aplite'), (b'basalt', b'Basalt'), (b'chalk', b'Chalk')], max_length=10)), + ('date_completed', models.DateTimeField(blank=True, null=True)), + ('original_name', models.CharField(max_length=100)), + ('code', models.IntegerField(default=0)), + ], + options={ + 'ordering': ['original_name', '-session__date_added'], + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TestSession', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('date_added', models.DateTimeField(auto_now_add=True)), + 
('date_completed', models.DateTimeField(blank=True, null=True)), + ('kind', models.CharField(choices=[(b'batch', 'Batch Test'), (b'live', 'Live Test')], max_length=5)), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='test_sessions', to='ide.Project')), + ], + options={ + 'ordering': ['-date_added'], + 'abstract': False, + }, + ), + migrations.AddField( + model_name='testrun', + name='session', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='ide.TestSession'), + ), + migrations.AddField( + model_name='testrun', + name='test', + field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='runs', to='ide.TestFile'), + ), + migrations.AddField( + model_name='testlog', + name='test_run', + field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='logfile', to='ide.TestRun'), + ), + migrations.AddField( + model_name='screenshotset', + name='test', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='screenshot_sets', to='ide.TestFile'), + ), + migrations.AddField( + model_name='screenshotfile', + name='screenshot_set', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='ide.ScreenshotSet'), + ), + migrations.AddField( + model_name='artefact', + name='test_log', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='artefacts', to='ide.TestLog'), + ), + migrations.AlterUniqueTogether( + name='testrun', + unique_together=set([('test', 'session', 'platform')]), + ), + migrations.AlterUniqueTogether( + name='testfile', + unique_together=set([('project', 'file_name')]), + ), + migrations.AlterUniqueTogether( + name='screenshotset', + unique_together=set([('test', 'name')]), + ), + migrations.AlterUniqueTogether( + name='screenshotfile', + unique_together=set([('platform', 'screenshot_set')]), + ), + ] diff --git a/ide/migrations/0046_auto__add_dependency.py b/ide/migrations/0046_auto__add_dependency.py deleted file mode 100644 index 0edc3ccb..00000000 --- a/ide/migrations/0046_auto__add_dependency.py +++ /dev/null @@ -1,196 +0,0 @@ -# -*- coding: utf-8 -*- -from south.utils import datetime_utils as datetime -from south.db import db -from south.v2 import SchemaMigration -from django.db import models - - -class Migration(SchemaMigration): - - def forwards(self, orm): - # Adding model 'Dependency' - db.create_table(u'ide_dependency', ( - (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), - ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='dependencies', to=orm['ide.Project'])), - ('name', self.gf('django.db.models.fields.CharField')(max_length=100)), - ('version', self.gf('django.db.models.fields.CharField')(max_length=100)), - )) - db.send_create_signal('ide', ['Dependency']) - - # Adding unique constraint on 'Dependency', fields ['project', 'name'] - db.create_unique(u'ide_dependency', ['project_id', 'name']) - - # Adding field 'Project.app_keywords' - db.add_column(u'ide_project', 'app_keywords', - self.gf('django.db.models.fields.TextField')(default='[]'), - keep_default=False) - - - def backwards(self, orm): - # Removing unique constraint on 'Dependency', fields ['project', 'name'] - db.delete_unique(u'ide_dependency', ['project_id', 'name']) - - # Deleting model 'Dependency' - db.delete_table(u'ide_dependency') - - # Deleting field 'Project.app_keywords' - db.delete_column(u'ide_project', 
'app_keywords') - - - models = { - u'auth.group': { - 'Meta': {'object_name': 'Group'}, - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), - 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) - }, - u'auth.permission': { - 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, - 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) - }, - u'auth.user': { - 'Meta': {'object_name': 'User'}, - 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), - 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), - 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), - 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), - 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}), - 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) - }, - u'contenttypes.contenttype': { - 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, - 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) - }, - 'ide.buildresult': { - 'Meta': {'object_name': 'BuildResult'}, - 'finished': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'builds'", 'to': "orm['ide.Project']"}), - 'started': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}), - 'state': ('django.db.models.fields.IntegerField', [], {'default': '1'}), - 'uuid': ('django.db.models.fields.CharField', [], {'default': "'91bfe649-bbc6-404d-b9e2-5245b3ab41b9'", 'max_length': 
'36'}) - }, - 'ide.buildsize': { - 'Meta': {'object_name': 'BuildSize'}, - 'binary_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), - 'build': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sizes'", 'to': "orm['ide.BuildResult']"}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'platform': ('django.db.models.fields.CharField', [], {'max_length': '20'}), - 'resource_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), - 'total_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}), - 'worker_size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) - }, - 'ide.dependency': { - 'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'Dependency'}, - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': "orm['ide.Project']"}), - 'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}) - }, - 'ide.project': { - 'Meta': {'object_name': 'Project'}, - 'app_capabilities': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'app_company_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), - 'app_is_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'app_is_shown_on_communication': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'app_is_watchface': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'app_jshint': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'app_keys': ('django.db.models.fields.TextField', [], {'default': "'{}'"}), - 'app_keywords': ('django.db.models.fields.TextField', [], {'default': "'[]'"}), - 'app_long_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), - 'app_modern_multi_js': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'app_platforms': ('django.db.models.fields.TextField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'app_short_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), - 'app_uuid': ('django.db.models.fields.CharField', [], {'default': "'3d189e75-50d6-4e17-aa07-3ac28c399c3b'", 'max_length': '36', 'null': 'True', 'blank': 'True'}), - 'app_version_label': ('django.db.models.fields.CharField', [], {'default': "'1.0'", 'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'github_branch': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), - 'github_hook_build': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'github_hook_uuid': ('django.db.models.fields.CharField', [], {'max_length': '36', 'null': 'True', 'blank': 'True'}), - 'github_last_commit': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}), - 'github_last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), - 'github_repo': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), - 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}), - 'optimisation': ('django.db.models.fields.CharField', [], {'default': "'s'", 'max_length': '1'}), - 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), - 'project_type': ('django.db.models.fields.CharField', [], {'default': "'native'", 'max_length': '10'}), - 'sdk_version': ('django.db.models.fields.CharField', [], {'default': "'2'", 'max_length': '6'}) - }, - 'ide.resourcefile': { - 'Meta': {'unique_together': "(('project', 'file_name'),)", 'object_name': 'ResourceFile'}, - 'file_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'is_menu_icon': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'kind': ('django.db.models.fields.CharField', [], {'max_length': '9'}), - 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resources'", 'to': "orm['ide.Project']"}) - }, - 'ide.resourceidentifier': { - 'Meta': {'object_name': 'ResourceIdentifier'}, - 'character_regex': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), - 'compatibility': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'memory_format': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}), - 'resource_file': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'identifiers'", 'to': "orm['ide.ResourceFile']"}), - 'resource_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'space_optimisation': ('django.db.models.fields.CharField', [], {'max_length': '7', 'null': 'True', 'blank': 'True'}), - 'storage_format': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}), - 'target_platforms': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '30', 'null': 'True', 'blank': 'True'}), - 'tracking': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}) - }, - 'ide.resourcevariant': { - 'Meta': {'unique_together': "(('resource_file', 'tags'),)", 'object_name': 'ResourceVariant'}, - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'is_legacy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), - 'resource_file': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'variants'", 'to': "orm['ide.ResourceFile']"}), - 'tags': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '50', 'blank': 'True'}) - }, - 'ide.sourcefile': { - 'Meta': {'unique_together': "(('project', 'file_name'),)", 'object_name': 'SourceFile'}, - 'file_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), - 'folded_lines': ('django.db.models.fields.TextField', [], {'default': "'[]'"}), - u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), - 'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}), - 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'source_files'", 'to': "orm['ide.Project']"}), - 'target': ('django.db.models.fields.CharField', [], 
{'default': "'app'", 'max_length': '10'}) - }, - 'ide.templateproject': { - 'Meta': {'object_name': 'TemplateProject', '_ormbases': ['ide.Project']}, - u'project_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['ide.Project']", 'unique': 'True', 'primary_key': 'True'}), - 'template_kind': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}) - }, - 'ide.usergithub': { - 'Meta': {'object_name': 'UserGithub'}, - 'avatar': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), - 'nonce': ('django.db.models.fields.CharField', [], {'max_length': '36', 'null': 'True', 'blank': 'True'}), - 'token': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}), - 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'github'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['auth.User']"}), - 'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}) - }, - 'ide.usersettings': { - 'Meta': {'object_name': 'UserSettings'}, - 'accepted_terms': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'autocomplete': ('django.db.models.fields.IntegerField', [], {'default': '1'}), - 'keybinds': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '20'}), - 'tab_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}), - 'theme': ('django.db.models.fields.CharField', [], {'default': "'cloudpebble'", 'max_length': '50'}), - 'use_spaces': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), - 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}), - 'whats_new': ('django.db.models.fields.PositiveIntegerField', [], {'default': '21'}) - } - } - - complete_apps = ['ide'] \ No newline at end of file diff --git a/ide/models/__init__.py b/ide/models/__init__.py index af9e197d..e07e98ef 100644 --- a/ide/models/__init__.py +++ b/ide/models/__init__.py @@ -4,4 +4,5 @@ from ide.models.files import * from ide.models.project import * from ide.models.user import * +from ide.models.monkey import * from ide.models.dependency import * diff --git a/ide/models/build.py b/ide/models/build.py index a93bd3da..ad7d88dd 100644 --- a/ide/models/build.py +++ b/ide/models/build.py @@ -15,6 +15,10 @@ __author__ = 'katharine' +def make_uuid(): + return str(uuid.uuid4()) + + class BuildResult(IdeModel): STATE_WAITING = 1 @@ -36,7 +40,7 @@ class BuildResult(IdeModel): DEBUG_WORKER = 1 project = models.ForeignKey(Project, related_name='builds') - uuid = models.CharField(max_length=36, default=lambda: str(uuid.uuid4()), validators=regexes.validator('uuid', _('Invalid UUID.'))) + uuid = models.CharField(max_length=36, default=make_uuid, validators=regexes.validator('uuid', _('Invalid UUID.'))) state = models.IntegerField(choices=STATE_CHOICES, default=STATE_WAITING) started = models.DateTimeField(auto_now_add=True, db_index=True) finished = models.DateTimeField(blank=True, null=True) @@ -101,6 +105,12 @@ def read_build_log(self): else: return s3.read_file('builds', self.build_log) + def copy_pbw_to_path(self, path): + if not settings.AWS_ENABLED: + shutil.copy(self.local_filename, self.pbw) + else: + s3.read_file_to_filesystem('builds', self.pbw, path) + def save_debug_info(self, json_info, platform, kind): text = json.dumps(json_info) if not settings.AWS_ENABLED: diff --git 
a/ide/models/files.py b/ide/models/files.py index dabf0216..838a4a36 100644 --- a/ide/models/files.py +++ b/ide/models/files.py @@ -1,5 +1,5 @@ -import os import json +import os import logging from collections import OrderedDict @@ -9,6 +9,7 @@ from django.core.validators import RegexValidator, ValidationError from django.utils.translation import ugettext_lazy as _ +from ide.models.meta import IdeModel from ide.models.s3file import S3File from ide.models.textfile import TextFile from ide.models.meta import IdeModel @@ -132,6 +133,10 @@ def s3_id(self): def folder(self): return 'resources' if self.is_legacy else 'resources/variants' + def save_project(self): + self.resource_file.project.last_modified = now() + self.resource_file.project.save() + def get_tags(self): return [int(tag) for tag in self.tags.split(",") if tag] @@ -304,5 +309,5 @@ def project_dir(self): except KeyError: Exception("Invalid file type in project") - class Meta(IdeModel.Meta): + class Meta(TextFile.Meta): unique_together = (('project', 'file_name', 'target'),) diff --git a/ide/models/monkey.py b/ide/models/monkey.py new file mode 100644 index 00000000..86b4ca28 --- /dev/null +++ b/ide/models/monkey.py @@ -0,0 +1,286 @@ +import os +from io import BytesIO +from django.core.exceptions import ObjectDoesNotExist +from django.core.validators import RegexValidator +from django.db import models +from django.db import transaction +from django.utils.timezone import now +from django.utils.translation import ugettext as _ + +from django.core.urlresolvers import reverse +from ide.models.textfile import TextFile +from ide.models.s3file import S3File +from ide.models.meta import IdeModel +from ide.utils.image_correction import uncorrect + +from utils.monkeyscript_helpers import frame_test_file + +__author__ = 'joe' + + +class TestCode: + ERROR = -2 + FAILED = -1 + PENDING = 0 + PASSED = 1 + + +class TestSession(IdeModel): + """ A TestSession is owned by a project and contains a set of test runs. It represents a time that a set of N>=1 tests + were run as one job.""" + date_added = models.DateTimeField(auto_now_add=True) + date_completed = models.DateTimeField(null=True, blank=True) + project = models.ForeignKey('Project', related_name='test_sessions') + SESSION_KINDS = ( + ('batch', _('Batch Test')), + ('live', _('Live Test')) + ) + kind = models.CharField(max_length=5, choices=SESSION_KINDS) + + @property + def tests(self): + return {run.test for run in self.runs.all()} + + @property + def platforms(self): + return {run.platform for run in self.runs.all()} + + def make_callback_url(self, request, token): + """ + Return the URL which orchestrator needs in order to notify + CloudPebble that the test session is complete + :param request: A Django request object + :param token: The access token + """ + location = request.build_absolute_uri(reverse('ide:notify_test_session', args=[self.project.pk, self.id])) + # TODO: simple concatenation might not be the best thing here + return location + ("?token=%s" % token if token else "") + + @staticmethod + def setup_session(project, test_ids, platforms, kind): + """ Make a test session which has a test run for each platform for each test ID + :param project: Project which the session is for + :param test_ids: List of test IDs (integers) + :param platforms: An iterable of string platform names (e.g. ['basalt', 'chalk']) + :param kind: Either 'live' or 'batch'. + :return: The newly created session object. 
+ """ + if test_ids is not None: + tests = TestFile.objects.filter(project=project, id__in=test_ids) + else: + tests = project.test_files.all() + + assert kind in dict(TestSession.SESSION_KINDS).keys() + + with transaction.atomic(): + # Create a test session + session = TestSession.objects.create(project=project, kind=kind) + session.save() + runs = [] + + # Then make a test run for every test + for platform in platforms: + assert platform in [x[0] for x in TestRun.PLATFORM_CHOICES] + for test in tests: + run = TestRun.objects.create(session=session, test=test, platform=platform, + original_name=test.file_name) + run.save() + runs.append(run) + return session + + def fail(self, message="An unknown error occurred", date=None): + """ Mark all pending tests as failed. + :param message: The log message for the failure + :param date: Specify the completion date, defaults to now. + """ + with transaction.atomic(): + for run in self.runs.filter(code=0): + run.code = TestCode.ERROR + run.log = message + run.date_completed = date if date is not None else now() + run.save() + + class Meta(IdeModel.Meta): + ordering = ['-date_added'] + + +class TestLog(S3File): + """ A TestLog is a text file owned by a TestRun. It stores the console output from an AT process. """ + folder = 'tests/logs' + test_run = models.OneToOneField('TestRun', related_name='logfile') + + +class Artefact(IdeModel): + log_name = models.CharField(max_length=100) + link_name = models.CharField(max_length=100) + test_log = models.ForeignKey('TestLog', related_name='artefacts') + + +class TestRun(IdeModel): + """ A TestRun is owned by a TestSession and links to a TestFile. It stores the result code and date information for + a particular time that a single test was run. """ + session = models.ForeignKey('TestSession', related_name='runs') + test = models.ForeignKey('TestFile', related_name='runs', null=True, on_delete=models.SET_NULL) + PLATFORM_CHOICES = ( + ('aplite', 'Aplite'), + ('basalt', 'Basalt'), + ('chalk', 'Chalk') + ) + platform = models.CharField(max_length=10, choices=PLATFORM_CHOICES) + + date_completed = models.DateTimeField(blank=True, null=True) + + original_name = models.CharField(max_length=100) + code = models.IntegerField(default=TestCode.PENDING) + + @property + def artefacts(self): + if self.has_log: + return [[a.log_name, a.link_name] for a in Artefact.objects.filter(test_log=self.logfile)] + + @artefacts.setter + def artefacts(self, value): + if self.has_log: + Artefact.objects.filter(test_log=self.logfile).delete() + for artefact in value: + Artefact.objects.create(test_log=self.logfile, log_name=artefact[0], link_name=artefact[1]) + + @property + def log(self): + if self.has_log: + return self.logfile.get_contents() + else: + return None + + @log.setter + def log(self, value): + with transaction.atomic(): + if self.has_log: + self.logfile.delete() + logfile = TestLog.objects.create(test_run=self) + logfile.save() + logfile.save_text(value) + + @property + def has_log(self): + try: + return self.logfile is not None + except TestLog.DoesNotExist: + return False + + @property + def has_test(self): + return self.test is not None + + @property + def name(self): + if self.test is not None: + return self.test.file_name + else: + return self.original_name + + class Meta(IdeModel.Meta): + unique_together = ('test', 'session', 'platform') + ordering = ['original_name', '-session__date_added'] + + +class TestFile(TextFile): + file_name = models.CharField(max_length=100, validators=[RegexValidator(r"^[/a-zA-Z0-9_-]+$")]) 
+ project = models.ForeignKey('Project', related_name='test_files') + folder = 'tests/scripts' + target = 'test' + + def copy_screenshots_to_directory(self, directory): + for screenshot_set in self.get_screenshot_sets(): + screenshot_set.copy_to_directory(directory) + + def copy_test_to_path(self, path, frame_test=True): + self.copy_to_path(path) + if frame_test: + with open(path, 'r+') as f: + full_test = frame_test_file(f, self.file_name, self.project.app_short_name, self.project.app_uuid) + with open(path, 'w') as f: + f.write(full_test) + + @property + def project_path(self): + return 'integration_tests/%s' % self.file_name + + @property + def latest_code(self): + try: + return self.runs.latest('session__date_added').code + except ObjectDoesNotExist: + return None + + def get_screenshot_sets(self): + return ScreenshotSet.objects.filter(test=self) + + class Meta(TextFile.Meta): + unique_together = (('project', 'file_name'),) + ordering = ['file_name'] + + +class ScreenshotSet(IdeModel): + test = models.ForeignKey('TestFile', related_name='screenshot_sets') + name = models.CharField(max_length=100, validators=[RegexValidator(r"^[/a-zA-Z0-9_-]+$")]) + + def save(self, *args, **kwargs): + self.clean_fields() + self.test.project.last_modified = now() + self.test.project.save() + super(ScreenshotSet, self).save(*args, **kwargs) + + def copy_to_directory(self, directory): + screenshots = ScreenshotFile.objects.filter(screenshot_set=self) + for screenshot in screenshots: + if screenshot.platform == 'aplite': + platform = 'tintin' + size = '144x168' + elif screenshot.platform == 'basalt': + platform = 'snowy' + size = '144x168' + elif screenshot.platform == 'chalk': + platform = 'snowy' + size = '180x180' + else: + raise ValueError("Invalid platform") + file_dir = os.path.join(directory, 'english', platform, size) + file_path = os.path.join(file_dir, self.name + '.png') + if not os.path.isdir(file_dir): + os.makedirs(file_dir) + screenshot.copy_to_path(file_path) + + class Meta(IdeModel.Meta): + unique_together = (('test', 'name'),) + + +class ScreenshotFile(S3File): + folder = 'tests/screenshots' + screenshot_set = models.ForeignKey('ScreenshotSet', related_name='files') + PLATFORMS = ( + ('aplite', 'Aplite'), + ('basalt', 'Basalt'), + ('chalk', 'Chalk') + ) + platform = models.CharField(max_length=10, choices=PLATFORMS) + + @property + def project(self): + return self.screenshot_set.test.project + + def save(self, *args, **kwargs): + self.full_clean() + self.screenshot_set.save() + super(ScreenshotFile, self).save(*args, **kwargs) + + def save_file(self, stream, file_size=0): + with BytesIO() as buff: + uncorrect(stream, buff) + buff.seek(0) + data = buff.read() + super(ScreenshotFile, self).save_string(data) + + class Meta(S3File.Meta): + unique_together = (('platform', 'screenshot_set'),) + diff --git a/ide/models/project.py b/ide/models/project.py index 63b58f1b..3908b01a 100644 --- a/ide/models/project.py +++ b/ide/models/project.py @@ -90,6 +90,12 @@ def __init__(self, *args, **kwargs): if self.sdk_version == '2': self.app_modern_multi_js = False + def get_last_built_platforms(self): + try: + return self.last_build.get_sizes().keys() + except AttributeError: + return [] + def set_dependencies(self, dependencies): """ Set the project's dependencies from a dictionary. 
:param dependencies: A dictionary of dependency->version @@ -234,6 +240,7 @@ def clean(self): last_build = property(get_last_build) menu_icon = property(get_menu_icon) + last_built_platforms = property(get_last_built_platforms) def __unicode__(self): return u"%s" % self.name @@ -252,7 +259,8 @@ class TemplateProject(Project): def copy_into_project(self, project): uuid_string = ", ".join(["0x%02X" % ord(b) for b in uuid.uuid4().bytes]) for resource in self.resources.all(): - new_resource = ResourceFile.objects.create(project=project, file_name=resource.file_name, kind=resource.kind) + new_resource = ResourceFile.objects.create(project=project, file_name=resource.file_name, + kind=resource.kind) for variant in resource.variants.all(): new_variant = ResourceVariant.objects.create(resource_file=new_resource, tags=variant.tags) new_variant.save_string(variant.get_contents()) diff --git a/ide/models/user.py b/ide/models/user.py index 1d5e0a70..1a88f73f 100644 --- a/ide/models/user.py +++ b/ide/models/user.py @@ -2,10 +2,12 @@ from django.db import models from django.utils.translation import ugettext_lazy as _ from django.utils.translation import pgettext_lazy +from django.conf import settings from ide.models.meta import IdeModel from ide.utils.whatsnew import count_things + __author__ = 'katharine' @@ -60,7 +62,7 @@ def __unicode__(self): whats_new = models.PositiveIntegerField(default=count_things) User.settings = property(lambda self: UserSettings.objects.get_or_create(user=self)[0]) - +User.is_testbench_user = property(lambda self: self.email in settings.TEST_BENCH_USERS) class UserGithub(IdeModel): user = models.OneToOneField(User, primary_key=True, related_name='github') diff --git a/ide/south_migrations/0001_initial.py b/ide/south_migrations/0001_initial.py new file mode 100644 index 00000000..d5e6ec78 --- /dev/null +++ b/ide/south_migrations/0001_initial.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +import datetime +from south.db import db +from south.v2 import SchemaMigration +from django.db import models + + +class Migration(SchemaMigration): + + def forwards(self, orm): + # Adding model 'Project' + db.create_table(u'ide_project', ( + (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('owner', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])), + ('name', self.gf('django.db.models.fields.CharField')(max_length=50)), + ('last_modified', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)), + ('version_def_name', self.gf('django.db.models.fields.CharField')(default='APP_RESOURCES', max_length=50)), + )) + db.send_create_signal(u'ide', ['Project']) + + # Adding unique constraint on 'Project', fields ['owner', 'name'] + db.create_unique(u'ide_project', ['owner_id', 'name']) + + # Adding model 'TemplateProject' + db.create_table(u'ide_templateproject', ( + (u'project_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['ide.Project'], unique=True, primary_key=True)), + ('template_kind', self.gf('django.db.models.fields.IntegerField')(db_index=True)), + )) + db.send_create_signal(u'ide', ['TemplateProject']) + + # Adding model 'BuildResult' + db.create_table(u'ide_buildresult', ( + (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='builds', to=orm['ide.Project'])), + ('uuid', self.gf('django.db.models.fields.CharField')(default='8277f892d4d84a69ba21c3989a02c61c', max_length=32)), + ('state', 
self.gf('django.db.models.fields.IntegerField')(default=1)), + ('started', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)), + ('finished', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)), + )) + db.send_create_signal(u'ide', ['BuildResult']) + + # Adding model 'ResourceFile' + db.create_table(u'ide_resourcefile', ( + (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='resources', to=orm['ide.Project'])), + ('file_name', self.gf('django.db.models.fields.CharField')(max_length=100)), + ('kind', self.gf('django.db.models.fields.CharField')(max_length=9)), + )) + db.send_create_signal(u'ide', ['ResourceFile']) + + # Adding unique constraint on 'ResourceFile', fields ['project', 'file_name'] + db.create_unique(u'ide_resourcefile', ['project_id', 'file_name']) + + # Adding model 'ResourceIdentifier' + db.create_table(u'ide_resourceidentifier', ( + (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('resource_file', self.gf('django.db.models.fields.related.ForeignKey')(related_name='identifiers', to=orm['ide.ResourceFile'])), + ('resource_id', self.gf('django.db.models.fields.CharField')(max_length=100)), + ('character_regex', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)), + )) + db.send_create_signal(u'ide', ['ResourceIdentifier']) + + # Adding unique constraint on 'ResourceIdentifier', fields ['resource_file', 'resource_id'] + db.create_unique(u'ide_resourceidentifier', ['resource_file_id', 'resource_id']) + + # Adding model 'SourceFile' + db.create_table(u'ide_sourcefile', ( + (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), + ('project', self.gf('django.db.models.fields.related.ForeignKey')(related_name='source_files', to=orm['ide.Project'])), + ('file_name', self.gf('django.db.models.fields.CharField')(max_length=100)), + )) + db.send_create_signal(u'ide', ['SourceFile']) + + # Adding unique constraint on 'SourceFile', fields ['project', 'file_name'] + db.create_unique(u'ide_sourcefile', ['project_id', 'file_name']) + + + def backwards(self, orm): + # Removing unique constraint on 'SourceFile', fields ['project', 'file_name'] + db.delete_unique(u'ide_sourcefile', ['project_id', 'file_name']) + + # Removing unique constraint on 'ResourceIdentifier', fields ['resource_file', 'resource_id'] + db.delete_unique(u'ide_resourceidentifier', ['resource_file_id', 'resource_id']) + + # Removing unique constraint on 'ResourceFile', fields ['project', 'file_name'] + db.delete_unique(u'ide_resourcefile', ['project_id', 'file_name']) + + # Removing unique constraint on 'Project', fields ['owner', 'name'] + db.delete_unique(u'ide_project', ['owner_id', 'name']) + + # Deleting model 'Project' + db.delete_table(u'ide_project') + + # Deleting model 'TemplateProject' + db.delete_table(u'ide_templateproject') + + # Deleting model 'BuildResult' + db.delete_table(u'ide_buildresult') + + # Deleting model 'ResourceFile' + db.delete_table(u'ide_resourcefile') + + # Deleting model 'ResourceIdentifier' + db.delete_table(u'ide_resourceidentifier') + + # Deleting model 'SourceFile' + db.delete_table(u'ide_sourcefile') + + + models = { + u'auth.group': { + 'Meta': {'object_name': 'Group'}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), + 
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) + }, + u'auth.permission': { + 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, + 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) + }, + u'auth.user': { + 'Meta': {'object_name': 'User'}, + 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), + 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), + 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), + 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), + 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), + 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), + 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), + 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) + }, + u'contenttypes.contenttype': { + 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, + 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + u'ide.buildresult': { + 'Meta': {'object_name': 'BuildResult'}, + 'finished': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'builds'", 'to': u"orm['ide.Project']"}), + 'started': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}), + 'state': ('django.db.models.fields.IntegerField', [], {'default': '1'}), + 'uuid': ('django.db.models.fields.CharField', [], {'default': "'7d2901ebedec4f708e706c6424a71e73'", 'max_length': '32'}) + }, + u'ide.project': { + 'Meta': {'unique_together': "(('owner', 'name'),)", 'object_name': 'Project'}, + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), + 'name': 
('django.db.models.fields.CharField', [], {'max_length': '50'}), + 'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}), + 'version_def_name': ('django.db.models.fields.CharField', [], {'default': "'APP_RESOURCES'", 'max_length': '50'}) + }, + u'ide.resourcefile': { + 'Meta': {'unique_together': "(('project', 'file_name'),)", 'object_name': 'ResourceFile'}, + 'file_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'kind': ('django.db.models.fields.CharField', [], {'max_length': '9'}), + 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'resources'", 'to': u"orm['ide.Project']"}) + }, + u'ide.resourceidentifier': { + 'Meta': {'unique_together': "(('resource_file', 'resource_id'),)", 'object_name': 'ResourceIdentifier'}, + 'character_regex': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'resource_file': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'identifiers'", 'to': u"orm['ide.ResourceFile']"}), + 'resource_id': ('django.db.models.fields.CharField', [], {'max_length': '100'}) + }, + u'ide.sourcefile': { + 'Meta': {'unique_together': "(('project', 'file_name'),)", 'object_name': 'SourceFile'}, + 'file_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), + u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), + 'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'source_files'", 'to': u"orm['ide.Project']"}) + }, + u'ide.templateproject': { + 'Meta': {'object_name': 'TemplateProject', '_ormbases': [u'ide.Project']}, + u'project_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['ide.Project']", 'unique': 'True', 'primary_key': 'True'}), + 'template_kind': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}) + } + } + + complete_apps = ['ide'] \ No newline at end of file diff --git a/ide/migrations/0002_auto__add_usersettings.py b/ide/south_migrations/0002_auto__add_usersettings.py similarity index 100% rename from ide/migrations/0002_auto__add_usersettings.py rename to ide/south_migrations/0002_auto__add_usersettings.py diff --git a/ide/migrations/0003_auto__add_field_resourceidentifier_tracking.py b/ide/south_migrations/0003_auto__add_field_resourceidentifier_tracking.py similarity index 100% rename from ide/migrations/0003_auto__add_field_resourceidentifier_tracking.py rename to ide/south_migrations/0003_auto__add_field_resourceidentifier_tracking.py diff --git a/ide/migrations/0004_auto__add_usergithub.py b/ide/south_migrations/0004_auto__add_usergithub.py similarity index 100% rename from ide/migrations/0004_auto__add_usergithub.py rename to ide/south_migrations/0004_auto__add_usergithub.py diff --git a/ide/migrations/0005_auto__add_project_github_fields.py b/ide/south_migrations/0005_auto__add_project_github_fields.py similarity index 100% rename from ide/migrations/0005_auto__add_project_github_fields.py rename to ide/south_migrations/0005_auto__add_project_github_fields.py diff --git a/ide/migrations/0006_auto__add_field_project_github_hook.py b/ide/south_migrations/0006_auto__add_field_project_github_hook.py similarity index 100% rename from ide/migrations/0006_auto__add_field_project_github_hook.py rename to 
ide/south_migrations/0006_auto__add_field_project_github_hook.py diff --git a/ide/migrations/0007_auto__add_field_project_optimisation.py b/ide/south_migrations/0007_auto__add_field_project_optimisation.py similarity index 100% rename from ide/migrations/0007_auto__add_field_project_optimisation.py rename to ide/south_migrations/0007_auto__add_field_project_optimisation.py diff --git a/ide/migrations/0008_auto__add_field_buildresult_sizes.py b/ide/south_migrations/0008_auto__add_field_buildresult_sizes.py similarity index 100% rename from ide/migrations/0008_auto__add_field_buildresult_sizes.py rename to ide/south_migrations/0008_auto__add_field_buildresult_sizes.py diff --git a/ide/migrations/0009_auto__add_field_project_sdk_version__add_field_project_app_uuid__add_f.py b/ide/south_migrations/0009_auto__add_field_project_sdk_version__add_field_project_app_uuid__add_f.py similarity index 100% rename from ide/migrations/0009_auto__add_field_project_sdk_version__add_field_project_app_uuid__add_f.py rename to ide/south_migrations/0009_auto__add_field_project_sdk_version__add_field_project_app_uuid__add_f.py diff --git a/ide/migrations/0010_auto__add_field_project_app_keys.py b/ide/south_migrations/0010_auto__add_field_project_app_keys.py similarity index 100% rename from ide/migrations/0010_auto__add_field_project_app_keys.py rename to ide/south_migrations/0010_auto__add_field_project_app_keys.py diff --git a/ide/migrations/0011_auto__add_field_resourcefile_is_menu_icon.py b/ide/south_migrations/0011_auto__add_field_resourcefile_is_menu_icon.py similarity index 100% rename from ide/migrations/0011_auto__add_field_resourcefile_is_menu_icon.py rename to ide/south_migrations/0011_auto__add_field_resourcefile_is_menu_icon.py diff --git a/ide/migrations/0012_auto__add_field_usersettings_accepted_terms.py b/ide/south_migrations/0012_auto__add_field_usersettings_accepted_terms.py similarity index 100% rename from ide/migrations/0012_auto__add_field_usersettings_accepted_terms.py rename to ide/south_migrations/0012_auto__add_field_usersettings_accepted_terms.py diff --git a/ide/migrations/0013_auto__chg_field_buildresult_uuid__chg_field_usergithub_nonce__chg_fiel.py b/ide/south_migrations/0013_auto__chg_field_buildresult_uuid__chg_field_usergithub_nonce__chg_fiel.py similarity index 100% rename from ide/migrations/0013_auto__chg_field_buildresult_uuid__chg_field_usergithub_nonce__chg_fiel.py rename to ide/south_migrations/0013_auto__chg_field_buildresult_uuid__chg_field_usergithub_nonce__chg_fiel.py diff --git a/ide/migrations/0014_auto__add_field_project_app_jshint.py b/ide/south_migrations/0014_auto__add_field_project_app_jshint.py similarity index 100% rename from ide/migrations/0014_auto__add_field_project_app_jshint.py rename to ide/south_migrations/0014_auto__add_field_project_app_jshint.py diff --git a/ide/migrations/0015_auto__add_field_project_github_branch.py b/ide/south_migrations/0015_auto__add_field_project_github_branch.py similarity index 100% rename from ide/migrations/0015_auto__add_field_project_github_branch.py rename to ide/south_migrations/0015_auto__add_field_project_github_branch.py diff --git a/ide/migrations/0016_auto__add_field_usersettings_use_spaces__add_field_usersettings_tab_wi.py b/ide/south_migrations/0016_auto__add_field_usersettings_use_spaces__add_field_usersettings_tab_wi.py similarity index 100% rename from ide/migrations/0016_auto__add_field_usersettings_use_spaces__add_field_usersettings_tab_wi.py rename to 
ide/south_migrations/0016_auto__add_field_usersettings_use_spaces__add_field_usersettings_tab_wi.py diff --git a/ide/migrations/0017_auto__add_field_sourcefile_last_modified.py b/ide/south_migrations/0017_auto__add_field_sourcefile_last_modified.py similarity index 100% rename from ide/migrations/0017_auto__add_field_sourcefile_last_modified.py rename to ide/south_migrations/0017_auto__add_field_sourcefile_last_modified.py diff --git a/ide/migrations/0018_auto__add_field_project_project_type.py b/ide/south_migrations/0018_auto__add_field_project_project_type.py similarity index 100% rename from ide/migrations/0018_auto__add_field_project_project_type.py rename to ide/south_migrations/0018_auto__add_field_project_project_type.py diff --git a/ide/migrations/0019_auto__add_field_usersettings_whats_new.py b/ide/south_migrations/0019_auto__add_field_usersettings_whats_new.py similarity index 100% rename from ide/migrations/0019_auto__add_field_usersettings_whats_new.py rename to ide/south_migrations/0019_auto__add_field_usersettings_whats_new.py diff --git a/ide/migrations/0020_auto__del_unique_project_owner_name.py b/ide/south_migrations/0020_auto__del_unique_project_owner_name.py similarity index 100% rename from ide/migrations/0020_auto__del_unique_project_owner_name.py rename to ide/south_migrations/0020_auto__del_unique_project_owner_name.py diff --git a/ide/migrations/0021_auto__del_field_project_sdk_version__del_field_project_version_def_nam.py b/ide/south_migrations/0021_auto__del_field_project_sdk_version__del_field_project_version_def_nam.py similarity index 100% rename from ide/migrations/0021_auto__del_field_project_sdk_version__del_field_project_version_def_nam.py rename to ide/south_migrations/0021_auto__del_field_project_sdk_version__del_field_project_version_def_nam.py diff --git a/ide/migrations/0022_auto__add_field_sourcefile_target.py b/ide/south_migrations/0022_auto__add_field_sourcefile_target.py similarity index 100% rename from ide/migrations/0022_auto__add_field_sourcefile_target.py rename to ide/south_migrations/0022_auto__add_field_sourcefile_target.py diff --git a/ide/migrations/0023_auto__add_field_buildresult_worker_size.py b/ide/south_migrations/0023_auto__add_field_buildresult_worker_size.py similarity index 100% rename from ide/migrations/0023_auto__add_field_buildresult_worker_size.py rename to ide/south_migrations/0023_auto__add_field_buildresult_worker_size.py diff --git a/ide/migrations/0024_auto__add_field_resourceidentifier_compatibility.py b/ide/south_migrations/0024_auto__add_field_resourceidentifier_compatibility.py similarity index 100% rename from ide/migrations/0024_auto__add_field_resourceidentifier_compatibility.py rename to ide/south_migrations/0024_auto__add_field_resourceidentifier_compatibility.py diff --git a/ide/migrations/0025_auto__add_field_project_sdk_version.py b/ide/south_migrations/0025_auto__add_field_project_sdk_version.py similarity index 100% rename from ide/migrations/0025_auto__add_field_project_sdk_version.py rename to ide/south_migrations/0025_auto__add_field_project_sdk_version.py diff --git a/ide/migrations/0026_auto__add_buildsize.py b/ide/south_migrations/0026_auto__add_buildsize.py similarity index 100% rename from ide/migrations/0026_auto__add_buildsize.py rename to ide/south_migrations/0026_auto__add_buildsize.py diff --git a/ide/migrations/0027_migrate_build_sizes_to_table.py b/ide/south_migrations/0027_migrate_build_sizes_to_table.py similarity index 100% rename from ide/migrations/0027_migrate_build_sizes_to_table.py 
rename to ide/south_migrations/0027_migrate_build_sizes_to_table.py diff --git a/ide/migrations/0028_auto__del_field_buildresult_worker_size__del_field_buildresult_binary_.py b/ide/south_migrations/0028_auto__del_field_buildresult_worker_size__del_field_buildresult_binary_.py similarity index 100% rename from ide/migrations/0028_auto__del_field_buildresult_worker_size__del_field_buildresult_binary_.py rename to ide/south_migrations/0028_auto__del_field_buildresult_worker_size__del_field_buildresult_binary_.py diff --git a/ide/migrations/0029_auto__add_field_project_app_platforms.py b/ide/south_migrations/0029_auto__add_field_project_app_platforms.py similarity index 100% rename from ide/migrations/0029_auto__add_field_project_app_platforms.py rename to ide/south_migrations/0029_auto__add_field_project_app_platforms.py diff --git a/ide/migrations/0030_auto__add_resourcevariant__add_unique_resourcevariant_resource_file_va.py b/ide/south_migrations/0030_auto__add_resourcevariant__add_unique_resourcevariant_resource_file_va.py similarity index 100% rename from ide/migrations/0030_auto__add_resourcevariant__add_unique_resourcevariant_resource_file_va.py rename to ide/south_migrations/0030_auto__add_resourcevariant__add_unique_resourcevariant_resource_file_va.py diff --git a/ide/migrations/0031_create_resource_variants.py b/ide/south_migrations/0031_create_resource_variants.py similarity index 100% rename from ide/migrations/0031_create_resource_variants.py rename to ide/south_migrations/0031_create_resource_variants.py diff --git a/ide/migrations/0032_auto__del_field_project_app_version_code.py b/ide/south_migrations/0032_auto__del_field_project_app_version_code.py similarity index 100% rename from ide/migrations/0032_auto__del_field_project_app_version_code.py rename to ide/south_migrations/0032_auto__del_field_project_app_version_code.py diff --git a/ide/migrations/0033_migrate_pebblejs_to_sdk3.py b/ide/south_migrations/0033_migrate_pebblejs_to_sdk3.py similarity index 100% rename from ide/migrations/0033_migrate_pebblejs_to_sdk3.py rename to ide/south_migrations/0033_migrate_pebblejs_to_sdk3.py diff --git a/ide/migrations/0034_auto__add_field_sourcefile_folded_lines.py b/ide/south_migrations/0034_auto__add_field_sourcefile_folded_lines.py similarity index 100% rename from ide/migrations/0034_auto__add_field_sourcefile_folded_lines.py rename to ide/south_migrations/0034_auto__add_field_sourcefile_folded_lines.py diff --git a/ide/migrations/0035_auto__add_field_project_app_is_hidden__add_field_project_app_is_shown_.py b/ide/south_migrations/0035_auto__add_field_project_app_is_hidden__add_field_project_app_is_shown_.py similarity index 100% rename from ide/migrations/0035_auto__add_field_project_app_is_hidden__add_field_project_app_is_shown_.py rename to ide/south_migrations/0035_auto__add_field_project_app_is_hidden__add_field_project_app_is_shown_.py diff --git a/ide/migrations/0035_auto__add_field_resourcefile_target_platforms.py b/ide/south_migrations/0035_auto__add_field_resourcefile_target_platforms.py similarity index 100% rename from ide/migrations/0035_auto__add_field_resourcefile_target_platforms.py rename to ide/south_migrations/0035_auto__add_field_resourcefile_target_platforms.py diff --git a/ide/migrations/0036_auto__add_field_resourcevariant_tags__chg_field_resourcevariant_varian.py b/ide/south_migrations/0036_auto__add_field_resourcevariant_tags__chg_field_resourcevariant_varian.py similarity index 100% rename from 
ide/migrations/0036_auto__add_field_resourcevariant_tags__chg_field_resourcevariant_varian.py rename to ide/south_migrations/0036_auto__add_field_resourcevariant_tags__chg_field_resourcevariant_varian.py diff --git a/ide/migrations/0037_convert_suffixes_to_tags.py b/ide/south_migrations/0037_convert_suffixes_to_tags.py similarity index 100% rename from ide/migrations/0037_convert_suffixes_to_tags.py rename to ide/south_migrations/0037_convert_suffixes_to_tags.py diff --git a/ide/migrations/0038_auto__del_field_resourcevariant_variant__chg_field_resourcevariant_tag.py b/ide/south_migrations/0038_auto__del_field_resourcevariant_variant__chg_field_resourcevariant_tag.py similarity index 100% rename from ide/migrations/0038_auto__del_field_resourcevariant_variant__chg_field_resourcevariant_tag.py rename to ide/south_migrations/0038_auto__del_field_resourcevariant_variant__chg_field_resourcevariant_tag.py diff --git a/ide/migrations/0039_auto__add_field_resourceidentifier_target_platforms.py b/ide/south_migrations/0039_auto__add_field_resourceidentifier_target_platforms.py similarity index 100% rename from ide/migrations/0039_auto__add_field_resourceidentifier_target_platforms.py rename to ide/south_migrations/0039_auto__add_field_resourceidentifier_target_platforms.py diff --git a/ide/migrations/0040_fix_target_platforms.py b/ide/south_migrations/0040_fix_target_platforms.py similarity index 100% rename from ide/migrations/0040_fix_target_platforms.py rename to ide/south_migrations/0040_fix_target_platforms.py diff --git a/ide/migrations/0041_auto__del_field_resourcefile_target_platforms__del_unique_resourceiden.py b/ide/south_migrations/0041_auto__del_field_resourcefile_target_platforms__del_unique_resourceiden.py similarity index 100% rename from ide/migrations/0041_auto__del_field_resourcefile_target_platforms__del_unique_resourceiden.py rename to ide/south_migrations/0041_auto__del_field_resourcefile_target_platforms__del_unique_resourceiden.py diff --git a/ide/migrations/0042_auto__add_field_resourceidentifier_memory_format__add_field_resourceid.py b/ide/south_migrations/0042_auto__add_field_resourceidentifier_memory_format__add_field_resourceid.py similarity index 100% rename from ide/migrations/0042_auto__add_field_resourceidentifier_memory_format__add_field_resourceid.py rename to ide/south_migrations/0042_auto__add_field_resourceidentifier_memory_format__add_field_resourceid.py diff --git a/ide/migrations/0043_convert_resource_kinds_to_bitmap.py b/ide/south_migrations/0043_convert_resource_kinds_to_bitmap.py similarity index 100% rename from ide/migrations/0043_convert_resource_kinds_to_bitmap.py rename to ide/south_migrations/0043_convert_resource_kinds_to_bitmap.py diff --git a/ide/migrations/0044_auto__add_field_project_app_modern_multi_js.py b/ide/south_migrations/0044_auto__add_field_project_app_modern_multi_js.py similarity index 100% rename from ide/migrations/0044_auto__add_field_project_app_modern_multi_js.py rename to ide/south_migrations/0044_auto__add_field_project_app_modern_multi_js.py diff --git a/ide/migrations/0045_migrate_pebblejs_add_platforms.py b/ide/south_migrations/0045_migrate_pebblejs_add_platforms.py similarity index 100% rename from ide/migrations/0045_migrate_pebblejs_add_platforms.py rename to ide/south_migrations/0045_migrate_pebblejs_add_platforms.py diff --git a/ide/migrations/0047_remove_pebblejs_dependencies.py b/ide/south_migrations/0047_remove_pebblejs_dependencies.py similarity index 100% rename from 
ide/migrations/0047_remove_pebblejs_dependencies.py rename to ide/south_migrations/0047_remove_pebblejs_dependencies.py diff --git a/ide/migrations/0048_create_pkjs_target.py b/ide/south_migrations/0048_create_pkjs_target.py similarity index 100% rename from ide/migrations/0048_create_pkjs_target.py rename to ide/south_migrations/0048_create_pkjs_target.py diff --git a/ide/migrations/0049_add_project_dependencies.py b/ide/south_migrations/0049_add_project_dependencies.py similarity index 100% rename from ide/migrations/0049_add_project_dependencies.py rename to ide/south_migrations/0049_add_project_dependencies.py diff --git a/ide/migrations/0050_auto__del_unique_sourcefile_project_file_name__add_unique_sourcefile_p.py b/ide/south_migrations/0050_auto__del_unique_sourcefile_project_file_name__add_unique_sourcefile_p.py similarity index 100% rename from ide/migrations/0050_auto__del_unique_sourcefile_project_file_name__add_unique_sourcefile_p.py rename to ide/south_migrations/0050_auto__del_unique_sourcefile_project_file_name__add_unique_sourcefile_p.py diff --git a/ide/migrations/0051_auto__chg_field_resourceidentifier_target_platforms.py b/ide/south_migrations/0051_auto__chg_field_resourceidentifier_target_platforms.py similarity index 100% rename from ide/migrations/0051_auto__chg_field_resourceidentifier_target_platforms.py rename to ide/south_migrations/0051_auto__chg_field_resourceidentifier_target_platforms.py diff --git a/auth/__init__.py b/ide/south_migrations/__init__.py similarity index 100% rename from auth/__init__.py rename to ide/south_migrations/__init__.py diff --git a/ide/static/ide/css/ide.css b/ide/static/ide/css/ide.css index c5017e42..182242bf 100644 --- a/ide/static/ide/css/ide.css +++ b/ide/static/ide/css/ide.css @@ -5,6 +5,20 @@ /* New stuff. 
*/ +.clearfix:after { + visibility: hidden; + display: block; + font-size: 0; + content: " "; + clear: both; + height: 0; + } +.clearfix { display: inline-block; } +/* start commented backslash hack \*/ +* html .clearfix { height: 1%; } +.clearfix { display: block; } +/* close commented backslash hack */ + /* Page layout framework */ /* This disables bouncing in Safari */ @@ -35,7 +49,7 @@ strong { font-family: PFD-Regular, 'Helvetica Neue', Helvetica, Arial, sans-serif; font-size: large; padding-left: 40px; - + z-index: 1; -webkit-transition: top 0.5s, padding-top 0.5s; -moz-transition: top 0.5s, padding-top 0.5s; -ms-transition: top 0.5s, padding-top 0.5s; @@ -104,6 +118,8 @@ ul.nav-pills { left: 0; right: 0; overflow: auto; + min-width: 200px; + min-height: 200px; } /* Sidebar */ @@ -320,6 +336,19 @@ ul.nav-pills { background-color: #00B293; } +.run-test-btn { + background: #0093B2 url('../img/run.png') no-repeat 15px; + color: white; +} +.run-test-btn:hover { + background-color: #0093B2; + color: white; +} + +.show-test-btn { + background: #FFFFFF url('../img/info.png') no-repeat center; +} + .save-btn { background: white url('../img/save.png') no-repeat center; } @@ -1291,6 +1320,414 @@ button#add-filter { margin: 0; } +/* Monkey editor */ + +.form-horizontal .control-group label.control-label-multiline { + line-height: 25px; +} + +.modal-checkbox-explanation { + display: inline-block; + width: calc(100% - 213px); + padding-left: 13px; +} + +.monkey-no-tests { + font-size: larger; + line-height: 40px; +} +.monkey-no-tests button { + vertical-align: middle; +} +.monkey-no-tests p + p { + margin-top: 20px; +} + +#right-pane { + box-sizing: border-box; + width: 0; + right: 0; + top: 0; + bottom: 0; + position: absolute; + border-left: #444 3px solid; +} + +.monkey-pane { + padding: 10px; + overflow: scroll; + height: 100%; + background-color: #333; + box-sizing: border-box; +} + +.monkey-pane h2 { + padding: 10px; + text-align: center; +} + +.monkey-screenshot-set { + margin-bottom: 30px; +} + +.monkey-pane .image-resource-preview { + display: inline-block; +} + +.monkey-screenshot-container { + display: inline-block; + width: 200px; + height: 200px; + text-align: center; + vertical-align: top; +} + +.monkey-screenshot-container .progress { + margin: 75px 20px; +} + +.monkey-screenshot-container > div { + margin: auto; +} + +.monkey-pane .platform-chalk > * { + width: 180px; + height: 180px; +} +.monkey-pane .platform-chalk > img { + border-radius: 100px; +} +.monkey-pane .platform-aplite > *, .monkey-pane .platform-basalt > *, +.monkey-pane .screenshot-empty> *{ + width: 144px; + height: 168px; +} + +.screenshot-empty .btn { + font-size: smaller; + margin: 5px; + background-color: #444; + color: white; +} +.screenshot-empty .btn[disabled] { + background-color: #393939 !important; + color: #888; +} + +.monkey-platforms span { + width: 200px; + color: white; + font-size: 30px; + display: inline-block; + text-align: center; +} + +.monkey-platforms button { + width: 160px; + font-size: 15px; + margin: 3px 20px; + display: inline-block; + text-align: center; +} + +.monkey-screenshot-name { + color: white; + font-weight: bold; + font-size: 20px; +} + +.monkey-form-buttons { + max-width: 300px; + margin: auto; +} + +.monkey-form-buttons button { + margin-right: 20px; + margin-bottom: 10px; +} + +#monkey-upload-previews .image-resource-preview { + display: inline-block; +} + +.monkey-screenshot-set.disabled { + opacity: 0.6; + transition: opacity 1s; +} + +.will-delete { + opacity: 0.3; +} 
+.monkey-screenshot-will-delete-warning { + position: relative; + top: -110px; + margin-top: -1em; + text-align: center; + font-size: 30px; + color: #ccc; + pointer-events: none; +} + +.monkey-screenshot-set .image-resource-preview { + position: relative; +} + +.monkey-progress-overlay { + position: absolute; + top: 0; + bottom: 0; + left: 0; + right: 0; + text-align: center; + background-color: rgba(0, 0, 0, 0.3); + padding-top: 400px; +} + +.monkey-screenshot-set input { + width: calc(100% - 20px); +} + +.monkey-pane .field-help { + position: relative; + float: right; +} + +.monkey-screenshots .image-resource-preview img { + cursor: pointer; + background-color: rgba(100, 100, 100, 0.5); +} + +.monkey-screenshots .monkey-modified { + border: 2px white dashed; +} + +.monkey-screenshots .monkey-hover { + border: 2px #5bc0de dashed; +} + +.monkey-screenshots .settings-status-icons { + padding-right: 10px; +} + +.monkey-screenshot-set .image-resource-preview .delete-btn { + position: absolute; + right: 10px; + width: 40px; + height: 40px; + top: 10px; + opacity: 0; +} +.monkey-screenshot-set .image-resource-preview:hover .delete-btn, +.monkey-screenshot-set .image-resource-preview .delete-btn:focus { + opacity: 0.5; +} +.monkey-screenshot-set .image-resource-preview .delete-btn:hover { + opacity: 1; +} + +.screenshot-empty .delete-btn { + display: none; +} + +.monkey-select-platform { + text-decoration: underline; +} +.monkey-select-platform:hover { + cursor: pointer; +} + +.monkey-run-deleted { + text-decoration: line-through; +} + +/* Test Manager pane */ + + + +.testmanager-pane .well { + margin: 30px; + padding-left: 60px; + padding-right: 60px; +} + +.testmanager-pane .well > div { + min-width: 400px; +} + +.testmanager-download-btn { + width: 220px; + margin-left: 20px; +} + +.infoTable th { + width: 40%; + text-align: right; + padding-right: 20px; +} + +.testmanager-pane table { + border-collapse: collapse; + width: 100%; +} + +.testmanager-pane table td { + color: #CCC; +} + +.testmanager-pane tr.clickable { + cursor: pointer; +} +.testmanager-pane tr.clickable:hover { + background-color: #555; +} + +td.test-run { + text-transform: capitalize; +} +.testmanager-pane td.test-error { + color: #FA0; +} +.testmanager-pane td.test-failed { + color: #E5171F; +} +.testmanager-pane td.test-passed { + color: #00B293; +} + +.testmanager-pane table.table tr { + height: 37px; +} + +pre.test-script { + white-space: pre-wrap; + overflow: scroll; +} + +.testmanager-pane a:not(.btn) { + text-decoration: underline; + color: #00a8c6; +} + +@media (min-width: 1320px) { + .leftside { + left: 0; + position: absolute; + width: 50%; + margin-top: 30px; + } + + .leftside .well { + margin-right: 15px; + margin-top: 0; + } + + .rightside { + right: 0; + position: absolute; + width: 50%; + margin-top: 30px; + } + + .rightside .well { + margin-left: 15px; + margin-top: 0; + } + .testmanager-backbutton-1 { + display: none; + } +} + +@media (max-width: 1319px) { + .testmanager-pane .well { + min-width: 520px; + } + .testmanager-page-detail .leftside { + display: none; + } + .testmanager-table-filler { + display: none; + } +} + +.testmanager-pane .progress { + margin-left: 0; + margin-right: 0; +} +.testmanager-pane .progress .bar { + width: 100%; +} +.testmanager-pane tr { + height: 33px; +} + +.testmanager-run pre { + /* TODO: style better? 
*/ + padding: 3px; +} + +tr.flash { + animation-name: flash; + animation-duration: 1.5s; +} + +@keyframes flash { + from {background-color: green;} + to {background-color: initial;} +} + +.testmanager-pane tr.selected { + background-color: #4a4a4a; +} + +.alert.alert-error pre { + max-height: 10em; + overflow: auto; + padding: 5px; + background-color: #a2171e; +} + +.paginator button { + width: 40px; + border-right: thin #aaa solid; + border-radius: initial; +} +.paginator button:last-child { + border-top-right-radius: 5px; + border-bottom-right-radius: 5px; + border-right: none; +} +.paginator button:first-child { + border-top-left-radius: 5px; + border-bottom-left-radius: 5px; +} +.paginator button.btn.selected:focus { + outline: none; + box-shadow: 0 0 5px 2px rgba(255, 19, 19, 0.7); +} +.paginator .selected { + color: white; + background-color: #f77; +} +.paginator { + text-align: center; +} +.paginator button[disabled] { + background-color: #eee !important; +} + +.well .button-close { + float: right; + background-color: #a2171e; + display: inline-block; + text-align: center; + vertical-align: text-top; + padding: 3px 8px; + border-radius: 10px; + border: none; + color: white; + font-size: 15px; + cursor: pointer; +} .text-icon { border: 2px white solid; diff --git a/ide/static/ide/img/info.png b/ide/static/ide/img/info.png new file mode 100644 index 00000000..37c15436 Binary files /dev/null and b/ide/static/ide/img/info.png differ diff --git a/ide/static/ide/js/autocomplete.js b/ide/static/ide/js/autocomplete.js index a7f002d6..bcaa9712 100644 --- a/ide/static/ide/js/autocomplete.js +++ b/ide/static/ide/js/autocomplete.js @@ -223,7 +223,10 @@ CloudPebble.Editor.Autocomplete = new (function() { mRunning = true; var request_function; if (token.type === 'meta') { - request_function = try_complete_macro + request_function = try_complete_macro; + } + else if (editor.options.mode == 'MonkeyScript') { + request_function = CloudPebble.MonkeyScript.request; } else { request_function = CloudPebble.YCM.request; diff --git a/ide/static/ide/js/cloudpebble.js b/ide/static/ide/js/cloudpebble.js index 306c0633..8e6bdcc7 100644 --- a/ide/static/ide/js/cloudpebble.js +++ b/ide/static/ide/js/cloudpebble.js @@ -48,11 +48,13 @@ CloudPebble.TargetNames = { 'common': gettext("Shared JavaScript") }; + CloudPebble.ProjectInfo = {}; CloudPebble.Init = function() { jquery_csrf_setup(); + // Load in project data. Ajax.Get('/ide/project/' + PROJECT_ID + '/info').then(function(data) { CloudPebble.ProjectInfo = data; @@ -67,6 +69,7 @@ CloudPebble.Init = function() { CloudPebble.Dependencies.Init(); CloudPebble.Documentation.Init(); CloudPebble.FuzzyPrompt.Init(); + CloudPebble.TestManager.Init(); CloudPebble.ProgressBar.Hide(); // Add source files. 
@@ -74,6 +77,10 @@ CloudPebble.Init = function() { CloudPebble.Editor.Add(value); }); + $.each(data.test_files, function(index, value) { + CloudPebble.Editor.AddTest(value) + }); + $.each(data.resources, function(index, value) { CloudPebble.Resources.Add(value); }); @@ -137,6 +144,7 @@ CloudPebble.Prompts = { $('#modal-text-input').modal('hide'); } }); + event.preventDefault(); }; $('#modal-text-confirm-button').unbind('click').click(submit); $('#modal-text-input form').unbind('submit').submit(submit); @@ -231,6 +239,28 @@ CloudPebble.Utils = { var t = Math.round(Math.abs(Date.parse(s2.replace(' ','T')) - Date.parse(s1.replace(' ','T'))) / 1000); var n = t.toFixed(0); return interpolate(ngettext("%s second", "%s seconds", n), [n]); + }, + /** Convert a data URI to a Blob so it can be uploaded normally + * http://stackoverflow.com/questions/4998908/convert-data-uri-to-file-then-append-to-formdata + */ + ConvertDataURItoBlob: function(dataURI) { + // Convert base64/URLEncoded data component to raw binary data held in a string + var byteString; + if (dataURI.split(',')[0].indexOf('base64') >= 0) + byteString = atob(dataURI.split(',')[1]); + else + byteString = unescape(dataURI.split(',')[1]); + + // Separate out the mime component + var mimeString = dataURI.split(',')[0].split(':')[1].split(';')[0]; + + // Write the bytes of the string to a typed array + var ia = new Uint8Array(byteString.length); + for (var i = 0; i < byteString.length; i++) { + ia[i] = byteString.charCodeAt(i); + } + + return new Blob([ia], {type:mimeString}); } }; diff --git a/ide/static/ide/js/compile.js b/ide/static/ide/js/compile.js index 26ab6846..23fc85c5 100644 --- a/ide/static/ide/js/compile.js +++ b/ide/static/ide/js/compile.js @@ -83,35 +83,38 @@ CloudPebble.Compile = (function() { }); }; + var fetch_build_history = function() { + return Ajax.Get('/ide/project/' + PROJECT_ID + '/build/history').then(function(data) { + mLastBuild = (data.builds.length > 0) ? data.builds[0] : null; + return data; + }); + }; + var update_build_history = function(pane) { var check = function() { - return Ajax.Get('/ide/project/' + PROJECT_ID + '/build/history').then(function(data) { + return fetch_build_history().then(function(data) { CloudPebble.ProgressBar.Hide(); pane.removeClass('hide'); - if (data.builds.length > 0) { + if(data.builds.length > 0) { update_last_build(pane, data.builds[0]); } else { update_last_build(pane, null); } pane.find('#run-build-table').html(''); - $.each(data.builds, function (index, value) { + $.each(data.builds, function(index, value) { pane.find('#run-build-table').append(build_history_row(value)); }); - if (data.builds.length > 0 && data.builds[0].state == 1) { - return Promise.delay(1000).then(function () { - return check(); + if(data.builds.length > 0 && data.builds[0].state == 1) { + return Promise.delay(1000).then(function() { + return check() }); - } else - - - if (mRunningBuild) { + } else if(mRunningBuild) { mRunningBuild = false; return (data.builds[0].state == 3) } }); }; - return check().catch(function(error) { alert(interpolate(gettext("Something went wrong:\n%s"), [error.message])); // This should be prettier. 
CloudPebble.Sidebar.DestroyActive(); @@ -179,7 +182,6 @@ CloudPebble.Compile = (function() { commands[gettext("Show Phone Logs")] = function() { show_app_logs(ConnectionType.Phone); }; commands[gettext("Show Emulator Logs")] = function() { show_app_logs(ConnectionType.Qemu); }; commands[gettext("Show Last Build Log")] = function() {show_build_log(mLastBuild.id)}; - commands[gettext("Compilation")] = function() { show_compile_pane();}; commands[gettext("Clear App Logs")] = function() { show_clear_logs_prompt(); }; commands[gettext("Take Screenshot")] = function() { take_screenshot(); }; CloudPebble.FuzzyPrompt.AddCommands(commands); @@ -279,30 +281,23 @@ CloudPebble.Compile = (function() { pane.find('#last-compilation-pbw').removeClass('hide').attr('href', build.download); pane.find("#run-on-phone").removeClass('hide'); if(build.sizes) { - if(build.sizes.aplite) { - var aplite_size_text = format_build_size(build.sizes.aplite, 24576, 10240, 98304); - pane.find('#last-compilation-size-aplite').removeClass('hide').find('.text').text(aplite_size_text); - } else { - pane.find('#last-compilation-size-aplite').addClass('hide'); - } - if(build.sizes.basalt) { - var basalt_size_text = format_build_size(build.sizes.basalt, 65536, 10240, 262144); - pane.find('#last-compilation-size-basalt').removeClass('hide').find('.text').text(basalt_size_text); - } else { - pane.find('#last-compilation-size-basalt').addClass('hide'); - } - if(build.sizes.chalk) { - var chalk_size_text = format_build_size(build.sizes.chalk, 65536, 10240, 262144); - pane.find('#last-compilation-size-chalk').removeClass('hide').find('.text').text(chalk_size_text); - } else { - pane.find('#last-compilation-size-chalk').addClass('hide'); - } - if(build.sizes.diorite) { - var diorite_size_text = format_build_size(build.sizes.diorite, 65536, 10240, 262144); - pane.find('#last-compilation-size-diorite').removeClass('hide').find('.text').text(diorite_size_text); - } else { - pane.find('#last-compilation-size-diorite').addClass('hide'); - } + var build_size_params = { + aplite: [24576, 10240, 98304], + basalt: [65536, 10240, 262144], + chalk: [65536, 10240, 262144], + diorite: [65536, 10240, 262144] + }; + _.each(['aplite', 'basalt', 'chalk', 'diorite'], function(platform) { + if (build.sizes[platform]) { + var params = build_size_params[platform]; + var size_text = format_build_size(build.sizes[platform], params[0], params[1], params[2]); + pane.find('#last-compilation-size-'+platform).removeClass('hide').find('.text').text(size_text); + } + else { + pane.find('#last-compilation-size-'+platform).addClass('hide'); + } + }); + } // Only enable emulator buttons for built platforms.
pane.find('#run-qemu .btn-primary').attr('disabled', function() { @@ -505,6 +500,10 @@ CloudPebble.Compile = (function() { } } + /** + * Install the app on to a watch or emulator + * @param kind platform kind + */ var install_on_watch = function(kind) { var modal = $('#phone-install-progress'); return SharedPebble.getPebble(kind).then(function(pebble) { @@ -770,16 +769,28 @@ CloudPebble.Compile = (function() { return 0; }; + var get_platforms_compiled_for = function() { + var get_keys = function() { + return _.keys((mLastBuild || {}).sizes); + }; + if (mLastBuild) return Promise.resolve(get_keys()); + return fetch_build_history().then(get_keys); + }; + return { Show: function() { show_compile_pane(); }, Init: function() { init(); + }, RunBuild: function() { return run_build(); }, + GetPlatformsCompiledFor: function() { + return get_platforms_compiled_for(); + }, /** * Get the platform to install and run the the app on, given details of the project and last build. * @returns {number} @@ -814,7 +825,9 @@ CloudPebble.Compile = (function() { } }, DoInstall: function() { - return install_on_watch(CloudPebble.Compile.GetPlatformForInstall()); + return fetch_build_history().then(function() { + return install_on_watch(CloudPebble.Compile.GetPlatformForInstall()); + }); } }; })(); diff --git a/ide/static/ide/js/dependencies.js b/ide/static/ide/js/dependencies.js index b2c48c8c..7e0ed808 100644 --- a/ide/static/ide/js/dependencies.js +++ b/ide/static/ide/js/dependencies.js @@ -609,9 +609,6 @@ CloudPebble.Dependencies = (function() { show_dependencies_pane(); }, Init: function() { - var commands = {}; - commands[gettext("Dependencies")] = CloudPebble.Dependencies.Show; - CloudPebble.FuzzyPrompt.AddCommands(commands); dependencies_template = $('#dependencies-pane-template').remove().removeClass('hide'); alerts.init(dependencies_template); diff --git a/ide/static/ide/js/editor.js b/ide/static/ide/js/editor.js index ac4498a5..a105388e 100644 --- a/ide/static/ide/js/editor.js +++ b/ide/static/ide/js/editor.js @@ -41,9 +41,58 @@ CloudPebble.Editor = (function() { }); }; + var add_test_file = function(file) { + file.target = 'test'; + CloudPebble.Sidebar.AddTestFile(file, function() { + edit_source_file(file); + }); + + project_source_files[file.name] = file; + }; + + function run_test(test_id, options) { + options = _.defaults(options, { + update: false, + platform: CloudPebble.Compile.GetPlatformForInstall() + }); + var platform = options.platform | ConnectionType.Qemu; + var platform_name = ConnectionPlatformNames[platform]; + + CloudPebble.Prompts.Progress.Show("Testing", "Starting test"); + CloudPebble.Editor.SaveAll().then(function() { + return CloudPebble.Compile.GetPlatformsCompiledFor(); + }).then(function(platforms) { + if (platforms.length == 0) { + throw new Error(gettext("Project must be compiled before testing")) + } + if (!_.contains(platforms, platform_name)) { + throw new Error(gettext("Project not compiled for ")+platform_name); + } + }).then(function () { + return SharedPebble.getPebble(platform); + }).then(function() { + return SharedPebble.getEmulator(platform); + }).then(function (emulator) { + CloudPebble.Prompts.Progress.Update(gettext("Starting test")); + return emulator.runTest(PROJECT_ID, test_id, platform_name, options.update); + }).then(function (result) { + return CloudPebble.TestManager.ShowLiveTestRun(result['subscribe_url'], result['session_id'], result['run_id']); + }).then(function(){ + CloudPebble.Prompts.Progress.Hide(); + }).catch(function (error) { + 
CloudPebble.Prompts.Progress.Update(error.message); + CloudPebble.Prompts.Progress.Fail(); + throw error; + }); + } + + var rename_file = function(file, new_name) { var old_name = file.name; // Check no-change or duplicate filenames + var url_kind = (file.target == 'test' ? 'tests' : 'source'); + var sidebar_kind = (file.target == 'test' ? 'test' : 'source'); + if (new_name == old_name) { return Promise.resolve(); } @@ -51,7 +100,7 @@ CloudPebble.Editor = (function() { if (project_source_files[new_path]) { return Promise.reject(new Error(interpolate(gettext("A file of called '%s' of type '%s' already exists."), [new_name, CloudPebble.TargetNames[file.target]]))); } - return Ajax.Post("/ide/project/" + PROJECT_ID + "/source/" + file.id + "/rename", { + return Ajax.Post("/ide/project/" + PROJECT_ID + "/" + url_kind + "/" + file.id + "/rename", { old_name: file.name, new_name: new_name, modified: file.lastModified @@ -63,7 +112,7 @@ CloudPebble.Editor = (function() { file.name = new_name; file.file_path = new_file_path; file.lastModified = response.modified; - CloudPebble.Sidebar.SetItemName('source', file.id, new_name); + CloudPebble.Sidebar.SetItemName(sidebar_kind, file.id, new_name); CloudPebble.FuzzyPrompt.SetCurrentItemName(new_name); project_source_files[file.file_path] = file; return null; @@ -71,10 +120,16 @@ CloudPebble.Editor = (function() { }; var edit_source_file = function(file, show_ui_editor) { + var url_kind = (file.target == 'test' ? 'tests' : 'source'); + var sidebar_id = (file.target == 'test' ? 'test' : 'source') + '-' + file.id; + CloudPebble.FuzzyPrompt.SetCurrentItemName(file.name); // See if we already had it open. CloudPebble.Sidebar.SuspendActive(); - if(CloudPebble.Sidebar.Restore('source-'+file.id)) { + if(CloudPebble.Sidebar.Restore(sidebar_id)) { + if (file.target != 'test' || !CloudPebble.SidePane.restorePane('monkey-screenshots', file.id)) { + CloudPebble.SidePane.setSize(0); + } if (resume_fullscreen) { fullscreen(open_codemirrors[file.id], true); } @@ -83,7 +138,8 @@ CloudPebble.Editor = (function() { CloudPebble.ProgressBar.Show(); // Open it. - return Ajax.Get('/ide/project/' + PROJECT_ID + '/source/' + file.id + '/load').then(function(data) { + return Ajax.Get('/ide/project/' + PROJECT_ID + '/' + url_kind + '/' + file.id + '/load').then(function(data) { + var screenshot_pane; var source = data.source; file.lastModified = data.modified; var pane = $('
'); @@ -92,6 +148,10 @@ CloudPebble.Editor = (function() { file_kind = 'js'; file_mode = 'javascript'; } + else if (file.target == "test") { + file_kind = 'monkey'; + file_mode = 'MonkeyScript' + } else if (/\.json$/.test(file.name)) { file_kind = 'json'; file_mode = 'application/json'; @@ -103,7 +163,7 @@ CloudPebble.Editor = (function() { function file_kind_in(options) { return _.contains(options, file_kind); } - var language_has_autocomplete = (file_kind == 'c'); + var language_has_autocomplete = file_kind_in(['c', 'monkey']); var is_autocompleting = false; var settings = { indentUnit: USER_SETTINGS.tab_width, @@ -276,7 +336,6 @@ CloudPebble.Editor = (function() { CodeMirror.commands.autocomplete(code_mirror); }); } - if(file_kind_in(['json', 'js'])) { var warning_lines = []; var do_hint = function() { @@ -465,11 +524,11 @@ CloudPebble.Editor = (function() { } var check_safe = function() { - return Ajax.Get('/ide/project/' + PROJECT_ID + '/source/' + file.id + '/is_safe?modified=' + file.lastModified).then(function(data) { + return Ajax.Get('/ide/project/' + PROJECT_ID + '/' + url_kind + '/' + file.id + '/is_safe?modified=' + file.lastModified).then(function(data) { if(!data.safe) { if(was_clean) { code_mirror.setOption('readOnly', true); - return CloudPebble.Get('/ide/project/' + PROJECT_ID + '/source/' + file.id + '/load', function(data) { + return Ajax.Get('/ide/project/' + PROJECT_ID + '/' + url_kind + '/' + file.id + '/load').then(function(data) { code_mirror.setValue(data.source); file.lastModified = data.modified; was_clean = true; // this will get reset to false by setValue. @@ -498,19 +557,30 @@ CloudPebble.Editor = (function() { fullscreen(code_mirror, false); resume_fullscreen = true; } + CloudPebble.SidePane.suspendActivePane(); + CloudPebble.SidePane.setSize(0); }, onDestroy: function() { if(!was_clean) { --unsaved_files; } + + if (screenshot_pane) screenshot_pane.destroy(); delete open_codemirrors[file.id]; } }); + if (file_kind == 'monkey') { + screenshot_pane = new CloudPebble.MonkeyScreenshots.ScreenshotPane(file.id); + CloudPebble.SidePane.addPane(screenshot_pane.getPane(), 'monkey-screenshots', file.id); + code_mirror.screenshot_pane = screenshot_pane; + } + + var was_clean = true; code_mirror.on('change', function() { if(was_clean) { - CloudPebble.Sidebar.SetIcon('source-' + file.id, 'edit'); + CloudPebble.Sidebar.SetIcon(sidebar_id, 'edit'); was_clean = false; ++unsaved_files; } @@ -519,7 +589,7 @@ CloudPebble.Editor = (function() { var mark_clean = function() { was_clean = true; --unsaved_files; - CloudPebble.Sidebar.ClearIcon('source-' + file.id); + CloudPebble.Sidebar.ClearIcon(sidebar_id); }; var save = function() { @@ -534,7 +604,7 @@ CloudPebble.Editor = (function() { save_btn.prop('disabled', true); delete_btn.prop('disabled', true); - return Ajax.Post("/ide/project/" + PROJECT_ID + "/source/" + file.id + "/save", { + return Ajax.Post("/ide/project/" + PROJECT_ID + "/" + url_kind + "/" + file.id + "/save", { content: code_mirror.getValue(), modified: file.lastModified, folded_lines: JSON.stringify(code_mirror.get_folded_lines()) @@ -550,13 +620,18 @@ CloudPebble.Editor = (function() { var show_rename_prompt = function() { var old_type = file.name.split('.').pop(); - var c_pattern = "[a-zA-Z0-9_-]+\.(c|h)$"; - var js_pattern = "[a-zA-Z0-9_-]+\.js$"; + var c_pattern = "^[a-zA-Z0-9_-]+\.(c|h)$"; + var js_pattern = "^[a-zA-Z0-9_-]+\.js$"; + var test_pattern = "^[/a-zA-Z0-9_-]+$"; + var pattern = ""; - if (old_type == "c" || old_type == "h") { + if (file.target == 
'test') { + pattern = test_pattern; + } + else if (old_type == "c" || old_type == "h") { pattern = c_pattern; } - if (old_type == "js") { + else if (old_type == "js") { pattern = js_pattern; } CloudPebble.Prompts.Prompt( @@ -644,12 +719,20 @@ CloudPebble.Editor = (function() { // Add some buttons var button_holder = $('

'); - var run_btn = $(''); - var save_btn = $(''); - var discard_btn = $(''); - var delete_btn = $(''); - var ib_btn = $(''); - var rename_btn = $(''); + var run_btn; + if (file.target === 'test') { + run_btn = $('').addClass('btn run-test-btn').prop('title', gettext("Run test in emulator")); + } + else { + run_btn = $('').addClass('btn run-btn').prop('title', gettext("Save, build, install and run")); + } + var save_btn = $('').prop('title', gettext('Save')); + var discard_btn = $(' +

+ ) + } + else { + return ( +
+
+ +
+ + +
+ +
+ ) + } + } + }); + + function ScreenshotTitle(props) { + const className = classNames("monkey-screenshot-title", { + 'will-delete': props.will_delete + }); + const onChange = function(event) { + Screenshots.setName(props.index, event.target.value); + }; + return ( +
+ + {props.changed && ( + + + + )} +
+ ) + } + + function ScreenshotSet(props) { + const will_delete = !props.is_new_set && _.every(props.files, function(file) {return !file.file && !file.src}); + if (will_delete && (_.isUndefined(props.id) || _.isNull(props.id)) && props.name.length == 0) { + return null; + } + const className = classNames("monkey-screenshot-set", { + disabled: props.disabled + }); + + return ( +
+
+ {props.platforms.map(function(platform) { + return ( +
+ {props.progress && _.has(props.progress, platform) + ? + + : + + } +
+ ) + })} +
+ {!props.is_new_set && + } + {props.will_delete &&
{gettext('This screenshot set will be deleted')}
} +
+ ) + } + + /** A form which displays all current screenshots, + * and an extra row for adding new screenshots + */ + function ScreenshotForm(props) { + const onSubmit = function(event) { + event.preventDefault(); + Screenshots.save(); + }; + return ( +
+
+ {props.screenshots.map(function(screenshot_set, index) { + return ( + + ) + })} + +
+
+ ) + } + + /** A clickable title for toggling the sidebar state */ + function PlatformTitle(props) { + const platform = props.platform; + const onClick = function() { + Platforms.toggle(platform); + }; + return ({platform}) + } + + /** ScreenshotManager contains all of the screenshot manager UI */ + const ScreenshotManager = React.createClass({ + componentDidMount: function() { + const help = gettext('

Click on the + buttons or drag in image files to add screenshots to test against.

' + + '

To add or modify the screenshots for a single platform across multiple sets of screenshots, ' + + 'drag in multiple images.

'); + $(this.refs.help).popover({ + trigger: 'hover', + content: help, + html: true, + container: '#help-prompt-holder', + placement: 'left', + animation: false + }); + }, + render: function() { + const {screenshots, platforms, disabled, progress, activePebble, loading} = this.props; + const stopEvent = function(event) { + // We cancel any drop events over the UI so that the user doesn't experience unexpected behaviour if they + // accidentally drop an image outside of a screenshot box. + event.preventDefault(); + event.stopPropagation(); + }; + const onCancel = function() { + CloudPebble.Prompts.Confirm(gettext("Reset all changes?"), gettext("This cannot be undone."), function() { + Screenshots.loadScreenshots(); + }); + }; + return ( +
+ +

{gettext('Screenshots')}

+ + {!!this.props.error && } + +
+ {this.props.platforms.map((platform) => { + return ( + + ) + })} +
+ + + {loading && } + +
+ + +
+
+ ); + } + }); + + /** ScreenshotManagerContainer listens to changes in screenshot model, and passes them to the UI */ + const ScreenshotManagerContainer = React.createClass({ + getInitialState: function() { + return { + screenshots: Screenshots.getScreenshots(), + error: null, + loading: false, + disabled: false, + platforms: Platforms.initial(), + progress: {}, + activePebble: !!SharedPebble.getPebbleNow() + } + }, + componentDidMount: function() { + this.listener = _.extend({}, Backbone.Events); + // Listen to changes in the available platforms + this.listener.listenTo(Platforms, 'changed', (state) => { this.setState(state) }); + // Listen to events which directly enable/disable the form + this.listener.listenTo(Screenshots, 'disable', () => { this.setState({disabled: true}) }); + this.listener.listenTo(Screenshots, 'enable', () => { this.setState({disabled: false}) }); + // Listen to updates in screenshot and error information + // If we get a 'changed' or 'error' event, we know that loading is done. + this.listener.listenTo(Screenshots, 'changed', (screenshots) => { + this.setState({ + screenshots: screenshots, + error: null, + loading: false, + changed: Screenshots.isModified() + }) + }); + this.listener.listenTo(Screenshots, 'error', (error) => { + this.setState({ + error: error, + loading: false + }) + }); + // Listen to event indicating upload progress or things loading + this.listener.listenTo(Screenshots, 'progress', (progress) => { this.setState({progress: progress}) }); + this.listener.listenTo(Screenshots, 'waiting', () => { + this.setState({ + loading: true + }) + }); + // Listen to the activePebble + this.listener.listenTo(SharedPebble, 'status', (pebble, code) => { + if (code == 0) { + this.setState({activePebble: true}); + } + }); + this.listener.listenTo(SharedPebble, 'close error', () => { + this.setState({activePebble: false}); + }); + }, + componentWillUnmount: function() { + this.listener.stopListening(); + }, + render: function(props) { + return () + } + }); + + return { + render: function(element, props) { + const elm = React.createElement(ScreenshotManagerContainer, props); + ReactDOM.render(elm, element); + } + } +}); diff --git a/ide/static/ide/js/monkey/test_manager.jsx b/ide/static/ide/js/monkey/test_manager.jsx new file mode 100644 index 00000000..dcaed13b --- /dev/null +++ b/ide/static/ide/js/monkey/test_manager.jsx @@ -0,0 +1,548 @@ +CloudPebble.TestManager = (function() { + const POLLING_PERIOD = 10000; + let ui, api; + + function API(project_id) { + const base_url = `/ide/project/${project_id}/`; + /** + * A store keeps track of some state which is fetched from the server. Any time this state is changed, + * it fires a 'changed' event. + */ + const Store = _.extend({ + state: {}, + /** default URL to send GET requests to**/ + url: '', + /** name of key to use when returning from getState() **/ + key: 'data', + /** name of key in the resulting request to fetch data from**/ + server_key: 'data', + ignore_refresh_options: false, + + /** Trigger a Backbone event asynchronously. + * @param {string} event name of event + * @param {object} data object to send + */ + triggerLater(event, data) { + _.defer(() => { + this.trigger(event, data); + }); + }, + /** + * This function should return true for any object which we would expect to be sent in a GET request + * @param options The options for the GET request. + * @returns {Function} by default, returns a function which always returns True, since the default behaviour + * of 'refresh' is to fetch all items. 
+ */ + filter_function(options) { + return () => !!options; + }, + /** + * Given the result of a request, a function which should filter out any objects which might be deleted, + * update the state of the store and send a 'changed' event. + * @param result an object with a single key containing an array of data. + * @param options e.g. options to pass to the filter function + */ + syncData(result, options) { + const filter_function = this.filter_function(options); + const data = result[this.server_key]; + let data_list = data; + if (!_.isArray(data)) { + data_list = [data]; + } + this.state = _.pick(this.state, filter_function); + _.extend(this.state, _.indexBy(data_list, 'id')); + this.triggerLater('changed', this.getState()); + return data; + }, + /** + * This function is called when the object is requested, and should + * return any other promises which need to be resolved before the request + * can be considered complete. + * @param id the ID of the object for which relevant objects need to be fetched. + * @returns {Promise} + */ + get_extra_requests(id) { + return Promise.resolve(); + }, + /** + * Send a GET request to refresh the store with arbitrary parameters + * @param options the GET parameters. + * @returns {Promise} Ajax request + */ + refresh(options) { + let url = base_url + this.url; + let query = {}; + if (this.ignore_refresh_options) { + options = {}; + } + else { + options = _.isNumber(options) ? {id: options} : (options); + } + if (_.isEqual(_.keys(options), ['id'])) { + url += `/${options.id}`; + } + else { + query = options; + } + + return Ajax.Ajax(url, { + data: query + }).then((result) => { + return this.syncData(result, options); + }).catch((error) => { + this.triggerLater('error', {text: error.message, errorFor: this.name}); + throw error; + }); + }, + + /** + * This function should return false if all data needed to display the object referenced by the ID + * (for example, a test session and all of its test runs) are in a state which should never change + * (e.g. none are pending). + * @param id ID of the object to evaluate + * @returns {boolean} True if new GET requests may return different information. + */ + requestRequired(id) { + return true; + }, + + /** + * Make all the requests necessary to show an item from the sotre with a particular ID. + * @param id ID of the object to fetch. + * @returns {Promise} A promise composed of multiple promises/requests. + */ + request(id) { + if (this.requestRequired(id)) { + return Promise.all([ + this.refresh(id), + this.get_extra_requests(id) + ]); + } + else { + return Promise.resolve(); + } + }, + /** + * The ordering function should sort the store's data. + */ + ordering(data) { + return data; + }, + /** + * Convert this object's state dictionary to a sorted data array. + * @returns {{string: Array}} object with a single K:V pair containing the sorted data list + */ + getState() { + const state = {}; + state[this.key] = this.ordering(_.map(this.state, (x)=>x)); + return state; + }, + /** Get the initial state of the store. 
*/ + initial() { + return this.getState(); + } + }, Backbone.Events); + + /** + * The TestStore downloads a list of TestFile objects from the server + * @constructor + */ + function TestsStore() { + _.extend(this, Store); + this.url = 'tests'; + this.server_key = 'tests'; + this.key = 'tests'; + this.name = "Tests"; + this.ignore_refresh_options = true; + + /** Update the runs for this test when we navigate to its page */ + this.get_extra_requests = (id) => Runs.refresh({test: id}); + } + + /** + * The SessionStore downloads all of the TestSession objects from the server + * @constructor + */ + function SessionsStore() { + _.extend(this, Store); + this.url = 'test_sessions'; + this.key = 'sessions'; + this.name = "Sessions"; + + /** Post a new test session, running all of the tests in the project. */ + this.new = function() { + return Ajax.Ajax(`${base_url}test_sessions/run`, { + method: 'POST' + }).then((result) => { + result.session.is_new = true; + this.syncData({data: [result.session]}, {}); + Tests.refresh(); + }); + }; + + /** Ensure that we don't send useless requests for completed test sessions */ + this.requestRequired = function(id) { + const session = _.find(this.state, {id: id}); + if (!session) return true; + // If we have not fetched all runs for this session + const got_all_runs = (_.filter(Runs.state, {session_id: id}).length == session.run_count); + // or the session is still pending + const is_pending = (session.status == 0); + // then we need to send new GET requests for it. + return is_pending || !got_all_runs; + }; + + /** Update the runs for this session when we navigate to its page */ + this.get_extra_requests = (id) => Runs.refresh({session: id}); + + /** Sorts sessions by date */ + this.ordering = function(sessions) { + return _.sortBy(sessions, session => -(new Date(session.date_added))); + } + } + + function LogsStore() { + _.extend(this, Store); + this.url = 'test_logs'; + this.key = 'logs'; + this.name = 'Logs'; + this.state = {}; + this.subscriptions = {}; + + /** A request is required if this or RunStore do not have information on the run, or if there is an active + * live log subscription for the run, or if its state is PENDING. 
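+         * (Illustrative) a finished run whose log text is already cached in this.state is never
+         * re-fetched, while a pending run (code == 0) or one with a live subscription keeps
+         * being polled.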
+ */ + this.requestRequired = function(id) { + const run = _.find(Runs.state, {id: id}); + return (!run || !this.state[id] || run.code == 0 || this.subscriptions[id]); + }; + + this.refresh = function(id) { + const self = this; + const url = `${base_url}${this.url}/${id}`; + + return Runs.refresh({id: id}).then((run)=> { + if (run.code == 0) { + if (!this.subscriptions[id] && run.subscribe_url) { + return this.subscribe(run.id, run.session_id, run.subscribe_url); + } + } + else { + return Ajax.Ajax(url).then((data)=> { + self.state[id] = {text: data, id}; + self.triggerLater('changed', self.getState()); + }); + } + }); + }; + this.subscribe = function(id, session_id, url) { + if (this.subscriptions[id]) return true; + return new Promise((resolve, reject) => { + const evtSource = new EventSource(url); + let done = false; + this.state[id] = {text: '', id}; + + const onClose = () => { + delete this.subscriptions[id]; + if (!done) { + done = true; + setTimeout(() => { + Runs.refresh({id}); + Tests.refresh(); + Sessions.refresh({id: session_id}); + }, 1000); + } + }; + evtSource.addEventListener('log', (e) => { + this.state[id].text += `${e.data}\n`; + this.triggerLater('changed', this.getState()); + }); + evtSource.addEventListener('done', () => { + evtSource.close(); + onClose(); + }); + evtSource.onopen = () => { + this.subscriptions[id] = true; + resolve(); + }; + evtSource.onerror = () => { + onClose(); + reject(); + }; + }); + } + } + + /** + * The RunsStore keeps track of TestRun objects. + * @constructor + */ + function RunsStore() { + _.extend(this, Store); + this.url = 'test_runs'; + this.key = 'runs'; + this.name = "Runs"; + this.logs = {}; + + this.requestRequired = function(id) { + const run = Runs.find({id: id}); + return (!run || run.code == 0); + }; + + /** + * Filters out runs with selected IDs or sessions, before fetching them again + */ + this.filter_function = (options) => (run) => { + if (!options) { + return false; + } + return !(_.isUndefined(run) || + (options.test && run.test && run.test.id == options.test) || + (options.session && run.session_id == options.session)); + }; + /** Sorts test runs by name and date */ + this.ordering = function(runs) { + return _.chain(runs).sortBy('name').sortBy((run) => -(new Date(run.date_added))).value(); + } + } + + /** + * An object to manage navigation within the React app. + * It deals with cancelling navigation requests if new requests are made, and navigates to 'error' pages + * if any requests fail. It can delay navigation until a request is finished, but then skip the wait if the + * page has been previously navigated to. + * @constructor + */ + function RouteStore() { + _.extend(this, Backbone.Events); + const page_already_fetched = {}; + let currently_waiting_for = null; + let route = []; + let interval = null; + let polling_request = null; + let default_request_function = () => Promise.resolve(); + const routes_to_stores = {}; + + /** make a key from a page/id pair */ + const key = (page, id) => `${page}:${id}`; + /** split a page:id key */ + const from_key = (key) => { + if (!key) return null; + const split = key.split(':'); + const ret = {page: split[0]}; + if (split[1]) { + ret['id'] = parseInt(split[1], 10); + } + return ret; + }; + + /** Record that a page/id has been loaded before */ + function setCached(page, id) { + page_already_fetched[key(page, id)] = true; + } + + /** Set the current request to (a promise object) and cancel the previous one. 
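+         * In practice this records the [page, id] pair currently being waited on; navigate()
+         * compares against it via isCurrentRequest(), so a stale request that resolves after a
+         * newer navigation has started is simply abandoned.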
*/ + function setCurrentRequest(page, id) { + currently_waiting_for = [page, id]; + } + + /** Check if the current request is for a particular page/id combo */ + function isCurrentRequest(page, id) { + return _.isEqual(currently_waiting_for, [page, id]); + } + + /** Find the store corresponding to the current page, and ask it to request data for a particular + * object ID. */ + function requestPage(page, id) { + const store = routes_to_stores[page.split('/').pop()]; + if (store) { + return store.request(id); + } + } + + /** Get the current route as a list of objects */ + this.getRoute = function() { + return {route: route.map(from_key)} + }; + + /** Emit an event signifying a change to a new route */ + this.triggerCurrent = function() { + _.defer(() => {this.trigger('changed', this.getRoute());}); + }; + + /** Check if a page/id has been loaded before */ + this.isCached = function(page, id) { + return !!page_already_fetched[key(page, id)]; + }; + + this.setDefaultRequest = function(requestor) { + default_request_function = requestor; + }; + + /** + * Instantly navigate to a page/id. + * @param page name of the page. If is starts with a '/', navigate down one level. + * @param id ID of object to be shown. + */ + this.switchPage = function(page, id) { + let new_route; + setCurrentRequest(null); + if (page.startsWith('/')) { + new_route = route.concat([key(page.slice(1), id)]); + } + else { + new_route = [key(page, id)]; + } + route = new_route; + setCached(page, id); + this.triggerCurrent(); + }; + + /** Navigate up one level */ + this.up = function() { + setCurrentRequest(null); + route.pop(); + return this.poll().then(()=>this.triggerCurrent()); + }; + + /** + * Navigate to a page/id after a promise is completed. + * If the page has been navigated to in the past, don't bother waiting. If the request fails, navigate to + * an error page. + * @param page Page name + * @param id Page item ID + * @returns {*} + */ + this.navigate = function(page, id) { + const promise = requestPage(page, id); + // If we've already been the page, don't actually wait for the request + if (this.isCached(page, id) || !promise) { + this.switchPage(page, id); + return Promise.resolve(); + } + else { + // Otherwise, wait for it to finish. In the meantime, show a loading bar if it takes too long. + setCurrentRequest(page, id); + const timeout = setTimeout(() => { + this.trigger('changed', {route: [{page: 'loading'}]}); + }, 300); + return promise.then(() => { + // When the request finishes, remember that it's been visited and then navigate to + // the requested page. + setCached(page, id); + if (isCurrentRequest(page, id)) { + this.switchPage(page, id); + } + // If the current request doesn't match this one, then this request is abandoned. + }).finally(() => { + // No matter how the request ends, clear the loading-bar timeout. + clearTimeout(timeout); + }); + } + + }; + + this.poll = function() { + const full_route = this.getRoute().route; + let request; + if (full_route.length > 0) { + // If we're in a sub-page, get the data which that page needs + const page = full_route[full_route.length - 1].page; + const id = full_route[full_route.length - 1].id; + request = requestPage(page, id); + } + else { + // Otherwise fetch whatever is needed for the dashboard. 
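+                // (registered below via setDefaultRequest as Promise.all([Tests.refresh(), Sessions.refresh()]))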
+ request = default_request_function(); + } + polling_request = request; + request.finally(()=> {polling_request = null}); + return request; + }; + + this.resumePolling = function() { + if (interval || polling_request) return; + interval = setInterval(() => { + this.poll(); + }, POLLING_PERIOD); + }; + + this.pausePolling = function() { + if (!interval) return; + clearInterval(interval); + interval = null; + }; + + this.registerRoutes = function(routes) { + _.each(routes, (store) => { + routes_to_stores[store.key] = store; + }); + }; + + this.initial = function() { + return this.getRoute() + }; + + /** There is no concept of refresh for the store. */ + this.refresh = ()=> {}; + } + + var Tests = new TestsStore(); + var Sessions = new SessionsStore(); + var Runs = new RunsStore(); + var Logs = new LogsStore(); + var Route = new RouteStore(); + Route.registerRoutes([Tests, Sessions, Runs, Logs]); + Route.setDefaultRequest(()=>Promise.all([Tests.refresh(), Sessions.refresh()])); + + return {Tests, Sessions, Route, Runs, Logs} + } + + function get_api() { + return api ? api : (api = new API(PROJECT_ID)); + } + + function get_interface() { + return ui ? ui : (ui = CloudPebble.TestManager.Interface(get_api())); + } + + function show_test_manager_pane() { + const api = get_api(); + const ui = get_interface(); + return api.Route.poll().then(() => { + CloudPebble.Sidebar.SuspendActive(); + if (!CloudPebble.Sidebar.Restore("testmanager")) { + ga('send', 'event', 'project', 'load testmanager'); + const pane = $('
').attr('id', '#testmanager-pane-template').toggleClass('testmanager-pane', true); + CloudPebble.Sidebar.SetActivePane(pane, { + id: 'testmanager', + onSuspend: ()=>api.Route.pausePolling(), + onRestore: ()=>api.Route.resumePolling() + }); + ui.render(pane.get(0), {project_id: PROJECT_ID}); + api.Route.resumePolling(); + } + }); + } + + return { + Show() { + show_test_manager_pane(); + }, + ShowTest(test_id) { + const api = get_api(); + return api.Route.navigate('tests', test_id).then(show_test_manager_pane); + }, + ShowLiveTestRun(url, session_id, run_id) { + const api = get_api(); + return api.Tests.refresh() + .then(() => api.Route.navigate('sessions', session_id)) + .then(() => api.Route.navigate('/logs', run_id)) + .then(show_test_manager_pane); + }, + Init() { + const commands = {[gettext('Test Manager')]: CloudPebble.TestManager.Show}; + CloudPebble.FuzzyPrompt.AddCommands(commands); + } + } +})(); \ No newline at end of file diff --git a/ide/static/ide/js/monkey/test_manager_interface.jsx b/ide/static/ide/js/monkey/test_manager_interface.jsx new file mode 100644 index 00000000..60798872 --- /dev/null +++ b/ide/static/ide/js/monkey/test_manager_interface.jsx @@ -0,0 +1,709 @@ +CloudPebble.TestManager.Interface = (function(API) { + const CODES = { + '-2': gettext('error'), + '-1': gettext('failed'), + '0': gettext('pending'), + '1': gettext('passed') + }; + + function SessionKindLabel(props) { + return ({(props.long ? + (props.kind == 'live' ? gettext('Run in CloudPebble') : gettext('Batch run')) : + (props.kind == 'live' ? gettext('Live') : gettext('Batch'))) + }) + } + + /** + * The Pagination object is a mixin providing most of the functions needed to support and + * render a pagination switcher. + * Classes which use Pagination should define this.pageSize and have a 'getLength()' function. + */ + const Pagination = { + /** + * Given a number of pages and current page, generate Pagination indexes + * @param page + * @param pageMax + * @param delta the number of items to show on each side of the current page + * @param minimum_bounds the guaranteed number of items to show at each end + * @param dot_coverage the minimum number of dots to represent with a '...' + * @returns {Array.} an array of numbers and one or two '...' strings + */ + calculatePaging: function(page, pageMax, delta = 2, minimum_bounds = 1, dot_coverage = 2) { + if (pageMax == 1) return [1]; + // 'left' and 'right' represent the indices of the pages to show around the current page. 
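+            // e.g. (illustrative, with the default arguments) calculatePaging(10, 20)
+            // returns [1, '...l', 8, 9, 10, 11, 12, '...r', 20]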
+ let left = page - delta; + let right = page + delta; + // If they are close enough to first or last pages, shift them + if (left <= minimum_bounds + dot_coverage) { + left -= (dot_coverage - 1); + } + if (right >= pageMax - minimum_bounds - dot_coverage + 1) { + right += (dot_coverage - 1); + } + // Ensure that 'left'/'right' don't cross into the boundary pages + left = Math.max(left, minimum_bounds + 1); + right = Math.min(right, pageMax - minimum_bounds); + // Build the list of pages to show + let range = _.range(1, minimum_bounds + 1).concat(_.range(left, right + 1), _.range(pageMax - minimum_bounds + 1, pageMax + 1)); + // Add '...'s to fill in the gaps, if necessary + if (left > dot_coverage + minimum_bounds) range.splice(minimum_bounds, 0, '...l'); + if (right < pageMax - minimum_bounds - dot_coverage + 1) range.splice(range.length - (minimum_bounds), 0, '...r'); + return range; + }, + getInitialState: function() { + return { + page: 0 + } + }, + gotoPage: function(n) {this.setState({page: Math.max(0, Math.min(this.maxPages(), n))});}, + maxPages: function() {return Math.floor((this.getLength() - 1) / this.pageSize);}, + page: function(arr) {return arr.slice(this.state.page * this.pageSize, (this.state.page + 1) * this.pageSize);}, + renderButton: function(num) { + const className = classNames('btn', { + 'selected': num - 1 == this.state.page + }); + + return (_.isString(num) + ? + : + ); + }, + fillEmpty: function(items) { + const num_fillers = this.pageSize - items.length; + return items.concat(_.map(new Array(num_fillers), function(_, i) { + return () + })); + }, + renderPager: function() { + const pageMax = this.maxPages(); + if (pageMax <= 0) return null; + const indices = this.calculatePaging(this.state.page + 1, pageMax + 1); + return ( +
+ {indices.map(this.renderButton)} +
+ ); + } + }; + + /** + * Renders a
with class='well'. + */ + function Well(props) { + let {className, children, ...other} = props; + const finalClassName = classNames('well', className); + return
{children}
; + } + + /** + * The 'Anchor' class is an tag which automatically preventDefaults clicks. + */ + function Anchor(props) { + const {onClick, children, ...other} = props; + const clicked = function(event) { + event.preventDefault(); + if (_.isFunction(onClick)) { + onClick(); + } + return false; + }; + return ({children}); + } + + /** + * Renders a with the colour/content sent to represent a test result + */ + function TestResultCell(props) { + const result_name = CODES[props.code]; + const classes = "test-run test-" + result_name; + return ({result_name}); + } + + function ViewTestSourceLink(props) { + const onClick = function() { + const file = CloudPebble.Editor.GetAllFiles()[props.name]; + CloudPebble.Editor.Open(file); + }; + return (Edit) + } + + /** + * TestList allows navigation between each individual tests + */ + const TestList = React.createClass({ + mixins: [Pagination], + pageSize: 5, + getLength: function() { + return this.props.tests.length; + }, + render: function() { + let tests = this.page(this.props.tests).map((test) => { + const onClickTest = function() { + API.Route.navigate('tests', test.id); + }; + const className = classNames("clickable", { + selected: (this.props.selected == test.id) + }); + return ( + + {test.name} + + + + ); + }); + tests = this.fillEmpty(tests); + return ( +
+ + + + + + + + + {tests} +
{gettext('Name')}{gettext('Last Status')}
+ {this.renderPager()} +
+ ); + } + }); + + const RunTitle = function(props) { + const titleClassName = classNames({ + 'monkey-run-deleted': (!props.run.test) + }); + return ({props.run.name}) + }; + + /** + * RunList shows a list of test runs, e.g. for a single test or session + */ + const RunList = React.createClass({ + mixins: [Pagination], + pageSize: 18, + getLength: function() { + return _.keys(this.props.runs).length; + }, + renderRow: function(run) { + const datestring = CloudPebble.Utils.FormatDatetime(run.date_added); + const {session, test} = this.props; + const show_logs = function() { + if (run.test) { + API.Route.navigate('/logs', run.id); + } + }; + return ( + + {!test && } + {!session && {datestring}} + + {run.platform} + + ); + }, + render: function() { + const {runs, session, test} = this.props; + const paged_runs = this.page(runs); + if (_.keys(paged_runs).length == 0) { + return (

{gettext('This test has never been run!')}

); + } + let children = _.map(paged_runs, this.renderRow); + children = this.fillEmpty(children); + + return ( +
+ + + + {test ? null : } + {session ? null : } + + + + + {children} +
{gettext('Name')}{gettext('Date')}{gettext('Status')}{gettext('Platform')}
+ {this.renderPager()} +
+ ) + } + }); + + + const SessionListRow = React.createClass({ + getInitialState: function() { + return {flashing: !!this.props.session.is_new}; + }, + componentDidMount: function() { + if (this.state.flashing) { + setTimeout(() => { + this.setState({flashing: false}); + }, 1500); + } + }, + onClickSession: function() { + API.Route.navigate('sessions', this.props.session.id); + }, + render: function() { + const session = this.props.session; + const datestring = CloudPebble.Utils.FormatDatetime(session.date_added); + const rowClassName = classNames("clickable", { + selected: (this.props.selected), + flash: (this.state.flashing) + }); + const passesClassName = classNames({ + 'test-failed': session.fails > 0, + 'test-passed': session.passes == session.run_count, + 'test-pending': session.fails == 0 && (session.passes != session.run_count) + }); + + return ( + + {datestring} + + + {session.passes + '/' + session.run_count} + + ) + } + }); + + /** + * SessionList allows navigation through every test job. + */ + const SessionList = React.createClass({ + mixins: [Pagination], + pageSize: 10, + getLength: function() { + return this.props.sessions.length; + }, + render: function() { + let sessions = this.page(this.props.sessions).map((session) => { + return (); + }); + sessions = this.fillEmpty(sessions); + return ( +
+ + + + + + + + + + {sessions} +
{gettext('Date')}{gettext('Kind')}{gettext('Status')}{gettext('Passes')}
+ {this.renderPager()} +
); + } + }); + + /** + * SingleSession shows the info for a particular testing job, and all the tests run for it. + */ + function SingleSession(session) { + const filtered = _.filter(session.runs, (run) => { return run.session_id == session.id }); + const datestring = CloudPebble.Utils.FormatDatetime(session.date_added); + return ( +
+ + + + + + + + + + + + + + + +
{gettext('Date')}{datestring}
{gettext('Passes')}{(_.countBy(filtered, 'code')[1] || 0) + '/' + filtered.length}
{gettext('Test Kind')}
+ +
+ ); + } + + /** + * SingleTest shows the details for a single test, and all times it has been run + */ + function SingleTest(test) { + const filtered = _.filter(test.runs, (run) => {return !_.isUndefined(run.test) && run.test.id == test.id }); + // TODO: 'goto source' + return ( +
+ + + + + + + + + + + + + + + +
{gettext('Test')}{test.name}
{gettext('Passes')}{(_.countBy(filtered, 'code')[1] || 0) + '/' + filtered.length}
+ +
+ ); + } + + /** + * A LogArtefact is a link to a resource inside a log script which shows a popover if it is an image + */ + const LogArtefact = React.createClass({ + url: function() { + // TODO: consider a way of getting this URL from Django instead of hardcoding it. + return '/orchestrator/artefacts/' + this.props.link; + }, + componentDidMount: function() { + if (this.props.link.endsWith("png")) { + $(this.refs.a).popover({ + animation: false, + delay: {show: 250}, + container: 'body', + trigger: 'hover', + html: true, + placement: 'top', + content: `` + }); + } + }, + render: function() { + return ({this.props.name}); + } + }); + + /** + * LogScript renders a pebblesdk test log in react, converting artefact links to actual links. + */ + function LogScript(props) { + const log = props.log; + const artefacts = props.artefacts || []; + const filename = function(str) { + return str.substring(str.lastIndexOf('/') + 1); + }; + + // Build up a list of the locations of all artefacts + const matches = []; + let start = 0; + do { + // Log through the log file, find the closest match to the current position + const match = artefacts.reduce((closest, match, i) => { + const pos = log.indexOf(artefacts[i][0], start); + return (pos > closest.pos || pos == -1) ? closest : { + pos: pos, + found: artefacts[i][0], + replace: artefacts[i][1] + }; + }, {pos: Infinity}); + + // Add the match to the list and roll the current position forward + matches.push(match); + start = match.pos + 1; + } + while (start < Infinity); + + // Replace each artefact match with a link to the artefact. + const pieces = []; + matches.reduce((pos, match, i) => { + pieces.push(log.slice(pos, match.pos)); + if (match.replace) { + pieces.push(); + pos = match.pos + match.found.length; + return pos; + } + }, 0); + + // Return the list of log elements inside a
+        return (
+            
{pieces}
+ ) + } + + /** + * The TestRun shows the details for a single test run, and its logs + */ + function TestRun(props) { + const {run, test, logs, session} = props; + const datestring = CloudPebble.Utils.FormatDatetime(session.date_added); + const is_live_log = (!!logs && !run.logs); + const run_completed = run.date_completed ? CloudPebble.Utils.FormatDatetime(run.date_completed) : null; + return ( +
+ + + + + + + + + + + + + + + {run_completed && + + + } + + + + +
{gettext('Test')} + {API.Route.navigate('/tests', test.id)}}>{test.name} + on {run.platform} +
{gettext('Test Kind')} + +
{gettext('Start Date')} + {API.Route.navigate('/sessions', session.id)}}>{datestring} +
{gettext('Completion date')}{run_completed}
{gettext('Result')}
+
+ + {!!run.logs && {gettext('Download logs')}} + {(!run.logs && !is_live_log) && {gettext('No logs to show')}} + {is_live_log && Test in progress} +
+ ); + } + + /** + * A simple animated loading bar div + */ + function Loading() { + return ( +
+
+
); + } + + /** + * Renders a button which starts all tests in batch mode when clicked. + */ + const BatchRunButton = React.createClass({ + getInitialState: function() { + return {batch_waiting: false}; + }, + onClick: function() { + this.setState({batch_waiting: true}); + API.Sessions.new().finally(() => { + this.setState({batch_waiting: false}); + }); + }, + render: function() { + return () + } + }); + + /** + * Button to download all the tests as a zip + */ + function TestDownloadButton(props) { + return ({gettext('Download tests as zip')}); + } + + /** + * The Dashboard shows the list of all tests and jobs + */ + function Dashboard(props) { + const top_page = props.route[0] ? props.route[0].page : null; + const top_id = props.route[0] ? props.route[0].id : null; + return ( +
+ {props.tests.length > 0 && ( + + + + + )} + +

{gettext('Tests')}

+ +
+ +

{gettext('Jobs')}

+ +
+
+ ); + } + + /** Error renders a big scary red error Well with an 'X' button for closing */ + function Error(props) { + return ( + + +

{interpolate(gettext("Error trying to fetch %s: %s"), [props.errorFor, props.text])}

+
+ ) + } + + function BackButton(props) { + const mapping = { + logs: gettext('Run'), + tests: gettext('Test'), + sessions: gettext('Session') + }; + const route = props.route; + let page, id, text; + if (route.length > 1) { + page = mapping[route[route.length - 2].page]; + id = route[route.length - 2].id; + text = interpolate(gettext('← Back to %s %s'), [page, id]); + } + else { + text = gettext('← Back'); + } + return ( + {API.Route.up()}}> + {text} + + ) + } + + /** + * TestPage renders a different page depending on the current route. + */ + function TestPage(props) { + const route = props.route; + let session, test, run, log; + if (route.length == 0) return null; + const page = route[route.length - 1].page; + const id = route[route.length - 1].id; + switch (page) { + case 'sessions': + session = _.findWhere(props.sessions, {id: id}); + return (); + case 'tests': + test = _.findWhere(props.tests, {id: id}); + return (); + case 'loading': + return (); + case 'logs': + run = _.findWhere(props.runs, {id: id}); + test = _.findWhere(props.tests, {id: run.test.id}); + session = _.findWhere(props.sessions, {id: run.session_id}); + log = _.findWhere(props.logs, {id: id}); + return (); + } + } + + /** + * The TestManager is parent UI for everything, rendering the dashboard on the left, detail page on the right, + * "run tests" button and any errors. + */ + function TestManager(props) { + const route = props.route; + const is_log = (route.length > 0 && (route[route.length - 1].page == 'logs')); + const className = 'testmanager-page-' + (route.length == 0 ? 'dashboard' : 'detail'); + + // This logic is used to always render test logs across the full screen width. + const leftclass = is_log ? 'hide' : 'leftside'; + const rightclass = is_log ? '' : 'rightside'; + + return ( +
+ {!!props.error && } +
+ +
+
+ {route.length > 0 && + + + + + } +
+
+ ); + } + + /** + * Renders the text and "create a test" button displayed when the user has no tests or test runs. + */ + const NoTestsDisplay = function(props) { + const createTest = function() { + CloudPebble.Editor.CreateTest(); + }; + return ( + +
+

The Test Manager lets you browse results and logs for your project's automated tests.

+

+ + to get started +

+
+
+ ) + }; + + /** + * The TestManagerContainer listens to data changes and passes them to the UI. + */ + const TestManagerContainer = React.createClass({ + getInitialState: function() { + return _.extend({'error': null}, API.Route.initial(), API.Sessions.initial(), API.Tests.initial(), API.Runs.initial()); + }, + componentDidMount: function() { + // Listen to all stores + this.listener = _.extend({}, Backbone.Events); + _.each([API.Route, API.Tests, API.Sessions, API.Runs, API.Logs], (store) => { + this.listener.listenTo(store, 'changed', (data) => { this.setState(data) }); + this.listener.listenTo(store, 'error', (error) => { this.setState({'error': error}) }); + }); + }, + closeError: function() { + this.setState({'error': null}); + }, + componentWillUnmount: function() { + this.listener.stopListening(); + }, + render: function() { + if (this.state.tests.length == 0 && this.state.sessions.length == 0) { + return () + } + else { + return () + } + + } + }); + + return { + render: function(element, props) { + const elm = React.createElement(TestManagerContainer, props); + ReactDOM.render(elm, element); + }, + refresh: function() { + _.each(API.Sessions, API.Tests, API.Runs, (api) => { + api.refresh(); + }); + } + }; +}); diff --git a/ide/static/ide/js/monkeyscript.js b/ide/static/ide/js/monkeyscript.js new file mode 100644 index 00000000..708c4f34 --- /dev/null +++ b/ide/static/ide/js/monkeyscript.js @@ -0,0 +1,305 @@ +/** + * Created by katharine on 7/23/15. + */ + +CloudPebble.MonkeyScript = (function () { + var DO_COMMANDS = ['airplane_mode', 'charging', 'launch_app', 'long_click', 'multi_click', + 'reset', 'screenshot', 'set_time', 'single_click', 'wait']; + + var EXPECT_COMMANDS = ['captured_output', 'equal', 'not_equal', 'reset_output', 'screenshot']; + + var KEYWORDS = ['do', 'expect']; + + + function nextState(state, kind, nextkind) { + if (!_.isString(kind)) throw new Error("Invalid argument"); + state.kind = kind; + state.nextkind = (_.isUndefined(nextkind) ? null : nextkind); + } + + function pushSpace(state, nextkind) { + nextState(state, 'space', nextkind); + } + function pushSpaceOrComment(state, nextKind) { + nextState(state, 'space_or_comment', nextKind) + } + + function resetState(state) { + state.kind = 'keyword'; + state.nextkind = null; + state.keyword = null; + state.command = null; + } + + var make_mode = function (is_highlighter) { + // Return a CSS class or a list of suggestions, depending on the parser's mode. + var result = (is_highlighter ? 
function (kind, suggestions) { + return kind; + } : function (kind, suggestions) { + return suggestions; + }); + + return function () { + return { + startState: function () { + var state = {}; + resetState(state); + return state; + }, + token: function (stream, state) { + if (stream.sol()) { + resetState(state); + } + + if (state.kind == 'keyword') { + // Allow infinite whitespace at the start of lines + if (stream.eatSpace()) { + return; + } + + // Allow fully commented lines + if (stream.match('#')) { + stream.skipToEnd(); + return result('comment'); + } + + // Keywords only contain letters + var keyword = stream.match(/^[a-z]+/i); + if (!keyword) { + stream.skipToEnd(); + return result('error'); + } + + if (_.contains(KEYWORDS, keyword[0])) { + // Commands come exactly one space after a valid keyword + state.keyword = keyword[0]; + pushSpace(state, 'command'); + return result('keyword'); + } + else if (stream.peek() == ' ') { + // Invalid keywords are errors if they are followed by spaces + stream.skipToEnd(); + return result('error'); + } + else { + // If they are not followed by spaces, they are just unfinished + stream.skipToEnd(); + return result('keyword', KEYWORDS); + } + } + if (state.kind == 'space_or_comment') { + // The user may finish with a comment (and perhaps no arguments) + if (stream.match(/\s*(#.*)?$/)) { + resetState(state); + return result('comment'); + } + else { + pushSpace(state, state.nextkind) + } + } + if (state.kind == 'space') { + // Match a single space + if (!stream.match(/ /)) { + stream.skipToEnd(); + return result('error'); + } + // Then, match no more spaces + nextState(state, 'nospace', state.nextkind); + return null; + } + if (state.kind == 'nospace') { + // Any spaces matched after 'space' state are errors + if (stream.match(/ +/)) { + stream.skipToEnd(); + return result('error'); + } + nextState(state, state.nextkind); + return null; + } + if (state.kind == 'command') { + // Commands are text with underscores + var command = stream.match(/^[a-z_]+/i); + var suggestions = []; + if (!command) { + stream.skipToEnd(); + return result('error'); + } + // The available commands depend on whether the keyword was 'do' or 'expect' + command = command[0]; + if (state.keyword == 'do') { + suggestions = DO_COMMANDS; + } else if (state.keyword == 'expect') { + suggestions = EXPECT_COMMANDS; + } + + if (_.contains(suggestions, command)) { + // Move into arguments if the command is valid + if (stream.eol()) { + resetState(state); + } else { + state.command = command; + pushSpaceOrComment(state, 'argument'); + } + return result('variable'); + } + else if (stream.peek() == ' ') { + // Invalid commands are errors if followed by spaces + stream.skipToEnd(); + resetState(state); + return result('error', suggestions); + } + else { + // If they are not followed by spaces, they are just unfinished + stream.skipToEnd(); + return result('variable', suggestions); + } + } + if (state.kind == 'argument') { + // Or there may be no arguments at all + if (stream.eol()) { + resetState(state); + return null; + } + + // Arguments might start with quotes. These are quoted arguments. + var arg, another, content; + var quote = stream.match(/['"]/); + if (quote) { + quote = quote[0]; + // Match a right quote after some arbitrary text. + arg = stream.match(new RegExp("((\\\\"+ quote + "|[^" + quote + "])*)(" + quote + ")?")); + another = (!!arg && arg[3]); + content = (!!arg ? 
arg[1] : null); + } + else { + // If the argument is not quoted it may not contain quotes or spaces + arg = stream.match(/[^'"\s]+/); + another = (!!arg); + content = (!!arg ? arg[0] : null); + } + if (!quote && !content) { + // If no quotes or content was matched, the input was invalid. + nextState(state, 'end'); + return result('error'); + } + if (state.command == 'screenshot') { + // Screenshot commands are a special case. There is only one argument + // and it must look like a file name. + another = false; + var is_valid = (!!content.match(/^([.a-zA-Z0-9_-]+)$/i)); + nextState(state, 'end'); + return result(is_valid ? null : 'error', {command: 'screenshot'}); + } + if (another) { + // If there's no reason not to have more arguments, do so + pushSpaceOrComment(state, 'argument'); + } + else { + stream.skipToEnd(); + resetState(state); + } + return (!!quote ? result('string') : null); + } + if (state.kind == 'end') { + // 'End' comes after certain things e.g. screenshot names + // It may be a comment, or whitespace. + resetState(state); + stream.eatSpace(); + if (stream.match('#')) { + stream.skipToEnd(); + return result('comment'); + } + if (!stream.eol()) { + stream.skipToEnd(); + return result('error'); + } + return null; + } + }, + lineComment: '#' + } + }; + }; + + $(function () { + CodeMirror.defineMode('MonkeyScript', make_mode(true)); + CodeMirror.defineMode('MonkeyScript_autocomplete', make_mode(false)); + }); + + return { + request: function (endpoint, editor, cursor) { + // Get autocompletion suggestions for MonkeyScript. + if (endpoint == 'completions') { + cursor = cursor || editor.getCursor(); + // all_suggestions will contain the suggestions for the token at the cursor + var all_suggestions = []; + // pieces is an array of the parsed tokens + var pieces = []; + // Only parse the line up to the cursor. Since MonkeyScript is stateless, we only have to parse one line. + var line = editor.getRange({line: cursor.line, ch: 0}, cursor); + + // With the CodeMirror.runMode addon, run the monkeyscript parser to generate suggestions + // The last set of values from the parser gives us the current suggestions and search string. + CodeMirror.runMode(line, "MonkeyScript_autocomplete", function (text, token_suggestions) { + pieces.push(text); + all_suggestions = _.clone(token_suggestions) || []; + }); + var search_string = pieces[pieces.length - 1]; + + // If the suggestions value is an object, an external lookup is needed for autocomplete suggestions + if (!_.isArray(all_suggestions)) { + if (all_suggestions.command == 'screenshot') { + // Autocompletion for screenshots gets them from the editor's screenshot pane + all_suggestions = _.map(editor.screenshot_pane.getScreenshots(), function (screenshot) { + return screenshot.name + ".png"; + }); + } + else { + all_suggestions = []; + } + } + + var keys; + // Don't bother searching if the typed command is longer than the longest possible command. + var max_suggestion_length = _.max(all_suggestions, function (x) { + return x.length + }).length; + if (search_string.length > max_suggestion_length) { + keys = []; + } + else { + // Fuse sorts the suggestions based on the closest match to the currently typed token and returns an array of indices. + // If these are too high, most of the keywords get matched and reordered with each new letter, which is irritating, + // so low values are used to keep the number of matches low, while still tolerating typing errors. 
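+                    // (illustrative) the intent is that a small typo such as 'screnshot' still matches
+                    // 'screenshot'; the returned keys are treated as indices into all_suggestions.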
+ keys = (new Fuse(all_suggestions, { + distance: 3, + threshold: 0.3 + })).search(search_string || ""); + } + // Build the sorted_suggestions array using the indices + var sorted_suggestions = []; + _.each(keys, function (key) { + sorted_suggestions.push(all_suggestions[key]); + all_suggestions[key] = null; + }); + // Then append all unmatched suggestions, so the user can see all options + _.each(all_suggestions, function (suggestion) { + if (suggestion) sorted_suggestions.push(suggestion); + }); + + // Compute the start column by summing the length of all words on the line up to the cursor, except the final one. + var start_column = pieces.slice(0, pieces.length - 1).join("").length + 1; + + // Finally, return the suggestions in the format expected by autocomplete.js + var final_suggestions = _.map(sorted_suggestions, function (suggestion) { + return {insertion_text: suggestion} + }); + + return Promise.resolve({ + completions: final_suggestions, + start_column: start_column + }); + } + } + } +})(); diff --git a/ide/static/ide/js/new_owner.js b/ide/static/ide/js/new_owner.js index 31357751..92d84317 100644 --- a/ide/static/ide/js/new_owner.js +++ b/ide/static/ide/js/new_owner.js @@ -23,7 +23,7 @@ dialog.find('p').text("This isn't going too well…"); } Ajax.Post('/ide/transition/export', {}).then(function(data) { - return CloudPebble.PollTask(data.task_id, {on_bad_request: show_warning}); + return Ajax.PollTask(data.task_id, {on_bad_request: show_warning}); }).then(function(result) { dialog.find('.progress').removeClass('progress-striped').addClass('progress-success'); dialog.find('p').html("Download"); diff --git a/ide/static/ide/js/pebble.js b/ide/static/ide/js/pebble.js index dedaa5b7..ebdb3db8 100644 --- a/ide/static/ide/js/pebble.js +++ b/ide/static/ide/js/pebble.js @@ -12,6 +12,12 @@ var ConnectionType = { QemuDiorite: 34 }; +var QEMUConnectionTypes = { + 'aplite': ConnectionType.QemuAplite, + 'basalt': ConnectionType.QemuBasalt, + 'chalk': ConnectionType.QemuChalk +}; + var ConnectionPlatformNames = { 2: 'aplite', 6: 'aplite', @@ -94,6 +100,10 @@ var SharedPebble = new (function() { }); } + this.getCurrentEmulator = function() { + return mEmulator; + }; + this.getEmulator = function(kind) { if(mEmulator != null) { if((kind & mConnectionType) == kind) { diff --git a/ide/static/ide/js/qemu.js b/ide/static/ide/js/qemu.js index 8dcda074..00c579fa 100644 --- a/ide/static/ide/js/qemu.js +++ b/ide/static/ide/js/qemu.js @@ -294,6 +294,27 @@ }); }; + this.runTest = function(project_id, test_id, platform, update) { + var data ={ + emu: mInstanceID, + token: mToken, + platform: platform, + host: mHost + }; + if (update) { + data['update'] = true; + } + return Ajax.Ajax({ + method: 'POST', + url: '/ide/project/'+project_id+'/tests/'+test_id+'/run_qemu', + data: data + }).then(function(result) { + return Ajax.PollTask(result.task_id).then(function() { + return result; + }); + }); + }; + this.getWebsocketURL = function() { return (mSecure ? 
'wss' : 'ws') + '://' + mHost + ':' + mAPIPort + '/qemu/' + mInstanceID + '/ws/phone'; }; diff --git a/ide/static/ide/js/settings.js b/ide/static/ide/js/settings.js index 42b9e15f..1cb8c26b 100644 --- a/ide/static/ide/js/settings.js +++ b/ide/static/ide/js/settings.js @@ -402,14 +402,6 @@ CloudPebble.Settings = (function() { show_settings_pane(); }, Init: function() { - var commands = {}; - commands[gettext("Add New Resource")] = CloudPebble.Resources.Create; - commands[gettext("Compilation")] = CloudPebble.Compile.Show; - commands[gettext("Settings")] = CloudPebble.Settings.Show; - commands["GitHub"] = CloudPebble.GitHub.Show; - commands[gettext("Timeline")] = CloudPebble.Timeline.show; - commands[gettext("Add New Source File")] = CloudPebble.Editor.Create; - CloudPebble.FuzzyPrompt.AddCommands(commands); settings_template = $('#settings-pane-template').remove().removeClass('hide'); }, AddResource: function(resource) { diff --git a/ide/static/ide/js/sidebar.js b/ide/static/ide/js/sidebar.js index ad85694f..95d178ac 100644 --- a/ide/static/ide/js/sidebar.js +++ b/ide/static/ide/js/sidebar.js @@ -145,6 +145,16 @@ CloudPebble.Sidebar = (function() { var section = get_source_section(file.target); return render_file_link("sidebar-pane-source-" + file.id, file.name, on_click).appendTo(section); }, + AddTestFile: function(file, on_click) { + var end = $('#end-test-files'); + var link = $(''); + link.text(file.name + ' '); + link.click(on_click); + var li = $(' {% endif %} + {% if project.owner.is_testbench_user %} + + {% endif %}
+
+
@@ -144,7 +139,38 @@

{% block modals %} -
+ +