diff --git a/bin/load-mocks b/bin/load-mocks index 6ddab5ea..c9e09f42 100755 --- a/bin/load-mocks +++ b/bin/load-mocks @@ -14,7 +14,8 @@ from datetime import datetime, timedelta from uuid import uuid4 from freight.config import db -from freight.models import App, LogChunk, Task, TaskStatus, Repository, User +from freight.models import (App, LogChunk, Task, TaskConfig, TaskConfigType, TaskStatus, + Repository, User) from freight.testutils.fixtures import Fixtures fixtures = Fixtures() @@ -47,6 +48,20 @@ def main(): name='Freight', ) + deploy_config = TaskConfig.query.filter( + TaskConfig.app_id == app.id, + TaskConfig.type == TaskConfigType.deploy, + ).first() + if not deploy_config: + deploy_config = fixtures.create_taskconfig(app, type=TaskConfigType.deploy) + + build_config = TaskConfig.query.filter( + TaskConfig.app_id == app.id, + TaskConfig.type == TaskConfigType.build, + ).first() + if not build_config: + build_config = fixtures.create_taskconfig(app, type=TaskConfigType.build) + Task.query.filter( Task.status.in_([TaskStatus.in_progress, TaskStatus.pending]), ).update({ @@ -59,7 +74,6 @@ def main(): user=user, sha=uuid4().hex, ref='master', - environment='production', status=TaskStatus.finished, date_started=datetime.utcnow() - timedelta(minutes=3), date_finished=datetime.utcnow(), @@ -88,16 +102,28 @@ def main(): db.session.commit() - task = fixtures.create_task( + sha = uuid4().hex + task1 = fixtures.create_task( app=app, user=user, - sha=uuid4().hex, + sha=sha, + ref='master', + status=TaskStatus.in_progress, + date_started=datetime.utcnow(), + ) + + task2 = fixtures.create_task( + task_type='build', + app=app, + user=user, + sha=sha, ref='master', - environment='production', status=TaskStatus.in_progress, date_started=datetime.utcnow(), ) + deploy = fixtures.create_deploy(task1, app) + build = fixtures.create_build(task2, app) if __name__ == '__main__': diff --git a/freight/api/bases/__init__.py b/freight/api/bases/__init__.py new file mode 100644 index 
00000000..e69de29b diff --git a/freight/api/bases/details.py b/freight/api/bases/details.py new file mode 100644 index 00000000..a4114fc9 --- /dev/null +++ b/freight/api/bases/details.py @@ -0,0 +1,82 @@ +from __future__ import absolute_import + +from flask_restful import reqparse + +from freight.api.base import ApiView +from freight.api.serializer import serialize +from freight.config import db, redis +from freight.models import App, Task, TaskStatus +from freight.notifiers import NotifierEvent +from freight.notifiers.utils import send_task_notifications +from freight.utils.redis import lock + + +class BaseMixin(object): + def _get_obj(self, app=None, env=None, number=None, obj_model=None, **kwargs): + obj_id = [v for k, v in kwargs.iteritems() if k.endswith('_id')] + if obj_id: + return obj_model.query.get(obj_id[0]) + try: + app = App.query.filter(App.name == app)[0] + except IndexError: + return None + try: + # HACK(jtcunning): Only difference in build and deploy. + try: + return obj_model.query.filter( + obj_model.app_id == app.id, + obj_model.environment == env, + obj_model.number == number, + )[0] + except AttributeError: + return obj_model.query.filter( + obj_model.app_id == app.id, + obj_model.number == number, + )[0] + except IndexError: + return None + + +class BaseDetailsApiView(ApiView, BaseMixin): + def __init__(self): + raise NotImplementedError + # self.obj_model = Task + + def get(self, **kwargs): + """ + Retrieve a task. 
+ """ + kwargs['obj_model'] = self.obj_model + obj = self._get_obj(**kwargs) + if obj is None: + return self.error('Invalid obj', name='invalid_resource', status_code=404) + + return self.respond(serialize(obj)) + + put_parser = reqparse.RequestParser() + put_parser.add_argument('status', choices=('cancelled',)) + + def put(self, **kwargs): + kwargs['obj_model'] = self.obj_model + obj = self._get_obj(**kwargs) + if obj is None: + return self.error('Invalid obj', name='invalid_resource', status_code=404) + + with lock(redis, '{}:{}'.format(type(obj).__name__, obj.id), timeout=5): + # we have to refetch in order to ensure lock state changes + obj = self.obj_model.query.get(obj.id) + task = Task.query.get(obj.task_id) + args = self.put_parser.parse_args() + if args.status: + assert task.status in (TaskStatus.pending, TaskStatus.in_progress) + assert args.status == 'cancelled' + did_cancel = task.status == TaskStatus.pending + task.status = TaskStatus.cancelled + + db.session.add(task) + db.session.commit() + + if args.status and did_cancel: + send_task_notifications(task, NotifierEvent.TASK_FINISHED) + + return self.respond(serialize(obj)) diff --git a/freight/api/bases/index.py b/freight/api/bases/index.py new file mode 100644 index 00000000..370400f0 --- /dev/null +++ b/freight/api/bases/index.py @@ -0,0 +1,219 @@ +from __future__ import absolute_import + +import json + +from flask_restful import reqparse, inputs + +from freight import checks, vcs +from freight.api.base import ApiView +from freight.api.serializer import serialize +from freight.config import db, redis +from freight.exceptions import CheckError, CheckPending +from freight.models import ( + App, Repository, Task, TaskStatus, User, + TaskConfig, TaskConfigType, +) +from freight.notifiers import NotifierEvent +from freight.notifiers.utils import send_task_notifications +from freight.utils.auth import get_current_user +from freight.utils.redis import lock +from freight.utils.workspace import Workspace + + 
+class BaseIndexApiView(ApiView): + """ + All of the helper functions that deploy and build share. + """ + def __init__(self): + # self.obj_model = Dummy + # self.sequence_model = DummySequence + + raise NotImplementedError + + def _get_internal_ref(self, app, env, ref): + # find the most recent green status for this app + if ref == ':current': + return app.get_current_sha(env) + + # the previous stable ref (before current) + if ref == ':previous': + current_sha = app.get_current_sha(env) + + if not current_sha: + return + + return app.get_previous_sha(env, current_sha=current_sha) + raise ValueError('Unknown ref: {}'.format(ref)) + + get_parser = reqparse.RequestParser() + get_parser.add_argument('app', location='args') + get_parser.add_argument('user', location='args') + get_parser.add_argument('env', location='args') + get_parser.add_argument('ref', location='args') + get_parser.add_argument('status', location='args', action='append') + + def get(self): + """ + Retrieve a list of objects. + + If any parameters are invalid the result will simply be an empty list. 
+ """ + args = self.get_parser.parse_args() + + qs_filters = [] + + if args.app: + app = App.query.filter(App.name == args.app).first() + if not app: + return self.respond([]) + qs_filters.append(self.obj_model.app_id == app.id) + + if args.user: + user = User.query.filter(User.name == args.user).first() + if not user: + return self.respond([]) + qs_filters.append(Task.user_id == user.id) + + if args.env: + qs_filters.append(self.obj_model.environment == args.env) + + if args.ref: + qs_filters.append(Task.ref == args.ref) + + if args.status: + status_list = map(TaskStatus.label_to_id, args.status) + qs_filters.append(Task.status.in_(status_list)) + + obj_qs = self.obj_model.query.filter(*qs_filters).order_by(self.obj_model.id.desc()) + + return self.paginate(obj_qs, on_results=serialize) + + post_parser = reqparse.RequestParser() + post_parser.add_argument('app', required=True) + post_parser.add_argument('params', type=json.loads) + post_parser.add_argument('user') + post_parser.add_argument('env', default='production') + post_parser.add_argument('ref') + post_parser.add_argument('force', default=False, type=inputs.boolean) + + def post(self): + """ + Given any constraints for a task are within acceptable bounds, create + a new task and enqueue it. 
+ """ + args = self.post_parser.parse_args() + + user = get_current_user() + if not user: + username = args.user + if not username: + return self.error('Missing required argument "user"', status_code=400) + + with lock(redis, 'user:create:{}'.format(username), timeout=5): + # TODO(dcramer): this needs to be a get_or_create pattern and + # ideally moved outside of the lock + user = User.query.filter(User.name == username).first() + if not user: + user = User(name=username) + db.session.add(user) + db.session.flush() + elif args.user: + return self.error('Cannot specify user when using session authentication.', status_code=400) + + app = App.query.filter(App.name == args.app).first() + if not app: + return self.error('Invalid app', name='invalid_resource', status_code=404) + + obj_config = TaskConfig.query.filter( + TaskConfig.app_id == app.id, + # TODO(jtcunning) Ehhhhhhhhhhh. + TaskConfig.type == getattr(TaskConfigType, self.obj_model.__name__.lower()), + ).first() + if not obj_config: + return self.error('Missing config', name='missing_conf', status_code=404) + + params = None + + repo = Repository.query.get(app.repository_id) + + workspace = Workspace( + path=repo.get_path(), + ) + + vcs_backend = vcs.get( + repo.vcs, + url=repo.url, + workspace=workspace, + ) + + with lock(redis, 'repo:update:{}'.format(repo.id)): + vcs_backend.clone_or_update() + + ref = args.ref or app.get_default_ref(args.env) + + # look for our special refs (prefixed via a colon) + # TODO(dcramer): this should be supported outside of just this endpoint + if ref.startswith(':'): + sha = self._get_internal_ref(app, args.env, ref) + if not sha: + return self.error('Invalid ref', name='invalid_ref', status_code=400) + else: + try: + sha = vcs_backend.get_sha(ref) + except vcs.UnknownRevision: + return self.error('Invalid ref', name='invalid_ref', status_code=400) + + if args.params is not None: + params = args.params + + if not args.force: + for check_config in obj_config.checks: + check = 
checks.get(check_config['type']) + try: + check.check(app, sha, check_config['config']) + except CheckPending: + pass + except CheckError as e: + return self.error( + message=unicode(e), + name='check_failed', + ) + + with lock(redis, '{}:create:{}'.format(self.obj_model.__name__, app.id), timeout=5): + task = Task( + app_id=app.id, + # TODO(dcramer): ref should default based on app config + ref=ref, + sha=sha, + params=params, + status=TaskStatus.pending, + user_id=user.id, + provider=obj_config.provider, + data={ + 'force': args.force, + 'provider_config': obj_config.provider_config, + 'notifiers': obj_config.notifiers, + 'checks': obj_config.checks, + }, + ) + db.session.add(task) + db.session.flush() + db.session.refresh(task) + kwargs = { + 'task_id': task.id, + 'app_id': app.id, + } + + if hasattr(self.obj_model, 'environment'): + kwargs['number'] = self.sequence_model.get_clause(app.id, args.env) + kwargs['environment'] = args.env + else: + kwargs['number'] = self.sequence_model.get_clause(app.id) + + obj = self.obj_model(**kwargs) + db.session.add(obj) + db.session.commit() + + send_task_notifications(task, NotifierEvent.TASK_QUEUED) + + return self.respond(serialize(obj), status_code=201) diff --git a/freight/api/bases/log.py b/freight/api/bases/log.py new file mode 100644 index 00000000..2d195d89 --- /dev/null +++ b/freight/api/bases/log.py @@ -0,0 +1,78 @@ +from __future__ import absolute_import + +from flask_restful import reqparse + +from freight.api.base import ApiView +from freight.api.bases.details import BaseMixin +from freight.config import db +from freight.models import LogChunk + + +class BaseLogApiView(ApiView, BaseMixin): + get_parser = reqparse.RequestParser() + get_parser.add_argument('offset', location='args', type=int, default=0) + get_parser.add_argument('limit', location='args', type=int) + + def __init__(self): + raise NotImplementedError + # self.obj_model = Task + + def get(self, **kwargs): + """ + Retrieve a log. 
+ """ + kwargs['obj_model'] = self.obj_model + obj = self._get_obj(**kwargs) + if obj is None: + return self.error( + 'Invalid {}'.format(type(obj)), + name='invalid_resource', + status_code=404 + ) + + args = self.get_parser.parse_args() + + queryset = db.session.query( + LogChunk.text, LogChunk.offset, LogChunk.size + ).filter( + LogChunk.task_id == obj.task_id, + ).order_by(LogChunk.offset.asc()) + + if args.offset == -1: + # starting from the end so we need to know total size + tail = db.session.query(LogChunk.offset + LogChunk.size).filter( + LogChunk.task_id == obj.task_id, + ).order_by(LogChunk.offset.desc()).limit(1).scalar() + + if tail is None: + logchunks = [] + else: + if args.limit: + queryset = queryset.filter( + (LogChunk.offset + LogChunk.size) >= max(tail - args.limit + 1, 0), + ) + else: + if args.offset: + queryset = queryset.filter( + LogChunk.offset >= args.offset, + ) + if args.limit: + queryset = queryset.filter( + LogChunk.offset < args.offset + args.limit, + ) + + logchunks = list(queryset) + + if logchunks: + next_offset = logchunks[-1].offset + logchunks[-1].size + else: + next_offset = args.offset + + links = [self.build_cursor_link('next', next_offset)] + + context = { + 'text': ''.join(l.text for l in logchunks), + 'nextOffset': next_offset, + } + + return self.respond(context, links=links) diff --git a/freight/api/build_details.py b/freight/api/build_details.py new file mode 100644 index 00000000..5d86dd96 --- /dev/null +++ b/freight/api/build_details.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from freight.api.bases.details import BaseDetailsApiView +from freight.models import Build + + +class BuildDetailsApiView(BaseDetailsApiView): + def __init__(self): + self.obj_model = Build diff --git a/freight/api/build_index.py b/freight/api/build_index.py new file mode 100644 index 00000000..a22358c8 --- /dev/null +++ b/freight/api/build_index.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import + +from 
freight.api.bases.index import BaseIndexApiView +from freight.models import Build, BuildSequence + + +class BuildIndexApiView(BaseIndexApiView): + def __init__(self): + self.obj_model = Build + self.sequence_model = BuildSequence diff --git a/freight/api/build_log.py b/freight/api/build_log.py new file mode 100644 index 00000000..245a2d37 --- /dev/null +++ b/freight/api/build_log.py @@ -0,0 +1,9 @@ +from __future__ import absolute_import + +from freight.api.bases.log import BaseLogApiView +from freight.models import Build + + +class BuildLogApiView(BaseLogApiView): + def __init__(self): + self.obj_model = Build diff --git a/freight/api/deploy_details.py b/freight/api/deploy_details.py index cd88bb2a..f35384f0 100644 --- a/freight/api/deploy_details.py +++ b/freight/api/deploy_details.py @@ -1,68 +1,9 @@ from __future__ import absolute_import -from flask_restful import reqparse +from freight.api.bases.details import BaseDetailsApiView +from freight.models import Deploy -from freight.api.base import ApiView -from freight.api.serializer import serialize -from freight.config import db, redis -from freight.models import App, Task, Deploy, TaskStatus -from freight.notifiers import NotifierEvent -from freight.notifiers.utils import send_task_notifications -from freight.utils.redis import lock - -class DeployMixin(object): - def _get_deploy(self, app=None, env=None, number=None, deploy_id=None): - if deploy_id: - return Deploy.query.get(deploy_id) - try: - app = App.query.filter(App.name == app)[0] - except IndexError: - return None - try: - return Deploy.query.filter( - Deploy.app_id == app.id, - Deploy.environment == env, - Deploy.number == number, - )[0] - except IndexError: - return None - - -class DeployDetailsApiView(ApiView, DeployMixin): - def get(self, **kwargs): - """ - Retrive a task. 
- """ - deploy = self._get_deploy(**kwargs) - if deploy is None: - return self.error('Invalid deploy', name='invalid_resource', status_code=404) - - return self.respond(serialize(deploy)) - - put_parser = reqparse.RequestParser() - put_parser.add_argument('status', choices=('cancelled',)) - - def put(self, **kwargs): - deploy = self._get_deploy(**kwargs) - if deploy is None: - return self.error('Invalid deploy', name='invalid_resource', status_code=404) - - with lock(redis, 'deploy:{}'.format(deploy.id), timeout=5): - # we have to refetch in order to ensure lock state changes - deploy = Deploy.query.get(deploy.id) - task = Task.query.get(deploy.task_id) - args = self.put_parser.parse_args() - if args.status: - assert task.status in (TaskStatus.pending, TaskStatus.in_progress) - assert args.status == 'cancelled' - did_cancel = task.status == TaskStatus.pending - task.status = TaskStatus.cancelled - - db.session.add(task) - db.session.commit() - - if args.status and did_cancel: - send_task_notifications(task, NotifierEvent.TASK_FINISHED) - - return self.respond(serialize(deploy)) +class DeployDetailsApiView(BaseDetailsApiView): + def __init__(self): + self.obj_model = Deploy diff --git a/freight/api/deploy_index.py b/freight/api/deploy_index.py index 540f76f2..f786f679 100644 --- a/freight/api/deploy_index.py +++ b/freight/api/deploy_index.py @@ -1,204 +1,10 @@ from __future__ import absolute_import -import json +from freight.api.bases.index import BaseIndexApiView +from freight.models import Deploy, DeploySequence -from flask_restful import reqparse, inputs -from freight import checks, vcs -from freight.api.base import ApiView -from freight.api.serializer import serialize -from freight.config import db, redis -from freight.exceptions import CheckError, CheckPending -from freight.models import ( - App, Repository, Task, Deploy, DeploySequence, TaskStatus, User, - TaskConfig, TaskConfigType, -) -from freight.notifiers import NotifierEvent -from freight.notifiers.utils 
import send_task_notifications -from freight.utils.auth import get_current_user -from freight.utils.redis import lock -from freight.utils.workspace import Workspace - - -class DeployIndexApiView(ApiView): - def _get_internal_ref(self, app, env, ref): - # find the most recent green deploy for this app - if ref == ':current': - return app.get_current_sha(env) - - # the previous stable ref (before current) - if ref == ':previous': - current_sha = app.get_current_sha(env) - - if not current_sha: - return - - return app.get_previous_sha(env, current_sha=current_sha) - raise ValueError('Unknown ref: {}'.format(ref)) - - get_parser = reqparse.RequestParser() - get_parser.add_argument('app', location='args') - get_parser.add_argument('user', location='args') - get_parser.add_argument('env', location='args') - get_parser.add_argument('ref', location='args') - get_parser.add_argument('status', location='args', action='append') - - def get(self): - """ - Retrieve a list of deploys. - - If any parameters are invalid the result will simply be an empty list. 
- """ - args = self.get_parser.parse_args() - - qs_filters = [] - - if args.app: - app = App.query.filter(App.name == args.app).first() - if not app: - return self.respond([]) - qs_filters.append(Deploy.app_id == app.id) - - if args.user: - user = User.query.filter(User.name == args.user).first() - if not user: - return self.respond([]) - qs_filters.append(Task.user_id == user.id) - - if args.env: - qs_filters.append(Deploy.environment == args.env) - - if args.ref: - qs_filters.append(Task.ref == args.ref) - - if args.status: - status_list = map(TaskStatus.label_to_id, args.status) - qs_filters.append(Task.status.in_(status_list)) - - deploy_qs = Deploy.query.filter(*qs_filters).order_by(Deploy.id.desc()) - - return self.paginate(deploy_qs, on_results=serialize) - - post_parser = reqparse.RequestParser() - post_parser.add_argument('app', required=True) - post_parser.add_argument('params', type=json.loads) - post_parser.add_argument('user') - post_parser.add_argument('env', default='production') - post_parser.add_argument('ref') - post_parser.add_argument('force', default=False, type=inputs.boolean) - - def post(self): - """ - Given any constraints for a task are within acceptable bounds, create - a new task and enqueue it. 
- """ - args = self.post_parser.parse_args() - - user = get_current_user() - if not user: - username = args.user - if not username: - return self.error('Missing required argument "user"', status_code=400) - - with lock(redis, 'user:create:{}'.format(username), timeout=5): - # TODO(dcramer): this needs to be a get_or_create pattern and - # ideally moved outside of the lock - user = User.query.filter(User.name == username).first() - if not user: - user = User(name=username) - db.session.add(user) - db.session.flush() - elif args.user: - return self.error('Cannot specify user when using session authentication.', status_code=400) - - app = App.query.filter(App.name == args.app).first() - if not app: - return self.error('Invalid app', name='invalid_resource', status_code=404) - - deploy_config = TaskConfig.query.filter( - TaskConfig.app_id == app.id, - TaskConfig.type == TaskConfigType.deploy, - ).first() - if not deploy_config: - return self.error('Missing deploy config', name='missing_conf', status_code=404) - - params = None - - repo = Repository.query.get(app.repository_id) - - workspace = Workspace( - path=repo.get_path(), - ) - - vcs_backend = vcs.get( - repo.vcs, - url=repo.url, - workspace=workspace, - ) - - with lock(redis, 'repo:update:{}'.format(repo.id)): - vcs_backend.clone_or_update() - - ref = args.ref or app.get_default_ref(args.env) - - # look for our special refs (prefixed via a colon) - # TODO(dcramer): this should be supported outside of just this endpoint - if ref.startswith(':'): - sha = self._get_internal_ref(app, args.env, ref) - if not sha: - return self.error('Invalid ref', name='invalid_ref', status_code=400) - else: - try: - sha = vcs_backend.get_sha(ref) - except vcs.UnknownRevision: - return self.error('Invalid ref', name='invalid_ref', status_code=400) - - if args.params is not None: - params = args.params - - if not args.force: - for check_config in deploy_config.checks: - check = checks.get(check_config['type']) - try: - check.check(app, 
sha, check_config['config']) - except CheckPending: - pass - except CheckError as e: - return self.error( - message=unicode(e), - name='check_failed', - ) - - with lock(redis, 'deploy:create:{}'.format(app.id), timeout=5): - task = Task( - app_id=app.id, - # TODO(dcramer): ref should default based on app config - ref=ref, - sha=sha, - params=params, - status=TaskStatus.pending, - user_id=user.id, - provider=deploy_config.provider, - data={ - 'force': args.force, - 'provider_config': deploy_config.provider_config, - 'notifiers': deploy_config.notifiers, - 'checks': deploy_config.checks, - }, - ) - db.session.add(task) - db.session.flush() - db.session.refresh(task) - - deploy = Deploy( - task_id=task.id, - app_id=app.id, - environment=args.env, - number=DeploySequence.get_clause(app.id, args.env), - ) - db.session.add(deploy) - db.session.commit() - - send_task_notifications(task, NotifierEvent.TASK_QUEUED) - - return self.respond(serialize(deploy), status_code=201) +class DeployIndexApiView(BaseIndexApiView): + def __init__(self): + self.obj_model = Deploy + self.sequence_model = DeploySequence diff --git a/freight/api/deploy_log.py b/freight/api/deploy_log.py index 87bf6206..e31a6634 100644 --- a/freight/api/deploy_log.py +++ b/freight/api/deploy_log.py @@ -1,70 +1,9 @@ from __future__ import absolute_import -from flask_restful import reqparse +from freight.api.bases.log import BaseLogApiView +from freight.models import Deploy -from freight.api.base import ApiView -from freight.config import db -from freight.models import LogChunk -from .deploy_details import DeployMixin - - -class DeployLogApiView(ApiView, DeployMixin): - get_parser = reqparse.RequestParser() - get_parser.add_argument('offset', location='args', type=int, default=0) - get_parser.add_argument('limit', location='args', type=int) - - def get(self, **kwargs): - """ - Retrieve deploy log. 
- """ - deploy = self._get_deploy(**kwargs) - if deploy is None: - return self.error('Invalid deploy', name='invalid_resource', status_code=404) - - args = self.get_parser.parse_args() - - queryset = db.session.query( - LogChunk.text, LogChunk.offset, LogChunk.size - ).filter( - LogChunk.task_id == deploy.task_id, - ).order_by(LogChunk.offset.asc()) - - if args.offset == -1: - # starting from the end so we need to know total size - tail = db.session.query(LogChunk.offset + LogChunk.size).filter( - LogChunk.task_id == deploy.task_id, - ).order_by(LogChunk.offset.desc()).limit(1).scalar() - - if tail is None: - logchunks = [] - else: - if args.limit: - queryset = queryset.filter( - (LogChunk.offset + LogChunk.size) >= max(tail - args.limit + 1, 0), - ) - else: - if args.offset: - queryset = queryset.filter( - LogChunk.offset >= args.offset, - ) - if args.limit: - queryset = queryset.filter( - LogChunk.offset < args.offset + args.limit, - ) - - logchunks = list(queryset) - - if logchunks: - next_offset = logchunks[-1].offset + logchunks[-1].size - else: - next_offset = args.offset - - links = [self.build_cursor_link('next', next_offset)] - - context = { - 'text': ''.join(l.text for l in logchunks), - 'nextOffset': next_offset, - } - - return self.respond(context, links=links) +class DeployLogApiView(BaseLogApiView): + def __init__(self): + self.obj_model = Deploy diff --git a/freight/api/serializer/__init__.py b/freight/api/serializer/__init__.py index 4bd8767e..d56ab2ac 100644 --- a/freight/api/serializer/__init__.py +++ b/freight/api/serializer/__init__.py @@ -7,3 +7,4 @@ from . import app # NOQA from . import deploy # NOQA from . import user # NOQA +from . 
import build # NOQA diff --git a/freight/api/serializer/build.py b/freight/api/serializer/build.py new file mode 100644 index 00000000..dbd760f8 --- /dev/null +++ b/freight/api/serializer/build.py @@ -0,0 +1,82 @@ +from __future__ import absolute_import + +from datetime import datetime, timedelta +from sqlalchemy.sql import func + +from freight.config import db +from freight.models import App, Task, Build, TaskStatus, User + +from .base import Serializer +from .manager import add, serialize + + +@add(Build) +class BuildSerializer(Serializer): + def get_attrs(self, item_list): + apps = { + a.id: a + for a in App.query.filter( + App.id.in_(set(i.app_id for i in item_list)), + ) + } + + tasks = { + t.id: t + for t in Task.query.filter( + Task.id.in_(set(i.task_id for i in item_list)), + ) + } + + estimatedDurations = dict(db.session.query( + Task.app_id, + func.avg(Task.date_finished - Task.date_started), + ).filter( + Task.date_finished > datetime.utcnow() - timedelta(days=7), + Task.status == TaskStatus.finished, + ).group_by(Task.app_id)) + + user_ids = set(tasks[d.task_id].user_id for d in item_list) + if user_ids: + user_map = { + u.id: u + for u in User.query.filter(User.id.in_(user_ids)) + } + else: + user_map = {} + + attrs = {} + for item in item_list: + estimatedDuration = estimatedDurations.get(tasks[item.task_id].app_id) + if estimatedDuration: + estimatedDuration = estimatedDuration.total_seconds() + + attrs[item] = { + 'app': apps[item.app_id], + 'task': tasks[item.task_id], + 'user': user_map.get(tasks[item.task_id].user_id), + 'estimatedDuration': estimatedDuration, + } + return attrs + + def serialize(self, item, attrs): + app = attrs['app'] + task = attrs['task'] + + return { + 'id': str(item.id), + 'name': '{}#{}'.format(app.name, item.number), + 'app': { + 'id': str(app.id), + 'name': app.name, + }, + 'user': serialize(attrs['user']), + 'sha': task.sha, + 'ref': task.ref, + 'number': item.number, + 'status': task.status_label, + 'duration': 
task.duration, + 'estimatedDuration': task.duration or attrs['estimatedDuration'], + 'dateCreated': self.format_datetime(task.date_created), + 'dateStarted': self.format_datetime(task.date_started), + 'dateFinished': self.format_datetime(task.date_finished), + } diff --git a/freight/checks/__init__.py b/freight/checks/__init__.py index ac24a4f5..9f6a863a 100644 --- a/freight/checks/__init__.py +++ b/freight/checks/__init__.py @@ -3,8 +3,10 @@ from .base import Check # NOQA from .manager import CheckManager from .github import GitHubContextCheck +from .build import BuildStatusCheck manager = CheckManager() manager.add('github', GitHubContextCheck) +manager.add('build', BuildStatusCheck) get = manager.get diff --git a/freight/checks/build.py b/freight/checks/build.py new file mode 100644 index 00000000..b8679686 --- /dev/null +++ b/freight/checks/build.py @@ -0,0 +1,34 @@ +from __future__ import absolute_import + +__all__ = ['BuildStatusCheck'] + +import logging + +from freight.exceptions import CheckFailed, CheckPending +from freight.models import Build, Task + +from .base import Check + +CHECK = 'Build for {} {} {}.' 
+logger = logging.getLogger(__name__) + + +class BuildStatusCheck(Check): + def check(self, app, sha, config): + build = Build.query.filter( + Build.app_id == app.id, + Task.sha == sha, + ).first() + + label = '{}:{}'.format(app.name, sha) + + if not build: + raise CheckFailed(CHECK.format(label, 'was', 'not found')) + + status = Task.query.get(build.task_id).status_label + + if status in ['pending', 'in_progress']: + raise CheckPending(CHECK.format(label, 'is', status)) + elif status != 'success': + raise CheckFailed(CHECK.format(label, 'was', status)) + logger.debug(CHECK.format(label, 'was', status)) diff --git a/freight/config.py b/freight/config.py index 3b9e4905..91ddffaa 100644 --- a/freight/config.py +++ b/freight/config.py @@ -163,6 +163,9 @@ def configure_api(app): from freight.api.deploy_details import DeployDetailsApiView from freight.api.deploy_index import DeployIndexApiView from freight.api.deploy_log import DeployLogApiView + from freight.api.build_details import BuildDetailsApiView + from freight.api.build_index import BuildIndexApiView + from freight.api.build_log import BuildLogApiView api.add_resource(AppIndexApiView, '/apps/') api.add_resource(AppDetailsApiView, '/apps//') @@ -170,6 +173,7 @@ def configure_api(app): api.add_resource(DeployIndexApiView, '/tasks/', endpoint='deploy-index-deprecated') api.add_resource(DeployIndexApiView, '/deploys/') + api.add_resource(BuildIndexApiView, '/builds/') # old style api.add_resource(DeployDetailsApiView, '/deploys//') @@ -185,6 +189,10 @@ def configure_api(app): api.add_resource(DeployLogApiView, '/deploys////log/', endpoint='deploy-log') + # builds + api.add_resource(BuildDetailsApiView, '/builds///') + api.add_resource(BuildLogApiView, '/builds///log/') + # catchall should be the last resource api.add_resource(ApiCatchall, '/') diff --git a/freight/models/app.py b/freight/models/app.py index 720eeee5..f2f9222f 100644 --- a/freight/models/app.py +++ b/freight/models/app.py @@ -49,6 +49,14 @@ def 
deploy_config(self): TaskConfig.type == TaskConfigType.deploy, ).first() + @property + def build_config(self): + from freight.models import TaskConfig, TaskConfigType + return TaskConfig.query.filter( + TaskConfig.app_id == self.id, + TaskConfig.type == TaskConfigType.build, + ).first() + def get_default_ref(self, env): data = self.environments.get(env) if not data: diff --git a/freight/models/build.py b/freight/models/build.py new file mode 100644 index 00000000..c21b6a81 --- /dev/null +++ b/freight/models/build.py @@ -0,0 +1,22 @@ +from __future__ import absolute_import + +from sqlalchemy import Column, ForeignKey, Integer +from sqlalchemy.schema import Index, UniqueConstraint + +from freight.config import db + + +class Build(db.Model): + __tablename__ = 'build' + __table_args__ = ( + Index('idx_build_task_id', 'task_id'), + Index('idx_build_app_id', 'app_id'), + UniqueConstraint('task_id', 'app_id', 'number', name='unq_build_number'), + ) + + id = Column(Integer, primary_key=True) + task_id = Column(Integer, ForeignKey('task.id', ondelete='CASCADE'), + nullable=False) + app_id = Column(Integer, ForeignKey('app.id', ondelete='CASCADE'), + nullable=False) + number = Column(Integer, nullable=False) diff --git a/freight/models/buildsequence.py b/freight/models/buildsequence.py new file mode 100644 index 00000000..93b285fa --- /dev/null +++ b/freight/models/buildsequence.py @@ -0,0 +1,18 @@ +from __future__ import absolute_import + +from sqlalchemy import Column, Integer +from sqlalchemy.sql import func, select + +from freight.config import db + + +class BuildSequence(db.Model): + __tablename__ = 'buildsequence' + + app_id = Column(Integer, nullable=False, primary_key=True) + value = Column(Integer, default=0, server_default='0', nullable=False, + primary_key=True) + + @classmethod + def get_clause(self, app_id): + return select([func.next_build_number(app_id)]) diff --git a/freight/models/taskconfig.py b/freight/models/taskconfig.py index 84beaa00..f14d72f0 100644 
--- a/freight/models/taskconfig.py +++ b/freight/models/taskconfig.py @@ -9,6 +9,7 @@ class TaskConfigType(object): deploy = 0 + build = 1 @classmethod def get_label(cls, status): @@ -21,6 +22,7 @@ def label_to_id(cls, label): TYPE_LABELS = { TaskConfigType.deploy: 'deploy', + TaskConfigType.build: 'build', } TYPE_LABELS_REV = { v: k for k, v in TYPE_LABELS.items() diff --git a/freight/testutils/fixtures.py b/freight/testutils/fixtures.py index c79aedec..89099342 100644 --- a/freight/testutils/fixtures.py +++ b/freight/testutils/fixtures.py @@ -7,7 +7,7 @@ from freight.config import db from freight.constants import PROJECT_ROOT from freight.models import ( - App, Repository, Task, DeploySequence, Deploy, TaskStatus, User, + App, Build, BuildSequence, Repository, Task, DeploySequence, Deploy, TaskStatus, User, TaskConfig, TaskConfigType, ) @@ -55,13 +55,18 @@ def create_app(self, repository, **kwargs): return app - def create_task(self, app, user, **kwargs): + def create_task(self, app, user, task_type='deploy', **kwargs): + if task_type == 'deploy': + kwargs.setdefault('data', {'provider_config': app.deploy_config.provider_config}) + kwargs.setdefault('params', {'task': 'deploy'}) + elif task_type == 'build': + kwargs.setdefault('data', {'provider_config': app.build_config.provider_config}) + kwargs.setdefault('params', {'task': 'build'}) + kwargs.setdefault('provider', 'shell') kwargs.setdefault('ref', 'master') kwargs.setdefault('sha', 'HEAD') kwargs.setdefault('status', TaskStatus.in_progress) - kwargs.setdefault('data', {'provider_config': app.deploy_config.provider_config}) - kwargs.setdefault('params', {'task': 'deploy'}) task = Task( app_id=app.id, @@ -87,6 +92,18 @@ def create_deploy(self, task, app, **kwargs): return deploy + def create_build(self, task, app, **kwargs): + build = Build( + task_id=task.id, + app_id=app.id, + number=BuildSequence.get_clause(app.id), + **kwargs + ) + db.session.add(build) + db.session.commit() + + return build + def 
create_repo(self, **kwargs): kwargs.setdefault('url', PROJECT_ROOT) kwargs.setdefault('vcs', 'git') diff --git a/migrations/versions/18ff76b912af_create_buildsequence_table.py b/migrations/versions/18ff76b912af_create_buildsequence_table.py new file mode 100644 index 00000000..1e1eadf0 --- /dev/null +++ b/migrations/versions/18ff76b912af_create_buildsequence_table.py @@ -0,0 +1,61 @@ +""" +Create buildsequence table. + +Revision ID: 18ff76b912af +Revises: 493c6c33fa27 +Create Date: 2016-05-05 14:47:50.243970 +""" + +# revision identifiers, used by Alembic. +revision = '18ff76b912af' +down_revision = '493c6c33fa27' + +from alembic import op +import sqlalchemy as sa + +NEXT_VALUE_FUNCTION = """ +CREATE OR REPLACE FUNCTION next_build_number(int) RETURNS int AS $$ +DECLARE + cur_app_id ALIAS FOR $1; + next_value int; +BEGIN + LOOP + UPDATE buildsequence SET value = value + 1 + WHERE app_id = cur_app_id + RETURNING value INTO next_value; + IF FOUND THEN + RETURN next_value; + END IF; + + BEGIN + INSERT INTO buildsequence (app_id, value) + VALUES (cur_app_id, 1) + RETURNING value INTO next_value; + RETURN next_value; + EXCEPTION WHEN unique_violation THEN + -- do nothing + END; + END LOOP; +END; +$$ LANGUAGE plpgsql +""" + +ADD_BUILD_SEQUENCES = """ +INSERT INTO buildsequence (app_id, value) +SELECT app_id, max(number) FROM build GROUP BY app_id +""" + + +def upgrade(): + op.create_table('buildsequence', + sa.Column('app_id', sa.Integer(), nullable=False), + sa.Column('value', sa.Integer(), server_default='0', nullable=False), + sa.PrimaryKeyConstraint('app_id', 'value') + ) + op.execute(NEXT_VALUE_FUNCTION) + op.execute(ADD_BUILD_SEQUENCES) + + +def downgrade(): + op.execute('DROP FUNCTION IF EXISTS next_build_number(int)') + op.drop_table('buildsequence') diff --git a/migrations/versions/493c6c33fa27_create_build_table.py b/migrations/versions/493c6c33fa27_create_build_table.py new file mode 100644 index 00000000..54eb615d --- /dev/null +++ 
b/migrations/versions/493c6c33fa27_create_build_table.py @@ -0,0 +1,35 @@ +""" +Create build table. + +Revision ID: 493c6c33fa27 +Revises: 205fd513c96 +Create Date: 2016-05-05 14:31:38.491777 +""" + +# revision identifiers, used by Alembic. +revision = '493c6c33fa27' +down_revision = '205fd513c96' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + op.create_table('build', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('task_id', sa.Integer(), nullable=False), + sa.Column('app_id', sa.Integer(), nullable=False), + sa.Column('number', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['app_id'], ['app.id'], ondelete='CASCADE'), + sa.ForeignKeyConstraint(['task_id'], ['task.id'], ondelete='CASCADE'), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('task_id', 'app_id', 'number', name='unq_build_number') + ) + op.create_index('idx_build_app_id', 'build', ['app_id'], unique=False) + op.create_index('idx_build_task_id', 'build', ['task_id'], unique=False) + + +def downgrade(): + op.drop_index('idx_build_task_id', table_name='build') + op.drop_index('idx_build_app_id', table_name='build') + op.drop_table('build')