diff --git a/.gitignore b/.gitignore index 8c7afb8..16a16de 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,9 @@ **/**/*.pyc *.egg-info **/*.pyc +.idea serviced.log - +plain.output codekit-config.json # Mac Files diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000..1bcedba --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,82 @@ +# PROPOSED pipeline +# prerequisites: if ubuntu:latest does not contain pip or docker, +# create an image install them and use this new image in the pipeline + +stages: + - build + - test + - deploy + - test-prod + +check-lint: + image: ubuntu:latest + stage: build + script: + - pip install -r tests/requirments.txt + - pycodestyle tests/ --max-line-length=120 + rules: + - if: $CI_PIPELINE_SOURCE == "push" + +docker-build: + image: ubuntu:latest + stage: build + script: + - docker build . --file Dockerfile + rules: + - if: $CI_PIPELINE_SOURCE == "push" + +e2e-test: + image: ubuntu:latest + stage: test + variables: + ENDPOINT: "http://localhost:8000" + script: + - docker-compose build && docker-compose up + - virtualenv venv + - source venv/bin/activate + - pip install -r tests/requirments.txt + - behave tests/features/ -D endpoint=$ENDPOINT + artifacts: + paths: + - plain.output + rules: + - if: $CI_PIPELINE_SOURCE == "merge_request_event" + +deploy-prod: + image: ubuntu:latest + stage: deploy + script: echo "DEPLOY" # - commands to deploy in prod + rules: + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_BUILD_REF_NAME == "master"' + when: manual + +e2e-test-prod: + needs: + - deploy-prod + image: ubuntu:latest + stage: test-prod + variables: + ENDPOINT: "https://produrl" + script: + - virtualenv venv + - source venv/bin/activate + - pip install -r tests/requirments.txt + - behave tests/features/ --tags="@prod" -D endpoint=$ENDPOINT + artifacts: + paths: + - plain.output + rules: + - if: '$CI_PIPELINE_SOURCE == "push" && $CI_BUILD_REF_NAME == "master"' + +non-functional-test: + image: ubuntu:latest + stage: test-prod 
+ variables: + ENDPOINT: "https://produrl" + script: + - echo "non-functional tests" # commands to run non-functional tests + artifacts: + paths: + - test.output + rules: + - if: $CI_PIPELINE_SOURCE == "schedule" diff --git a/behave.ini b/behave.ini new file mode 100644 index 0000000..97f3439 --- /dev/null +++ b/behave.ini @@ -0,0 +1,4 @@ +[behave] +stderr_capture=False +stdout_capture=False +format=plain diff --git a/pipeline.png b/pipeline.png new file mode 100644 index 0000000..5c619e1 Binary files /dev/null and b/pipeline.png differ diff --git a/requestbin/api.py b/requestbin/api.py index 2a83065..6215c14 100644 --- a/requestbin/api.py +++ b/requestbin/api.py @@ -19,7 +19,8 @@ def _response(object, code=200): @app.endpoint('api.bins') def bins(): private = request.form.get('private') in ['true', 'on'] - bin = db.create_bin(private) + name = request.form.get('given_name', None) + bin = db.create_bin(private, name) if bin.private: session[bin.name] = bin.secret_key return _response(bin.to_dict()) diff --git a/requestbin/db.py b/requestbin/db.py index e1ca0be..0cb7461 100644 --- a/requestbin/db.py +++ b/requestbin/db.py @@ -15,8 +15,8 @@ db = klass(bin_ttl) -def create_bin(private=False): - return db.create_bin(private) +def create_bin(private=False, custon_name=None): + return db.create_bin(private, custon_name) def create_request(bin, request): return db.create_request(bin, request) diff --git a/requestbin/models.py b/requestbin/models.py index 8b3ba03..c7b02ac 100644 --- a/requestbin/models.py +++ b/requestbin/models.py @@ -16,11 +16,14 @@ class Bin(object): max_requests = config.MAX_REQUESTS - def __init__(self, private=False): + def __init__(self, private=False, name=None): self.created = time.time() self.private = private self.color = random_color() - self.name = tinyid(8) + if name is None: + self.name = tinyid(8) + else: + self.name = name self.favicon_uri = solid16x16gif_datauri(*self.color) self.requests = [] self.secret_key = os.urandom(24) if 
self.private else None diff --git a/requestbin/storage/memory.py b/requestbin/storage/memory.py index 8e8f54b..bacd7d2 100644 --- a/requestbin/storage/memory.py +++ b/requestbin/storage/memory.py @@ -27,8 +27,8 @@ def _expire_bins(self): if bin.created < expiry: self.bins.pop(name) - def create_bin(self, private=False): - bin = Bin(private) + def create_bin(self, private=False, given_name=None): + bin = Bin(private, name) self.bins[bin.name] = bin return self.bins[bin.name] diff --git a/requestbin/storage/redis.py b/requestbin/storage/redis.py index ae0df9a..28df9dd 100644 --- a/requestbin/storage/redis.py +++ b/requestbin/storage/redis.py @@ -22,8 +22,8 @@ def _key(self, name): def _request_count_key(self): return '{}-requests'.format(self.prefix) - def create_bin(self, private=False): - bin = Bin(private) + def create_bin(self, private=False, given_name=None): + bin = Bin(private, given_name) key = self._key(bin.name) self.redis.set(key, bin.dump()) self.redis.expireat(key, int(bin.created+self.bin_ttl)) diff --git a/tests/README.md b/tests/README.md new file mode 100644 index 0000000..c350e98 --- /dev/null +++ b/tests/README.md @@ -0,0 +1,66 @@ +# E2E tests + +## How to run the tests +### Setup +Create a python virtual env +```shell +virtualenv venv +``` +Activate it +```shell +source venv/bin/activate +``` +Install prerequisites +```shell +pip install -r tests/requirments.txt +``` +## Run the tests +```shell +behave tests/features/ -D endpoint="endpoint to test" +``` +The results are visible on stdout but also an output file is created +```shell +plain.output +``` + +## Lint +```shell + pycodestyle tests/ --max-line-length=120 +``` + +## Pipeline + +![pipeline](pipeline.png) + +I propose 4 stages for the pipeline +- Build +- Test +- Deploy +- Test-prod +#### Build +This stage is triggered and every push and consists in 2 jobs, +build the docker image and checking the codestytle for the testing project + +#### Test +In this stage the e2e tests run, it is 
building and running the docker container and +firing the e2e tests, this stage is triggered on a merge request event. +The results of the tests are available on the artifacts of the job, visible and downloadable on gitlab + +#### Deploy +This stage is a manual step to deploy on production. +It is available after the code is merged on master. +I decided to have this step manual for several reasons like: +- not every code change should be deployed immediately in production +- I imagined a release team responsible to deploy on production, that will also create a release note, + maybe also perform some manual check +- not all the engineer have the "power" to deploy on production + +Side note, I did not implemented the deploy scripts + +#### Test-prod +This stage is composed by 2 job. +- The first `e2e-test-prod` is triggered as soon as `deploy-prod` is finished. +It is running the e2e-tests but against the real endpoint that is specified on the job. +It is running only the scenarios that are tagged with `@prod` +- The second `non-functional-test` it is instead a scheduled job, so can be scheduled to run nightly/weekly/etc. 
It runs non-functional tests (performance) that I did not implement in this homework, but they should be taken into consideration
step_get_webhook(context): + """Get webhooks from bin, wait 5 secs to connect and 10 to receive the data""" + + try: + res = requests.get('{}/api/v1/bins/{}/requests'.format(context.vars['uri'], context.vars['bin_name']), + timeout=(5, 10)) + assert_that(res.status_code, equal_to(200)) + except requests.exceptions.ReadTimeout: + raise "Webhook not available after 10 seconds" + + webhook_ids = [] + + for req in res.json(): + if 'id' in req: + webhook_ids.append(req['id']) + context.vars['ids'] = webhook_ids + + +@when('I get the content of them') +def step_get_content(context): + """Get body of the webhooks""" + + body = [] + for idx in context.vars['ids']: + res = requests.get('{}/api/v1/bins/{}/requests/{}'.format(context.vars['uri'], context.vars['bin_name'], idx)) + assert_that(res.status_code, equal_to(200)) + + if res.json()['content_length'] > 0: + body.append(json.loads(res.json()['body'])) + if body: + context.vars['body'] = body + else: + raise ValueError + + +@then('I check that in the "{name}" the key "{expected_value}" is present') +def step_check_body(context, name, expected_value): + """Check that the body contains a specific key""" + + for datas in context.vars['body']: + assert_that(datas[name], has_key(expected_value)) + + +@then('I check that in (?P.*) (?P.*) (?Pis|is not) (?P.*)') +def step_check_data(context, name, key, check, expected_value): + """Check key are present and have a specific value""" + + for val in context.vars['body']: + if name in val['data']: + assert_that(val['data'][name], has_key(key)) + if check == "is": + assert_that(str(val['data'][name][key]), equal_to(expected_value)) + else: + assert str(val['data'][name][key]) != expected_value + + +@then('I check that (?P.*) type in (?P.*) (?Pis|is not) a (?Parray|dictionary)') +def step_check_data(context, name, key, check, expected_type): + """Check key are present and the value is list or dict""" + + type_checker = dict if expected_type == "dictionary" else list + for val in 
context.vars['body']: + if name in val['data']: + assert_that(val['data'][name], has_key(key)) + if check == "is": + assert_that(val['data'][name][key], instance_of(type_checker)) + else: + assert_that(val['data'][name][key], is_not(instance_of(type_checker))) + + +@then('I verify that package contains (?Pid|deliveries)') +def step_check_package(context, expected_value): + """Check that id or deliveries are present in package""" + + for val in context.vars['body']: + package = val['data']['schedulePackage']['package'] + assert_that(package, has_key(expected_value)) + + +@then('I find the (?Ppickup|dropoff) address in deliveries') +def step_check_address(context, value): + """Check that there is 1 and only 1 pickup/dropoff address with coordinates per task""" + + for val in context.vars['body']: + if val['data']['schedulePackage']['success']: + deliveries = val['data']['schedulePackage']['package']['deliveries'] + for task in deliveries: + n_type = 0 + for t in task['tasks']: + if t['type'] == value.upper(): + n_type += 1 + assert_that(t, has_key('address')) + assert_that(t['address'], has_key('geocoded')) + assert_that(t['address'], has_key('location')) + assert_that(t['address']['location'], has_key('lat')) + assert_that(t['address']['location'], has_key('long')) + assert_that(n_type, equal_to(1)) + else: + raise ValueError diff --git a/tests/features/test.feature b/tests/features/test.feature new file mode 100644 index 0000000..a3a961d --- /dev/null +++ b/tests/features/test.feature @@ -0,0 +1,26 @@ +Feature: Test webhook + + Scenario: Check webhook contains schedulePackage + Given The webhooks from a bin + When I get the content of them + Then I check that in the "data" the key "schedulePackage" is present + + Scenario: Check the package is been delivered without errors + Given The webhooks from a bin + When I get the content of them + Then I check that in schedulePackage success is True + And I check that in schedulePackage error is None + And I check that 
package type in schedulePackage is a dictionary + + Scenario: Check the package contains id and deliveries + Given The webhooks from a bin + When I get the content of them + Then I verify that package contains id + And I verify that package contains deliveries + + @prod + Scenario: Check the deliveries has pickup and dropoff location + Given The webhooks from a bin + When I get the content of them + Then I find the pickup address in deliveries + And I find the dropoff address in deliveries diff --git a/tests/requirments.txt b/tests/requirments.txt new file mode 100644 index 0000000..78301b1 --- /dev/null +++ b/tests/requirments.txt @@ -0,0 +1,4 @@ +behave == 1.2.6 +requests == 2.27.1 +pyhamcrest == 1.10.1 +pycodestyle == 2.4.0 \ No newline at end of file