Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[FLOC-4199] Verify that the version.html value matches the version being published #2662

Open
wants to merge 14 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 13 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
137 changes: 115 additions & 22 deletions admin/aws.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,12 @@
import os
from characteristic import attributes, Attribute
from effect import Effect, sync_performer, TypeDispatcher
from effect.do import do
from effect.do import do, do_return

import boto
from boto.s3.website import RoutingRules
from pyrsistent import PClass, pmap_field, PMap, field, discard, freeze
from twisted.python.filepath import FilePath


@attributes([
Expand Down Expand Up @@ -55,7 +58,7 @@ class UpdateS3ErrorPage(object):
def error_key(self):
    """
    The S3 key of the custom 404 error page for this prefix.

    :return: A ``unicode`` key path of the form
        ``<target_prefix>error_pages/404.html``.
    """
    return u'{}error_pages/404.html'.format(self.target_prefix)


@sync_performer
Expand Down Expand Up @@ -281,10 +284,51 @@ def perform_download_s3_key(dispatcher, intent):

bucket = s3.get_bucket(intent.source_bucket)
key = bucket.get_key(intent.source_key)
if key is None:
raise KeyError(intent.source_key)
with intent.target_path.open('w') as target_file:
key.get_contents_to_file(target_file)


@attributes([
    "source_bucket",
    "source_key",
])
class ReadS3Key(object):
    """
    Intent to read the contents of a key from S3.

    The performer (:func:`perform_read_s3_key`) yields the key's contents.

    :ivar bytes source_bucket: Name of bucket to read key from.
    :ivar bytes source_key: Name of key to read.
    """


@sync_performer
@do
def perform_read_s3_key(dispatcher, intent):
    """
    See :class:`ReadS3Key`.

    Downloads the key to a uniquely named temporary file (via the shared
    :class:`DownloadS3Key` intent so the fake and real implementations
    agree), reads its contents, and always removes the file afterwards.

    :return: The contents of the key.
    """
    # Derive a unique, collision-resistant temporary path from this module's
    # path; "/" is not valid in a file name so it is substituted.
    target_file = FilePath(
        u'/tmp/{}.perform_read_s3_key'.format(
            __file__.replace(u"/", u"!"),
        )
    ).temporarySibling()
    # temporarySibling marks the path for creation; we let DownloadS3Key
    # create it instead.
    target_file.requireCreate(False)
    try:
        yield Effect(
            DownloadS3Key(
                source_bucket=intent.source_bucket,
                source_key=intent.source_key,
                target_path=target_file,
            )
        )
        yield do_return(target_file.getContent())
    finally:
        # The download may have failed before the file was created.
        if target_file.exists():
            target_file.remove()


@attributes([
"source_path",
"target_bucket",
Expand Down Expand Up @@ -364,6 +408,7 @@ def perform_upload_s3_key(dispatcher, intent):
CopyS3Keys: perform_copy_s3_keys,
DownloadS3KeyRecursively: perform_download_s3_key_recursively,
DownloadS3Key: perform_download_s3_key,
ReadS3Key: perform_read_s3_key,
UploadToS3Recursively: perform_upload_s3_key_recursively,
UploadToS3: perform_upload_s3_key,
CreateCloudFrontInvalidation: perform_create_cloudfront_invalidation,
Expand All @@ -380,11 +425,31 @@ def __new__(cls, value, content_type):
return self


@attributes([
Attribute('routing_rules'),
Attribute('s3_buckets'),
Attribute('error_key', default_factory=dict)
])
class FakeAWSState(PClass):
    """
    The immutable state of ``FakeAWS``.

    :ivar routing_rules: Dictionary of routing rules for S3 buckets. They are
        represented as dictionaries mapping key prefixes to replacements.
        Other types of rules and attributes are not supported or represented.
    :ivar s3_buckets: Dictionary of fake S3 buckets. Each bucket is
        represented as a dictionary mapping keys to contents. Other
        attributes are ignored.
    :ivar error_key: Dictionary mapping bucket names to the error page key
        most recently configured for that bucket.
    :ivar cloudfront_invalidations: List of
        :class:`CreateCloudFrontInvalidation` that have been requested.
    """
    routing_rules = pmap_field(
        key_type=unicode,
        value_type=RoutingRules
    )
    s3_buckets = pmap_field(
        key_type=unicode,
        value_type=PMap
    )
    error_key = pmap_field(key_type=unicode, value_type=unicode)
    cloudfront_invalidations = field(initial=freeze([]))
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

❗ If you used more specific types for all these, you wouldn't need to litter the test code with tons of freeze calls, which are just noise.



@attributes(['state'])
class FakeAWS(object):
"""
Enough of a fake implementation of AWS to test
Expand All @@ -399,23 +464,29 @@ class FakeAWS(object):
:class:`CreateCloudFrontInvalidation` that have been requested.
"""
def __init__(self):
    # Remember the state the fake was constructed with, so tests can
    # compare the final state against the starting point.
    # NOTE(review): ``initial_state`` appears unused within this file —
    # confirm external callers rely on it before removing.
    self.initial_state = self.state
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This attribute seems unused?


@sync_performer
def _perform_update_s3_routing_rules(self, dispatcher, intent):
    """
    See :class:`UpdateS3RoutingRule`.

    Records the new routing rules for the bucket in the immutable state.
    """
    self.state = self.state.transform(
        ['routing_rules', intent.bucket],
        intent.routing_rules
    )

@sync_performer
def _perform_update_s3_error_page(self, dispatcher, intent):
    """
    See :class:`UpdateS3ErrorPage`.

    :return: The bucket's previous error key, or ``None`` if the key is
        unchanged (mirroring the real implementation's contract).
    """
    new_error_key = intent.error_key
    old_error_key = self.state.error_key.get(intent.bucket)
    self.state = self.state.transform(
        [u'error_key', intent.bucket],
        new_error_key
    )
    if old_error_key == new_error_key:
        return None
    return old_error_key
Expand All @@ -425,34 +496,42 @@ def _perform_create_cloudfront_invalidation(self, dispatcher, intent):
"""
See :class:`CreateCloudFrontInvalidation`.
"""
self.cloudfront_invalidations.append(intent)
self.state = self.state.transform(
['cloudfront_invalidations'],
lambda l: l.append(intent)
)

@sync_performer
def _perform_delete_s3_keys(self, dispatcher, intent):
    """
    See :class:`DeleteS3Keys`.

    Removes each prefixed key from the fake bucket's immutable map.
    """
    for key in intent.keys:
        # ``discard`` deletes the entry from the pyrsistent map.
        self.state = self.state.transform(
            ['s3_buckets', intent.bucket, intent.prefix + key],
            discard,
        )

@sync_performer
def _perform_copy_s3_keys(self, dispatcher, intent):
    """
    See :class:`CopyS3Keys`.

    Copies each key's contents from the source bucket/prefix to the
    destination bucket/prefix in the immutable state.
    """
    source_bucket = self.state.s3_buckets[intent.source_bucket]
    for key in intent.keys:
        self.state = self.state.transform(
            ['s3_buckets',
             intent.destination_bucket,
             intent.destination_prefix + key],
            source_bucket[intent.source_prefix + key]
        )

@sync_performer
def _perform_list_s3_keys(self, dispatcher, intent):
    """
    See :class:`ListS3Keys`.

    :return: The set of keys under ``intent.prefix`` in the bucket, with
        the prefix stripped.
    """
    bucket = self.state.s3_buckets[intent.bucket]
    return {key[len(intent.prefix):]
            for key in bucket
            if key.startswith(intent.prefix)}
Expand All @@ -462,21 +541,26 @@ def _perform_download_s3_key(self, dispatcher, intent):
"""
See :class:`DownloadS3Key`.
"""
bucket = self.s3_buckets[intent.source_bucket]
bucket = self.state.s3_buckets[intent.source_bucket]
intent.target_path.setContent(bucket[intent.source_key])

@sync_performer
def _perform_upload_s3_key(self, dispatcher, intent):
    """
    See :class:`UploadToS3`.

    Reads the local file and stores its contents under the target key in
    the fake bucket, tagging the content type when one is supplied.
    """
    with intent.file.open() as source_file:
        content = source_file.read()
    # XXX: Need to think about this.
    # The fake currently only allows unicode content.
    content_type = intent.content_type
    if content_type is not None:
        content = ContentTypeUnicode(content, content_type)
    self.state = self.state.transform(
        ['s3_buckets', intent.target_bucket, intent.target_key],
        content
    )

def get_dispatcher(self):
"""
Expand All @@ -487,6 +571,7 @@ def get_dispatcher(self):
# Share implementation with real implementation
DownloadS3KeyRecursively: perform_download_s3_key_recursively,
UploadToS3Recursively: perform_upload_s3_key_recursively,
ReadS3Key: perform_read_s3_key,

# Fake implementation
UpdateS3RoutingRules: self._perform_update_s3_routing_rules,
Expand All @@ -499,3 +584,11 @@ def get_dispatcher(self):
CreateCloudFrontInvalidation:
self._perform_create_cloudfront_invalidation,
})


def fake_aws(routing_rules, s3_buckets):
    """
    Create a :class:`FakeAWS` with the given initial state.

    :param routing_rules: Mapping of bucket name to :class:`RoutingRules`.
    :param s3_buckets: Mapping of bucket name to mappings of key to
        contents; frozen into immutable maps.
    :return: A new :class:`FakeAWS`.
    """
    initial_state = FakeAWSState(
        routing_rules=routing_rules,
        s3_buckets=freeze(s3_buckets),
    )
    return FakeAWS(state=initial_state)
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Remove this.

58 changes: 45 additions & 13 deletions admin/release.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@

import yaml
import os
import re
import sys
import tempfile
import virtualenv
Expand Down Expand Up @@ -52,6 +53,7 @@
DeleteS3Keys,
CopyS3Keys,
DownloadS3KeyRecursively,
ReadS3Key,
UploadToS3,
UploadToS3Recursively,
CreateCloudFrontInvalidation,
Expand Down Expand Up @@ -91,6 +93,16 @@ class DocumentationRelease(Exception):
"""


@attributes(['documentation_version', 'expected_version'])
class UnexpectedDocumentationVersion(Exception):
    """
    Raised if the source documentation is found to have a different version
    than is being published.

    :ivar documentation_version: The version found in the uploaded
        ``version.html``.
    :ivar expected_version: The version being published.
    """
    def __str__(self):
        # ``characteristic`` supplies a detailed __repr__; reuse it so the
        # version mismatch is visible in tracebacks.
        return repr(self)


class Environments(Names):
"""
The environments that documentation can be published to.
Expand Down Expand Up @@ -142,17 +154,25 @@ class DocumentationConfiguration(object):
# Maps each publication environment to the S3 buckets and CloudFront
# distribution used when publishing documentation to it.
DOCUMENTATION_CONFIGURATIONS = {
    Environments.PRODUCTION:
        DocumentationConfiguration(
            documentation_bucket=u"clusterhq-docs",
            cloudfront_cname=u"docs.clusterhq.com",
            dev_bucket=u"clusterhq-staging-docs"),
    Environments.STAGING:
        DocumentationConfiguration(
            documentation_bucket=u"clusterhq-staging-docs",
            cloudfront_cname=u"docs.staging.clusterhq.com",
            dev_bucket=u"clusterhq-staging-docs"),
}


def strip_html_tags(html):
    """
    Strip every HTML tag from a document, keeping only its text content.

    :param unicode html: The HTML content.
    :returns: ``html`` with all HTML tags removed.
    """
    # Matches opening and closing tags alike; [^>]+ keeps the match from
    # spanning past the tag's closing bracket.
    tag_pattern = re.compile(r"</?[^>]+>")
    return tag_pattern.sub("", html)


def parse_routing_rules(routing_config, hostname):
"""
Parse routing rule description.
Expand Down Expand Up @@ -224,15 +244,27 @@ def publish_docs(flocker_version, doc_version, environment, routing_config):
raise NotTagged()
configuration = DOCUMENTATION_CONFIGURATIONS[environment]

dev_prefix = 'release/flocker-%s/' % (flocker_version,)
version_prefix = 'en/%s/' % (get_doc_version(doc_version),)
dev_prefix = u'release/flocker-{}/'.format(flocker_version)
version_prefix = u'en/{}/'.format(get_doc_version(doc_version))

is_dev = not is_release(doc_version)
if is_dev:
stable_prefix = "en/devel/"
stable_prefix = u"en/devel/"
else:
stable_prefix = "en/latest/"
stable_prefix = u"en/latest/"

found_version_html = yield Effect(
ReadS3Key(
source_bucket=configuration.dev_bucket,
source_key=dev_prefix + u"version.html",
)
)
found_version_number = strip_html_tags(found_version_html).strip()
if found_version_number != doc_version:
raise UnexpectedDocumentationVersion(
documentation_version=found_version_number,
expected_version=doc_version
)
# Get the list of keys in the new documentation.
new_version_keys = yield Effect(
ListS3Keys(bucket=configuration.dev_bucket,
Expand Down Expand Up @@ -287,16 +319,17 @@ def publish_docs(flocker_version, doc_version, environment, routing_config):
UpdateS3ErrorPage(bucket=configuration.documentation_bucket,
target_prefix=version_prefix))

# XXX: We also need to calculate and invalidate the changed "latest" keys.
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is a bug in the invalidation rules.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is there a ticket for this?

# The changed keys are the new keys, the keys that were deleted from this
# version, and the keys for the previous version.
changed_keys = (new_version_keys | existing_version_keys)

# S3 serves /index.html when given /, so any changed /index.html means
# that / changed as well.
# Note that we check for '/index.html' but remove 'index.html'
changed_keys |= {key_name[:-len('index.html')]
changed_keys |= {key_name[:-len(u'index.html')]
for key_name in changed_keys
if key_name.endswith('/index.html')}
if key_name.endswith(u'/index.html')}

# Always update the root.
changed_keys |= {''}
Expand All @@ -306,7 +339,6 @@ def publish_docs(flocker_version, doc_version, environment, routing_config):
changed_paths = {prefix + key_name
for key_name in changed_keys
for prefix in [stable_prefix, version_prefix]}

yield Effect(UpdateS3RoutingRules(
bucket=configuration.documentation_bucket,
routing_rules=parse_routing_rules(
Expand Down
Loading