diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 00000000..00505ea8 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,10 @@ +.env +scripts/proctor.rb +_output/* + +*.swp +*.swo +.idea +.DS_Store +.vscode/ +dist/ diff --git a/.env.sample b/.env.sample deleted file mode 100644 index e0426dc8..00000000 --- a/.env.sample +++ /dev/null @@ -1,31 +0,0 @@ -export PROCTOR_KUBE_CONFIG="out-of-cluster" -export PROCTOR_LOG_LEVEL="debug" -export PROCTOR_APP_PORT="5000" -export PROCTOR_DEFAULT_NAMESPACE="default" -export PROCTOR_REDIS_ADDRESS="localhost:6379" -export PROCTOR_REDIS_MAX_ACTIVE_CONNECTIONS="10" -export PROCTOR_KUBE_JOB_ACTIVE_DEADLINE_SECONDS="60" -export PROCTOR_LOGS_STREAM_READ_BUFFER_SIZE="140" -export PROCTOR_LOGS_STREAM_WRITE_BUFFER_SIZE="4096" -export PROCTOR_KUBE_CLUSTER_HOST_NAME="localhost:8001" -export PROCTOR_KUBE_POD_LIST_WAIT_TIME="5" -export PROCTOR_KUBE_CA_CERT_ENCODED="LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCmNlcnRpZmljYXRlCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K" -export PROCTOR_KUBE_BASIC_AUTH_ENCODED="Y2EtY2VydAo=" -export PROCTOR_POSTGRES_USER="postgres" -export PROCTOR_POSTGRES_PASSWORD="" -export PROCTOR_POSTGRES_HOST="localhost" -export PROCTOR_POSTGRES_PORT="5432" -export PROCTOR_POSTGRES_DATABASE="proctord_development" -export PROCTOR_POSTGRES_MAX_CONNECTIONS="50" -export PROCTOR_POSTGRES_CONNECTIONS_MAX_LIFETIME="30" -export PROCTOR_NEW_RELIC_APP_NAME="PROCTORD" -export PROCTOR_NEW_RELIC_LICENCE_KEY="nrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnr" -export PROCTOR_MIN_CLIENT_VERSION="v0.2.0" -export PROCTOR_SCHEDULED_JOBS_FETCH_INTERVAL_IN_MINS="5" -export PROCTOR_MAIL_USERNAME="user@mail.com" -export PROCTOR_MAIL_PASSWORD="password" -export PROCTOR_MAIL_SERVER_HOST="smtp.mail.com" -export PROCTOR_MAIL_SERVER_PORT="123" -export PROCTOR_JOB_POD_ANNOTATIONS="{\"key.one\":\"true\"}" -export PROCTOR_SENTRY_DSN="foo" -export PROCTOR_DOCS_PATH="/path/to/docs/dir" \ No newline at end of file diff --git a/.env.test b/.env.test index 16837d39..fcf3d8ff 100644 --- a/.env.test +++ b/.env.test @@ -1,18 +1,18 @@ -export ENVIRONMENT=test export PROCTOR_KUBE_CONFIG=out-of-cluster +export PROCTOR_KUBE_CONTEXT=minikube export PROCTOR_LOG_LEVEL=debug export PROCTOR_APP_PORT=5000 export PROCTOR_DEFAULT_NAMESPACE=default export PROCTOR_REDIS_ADDRESS=localhost:6379 +export PROCTOR_REDIS_PASSWORD= export PROCTOR_REDIS_MAX_ACTIVE_CONNECTIONS=10 export PROCTOR_KUBE_JOB_ACTIVE_DEADLINE_SECONDS=60 export PROCTOR_KUBE_JOB_RETRIES=0 +export PROCTOR_KUBE_SERVICE_ACCOUNT_NAME=default export PROCTOR_LOGS_STREAM_READ_BUFFER_SIZE=140 export PROCTOR_LOGS_STREAM_WRITE_BUFFER_SIZE=4096 -export PROCTOR_KUBE_CLUSTER_HOST_NAME=localhost:8001 -export PROCTOR_KUBE_POD_LIST_WAIT_TIME=5 -export PROCTOR_KUBE_CA_CERT_ENCODED=LS0tLS1CRUdJTiBDRVJUSUZJQ0FURS0tLS0tCmNlcnRpZmljYXRlCi0tLS0tRU5EIENFUlRJRklDQVRFLS0tLS0K -export PROCTOR_KUBE_BASIC_AUTH_ENCODED=YWRtaW46cGFzc3dvcmQK +export PROCTOR_KUBE_WAIT_FOR_RESOURCE_POLL_COUNT=5 +export PROCTOR_KUBE_LOG_PROCESS_WAIT_TIME=60 export PROCTOR_POSTGRES_USER=postgres export PROCTOR_POSTGRES_PASSWORD= export PROCTOR_POSTGRES_HOST=localhost @@ -20,15 +20,19 @@ export PROCTOR_POSTGRES_PORT=5432 export PROCTOR_POSTGRES_DATABASE=proctord_test export PROCTOR_POSTGRES_MAX_CONNECTIONS=50 export PROCTOR_POSTGRES_CONNECTIONS_MAX_LIFETIME=30 -export PROCTOR_NEW_RELIC_APP_NAME="PROCTORD" -export PROCTOR_NEW_RELIC_LICENCE_KEY="nrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnr" -export PROCTOR_MIN_CLIENT_VERSION="v0.2.0" -export PROCTOR_SCHEDULED_JOBS_FETCH_INTERVAL_IN_MINS="5" -export 
PROCTOR_MAIL_USERNAME="user@mail.com" -export PROCTOR_MAIL_PASSWORD="password" -export PROCTOR_MAIL_SERVER_HOST="smtp.mail.com" -export PROCTOR_MAIL_SERVER_PORT="123" -export PROCTOR_JOB_POD_ANNOTATIONS="{\"key.one\":\"true\"}" -export PROCTOR_SENTRY_DSN="foo" -export PROCTOR_DOCS_PATH="/path/to/docs/dir" - +export PROCTOR_NEW_RELIC_APP_NAME=PROCTORD +export PROCTOR_NEW_RELIC_LICENCE_KEY=0123456789012345678901234567890123456789 +export PROCTOR_MIN_CLIENT_VERSION=v2.0.0 +export PROCTOR_SCHEDULED_JOBS_FETCH_INTERVAL_IN_MINS=5 +export PROCTOR_MAIL_USERNAME=user@mail.com +export PROCTOR_MAIL_PASSWORD=password +export PROCTOR_MAIL_SERVER_HOST=smtp.mail.com +export PROCTOR_MAIL_SERVER_PORT=123 +export PROCTOR_JOB_POD_ANNOTATIONS={\"key.one\":\"true\"} +export PROCTOR_DOCS_PATH=/path/to/docs/dir +export PROCTOR_AUTH_PLUGIN_BINARY= +export PROCTOR_AUTH_PLUGIN_EXPORTED=GateAuth +export PROCTOR_AUTH_ENABLED=false +export PROCTOR_NOTIFICATION_PLUGIN_BINARY= +export PROCTOR_NOTIFICATION_PLUGIN_EXPORTED=SlackNotification +export PROCTOR_REQUIRED_ADMIN_GROUP=proctor_admin diff --git a/.gitignore b/.gitignore index c9729aab..001dc7f5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,16 +1,12 @@ - - homebrew-gojek scripts/proctor.rb _output/* -proctord .env -proctord - *.swp *.swo .idea .DS_Store -.vscode/ \ No newline at end of file +.vscode/ +dist/ \ No newline at end of file diff --git a/.goreleaser.yml b/.goreleaser.yml index 8490ef83..40f032bd 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -1,10 +1,27 @@ # This is an example goreleaser.yaml file with some sane defaults. # Make sure to check the documentation at http://goreleaser.com builds: -- env: - - CGO_ENABLED=0 -archive: - replacements: + - + id: "proctor-server" + main: ./cmd/server/main.go + binary: proctor-server + goos: + - darwin + - linux + env: + - CGO_ENABLED=0 + - + id: "proctor-cli" + main: ./cmd/cli/main.go + binary: proctor2 + goos: + - darwin + - linux + - windows + env: + - CGO_ENABLED=0 +archives: +- replacements: darwin: Darwin linux: Linux windows: Windows diff --git a/.travis.yml b/.travis.yml index 44daf6b7..1c3c9609 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,14 +2,14 @@ dist: trusty sudo: required language: go go: -- "1.12" + - "1.12" services: - redis-server - postgresql before_script: -- sudo service redis-server start + - sudo service redis-server start stages: - test @@ -18,7 +18,4 @@ jobs: include: - stage: test script: - - make db.setup test-with-race - -after_success: - - scripts/release.sh + - make db.setup test diff --git a/AUTHORS.md b/AUTHORS.md index a7a37732..f9a1fb58 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -1,4 +1,6 @@ # Proctor Team * [Akshat](//github.com/olttwa) +* [Jasoet](//github.com/jasoet) +* [Bimo Horizon](//github.com/bimozx) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6b9e998c..3be328d0 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,6 +1,6 @@ Proctor - Contributing -Proctor `github.com/gojektech/proctor` is an open-source project. +Proctor `github.com/gojek/proctor` is an open-source project. It is licensed using the [Apache License 2.0][1]. We appreciate pull requests; here are our guidelines: @@ -36,7 +36,7 @@ Much Thanks! 
❤❤❤ GO-JEK Tech [1]: http://www.apache.org/licenses/LICENSE-2.0 -[2]: https://github.com/gojektech/proctor/issues +[2]: https://github.com/gojek/proctor/issues [3]: https://golang.org/doc/effective_go.html [4]: http://gun.io/blog/how-to-github-fork-branch-and-pull-request [5]: https://chris.beams.io/posts/git-commit/ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..8aa78a54 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,18 @@ +FROM golang:1.12 AS builder +WORKDIR /go/src/app +COPY . . +RUN make plugin.auth +RUN make plugin.slack +RUN make server + +FROM ubuntu:latest +RUN apt-get update +RUN apt-get install -y ca-certificates +WORKDIR /app/ +COPY --from=builder /go/src/app/_output/bin/server . +COPY --from=builder /go/src/app/_output/bin/plugin/auth.so . +COPY --from=builder /go/src/app/_output/bin/plugin/slack.so . +COPY --from=builder /go/src/app/migrations ./migrations + +ENTRYPOINT ["./server"] +CMD ["s"] diff --git a/Makefile b/Makefile index 72953353..e836b63b 100644 --- a/Makefile +++ b/Makefile @@ -9,10 +9,14 @@ export $(shell sed 's/=.*//' .env.test) SRC_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST)))) OUT_DIR := $(SRC_DIR)/_output BIN_DIR := $(OUT_DIR)/bin +PLUGIN_DIR := $(BIN_DIR)/plugin +FTEST_DIR := test/procs +CONFIG_DIR := test/config GOPROXY ?= https://proxy.golang.org GO111MODULE := on +CONFIG_LOCATION := $(SRC_DIR) -$(@info $(shell mkdir -p $(OUT_DIR) $(BIN_DIR))) +$(@info $(shell mkdir -p $(OUT_DIR) $(BIN_DIR) $(PLUGIN_DIR)) .PHONY: build build: test-with-race server cli @@ -23,23 +27,51 @@ test-with-race: test .PHONY: test test: - go test $(RACE_FLAG) -coverprofile=$(OUT_DIR)/coverage.out ./... + ENABLE_INTEGRATION_TEST=false \ + go test -race -coverprofile=$(OUT_DIR)/coverage.out ./... + +.PHONY: itest +itest: plugin.auth plugin.slack + PROCTOR_AUTH_PLUGIN_BINARY=$(PLUGIN_DIR)/auth.so \ + PROCTOR_NOTIFICATION_PLUGIN_BINARY=$(PLUGIN_DIR)/slack.so \ + ENABLE_INTEGRATION_TEST=true \ + go test -p 1 -race -coverprofile=$(OUT_DIR)/coverage.out ./... + +.PHONY: plugin.itest +plugin.itest: plugin.auth plugin.slack + PROCTOR_AUTH_PLUGIN_BINARY=$(PLUGIN_DIR)/auth.so \ + PROCTOR_NOTIFICATION_PLUGIN_BINARY=$(PLUGIN_DIR)/slack.so \ + ENABLE_PLUGIN_INTEGRATION_TEST=true \ + go test -race -coverprofile=$(OUT_DIR)/coverage.out ./... .PHONY: server server: - go build -o $(BIN_DIR)/server ./exec/server/server.go + PROCTOR_AUTH_PLUGIN_BINARY=$(PLUGIN_DIR)/auth.so \ + go build -race -o $(BIN_DIR)/server ./cmd/server/main.go -.PHONY: start-server -start-server: - $(BIN_DIR)/server s +.PHONY: plugin.auth +plugin.auth: + go build -race -buildmode=plugin -o $(PLUGIN_DIR)/auth.so ./plugins/gate-auth-plugin/auth.go + +.PHONY: plugin.slack +plugin.slack: + go build -race -buildmode=plugin -o $(PLUGIN_DIR)/slack.so ./plugins/slack-notification-plugin/slack_notification.go .PHONY: cli cli: - go build -o $(BIN_DIR)/cli ./exec/cli/cli.go + go build -race -o $(BIN_DIR)/cli ./cmd/cli/main.go + +build-all: server cli plugin.auth plugin.slack + +.PHONY: start-server +start-server: + PROCTOR_AUTH_PLUGIN_BINARY=$(PLUGIN_DIR)/auth.so \ + PROCTOR_NOTIFICATION_PLUGIN_BINARY=$(PLUGIN_DIR)/slack.so \ + $(BIN_DIR)/server s generate: - go get github.com/go-bindata/go-bindata - $(GOPATH)/bin/go-bindata -pkg config -o config/data.go data/config_template.yaml + go get -u github.com/go-bindata/go-bindata/... 
+ $(GOPATH)/bin/go-bindata -pkg config -o internal/app/cli/config/data.go internal/app/cli/config_template.yaml db.setup: db.create db.migrate @@ -54,3 +86,50 @@ db.rollback: server db.teardown: -PGPASSWORD=$(PROCTOR_POSTGRES_PASSWORD) psql -h $(PROCTOR_POSTGRES_HOST) -p $(PROCTOR_POSTGRES_PORT) -c 'drop database $(PROCTOR_POSTGRES_DATABASE);' -U $(PROCTOR_POSTGRES_USER) + redis-cli FLUSHALL + +.PHONY: ftest.package.procs +ftest.package.procs: + PROCTOR_JOBS_PATH=$(FTEST_DIR) \ + ruby ./test/package_procs.rb + +.PHONY: ftest.update.metadata +ftest.update.metadata: + PROCTOR_JOBS_PATH=$(FTEST_DIR) \ + PROCTOR_URI=http://localhost:$(PROCTOR_APP_PORT)/metadata \ + ruby ./test/update_metadata.rb + +.PHONY: ftest.update.secret +ftest.update.secret: + curl -X POST \ + http://localhost:5000/secret \ + -H 'Content-Type: application/json' \ + -d '{"job_name": "say-hello-world","secrets": {"SAMPLE_SECRET_ONE": "Secret One :*","SAMPLE_SECRET_TWO": "Secret Two :V"}}' + +.PHONY: ftest.proctor.list +ftest.proctor.list: + LOCAL_CONFIG_DIR=$(CONFIG_DIR) $(BIN_DIR)/cli list + +.PHONY: ftest.proctor.describe +ftest.proctor.describe: + LOCAL_CONFIG_DIR=$(CONFIG_DIR) $(BIN_DIR)/cli describe say-hello-world + +.PHONY: ftest.proctor.template +ftest.proctor.template: + LOCAL_CONFIG_DIR=$(CONFIG_DIR) $(BIN_DIR)/cli template say-hello-world say-hello-world.yaml + +.PHONY: ftest.proctor.execute +ftest.proctor.execute: + LOCAL_CONFIG_DIR=$(CONFIG_DIR) $(BIN_DIR)/cli execute say-hello-world SAMPLE_ARG_ONE=foo SAMPLE_ARG_TWO=bar + +.PHONY: ftest.proctor.execute-with-yaml +ftest.proctor.execute-with-yaml: + LOCAL_CONFIG_DIR=$(CONFIG_DIR) $(BIN_DIR)/cli execute say-hello-world -f $(FTEST_DIR)/say-hello-world/say_hello_world.yaml + +.PHONY: ftest.proctor.logs +ftest.proctor.logs: + LOCAL_CONFIG_DIR=$(CONFIG_DIR) $(BIN_DIR)/cli logs $(EXECUTION_ID) + +.PHONY: ftest.proctor.status +ftest.proctor.status: + LOCAL_CONFIG_DIR=$(CONFIG_DIR) $(BIN_DIR)/cli status $(EXECUTION_ID) diff --git a/README.md b/README.md index 2b5f067f..7bbcac77 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,136 @@ # Proctor -

+

[logo and Build Status badge markup updated; HTML not captured in this view]

## Description
+Proctor is a set of components that allows users to run automated tasks with a configurable access policy.
+Bundle repetitive tasks as automations and turn them into `procs` to make them easier for users to run themselves.
-Proctor is a developer friendly automation orchestrator. It helps everyone use automation and contribute to it
+Before diving deep into an explanation of proctor, you may want to read the [Proctor Glossary](docs/glossary.md)
-### Dev environment setup
+## Feature list
+The full feature list of the proctor CLI is documented separately [here](./docs/features.md)
+
+## Installation
+This section covers installation on unix environments.
+
+General steps
* Install and setup golang
-* Clone the repository
-* Run `make build`
+* Clone this repository
+* Run `make build`. This will generate binaries for the proctor cli and service
+
+For proctor service
+
+* Make sure you have a running Redis server
+* Make sure you have a running Postgres server
+* Make sure you have a running Kubernetes cluster; for setting up a local cluster, please refer [here](https://kubernetes.io/docs/setup/learning-environment/minikube/)
+* Copy `.env.sample` into a `.env` file. Please refer [here](#proctor-service-configuration-explanation) for an explanation of the configuration
+* Make sure you set the correct values in `.env` for Kubernetes, Postgresql, and Redis
+* Export the values in `.env` by running `source .env`
+* Run `make db.setup` to set up the local postgresql database and run migrations
+* Run `./_output/bin/server s` to start the proctor service
+
+For proctor cli
+
+* Run `./_output/bin/cli config PROCTOR_HOST=` to point your proctor cli at the local proctor service
+* Run `./_output/bin/cli` to see the complete usage of the proctor cli
+* Run `make ftest.update.metadata` to upload sample procs metadata
+* Test the client with `./_output/bin/cli list`
+
+## Proctor Components
+Here's an overview of the proctor components.
+![Proctor component](./assets/img/proctor_components.jpg)
+
+#### Proctor CLI
+The Proctor cli is a command line interface used by clients to interact with the Proctor service.
+The Proctor cli reads user configuration such as the Proctor service host, user email, and user token from `~/.proctor/proctor.yaml`.
+
+#### Proctor Service
+The Proctor service governs the main processes of Proctor, such as:
+ * Create execution context
+ * Create and read procs metadata
+ * Create and read procs secret
+ * Order the execution of procs
+ * Get execution status and log of running procs
+
+#### Context Store
+Currently the Proctor service uses postgres to store the execution contexts of procs.
+
+#### Metadata Store
+The metadata store contains all procs metadata; procs that don't have metadata in the store cannot be executed.
+
+#### Secret Store
+The secret store contains the secret values that procs need in order to execute.
+
+#### Executor
+The executor is the component that executes the procs; we use Kubernetes Jobs as the executor.
+The Proctor service sends the procs name and required args, then the executor pulls the necessary image to run the procs.
+The Proctor service periodically contacts the executor to get the status of the requested procs.
+
+## Procs Execution Flow
+Here's what happens between the Proctor components when a client wants to execute a procs.
+ 1. The cli sends an execution request to the service. This request consists of the procs name, procs args, and user credentials.
+ 2. The service gets the metadata and secrets for the requested procs.
+ 3. The service creates an execution context to store data related to the procs execution.
+ 4. The service tells the executor to run the procs image along with the user arguments and procs secrets.
+ 5. The service watches the process run by the executor, reading its logs and execution status and writing them to the execution context.
+
+## Security flow
+Some routes are protected by authentication, authorization, or both.
+An authenticated user is a user who has an account known to proctor.
+An authorized user is a user who belongs to the groups defined in the procs metadata; for example, when the procs authorized groups are `proctor-user` and `dev`, the user needs to be a member of both groups.
+
+A request needs to carry these headers to pass the auth process:
+```
+'Access-Token: '
+'Email-Id: '
+```
+
+List of routes that require authentication:
+ - POST /execution
+ - GET /execution/{contextId}/status
+ - GET /execution/logs
+ - GET /metadata
+ - POST /metadata
+ - POST /secret
+ - POST /schedule
+ - GET /schedule
+ - GET /schedule/{scheduleID}
+ - DELETE /schedule/{scheduleID}
+
+List of routes that require authorization:
+ - POST /execution
+ - POST /schedule
+
+Proctor doesn't come with a built-in auth implementation; it uses a configurable [plugin](#plugin) mechanism.
-### proctord
+## Plugin
-* `proctord` is the heart of the automation orchestrator
-* It is a web service that handles management and execution of procs
+The Proctor service uses plugins for:
+ 1. Authentication
+ 2. Authorization
+ 3. Notification
-### Dev environment setup
+This creates the limitation that the proctor service can only be used on Linux and MacOS (until Go plugins support other operating systems).
-* Ensure local postgres server is up and running
-* Ensure local redis server is up and running
-* Install kubectl
-* Configure kubectl to point to desired kubernetes cluster. For setting up kubernetes cluster locally, refer [here](https://kubernetes.io/docs/getting-started-guides/minikube/)
-* Run a kubectl proxy server on your local machine
-* [Configure proctord](#proctord-configuration)
-* Setup & Run database migrations by running this command `make db.setup` from the repo directory
-* Start service by `make start-server`
-* Run `curl {host-address:port}/ping` for health-check of service
+For details about plugins please read [here](./docs/plugin.md)
-#### proctord configuration
+## Procs Creation
-* Copy `.env.sample` into `.env` file
-* Please refer meaning of `proctord` configuration [here](#proctord-configuration-explanation)
-* Modify configuration for dev setup in `.env` file
-* Export environment variables configured in `.env` file by running `source .env`
-* `proctor server` gets configuration from environment variables.
+You can read [here](./docs/creating_procs.md) to learn more about creating procs.
-#### proctord configuration explanation
+## Proctor Service Configuration Explanation
* `PROCTOR_APP_PORT` is port on which service will run
* `PROCTOR_LOG_LEVEL` defines log levels of service. Available options are: `debug`,`info`,`warn`,`error`,`fatal`,`panic`
* `PROCTOR_REDIS_ADDRESS` is hostname and port of redis store for jobs configuration and metadata
+* `PROCTOR_REDIS_PASSWORD` is password to access redis store for jobs configuration and metadata
* `PROCTOR_REDIS_MAX_ACTIVE_CONNECTIONS` defines maximum active connections to redis. Maximum idle connections is half of this config
* `PROCTOR_LOGS_STREAM_READ_BUFFER_SIZE` and `PROCTOR_LOGS_STREAM_WRITE_BUFFER_SIZE` is the buffer size for websocket connection while streaming logs
* `PROCTOR_KUBE_CONFIG` needs to be set only if service is running outside a kubernetes cluster
@@ -55,9 +139,7 @@ Proctor is a developer friendly automation orchestrator. It helps everyone use a
* If a job doesn't reach completion, it is terminated after `PROCTOR_KUBE_JOB_ACTIVE_DEADLINE_SECONDS`
* `PROCTOR_KUBE_JOB_RETRIES` is the number of retries for a kubernetes job (on failure)
* `PROCTOR_DEFAULT_NAMESPACE` is the namespace under which jobs will be run in kubernetes cluster. By default, K8s has namespace "default". If you set another value, please create namespace in K8s before deploying `proctord`
-* `PROCTOR_KUBE_CLUSTER_HOST_NAME` is address/ip address to api-server of kube cluster. It is used for fetching logs of a pod using https
-* `PROCTOR_KUBE_CA_CERT_ENCODED` is the CA cert file encoded in base64. This is used for establishing authority while talking to kubernetes api-server on a public https call
-* `PROCTOR_KUBE_BASIC_AUTH_ENCODED` is the base64 encoded authentication of kubernetes. Enocde `username:password` to base64 and set this config.
+* `PROCTOR_KUBE_CONTEXT` is the name of the kube context to use when running out of cluster.
* Before streaming logs of jobs, `PROCTOR_KUBE_POD_LIST_WAIT_TIME` is the time to wait until jobs and pods are in active/successful/failed state
* `PROCTOR_POSTGRES_USER`, `PROCTOR_POSTGRES_PASSWORD`, `PROCTOR_POSTGRES_HOST` and `PROCTOR_POSTGRES_PORT` is the username and password to the postgres database you wish to connect to
* Set `PROCTOR_POSTGRES_DATABASE` to `proctord_development` for development purpose
@@ -69,4 +151,7 @@ Proctor is a developer friendly automation orchestrator. It helps everyone use a
* `PROCTOR_SCHEDULED_JOBS_FETCH_INTERVAL_IN_MINS` is the interval at which the scheduler fetches updated jobs from database
* `PROCTOR_MAIL_USERNAME`, `PROCTOR_MAIL_PASSWORD`, `PROCTOR_MAIL_SERVER_HOST`, `PROCTOR_MAIL_SERVER_PORT` are the creds required to send notification to users on scheduled jobs execution
* `PROCTOR_JOB_POD_ANNOTATIONS` is used to set any kubernetes pod specific annotations.
-* `PROCTOR_SENTRY_DSN` is used to set sentry DSN.
+* `PROCTOR_AUTH_ENABLED` is used to set whether Authentication is enabled or not.
+* `PROCTOR_AUTH_PLUGIN_BINARY` binary location of AUTH Plugin +* `PROCTOR_AUTH_PLUGIN_EXPORTED` variable name exported by the Auth Plugin +* `PROCTOR_REQUIRED_ADMIN_GROUP` list group required by user to access admin features for proctor such as post Metadata and Secrets diff --git a/doc/proctor-logo.png b/assets/img/proctor-logo.png similarity index 100% rename from doc/proctor-logo.png rename to assets/img/proctor-logo.png diff --git a/assets/img/proctor_components.jpg b/assets/img/proctor_components.jpg new file mode 100644 index 00000000..ab3f9945 Binary files /dev/null and b/assets/img/proctor_components.jpg differ diff --git a/doc/proctor_describe.gif b/assets/img/proctor_describe.gif similarity index 100% rename from doc/proctor_describe.gif rename to assets/img/proctor_describe.gif diff --git a/doc/proctor_execute.gif b/assets/img/proctor_execute.gif similarity index 100% rename from doc/proctor_execute.gif rename to assets/img/proctor_execute.gif diff --git a/doc/schedule.gif b/assets/img/schedule.gif similarity index 100% rename from doc/schedule.gif rename to assets/img/schedule.gif diff --git a/doc/schedule_describe.gif b/assets/img/schedule_describe.gif similarity index 100% rename from doc/schedule_describe.gif rename to assets/img/schedule_describe.gif diff --git a/doc/schedule_list.gif b/assets/img/schedule_list.gif similarity index 100% rename from doc/schedule_list.gif rename to assets/img/schedule_list.gif diff --git a/doc/schedule_remove.gif b/assets/img/schedule_remove.gif similarity index 100% rename from doc/schedule_remove.gif rename to assets/img/schedule_remove.gif diff --git a/cmd/cli/main.go b/cmd/cli/main.go new file mode 100644 index 00000000..29e4d2a6 --- /dev/null +++ b/cmd/cli/main.go @@ -0,0 +1,18 @@ +package main + +import ( + "proctor/internal/app/cli/command" + "proctor/internal/app/cli/command/version/github" + "proctor/internal/app/cli/config" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" +) + +func main() { + printer := io.GetPrinter() + proctorConfigLoader := config.NewLoader() + proctorDClient := daemon.NewClient(printer, proctorConfigLoader) + githubClient := github.NewClient() + + command.Execute(printer, proctorDClient, githubClient) +} diff --git a/cmd/execution/executioner.go b/cmd/execution/executioner.go deleted file mode 100644 index aa45b60c..00000000 --- a/cmd/execution/executioner.go +++ /dev/null @@ -1,80 +0,0 @@ -package execution - -import ( - "fmt" - "strings" - - "github.com/fatih/color" - "proctor/daemon" - "proctor/io" - proctord_utility "proctor/proctord/utility" - "github.com/spf13/cobra" -) - -func NewCmd(printer io.Printer, proctorDClient daemon.Client, osExitFunc func(int)) *cobra.Command { - return &cobra.Command{ - Use: "execute", - Short: "Execute a proc with given arguments", - Long: "To execute a proc, this command helps communicate with `proctord` and streams to logs of proc in execution", - Example: "proctor execute proc-one SOME_VAR=foo ANOTHER_VAR=bar\nproctor execute proc-two ANY_VAR=baz", - Args: cobra.MinimumNArgs(1), - - Run: func(cmd *cobra.Command, args []string) { - procName := args[0] - printer.Println(fmt.Sprintf("%-40s %-100s", "Executing Proc", procName), color.Reset) - - procArgs := make(map[string]string) - if len(args) > 1 { - printer.Println("With Variables", color.FgMagenta) - for _, v := range args[1:] { - arg := strings.Split(v, "=") - - if len(arg) < 2 { - printer.Println(fmt.Sprintf("%-40s %-100s", "\nIncorrect variable format\n", v), color.FgRed) - continue - } - - 
combinedArgValue := strings.Join(arg[1:], "=") - procArgs[arg[0]] = combinedArgValue - - printer.Println(fmt.Sprintf("%-40s %-100s", arg[0], combinedArgValue), color.Reset) - } - } else { - printer.Println("With No Variables", color.FgRed) - } - - executedProcName, err := proctorDClient.ExecuteProc(procName, procArgs) - if err != nil { - printer.Println(err.Error(), color.FgRed) - print() - osExitFunc(1) - return - } - - printer.Println("Proc submitted for execution. \nStreaming logs:", color.FgGreen) - err = proctorDClient.StreamProcLogs(executedProcName) - if err != nil { - printer.Println("Error Streaming Logs", color.FgRed) - osExitFunc(1) - return - } - - printer.Println("Log stream of proc completed.", color.FgGreen) - - procExecutionStatus, err := proctorDClient.GetDefinitiveProcExecutionStatus(executedProcName) - if err != nil { - printer.Println("Error Fetching Proc execution status", color.FgRed) - osExitFunc(1) - return - } - - if procExecutionStatus != proctord_utility.JobSucceeded { - printer.Println("Proc execution failed", color.FgRed) - osExitFunc(1) - return - } - - printer.Println("Proc execution successful", color.FgGreen) - }, - } -} diff --git a/cmd/execution/executioner_test.go b/cmd/execution/executioner_test.go deleted file mode 100644 index 298ae01f..00000000 --- a/cmd/execution/executioner_test.go +++ /dev/null @@ -1,216 +0,0 @@ -package execution - -import ( - "errors" - "fmt" - "github.com/stretchr/testify/mock" - "testing" - - "github.com/fatih/color" - "proctor/daemon" - "proctor/io" - "proctor/proctord/utility" - "github.com/spf13/cobra" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" -) - -type ExecutionCmdTestSuite struct { - suite.Suite - mockPrinter *io.MockPrinter - mockProctorDClient *daemon.MockClient - testExecutionCmd *cobra.Command -} - -func (s *ExecutionCmdTestSuite) SetupTest() { - s.mockPrinter = &io.MockPrinter{} - s.mockProctorDClient = &daemon.MockClient{} - s.testExecutionCmd = NewCmd(s.mockPrinter, s.mockProctorDClient, func(exitCode int) {}) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdUsage() { - assert.Equal(s.T(), "execute", s.testExecutionCmd.Use) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdHelp() { - assert.Equal(s.T(), "Execute a proc with given arguments", s.testExecutionCmd.Short) - assert.Equal(s.T(), "To execute a proc, this command helps communicate with `proctord` and streams to logs of proc in execution", s.testExecutionCmd.Long) - assert.Equal(s.T(), "proctor execute proc-one SOME_VAR=foo ANOTHER_VAR=bar\nproctor execute proc-two ANY_VAR=baz", s.testExecutionCmd.Example) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmd() { - args := []string{"say-hello-world", "SAMPLE_ARG_ONE=any", "SAMPLE_ARG_TWO=variable"} - procArgs := make(map[string]string) - procArgs["SAMPLE_ARG_ONE"] = "any" - procArgs["SAMPLE_ARG_TWO"] = "variable" - - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() - s.mockPrinter.On("Println", "With Variables", color.FgMagenta).Once() - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "SAMPLE_ARG_ONE", "any"), color.Reset).Once() - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "SAMPLE_ARG_TWO", "variable"), color.Reset).Once() - - s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return("executed-proc-name", nil).Once() - - s.mockPrinter.On("Println", "Proc submitted for execution. 
\nStreaming logs:", color.FgGreen).Once() - - s.mockProctorDClient.On("StreamProcLogs", "executed-proc-name").Return(nil).Once() - s.mockPrinter.On("Println", "Log stream of proc completed.", color.FgGreen).Once() - - s.mockProctorDClient.On("GetDefinitiveProcExecutionStatus", "executed-proc-name").Return(utility.JobSucceeded, nil).Once() - s.mockPrinter.On("Println", "Proc execution successful", color.FgGreen).Once() - - s.testExecutionCmd.Run(&cobra.Command{}, args) - - s.mockProctorDClient.AssertExpectations(s.T()) - s.mockPrinter.AssertExpectations(s.T()) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdForNoProcVariables() { - args := []string{"say-hello-world"} - - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() - s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() - - procArgs := make(map[string]string) - s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return("executed-proc-name", nil).Once() - - s.mockPrinter.On("Println", "Proc submitted for execution. \nStreaming logs:", color.FgGreen).Once() - - s.mockProctorDClient.On("StreamProcLogs", "executed-proc-name").Return(nil).Once() - s.mockPrinter.On("Println", "Log stream of proc completed.", color.FgGreen).Once() - - s.mockProctorDClient.On("GetDefinitiveProcExecutionStatus", "executed-proc-name").Return(utility.JobSucceeded, nil).Once() - s.mockPrinter.On("Println", "Proc execution successful", color.FgGreen).Once() - - s.testExecutionCmd.Run(&cobra.Command{}, args) - - s.mockProctorDClient.AssertExpectations(s.T()) - s.mockPrinter.AssertExpectations(s.T()) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdForIncorrectVariableFormat() { - args := []string{"say-hello-world", "incorrect-format"} - - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() - s.mockPrinter.On("Println", "With Variables", color.FgMagenta).Once() - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "\nIncorrect variable format\n", "incorrect-format"), color.FgRed).Once() - - procArgs := make(map[string]string) - s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return("executed-proc-name", nil).Once() - - s.mockPrinter.On("Println", "Proc submitted for execution. 
\nStreaming logs:", color.FgGreen).Once() - - s.mockProctorDClient.On("StreamProcLogs", "executed-proc-name").Return(nil).Once() - s.mockPrinter.On("Println", "Log stream of proc completed.", color.FgGreen).Once() - - s.mockProctorDClient.On("GetDefinitiveProcExecutionStatus", "executed-proc-name").Return(utility.JobSucceeded, nil).Once() - s.mockPrinter.On("Println", "Proc execution successful", color.FgGreen).Once() - - s.testExecutionCmd.Run(&cobra.Command{}, args) - - s.mockProctorDClient.AssertExpectations(s.T()) - s.mockPrinter.AssertExpectations(s.T()) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdForProctorDExecutionFailure() { - args := []string{"say-hello-world"} - - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() - s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() - - procArgs := make(map[string]string) - s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return("", errors.New("test error")).Once() - - s.mockPrinter.On("Println", mock.Anything, color.FgRed).Once() - - osExitFunc := func(exitCode int) { - assert.Equal(s.T(), 1, exitCode) - } - testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) - testExecutionCmdOSExit.Run(&cobra.Command{}, args) - - s.mockProctorDClient.AssertExpectations(s.T()) - s.mockPrinter.AssertExpectations(s.T()) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdForProctorDLogStreamingFailure() { - args := []string{"say-hello-world"} - - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() - s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() - - procArgs := make(map[string]string) - s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return("executed-proc-name", nil).Once() - - s.mockPrinter.On("Println", "Proc submitted for execution. \nStreaming logs:", color.FgGreen).Once() - - s.mockProctorDClient.On("StreamProcLogs", "executed-proc-name").Return(errors.New("error")).Once() - s.mockPrinter.On("Println", "Error Streaming Logs", color.FgRed).Once() - - osExitFunc := func(exitCode int) { - assert.Equal(s.T(), 1, exitCode) - } - testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) - testExecutionCmdOSExit.Run(&cobra.Command{}, args) - - s.mockProctorDClient.AssertExpectations(s.T()) - s.mockPrinter.AssertExpectations(s.T()) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdForProctorDGetDefinitiveProcExecutionStatusError() { - args := []string{"say-hello-world"} - - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() - s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() - - procArgs := make(map[string]string) - s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return("executed-proc-name", nil).Once() - - s.mockPrinter.On("Println", "Proc submitted for execution. 
\nStreaming logs:", color.FgGreen).Once() - - s.mockProctorDClient.On("StreamProcLogs", "executed-proc-name").Return(nil).Once() - s.mockPrinter.On("Println", "Log stream of proc completed.", color.FgGreen).Once() - - s.mockProctorDClient.On("GetDefinitiveProcExecutionStatus", "executed-proc-name").Return("", errors.New("some error")).Once() - s.mockPrinter.On("Println", "Error Fetching Proc execution status", color.FgRed).Once() - - osExitFunc := func(exitCode int) { - assert.Equal(s.T(), 1, exitCode) - } - testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) - testExecutionCmdOSExit.Run(&cobra.Command{}, args) - - s.mockProctorDClient.AssertExpectations(s.T()) - s.mockPrinter.AssertExpectations(s.T()) -} - -func (s *ExecutionCmdTestSuite) TestExecutionCmdForProctorDGetDefinitiveProcExecutionStatusFailure() { - args := []string{"say-hello-world"} - - s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() - s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() - - procArgs := make(map[string]string) - s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return("executed-proc-name", nil).Once() - - s.mockPrinter.On("Println", "Proc submitted for execution. \nStreaming logs:", color.FgGreen).Once() - - s.mockProctorDClient.On("StreamProcLogs", "executed-proc-name").Return(nil).Once() - s.mockPrinter.On("Println", "Log stream of proc completed.", color.FgGreen).Once() - - s.mockProctorDClient.On("GetDefinitiveProcExecutionStatus", "executed-proc-name").Return(utility.JobFailed, nil).Once() - s.mockPrinter.On("Println", "Proc execution failed", color.FgRed).Once() - - osExitFunc := func(exitCode int) { - assert.Equal(s.T(), 1, exitCode) - } - testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) - testExecutionCmdOSExit.Run(&cobra.Command{}, args) - - s.mockProctorDClient.AssertExpectations(s.T()) - s.mockPrinter.AssertExpectations(s.T()) -} - -func TestExecutionCmdTestSuite(t *testing.T) { - suite.Run(t, new(ExecutionCmdTestSuite)) -} diff --git a/exec/server.go b/cmd/server/main.go similarity index 75% rename from exec/server.go rename to cmd/server/main.go index 35203bd8..6d3f608e 100644 --- a/exec/server.go +++ b/cmd/server/main.go @@ -2,20 +2,16 @@ package main import ( "os" + "proctor/internal/app/service/infra/db/migration" + "proctor/internal/app/service/infra/logger" + "proctor/internal/app/service/schedule/worker" + "proctor/internal/app/service/server" - "github.com/getsentry/raven-go" "github.com/urfave/cli" - - "proctor/proctord/config" - "proctor/proctord/logger" - "proctor/proctord/scheduler" - "proctor/proctord/server" - "proctor/proctord/storage/postgres" ) func main() { logger.Setup() - raven.SetDSN(config.SentryDSN()) proctord := cli.NewApp() proctord.Name = "proctord" @@ -26,7 +22,7 @@ func main() { Name: "migrate", Description: "Run database migrations for proctord", Action: func(c *cli.Context) { - err := postgres.Up() + err := migration.Up() if err != nil { panic(err.Error()) } @@ -37,7 +33,7 @@ func main() { Name: "rollback", Description: "Rollback database migrations by one step for proctord", Action: func(c *cli.Context) { - err := postgres.DownOneStep() + err := migration.DownOneStep() if err != nil { panic(err.Error()) } @@ -56,10 +52,10 @@ func main() { Name: "start-scheduler", Usage: "starts scheduler", Action: func(c *cli.Context) error { - return scheduler.Start() + return worker.Start() }, }, } - proctord.Run(os.Args) 
+ _ = proctord.Run(os.Args) } diff --git a/config.yaml b/config.yaml new file mode 100644 index 00000000..2c8408bd --- /dev/null +++ b/config.yaml @@ -0,0 +1,51 @@ +kube: + config: out-of-cluster + context: minikube + job: + active.deadline.seconds: 60 + retries: 0 + service.account.name: default + wait.for.resource.poll.count: 5 + log.process.wait.time: 60 +log.level: debug +app.port: 5000 +default.namespace: default +redis: + address: localhost:6379 + max.active.connections: 10 +logs.stream: + read.buffer.size: 140 + write.buffer.size: 4096 +postgres: + user: postgres + password: + host: localhost + port: 5432 + database: proctord_test + max.connections: 50 + connections.max.lifetime: 30 +new.relic: + app.name: proctor-service + licence.key: +min.client.version: v2.0.0 +scheduled.jobs.fetch.interval.in.mins: 5 +mail: + username: user@mail.com + password: password + server: + host: smtp.mail.com + port: 123 +job.pod.annotations: "{\"key.one\":\"true\"}" +docs.path: /path/to/docs/dir +auth: + enabled: false + plugin: + binary: + exported: GateAuth + required.admin.group: proctor_admin +notification.plugin: + binary: + exported: SlackNotification + + + diff --git a/daemon/client.go b/daemon/client.go deleted file mode 100644 index 9805fc1e..00000000 --- a/daemon/client.go +++ /dev/null @@ -1,429 +0,0 @@ -package daemon - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - io_reader "io" - "io/ioutil" - "net" - "net/http" - "net/url" - "os" - "os/signal" - "time" - - "proctor/cmd/version" - - "github.com/briandowns/spinner" - "github.com/fatih/color" - "proctor/config" - "proctor/io" - proc_metadata "proctor/proctord/jobs/metadata" - "proctor/proctord/jobs/schedule" - "proctor/proctord/utility" - "github.com/gorilla/websocket" -) - -type Client interface { - ListProcs() ([]proc_metadata.Metadata, error) - ExecuteProc(string, map[string]string) (string, error) - StreamProcLogs(string) error - GetDefinitiveProcExecutionStatus(string) (string, error) - ScheduleJob(string, string, string, string,string, map[string]string) (string, error) - ListScheduledProcs() ([]schedule.ScheduledJob, error) - DescribeScheduledProc(string) (schedule.ScheduledJob, error) - RemoveScheduledProc(string) error -} - -type client struct { - printer io.Printer - proctorConfigLoader config.Loader - proctordHost string - emailId string - accessToken string - clientVersion string - connectionTimeoutSecs time.Duration - procExecutionStatusPollCount int -} - -type ProcToExecute struct { - Name string `json:"name"` - Args map[string]string `json:"args"` -} - -type ScheduleJobPayload struct { - ID string `json:"id"` - Name string `json:"name"` - Tags string `json:"tags"` - Time string `json:"time"` - NotificationEmails string `json:"notification_emails"` - Group string `json:"group_name"` - Args map[string]string `json:"args"` -} - -func NewClient(printer io.Printer, proctorConfigLoader config.Loader) Client { - return &client{ - clientVersion: version.ClientVersion, - printer: printer, - proctorConfigLoader: proctorConfigLoader, - } -} - -func (c *client) ScheduleJob(name, tags, time, notificationEmails, group string, jobArgs map[string]string) (string, error) { - err := c.loadProctorConfig() - if err != nil { - return "", err - } - jobPayload := ScheduleJobPayload{ - Name: name, - Tags: tags, - Time: time, - NotificationEmails: notificationEmails, - Args: jobArgs, - Group: group, - } - - requestBody, err := json.Marshal(jobPayload) - if err != nil { - return "", err - } - - client := &http.Client{} - req, err := 
http.NewRequest("POST", "http://"+c.proctordHost+"/jobs/schedule", bytes.NewReader(requestBody)) - req.Header.Add("Content-Type", "application/json") - req.Header.Add(utility.UserEmailHeaderKey, c.emailId) - req.Header.Add(utility.AccessTokenHeaderKey, c.accessToken) - req.Header.Add(utility.ClientVersionHeaderKey, c.clientVersion) - resp, err := client.Do(req) - - if err != nil { - return "", buildNetworkError(err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusCreated { - return "", buildHTTPError(c, resp) - } - - var scheduledJob ScheduleJobPayload - err = json.NewDecoder(resp.Body).Decode(&scheduledJob) - - return scheduledJob.ID, err -} - -func (c *client) loadProctorConfig() error { - proctorConfig, err := c.proctorConfigLoader.Load() - if err != (config.ConfigError{}) { - c.printer.Println(err.RootError().Error(), color.FgRed) - c.printer.Println(err.Message, color.FgGreen) - return errors.New("Encountered error while loading config, exiting.") - } - - c.proctordHost = proctorConfig.Host - c.emailId = proctorConfig.Email - c.accessToken = proctorConfig.AccessToken - c.connectionTimeoutSecs = proctorConfig.ConnectionTimeoutSecs - c.procExecutionStatusPollCount = proctorConfig.ProcExecutionStatusPollCount - - return nil -} - -func (c *client) ListProcs() ([]proc_metadata.Metadata, error) { - err := c.loadProctorConfig() - if err != nil { - return []proc_metadata.Metadata{}, err - } - - client := &http.Client{ - Timeout: c.connectionTimeoutSecs, - } - req, err := http.NewRequest("GET", "http://"+c.proctordHost+"/jobs/metadata", nil) - req.Header.Add(utility.UserEmailHeaderKey, c.emailId) - req.Header.Add(utility.AccessTokenHeaderKey, c.accessToken) - req.Header.Add(utility.ClientVersionHeaderKey, c.clientVersion) - - resp, err := client.Do(req) - if err != nil { - return []proc_metadata.Metadata{}, buildNetworkError(err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - return []proc_metadata.Metadata{}, buildHTTPError(c, resp) - } - - var procList []proc_metadata.Metadata - err = json.NewDecoder(resp.Body).Decode(&procList) - return procList, err -} - -func (c *client) ListScheduledProcs() ([]schedule.ScheduledJob, error) { - err := c.loadProctorConfig() - if err != nil { - return []schedule.ScheduledJob{}, err - } - - client := &http.Client{ - Timeout: c.connectionTimeoutSecs, - } - req, err := http.NewRequest("GET", "http://"+c.proctordHost+"/jobs/schedule", nil) - req.Header.Add(utility.UserEmailHeaderKey, c.emailId) - req.Header.Add(utility.AccessTokenHeaderKey, c.accessToken) - req.Header.Add(utility.ClientVersionHeaderKey, c.clientVersion) - - resp, err := client.Do(req) - if err != nil { - return []schedule.ScheduledJob{}, buildNetworkError(err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - return []schedule.ScheduledJob{}, buildHTTPError(c, resp) - } - - var scheduledProcsList []schedule.ScheduledJob - err = json.NewDecoder(resp.Body).Decode(&scheduledProcsList) - return scheduledProcsList, err -} - -func (c *client) DescribeScheduledProc(jobID string) (schedule.ScheduledJob, error) { - err := c.loadProctorConfig() - if err != nil { - return schedule.ScheduledJob{}, err - } - - client := &http.Client{ - Timeout: c.connectionTimeoutSecs, - } - url := fmt.Sprintf("http://"+c.proctordHost+"/jobs/schedule/%s", jobID) - req, err := http.NewRequest("GET", url, nil) - req.Header.Add(utility.UserEmailHeaderKey, c.emailId) - req.Header.Add(utility.AccessTokenHeaderKey, c.accessToken) - 
req.Header.Add(utility.ClientVersionHeaderKey, c.clientVersion) - - resp, err := client.Do(req) - if err != nil { - return schedule.ScheduledJob{}, buildNetworkError(err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - return schedule.ScheduledJob{}, buildHTTPError(c, resp) - } - - var scheduledProc schedule.ScheduledJob - err = json.NewDecoder(resp.Body).Decode(&scheduledProc) - return scheduledProc, err -} - -func (c *client) RemoveScheduledProc(jobID string) error { - err := c.loadProctorConfig() - if err != nil { - return err - } - - client := &http.Client{ - Timeout: c.connectionTimeoutSecs, - } - url := fmt.Sprintf("http://"+c.proctordHost+"/jobs/schedule/%s", jobID) - req, err := http.NewRequest("DELETE", url, nil) - req.Header.Add(utility.UserEmailHeaderKey, c.emailId) - req.Header.Add(utility.AccessTokenHeaderKey, c.accessToken) - req.Header.Add(utility.ClientVersionHeaderKey, c.clientVersion) - - resp, err := client.Do(req) - if err != nil { - return buildNetworkError(err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusOK { - return buildHTTPError(c, resp) - } - - return nil -} - -func (c *client) ExecuteProc(name string, args map[string]string) (string, error) { - err := c.loadProctorConfig() - if err != nil { - return "", err - } - - procToExecute := ProcToExecute{ - Name: name, - Args: args, - } - - requestBody, err := json.Marshal(procToExecute) - if err != nil { - return "", err - } - - client := &http.Client{} - req, err := http.NewRequest("POST", "http://"+c.proctordHost+"/jobs/execute", bytes.NewReader(requestBody)) - req.Header.Add("Content-Type", "application/json") - req.Header.Add(utility.UserEmailHeaderKey, c.emailId) - req.Header.Add(utility.AccessTokenHeaderKey, c.accessToken) - req.Header.Add(utility.ClientVersionHeaderKey, c.clientVersion) - resp, err := client.Do(req) - if err != nil { - return "", buildNetworkError(err) - } - - defer resp.Body.Close() - if resp.StatusCode != http.StatusCreated { - return "", buildHTTPError(c, resp) - } - - var executedProc ProcToExecute - err = json.NewDecoder(resp.Body).Decode(&executedProc) - - return executedProc.Name, err -} - -func (c *client) StreamProcLogs(name string) error { - err := c.loadProctorConfig() - if err != nil { - return err - } - - animation := spinner.New(spinner.CharSets[9], 100*time.Millisecond) - animation.Color("green") - animation.Start() - - interrupt := make(chan os.Signal, 1) - signal.Notify(interrupt, os.Interrupt) - - proctodWebsocketURL := url.URL{Scheme: "ws", Host: c.proctordHost, Path: "/jobs/logs"} - proctodWebsocketURLWithProcName := proctodWebsocketURL.String() + "?" 
+ "job_name=" + name - - headers := make(map[string][]string) - token := []string{c.accessToken} - emailId := []string{c.emailId} - clientVersion := []string{c.clientVersion} - headers[utility.AccessTokenHeaderKey] = token - headers[utility.UserEmailHeaderKey] = emailId - headers[utility.ClientVersionHeaderKey] = clientVersion - - wsConn, response, err := websocket.DefaultDialer.Dial(proctodWebsocketURLWithProcName, headers) - if err != nil { - animation.Stop() - if response.StatusCode == http.StatusUnauthorized { - if c.emailId == "" || c.accessToken == "" { - return fmt.Errorf("%s\n%s", utility.UnauthorizedErrorHeader, utility.UnauthorizedErrorMissingConfig) - } - return fmt.Errorf("%s\n%s", utility.UnauthorizedErrorHeader, utility.UnauthorizedErrorInvalidConfig) - } - return err - } - defer wsConn.Close() - - logStreaming := make(chan int) - go func() { - for { - _, message, err := wsConn.ReadMessage() - animation.Stop() - if err != nil { - fmt.Println() - logStreaming <- 0 - return - } - fmt.Println(string(message)) - } - }() - - for { - select { - case <-interrupt: - color.New(color.FgRed).Println("User interrupt while streaming proc logs") - err := wsConn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) - return err - case <-logStreaming: - return nil - } - } -} - -func (c *client) GetDefinitiveProcExecutionStatus(procName string) (string, error) { - err := c.loadProctorConfig() - if err != nil { - return "", err - } - - for count := 0; count < c.procExecutionStatusPollCount; count += 1 { - httpClient := &http.Client{ - Timeout: c.connectionTimeoutSecs, - } - - req, err := http.NewRequest("GET", "http://"+c.proctordHost+"/jobs/execute/"+procName+"/status", nil) - req.Header.Add(utility.UserEmailHeaderKey, c.emailId) - req.Header.Add(utility.AccessTokenHeaderKey, c.accessToken) - req.Header.Add(utility.ClientVersionHeaderKey, c.clientVersion) - - resp, err := httpClient.Do(req) - if err != nil { - return "", buildNetworkError(err) - } - - if resp.StatusCode != http.StatusOK { - return "", buildHTTPError(c, resp) - } - - body, err := ioutil.ReadAll(resp.Body) - defer resp.Body.Close() - if err != nil { - return "", err - } - - procExecutionStatus := string(body) - if procExecutionStatus == utility.JobSucceeded || procExecutionStatus == utility.JobFailed { - return procExecutionStatus, nil - } - - time.Sleep(time.Duration(count) * 100 * time.Millisecond) - } - return "", errors.New(fmt.Sprintf("No definitive status received for proc name %s from proctord", procName)) -} - -func buildNetworkError(err error) error { - if netError, ok := err.(net.Error); ok && netError.Timeout() { - return fmt.Errorf("%s\n%s\n%s", utility.GenericTimeoutErrorHeader, netError.Error(), utility.GenericTimeoutErrorBody) - } - return fmt.Errorf("%s\n%s", utility.GenericNetworkErrorHeader, err.Error()) -} - -func buildHTTPError(c *client, resp *http.Response) error { - if resp.StatusCode == http.StatusUnauthorized { - if c.emailId == "" || c.accessToken == "" { - return fmt.Errorf("%s\n%s", utility.UnauthorizedErrorHeader, utility.UnauthorizedErrorMissingConfig) - } - return fmt.Errorf("%s\n%s", utility.UnauthorizedErrorHeader, utility.UnauthorizedErrorInvalidConfig) - } - - if resp.StatusCode == http.StatusBadRequest { - return getHttpResponseError(resp.Body) - } - - if resp.StatusCode == http.StatusNoContent { - return fmt.Errorf(utility.NoScheduledJobsError) - } - - if resp.StatusCode == http.StatusNotFound { - return fmt.Errorf(utility.JobNotFoundError) - } 
-
-	if resp.StatusCode == http.StatusForbidden {
-		return fmt.Errorf(utility.JobForbiddenErrorHeader)
-	}
-
-	return fmt.Errorf("%s\nStatus Code: %d, %s", utility.GenericResponseErrorHeader, resp.StatusCode, http.StatusText(resp.StatusCode))
-}
-
-func getHttpResponseError(response io_reader.ReadCloser) error {
-	body, _ := ioutil.ReadAll(response)
-	bodyString := string(body)
-	return fmt.Errorf(bodyString)
-}
diff --git a/daemon/client_mock.go b/daemon/client_mock.go
deleted file mode 100644
index bb6f386d..00000000
--- a/daemon/client_mock.go
+++ /dev/null
@@ -1,51 +0,0 @@
-package daemon
-
-import (
-	proc_metadata "proctor/proctord/jobs/metadata"
-	"proctor/proctord/jobs/schedule"
-	"github.com/stretchr/testify/mock"
-)
-
-type MockClient struct {
-	mock.Mock
-}
-
-func (m *MockClient) ListProcs() ([]proc_metadata.Metadata, error) {
-	args := m.Called()
-	return args.Get(0).([]proc_metadata.Metadata), args.Error(1)
-}
-
-func (m *MockClient) ListScheduledProcs() ([]schedule.ScheduledJob, error) {
-	args := m.Called()
-	return args.Get(0).([]schedule.ScheduledJob), args.Error(1)
-}
-
-func (m *MockClient) ExecuteProc(name string, procArgs map[string]string) (string, error) {
-	args := m.Called(name, procArgs)
-	return args.Get(0).(string), args.Error(1)
-}
-
-func (m *MockClient) StreamProcLogs(name string) error {
-	args := m.Called(name)
-	return args.Error(0)
-}
-
-func (m *MockClient) GetDefinitiveProcExecutionStatus(name string) (string, error) {
-	args := m.Called(name)
-	return args.Get(0).(string), args.Error(1)
-}
-
-func (m *MockClient) ScheduleJob(name, tags, time, notificationEmails string,group string, jobArgs map[string]string) (string, error) {
-	args := m.Called(name, tags, time, notificationEmails, group, jobArgs)
-	return args.Get(0).(string), args.Error(1)
-}
-
-func (m *MockClient) DescribeScheduledProc(jobID string) (schedule.ScheduledJob, error) {
-	args := m.Called(jobID)
-	return args.Get(0).(schedule.ScheduledJob), args.Error(1)
-}
-
-func (m *MockClient) RemoveScheduledProc(jobID string) error {
-	args := m.Called(jobID)
-	return args.Error(0)
-}
diff --git a/docs/creating_procs.md b/docs/creating_procs.md
new file mode 100644
index 00000000..064c559b
--- /dev/null
+++ b/docs/creating_procs.md
@@ -0,0 +1,115 @@
+# Creating procs
+
+The main purpose of proctor is running procs, so you need to know how to create them.
+Here are the recommended steps to create procs.
+
+#### 1. Define the task you want to automate.
+A candidate task for a procs usually involves access to restricted resources or has complicated steps.
+
+#### 2. Define the interface of the task.
+A task takes input, performs an operation, and provides output.
+
+#### 3. Research how to automate the task
+Before you start writing the script, research how to do the automation by reading the relevant documentation.
+
+#### 4. Write the script
+Because procs are run by the executor (which is a computer), you need to write the task down as a script.
+Write the script such that it's runnable on your local machine. For this step you can hardcode the input; we'll extract it out later.
+
+#### 5. Test the script
+Don't skip this step; please test the script on your local machine.
+
+#### 6. Package script in a docker image
+Proctor leverages containers to easily run a task with all its dependencies, which is why you need to package your script into a docker image.
+Install every dependency in your dockerfile. Provide an `ENTRYPOINT` to run the script by default when the container is spun up from the image.
+
+#### 7. Extract all hard-coded variables and secrets as configurable ENV vars
+The image is meant to be reusable so users can apply it to their own use case; this is why every variable that defines the behaviour of the automation should be extracted as an arg.
+
+#### 8. Perform validations on ENV vars
+Some ENV vars are mandatory in order to use the automation; validating them before running the script helps fail fast and provides better error messages to the user.
+
+#### 9. Test the image (Pass all variables as env)
+Build the image, then run it in local docker to make sure your image runs as expected.
+After you complete this step, your task can be automated using docker on any machine; after the next steps your automation will evolve into a `proc`.
+
+#### 10. Create metadata
+Create a metadata file to describe your procs to users.
+Metadata looks like this:
+```json
+{
+  "name": "echo-worker",
+  "description": "This procs will echo your name",
+  "image_name": "walbertusd/echo-worker",
+  "env_vars": {
+    "secrets": [
+      {
+        "name": "SECRET_NAME",
+        "description": "My other secret name"
+      }
+    ],
+    "args": [
+      {
+        "name": "NAME",
+        "description": "Name to be echoed"
+      }
+    ]
+  },
+  "authorized_groups": [
+    "my-group"
+  ],
+  "author": "Dembo",
+  "contributors": "Dembo",
+  "organization": "GoJek"
+}
+```
+
+#### 11. Upload metadata
+Send a `POST` request to your proctor service on `/metadata`; it receives an array of metadata as json, so your request body should look like this:
+```json
+[
+  {
+    "name": "echo-worker",
+    "description": "This procs will echo your name",
+    "image_name": "walbertusd/echo-worker",
+    "env_vars": {
+      "secrets": [
+        {
+          "name": "SECRET_NAME",
+          "description": "My other secret name"
+        }
+      ],
+      "args": [
+        {
+          "name": "NAME",
+          "description": "Name to be echoed"
+        }
+      ]
+    },
+    "authorized_groups": [
+      "my-group"
+    ],
+    "author": "Dembo",
+    "contributors": "Dembo",
+    "organization": "GoJek"
+  }
+]
+```
+
+#### 12. Store secret
+Users aren't supposed to know the secret values needed to run your jobs, so make sure to keep them secret.
+Send a `POST` request to your proctor service on `/secret`; your request body will look like this:
+```json
+{
+  "job_name": "echo-worker",
+  "secrets": {
+    "SECRET_NAME": "Iron Man"
+  }
+}
+```
+
+#### 13. Test your proctor using CLI
+Execute your procs using the CLI; make sure it succeeds and the resulting log is correct.
+
+#### 14. Complete
+Congratulations! You've just automated one repetitive task!
diff --git a/docs/features.md b/docs/features.md
new file mode 100644
index 00000000..05a23969
--- /dev/null
+++ b/docs/features.md
@@ -0,0 +1,266 @@
+# Proctor CLI Features
+
+- [proctor config \](#proctor-config)
+- [proctor config show](#proctor-config-show)
+- [proctor describe \](#proctor-describe-proc)
+- [proctor execute \ \](#proctor-execute-proc-params)
+- [proctor help](#proctor-help)
+- [proctor list](#proctor-list)
+- [proctor logs \](#proctor-logs-execution-id)
+- [proctor schedule \ \](#proctor-schedule-proc-params)
+- [proctor schedule describe \](#proctor-schedule-describe-schedule-id)
+- [proctor schedule list](#proctor-schedule-list)
+- [proctor schedule remove <\schedule-id\>](#proctor-schedule-remove-schedule-id)
+- [proctor status \](#proctor-status-execution-id)
+- [proctor template \](#proctor-template-proc)
+
+## proctor config
+Proctor client keep the configuration on ~/.proctor/proctor.yaml + +### Params +| Key | Description | Required | +|:------------- |:------------------------------------- |:--------:| +| PROCTOR_HOST | Host address of proctor service | yes | +| EMAIL_ID | Email account for auth process | no | +| ACCESS_TOKEN | Account access token for auth process | no | + +### Example +```shell script +foo@bar:~$ proctor config PROCTOR_HOST=proctor.com EMAIL_ID=mr.proctor@gmail.com ACCESS_TOKEN=MR_PROCTOR +Proctor client configured successfully +``` + +## proctor config show +Use this command to show current configuration + +### Example +```shell script +foo@bar:~$ proctor config show +PROCTOR_HOST: proctor.com +EMAIL_ID: mr.proctor@gmail.com +ACCESS_TOKEN: MR_PROCTOR +``` + +## proctor describe \ +Use this command to learn more about \ + +### Params +| Key | Description | Required | +|:------------- |:------------------------------------- |:--------:| +| \ | Name of proc | yes | + +### Example +```shell script +foo@bar:~$ proctor describe echo-worker +Description I will echo your name +Contributors Mr.Proctor +Organization Proctor +Authorized Groups [] + +Args +name name to echo +``` + +## proctor execute \ \ +Execute proc on server with specified params + +### Params +| Key | Description | Required | +|:------------- |:------------------------------------- |:--------:| +| \ | Name of proc | yes | + +Other params depend on proc requirements + +### Example +```shell script +foo@bar:~$ proctor execute echo-worker name=Mr.Proctor +Executing Proc echo-worker +With Variables +name Mr.Proctor + +Execution Created +ID 1826735143124102 +Name proctor-1289c792631 + +Streaming logs +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor + +Execution completed. +``` + +## proctor help +Show proctor client help text + +### Example +```shell script +foo@bar~:$ proctor help +A command-line interface to run procs + +Usage: + proctor [command] + +Available Commands: + config Configure proctor client + describe Help on executing a proc + execute Execute a proc with given arguments + help Help about any command + list List procs available for execution + logs Get logs of an execution context + schedule Create scheduled procs + status Get status of an execution context + template Get input template of a procs + version Print version of Proctor command-line tool + +Flags: + -h, --help help for proctor + +Use "proctor [command] --help" for more information about a command. +``` + +## proctor list +Show list of available procs + +### Example +```shell script +foo@bar~:$ proctor list +List of Procs: + +echo-worker echo-worker + +For detailed information of any proc, run: +proctor describe +``` + +## proctor logs \ +Stream logs from executed proc, this process will continue until execution complete + +### Params +| Key | Description | Required | +|:---------------- |:----------------------------------------- |:--------:| +| \ | Execution ID generated by execute command | yes | + +Other params depend on proc requirements + +### Example +```shell script +foo@bar:~$ proctor logs 1826735143124102 +Getting logs +ID 1826735143124102 + +Streaming logs +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor +Mr.Proctor + + +Execution completed. 
+``` + +## proctor schedule \ \ +Run a proc on scheduled basis specified with cron format + +### Params +| Key | Description | Required | +|:---------------- |:----------------------------------------- |:--------:| +| \ | Proc name to scheduled | yes | + +### Flags +| Flag | Description | Required | +|:------------- |:------------------------------------------------------- |:--------:| +| --cron -c | Schedule in cron format ([link](https://crontab.guru/)) | yes | +| --group -g | Group Name | yes | +| --notify -n | Email to notify schedule progress | yes | +| --tags -T | Schedule tags for management purpose | yes | + + +### Example +```shell script +foo@bar:~$ proctor schedule echo-worker name=Mr.Proctor -g proctor -c '0 2 * * *' -T 'proctor' -n mr.proctor@gmail.com +Creating Scheduled Job echo-worker +With Variables +name Mr.Proctor +Scheduled Job UUID : 269349349056612582 +``` + +## proctor schedule describe \ +Get more information about scheduled proc + +### Params +| Key | Description | Required | +|:---------------- |:----------------------------------------- |:--------:| +| \ | Schedule id to learn | yes | + +### Example +```shell script +foo@bar:~$ proctor schedule describe 269349349056612582 +ID 269349349056612582 +PROC NAME echo-worker +GROUP NAME proctor +TAGS proctor +Cron 0 0 2 * * * +Notifier mr.proctor@gmail.com + +Args +name Mr.Proctor +``` + +## proctor schedule list +Get list of scheduled procs + +### Example +```shell script +foo@bar:~$ proctor schedule list +ID PROC NAME GROUP NAME TAGS +269349349056612582 diagnose-vm system-test test,proctor +``` + +## proctor schedule remove <\schedule-id\> +Remove scheduled procs + +### Params +| Key | Description | Required | +|:---------------- |:----------------------------------------- |:--------:| +| \ | Schedule id to remove | yes | + +### Example +```shell script +foo@bar:~$ proctor schedule remove 269349349056612582 +Sucessfully removed the scheduled job ID: 269349349056612582 +``` + +## proctor status \ +Get status of executed proc + +### Params +| Key | Description | Required | +|:---------------- |:----------------------------------------- |:--------:| +| \ | Execution id to get | yes | + +### Example +```shell script +foo@bar:~$ proctor status 1826735143124102 +Getting status +ID 1826735143124102 +Job Name echo-worker +Status FINISHED +Updated At 2019-10-03 04:00:58.083798 +0000 +0000 +Execution completed. +``` + diff --git a/docs/glossary.md b/docs/glossary.md new file mode 100644 index 00000000..342c6f2d --- /dev/null +++ b/docs/glossary.md @@ -0,0 +1,30 @@ +# Proctor Glossary + +### Execution context +Execution context (or simply context) is a record of an execution that happens on proctor, both from user execution request or invocation from the scheduler. +Context contain data such as: + * procs name + * context name + * user email + * procs tag + * args given + * procs output + * procs status + +### Procs +Procs is a job to execute using Proctor, its bundled as Docker image and have a set of metadata and secrets. 
Both metadata and secrets are key-value pairs.
+
+### Metadata
+Proc metadata (usually just called metadata) describes a proc with fields such as:
+ * name: Name of the proc
+ * description: Description of the proc
+ * author: Who created the proc
+ * contributors: People who contributed to the proc
+ * organization: Which organization owns the proc
+ * env_vars:
+   * secrets: Secret values required by the proc to run
+   * args: Arguments that can be passed to the proc
+
+### Secret
+Secrets are variables required to run a proc, such as credentials for a cloud platform.
+Proctor users shouldn't know the value of a secret.
diff --git a/docs/plugin.md b/docs/plugin.md
new file mode 100644
index 00000000..845e7557
--- /dev/null
+++ b/docs/plugin.md
@@ -0,0 +1,52 @@
+# Plugins
+
+Proctor uses Go plugins; for the official documentation, see [here](https://golang.org/pkg/plugin/).
+
+Proctor uses plugins for some features so that they can easily integrate with different components.
+
+### Notification Plugin
+
+A plugin can be written to publish notifications to an external channel such as Slack.
+To use a notification plugin, compile the plugin and fill in these environment variables:
+ 1. PROCTOR_NOTIFICATION_PLUGIN_BINARY
+
+    Fill this variable with the paths to the compiled plugins, separated by commas, since multiple notification channels can be used.
+
+ 2. PROCTOR_NOTIFICATION_PLUGIN_EXPORTED
+
+    Fill this variable with the names of the variables exported from the respective plugin binaries, separated by commas.
+
+### Authentication and Authorization Plugin
+
+Authentication and authorization are delegated to a plugin, so you can use any existing user management system with Proctor.
+To use an auth plugin, compile the plugin and fill in these environment variables:
+ 1. PROCTOR_AUTH_PLUGIN_BINARY
+
+    Fill this variable with the path to the compiled plugin; only a single plugin is allowed.
+
+ 2. PROCTOR_AUTH_PLUGIN_EXPORTED
+
+    Fill this variable with the name of the variable exported from the plugin binary.
+
+ 3. PROCTOR_AUTH_ENABLED
+
+    Fill this with `true` to activate auth using the plugin.
+
+### Provided plugins
+
+#### Gate Auth Plugin
+
+To use the Gate auth plugin, you need a running [Gate](https://github.com/gate-sso/gate) server.
+An authenticated user is a user registered on the Gate server.
+An authorized user must be a member of at least one group from the `authorized_groups` list in the proc's metadata.
+
+Compile the Gate auth plugin by running `make plugin.auth` and fill `PROCTOR_AUTH_PLUGIN_BINARY` with the generated `auth.so` in `./_output/bin/plugin/auth.so`.
+
+
+
+#### Slack Notification Plugin
+
+Proctor will send a notification to Slack when certain events happen; see below for the list of events and their content.
+Create a [Slack app](https://api.slack.com/incoming-webhooks), then fill the `SLACK_PLUGIN_URL` environment variable with the incoming webhook URL; it should look like `https://hooks.slack.com/services/T00000000/B00000000/XXXXXXXXXXXXXXXXXXXXXXXX`.
+
+Compile the Slack notification plugin by running `make plugin.slack` and fill `PROCTOR_NOTIFICATION_PLUGIN_BINARY` with the generated `slack.so` in `./_output/bin/plugin/slack.so`.
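To make the plugin wiring described above concrete, here is a minimal sketch of how a host process can load a compiled plugin binary and look up its exported symbol with Go's standard `plugin` package, which is the mechanism that the `PROCTOR_NOTIFICATION_PLUGIN_BINARY` and `PROCTOR_NOTIFICATION_PLUGIN_EXPORTED` variables configure. The `Notifier` interface and its `OnNotify` method are hypothetical placeholders, not Proctor's actual plugin contract.

```go
package main

import (
	"log"
	"os"
	"plugin"
	"strings"
)

// Notifier is a hypothetical stand-in for whatever interface Proctor expects a
// notification plugin's exported symbol to satisfy; the real interface lives in
// the Proctor codebase and may differ.
type Notifier interface {
	OnNotify(message string) error
}

func main() {
	// Comma-separated lists, mirroring how the two env vars are described above.
	// The sketch assumes both lists have the same length and matching order.
	binaries := strings.Split(os.Getenv("PROCTOR_NOTIFICATION_PLUGIN_BINARY"), ",")
	exported := strings.Split(os.Getenv("PROCTOR_NOTIFICATION_PLUGIN_EXPORTED"), ",")

	for i, binaryPath := range binaries {
		p, err := plugin.Open(binaryPath) // load the compiled .so file
		if err != nil {
			log.Fatalf("open plugin %s: %v", binaryPath, err)
		}

		// Look up the exported package-level variable, e.g. "SlackNotification".
		sym, err := p.Lookup(exported[i])
		if err != nil {
			log.Fatalf("lookup symbol %s: %v", exported[i], err)
		}

		// Lookup returns a pointer to the exported variable; the sketch assumes
		// that value satisfies the (hypothetical) Notifier interface.
		notifier, ok := sym.(Notifier)
		if !ok {
			log.Fatalf("symbol %s does not implement the expected interface", exported[i])
		}

		if err := notifier.OnNotify("execution finished"); err != nil {
			log.Printf("notification via %s failed: %v", exported[i], err)
		}
	}
}
```

Note that Go plugins must be built with `go build -buildmode=plugin` and are only supported on certain platforms (notably Linux and macOS), which is consistent with the `make plugin.auth` and `make plugin.slack` targets producing `.so` files.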
diff --git a/exec/cli/cli.go b/exec/cli/cli.go deleted file mode 100644 index a8db7cd5..00000000 --- a/exec/cli/cli.go +++ /dev/null @@ -1,18 +0,0 @@ -package main - -import ( - "proctor/cmd" - "proctor/config" - "proctor/daemon" - "proctor/io" - "proctor/cmd/version/github" -) - -func main() { - printer := io.GetPrinter() - proctorConfigLoader := config.NewLoader() - proctorDClient := daemon.NewClient(printer, proctorConfigLoader) - githubClient := github.NewClient() - - cmd.Execute(printer, proctorDClient, githubClient) -} diff --git a/exec/server/server.go b/exec/server/server.go deleted file mode 100644 index 35203bd8..00000000 --- a/exec/server/server.go +++ /dev/null @@ -1,65 +0,0 @@ -package main - -import ( - "os" - - "github.com/getsentry/raven-go" - "github.com/urfave/cli" - - "proctor/proctord/config" - "proctor/proctord/logger" - "proctor/proctord/scheduler" - "proctor/proctord/server" - "proctor/proctord/storage/postgres" -) - -func main() { - logger.Setup() - raven.SetDSN(config.SentryDSN()) - - proctord := cli.NewApp() - proctord.Name = "proctord" - proctord.Usage = "Handle executing jobs and maintaining their configuration" - proctord.Version = "0.2.0" - proctord.Commands = []cli.Command{ - { - Name: "migrate", - Description: "Run database migrations for proctord", - Action: func(c *cli.Context) { - err := postgres.Up() - if err != nil { - panic(err.Error()) - } - logger.Info("Migration successful") - }, - }, - { - Name: "rollback", - Description: "Rollback database migrations by one step for proctord", - Action: func(c *cli.Context) { - err := postgres.DownOneStep() - if err != nil { - panic(err.Error()) - } - logger.Info("Rollback successful") - }, - }, - { - Name: "start", - Aliases: []string{"s"}, - Usage: "starts server", - Action: func(c *cli.Context) error { - return server.Start() - }, - }, - { - Name: "start-scheduler", - Usage: "starts scheduler", - Action: func(c *cli.Context) error { - return scheduler.Start() - }, - }, - } - - proctord.Run(os.Args) -} diff --git a/go.mod b/go.mod index b566eb87..0a434dc8 100644 --- a/go.mod +++ b/go.mod @@ -6,15 +6,15 @@ require ( github.com/Microsoft/go-winio v0.4.12 // indirect github.com/badoux/checkmail v0.0.0-20181210160741-9661bd69e9ad github.com/briandowns/spinner v0.0.0-20190319032542-ac46072a5a91 - github.com/certifi/gocertifi v0.0.0-20190506164543-d2eda7129713 // indirect + github.com/brianvoe/gofakeit v3.18.0+incompatible github.com/docker/distribution v2.7.1+incompatible // indirect - github.com/docker/docker v1.13.1 // indirect + github.com/docker/docker v1.13.1 github.com/docker/go-connections v0.4.0 // indirect github.com/docker/go-units v0.4.0 // indirect github.com/evanphx/json-patch v4.4.0+incompatible // indirect github.com/fatih/color v1.7.0 github.com/garyburd/redigo v1.6.0 - github.com/getsentry/raven-go v0.2.0 + github.com/go-resty/resty/v2 v2.0.0 github.com/google/go-github v17.0.0+incompatible github.com/google/go-querystring v1.0.0 // indirect github.com/google/gofuzz v1.0.0 // indirect @@ -31,20 +31,22 @@ require ( github.com/mattes/migrate v3.0.1+incompatible github.com/mattn/go-colorable v0.1.2 // indirect github.com/modern-go/reflect2 v1.0.1 // indirect - github.com/newrelic/go-agent v1.4.0 + github.com/newrelic/go-agent v2.9.0+incompatible github.com/opencontainers/go-digest v1.0.0-rc1 // indirect github.com/pkg/errors v0.8.1 - github.com/robfig/cron v1.1.0 + github.com/prometheus/client_golang v0.9.3 + github.com/robfig/cron v1.2.0 github.com/satori/go.uuid v1.2.0 github.com/sirupsen/logrus 
v1.4.2 + github.com/sony/sonyflake v1.0.0 github.com/spf13/cobra v0.0.4 github.com/spf13/viper v1.4.0 github.com/stretchr/testify v1.3.0 github.com/thingful/httpmock v0.0.2 - github.com/tylerb/graceful v1.2.15 github.com/urfave/cli v1.20.0 github.com/urfave/negroni v1.0.0 gopkg.in/inf.v0 v0.9.1 // indirect + gopkg.in/yaml.v2 v2.2.2 k8s.io/api v0.0.0-20190409021203-6e4e0e4f393b k8s.io/apimachinery v0.0.0-20190404173353-6a84e37a896d k8s.io/client-go v11.0.1-0.20190409021438-1a26190bd76a+incompatible diff --git a/go.sum b/go.sum index b0fb1e56..906326f9 100644 --- a/go.sum +++ b/go.sum @@ -18,11 +18,12 @@ github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5 github.com/badoux/checkmail v0.0.0-20181210160741-9661bd69e9ad h1:kXfVkP8xPSJXzicomzjECcw6tv1Wl9h1lNenWBfNKdg= github.com/badoux/checkmail v0.0.0-20181210160741-9661bd69e9ad/go.mod h1:r5ZalvRl3tXevRNJkwIB6DC4DD3DMjIlY9NEU1XGoaQ= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0 h1:HWo1m869IqiPhD389kmkxeTalrjNbbJTC8LXupb+sl0= github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= github.com/briandowns/spinner v0.0.0-20190319032542-ac46072a5a91 h1:GMmnK0dvr0Sf0gx3DvTbln0c8DE07B7sPVD9dgHOqo4= github.com/briandowns/spinner v0.0.0-20190319032542-ac46072a5a91/go.mod h1:hw/JEQBIE+c/BLI4aKM8UU8v+ZqrD3h7HC27kKt8JQU= -github.com/certifi/gocertifi v0.0.0-20190506164543-d2eda7129713 h1:UNOqI3EKhvbqV8f1Vm3NIwkrhq388sGCeAH2Op7w0rc= -github.com/certifi/gocertifi v0.0.0-20190506164543-d2eda7129713/go.mod h1:GJKEexRPVJrBSOjoqN5VNOIKJ5Q3RViH6eu3puDRwx4= +github.com/brianvoe/gofakeit v3.18.0+incompatible h1:wDOmHc9DLG4nRjUVVaxA+CEglKOW72Y5+4WNxUIkjM8= +github.com/brianvoe/gofakeit v3.18.0+incompatible/go.mod h1:kfwdRA90vvNhPutZWfH7WPaDzUjz+CZFqG+rPkOjGOc= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= @@ -36,6 +37,8 @@ github.com/davecgh/go-spew v0.0.0-20151105211317-5215b55f46b2/go.mod h1:J7Y8YcW2 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/deckarep/golang-set v1.7.1 h1:SCQV0S6gTtp6itiFrTqI+pfmJ4LN85S1YzhDf9rTHJQ= +github.com/deckarep/golang-set v1.7.1/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= github.com/docker/distribution v2.7.1+incompatible h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug= @@ -55,8 +58,6 @@ github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc= github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= -github.com/getsentry/raven-go v0.2.0 h1:no+xWJRb5ZI7eE8TWgIq1jLulQiIoLG0IfYxv5JYMGs= -github.com/getsentry/raven-go v0.2.0/go.mod h1:KungGk8q33+aIAZUIVWZDr2OfAEBsO49PX4NzFV5kcQ= 
github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= @@ -66,6 +67,8 @@ github.com/go-openapi/jsonpointer v0.0.0-20160704185906-46af16f9f7b1/go.mod h1:+ github.com/go-openapi/jsonreference v0.0.0-20160704190145-13c6e3589ad9/go.mod h1:W3Z9FmVs9qj+KR4zFKmDPGiLdk1D9Rlm7cyMvf57TTg= github.com/go-openapi/spec v0.0.0-20160808142527-6aced65f8501/go.mod h1:J8+jY1nAiCcj+friV/PDoE1/3eeccG9LYBs0tYvLOWc= github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dpr1UfpPtxFw+EFuQ41HhCWZfha5jSVRG7C7I= +github.com/go-resty/resty/v2 v2.0.0 h1:9Nq/U+V4xsoDnDa/iTrABDWUCuk3Ne92XFHPe6dKWUc= +github.com/go-resty/resty/v2 v2.0.0/go.mod h1:dZGr0i9PLlaaTD4H/hoZIDjQ+r6xq8mgbRzHZf7f2J8= github.com/go-sql-driver/mysql v1.4.0 h1:7LxgVwFb2hIQtMm87NdgAVfXjnt4OePseqT1tKx+opk= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= @@ -141,6 +144,7 @@ github.com/mattn/go-isatty v0.0.8 h1:HLtExJ+uU2HOZ+wI0Tt5DtUDrx8yhUqDcp7fYERX4CE github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-sqlite3 v1.9.0 h1:pDRiWfl+++eC2FEFRy6jXmQlvp4Yh3z1MJKg4UeYM/4= github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE= @@ -152,8 +156,8 @@ github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9 github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= -github.com/newrelic/go-agent v1.4.0 h1:wY5acGl5vqk/4VJ+plJRTA87Utrr3k0sWNvlN857yH8= -github.com/newrelic/go-agent v1.4.0/go.mod h1:a8Fv1b/fYhFSReoTU6HDkTYIMZeSVNffmoS726Y0LzQ= +github.com/newrelic/go-agent v2.9.0+incompatible h1:f47VHKmZ/3KXzMSg77CGroizhcyP4JaE6zMYOeXxkDg= +github.com/newrelic/go-agent v2.9.0+incompatible/go.mod h1:a8Fv1b/fYhFSReoTU6HDkTYIMZeSVNffmoS726Y0LzQ= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c h1:Hww8mOyEKTeON4bZn7FrlLismspbPc1teNRUVH7wLQ8= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -170,16 +174,20 @@ github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3 h1:9iH4JKXLzFbOAdtqv/a+j8aewx2Y8lAjAydhbaScPF8= 
github.com/prometheus/client_golang v0.9.3/go.mod h1:/TN21ttK/J9q6uSwhBd54HahCDft0ttaMvbicHlPoso= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90 h1:S/YWwWx/RA8rT8tKFRuGUZhuA90OyIBpPCXkcbwU8DE= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.4.0 h1:7etb9YClo3a6HjLzfl6rIQaU+FDfi0VSX39io3aQ+DM= github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084 h1:sofwID9zm4tzrgykg80hfFph1mryUeLRsUfoocVVmRY= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= -github.com/robfig/cron v1.1.0 h1:jk4/Hud3TTdcrJgUOBgsqrZBarcxl6ADIjSC2iniwLY= -github.com/robfig/cron v1.1.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k= +github.com/robfig/cron v1.2.0 h1:ZjScXvvxeQ63Dbyxy76Fj3AT3Ut0aKsyd2/tl3DTMuQ= +github.com/robfig/cron v1.2.0/go.mod h1:JGuDeoQd7Z6yL4zQhZ3OPEVHB7fL6Ka6skscFHfmt2k= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww= @@ -188,6 +196,8 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sony/sonyflake v1.0.0 h1:MpU6Ro7tfXwgn2l5eluf9xQvQJDROTBImNCfRXn/YeM= +github.com/sony/sonyflake v1.0.0/go.mod h1:Jv3cfhf/UFtolOTTRd3q4Nl6ENqM+KfyZ5PseKfZGF4= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= @@ -215,8 +225,6 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV github.com/thingful/httpmock v0.0.2 h1:1eWdzHkrygIk0LAcvX3M36Ol3IoIOwHWIqxUyGLsEdw= github.com/thingful/httpmock v0.0.2/go.mod h1:7l+awGvIFiugIInunvwUQYHNg5U0KXLPbNQshUfqAIk= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= -github.com/tylerb/graceful v1.2.15 h1:B0x01Y8fsJpogzZTkDg6BDi6eMf03s01lEKGdrv83oA= -github.com/tylerb/graceful v1.2.15/go.mod h1:LPYTbOYmUTdabwRt0TGhLllQ0MUNbs0Y5q1WXJOI9II= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/urfave/cli v1.20.0 h1:fDqGv3UG/4jbVl/QkFwEdddtEDjh/5Ov6X+0B/3bPaw= @@ -243,6 +251,8 @@ golang.org/x/net 
v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190522155817-f3200d17e092 h1:4QSRKanuywn15aTZvI/mIDEgPQpswuFndXpOj3rKEco= golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7 h1:rTIdg5QFRR7XCaK4LCjBiPbx8j4DQRpdYMnGn/bJUEU= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -268,6 +278,7 @@ golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181011042414-1f849cf54d09/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd h1:/e+gpKk9r3dJobndpTytxS2gOy6m5uvpg+ISQoEcusQ= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= google.golang.org/appengine v1.1.0 h1:igQkv0AAhEIvTEpD5LIpAfav2eeVO9HBTjvKHVJPRSs= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= diff --git a/internal/app/cli/cli.go b/internal/app/cli/cli.go new file mode 100644 index 00000000..7f1e458c --- /dev/null +++ b/internal/app/cli/cli.go @@ -0,0 +1 @@ +package cli diff --git a/cmd/config/manager.go b/internal/app/cli/command/config/manager.go similarity index 72% rename from cmd/config/manager.go rename to internal/app/cli/command/config/manager.go index 3cd1cd3e..822d241a 100644 --- a/cmd/config/manager.go +++ b/internal/app/cli/command/config/manager.go @@ -6,11 +6,11 @@ import ( "io/ioutil" "os" "path/filepath" + "proctor/internal/app/cli/config" + "proctor/internal/app/cli/utility/io" "strings" "github.com/fatih/color" - proctor_config "proctor/config" - "proctor/io" "github.com/spf13/cobra" ) @@ -28,11 +28,11 @@ func NewCmd(printer io.Printer) *cobra.Command { Use: "config", Short: "Configure proctor client", Long: "This command helps configure client with proctord host, email id and access token", - Example: fmt.Sprintf("proctor config %s=example.proctor.com %s=example@proctor.com %s=XXXXX", proctor_config.ProctorHost, proctor_config.EmailId, proctor_config.AccessToken), + Example: fmt.Sprintf("proctor config %s=example.proctor.com %s=example@proctor.com %s=XXXXX", config.ProctorHost, config.EmailId, config.AccessToken), Args: cobra.MinimumNArgs(1), Run: func(cmd *cobra.Command, args []string) { - configFile := filepath.Join(proctor_config.ConfigFileDir(), "proctor.yaml") + configFile := filepath.Join(config.ConfigFileDir(), "proctor.yaml") if _, err := os.Stat(configFile); err == nil { printer.Println("[Warning] This will overwrite current config:", color.FgYellow) existingProctorConfig, err := ioutil.ReadFile(configFile) @@ -58,7 +58,7 @@ func NewCmd(printer io.Printer) *cobra.Command { } } - CreateDirIfNotExist(proctor_config.ConfigFileDir()) + 
CreateDirIfNotExist(config.ConfigFileDir()) var configFileContent string for _, v := range args { arg := strings.Split(v, "=") @@ -69,16 +69,16 @@ func NewCmd(printer io.Printer) *cobra.Command { } switch arg[0] { - case proctor_config.ProctorHost: - configFileContent += fmt.Sprintf("%s: %s\n", proctor_config.ProctorHost, arg[1]) - case proctor_config.EmailId: - configFileContent += fmt.Sprintf("%s: %s\n", proctor_config.EmailId, arg[1]) - case proctor_config.AccessToken: - configFileContent += fmt.Sprintf("%s: %s\n", proctor_config.AccessToken, arg[1]) - case proctor_config.ConnectionTimeoutSecs: - configFileContent += fmt.Sprintf("%s: %s\n", proctor_config.ConnectionTimeoutSecs, arg[1]) - case proctor_config.ProcExecutionStatusPollCount: - configFileContent += fmt.Sprintf("%s: %s\n", proctor_config.ProcExecutionStatusPollCount, arg[1]) + case config.ProctorHost: + configFileContent += fmt.Sprintf("%s: %s\n", config.ProctorHost, arg[1]) + case config.EmailId: + configFileContent += fmt.Sprintf("%s: %s\n", config.EmailId, arg[1]) + case config.AccessToken: + configFileContent += fmt.Sprintf("%s: %s\n", config.AccessToken, arg[1]) + case config.ConnectionTimeoutSecs: + configFileContent += fmt.Sprintf("%s: %s\n", config.ConnectionTimeoutSecs, arg[1]) + case config.ProcExecutionStatusPollCount: + configFileContent += fmt.Sprintf("%s: %s\n", config.ProcExecutionStatusPollCount, arg[1]) default: printer.Println(fmt.Sprintf("Proctor doesn't support config key: %s", arg[0]), color.FgYellow) } @@ -88,10 +88,12 @@ func NewCmd(printer io.Printer) *cobra.Command { f, err := os.Create(configFile) if err != nil { printer.Println(fmt.Sprintf("Error creating config file %s: %s", configFile, err.Error()), color.FgRed) + return } _, err = f.Write(configFileContentBytes) if err != nil { printer.Println(fmt.Sprintf("Error writing content %v \n to config file %s: %s", configFileContentBytes, configFile, err.Error()), color.FgRed) + return } defer f.Close() printer.Println("Proctor client configured successfully", color.FgGreen) diff --git a/cmd/config/view/view.go b/internal/app/cli/command/config/view/view.go similarity index 76% rename from cmd/config/view/view.go rename to internal/app/cli/command/config/view/view.go index 293ddd66..03aaeb32 100644 --- a/cmd/config/view/view.go +++ b/internal/app/cli/command/config/view/view.go @@ -5,22 +5,13 @@ import ( "io/ioutil" "os" "path/filepath" + "proctor/internal/app/cli/config" "github.com/fatih/color" - proctor_config "proctor/config" - "proctor/io" "github.com/spf13/cobra" + "proctor/internal/app/cli/utility/io" ) -func CreateDirIfNotExist(dir string) { - if _, err := os.Stat(dir); os.IsNotExist(err) { - err = os.MkdirAll(dir, 0755) - if err != nil { - panic(err) - } - } -} - func NewCmd(printer io.Printer) *cobra.Command { return &cobra.Command{ Use: "show", @@ -29,7 +20,7 @@ func NewCmd(printer io.Printer) *cobra.Command { Example: fmt.Sprintf("proctor config show"), Run: func(cmd *cobra.Command, args []string) { - configFile := filepath.Join(proctor_config.ConfigFileDir(), "proctor.yaml") + configFile := filepath.Join(config.ConfigFileDir(), "proctor.yaml") if _, err := os.Stat(configFile); os.IsNotExist(err) { printer.Println(fmt.Sprintf("Client Config is absent: %s", configFile), color.FgRed) printer.Println(fmt.Sprintf("Setup config using `proctor config PROCTOR_HOST=some.host ...`"), color.FgRed) diff --git a/cmd/description/descriptor.go b/internal/app/cli/command/description/descriptor.go similarity index 92% rename from cmd/description/descriptor.go 
rename to internal/app/cli/command/description/descriptor.go index 5d00fc0a..d4540646 100644 --- a/cmd/description/descriptor.go +++ b/internal/app/cli/command/description/descriptor.go @@ -2,13 +2,13 @@ package description import ( "fmt" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "strings" "github.com/fatih/color" - "proctor/daemon" - "proctor/io" - proc_metadata "proctor/proctord/jobs/metadata" "github.com/spf13/cobra" + modelMetadata "proctor/internal/pkg/model/metadata" ) func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { @@ -31,7 +31,7 @@ func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { } userProvidedProcName := args[0] - desiredProc := proc_metadata.Metadata{} + desiredProc := modelMetadata.Metadata{} for _, proc := range procList { if userProvidedProcName == proc.Name { desiredProc = proc diff --git a/cmd/description/descriptor_test.go b/internal/app/cli/command/description/descriptor_test.go similarity index 87% rename from cmd/description/descriptor_test.go rename to internal/app/cli/command/description/descriptor_test.go index 3f327364..cc11145c 100644 --- a/cmd/description/descriptor_test.go +++ b/internal/app/cli/command/description/descriptor_test.go @@ -3,13 +3,13 @@ package description import ( "errors" "fmt" + daemon2 "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "strings" "testing" - "proctor/daemon" - "proctor/io" - proc_metadata "proctor/proctord/jobs/metadata" - "proctor/proctord/jobs/metadata/env" + procMetadata "proctor/internal/pkg/model/metadata" + "proctor/internal/pkg/model/metadata/env" "github.com/fatih/color" "github.com/spf13/cobra" @@ -20,13 +20,13 @@ import ( type DescribeCmdTestSuite struct { suite.Suite mockPrinter *io.MockPrinter - mockProctorDClient *daemon.MockClient + mockProctorDClient *daemon2.MockClient testDescribeCmd *cobra.Command } func (s *DescribeCmdTestSuite) SetupTest() { s.mockPrinter = &io.MockPrinter{} - s.mockProctorDClient = &daemon.MockClient{} + s.mockProctorDClient = &daemon2.MockClient{} s.testDescribeCmd = NewCmd(s.mockPrinter, s.mockProctorDClient) } @@ -51,7 +51,7 @@ func (s *DescribeCmdTestSuite) TestDescribeCmdRun() { Description: "secret one description", } - anyProc := proc_metadata.Metadata{ + anyProc := procMetadata.Metadata{ Name: "do-something", Description: "does something", Contributors: "user@example.com", @@ -62,7 +62,7 @@ func (s *DescribeCmdTestSuite) TestDescribeCmdRun() { Secrets: []env.VarMetadata{secret}, }, } - procList := []proc_metadata.Metadata{anyProc} + procList := []procMetadata.Metadata{anyProc} s.mockProctorDClient.On("ListProcs").Return(procList, nil).Once() @@ -89,7 +89,7 @@ func (s *DescribeCmdTestSuite) TestDescribeCmdForIncorrectUsage() { } func (s *DescribeCmdTestSuite) TestDescribeCmdRunProctorDClientFailure() { - s.mockProctorDClient.On("ListProcs").Return([]proc_metadata.Metadata{}, errors.New("test error")).Once() + s.mockProctorDClient.On("ListProcs").Return([]procMetadata.Metadata{}, errors.New("test error")).Once() s.mockPrinter.On("Println", "test error", color.FgRed).Once() s.testDescribeCmd.Run(&cobra.Command{}, []string{"do-something"}) @@ -99,7 +99,7 @@ func (s *DescribeCmdTestSuite) TestDescribeCmdRunProctorDClientFailure() { } func (s *DescribeCmdTestSuite) TestDescribeCmdRunProcNotSupported() { - s.mockProctorDClient.On("ListProcs").Return([]proc_metadata.Metadata{}, nil).Once() + s.mockProctorDClient.On("ListProcs").Return([]procMetadata.Metadata{}, nil).Once() 
testProcName := "do-something" s.mockPrinter.On("Println", fmt.Sprintf("Proctor doesn't support Proc `%s`\nRun `proctor list` to view supported Procs", testProcName), color.FgRed).Once() diff --git a/internal/app/cli/command/execution/executioner.go b/internal/app/cli/command/execution/executioner.go new file mode 100644 index 00000000..9027c9c7 --- /dev/null +++ b/internal/app/cli/command/execution/executioner.go @@ -0,0 +1,82 @@ +package execution + +import ( + "fmt" + "strings" + + "github.com/fatih/color" + "github.com/spf13/cobra" + + "proctor/internal/app/cli/daemon" + utilArgs "proctor/internal/app/cli/utility/args" + utilFile "proctor/internal/app/cli/utility/file" + utilIO "proctor/internal/app/cli/utility/io" +) + +func NewCmd(printer utilIO.Printer, proctorDClient daemon.Client, osExitFunc func(int)) *cobra.Command { + executionCmd := &cobra.Command{ + Use: "execute", + Short: "Execute a proc with given arguments", + Long: "To execute a proc, this command helps to communicate with `proctord` and streams to logs of proc in execution", + Example: "proctor execute proc-one SOME_VAR=foo ANOTHER_VAR=bar\nproctor execute proc-two ANY_VAR=baz", + Args: cobra.MinimumNArgs(1), + + Run: func(cmd *cobra.Command, args []string) { + procName := args[0] + printer.Println(fmt.Sprintf("%-40s %-100s", "Executing Proc", procName), color.Reset) + + filename, err := cmd.Flags().GetString("filename") + if err != nil && !strings.Contains(err.Error(), "flag accessed but not defined") { + printer.Println(err.Error(), color.FgRed) + } + + procArgs := make(map[string]string) + if filename != "" { + parseErr := utilFile.ParseYAML(filename, procArgs) + if err != nil { + printer.Println(parseErr.Error(), color.FgRed) + } + } + if len(procArgs) > 1 || len(args) > 1 { + printer.Println("With Variables", color.FgMagenta) + for _, v := range args[1:] { + utilArgs.ParseArg(printer, procArgs, v) + } + + for field, value := range procArgs { + printer.Println(fmt.Sprintf("%-40s %-100s", field, value), color.Reset) + } + } else { + printer.Println("With No Variables", color.FgRed) + } + + executionResult, err := proctorDClient.ExecuteProc(procName, procArgs) + if err != nil { + printer.Println(err.Error(), color.FgRed) + print() + osExitFunc(1) + return + } + + printer.Println("\nExecution Created", color.FgGreen) + printer.Println(fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen) + printer.Println(fmt.Sprintf("%-40s %-100s", "Name", executionResult.ExecutionName), color.FgGreen) + + printer.Println("\nStreaming logs", color.FgGreen) + err = proctorDClient.StreamProcLogs(executionResult.ExecutionId) + if err != nil { + printer.Println("Error while Streaming Log.", color.FgRed) + osExitFunc(1) + return + } + + printer.Println("Execution completed.", color.FgGreen) + }, + } + var Filename string + + executionCmd.Flags().StringVarP(&Filename, "filename", "f", "", "Filename") + executionCmd.MarkFlagFilename("filename") + + return executionCmd +} diff --git a/internal/app/cli/command/execution/executioner_test.go b/internal/app/cli/command/execution/executioner_test.go new file mode 100644 index 00000000..7c108d42 --- /dev/null +++ b/internal/app/cli/command/execution/executioner_test.go @@ -0,0 +1,300 @@ +package execution + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "testing" + + "github.com/fatih/color" + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + + "proctor/internal/app/cli/daemon" + 
"proctor/internal/app/cli/utility/io" + "proctor/internal/pkg/model/execution" +) + +type ExecutionCmdTestSuite struct { + suite.Suite + mockPrinter *io.MockPrinter + mockProctorDClient *daemon.MockClient + testExecutionCmd *cobra.Command +} + +func (s *ExecutionCmdTestSuite) SetupTest() { + s.mockPrinter = &io.MockPrinter{} + s.mockProctorDClient = &daemon.MockClient{} + s.testExecutionCmd = NewCmd(s.mockPrinter, s.mockProctorDClient, func(exitCode int) {}) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdUsage() { + assert.Equal(s.T(), "execute", s.testExecutionCmd.Use) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdHelp() { + assert.Equal(s.T(), "Execute a proc with given arguments", s.testExecutionCmd.Short) + assert.Equal(s.T(), "To execute a proc, this command helps to communicate with `proctord` and streams to logs of proc in execution", s.testExecutionCmd.Long) + assert.Equal(s.T(), "proctor execute proc-one SOME_VAR=foo ANOTHER_VAR=bar\nproctor execute proc-two ANY_VAR=baz", s.testExecutionCmd.Example) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmd() { + args := []string{"say-hello-world", "SAMPLE_ARG_ONE=any", "SAMPLE_ARG_TWO=variable"} + procArgs := make(map[string]string) + procArgs["SAMPLE_ARG_ONE"] = "any" + procArgs["SAMPLE_ARG_TWO"] = "variable" + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With Variables", color.FgMagenta).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "SAMPLE_ARG_ONE", "any"), color.Reset).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "SAMPLE_ARG_TWO", "variable"), color.Reset).Once() + + executionResult := &execution.ExecutionResult{ + ExecutionId: uint64(42), + ExecutionName: "Test", + } + + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, nil).Once() + s.mockPrinter.On("Println", "\nExecution Created", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Name", executionResult.ExecutionName), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionResult.ExecutionId).Return(nil).Once() + + s.mockPrinter.On("Println", "Execution completed.", color.FgGreen).Once() + + s.testExecutionCmd.Run(&cobra.Command{}, args) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdForYAMLInput() { + t := s.T() + + filename := "/tmp/yaml-input-test" + testYAML := []byte("SAMPLE_ARG_ONE: any\nSAMPLE_ARG_TWO: variable") + err := ioutil.WriteFile(filename, testYAML, 0644) + defer os.Remove(filename) + assert.NoError(t, err) + + args := []string{"say-hello-world", "-f", filename} + procArgs := make(map[string]string) + procArgs["SAMPLE_ARG_ONE"] = "any" + procArgs["SAMPLE_ARG_TWO"] = "variable" + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With Variables", color.FgMagenta).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "SAMPLE_ARG_ONE", "any"), color.Reset).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "SAMPLE_ARG_TWO", "variable"), color.Reset).Once() + + executionResult := &execution.ExecutionResult{ + 
ExecutionId: uint64(42), + ExecutionName: "Test", + } + + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, nil).Once() + s.mockPrinter.On("Println", "\nExecution Created", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Name", executionResult.ExecutionName), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionResult.ExecutionId).Return(nil).Once() + + s.mockPrinter.On("Println", "Execution completed.", color.FgGreen).Once() + + s.testExecutionCmd.SetArgs(args) + s.testExecutionCmd.Execute() + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdForNoProcVariables() { + args := []string{"say-hello-world"} + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() + + executionResult := &execution.ExecutionResult{ + ExecutionId: uint64(42), + } + + procArgs := make(map[string]string) + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, nil).Once() + + s.mockPrinter.On("Println", "\nExecution Created", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Name", executionResult.ExecutionName), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionResult.ExecutionId).Return(nil).Once() + + s.mockPrinter.On("Println", "Execution completed.", color.FgGreen).Once() + + s.testExecutionCmd.SetArgs(args) + s.testExecutionCmd.Execute() + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdForIncorrectVariableFormat() { + args := []string{"say-hello-world", "incorrect-format"} + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With Variables", color.FgMagenta).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "\nIncorrect variable format\n", "incorrect-format"), color.FgRed).Once() + + executionResult := &execution.ExecutionResult{ + ExecutionId: uint64(42), + } + + procArgs := make(map[string]string) + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, nil).Once() + + s.mockPrinter.On("Println", "\nExecution Created", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Name", executionResult.ExecutionName), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionResult.ExecutionId).Return(nil).Once() + + s.mockPrinter.On("Println", "Execution completed.", color.FgGreen).Once() + + s.testExecutionCmd.SetArgs(args) + s.testExecutionCmd.Execute() + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s 
*ExecutionCmdTestSuite) TestExecutionCmdForProctorDExecutionFailure() { + args := []string{"say-hello-world"} + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() + + executionResult := &execution.ExecutionResult{ + ExecutionId: uint64(42), + } + procArgs := make(map[string]string) + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, errors.New("test error")).Once() + + s.mockPrinter.On("Println", mock.Anything, color.FgRed).Once() + + osExitFunc := func(exitCode int) { + assert.Equal(s.T(), 1, exitCode) + } + testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) + testExecutionCmdOSExit.SetArgs(args) + testExecutionCmdOSExit.Execute() + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdForProctorDLogStreamingFailure() { + args := []string{"say-hello-world"} + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() + + executionResult := &execution.ExecutionResult{ + ExecutionId: uint64(42), + } + procArgs := make(map[string]string) + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, nil).Once() + + s.mockPrinter.On("Println", "\nExecution Created", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Name", executionResult.ExecutionName), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionResult.ExecutionId).Return(errors.New("error")).Once() + + s.mockPrinter.On("Println", "Error while Streaming Log.", color.FgRed).Once() + + osExitFunc := func(exitCode int) { + assert.Equal(s.T(), 1, exitCode) + } + testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) + testExecutionCmdOSExit.SetArgs(args) + testExecutionCmdOSExit.Execute() + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdForProctorDGetDefinitiveProcExecutionStatusError() { + args := []string{"say-hello-world"} + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() + + executionResult := &execution.ExecutionResult{ + ExecutionId: uint64(42), + } + + procArgs := make(map[string]string) + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, nil).Once() + + s.mockPrinter.On("Println", "\nExecution Created", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Name", executionResult.ExecutionName), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionResult.ExecutionId).Return(errors.New("error")).Once() + + s.mockPrinter.On("Println", "Error while Streaming Log.", color.FgRed).Once() + + osExitFunc := 
func(exitCode int) { + assert.Equal(s.T(), 1, exitCode) + } + testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) + testExecutionCmdOSExit.SetArgs(args) + testExecutionCmdOSExit.Execute() + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *ExecutionCmdTestSuite) TestExecutionCmdForProctorDGetDefinitiveProcExecutionStatusFailure() { + args := []string{"say-hello-world"} + + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "Executing Proc", "say-hello-world"), color.Reset).Once() + s.mockPrinter.On("Println", "With No Variables", color.FgRed).Once() + + executionResult := &execution.ExecutionResult{ + ExecutionId: uint64(42), + } + procArgs := make(map[string]string) + s.mockProctorDClient.On("ExecuteProc", "say-hello-world", procArgs).Return(executionResult, nil).Once() + + s.mockPrinter.On("Println", "\nExecution Created", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionResult.ExecutionId), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Name", executionResult.ExecutionName), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionResult.ExecutionId).Return(errors.New("error")).Once() + + s.mockPrinter.On("Println", "Error while Streaming Log.", color.FgRed).Once() + + osExitFunc := func(exitCode int) { + assert.Equal(s.T(), 1, exitCode) + } + testExecutionCmdOSExit := NewCmd(s.mockPrinter, s.mockProctorDClient, osExitFunc) + testExecutionCmdOSExit.SetArgs(args) + testExecutionCmdOSExit.Execute() + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func TestExecutionCmdTestSuite(t *testing.T) { + suite.Run(t, new(ExecutionCmdTestSuite)) +} diff --git a/cmd/list/lister.go b/internal/app/cli/command/list/lister.go similarity index 87% rename from cmd/list/lister.go rename to internal/app/cli/command/list/lister.go index 51e35267..c1e222ef 100644 --- a/cmd/list/lister.go +++ b/internal/app/cli/command/list/lister.go @@ -2,12 +2,12 @@ package list import ( "fmt" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "github.com/fatih/color" - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" - "proctor/utility/sort" + "proctor/internal/app/cli/utility/sort" ) func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { diff --git a/cmd/list/lister_test.go b/internal/app/cli/command/list/lister_test.go similarity index 86% rename from cmd/list/lister_test.go rename to internal/app/cli/command/list/lister_test.go index f162ccee..efac7918 100644 --- a/cmd/list/lister_test.go +++ b/internal/app/cli/command/list/lister_test.go @@ -3,15 +3,15 @@ package list import ( "errors" "fmt" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "testing" "github.com/fatih/color" - "proctor/daemon" - "proctor/io" - proc_metadata "proctor/proctord/jobs/metadata" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + modelMetadata "proctor/internal/pkg/model/metadata" ) type ListCmdTestSuite struct { @@ -38,15 +38,15 @@ func (s *ListCmdTestSuite) TestListCmdHelp() { } func (s *ListCmdTestSuite) TestListCmdRun() { - procOne := proc_metadata.Metadata{ + procOne := modelMetadata.Metadata{ Name: "one", Description: "proc one description", } - procTwo := proc_metadata.Metadata{ + procTwo := 
modelMetadata.Metadata{ Name: "two", Description: "proc two description", } - procList := []proc_metadata.Metadata{procOne, procTwo} + procList := []modelMetadata.Metadata{procOne, procTwo} s.mockProctorDClient.On("ListProcs").Return(procList, nil).Once() @@ -61,7 +61,7 @@ func (s *ListCmdTestSuite) TestListCmdRun() { } func (s *ListCmdTestSuite) TestListCmdRunProctorDClientFailure() { - s.mockProctorDClient.On("ListProcs").Return([]proc_metadata.Metadata{}, errors.New("Error!!!\nUnknown Error.")).Once() + s.mockProctorDClient.On("ListProcs").Return([]modelMetadata.Metadata{}, errors.New("Error!!!\nUnknown Error.")).Once() s.mockPrinter.On("Println", "Error!!!\nUnknown Error.", color.FgRed).Once() s.testListCmd.Run(&cobra.Command{}, []string{}) diff --git a/internal/app/cli/command/log/log.go b/internal/app/cli/command/log/log.go new file mode 100644 index 00000000..ccfe7940 --- /dev/null +++ b/internal/app/cli/command/log/log.go @@ -0,0 +1,43 @@ +package log + +import ( + "fmt" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" + "strconv" + + "github.com/fatih/color" + "github.com/spf13/cobra" +) + +func NewCmd(printer io.Printer, proctorDClient daemon.Client, osExitFunc func(int)) *cobra.Command { + return &cobra.Command{ + Use: "logs", + Short: "Get logs of an execution context", + Long: "To get a log of execution context, this command helps retrieve logs from previous execution", + Example: "proctor logs 123", + Args: cobra.MinimumNArgs(1), + + Run: func(cmd *cobra.Command, args []string) { + executionIDParam := args[0] + executionID, err := strconv.ParseUint(executionIDParam, 10, 64) + if executionIDParam == "" || err != nil { + printer.Println("No valid execution context id provided as argument", color.FgRed) + return + } + + printer.Println("Getting logs", color.FgGreen) + printer.Println(fmt.Sprintf("%-40s %-100v", "ID", executionID), color.FgGreen) + + printer.Println("\nStreaming logs", color.FgGreen) + err = proctorDClient.StreamProcLogs(executionID) + if err != nil { + printer.Println("Error while Streaming Log.", color.FgRed) + osExitFunc(1) + return + } + + printer.Println("Execution completed.", color.FgGreen) + }, + } +} diff --git a/internal/app/cli/command/log/log_test.go b/internal/app/cli/command/log/log_test.go new file mode 100644 index 00000000..9fc3949d --- /dev/null +++ b/internal/app/cli/command/log/log_test.go @@ -0,0 +1,87 @@ +package log + +import ( + "errors" + "fmt" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" + "testing" + + "github.com/fatih/color" + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" +) + +type LogCmdTestSuite struct { + suite.Suite + mockPrinter *io.MockPrinter + mockProctorDClient *daemon.MockClient + testLogCmd *cobra.Command +} + +func (s *LogCmdTestSuite) SetupTest() { + s.mockPrinter = &io.MockPrinter{} + s.mockProctorDClient = &daemon.MockClient{} + s.testLogCmd = NewCmd(s.mockPrinter, s.mockProctorDClient, func(exitCode int) {}) +} + +func (s *LogCmdTestSuite) TestLogCmdUsage() { + assert.Equal(s.T(), "logs", s.testLogCmd.Use) +} + +func (s *LogCmdTestSuite) TestLogCmdHelp() { + assert.Equal(s.T(), "Get logs of an execution context", s.testLogCmd.Short) + assert.Equal(s.T(), "To get a log of execution context, this command helps retrieve logs from previous execution", s.testLogCmd.Long) + assert.Equal(s.T(), "proctor logs 123", s.testLogCmd.Example) +} + +func (s *LogCmdTestSuite) TestLogCmd() { + executionID := uint64(42) + + 
s.mockPrinter.On("Println", "Getting logs", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionID), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionID).Return(nil).Once() + s.mockPrinter.On("Println", "Execution completed.", color.FgGreen).Once() + + s.testLogCmd.Run(&cobra.Command{}, []string{"42"}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *LogCmdTestSuite) TestLogCmdInvalidExecutionIDError() { + t := s.T() + + s.mockPrinter.On("Println", "No valid execution context id provided as argument", color.FgRed).Once() + + s.testLogCmd.Run(&cobra.Command{}, []string{"foo"}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) + s.mockPrinter.AssertNotCalled(t, "Println", "Execution completed.", color.FgGreen) +} + +func (s *LogCmdTestSuite) TestLogCmdInvalidStreamProcLogsError() { + t := s.T() + + executionID := uint64(42) + + s.mockPrinter.On("Println", "Getting logs", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionID), color.FgGreen).Once() + s.mockPrinter.On("Println", "\nStreaming logs", color.FgGreen).Once() + + s.mockProctorDClient.On("StreamProcLogs", executionID).Return(errors.New("test")).Once() + s.mockPrinter.On("Println", "Error while Streaming Log.", color.FgRed).Once() + s.testLogCmd.Run(&cobra.Command{}, []string{"42"}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) + s.mockPrinter.AssertNotCalled(t, "Println", "Execution completed.", color.FgGreen) +} + +func TestLogCmdTestSuite(t *testing.T) { + suite.Run(t, new(LogCmdTestSuite)) +} diff --git a/cmd/root.go b/internal/app/cli/command/root.go similarity index 51% rename from cmd/root.go rename to internal/app/cli/command/root.go index 44c3d864..fe2ddfe8 100644 --- a/cmd/root.go +++ b/internal/app/cli/command/root.go @@ -1,24 +1,27 @@ -package cmd +package command import ( "fmt" - "proctor/cmd/schedule/remove" "os" - "proctor/cmd/config" - "proctor/cmd/config/view" - "proctor/cmd/description" - "proctor/cmd/execution" - "proctor/cmd/list" - "proctor/cmd/schedule" - schedule_list "proctor/cmd/schedule/list" - schedule_describe "proctor/cmd/schedule/describe" - "proctor/cmd/version" - "proctor/daemon" - "proctor/io" - "github.com/spf13/cobra" - "proctor/cmd/version/github" + + "proctor/internal/app/cli/command/config" + "proctor/internal/app/cli/command/config/view" + "proctor/internal/app/cli/command/description" + "proctor/internal/app/cli/command/execution" + "proctor/internal/app/cli/command/list" + "proctor/internal/app/cli/command/log" + "proctor/internal/app/cli/command/schedule" + scheduleDescribe "proctor/internal/app/cli/command/schedule/describe" + scheduleList "proctor/internal/app/cli/command/schedule/list" + "proctor/internal/app/cli/command/schedule/remove" + "proctor/internal/app/cli/command/status" + "proctor/internal/app/cli/command/template" + "proctor/internal/app/cli/command/version" + "proctor/internal/app/cli/command/version/github" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" ) var ( @@ -40,9 +43,18 @@ func Execute(printer io.Printer, proctorDClient daemon.Client, githubClient gith executionCmd := execution.NewCmd(printer, proctorDClient, os.Exit) rootCmd.AddCommand(executionCmd) + logCmd := log.NewCmd(printer, proctorDClient, os.Exit) + 
rootCmd.AddCommand(logCmd) + + statusCmd := status.NewCmd(printer, proctorDClient, os.Exit) + rootCmd.AddCommand(statusCmd) + listCmd := list.NewCmd(printer, proctorDClient) rootCmd.AddCommand(listCmd) + templateCmd := template.NewCmd(printer, proctorDClient) + rootCmd.AddCommand(templateCmd) + configCmd := config.NewCmd(printer) configShowCmd := view.NewCmd(printer) rootCmd.AddCommand(configCmd) @@ -50,24 +62,13 @@ func Execute(printer io.Printer, proctorDClient daemon.Client, githubClient gith scheduleCmd := schedule.NewCmd(printer, proctorDClient) rootCmd.AddCommand(scheduleCmd) - scheduleListCmd := schedule_list.NewCmd(printer, proctorDClient) + scheduleListCmd := scheduleList.NewCmd(printer, proctorDClient) scheduleCmd.AddCommand(scheduleListCmd) - scheduleDescribeCmd := schedule_describe.NewCmd(printer, proctorDClient) + scheduleDescribeCmd := scheduleDescribe.NewCmd(printer, proctorDClient) scheduleCmd.AddCommand(scheduleDescribeCmd) scheduleRemoveCmd := remove.NewCmd(printer, proctorDClient) scheduleCmd.AddCommand(scheduleRemoveCmd) - var Time, NotifyEmails, Tags, Group string - - scheduleCmd.PersistentFlags().StringVarP(&Time, "time", "t", "", "Schedule time") - scheduleCmd.MarkFlagRequired("time") - scheduleCmd.PersistentFlags().StringVarP(&Group, "group", "g", "", "Group Name") - scheduleCmd.MarkFlagRequired("group") - scheduleCmd.PersistentFlags().StringVarP(&NotifyEmails, "notify", "n", "", "Notifier Email ID's") - scheduleCmd.MarkFlagRequired("notify") - scheduleCmd.PersistentFlags().StringVarP(&Tags, "tags", "T", "", "Tags") - scheduleCmd.MarkFlagRequired("tags") - if err := rootCmd.Execute(); err != nil { fmt.Println(err) os.Exit(1) diff --git a/cmd/root_test.go b/internal/app/cli/command/root_test.go similarity index 81% rename from cmd/root_test.go rename to internal/app/cli/command/root_test.go index de58895a..67ac520b 100644 --- a/cmd/root_test.go +++ b/internal/app/cli/command/root_test.go @@ -1,13 +1,13 @@ -package cmd +package command import ( + "proctor/internal/app/cli/command/version/github" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "testing" - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" - "proctor/cmd/version/github" ) func TestRootCmdUsage(t *testing.T) { @@ -32,6 +32,8 @@ func TestRootCmdSubCommands(t *testing.T) { assert.True(t, contains(rootCmd.Commands(), "describe")) assert.True(t, contains(rootCmd.Commands(), "execute")) + assert.True(t, contains(rootCmd.Commands(), "logs")) + assert.True(t, contains(rootCmd.Commands(), "status")) assert.True(t, contains(rootCmd.Commands(), "help")) assert.True(t, contains(rootCmd.Commands(), "list")) assert.True(t, contains(rootCmd.Commands(), "config")) diff --git a/cmd/schedule/describe/describe.go b/internal/app/cli/command/schedule/describe/describe.go similarity index 69% rename from cmd/schedule/describe/describe.go rename to internal/app/cli/command/schedule/describe/describe.go index 4c6be1f4..6eea5c82 100644 --- a/cmd/schedule/describe/describe.go +++ b/internal/app/cli/command/schedule/describe/describe.go @@ -2,10 +2,13 @@ package describe import ( "fmt" + "strconv" + "github.com/fatih/color" - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" + + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" ) func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { @@ -13,21 +16,27 @@ func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { Use: "describe", Short: 
"Describe scheduled job", Long: "This command helps to describe scheduled job", - Example: fmt.Sprintf("proctor schedule describe D958FCCC-F2B3-49D1-B83A-4E70A2A775A0"), + Example: fmt.Sprintf("proctor schedule describe 502376124721"), + Args: cobra.MinimumNArgs(1), Run: func(cmd *cobra.Command, args []string) { - jobID := args[0] + jobID, err := strconv.ParseUint(args[0], 10, 64) + if err != nil { + printer.Println(err.Error(), color.FgRed) + return + } + scheduledProc, err := proctorDClient.DescribeScheduledProc(jobID) if err != nil { printer.Println(err.Error(), color.FgRed) return } - printer.Println(fmt.Sprintf("%-40s %-100s", "ID", scheduledProc.ID), color.Reset) + printer.Println(fmt.Sprintf("%-40s %-100d", "ID", scheduledProc.ID), color.Reset) printer.Println(fmt.Sprintf("%-40s %-100s", "PROC NAME", scheduledProc.Name), color.Reset) printer.Println(fmt.Sprintf("%-40s %-100s", "GROUP NAME", scheduledProc.Group), color.Reset) printer.Println(fmt.Sprintf("%-40s %-100s", "TAGS", scheduledProc.Tags), color.Reset) - printer.Println(fmt.Sprintf("%-40s %-100s", "Time", scheduledProc.Time), color.Reset) + printer.Println(fmt.Sprintf("%-40s %-100s", "Cron", scheduledProc.Cron), color.Reset) printer.Println(fmt.Sprintf("%-40s %-100s", "Notifier", scheduledProc.NotificationEmails), color.Reset) printer.Println("\nArgs", color.FgMagenta) diff --git a/cmd/schedule/describe/describe_test.go b/internal/app/cli/command/schedule/describe/describe_test.go similarity index 71% rename from cmd/schedule/describe/describe_test.go rename to internal/app/cli/command/schedule/describe/describe_test.go index e9cc7431..9d5b8a92 100644 --- a/cmd/schedule/describe/describe_test.go +++ b/internal/app/cli/command/schedule/describe/describe_test.go @@ -1,19 +1,19 @@ package describe import ( - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "testing" - "github.com/stretchr/testify/assert" ) type ScheduleCreateCmdTestSuite struct { suite.Suite - mockPrinter *io.MockPrinter - mockProctorDClient *daemon.MockClient - testScheduleDescribeCmd *cobra.Command + mockPrinter *io.MockPrinter + mockProctorDClient *daemon.MockClient + testScheduleDescribeCmd *cobra.Command } func (s *ScheduleCreateCmdTestSuite) SetupTest() { @@ -25,7 +25,7 @@ func (s *ScheduleCreateCmdTestSuite) SetupTest() { func (s *ScheduleCreateCmdTestSuite) TestScheduleCreateCmdHelp() { assert.Equal(s.T(), "Describe scheduled job", s.testScheduleDescribeCmd.Short) assert.Equal(s.T(), "This command helps to describe scheduled job", s.testScheduleDescribeCmd.Long) - assert.Equal(s.T(), "proctor schedule describe D958FCCC-F2B3-49D1-B83A-4E70A2A775A0", s.testScheduleDescribeCmd.Example) + assert.Equal(s.T(), "proctor schedule describe 502376124721", s.testScheduleDescribeCmd.Example) } func TestScheduleCreateCmdTestSuite(t *testing.T) { diff --git a/cmd/schedule/list/list.go b/internal/app/cli/command/schedule/list/list.go similarity index 76% rename from cmd/schedule/list/list.go rename to internal/app/cli/command/schedule/list/list.go index 2aa49ed7..22703b4d 100644 --- a/cmd/schedule/list/list.go +++ b/internal/app/cli/command/schedule/list/list.go @@ -2,10 +2,10 @@ package list import ( "fmt" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "github.com/fatih/color" - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" ) @@ -25,7 +25,7 @@ func NewCmd(printer 
io.Printer, proctorDClient daemon.Client) *cobra.Command { printer.Println(fmt.Sprintf("%-40s %-30s %-20s %s", "ID", "PROC NAME", "GROUP NAME", "TAGS"), color.FgGreen) for _, scheduledProc := range scheduledProcs { - printer.Println(fmt.Sprintf("%-40s %-30s %-20s %s", scheduledProc.ID, scheduledProc.Name, scheduledProc.Group,scheduledProc.Tags), color.Reset) + printer.Println(fmt.Sprintf("%-40d %-30s %-20s %s", scheduledProc.ID, scheduledProc.Name, scheduledProc.Group, scheduledProc.Tags), color.Reset) } }, } diff --git a/cmd/schedule/list/list_test.go b/internal/app/cli/command/schedule/list/list_test.go similarity index 81% rename from cmd/schedule/list/list_test.go rename to internal/app/cli/command/schedule/list/list_test.go index d63433e7..654656a8 100644 --- a/cmd/schedule/list/list_test.go +++ b/internal/app/cli/command/schedule/list/list_test.go @@ -1,19 +1,19 @@ package list import ( - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "testing" - "github.com/stretchr/testify/assert" ) type ScheduleCreateCmdTestSuite struct { suite.Suite - mockPrinter *io.MockPrinter - mockProctorDClient *daemon.MockClient - testScheduleListCmd *cobra.Command + mockPrinter *io.MockPrinter + mockProctorDClient *daemon.MockClient + testScheduleListCmd *cobra.Command } func (s *ScheduleCreateCmdTestSuite) SetupTest() { diff --git a/cmd/schedule/remove/remove.go b/internal/app/cli/command/schedule/remove/remove.go similarity index 86% rename from cmd/schedule/remove/remove.go rename to internal/app/cli/command/schedule/remove/remove.go index e46c56a7..f6871035 100644 --- a/cmd/schedule/remove/remove.go +++ b/internal/app/cli/command/schedule/remove/remove.go @@ -3,9 +3,9 @@ package remove import ( "fmt" "github.com/fatih/color" - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" ) func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { @@ -14,6 +14,7 @@ func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { Short: "Remove scheduled job", Long: "This command helps to remove scheduled job", Example: fmt.Sprintf("proctor schedule remove D958FCCC-F2B3-49D1-B83A-4E70A2A775A0"), + Args: cobra.MinimumNArgs(1), Run: func(cmd *cobra.Command, args []string) { jobID := args[0] diff --git a/cmd/schedule/remove/remove_test.go b/internal/app/cli/command/schedule/remove/remove_test.go similarity index 81% rename from cmd/schedule/remove/remove_test.go rename to internal/app/cli/command/schedule/remove/remove_test.go index 0d4d6bed..e121162d 100644 --- a/cmd/schedule/remove/remove_test.go +++ b/internal/app/cli/command/schedule/remove/remove_test.go @@ -1,19 +1,19 @@ package remove import ( - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "testing" - "github.com/stretchr/testify/assert" ) type ScheduleCreateCmdTestSuite struct { suite.Suite - mockPrinter *io.MockPrinter - mockProctorDClient *daemon.MockClient - testScheduleRemoveCmd *cobra.Command + mockPrinter *io.MockPrinter + mockProctorDClient *daemon.MockClient + testScheduleRemoveCmd *cobra.Command } func (s *ScheduleCreateCmdTestSuite) SetupTest() { diff --git a/cmd/schedule/schedule.go 
b/internal/app/cli/command/schedule/schedule.go similarity index 52% rename from cmd/schedule/schedule.go rename to internal/app/cli/command/schedule/schedule.go index 232fc4cc..cf13fc73 100644 --- a/cmd/schedule/schedule.go +++ b/internal/app/cli/command/schedule/schedule.go @@ -3,41 +3,41 @@ package schedule import ( "fmt" "github.com/fatih/color" - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "strings" ) -func NewCmd(printer io.Printer,proctorDClient daemon.Client) *cobra.Command { - return &cobra.Command{ +func NewCmd(printer io.Printer, proctorDClient daemon.Client) *cobra.Command { + scheduleCmd := &cobra.Command{ Use: "schedule", Short: "Create scheduled jobs", Long: "This command helps to create scheduled jobs", - Example: fmt.Sprintf("proctor schedule run-sample -g my-group -t '0 2 * * *' -n 'username@mail.com' -T 'sample,proctor' ARG_ONE1=foobar"), - Args: cobra.MinimumNArgs(1), + Example: fmt.Sprintf("proctor schedule run-sample -g my-group -c '0 2 * * *' -n 'username@mail.com' -T 'sample,proctor' ARG_ONE1=foobar"), + Args: cobra.MinimumNArgs(1), Run: func(cmd *cobra.Command, args []string) { procName := args[0] printer.Println(fmt.Sprintf("%-40s %-100s", "Creating Scheduled Job", procName), color.Reset) - time, err := cmd.Flags().GetString("time") + cron, err := cmd.Flags().GetString("cron") if err != nil { - printer.Println(err.Error(),color.FgRed) + printer.Println(err.Error(), color.FgRed) } notificationEmails, err := cmd.Flags().GetString("notify") if err != nil { - printer.Println(err.Error(),color.FgRed) + printer.Println(err.Error(), color.FgRed) } tags, err := cmd.Flags().GetString("tags") if err != nil { - printer.Println(err.Error(),color.FgRed) + printer.Println(err.Error(), color.FgRed) } group, err := cmd.Flags().GetString("group") if err != nil { - printer.Println(err.Error(),color.FgRed) + printer.Println(err.Error(), color.FgRed) } jobArgs := make(map[string]string) @@ -60,14 +60,26 @@ func NewCmd(printer io.Printer,proctorDClient daemon.Client) *cobra.Command { printer.Println("With No Variables", color.FgRed) } - scheduledJobID, err := proctorDClient.ScheduleJob(procName, tags, time, notificationEmails, group, jobArgs) + scheduledJobID, err := proctorDClient.ScheduleJob(procName, tags, cron, notificationEmails, group, jobArgs) if err != nil { printer.Println(err.Error(), color.FgRed) print() return } - printer.Println(fmt.Sprintf("Scheduled Job UUID : %s", scheduledJobID), color.FgGreen) + printer.Println(fmt.Sprintf("Scheduled Job UUID : %d", scheduledJobID), color.FgGreen) }, } -} + var Cron, NotifyEmails, Tags, Group string + + scheduleCmd.PersistentFlags().StringVarP(&Cron, "cron", "c", "", "Schedule cron") + _ = scheduleCmd.MarkFlagRequired("cron") + scheduleCmd.PersistentFlags().StringVarP(&Group, "group", "g", "", "Group Name") + _ = scheduleCmd.MarkFlagRequired("group") + scheduleCmd.PersistentFlags().StringVarP(&NotifyEmails, "notify", "n", "", "Notifier Email ID's") + _ = scheduleCmd.MarkFlagRequired("notify") + scheduleCmd.PersistentFlags().StringVarP(&Tags, "tags", "T", "", "Tags") + _ = scheduleCmd.MarkFlagRequired("tags") + + return scheduleCmd +} diff --git a/cmd/schedule/schedule_test.go b/internal/app/cli/command/schedule/schedule_test.go similarity index 76% rename from cmd/schedule/schedule_test.go rename to internal/app/cli/command/schedule/schedule_test.go index 48331d0a..1df34018 100644 --- a/cmd/schedule/schedule_test.go +++ 
b/internal/app/cli/command/schedule/schedule_test.go @@ -1,19 +1,19 @@ package schedule import ( - "proctor/daemon" - "proctor/io" "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" "testing" - "github.com/stretchr/testify/assert" ) type ScheduleCreateCmdTestSuite struct { suite.Suite - mockPrinter *io.MockPrinter - mockProctorDClient *daemon.MockClient - testScheduleCreateCmd *cobra.Command + mockPrinter *io.MockPrinter + mockProctorDClient *daemon.MockClient + testScheduleCreateCmd *cobra.Command } func (s *ScheduleCreateCmdTestSuite) SetupTest() { @@ -25,7 +25,7 @@ func (s *ScheduleCreateCmdTestSuite) SetupTest() { func (s *ScheduleCreateCmdTestSuite) TestScheduleCreateCmdHelp() { assert.Equal(s.T(), "Create scheduled jobs", s.testScheduleCreateCmd.Short) assert.Equal(s.T(), "This command helps to create scheduled jobs", s.testScheduleCreateCmd.Long) - assert.Equal(s.T(), "proctor schedule run-sample -g my-group -t '0 2 * * *' -n 'username@mail.com' -T 'sample,proctor' ARG_ONE1=foobar", s.testScheduleCreateCmd.Example) + assert.Equal(s.T(), "proctor schedule run-sample -g my-group -c '0 2 * * *' -n 'username@mail.com' -T 'sample,proctor' ARG_ONE1=foobar", s.testScheduleCreateCmd.Example) } func TestScheduleCreateCmdTestSuite(t *testing.T) { diff --git a/internal/app/cli/command/status/status.go b/internal/app/cli/command/status/status.go new file mode 100644 index 00000000..7cf5d999 --- /dev/null +++ b/internal/app/cli/command/status/status.go @@ -0,0 +1,45 @@ +package status + +import ( + "fmt" + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" + "strconv" + + "github.com/fatih/color" + "github.com/spf13/cobra" +) + +func NewCmd(printer io.Printer, proctorDClient daemon.Client, osExitFunc func(int)) *cobra.Command { + return &cobra.Command{ + Use: "status", + Short: "Get status of an execution context", + Long: "To get status of an execution context, this command retrieve status from previous execution", + Example: "proctor status 123", + Args: cobra.MinimumNArgs(1), + + Run: func(cmd *cobra.Command, args []string) { + executionIDParam := args[0] + executionID, err := strconv.ParseUint(executionIDParam, 10, 64) + if executionIDParam == "" || err != nil { + printer.Println("No valid execution context id provided as argument", color.FgRed) + return + } + + printer.Println("Getting status", color.FgGreen) + printer.Println(fmt.Sprintf("%-40s %-100v", "ID", executionID), color.FgGreen) + + executionContextStatus, err := proctorDClient.GetExecutionContextStatus(executionID) + if err != nil { + printer.Println(fmt.Sprintf("%-40s %-100v", "Error while Getting Status:", err.Error()), color.FgRed) + osExitFunc(1) + return + } + + printer.Println(fmt.Sprintf("%-40s %-100v", "Job Name", executionContextStatus.JobName), color.FgGreen) + printer.Println(fmt.Sprintf("%-40s %-100v", "Status", executionContextStatus.Status), color.FgGreen) + printer.Println(fmt.Sprintf("%-40s %-100v", "Updated At", executionContextStatus.UpdatedAt), color.FgGreen) + printer.Println("Execution completed.", color.FgGreen) + }, + } +} diff --git a/internal/app/cli/command/status/status_test.go b/internal/app/cli/command/status/status_test.go new file mode 100644 index 00000000..e2692108 --- /dev/null +++ b/internal/app/cli/command/status/status_test.go @@ -0,0 +1,101 @@ +package status + +import ( + "errors" + "fmt" + "testing" + + "github.com/fatih/color" + 
"github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" + "proctor/internal/pkg/constant" + modelExecution "proctor/internal/pkg/model/execution" +) + +type StatusCmdTestSuite struct { + suite.Suite + mockPrinter *io.MockPrinter + mockProctorDClient *daemon.MockClient + testStatusCmd *cobra.Command +} + +func (s *StatusCmdTestSuite) SetupTest() { + s.mockPrinter = &io.MockPrinter{} + s.mockProctorDClient = &daemon.MockClient{} + s.testStatusCmd = NewCmd(s.mockPrinter, s.mockProctorDClient, func(exitCode int) {}) +} + +func (s *StatusCmdTestSuite) TestStatusCmdUsage() { + assert.Equal(s.T(), "status", s.testStatusCmd.Use) +} + +func (s *StatusCmdTestSuite) TestStatusCmdHelp() { + assert.Equal(s.T(), "Get status of an execution context", s.testStatusCmd.Short) + assert.Equal(s.T(), "To get status of an execution context, this command retrieve status from previous execution", s.testStatusCmd.Long) + assert.Equal(s.T(), "proctor status 123", s.testStatusCmd.Example) +} + +func (s *StatusCmdTestSuite) TestStatusCmd() { + executionID := uint64(42) + + s.mockPrinter.On("Println", "Getting status", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionID), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Job Name", "foo"), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Status", constant.JobSucceeded), color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Updated At", ""), color.FgGreen).Once() + + executionResult := &modelExecution.ExecutionResult{ + ExecutionId: uint64(0), + JobName: "foo", + ExecutionName: "", + ImageTag: "", + CreatedAt: "", + UpdatedAt: "", + Status: constant.JobSucceeded, + } + s.mockProctorDClient.On("GetExecutionContextStatus", executionID).Return(executionResult, nil).Once() + s.mockPrinter.On("Println", "Execution completed.", color.FgGreen).Once() + + s.testStatusCmd.Run(&cobra.Command{}, []string{"42"}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *StatusCmdTestSuite) TestStatusCmdInvalidExecutionIDError() { + t := s.T() + + s.mockPrinter.On("Println", "No valid execution context id provided as argument", color.FgRed).Once() + + s.testStatusCmd.Run(&cobra.Command{}, []string{"foo"}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) + s.mockPrinter.AssertNotCalled(t, "Println", "Execution completed.", color.FgGreen) +} + +func (s *StatusCmdTestSuite) TestStatusCmdGetExecutionStatusError() { + t := s.T() + + executionID := uint64(42) + + s.mockPrinter.On("Println", "Getting status", color.FgGreen).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "ID", executionID), color.FgGreen).Once() + + s.mockProctorDClient.On("GetExecutionContextStatus", executionID).Return(&modelExecution.ExecutionResult{}, errors.New("test")).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100v", "Error while Getting Status:", "test"), color.FgRed).Once() + s.testStatusCmd.Run(&cobra.Command{}, []string{"42"}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) + s.mockPrinter.AssertNotCalled(t, "Println", "Execution completed.", color.FgGreen) + s.mockPrinter.AssertNotCalled(t, "Println", fmt.Sprintf("%-40s %-100v", "Status", constant.JobSucceeded), color.FgGreen) +} + +func 
TestStatusCmdTestSuite(t *testing.T) { + suite.Run(t, new(StatusCmdTestSuite)) +} diff --git a/internal/app/cli/command/template/template.go b/internal/app/cli/command/template/template.go new file mode 100644 index 00000000..80113753 --- /dev/null +++ b/internal/app/cli/command/template/template.go @@ -0,0 +1,63 @@ +package template + +import ( + "fmt" + + "github.com/fatih/color" + "github.com/spf13/cobra" + + "proctor/internal/app/cli/daemon" + utilFile "proctor/internal/app/cli/utility/file" + utilIO "proctor/internal/app/cli/utility/io" + modelMetadata "proctor/internal/pkg/model/metadata" +) + +func NewCmd(printer utilIO.Printer, proctorDClient daemon.Client) *cobra.Command { + return &cobra.Command{ + Use: "template", + Short: "Get input template of a procs", + Long: "To get input template of a procs, this command retrieve an example template derived from stored metadata", + Example: "proctor template say-hello-world say-hello-world.yaml", + Args: cobra.MinimumNArgs(2), + + Run: func(cmd *cobra.Command, args []string) { + if len(args) < 2 { + printer.Println("Incorrect command. See `proctor template --help` for usage", color.FgRed) + return + } + + userProvidedProcName := args[0] + filename := args[1] + + procList, err := proctorDClient.ListProcs() + if err != nil { + printer.Println(err.Error(), color.FgRed) + return + } + + desiredProc := modelMetadata.Metadata{} + for _, proc := range procList { + if userProvidedProcName == proc.Name { + desiredProc = proc + } + } + if len(desiredProc.Name) == 0 { + printer.Println(fmt.Sprintf("Proctor doesn't support Proc `%s`\nRun `proctor list` to view supported Procs", userProvidedProcName), color.FgRed) + return + } + + printer.Println("\nArgs", color.FgMagenta) + for _, arg := range desiredProc.EnvVars.Args { + printer.Println(fmt.Sprintf("%-40s %-100s", arg.Name, arg.Description), color.Reset) + } + + err = utilFile.WriteYAML(filename, desiredProc.EnvVars.Args) + if err != nil { + printer.Println(fmt.Sprintf("Error writing template file: %s", err.Error()), color.FgRed) + return + } + + printer.Println(fmt.Sprintf("\nTo %s, run:\nproctor execute %s -f %s ARG_ONE=foo ARG_TWO=bar", userProvidedProcName, userProvidedProcName, filename), color.FgGreen) + }, + } +} diff --git a/internal/app/cli/command/template/template_test.go b/internal/app/cli/command/template/template_test.go new file mode 100644 index 00000000..ea6e1dbb --- /dev/null +++ b/internal/app/cli/command/template/template_test.go @@ -0,0 +1,127 @@ +package template + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "testing" + + "github.com/fatih/color" + "github.com/spf13/cobra" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + + daemon2 "proctor/internal/app/cli/daemon" + "proctor/internal/app/cli/utility/io" + procMetadata "proctor/internal/pkg/model/metadata" + "proctor/internal/pkg/model/metadata/env" +) + +type TemplateCmdTestSuite struct { + suite.Suite + mockPrinter *io.MockPrinter + mockProctorDClient *daemon2.MockClient + testTemplateCmd *cobra.Command +} + +func (s *TemplateCmdTestSuite) SetupTest() { + s.mockPrinter = &io.MockPrinter{} + s.mockProctorDClient = &daemon2.MockClient{} + s.testTemplateCmd = NewCmd(s.mockPrinter, s.mockProctorDClient) +} + +func (s *TemplateCmdTestSuite) TestTemplateCmdUsage() { + assert.Equal(s.T(), "template", s.testTemplateCmd.Use) +} + +func (s *TemplateCmdTestSuite) TestTemplateCmdHelp() { + assert.Equal(s.T(), "Get input template of a procs", s.testTemplateCmd.Short) + assert.Equal(s.T(), "To get input 
template of a procs, this command retrieve an example template derived from stored metadata", s.testTemplateCmd.Long) + assert.Equal(s.T(), "proctor template say-hello-world say-hello-world.yaml", s.testTemplateCmd.Example) +} + +func (s *TemplateCmdTestSuite) TestTemplateCmdRun() { + t := s.T() + + filename := "/tmp/yaml-test-template" + defer os.Remove(filename) + + arg := env.VarMetadata{ + Name: "arg-one", + Description: "arg one description", + } + + secret := env.VarMetadata{ + Name: "secret-one", + Description: "secret one description", + } + + anyProc := procMetadata.Metadata{ + Name: "do-something", + Description: "does something", + Contributors: "user@example.com", + Organization: "org", + AuthorizedGroups: []string{"group_one", "group_two"}, + EnvVars: env.Vars{ + Args: []env.VarMetadata{arg}, + Secrets: []env.VarMetadata{secret}, + }, + } + procList := []procMetadata.Metadata{anyProc} + + s.mockProctorDClient.On("ListProcs").Return(procList, nil).Once() + + s.mockPrinter.On("Println", "\nArgs", color.FgMagenta).Once() + s.mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", arg.Name, arg.Description), color.Reset).Once() + s.mockPrinter.On("Println", fmt.Sprintf("\nTo %s, run:\nproctor execute %s -f %s ARG_ONE=foo ARG_TWO=bar", anyProc.Name, anyProc.Name, filename), color.FgGreen).Once() + + s.testTemplateCmd.Run(&cobra.Command{}, []string{anyProc.Name, filename}) + + templateFile, err := os.Open(filename) + assert.NoError(t, err) + defer templateFile.Close() + + templateBuffer, err := ioutil.ReadAll(templateFile) + assert.Equal(t, templateBuffer, []byte("# arg one description\narg-one:\n")) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *TemplateCmdTestSuite) TestTemplateCmdForIncorrectUsage() { + s.mockPrinter.On("Println", "Incorrect command. 
See `proctor template --help` for usage", color.FgRed).Once() + + s.testTemplateCmd.Run(&cobra.Command{}, []string{}) + + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *TemplateCmdTestSuite) TestTemplateCmdRunProctorDClientFailure() { + filename := "/tmp/yaml-test-template" + + s.mockProctorDClient.On("ListProcs").Return([]procMetadata.Metadata{}, errors.New("test error")).Once() + s.mockPrinter.On("Println", "test error", color.FgRed).Once() + + s.testTemplateCmd.Run(&cobra.Command{}, []string{"do-something", filename}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func (s *TemplateCmdTestSuite) TestTemplateCmdRunProcNotSupported() { + filename := "/tmp/yaml-test-template" + + s.mockProctorDClient.On("ListProcs").Return([]procMetadata.Metadata{}, nil).Once() + testProcName := "do-something" + s.mockPrinter.On("Println", fmt.Sprintf("Proctor doesn't support Proc `%s`\nRun `proctor list` to view supported Procs", testProcName), color.FgRed).Once() + + s.testTemplateCmd.Run(&cobra.Command{}, []string{testProcName, filename}) + + s.mockProctorDClient.AssertExpectations(s.T()) + s.mockPrinter.AssertExpectations(s.T()) +} + +func TestTemplateCmdTestSuite(t *testing.T) { + suite.Run(t, new(TemplateCmdTestSuite)) +} diff --git a/cmd/version/github/client.go b/internal/app/cli/command/version/github/client.go similarity index 83% rename from cmd/version/github/client.go rename to internal/app/cli/command/version/github/client.go index 20ee8d51..10b0cad1 100644 --- a/cmd/version/github/client.go +++ b/internal/app/cli/command/version/github/client.go @@ -19,5 +19,11 @@ func NewClient() *client { func (gc *client) LatestRelease(owner, repository string) (string, error) { release, _, err := gc.client.Repositories.GetLatestRelease(context.Background(), owner, repository) - return *release.TagName, err + releaseTag := "" + + if err == nil { + releaseTag = *release.TagName + } + + return releaseTag, err } diff --git a/cmd/version/github/client_mock.go b/internal/app/cli/command/version/github/client_mock.go similarity index 100% rename from cmd/version/github/client_mock.go rename to internal/app/cli/command/version/github/client_mock.go diff --git a/cmd/version/version.go b/internal/app/cli/command/version/version.go similarity index 81% rename from cmd/version/version.go rename to internal/app/cli/command/version/version.go index e483dd7b..cf4f5163 100644 --- a/cmd/version/version.go +++ b/internal/app/cli/command/version/version.go @@ -2,14 +2,14 @@ package version import ( "fmt" + "proctor/internal/app/cli/command/version/github" + "proctor/internal/app/cli/utility/io" "github.com/fatih/color" - "proctor/cmd/version/github" - "proctor/io" "github.com/spf13/cobra" ) -const ClientVersion = "v0.6.0" +const ClientVersion = "v2.0.8" func NewCmd(printer io.Printer, fetcher github.LatestReleaseFetcher) *cobra.Command { return &cobra.Command{ @@ -18,7 +18,7 @@ func NewCmd(printer io.Printer, fetcher github.LatestReleaseFetcher) *cobra.Comm Long: `Example: proctor version`, Run: func(cmd *cobra.Command, args []string) { printer.Println(fmt.Sprintf("Proctor: A Developer Friendly Automation Orchestrator %s", ClientVersion), color.Reset) - release, e := fetcher.LatestRelease("gojektech", "proctor") + release, e := fetcher.LatestRelease("gopaytech", "proctor") if e == nil && release != ClientVersion { printer.Println(fmt.Sprintf("Your version of Proctor client is out of date! 
The latest version is %s You can update by either running brew upgrade proctor or downloading a release for your OS here: https://proctor/releases", release), color.Reset) } diff --git a/cmd/version/version_test.go b/internal/app/cli/command/version/version_test.go similarity index 80% rename from cmd/version/version_test.go rename to internal/app/cli/command/version/version_test.go index 70e2cf03..997fabb7 100644 --- a/cmd/version/version_test.go +++ b/internal/app/cli/command/version/version_test.go @@ -2,17 +2,17 @@ package version import ( "fmt" + "proctor/internal/app/cli/command/version/github" + "proctor/internal/app/cli/utility/io" "testing" "github.com/fatih/color" - gh "proctor/cmd/version/github" - "proctor/io" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" ) func TestVersionCmdUsage(t *testing.T) { - githubClient := &gh.MockClient{} + githubClient := &github.MockClient{} versionCmd := NewCmd(&io.MockPrinter{}, githubClient) assert.Equal(t, "version", versionCmd.Use) assert.Equal(t, "Print version of Proctor command-line tool", versionCmd.Short) @@ -21,12 +21,12 @@ func TestVersionCmdUsage(t *testing.T) { func TestLatestVersionCmd(t *testing.T) { mockPrinter := &io.MockPrinter{} - githubClient := &gh.MockClient{} + githubClient := &github.MockClient{} versionCmd := NewCmd(mockPrinter, githubClient) - version := "v0.6.0" + version := "v2.0.8" mockPrinter.On("Println", fmt.Sprintf("Proctor: A Developer Friendly Automation Orchestrator %s", ClientVersion), color.Reset).Once() - githubClient.On("LatestRelease", "gojektech", "proctor").Return(version, nil) + githubClient.On("LatestRelease", "gopaytech", "proctor").Return(version, nil) versionCmd.Run(&cobra.Command{}, []string{}) @@ -35,7 +35,7 @@ func TestLatestVersionCmd(t *testing.T) { func TestOldVersionCmd(t *testing.T) { mockPrinter := &io.MockPrinter{} - githubClient := &gh.MockClient{} + githubClient := &github.MockClient{} version := "v1000.0.0" versionCmd := NewCmd(mockPrinter, githubClient) @@ -43,7 +43,7 @@ func TestOldVersionCmd(t *testing.T) { mockPrinter.On("Println", fmt.Sprintf("Your version of Proctor client is out of date!"+ " The latest version is %s You can update by either running brew upgrade proctor or downloading a release for your OS here:"+ " https://proctor/releases", version), color.Reset).Once() - githubClient.On("LatestRelease", "gojektech", "proctor").Return(version, nil) + githubClient.On("LatestRelease", "gopaytech", "proctor").Return(version, nil) versionCmd.Run(&cobra.Command{}, []string{}) diff --git a/config/config.go b/internal/app/cli/config/config.go similarity index 81% rename from config/config.go rename to internal/app/cli/config/config.go index 78fd43cf..fc1f91d9 100644 --- a/config/config.go +++ b/internal/app/cli/config/config.go @@ -5,8 +5,8 @@ import ( "os" "time" - "proctor/proctord/utility" "github.com/pkg/errors" + "proctor/internal/pkg/constant" "github.com/spf13/viper" ) @@ -73,20 +73,29 @@ func (loader *loader) Load() (ProctorConfig, ConfigError) { proctorHost := viper.GetString(ProctorHost) if proctorHost == "" { - return ProctorConfig{}, ConfigError{error: errors.New("Mandatory Config Missing"), Message: utility.ConfigProctorHostMissingError} + return ProctorConfig{}, ConfigError{error: errors.New("Mandatory Config Missing"), Message: constant.ConfigProctorHostMissingError} } emailId := viper.GetString(EmailId) accessToken := viper.GetString(AccessToken) connectionTimeout := time.Duration(viper.GetInt(ConnectionTimeoutSecs)) * time.Second 
procExecutionStatusPollCount := viper.GetInt(ProcExecutionStatusPollCount) - return ProctorConfig{Host: proctorHost, Email: emailId, AccessToken: accessToken, ConnectionTimeoutSecs: connectionTimeout, ProcExecutionStatusPollCount: procExecutionStatusPollCount}, ConfigError{} + return ProctorConfig{ + Host: proctorHost, + Email: emailId, + AccessToken: accessToken, + ConnectionTimeoutSecs: connectionTimeout, + ProcExecutionStatusPollCount: procExecutionStatusPollCount, + }, ConfigError{} } // Returns Config file directory // This allows to test on dev environment without conflicting with installed proctor config file func ConfigFileDir() string { - if os.Getenv(Environment) == "test" { + localConfigDir, localConfigAvailable := os.LookupEnv("LOCAL_CONFIG_DIR") + if localConfigAvailable { + return localConfigDir + } else if os.Getenv(Environment) == "test" { return "/tmp" } else { return fmt.Sprintf("%s/.proctor", os.Getenv("HOME")) diff --git a/config/config_mock.go b/internal/app/cli/config/config_mock.go similarity index 100% rename from config/config_mock.go rename to internal/app/cli/config/config_mock.go diff --git a/config/config_test.go b/internal/app/cli/config/config_test.go similarity index 100% rename from config/config_test.go rename to internal/app/cli/config/config_test.go diff --git a/config/data.go b/internal/app/cli/config/data.go similarity index 97% rename from config/data.go rename to internal/app/cli/config/data.go index 7f433388..ba202b8b 100644 --- a/config/data.go +++ b/internal/app/cli/config/data.go @@ -184,8 +184,8 @@ type bintree struct { } var _bintree = &bintree{nil, map[string]*bintree{ - "data": &bintree{nil, map[string]*bintree{ - "config_template.yaml": &bintree{dataConfig_templateYaml, map[string]*bintree{}}, + "data": {nil, map[string]*bintree{ + "config_template.yaml": {dataConfig_templateYaml, map[string]*bintree{}}, }}, }} diff --git a/data/config_template.yaml b/internal/app/cli/config_template.yaml similarity index 100% rename from data/config_template.yaml rename to internal/app/cli/config_template.yaml diff --git a/internal/app/cli/daemon/client.go b/internal/app/cli/daemon/client.go new file mode 100644 index 00000000..b22bef70 --- /dev/null +++ b/internal/app/cli/daemon/client.go @@ -0,0 +1,451 @@ +package daemon + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + ioReader "io" + "io/ioutil" + "net" + "net/http" + "net/url" + "os" + "os/signal" + "time" + + "github.com/briandowns/spinner" + "github.com/fatih/color" + "github.com/gorilla/websocket" + + "proctor/internal/app/cli/command/version" + "proctor/internal/app/cli/config" + "proctor/internal/app/cli/utility/io" + "proctor/internal/pkg/constant" + modelExecution "proctor/internal/pkg/model/execution" + modelMetadata "proctor/internal/pkg/model/metadata" + modelSchedule "proctor/internal/pkg/model/schedule" +) + +const ( + ExecutionRoute string = "/execution" + ExecutionLogsRoute string = "/execution/logs" + MetadataRoute string = "/metadata" + ScheduleRoute string = "/schedule" +) + +type Client interface { + ListProcs() ([]modelMetadata.Metadata, error) + ExecuteProc(string, map[string]string) (*modelExecution.ExecutionResult, error) + StreamProcLogs(executionId uint64) error + GetExecutionContextStatusWithPolling(executionId uint64) (*modelExecution.ExecutionResult, error) + GetExecutionContextStatus(executionId uint64) (*modelExecution.ExecutionResult, error) + ScheduleJob(string, string, string, string, string, map[string]string) (uint64, error) + ListScheduledProcs() 
([]modelSchedule.ScheduledJob, error) + DescribeScheduledProc(uint64) (modelSchedule.ScheduledJob, error) + RemoveScheduledProc(string) error +} + +type client struct { + printer io.Printer + proctorConfigLoader config.Loader + proctordHost string + emailId string + accessToken string + clientVersion string + connectionTimeoutSecs time.Duration + procExecutionStatusPollCount int +} + +type ProcToExecute struct { + Name string `json:"name"` + Args map[string]string `json:"args"` +} + +func NewClient(printer io.Printer, proctorConfigLoader config.Loader) Client { + return &client{ + clientVersion: version.ClientVersion, + printer: printer, + proctorConfigLoader: proctorConfigLoader, + } +} + +func (c *client) ScheduleJob(name, tags, cron, notificationEmails, group string, jobArgs map[string]string) (uint64, error) { + err := c.loadProctorConfig() + if err != nil { + return 0, err + } + jobPayload := modelSchedule.ScheduledJob{ + Name: name, + Tags: tags, + Cron: cron, + NotificationEmails: notificationEmails, + Args: jobArgs, + Group: group, + Enabled: true, + } + + requestBody, err := json.Marshal(jobPayload) + if err != nil { + return 0, err + } + + client := &http.Client{} + req, err := http.NewRequest("POST", "http://"+c.proctordHost+ScheduleRoute, bytes.NewReader(requestBody)) + req.Header.Add("Content-Type", "application/json") + req.Header.Add(constant.UserEmailHeaderKey, c.emailId) + req.Header.Add(constant.AccessTokenHeaderKey, c.accessToken) + req.Header.Add(constant.ClientVersionHeaderKey, c.clientVersion) + resp, err := client.Do(req) + + if err != nil { + return 0, buildNetworkError(err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusCreated { + return 0, buildHTTPError(c, resp) + } + + var scheduledJob modelSchedule.ScheduledJob + err = json.NewDecoder(resp.Body).Decode(&scheduledJob) + + return scheduledJob.ID, err +} + +func (c *client) loadProctorConfig() error { + proctorConfig, err := c.proctorConfigLoader.Load() + if err != (config.ConfigError{}) { + c.printer.Println(err.RootError().Error(), color.FgRed) + c.printer.Println(err.Message, color.FgGreen) + return errors.New("Encountered error while loading config, exiting.") + } + + c.proctordHost = proctorConfig.Host + c.emailId = proctorConfig.Email + c.accessToken = proctorConfig.AccessToken + c.connectionTimeoutSecs = proctorConfig.ConnectionTimeoutSecs + c.procExecutionStatusPollCount = proctorConfig.ProcExecutionStatusPollCount + + return nil +} + +func (c *client) ListProcs() ([]modelMetadata.Metadata, error) { + err := c.loadProctorConfig() + if err != nil { + return []modelMetadata.Metadata{}, err + } + + client := &http.Client{ + Timeout: c.connectionTimeoutSecs, + } + req, err := http.NewRequest("GET", "http://"+c.proctordHost+MetadataRoute, nil) + req.Header.Add(constant.UserEmailHeaderKey, c.emailId) + req.Header.Add(constant.AccessTokenHeaderKey, c.accessToken) + req.Header.Add(constant.ClientVersionHeaderKey, c.clientVersion) + + resp, err := client.Do(req) + if err != nil { + return []modelMetadata.Metadata{}, buildNetworkError(err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return []modelMetadata.Metadata{}, buildHTTPError(c, resp) + } + + var procList []modelMetadata.Metadata + err = json.NewDecoder(resp.Body).Decode(&procList) + return procList, err +} + +func (c *client) ListScheduledProcs() ([]modelSchedule.ScheduledJob, error) { + err := c.loadProctorConfig() + if err != nil { + return []modelSchedule.ScheduledJob{}, err + } + + client := &http.Client{ 
+ Timeout: c.connectionTimeoutSecs, + } + req, err := http.NewRequest("GET", "http://"+c.proctordHost+ScheduleRoute, nil) + req.Header.Add(constant.UserEmailHeaderKey, c.emailId) + req.Header.Add(constant.AccessTokenHeaderKey, c.accessToken) + req.Header.Add(constant.ClientVersionHeaderKey, c.clientVersion) + + resp, err := client.Do(req) + if err != nil { + return []modelSchedule.ScheduledJob{}, buildNetworkError(err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return []modelSchedule.ScheduledJob{}, buildHTTPError(c, resp) + } + + var scheduledProcsList []modelSchedule.ScheduledJob + err = json.NewDecoder(resp.Body).Decode(&scheduledProcsList) + return scheduledProcsList, err +} + +func (c *client) DescribeScheduledProc(jobID uint64) (modelSchedule.ScheduledJob, error) { + err := c.loadProctorConfig() + if err != nil { + return modelSchedule.ScheduledJob{}, err + } + + client := &http.Client{ + Timeout: c.connectionTimeoutSecs, + } + url := fmt.Sprintf("http://"+c.proctordHost+ScheduleRoute+"/%d", jobID) + req, err := http.NewRequest("GET", url, nil) + req.Header.Add(constant.UserEmailHeaderKey, c.emailId) + req.Header.Add(constant.AccessTokenHeaderKey, c.accessToken) + req.Header.Add(constant.ClientVersionHeaderKey, c.clientVersion) + + resp, err := client.Do(req) + if err != nil { + return modelSchedule.ScheduledJob{}, buildNetworkError(err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return modelSchedule.ScheduledJob{}, buildHTTPError(c, resp) + } + + var scheduledProc modelSchedule.ScheduledJob + err = json.NewDecoder(resp.Body).Decode(&scheduledProc) + return scheduledProc, err +} + +func (c *client) RemoveScheduledProc(jobID string) error { + err := c.loadProctorConfig() + if err != nil { + return err + } + + client := &http.Client{ + Timeout: c.connectionTimeoutSecs, + } + url := fmt.Sprintf("http://"+c.proctordHost+ScheduleRoute+"/%s", jobID) + req, err := http.NewRequest("DELETE", url, nil) + req.Header.Add(constant.UserEmailHeaderKey, c.emailId) + req.Header.Add(constant.AccessTokenHeaderKey, c.accessToken) + req.Header.Add(constant.ClientVersionHeaderKey, c.clientVersion) + + resp, err := client.Do(req) + if err != nil { + return buildNetworkError(err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusOK { + return buildHTTPError(c, resp) + } + + return nil +} + +func (c *client) ExecuteProc(name string, args map[string]string) (*modelExecution.ExecutionResult, error) { + err := c.loadProctorConfig() + if err != nil { + return nil, err + } + + procToExecute := ProcToExecute{ + Name: name, + Args: args, + } + + requestBody, err := json.Marshal(procToExecute) + if err != nil { + return nil, err + } + + client := &http.Client{} + req, err := http.NewRequest("POST", "http://"+c.proctordHost+ExecutionRoute, bytes.NewReader(requestBody)) + req.Header.Add("Content-Type", "application/json") + req.Header.Add(constant.UserEmailHeaderKey, c.emailId) + req.Header.Add(constant.AccessTokenHeaderKey, c.accessToken) + req.Header.Add(constant.ClientVersionHeaderKey, c.clientVersion) + resp, err := client.Do(req) + if err != nil { + return nil, buildNetworkError(err) + } + + defer resp.Body.Close() + if resp.StatusCode != http.StatusCreated { + return nil, buildHTTPError(c, resp) + } + + var executionResult modelExecution.ExecutionResult + err = json.NewDecoder(resp.Body).Decode(&executionResult) + + return &executionResult, err +} + +func (c *client) StreamProcLogs(executionId uint64) error { + err := 
c.loadProctorConfig() + if err != nil { + return err + } + + animation := spinner.New(spinner.CharSets[9], 100*time.Millisecond) + animation.Color("green") + animation.Start() + + interrupt := make(chan os.Signal, 1) + signal.Notify(interrupt, os.Interrupt) + + proctodWebsocketURL := url.URL{Scheme: "ws", Host: c.proctordHost, Path: ExecutionLogsRoute} + proctodWebsocketURLWithProcName := fmt.Sprintf("%s?context_id=%v", proctodWebsocketURL.String(), executionId) + + headers := make(map[string][]string) + token := []string{c.accessToken} + emailId := []string{c.emailId} + clientVersion := []string{c.clientVersion} + headers[constant.AccessTokenHeaderKey] = token + headers[constant.UserEmailHeaderKey] = emailId + headers[constant.ClientVersionHeaderKey] = clientVersion + + wsConn, response, err := websocket.DefaultDialer.Dial(proctodWebsocketURLWithProcName, headers) + if err != nil { + animation.Stop() + if response.StatusCode == http.StatusUnauthorized { + if c.emailId == "" || c.accessToken == "" { + return fmt.Errorf("%s\n%s", constant.UnauthorizedErrorHeader, constant.UnauthorizedErrorMissingConfig) + } + return fmt.Errorf("%s\n%s", constant.UnauthorizedErrorHeader, constant.UnauthorizedErrorInvalidConfig) + } + return err + } + defer wsConn.Close() + + logStreaming := make(chan int) + go func() { + for { + _, message, err := wsConn.ReadMessage() + animation.Stop() + if err != nil { + fmt.Println() + logStreaming <- 0 + return + } + fmt.Println(string(message)) + } + }() + + for { + select { + case <-interrupt: + color.New(color.FgRed).Println("User interrupt while streaming proc logs") + err := wsConn.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) + return err + case <-logStreaming: + return nil + } + } +} + +func (c *client) GetExecutionContextStatusWithPolling(executionId uint64) (*modelExecution.ExecutionResult, error) { + err := c.loadProctorConfig() + if err != nil { + return nil, err + } + + for count := 0; count < c.procExecutionStatusPollCount; count += 1 { + executionContextStatus, err := c.GetExecutionContextStatus(executionId) + if err != nil { + return nil, err + } + if executionContextStatus.Status == constant.JobSucceeded || executionContextStatus.Status == constant.JobFailed { + return executionContextStatus, nil + } + + time.Sleep(time.Duration(count) * 100 * time.Millisecond) + } + return nil, errors.New(fmt.Sprintf("No definitive status received for execution with id %v from proctord", executionId)) +} + +func (c *client) GetExecutionContextStatus(executionId uint64) (*modelExecution.ExecutionResult, error) { + err := c.loadProctorConfig() + if err != nil { + return nil, err + } + + httpClient := &http.Client{ + Timeout: c.connectionTimeoutSecs, + } + + req, err := http.NewRequest("GET", fmt.Sprintf("http://%s%s/%v/status", c.proctordHost, ExecutionRoute, executionId), nil) + req.Header.Add(constant.UserEmailHeaderKey, c.emailId) + req.Header.Add(constant.AccessTokenHeaderKey, c.accessToken) + req.Header.Add(constant.ClientVersionHeaderKey, c.clientVersion) + + resp, err := httpClient.Do(req) + if err != nil { + return nil, buildNetworkError(err) + } + + if resp.StatusCode != http.StatusOK { + return nil, buildHTTPError(c, resp) + } + + body, err := ioutil.ReadAll(resp.Body) + defer resp.Body.Close() + if err != nil { + return nil, err + } + + var executionResult modelExecution.ExecutionResult + err = json.Unmarshal(body, &executionResult) + if err != nil { + return nil, err + } + + return &executionResult, nil +} 
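
For reference, a minimal usage sketch of the status helpers defined just above (GetExecutionContextStatus and GetExecutionContextStatusWithPolling). It relies only on the daemon.Client interface, the ExecutionResult fields, and the constants introduced in this diff; the package name and checkExecution are illustrative, and the proc name and args are borrowed from the client tests, not part of the change itself.

package example

import (
	"fmt"

	"proctor/internal/app/cli/daemon"
	"proctor/internal/pkg/constant"
)

// checkExecution is an illustrative sketch (not part of this change): it runs a
// proc through any daemon.Client and then waits for a terminal status using the
// polling helper above. The proc name and args mirror the client tests.
func checkExecution(client daemon.Client) error {
	// Trigger the execution; proctord responds with the new execution context.
	result, err := client.ExecuteProc("run-sample", map[string]string{"SAMPLE_ARG1": "sample-value"})
	if err != nil {
		return err
	}

	// Block until proctord reports JobSucceeded or JobFailed, bounded by
	// ProcExecutionStatusPollCount from the CLI config.
	finalStatus, err := client.GetExecutionContextStatusWithPolling(result.ExecutionId)
	if err != nil {
		return err
	}

	if finalStatus.Status != constant.JobSucceeded {
		return fmt.Errorf("execution %s finished with status %v", result.ExecutionName, finalStatus.Status)
	}
	return nil
}

Because GetExecutionContextStatusWithPolling retries up to ProcExecutionStatusPollCount times with a growing sleep, a caller only needs the single call shown here rather than its own polling loop.
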
+ +func buildNetworkError(err error) error { + if netError, ok := err.(net.Error); ok && netError.Timeout() { + return fmt.Errorf("%s\n%s\n%s", constant.GenericTimeoutErrorHeader, netError.Error(), constant.GenericTimeoutErrorBody) + } + return fmt.Errorf("%s\n%s", constant.GenericNetworkErrorHeader, err.Error()) +} + +func buildHTTPError(c *client, resp *http.Response) error { + if resp.StatusCode == http.StatusUnauthorized { + if c.emailId == "" || c.accessToken == "" { + return fmt.Errorf("%s\n%s", constant.UnauthorizedErrorHeader, constant.UnauthorizedErrorMissingConfig) + } + return fmt.Errorf("%s\n%s", constant.UnauthorizedErrorHeader, constant.UnauthorizedErrorInvalidConfig) + } + + if resp.StatusCode == http.StatusBadRequest { + return getHTTPResponseError(resp.Body) + } + + if resp.StatusCode == http.StatusNoContent { + return fmt.Errorf(constant.NoScheduledJobsError) + } + + if resp.StatusCode == http.StatusNotFound { + return fmt.Errorf(constant.JobNotFoundError) + } + + if resp.StatusCode == http.StatusForbidden { + return fmt.Errorf(constant.JobForbiddenErrorHeader) + } + + if resp.StatusCode == http.StatusInternalServerError { + return getHTTPResponseError(resp.Body) + } + + return fmt.Errorf("%s\nStatus Code: %d, %s", constant.GenericResponseErrorHeader, resp.StatusCode, http.StatusText(resp.StatusCode)) +} + +func getHTTPResponseError(response ioReader.ReadCloser) error { + body, _ := ioutil.ReadAll(response) + bodyString := string(body) + return fmt.Errorf(bodyString) +} diff --git a/internal/app/cli/daemon/client_mock.go b/internal/app/cli/daemon/client_mock.go new file mode 100644 index 00000000..1a22cd50 --- /dev/null +++ b/internal/app/cli/daemon/client_mock.go @@ -0,0 +1,58 @@ +package daemon + +import ( + "github.com/stretchr/testify/mock" + + modelExecution "proctor/internal/pkg/model/execution" + modelMetadata "proctor/internal/pkg/model/metadata" + modelSchedule "proctor/internal/pkg/model/schedule" +) + +type MockClient struct { + mock.Mock +} + +func (m *MockClient) ListProcs() ([]modelMetadata.Metadata, error) { + args := m.Called() + return args.Get(0).([]modelMetadata.Metadata), args.Error(1) +} + +func (m *MockClient) ListScheduledProcs() ([]modelSchedule.ScheduledJob, error) { + args := m.Called() + return args.Get(0).([]modelSchedule.ScheduledJob), args.Error(1) +} + +func (m *MockClient) ExecuteProc(name string, procArgs map[string]string) (*modelExecution.ExecutionResult, error) { + args := m.Called(name, procArgs) + return args.Get(0).(*modelExecution.ExecutionResult), args.Error(1) +} + +func (m *MockClient) StreamProcLogs(executionId uint64) error { + args := m.Called(executionId) + return args.Error(0) +} + +func (m *MockClient) GetExecutionContextStatusWithPolling(executionId uint64) (*modelExecution.ExecutionResult, error) { + args := m.Called(executionId) + return args.Get(0).(*modelExecution.ExecutionResult), args.Error(1) +} + +func (m *MockClient) GetExecutionContextStatus(executionId uint64) (*modelExecution.ExecutionResult, error) { + args := m.Called(executionId) + return args.Get(0).(*modelExecution.ExecutionResult), args.Error(1) +} + +func (m *MockClient) ScheduleJob(name, tags, time, notificationEmails string, group string, jobArgs map[string]string) (uint64, error) { + args := m.Called(name, tags, time, notificationEmails, group, jobArgs) + return args.Get(0).(uint64), args.Error(1) +} + +func (m *MockClient) DescribeScheduledProc(jobID uint64) (modelSchedule.ScheduledJob, error) { + args := m.Called(jobID) + return 
args.Get(0).(modelSchedule.ScheduledJob), args.Error(1) +} + +func (m *MockClient) RemoveScheduledProc(jobID string) error { + args := m.Called(jobID) + return args.Error(0) +} diff --git a/daemon/client_test.go b/internal/app/cli/daemon/client_test.go similarity index 50% rename from daemon/client_test.go rename to internal/app/cli/daemon/client_test.go index c826f08a..8dbdf92d 100644 --- a/daemon/client_test.go +++ b/internal/app/cli/daemon/client_test.go @@ -8,18 +8,18 @@ import ( "strings" "testing" - "proctor/cmd/version" - - "proctor/config" - "proctor/io" "github.com/gorilla/websocket" - "github.com/thingful/httpmock" - - proc_metadata "proctor/proctord/jobs/metadata" - "proctor/proctord/jobs/metadata/env" - "proctor/proctord/utility" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + "github.com/thingful/httpmock" + + "proctor/internal/app/cli/command/version" + "proctor/internal/app/cli/config" + "proctor/internal/app/cli/utility/io" + "proctor/internal/pkg/constant" + modelExecution "proctor/internal/pkg/model/execution" + modelMetadata "proctor/internal/pkg/model/metadata" + "proctor/internal/pkg/model/metadata/env" ) type TestConnectionError struct { @@ -58,11 +58,11 @@ func (s *ClientTestSuite) TestListProcsReturnsListOfProcsWithDetails() { defer httpmock.DeactivateAndReset() body := `[ { "name": "job-1", "description": "job description", "image_name": "hub.docker.com/job-1:latest", "env_vars": { "secrets": [ { "name": "SECRET1", "description": "Base64 encoded secret for authentication." } ], "args": [ { "name": "ARG1", "description": "Argument name" } ] } } ]` - var args = []env.VarMetadata{env.VarMetadata{Name: "ARG1", Description: "Argument name"}} - var secrets = []env.VarMetadata{env.VarMetadata{Name: "SECRET1", Description: "Base64 encoded secret for authentication."}} + var args = []env.VarMetadata{{Name: "ARG1", Description: "Argument name"}} + var secrets = []env.VarMetadata{{Name: "SECRET1", Description: "Base64 encoded secret for authentication."}} envVars := env.Vars{Secrets: secrets, Args: args} - var expectedProcList = []proc_metadata.Metadata{ - proc_metadata.Metadata{ + var expectedProcList = []modelMetadata.Metadata{ + { Name: "job-1", Description: "job description", ImageName: "hub.docker.com/job-1:latest", @@ -70,21 +70,9 @@ func (s *ClientTestSuite) TestListProcsReturnsListOfProcsWithDetails() { }, } - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/metadata", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(200, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(200, body) + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+MetadataRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -103,30 +91,18 @@ func (s *ClientTestSuite) TestListProcsReturnErrorFromResponseBody() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/metadata", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(500, `{}`), nil - }, - ).WithHeader( - &http.Header{ - 
utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(500, "list proc error") + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+MetadataRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() procList, err := s.testClient.ListProcs() - assert.Equal(t, []proc_metadata.Metadata{}, procList) + assert.Equal(t, []modelMetadata.Metadata{}, procList) assert.Error(t, err) s.mockConfigLoader.AssertExpectations(t) - assert.Equal(t, "Server Error!!!\nStatus Code: 500, Internal Server Error", err.Error()) + assert.Equal(t, "list proc error", err.Error()) } func (s *ClientTestSuite) TestListProcsReturnClientSideTimeoutError() { @@ -137,28 +113,16 @@ func (s *ClientTestSuite) TestListProcsReturnClientSideTimeoutError() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/metadata", - func(req *http.Request) (*http.Response, error) { - return nil, TestConnectionError{message: "Unable to reach http://proctor.example.com/", timeout: true} - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + var mockResponse *http.Response + mockError := TestConnectionError{message: "Unable to reach http://proctor.example.com/", timeout: true} + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+MetadataRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() procList, err := s.testClient.ListProcs() - assert.Equal(t, errors.New("Connection Timeout!!!\nGet http://proctor.example.com/jobs/metadata: Unable to reach http://proctor.example.com/\nPlease check your Internet/VPN connection for connectivity to ProctorD."), err) - assert.Equal(t, []proc_metadata.Metadata{}, procList) + assert.Equal(t, errors.New("Connection Timeout!!!\nGet http://proctor.example.com/metadata: Unable to reach http://proctor.example.com/\nPlease check your Internet/VPN connection for connectivity to ProctorD."), err) + assert.Equal(t, []modelMetadata.Metadata{}, procList) s.mockConfigLoader.AssertExpectations(t) } @@ -170,28 +134,16 @@ func (s *ClientTestSuite) TestListProcsReturnClientSideConnectionError() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/metadata", - func(req *http.Request) (*http.Response, error) { - return nil, TestConnectionError{message: "Unknown Error", timeout: false} - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + var mockResponse *http.Response + mockError := TestConnectionError{message: "Unknown Error", timeout: false} + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+MetadataRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() procList, err := s.testClient.ListProcs() - assert.Equal(t, errors.New("Network 
Error!!!\nGet http://proctor.example.com/jobs/metadata: Unknown Error"), err) - assert.Equal(t, []proc_metadata.Metadata{}, procList) + assert.Equal(t, errors.New("Network Error!!!\nGet http://proctor.example.com/metadata: Unknown Error"), err) + assert.Equal(t, []modelMetadata.Metadata{}, procList) s.mockConfigLoader.AssertExpectations(t) } @@ -203,27 +155,15 @@ func (s *ClientTestSuite) TestListProcsForUnauthorizedUser() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/metadata", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(401, `{}`), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(401, `{}`) + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+MetadataRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() procList, err := s.testClient.ListProcs() - assert.Equal(t, []proc_metadata.Metadata{}, procList) + assert.Equal(t, []modelMetadata.Metadata{}, procList) assert.Equal(t, "Unauthorized Access!!!\nPlease check the EMAIL_ID and ACCESS_TOKEN validity in proctor config file.", err.Error()) s.mockConfigLoader.AssertExpectations(t) } @@ -235,26 +175,14 @@ func (s *ClientTestSuite) TestListProcsForUnauthorizedErrorWithConfigMissing() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/metadata", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(401, `{}`), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{""}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(401, `{}`) + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+MetadataRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() procList, err := s.testClient.ListProcs() - assert.Equal(t, []proc_metadata.Metadata{}, procList) + assert.Equal(t, []modelMetadata.Metadata{}, procList) assert.Equal(t, "Unauthorized Access!!!\nEMAIL_ID or ACCESS_TOKEN is not present in proctor config file.", err.Error()) s.mockConfigLoader.AssertExpectations(t) } @@ -262,8 +190,17 @@ func (s *ClientTestSuite) TestListProcsForUnauthorizedErrorWithConfigMissing() { func (s *ClientTestSuite) TestExecuteProc() { t := s.T() + executionName := "proctor-777b1dfb-ea27-46d9-b02c-839b75a542e2" proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - expectedProcResponse := "proctor-777b1dfb-ea27-46d9-b02c-839b75a542e2" + expectedProcResponse := &modelExecution.ExecutionResult{ + ExecutionId: uint64(0), + JobName: "", + ExecutionName: executionName, + ImageTag: "", + CreatedAt: "", + UpdatedAt: "", + Status: "", + } body := `{ "name": "proctor-777b1dfb-ea27-46d9-b02c-839b75a542e2"}` procName := "run-sample" procArgs := map[string]string{"SAMPLE_ARG1": "sample-value"} @@ -271,21 +208,9 @@ func (s 
*ClientTestSuite) TestExecuteProc() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "POST", - "http://"+proctorConfig.Host+"/jobs/execute", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(201, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(201, body) + mockError := error(nil) + mockRequest(proctorConfig, "POST", "http://"+proctorConfig.Host+ExecutionRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -300,38 +225,26 @@ func (s *ClientTestSuite) TestSuccessScheduledJob() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - expectedProcResponse := "8965fce9-5025-43b3-b21c-920c5ff41cd9" + expectedProcResponse := uint64(7) procName := "run-sample" - time := "*/1 * * * *" + cron := "*/1 * * * *" notificationEmails := "user@mail.com" tags := "db,backup" group := "test" procArgs := map[string]string{"ARG_ONE": "sample-value"} - body := `{"id":"8965fce9-5025-43b3-b21c-920c5ff41cd9","name":"run-sample","args":{"ARG_ONE":"sample-value"},"notification_emails":"user@mail.com","time":"*/1 * * * *","tags":"db,backup", "group":"test"}` + body := `{"id":7,"name":"run-sample","args":{"ARG_ONE":"sample-value"},"notification_emails":"user@mail.com","cron":"*/1 * * * *","tags":"db,backup", "group":"test"}` httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "POST", - "http://"+proctorConfig.Host+"/jobs/schedule", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(201, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(201, body) + mockError := error(nil) + mockRequest(proctorConfig, "POST", "http://"+proctorConfig.Host+ScheduleRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - executeProcResponse, err := s.testClient.ScheduleJob(procName, tags, time, notificationEmails, group, procArgs) + executeProcResponse, err := s.testClient.ScheduleJob(procName, tags, cron, notificationEmails, group, procArgs) assert.NoError(t, err) assert.Equal(t, expectedProcResponse, executeProcResponse) @@ -343,7 +256,7 @@ func (s *ClientTestSuite) TestSchedulingAlreadyExistedScheduledJob() { proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} procName := "run-sample" - time := "*/1 * * * *" + cron := "*/1 * * * *" notificationEmails := "user@mail.com" tags := "db,backup" procArgs := map[string]string{"ARG_ONE": "sample-value"} @@ -352,25 +265,13 @@ func (s *ClientTestSuite) TestSchedulingAlreadyExistedScheduledJob() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "POST", - "http://"+proctorConfig.Host+"/jobs/schedule", - func(req *http.Request) (*http.Response, error) { - return 
httpmock.NewStringResponse(409, "Server Error!!!\nStatus Code: 409, Conflict"), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(409, "Server Error!!!\nStatus Code: 409, Conflict") + mockError := error(nil) + mockRequest(proctorConfig, "POST", "http://"+proctorConfig.Host+ScheduleRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - _, err := s.testClient.ScheduleJob(procName, tags, time, notificationEmails, group, procArgs) + _, err := s.testClient.ScheduleJob(procName, tags, cron, notificationEmails, group, procArgs) assert.Equal(t, "Server Error!!!\nStatus Code: 409, Conflict", err.Error()) s.mockConfigLoader.AssertExpectations(t) } @@ -378,33 +279,21 @@ func (s *ClientTestSuite) TestSchedulingAlreadyExistedScheduledJob() { func (s *ClientTestSuite) TestExecuteProcInternalServerError() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - expectedProcResponse := "" procName := "run-sample" procArgs := map[string]string{"SAMPLE_ARG1": "sample-value"} httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "POST", - "http://"+proctorConfig.Host+"/jobs/execute", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(500, ""), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(500, "Execute Error") + mockError := error(nil) + mockRequest(proctorConfig, "POST", "http://"+proctorConfig.Host+ExecutionRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() executeProcResponse, err := s.testClient.ExecuteProc(procName, procArgs) - assert.Equal(t, "Server Error!!!\nStatus Code: 500, Internal Server Error", err.Error()) + var expectedProcResponse *modelExecution.ExecutionResult + assert.Equal(t, "Execute Error", err.Error()) assert.Equal(t, expectedProcResponse, executeProcResponse) s.mockConfigLoader.AssertExpectations(t) } @@ -416,27 +305,16 @@ func (s *ClientTestSuite) TestExecuteProcUnAuthorized() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "POST", - "http://"+proctorConfig.Host+"/jobs/execute", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(401, ""), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(401, "") + mockError := error(nil) + mockRequest(proctorConfig, "POST", "http://"+proctorConfig.Host+ExecutionRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() executeProcResponse, err := s.testClient.ExecuteProc("run-sample", map[string]string{"SAMPLE_ARG1": "sample-value"}) - assert.Equal(t, "", executeProcResponse) + var 
expectedProcResponse *modelExecution.ExecutionResult + assert.Equal(t, expectedProcResponse, executeProcResponse) assert.Equal(t, "Unauthorized Access!!!\nPlease check the EMAIL_ID and ACCESS_TOKEN validity in proctor config file.", err.Error()) s.mockConfigLoader.AssertExpectations(t) } @@ -448,27 +326,16 @@ func (s *ClientTestSuite) TestExecuteProcUnAuthorizedWhenEmailAndAccessTokenNotS httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "POST", - "http://"+proctorConfig.Host+"/jobs/execute", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(401, ""), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{""}, - utility.AccessTokenHeaderKey: []string{""}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(401, "") + mockError := error(nil) + mockRequest(proctorConfig, "POST", "http://"+proctorConfig.Host+ExecutionRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() executeProcResponse, err := s.testClient.ExecuteProc("run-sample", map[string]string{"SAMPLE_ARG1": "sample-value"}) - assert.Equal(t, "", executeProcResponse) + var expectedProcResponse *modelExecution.ExecutionResult + assert.Equal(t, expectedProcResponse, executeProcResponse) assert.Equal(t, "Unauthorized Access!!!\nEMAIL_ID or ACCESS_TOKEN is not present in proctor config file.", err.Error()) s.mockConfigLoader.AssertExpectations(t) } @@ -480,28 +347,17 @@ func (s *ClientTestSuite) TestExecuteProcsReturnClientSideConnectionError() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "POST", - "http://"+proctorConfig.Host+"/jobs/execute", - func(req *http.Request) (*http.Response, error) { - return nil, TestConnectionError{message: "Unknown Error", timeout: false} - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + var mockResponse *http.Response = nil + mockError := TestConnectionError{message: "Unknown Error", timeout: false} + mockRequest(proctorConfig, "POST", "http://"+proctorConfig.Host+ExecutionRoute, mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() response, err := s.testClient.ExecuteProc("run-sample", map[string]string{"SAMPLE_ARG1": "sample-value"}) - assert.Equal(t, "", response) - assert.Equal(t, errors.New("Network Error!!!\nPost http://proctor.example.com/jobs/execute: Unknown Error"), err) + var expectedProcResponse *modelExecution.ExecutionResult + assert.Equal(t, expectedProcResponse, response) + assert.Equal(t, errors.New("Network Error!!!\nPost http://proctor.example.com/execution: Unknown Error"), err) s.mockConfigLoader.AssertExpectations(t) } @@ -514,9 +370,9 @@ func (s *ClientTestSuite) TestLogStreamForAuthorizedUser() { logStreamAuthorizer := func(t *testing.T) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { upgrader := websocket.Upgrader{} - assert.Equal(t, "proctor@example.com", r.Header.Get(utility.UserEmailHeaderKey)) - assert.Equal(t, "access-token", r.Header.Get(utility.AccessTokenHeaderKey)) - assert.Equal(t, version.ClientVersion, r.Header.Get(utility.ClientVersionHeaderKey)) + assert.Equal(t, 
"proctor@example.com", r.Header.Get(constant.UserEmailHeaderKey)) + assert.Equal(t, "access-token", r.Header.Get(constant.AccessTokenHeaderKey)) + assert.Equal(t, version.ClientVersion, r.Header.Get(constant.ClientVersionHeaderKey)) conn, _ := upgrader.Upgrade(w, r, nil) defer conn.Close() } @@ -527,7 +383,7 @@ func (s *ClientTestSuite) TestLogStreamForAuthorizedUser() { s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - err := s.testClient.StreamProcLogs("test-job-id") + err := s.testClient.StreamProcLogs(uint64(42)) assert.NoError(t, err) s.mockConfigLoader.AssertExpectations(t) } @@ -543,7 +399,7 @@ func (s *ClientTestSuite) TestLogStreamForBadWebSocketHandshake() { s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - errStreamLogs := s.testClient.StreamProcLogs("test-job-id") + errStreamLogs := s.testClient.StreamProcLogs(uint64(42)) assert.Equal(t, errors.New("websocket: bad handshake"), errStreamLogs) s.mockConfigLoader.AssertExpectations(t) } @@ -561,13 +417,13 @@ func (s *ClientTestSuite) TestLogStreamForUnauthorizedUser() { s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - errStreamLogs := s.testClient.StreamProcLogs("test-job-id") + errStreamLogs := s.testClient.StreamProcLogs(uint64(42)) assert.Error(t, errors.New(http.StatusText(http.StatusUnauthorized)), errStreamLogs) s.mockConfigLoader.AssertExpectations(t) } -func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusForSucceededProcs() { +func (s *ClientTestSuite) TestGetExecutionContextStatusForSucceededProcs() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token", ProcExecutionStatusPollCount: 1} @@ -575,35 +431,31 @@ func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusForSucceededProcs( httpmock.Activate() defer httpmock.DeactivateAndReset() - expectedProcExecutionStatus := utility.JobSucceeded - responseBody := expectedProcExecutionStatus + expectedExecutionContextStatus := &modelExecution.ExecutionResult{ + ExecutionId: uint64(0), + JobName: "", + ExecutionName: "", + ImageTag: "", + CreatedAt: "", + UpdatedAt: "", + Status: constant.JobSucceeded, + } + responseBody := fmt.Sprintf(`{ "status": "%s" }`, constant.JobSucceeded) - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/execute/some-proc-name/status", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(200, responseBody), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(200, responseBody) + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+ExecutionRoute+"/42/status", mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - procExecutionStatus, err := s.testClient.GetDefinitiveProcExecutionStatus("some-proc-name") + executionContextStatus, err := s.testClient.GetExecutionContextStatus(uint64(42)) assert.NoError(t, err) s.mockConfigLoader.AssertExpectations(t) - assert.Equal(t, expectedProcExecutionStatus, procExecutionStatus) + assert.Equal(t, expectedExecutionContextStatus, executionContextStatus) } -func (s *ClientTestSuite) 
TestGetDefinitiveProcExecutionStatusForFailedProcs() { +func (s *ClientTestSuite) TestGetExecutionContextStatusForFailedProcs() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token", ProcExecutionStatusPollCount: 1} @@ -611,35 +463,31 @@ func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusForFailedProcs() { httpmock.Activate() defer httpmock.DeactivateAndReset() - expectedProcExecutionStatus := utility.JobFailed - responseBody := expectedProcExecutionStatus + expectedExecutionContextStatus := &modelExecution.ExecutionResult{ + ExecutionId: uint64(0), + JobName: "", + ExecutionName: "", + ImageTag: "", + CreatedAt: "", + UpdatedAt: "", + Status: constant.JobFailed, + } + responseBody := fmt.Sprintf(`{ "status": "%s" }`, constant.JobFailed) - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/execute/some-proc-name/status", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(200, responseBody), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(200, responseBody) + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+ExecutionRoute+"/42/status", mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - procExecutionStatus, err := s.testClient.GetDefinitiveProcExecutionStatus("some-proc-name") + executionContextStatus, err := s.testClient.GetExecutionContextStatus(uint64(42)) assert.NoError(t, err) s.mockConfigLoader.AssertExpectations(t) - assert.Equal(t, expectedProcExecutionStatus, procExecutionStatus) + assert.Equal(t, expectedExecutionContextStatus, executionContextStatus) } -func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusForHTTPRequestFailure() { +func (s *ClientTestSuite) TestGetExecutionContextStatusForHTTPRequestFailure() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token", ProcExecutionStatusPollCount: 1} @@ -647,32 +495,21 @@ func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusForHTTPRequestFail httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/execute/some-proc-name/status", - func(req *http.Request) (*http.Response, error) { - return nil, TestConnectionError{message: "Unable to reach http://proctor.example.com/", timeout: true} - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + var mockResponse *http.Response = nil + mockError := TestConnectionError{message: "Unable to reach http://proctor.example.com/", timeout: true} + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+ExecutionRoute+"/42/status", mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - procExecutionStatus, err := s.testClient.GetDefinitiveProcExecutionStatus("some-proc-name") + executionContextStatus, err := 
s.testClient.GetExecutionContextStatus(uint64(42)) - assert.Equal(t, errors.New("Connection Timeout!!!\nGet http://proctor.example.com/jobs/execute/some-proc-name/status: Unable to reach http://proctor.example.com/\nPlease check your Internet/VPN connection for connectivity to ProctorD."), err) + assert.Equal(t, errors.New("Connection Timeout!!!\nGet http://proctor.example.com/execution/42/status: Unable to reach http://proctor.example.com/\nPlease check your Internet/VPN connection for connectivity to ProctorD."), err) s.mockConfigLoader.AssertExpectations(t) - assert.Equal(t, "", procExecutionStatus) + var executionResult *modelExecution.ExecutionResult + assert.Equal(t, executionResult, executionContextStatus) } -func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusForNonOKResponse() { +func (s *ClientTestSuite) TestGetExecutionContextStatusForNonOKResponse() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token", ProcExecutionStatusPollCount: 1} @@ -680,32 +517,85 @@ func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusForNonOKResponse() httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - "http://"+proctorConfig.Host+"/jobs/execute/some-proc-name/status", - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(500, ""), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(500, "execute Error") + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+ExecutionRoute+"/42/status", mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - procExecutionStatus, err := s.testClient.GetDefinitiveProcExecutionStatus("some-proc-name") + executionContextStatus, err := s.testClient.GetExecutionContextStatus(uint64(42)) + + assert.Equal(t, errors.New("execute Error"), err) + s.mockConfigLoader.AssertExpectations(t) + var executionResult *modelExecution.ExecutionResult + assert.Equal(t, executionResult, executionContextStatus) +} + +func (s *ClientTestSuite) TestGetExecutionContextStatusWithPollingForCompletedProcs() { + t := s.T() + + proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token", ProcExecutionStatusPollCount: 1} + + httpmock.Activate() + defer httpmock.DeactivateAndReset() + + completedProcs := []struct { + expectedExecutionContextStatus string + executionID uint64 + }{ + {constant.JobSucceeded, uint64(42)}, + {constant.JobFailed, uint64(43)}, + } + + for _, proc := range completedProcs { + expectedExecutionContextStatus := &modelExecution.ExecutionResult{ + ExecutionId: proc.executionID, + JobName: "", + ExecutionName: "", + ImageTag: "", + CreatedAt: "", + UpdatedAt: "", + Status: proc.expectedExecutionContextStatus, + } + responseBody := fmt.Sprintf(`{ "id": %v, "status": "%s" }`, fmt.Sprint(proc.executionID), proc.expectedExecutionContextStatus) + + mockResponse := httpmock.NewStringResponse(200, responseBody) + mockError := error(nil) + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+ExecutionRoute+"/"+fmt.Sprint(proc.executionID)+"/status", mockResponse, mockError) + + 
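(Editor's aside, illustrative only, not part of the diff.) The polling tests in this block only pin down the observable contract of GetExecutionContextStatusWithPolling: keep fetching the execution status until it is JobSucceeded or JobFailed, and give up with a "No definitive status received" error once ProcExecutionStatusPollCount attempts are exhausted. A minimal sketch of that contract, inferred purely from these assertions and relying on the test file's existing imports (fmt, constant, modelExecution); the helper name pollExecutionStatus and the get callback are assumptions, not the client's real code:

	// pollExecutionStatus retries a status lookup until a terminal state is seen
	// or pollCount attempts have been made; it mirrors the behaviour these tests assert.
	func pollExecutionStatus(
		get func(executionID uint64) (*modelExecution.ExecutionResult, error),
		executionID uint64,
		pollCount int,
	) (*modelExecution.ExecutionResult, error) {
		for i := 0; i < pollCount; i++ {
			result, err := get(executionID)
			if err != nil {
				return nil, err
			}
			if result.Status == constant.JobSucceeded || result.Status == constant.JobFailed {
				return result, nil
			}
		}
		return nil, fmt.Errorf("No definitive status received for execution with id %d from proctord", executionID)
	}
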
s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Twice() + + executionContextStatus, err := s.testClient.GetExecutionContextStatusWithPolling(proc.executionID) + + assert.NoError(t, err) + s.mockConfigLoader.AssertExpectations(t) + assert.Equal(t, expectedExecutionContextStatus, executionContextStatus) + } +} + +func (s *ClientTestSuite) TestGetExecutionContextStatusWithPollingForGetError() { + t := s.T() + + proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token", ProcExecutionStatusPollCount: 1} + + httpmock.Activate() + defer httpmock.DeactivateAndReset() + + var mockResponse *http.Response = nil + mockError := TestConnectionError{message: "Unable to reach http://proctor.example.com/", timeout: true} + mockRequest(proctorConfig, "GET", "http://"+proctorConfig.Host+ExecutionRoute+"/42/status", mockResponse, mockError) + + s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Twice() - assert.Equal(t, errors.New("Server Error!!!\nStatus Code: 500, Internal Server Error"), err) + executionContextStatus, err := s.testClient.GetExecutionContextStatusWithPolling(uint64(42)) + + assert.Equal(t, errors.New("Connection Timeout!!!\nGet http://proctor.example.com/execution/42/status: Unable to reach http://proctor.example.com/\nPlease check your Internet/VPN connection for connectivity to ProctorD."), err) s.mockConfigLoader.AssertExpectations(t) - assert.Equal(t, "", procExecutionStatus) + var executionResult *modelExecution.ExecutionResult + assert.Equal(t, executionResult, executionContextStatus) } -func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusWhenPollCountReached() { +func (s *ClientTestSuite) TestGetExecutionContextStatusWithPollingWhenPollCountReached() { t := s.T() expectedRequestsToProctorDCount := 2 @@ -713,8 +603,7 @@ func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusWhenPollCountReach proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token", ProcExecutionStatusPollCount: expectedRequestsToProctorDCount} - expectedProcExecutionStatus := utility.JobWaiting - responseBody := expectedProcExecutionStatus + responseBody := fmt.Sprintf(`{ "status": "%s" }`, constant.JobWaiting) httpmock.Activate() defer httpmock.DeactivateAndReset() @@ -722,55 +611,44 @@ func (s *ClientTestSuite) TestGetDefinitiveProcExecutionStatusWhenPollCountReach httpmock.RegisterStubRequest( httpmock.NewStubRequest( "GET", - "http://"+proctorConfig.Host+"/jobs/execute/some-proc-name/status", + "http://"+proctorConfig.Host+ExecutionRoute+"/42/status", func(req *http.Request) (*http.Response, error) { requestsToProctorDCount += 1 return httpmock.NewStringResponse(200, responseBody), nil }, ).WithHeader( &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, + constant.UserEmailHeaderKey: []string{"proctor@example.com"}, + constant.AccessTokenHeaderKey: []string{"access-token"}, + constant.ClientVersionHeaderKey: []string{version.ClientVersion}, }, ), ) - s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() + s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Times(3) - procExecutionStatus, err := s.testClient.GetDefinitiveProcExecutionStatus("some-proc-name") + executionContextStatus, err := 
s.testClient.GetExecutionContextStatusWithPolling(uint64(42)) - assert.Equal(t, errors.New("No definitive status received for proc name some-proc-name from proctord"), err) + assert.Equal(t, errors.New("No definitive status received for execution with id 42 from proctord"), err) s.mockConfigLoader.AssertExpectations(t) assert.Equal(t, expectedRequestsToProctorDCount, requestsToProctorDCount) - assert.Equal(t, "", procExecutionStatus) + var executionResult *modelExecution.ExecutionResult + assert.Equal(t, executionResult, executionContextStatus) } func (s *ClientTestSuite) TestSuccessDescribeScheduledJob() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - jobID := "8965fce9-5025-43b3-b21c-920c5ff41cd9" - body := `{"id":"8965fce9-5025-43b3-b21c-920c5ff41cd9","name":"run-sample","args":{"ARG_ONE":"sample-value"},"notification_emails":"user@mail.com","time":"*/1 * * * *","tags":"db,backup"}` + jobID := uint64(7) + body := `{"id":7,"jobName":"run-sample","args":{"ARG_ONE":"sample-value"},"notification_emails":"user@mail.com","cron":"*/1 * * * *","tags":"db,backup"}` httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(200, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(200, body) + mockError := error(nil) + mockRequest(proctorConfig, "GET", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%d", jobID), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -786,27 +664,15 @@ func (s *ClientTestSuite) TestDescribeScheduledJobWithInvalidJobID() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - jobID := "invalid-job-id" + jobID := uint64(0) body := "Invalid Job ID" httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(400, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(400, body) + mockError := error(nil) + mockRequest(proctorConfig, "GET", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%d", jobID), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -820,26 +686,14 @@ func (s *ClientTestSuite) TestDescribeScheduledJobWhenJobIDNotFound() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - jobID := "invalid-job-id" + jobID := uint64(7) httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - 
fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(404, "Job not found"), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(404, "Job not found") + mockError := error(nil) + mockRequest(proctorConfig, "GET", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%d", jobID), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -853,32 +707,20 @@ func (s *ClientTestSuite) TestDescribeScheduledJobWitInternalServerError() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - jobID := "invalid-job-id" + jobID := uint64(0) httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(500, ""), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(500, "Schedule Failed") + mockError := error(nil) + mockRequest(proctorConfig, "GET", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%d", jobID), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() _, err := s.testClient.DescribeScheduledProc(jobID) - assert.Equal(t, "Server Error!!!\nStatus Code: 500, Internal Server Error", err.Error()) + assert.Equal(t, "Schedule Failed", err.Error()) s.mockConfigLoader.AssertExpectations(t) } @@ -886,27 +728,15 @@ func (s *ClientTestSuite) TestSuccessListOfScheduledJobs() { t := s.T() proctorConfig := config.ProctorConfig{Host: "proctor.example.com", Email: "proctor@example.com", AccessToken: "access-token"} - jobID := "c3e040b1-c2b8-4d23-bebd-246c8b7c6f87" - body := `[{"id":"c3e040b1-c2b8-4d23-bebd-246c8b7c6f87","name":"run-sample","args":{"ARG2":"bar","ARG3":"test","ARG_ONE1":"foobar"},"notification_emails":"username@mail.com","time":"0 2 * * *","tags":"sample,proctor"}]` + jobID := uint64(7) + body := `[{"id":7,"jobName":"run-sample","args":{"ARG2":"bar","ARG3":"test","ARG_ONE1":"foobar"},"notification_emails":"username@mail.com","cron":"0 2 * * *","tags":"sample,proctor"}]` httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule"), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(200, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(200, body) + mockError := error(nil) + mockRequest(proctorConfig, "GET", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute), mockResponse, mockError) 
s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -927,21 +757,9 @@ func (s *ClientTestSuite) TestSuccessListOfScheduledJobsWhenNoJobsScheduled() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule"), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(204, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(204, body) + mockError := error(nil) + mockRequest(proctorConfig, "GET", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -959,27 +777,15 @@ func (s *ClientTestSuite) TestSuccessListOfScheduledJobsWhenServerReturnInternal httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "GET", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule"), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(500, ""), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(500, "Schedule Error") + mockError := error(nil) + mockRequest(proctorConfig, "GET", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() _, err := s.testClient.ListScheduledProcs() - assert.Equal(t, "Server Error!!!\nStatus Code: 500, Internal Server Error", err.Error()) + assert.Equal(t, "Schedule Error", err.Error()) s.mockConfigLoader.AssertExpectations(t) } @@ -993,21 +799,9 @@ func (s *ClientTestSuite) TestSuccessRemoveScheduledJob() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "DELETE", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(200, body), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(200, body) + mockError := error(nil) + mockRequest(proctorConfig, "DELETE", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%s", jobID), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -1026,21 +820,9 @@ func (s *ClientTestSuite) TestRemoveScheduledJobWithInvalidJobID() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "DELETE", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(400, "Invalid Job ID"), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: 
[]string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(400, "Invalid Job ID") + mockError := error(nil) + mockRequest(proctorConfig, "DELETE", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%s", jobID), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -1059,21 +841,9 @@ func (s *ClientTestSuite) TestRemoveScheduledJobWhenJobIDNotFound() { httpmock.Activate() defer httpmock.DeactivateAndReset() - httpmock.RegisterStubRequest( - httpmock.NewStubRequest( - "DELETE", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), - func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(404, "Job not found"), nil - }, - ).WithHeader( - &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, - }, - ), - ) + mockResponse := httpmock.NewStringResponse(404, "Job not found") + mockError := error(nil) + mockRequest(proctorConfig, "DELETE", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%s", jobID), mockResponse, mockError) s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() @@ -1092,26 +862,32 @@ func (s *ClientTestSuite) TestRemoveScheduledJobWitInternalServerError() { httpmock.Activate() defer httpmock.DeactivateAndReset() + mockResponse := httpmock.NewStringResponse(500, "Schedule Error") + mockError := error(nil) + mockRequest(proctorConfig, "DELETE", fmt.Sprintf("http://"+proctorConfig.Host+ScheduleRoute+"/%s", jobID), mockResponse, mockError) + + s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() + + err := s.testClient.RemoveScheduledProc(jobID) + + assert.Equal(t, "Schedule Error", err.Error()) + s.mockConfigLoader.AssertExpectations(t) +} + +func mockRequest(proctorConfig config.ProctorConfig, method string, url string, mockResponse *http.Response, mockError error) { httpmock.RegisterStubRequest( httpmock.NewStubRequest( - "DELETE", - fmt.Sprintf("http://"+proctorConfig.Host+"/jobs/schedule/%s", jobID), + method, + url, func(req *http.Request) (*http.Response, error) { - return httpmock.NewStringResponse(500, ""), nil + return mockResponse, mockError }, ).WithHeader( &http.Header{ - utility.UserEmailHeaderKey: []string{"proctor@example.com"}, - utility.AccessTokenHeaderKey: []string{"access-token"}, - utility.ClientVersionHeaderKey: []string{version.ClientVersion}, + constant.UserEmailHeaderKey: []string{proctorConfig.Email}, + constant.AccessTokenHeaderKey: []string{proctorConfig.AccessToken}, + constant.ClientVersionHeaderKey: []string{version.ClientVersion}, }, ), ) - - s.mockConfigLoader.On("Load").Return(proctorConfig, config.ConfigError{}).Once() - - err := s.testClient.RemoveScheduledProc(jobID) - - assert.Equal(t, "Server Error!!!\nStatus Code: 500, Internal Server Error", err.Error()) - s.mockConfigLoader.AssertExpectations(t) } diff --git a/internal/app/cli/utility/args/args.go b/internal/app/cli/utility/args/args.go new file mode 100644 index 00000000..fb5c276e --- /dev/null +++ b/internal/app/cli/utility/args/args.go @@ -0,0 +1,22 @@ +package args + +import ( + "fmt" + "strings" + + "github.com/fatih/color" + + "proctor/internal/app/cli/utility/io" +) + +func ParseArg(printer io.Printer, procArgs 
map[string]string, arg string) { + parsedArg := strings.Split(arg, "=") + + if len(parsedArg) < 2 { + printer.Println(fmt.Sprintf("%-40s %-100s", "\nIncorrect variable format\n", arg), color.FgRed) + return + } + + combinedArgValue := strings.Join(parsedArg[1:], "=") + procArgs[parsedArg[0]] = combinedArgValue +} diff --git a/internal/app/cli/utility/args/args_test.go b/internal/app/cli/utility/args/args_test.go new file mode 100644 index 00000000..908f805d --- /dev/null +++ b/internal/app/cli/utility/args/args_test.go @@ -0,0 +1,28 @@ +package args + +import ( + "fmt" + "testing" + + "github.com/fatih/color" + "github.com/stretchr/testify/assert" + + "proctor/internal/app/cli/utility/io" +) + +func TestParseArg(t *testing.T) { + procArgs := make(map[string]string) + mockPrinter := &io.MockPrinter{} + ParseArg(mockPrinter, procArgs, "foo=moo") + assert.Equal(t, procArgs["foo"], "moo") +} + +func TestParseArgError(t *testing.T) { + procArgs := make(map[string]string) + mockPrinter := &io.MockPrinter{} + + mockPrinter.On("Println", fmt.Sprintf("%-40s %-100s", "\nIncorrect variable format\n", "foo"), color.FgRed) + defer mockPrinter.AssertExpectations(t) + + ParseArg(mockPrinter, procArgs, "foo") +} diff --git a/internal/app/cli/utility/file/file.go b/internal/app/cli/utility/file/file.go new file mode 100644 index 00000000..8a5cd343 --- /dev/null +++ b/internal/app/cli/utility/file/file.go @@ -0,0 +1,41 @@ +package file + +import ( + "io/ioutil" + "os" + + "gopkg.in/yaml.v2" + + "proctor/internal/pkg/model/metadata/env" +) + +func ParseYAML(filename string, procArgs map[string]string) error { + file, err := os.Open(filename) + if err != nil { + return err + } + defer file.Close() + + buffer, err := ioutil.ReadAll(file) + + err = yaml.Unmarshal(buffer, &procArgs) + if err != nil { + return err + } + + return nil +} + +func WriteYAML(filename string, procArgs []env.VarMetadata) error { + var content string + + for _, procArg := range procArgs { + content += "# " + procArg.Description + content += "\n" + content += procArg.Name + ":" + content += "\n" + } + + err := ioutil.WriteFile(filename, []byte(content), 0644) + return err +} diff --git a/internal/app/cli/utility/file/file_test.go b/internal/app/cli/utility/file/file_test.go new file mode 100644 index 00000000..5b310111 --- /dev/null +++ b/internal/app/cli/utility/file/file_test.go @@ -0,0 +1,67 @@ +package file + +import ( + "io/ioutil" + "os" + "proctor/internal/pkg/model/metadata/env" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseYAML(t *testing.T) { + filename := "/tmp/yaml-test-parse" + testYAML := []byte("foo: bar\nmoo: zoo") + err := ioutil.WriteFile(filename, testYAML, 0644) + defer os.Remove(filename) + assert.NoError(t, err) + + procArgs := make(map[string]string) + err = ParseYAML(filename, procArgs) + assert.NoError(t, err) + assert.Equal(t, procArgs["foo"], "bar") + assert.Equal(t, procArgs["moo"], "zoo") +} + +func TestParseYAMLError(t *testing.T) { + + errorTests := []struct { + Filename string + ErrorMessage string + }{ + {"/tmp/foo", "no such file or directory"}, + {"/tmp/yaml-test-parse-error", "cannot unmarshal"}, + } + + filename := "/tmp/yaml-test-parse-error" + testYAML := []byte("foo bar") + err := ioutil.WriteFile(filename, testYAML, 0644) + defer os.Remove(filename) + assert.NoError(t, err) + + for _, errorTest := range errorTests { + procArgs := make(map[string]string) + err = ParseYAML(errorTest.Filename, procArgs) + assert.Contains(t, err.Error(), errorTest.ErrorMessage) + } +} + +func 
TestWriteYAML(t *testing.T) { + filename := "/tmp/yaml-test-write" + procArgs := []env.VarMetadata{ + {"foo", "bar"}, + {"moo", "zoo"}, + } + + err := WriteYAML(filename, procArgs) + assert.NoError(t, err) + defer os.Remove(filename) + + file, err := os.Open(filename) + assert.NoError(t, err) + defer file.Close() + + buffer, err := ioutil.ReadAll(file) + assert.NoError(t, err) + assert.Equal(t, buffer, []byte("# bar\nfoo:\n# zoo\nmoo:\n")) +} diff --git a/io/printer.go b/internal/app/cli/utility/io/printer.go similarity index 100% rename from io/printer.go rename to internal/app/cli/utility/io/printer.go diff --git a/io/printer_mock.go b/internal/app/cli/utility/io/printer_mock.go similarity index 100% rename from io/printer_mock.go rename to internal/app/cli/utility/io/printer_mock.go diff --git a/utility/sort/sort.go b/internal/app/cli/utility/sort/sort.go similarity index 81% rename from utility/sort/sort.go rename to internal/app/cli/utility/sort/sort.go index 29f73924..e7816c2b 100644 --- a/utility/sort/sort.go +++ b/internal/app/cli/utility/sort/sort.go @@ -1,7 +1,7 @@ package sort import ( - "proctor/proctord/jobs/metadata" + "proctor/internal/pkg/model/metadata" "sort" ) diff --git a/utility/sort/sort_test.go b/internal/app/cli/utility/sort/sort_test.go similarity index 91% rename from utility/sort/sort_test.go rename to internal/app/cli/utility/sort/sort_test.go index 958b1dfd..a8e16edc 100644 --- a/utility/sort/sort_test.go +++ b/internal/app/cli/utility/sort/sort_test.go @@ -1,8 +1,8 @@ package sort import ( - "proctor/proctord/jobs/metadata" "github.com/stretchr/testify/assert" + "proctor/internal/pkg/model/metadata" "testing" ) diff --git a/proctord/docs/css/print.css b/internal/app/service/docs/css/print.css similarity index 100% rename from proctord/docs/css/print.css rename to internal/app/service/docs/css/print.css diff --git a/proctord/docs/css/reset.css b/internal/app/service/docs/css/reset.css similarity index 100% rename from proctord/docs/css/reset.css rename to internal/app/service/docs/css/reset.css diff --git a/proctord/docs/css/screen.css b/internal/app/service/docs/css/screen.css similarity index 100% rename from proctord/docs/css/screen.css rename to internal/app/service/docs/css/screen.css diff --git a/proctord/docs/css/style.css b/internal/app/service/docs/css/style.css similarity index 100% rename from proctord/docs/css/style.css rename to internal/app/service/docs/css/style.css diff --git a/proctord/docs/css/typography.css b/internal/app/service/docs/css/typography.css similarity index 100% rename from proctord/docs/css/typography.css rename to internal/app/service/docs/css/typography.css diff --git a/proctord/docs/fonts/DroidSans-Bold.ttf b/internal/app/service/docs/fonts/DroidSans-Bold.ttf similarity index 100% rename from proctord/docs/fonts/DroidSans-Bold.ttf rename to internal/app/service/docs/fonts/DroidSans-Bold.ttf diff --git a/proctord/docs/fonts/DroidSans.ttf b/internal/app/service/docs/fonts/DroidSans.ttf similarity index 100% rename from proctord/docs/fonts/DroidSans.ttf rename to internal/app/service/docs/fonts/DroidSans.ttf diff --git a/proctord/docs/handler.go b/internal/app/service/docs/handler.go similarity index 100% rename from proctord/docs/handler.go rename to internal/app/service/docs/handler.go diff --git a/proctord/docs/images/collapse.gif b/internal/app/service/docs/images/collapse.gif similarity index 100% rename from proctord/docs/images/collapse.gif rename to internal/app/service/docs/images/collapse.gif diff --git 
a/proctord/docs/images/expand.gif b/internal/app/service/docs/images/expand.gif similarity index 100% rename from proctord/docs/images/expand.gif rename to internal/app/service/docs/images/expand.gif diff --git a/proctord/docs/images/explorer_icons.png b/internal/app/service/docs/images/explorer_icons.png similarity index 100% rename from proctord/docs/images/explorer_icons.png rename to internal/app/service/docs/images/explorer_icons.png diff --git a/proctord/docs/images/favicon-16x16.png b/internal/app/service/docs/images/favicon-16x16.png similarity index 100% rename from proctord/docs/images/favicon-16x16.png rename to internal/app/service/docs/images/favicon-16x16.png diff --git a/proctord/docs/images/favicon-32x32.png b/internal/app/service/docs/images/favicon-32x32.png similarity index 100% rename from proctord/docs/images/favicon-32x32.png rename to internal/app/service/docs/images/favicon-32x32.png diff --git a/proctord/docs/images/favicon.ico b/internal/app/service/docs/images/favicon.ico similarity index 100% rename from proctord/docs/images/favicon.ico rename to internal/app/service/docs/images/favicon.ico diff --git a/proctord/docs/images/logo_small.png b/internal/app/service/docs/images/logo_small.png similarity index 100% rename from proctord/docs/images/logo_small.png rename to internal/app/service/docs/images/logo_small.png diff --git a/proctord/docs/images/pet_store_api.png b/internal/app/service/docs/images/pet_store_api.png similarity index 100% rename from proctord/docs/images/pet_store_api.png rename to internal/app/service/docs/images/pet_store_api.png diff --git a/proctord/docs/images/throbber.gif b/internal/app/service/docs/images/throbber.gif similarity index 100% rename from proctord/docs/images/throbber.gif rename to internal/app/service/docs/images/throbber.gif diff --git a/proctord/docs/images/wordnik_api.png b/internal/app/service/docs/images/wordnik_api.png similarity index 100% rename from proctord/docs/images/wordnik_api.png rename to internal/app/service/docs/images/wordnik_api.png diff --git a/proctord/docs/index.html b/internal/app/service/docs/index.html similarity index 100% rename from proctord/docs/index.html rename to internal/app/service/docs/index.html diff --git a/proctord/docs/lang/ca.js b/internal/app/service/docs/lang/ca.js similarity index 100% rename from proctord/docs/lang/ca.js rename to internal/app/service/docs/lang/ca.js diff --git a/proctord/docs/lang/el.js b/internal/app/service/docs/lang/el.js similarity index 100% rename from proctord/docs/lang/el.js rename to internal/app/service/docs/lang/el.js diff --git a/proctord/docs/lang/en.js b/internal/app/service/docs/lang/en.js similarity index 100% rename from proctord/docs/lang/en.js rename to internal/app/service/docs/lang/en.js diff --git a/proctord/docs/lang/es.js b/internal/app/service/docs/lang/es.js similarity index 100% rename from proctord/docs/lang/es.js rename to internal/app/service/docs/lang/es.js diff --git a/proctord/docs/lang/fr.js b/internal/app/service/docs/lang/fr.js similarity index 100% rename from proctord/docs/lang/fr.js rename to internal/app/service/docs/lang/fr.js diff --git a/proctord/docs/lang/geo.js b/internal/app/service/docs/lang/geo.js similarity index 100% rename from proctord/docs/lang/geo.js rename to internal/app/service/docs/lang/geo.js diff --git a/proctord/docs/lang/it.js b/internal/app/service/docs/lang/it.js similarity index 100% rename from proctord/docs/lang/it.js rename to internal/app/service/docs/lang/it.js diff --git 
a/proctord/docs/lang/ja.js b/internal/app/service/docs/lang/ja.js similarity index 100% rename from proctord/docs/lang/ja.js rename to internal/app/service/docs/lang/ja.js diff --git a/proctord/docs/lang/ko-kr.js b/internal/app/service/docs/lang/ko-kr.js similarity index 100% rename from proctord/docs/lang/ko-kr.js rename to internal/app/service/docs/lang/ko-kr.js diff --git a/proctord/docs/lang/pl.js b/internal/app/service/docs/lang/pl.js similarity index 100% rename from proctord/docs/lang/pl.js rename to internal/app/service/docs/lang/pl.js diff --git a/proctord/docs/lang/pt.js b/internal/app/service/docs/lang/pt.js similarity index 100% rename from proctord/docs/lang/pt.js rename to internal/app/service/docs/lang/pt.js diff --git a/proctord/docs/lang/ru.js b/internal/app/service/docs/lang/ru.js similarity index 100% rename from proctord/docs/lang/ru.js rename to internal/app/service/docs/lang/ru.js diff --git a/proctord/docs/lang/tr.js b/internal/app/service/docs/lang/tr.js similarity index 100% rename from proctord/docs/lang/tr.js rename to internal/app/service/docs/lang/tr.js diff --git a/proctord/docs/lang/translator.js b/internal/app/service/docs/lang/translator.js similarity index 100% rename from proctord/docs/lang/translator.js rename to internal/app/service/docs/lang/translator.js diff --git a/proctord/docs/lang/zh-cn.js b/internal/app/service/docs/lang/zh-cn.js similarity index 100% rename from proctord/docs/lang/zh-cn.js rename to internal/app/service/docs/lang/zh-cn.js diff --git a/proctord/docs/lib/backbone-min.js b/internal/app/service/docs/lib/backbone-min.js similarity index 100% rename from proctord/docs/lib/backbone-min.js rename to internal/app/service/docs/lib/backbone-min.js diff --git a/proctord/docs/lib/es5-shim.js b/internal/app/service/docs/lib/es5-shim.js similarity index 100% rename from proctord/docs/lib/es5-shim.js rename to internal/app/service/docs/lib/es5-shim.js diff --git a/proctord/docs/lib/handlebars-4.0.5.js b/internal/app/service/docs/lib/handlebars-4.0.5.js similarity index 100% rename from proctord/docs/lib/handlebars-4.0.5.js rename to internal/app/service/docs/lib/handlebars-4.0.5.js diff --git a/proctord/docs/lib/highlight.9.1.0.pack.js b/internal/app/service/docs/lib/highlight.9.1.0.pack.js similarity index 100% rename from proctord/docs/lib/highlight.9.1.0.pack.js rename to internal/app/service/docs/lib/highlight.9.1.0.pack.js diff --git a/proctord/docs/lib/highlight.9.1.0.pack_extended.js b/internal/app/service/docs/lib/highlight.9.1.0.pack_extended.js similarity index 100% rename from proctord/docs/lib/highlight.9.1.0.pack_extended.js rename to internal/app/service/docs/lib/highlight.9.1.0.pack_extended.js diff --git a/proctord/docs/lib/jquery-1.8.0.min.js b/internal/app/service/docs/lib/jquery-1.8.0.min.js similarity index 100% rename from proctord/docs/lib/jquery-1.8.0.min.js rename to internal/app/service/docs/lib/jquery-1.8.0.min.js diff --git a/proctord/docs/lib/jquery.ba-bbq.min.js b/internal/app/service/docs/lib/jquery.ba-bbq.min.js similarity index 100% rename from proctord/docs/lib/jquery.ba-bbq.min.js rename to internal/app/service/docs/lib/jquery.ba-bbq.min.js diff --git a/proctord/docs/lib/jquery.slideto.min.js b/internal/app/service/docs/lib/jquery.slideto.min.js similarity index 100% rename from proctord/docs/lib/jquery.slideto.min.js rename to internal/app/service/docs/lib/jquery.slideto.min.js diff --git a/proctord/docs/lib/jquery.wiggle.min.js b/internal/app/service/docs/lib/jquery.wiggle.min.js similarity index 100% rename 
from proctord/docs/lib/jquery.wiggle.min.js rename to internal/app/service/docs/lib/jquery.wiggle.min.js diff --git a/proctord/docs/lib/js-yaml.min.js b/internal/app/service/docs/lib/js-yaml.min.js similarity index 100% rename from proctord/docs/lib/js-yaml.min.js rename to internal/app/service/docs/lib/js-yaml.min.js diff --git a/proctord/docs/lib/jsoneditor.min.js b/internal/app/service/docs/lib/jsoneditor.min.js similarity index 100% rename from proctord/docs/lib/jsoneditor.min.js rename to internal/app/service/docs/lib/jsoneditor.min.js diff --git a/proctord/docs/lib/lodash.min.js b/internal/app/service/docs/lib/lodash.min.js similarity index 100% rename from proctord/docs/lib/lodash.min.js rename to internal/app/service/docs/lib/lodash.min.js diff --git a/proctord/docs/lib/marked.js b/internal/app/service/docs/lib/marked.js similarity index 100% rename from proctord/docs/lib/marked.js rename to internal/app/service/docs/lib/marked.js diff --git a/proctord/docs/lib/object-assign-pollyfill.js b/internal/app/service/docs/lib/object-assign-pollyfill.js similarity index 100% rename from proctord/docs/lib/object-assign-pollyfill.js rename to internal/app/service/docs/lib/object-assign-pollyfill.js diff --git a/proctord/docs/lib/sanitize-html.min.js b/internal/app/service/docs/lib/sanitize-html.min.js similarity index 100% rename from proctord/docs/lib/sanitize-html.min.js rename to internal/app/service/docs/lib/sanitize-html.min.js diff --git a/proctord/docs/lib/swagger-oauth.js b/internal/app/service/docs/lib/swagger-oauth.js similarity index 100% rename from proctord/docs/lib/swagger-oauth.js rename to internal/app/service/docs/lib/swagger-oauth.js diff --git a/proctord/docs/o2c.html b/internal/app/service/docs/o2c.html similarity index 100% rename from proctord/docs/o2c.html rename to internal/app/service/docs/o2c.html diff --git a/proctord/docs/swagger-ui.js b/internal/app/service/docs/swagger-ui.js similarity index 98% rename from proctord/docs/swagger-ui.js rename to internal/app/service/docs/swagger-ui.js index cd008ef9..d5780c21 100644 --- a/proctord/docs/swagger-ui.js +++ b/internal/app/service/docs/swagger-ui.js @@ -3181,8 +3181,8 @@ Handlebars.registerHelper('escape', function (value) { (function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.sanitizeHtml=f()}})(function(){var define,module,exports;return function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o=0){globRegex.push(quoteRegexp(name).replace(/\\\*/g,".*"))}else{allowedAttributesMap[tag].push(name)}});allowedAttributesGlobMap[tag]=new RegExp("^("+globRegex.join("|")+")$")})}var allowedClassesMap={};each(options.allowedClasses,function(classes,tag){if(allowedAttributesMap){if(!has(allowedAttributesMap,tag)){allowedAttributesMap[tag]=[]}allowedAttributesMap[tag].push("class")}allowedClassesMap[tag]=classes});var transformTagsMap={};var transformTagsAll;each(options.transformTags,function(transform,tag){var transFun;if(typeof 
transform==="function"){transFun=transform}else if(typeof transform==="string"){transFun=sanitizeHtml.simpleTransform(transform)}if(tag==="*"){transformTagsAll=transFun}else{transformTagsMap[tag]=transFun}});var depth=0;var stack=[];var skipMap={};var transformMap={};var skipText=false;var skipTextDepth=0;var parser=new htmlparser.Parser({onopentag:function(name,attribs){if(skipText){skipTextDepth++;return}var frame=new Frame(name,attribs);stack.push(frame);var skip=false;var hasText=frame.text?true:false;var transformedTag;if(has(transformTagsMap,name)){transformedTag=transformTagsMap[name](name,attribs);frame.attribs=attribs=transformedTag.attribs;if(transformedTag.text!==undefined){frame.innerText=transformedTag.text}if(name!==transformedTag.tagName){frame.name=name=transformedTag.tagName;transformMap[depth]=transformedTag.tagName}}if(transformTagsAll){transformedTag=transformTagsAll(name,attribs);frame.attribs=attribs=transformedTag.attribs;if(name!==transformedTag.tagName){frame.name=name=transformedTag.tagName;transformMap[depth]=transformedTag.tagName}}if(options.allowedTags&&options.allowedTags.indexOf(name)===-1){skip=true;if(nonTextTagsArray.indexOf(name)!==-1){skipText=true;skipTextDepth=1}skipMap[depth]=true}depth++;if(skip){return}result+="<"+name;if(!allowedAttributesMap||has(allowedAttributesMap,name)||allowedAttributesMap["*"]){each(attribs,function(value,a){if(!allowedAttributesMap||has(allowedAttributesMap,name)&&allowedAttributesMap[name].indexOf(a)!==-1||allowedAttributesMap["*"]&&allowedAttributesMap["*"].indexOf(a)!==-1||has(allowedAttributesGlobMap,name)&&allowedAttributesGlobMap[name].test(a)||allowedAttributesGlobMap["*"]&&allowedAttributesGlobMap["*"].test(a)){if(a==="href"||a==="src"){if(naughtyHref(name,value)){delete frame.attribs[a];return}}if(a==="class"){value=filterClasses(value,allowedClassesMap[name]);if(!value.length){delete frame.attribs[a];return}}result+=" "+a;if(value.length){result+='="'+escapeHtml(value)+'"'}}else{delete frame.attribs[a]}})}if(options.selfClosing.indexOf(name)!==-1){result+=" />"}else{result+=">";if(frame.innerText&&!hasText&&!options.textFilter){result+=frame.innerText}}},ontext:function(text){if(skipText){return}var lastFrame=stack[stack.length-1];var tag;if(lastFrame){tag=lastFrame.tag;text=lastFrame.innerText!==undefined?lastFrame.innerText:text}if(tag==="script"||tag==="style"){result+=text}else{var escaped=escapeHtml(text);if(options.textFilter){result+=options.textFilter(escaped)}else{result+=escaped}}if(stack.length){var frame=stack[stack.length-1];frame.text+=text}},onclosetag:function(name){if(skipText){skipTextDepth--;if(!skipTextDepth){skipText=false}else{return}}var frame=stack.pop();if(!frame){return}skipText=false;depth--;if(skipMap[depth]){delete skipMap[depth];frame.updateParentNodeText();return}if(transformMap[depth]){name=transformMap[depth];delete transformMap[depth]}if(options.exclusiveFilter&&options.exclusiveFilter(frame)){result=result.substr(0,frame.tagPosition);return}frame.updateParentNodeText();if(options.selfClosing.indexOf(name)!==-1){return}result+=""}},options.parser);parser.write(html);parser.end();return result;function escapeHtml(s){if(typeof s!=="string"){s=s+""}return s.replace(/\&/g,"&").replace(//g,">").replace(/\"/g,""")}function naughtyHref(name,href){href=href.replace(/[\x00-\x20]+/g,"");href=href.replace(/<\!\-\-.*?\-\-\>/g,"");var matches=href.match(/^([a-zA-Z]+)\:/);if(!matches){return false}var scheme=matches[1].toLowerCase();if(has(options.allowedSchemesByTag,name)){return 
options.allowedSchemesByTag[name].indexOf(scheme)===-1}return!options.allowedSchemes||options.allowedSchemes.indexOf(scheme)===-1}function filterClasses(classes,allowed){if(!allowed){return classes}classes=classes.split(/\s+/);return classes.filter(function(clss){return allowed.indexOf(clss)!==-1}).join(" ")}}var htmlParserDefaults={decodeEntities:true};sanitizeHtml.defaults={allowedTags:["h3","h4","h5","h6","blockquote","p","a","ul","ol","nl","li","b","i","strong","em","strike","code","hr","br","div","table","thead","caption","tbody","tr","th","td","pre"],allowedAttributes:{a:["href","name","target"],img:["src"]},selfClosing:["img","br","hr","area","base","basefont","input","link","meta"],allowedSchemes:["http","https","ftp","mailto"],allowedSchemesByTag:{}};sanitizeHtml.simpleTransform=function(newTagName,newAttribs,merge){merge=merge===undefined?true:merge;newAttribs=newAttribs||{};return function(tagName,attribs){var attrib;if(merge){for(attrib in newAttribs){attribs[attrib]=newAttribs[attrib]}}else{attribs=newAttribs}return{tagName:newTagName,attribs:attribs}}}},{htmlparser2:36,"regexp-quote":54,xtend:58}],2:[function(require,module,exports){"use strict";exports.toByteArray=toByteArray;exports.fromByteArray=fromByteArray;var lookup=[];var revLookup=[];var Arr=typeof Uint8Array!=="undefined"?Uint8Array:Array;function init(){var code="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";for(var i=0,len=code.length;i0){throw new Error("Invalid string. Length must be a multiple of 4")}placeHolders=b64[len-2]==="="?2:b64[len-1]==="="?1:0;arr=new Arr(len*3/4-placeHolders);l=placeHolders>0?len-4:len;var L=0;for(i=0,j=0;i>16&255;arr[L++]=tmp>>8&255;arr[L++]=tmp&255}if(placeHolders===2){tmp=revLookup[b64.charCodeAt(i)]<<2|revLookup[b64.charCodeAt(i+1)]>>4;arr[L++]=tmp&255}else if(placeHolders===1){tmp=revLookup[b64.charCodeAt(i)]<<10|revLookup[b64.charCodeAt(i+1)]<<4|revLookup[b64.charCodeAt(i+2)]>>2;arr[L++]=tmp>>8&255;arr[L++]=tmp&255}return arr}function tripletToBase64(num){return lookup[num>>18&63]+lookup[num>>12&63]+lookup[num>>6&63]+lookup[num&63]}function encodeChunk(uint8,start,end){var tmp;var output=[];for(var i=start;ilen2?len2:i+maxChunkLength))}if(extraBytes===1){tmp=uint8[len-1];output+=lookup[tmp>>2];output+=lookup[tmp<<4&63];output+="=="}else if(extraBytes===2){tmp=(uint8[len-2]<<8)+uint8[len-1];output+=lookup[tmp>>10];output+=lookup[tmp>>4&63];output+=lookup[tmp<<2&63];output+="="}parts.push(output);return parts.join("")}},{}],3:[function(require,module,exports){},{}],4:[function(require,module,exports){(function(global){"use strict";var buffer=require("buffer");var Buffer=buffer.Buffer;var SlowBuffer=buffer.SlowBuffer;var MAX_LEN=buffer.kMaxLength||2147483647;exports.alloc=function alloc(size,fill,encoding){if(typeof Buffer.alloc==="function"){return Buffer.alloc(size,fill,encoding)}if(typeof encoding==="number"){throw new TypeError("encoding must not be number")}if(typeof size!=="number"){throw new TypeError("size must be a number")}if(size>MAX_LEN){throw new RangeError("size is too large")}var enc=encoding;var _fill=fill;if(_fill===undefined){enc=undefined;_fill=0}var buf=new Buffer(size);if(typeof _fill==="string"){var fillBuf=new Buffer(_fill,enc);var flen=fillBuf.length;var i=-1;while(++iMAX_LEN){throw new RangeError("size is too large")}return new Buffer(size)};exports.from=function from(value,encodingOrOffset,length){if(typeof Buffer.from==="function"&&(!global.Uint8Array||Uint8Array.from!==Buffer.from)){return 
Buffer.from(value,encodingOrOffset,length)}if(typeof value==="number"){throw new TypeError('"value" argument must not be a number')}if(typeof value==="string"){return new Buffer(value,encodingOrOffset)}if(typeof ArrayBuffer!=="undefined"&&value instanceof ArrayBuffer){var offset=encodingOrOffset;if(arguments.length===1){return new Buffer(value)}if(typeof offset==="undefined"){offset=0}var len=length;if(typeof len==="undefined"){len=value.byteLength-offset}if(offset>=value.byteLength){throw new RangeError("'offset' is out of bounds")}if(len>value.byteLength-offset){throw new RangeError("'length' is out of bounds")}return new Buffer(value.slice(offset,offset+len))}if(Buffer.isBuffer(value)){var out=new Buffer(value.length);value.copy(out,0,0,value.length);return out}if(value){if(Array.isArray(value)||typeof ArrayBuffer!=="undefined"&&value.buffer instanceof ArrayBuffer||"length"in value){return new Buffer(value)}if(value.type==="Buffer"&&Array.isArray(value.data)){return new Buffer(value.data)}}throw new TypeError("First argument must be a string, Buffer, "+"ArrayBuffer, Array, or array-like object.")};exports.allocUnsafeSlow=function allocUnsafeSlow(size){if(typeof Buffer.allocUnsafeSlow==="function"){return Buffer.allocUnsafeSlow(size)}if(typeof size!=="number"){throw new TypeError("size must be a number")}if(size>=MAX_LEN){throw new RangeError("size is too large")}return new SlowBuffer(size)}}).call(this,typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{})},{buffer:5}],5:[function(require,module,exports){(function(global){"use strict";var base64=require("base64-js");var ieee754=require("ieee754");var isArray=require("isarray");exports.Buffer=Buffer;exports.SlowBuffer=SlowBuffer;exports.INSPECT_MAX_BYTES=50;Buffer.TYPED_ARRAY_SUPPORT=global.TYPED_ARRAY_SUPPORT!==undefined?global.TYPED_ARRAY_SUPPORT:typedArraySupport();exports.kMaxLength=kMaxLength();function typedArraySupport(){try{var arr=new Uint8Array(1);arr.__proto__={__proto__:Uint8Array.prototype,foo:function(){return 42}};return arr.foo()===42&&typeof arr.subarray==="function"&&arr.subarray(1,1).byteLength===0}catch(e){return false}}function kMaxLength(){return Buffer.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function createBuffer(that,length){if(kMaxLength()=kMaxLength()){throw new RangeError("Attempt to allocate Buffer larger than maximum "+"size: 0x"+kMaxLength().toString(16)+" bytes")}return length|0}function SlowBuffer(length){if(+length!=length){length=0}return Buffer.alloc(+length)}Buffer.isBuffer=function isBuffer(b){return!!(b!=null&&b._isBuffer)};Buffer.compare=function compare(a,b){if(!Buffer.isBuffer(a)||!Buffer.isBuffer(b)){throw new TypeError("Arguments must be Buffers")}if(a===b)return 0;var x=a.length;var y=b.length;for(var i=0,len=Math.min(x,y);i>>1;case"base64":return base64ToBytes(string).length;default:if(loweredCase)return utf8ToBytes(string).length;encoding=(""+encoding).toLowerCase();loweredCase=true}}}Buffer.byteLength=byteLength;function slowToString(encoding,start,end){var loweredCase=false;if(start===undefined||start<0){start=0}if(start>this.length){return""}if(end===undefined||end>this.length){end=this.length}if(end<=0){return""}end>>>=0;start>>>=0;if(end<=start){return""}if(!encoding)encoding="utf8";while(true){switch(encoding){case"hex":return hexSlice(this,start,end);case"utf8":case"utf-8":return utf8Slice(this,start,end);case"ascii":return asciiSlice(this,start,end);case"latin1":case"binary":return latin1Slice(this,start,end);case"base64":return 
base64Slice(this,start,end);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return utf16leSlice(this,start,end);default:if(loweredCase)throw new TypeError("Unknown encoding: "+encoding);encoding=(encoding+"").toLowerCase();loweredCase=true}}}Buffer.prototype._isBuffer=true;function swap(b,n,m){var i=b[n];b[n]=b[m];b[m]=i}Buffer.prototype.swap16=function swap16(){var len=this.length;if(len%2!==0){throw new RangeError("Buffer size must be a multiple of 16-bits")}for(var i=0;i0){str=this.toString("hex",0,max).match(/.{2}/g).join(" ");if(this.length>max)str+=" ... "}return""};Buffer.prototype.compare=function compare(target,start,end,thisStart,thisEnd){if(!Buffer.isBuffer(target)){throw new TypeError("Argument must be a Buffer")}if(start===undefined){start=0}if(end===undefined){end=target?target.length:0}if(thisStart===undefined){thisStart=0}if(thisEnd===undefined){thisEnd=this.length}if(start<0||end>target.length||thisStart<0||thisEnd>this.length){throw new RangeError("out of range index")}if(thisStart>=thisEnd&&start>=end){return 0}if(thisStart>=thisEnd){return-1}if(start>=end){return 1}start>>>=0;end>>>=0;thisStart>>>=0;thisEnd>>>=0;if(this===target)return 0;var x=thisEnd-thisStart;var y=end-start;var len=Math.min(x,y);var thisCopy=this.slice(thisStart,thisEnd);var targetCopy=target.slice(start,end);for(var i=0;i2147483647){byteOffset=2147483647}else if(byteOffset<-2147483648){byteOffset=-2147483648}byteOffset=+byteOffset;if(isNaN(byteOffset)){byteOffset=dir?0:buffer.length-1}if(byteOffset<0)byteOffset=buffer.length+byteOffset;if(byteOffset>=buffer.length){if(dir)return-1;else byteOffset=buffer.length-1}else if(byteOffset<0){if(dir)byteOffset=0;else return-1}if(typeof val==="string"){val=Buffer.from(val,encoding)}if(Buffer.isBuffer(val)){if(val.length===0){return-1}return arrayIndexOf(buffer,val,byteOffset,encoding,dir)}else if(typeof val==="number"){val=val&255;if(Buffer.TYPED_ARRAY_SUPPORT&&typeof Uint8Array.prototype.indexOf==="function"){if(dir){return Uint8Array.prototype.indexOf.call(buffer,val,byteOffset)}else{return Uint8Array.prototype.lastIndexOf.call(buffer,val,byteOffset)}}return arrayIndexOf(buffer,[val],byteOffset,encoding,dir)}throw new TypeError("val must be string, number or Buffer")}function arrayIndexOf(arr,val,byteOffset,encoding,dir){var indexSize=1;var arrLength=arr.length;var valLength=val.length;if(encoding!==undefined){encoding=String(encoding).toLowerCase();if(encoding==="ucs2"||encoding==="ucs-2"||encoding==="utf16le"||encoding==="utf-16le"){if(arr.length<2||val.length<2){return-1}indexSize=2;arrLength/=2;valLength/=2;byteOffset/=2}}function read(buf,i){if(indexSize===1){return buf[i]}else{return buf.readUInt16BE(i*indexSize)}}var i;if(dir){var foundIndex=-1;for(i=byteOffset;iarrLength)byteOffset=arrLength-valLength;for(i=byteOffset;i>=0;i--){var found=true;for(var j=0;jremaining){length=remaining}}var strLen=string.length;if(strLen%2!==0)throw new TypeError("Invalid hex string");if(length>strLen/2){length=strLen/2}for(var i=0;iremaining)length=remaining;if(string.length>0&&(length<0||offset<0)||offset>this.length){throw new RangeError("Attempt to write outside buffer bounds")}if(!encoding)encoding="utf8";var loweredCase=false;for(;;){switch(encoding){case"hex":return hexWrite(this,string,offset,length);case"utf8":case"utf-8":return utf8Write(this,string,offset,length);case"ascii":return asciiWrite(this,string,offset,length);case"latin1":case"binary":return latin1Write(this,string,offset,length);case"base64":return 
base64Write(this,string,offset,length);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return ucs2Write(this,string,offset,length);default:if(loweredCase)throw new TypeError("Unknown encoding: "+encoding);encoding=(""+encoding).toLowerCase();loweredCase=true}}};Buffer.prototype.toJSON=function toJSON(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};function base64Slice(buf,start,end){if(start===0&&end===buf.length){return base64.fromByteArray(buf)}else{return base64.fromByteArray(buf.slice(start,end))}}function utf8Slice(buf,start,end){end=Math.min(buf.length,end);var res=[];var i=start;while(i239?4:firstByte>223?3:firstByte>191?2:1;if(i+bytesPerSequence<=end){var secondByte,thirdByte,fourthByte,tempCodePoint;switch(bytesPerSequence){case 1:if(firstByte<128){codePoint=firstByte}break;case 2:secondByte=buf[i+1];if((secondByte&192)===128){tempCodePoint=(firstByte&31)<<6|secondByte&63;if(tempCodePoint>127){codePoint=tempCodePoint}}break;case 3:secondByte=buf[i+1];thirdByte=buf[i+2];if((secondByte&192)===128&&(thirdByte&192)===128){tempCodePoint=(firstByte&15)<<12|(secondByte&63)<<6|thirdByte&63;if(tempCodePoint>2047&&(tempCodePoint<55296||tempCodePoint>57343)){codePoint=tempCodePoint}}break;case 4:secondByte=buf[i+1];thirdByte=buf[i+2];fourthByte=buf[i+3];if((secondByte&192)===128&&(thirdByte&192)===128&&(fourthByte&192)===128){tempCodePoint=(firstByte&15)<<18|(secondByte&63)<<12|(thirdByte&63)<<6|fourthByte&63;if(tempCodePoint>65535&&tempCodePoint<1114112){codePoint=tempCodePoint}}}}if(codePoint===null){codePoint=65533;bytesPerSequence=1}else if(codePoint>65535){codePoint-=65536;res.push(codePoint>>>10&1023|55296);codePoint=56320|codePoint&1023}res.push(codePoint);i+=bytesPerSequence}return decodeCodePointsArray(res)}var MAX_ARGUMENTS_LENGTH=4096;function decodeCodePointsArray(codePoints){var len=codePoints.length;if(len<=MAX_ARGUMENTS_LENGTH){return String.fromCharCode.apply(String,codePoints)}var res="";var i=0;while(ilen)end=len;var out="";for(var i=start;ilen){start=len}if(end<0){end+=len;if(end<0)end=0}else if(end>len){end=len}if(endlength)throw new RangeError("Trying to access beyond buffer length")}Buffer.prototype.readUIntLE=function readUIntLE(offset,byteLength,noAssert){offset=offset|0;byteLength=byteLength|0;if(!noAssert)checkOffset(offset,byteLength,this.length);var val=this[offset];var mul=1;var i=0;while(++i0&&(mul*=256)){val+=this[offset+--byteLength]*mul}return val};Buffer.prototype.readUInt8=function readUInt8(offset,noAssert){if(!noAssert)checkOffset(offset,1,this.length);return this[offset]};Buffer.prototype.readUInt16LE=function readUInt16LE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);return this[offset]|this[offset+1]<<8};Buffer.prototype.readUInt16BE=function readUInt16BE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);return this[offset]<<8|this[offset+1]};Buffer.prototype.readUInt32LE=function readUInt32LE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return(this[offset]|this[offset+1]<<8|this[offset+2]<<16)+this[offset+3]*16777216};Buffer.prototype.readUInt32BE=function readUInt32BE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return this[offset]*16777216+(this[offset+1]<<16|this[offset+2]<<8|this[offset+3])};Buffer.prototype.readIntLE=function readIntLE(offset,byteLength,noAssert){offset=offset|0;byteLength=byteLength|0;if(!noAssert)checkOffset(offset,byteLength,this.length);var val=this[offset];var mul=1;var i=0;while(++i=mul)val-=Math.pow(2,8*byteLength);return 
val};Buffer.prototype.readIntBE=function readIntBE(offset,byteLength,noAssert){offset=offset|0;byteLength=byteLength|0;if(!noAssert)checkOffset(offset,byteLength,this.length); var i=byteLength;var mul=1;var val=this[offset+--i];while(i>0&&(mul*=256)){val+=this[offset+--i]*mul}mul*=128;if(val>=mul)val-=Math.pow(2,8*byteLength);return val};Buffer.prototype.readInt8=function readInt8(offset,noAssert){if(!noAssert)checkOffset(offset,1,this.length);if(!(this[offset]&128))return this[offset];return(255-this[offset]+1)*-1};Buffer.prototype.readInt16LE=function readInt16LE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);var val=this[offset]|this[offset+1]<<8;return val&32768?val|4294901760:val};Buffer.prototype.readInt16BE=function readInt16BE(offset,noAssert){if(!noAssert)checkOffset(offset,2,this.length);var val=this[offset+1]|this[offset]<<8;return val&32768?val|4294901760:val};Buffer.prototype.readInt32LE=function readInt32LE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return this[offset]|this[offset+1]<<8|this[offset+2]<<16|this[offset+3]<<24};Buffer.prototype.readInt32BE=function readInt32BE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return this[offset]<<24|this[offset+1]<<16|this[offset+2]<<8|this[offset+3]};Buffer.prototype.readFloatLE=function readFloatLE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return ieee754.read(this,offset,true,23,4)};Buffer.prototype.readFloatBE=function readFloatBE(offset,noAssert){if(!noAssert)checkOffset(offset,4,this.length);return ieee754.read(this,offset,false,23,4)};Buffer.prototype.readDoubleLE=function readDoubleLE(offset,noAssert){if(!noAssert)checkOffset(offset,8,this.length);return ieee754.read(this,offset,true,52,8)};Buffer.prototype.readDoubleBE=function readDoubleBE(offset,noAssert){if(!noAssert)checkOffset(offset,8,this.length);return ieee754.read(this,offset,false,52,8)};function checkInt(buf,value,offset,ext,max,min){if(!Buffer.isBuffer(buf))throw new TypeError('"buffer" argument must be a Buffer instance');if(value>max||valuebuf.length)throw new RangeError("Index out of range")}Buffer.prototype.writeUIntLE=function writeUIntLE(value,offset,byteLength,noAssert){value=+value;offset=offset|0;byteLength=byteLength|0;if(!noAssert){var maxBytes=Math.pow(2,8*byteLength)-1;checkInt(this,value,offset,byteLength,maxBytes,0)}var mul=1;var i=0;this[offset]=value&255;while(++i=0&&(mul*=256)){this[offset+i]=value/mul&255}return offset+byteLength};Buffer.prototype.writeUInt8=function writeUInt8(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,1,255,0);if(!Buffer.TYPED_ARRAY_SUPPORT)value=Math.floor(value);this[offset]=value&255;return offset+1};function objectWriteUInt16(buf,value,offset,littleEndian){if(value<0)value=65535+value+1;for(var i=0,j=Math.min(buf.length-offset,2);i>>(littleEndian?i:1-i)*8}}Buffer.prototype.writeUInt16LE=function writeUInt16LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,65535,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value&255;this[offset+1]=value>>>8}else{objectWriteUInt16(this,value,offset,true)}return offset+2};Buffer.prototype.writeUInt16BE=function writeUInt16BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,65535,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>8;this[offset+1]=value&255}else{objectWriteUInt16(this,value,offset,false)}return offset+2};function 
objectWriteUInt32(buf,value,offset,littleEndian){if(value<0)value=4294967295+value+1;for(var i=0,j=Math.min(buf.length-offset,4);i>>(littleEndian?i:3-i)*8&255}}Buffer.prototype.writeUInt32LE=function writeUInt32LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,4294967295,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset+3]=value>>>24;this[offset+2]=value>>>16;this[offset+1]=value>>>8;this[offset]=value&255}else{objectWriteUInt32(this,value,offset,true)}return offset+4};Buffer.prototype.writeUInt32BE=function writeUInt32BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,4294967295,0);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>24;this[offset+1]=value>>>16;this[offset+2]=value>>>8;this[offset+3]=value&255}else{objectWriteUInt32(this,value,offset,false)}return offset+4};Buffer.prototype.writeIntLE=function writeIntLE(value,offset,byteLength,noAssert){value=+value;offset=offset|0;if(!noAssert){var limit=Math.pow(2,8*byteLength-1);checkInt(this,value,offset,byteLength,limit-1,-limit)}var i=0;var mul=1;var sub=0;this[offset]=value&255;while(++i>0)-sub&255}return offset+byteLength};Buffer.prototype.writeIntBE=function writeIntBE(value,offset,byteLength,noAssert){value=+value;offset=offset|0;if(!noAssert){var limit=Math.pow(2,8*byteLength-1);checkInt(this,value,offset,byteLength,limit-1,-limit)}var i=byteLength-1;var mul=1;var sub=0;this[offset+i]=value&255;while(--i>=0&&(mul*=256)){if(value<0&&sub===0&&this[offset+i+1]!==0){sub=1}this[offset+i]=(value/mul>>0)-sub&255}return offset+byteLength};Buffer.prototype.writeInt8=function writeInt8(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,1,127,-128);if(!Buffer.TYPED_ARRAY_SUPPORT)value=Math.floor(value);if(value<0)value=255+value+1;this[offset]=value&255;return offset+1};Buffer.prototype.writeInt16LE=function writeInt16LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,32767,-32768);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value&255;this[offset+1]=value>>>8}else{objectWriteUInt16(this,value,offset,true)}return offset+2};Buffer.prototype.writeInt16BE=function writeInt16BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,2,32767,-32768);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>8;this[offset+1]=value&255}else{objectWriteUInt16(this,value,offset,false)}return offset+2};Buffer.prototype.writeInt32LE=function writeInt32LE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,2147483647,-2147483648);if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value&255;this[offset+1]=value>>>8;this[offset+2]=value>>>16;this[offset+3]=value>>>24}else{objectWriteUInt32(this,value,offset,true)}return offset+4};Buffer.prototype.writeInt32BE=function writeInt32BE(value,offset,noAssert){value=+value;offset=offset|0;if(!noAssert)checkInt(this,value,offset,4,2147483647,-2147483648);if(value<0)value=4294967295+value+1;if(Buffer.TYPED_ARRAY_SUPPORT){this[offset]=value>>>24;this[offset+1]=value>>>16;this[offset+2]=value>>>8;this[offset+3]=value&255}else{objectWriteUInt32(this,value,offset,false)}return offset+4};function checkIEEE754(buf,value,offset,ext,max,min){if(offset+ext>buf.length)throw new RangeError("Index out of range");if(offset<0)throw new RangeError("Index out of range")}function 
writeFloat(buf,value,offset,littleEndian,noAssert){if(!noAssert){checkIEEE754(buf,value,offset,4,3.4028234663852886e38,-3.4028234663852886e38)}ieee754.write(buf,value,offset,littleEndian,23,4);return offset+4}Buffer.prototype.writeFloatLE=function writeFloatLE(value,offset,noAssert){return writeFloat(this,value,offset,true,noAssert)};Buffer.prototype.writeFloatBE=function writeFloatBE(value,offset,noAssert){return writeFloat(this,value,offset,false,noAssert)};function writeDouble(buf,value,offset,littleEndian,noAssert){if(!noAssert){checkIEEE754(buf,value,offset,8,1.7976931348623157e308,-1.7976931348623157e308)}ieee754.write(buf,value,offset,littleEndian,52,8);return offset+8}Buffer.prototype.writeDoubleLE=function writeDoubleLE(value,offset,noAssert){return writeDouble(this,value,offset,true,noAssert)};Buffer.prototype.writeDoubleBE=function writeDoubleBE(value,offset,noAssert){return writeDouble(this,value,offset,false,noAssert)};Buffer.prototype.copy=function copy(target,targetStart,start,end){if(!start)start=0;if(!end&&end!==0)end=this.length;if(targetStart>=target.length)targetStart=target.length;if(!targetStart)targetStart=0;if(end>0&&end=this.length)throw new RangeError("sourceStart out of bounds");if(end<0)throw new RangeError("sourceEnd out of bounds");if(end>this.length)end=this.length;if(target.length-targetStart=0;--i){target[i+targetStart]=this[i+start]}}else if(len<1e3||!Buffer.TYPED_ARRAY_SUPPORT){for(i=0;i>>0;end=end===undefined?this.length:end>>>0;if(!val)val=0;var i;if(typeof val==="number"){for(i=start;i55295&&codePoint<57344){if(!leadSurrogate){if(codePoint>56319){if((units-=3)>-1)bytes.push(239,191,189);continue}else if(i+1===length){if((units-=3)>-1)bytes.push(239,191,189);continue}leadSurrogate=codePoint;continue}if(codePoint<56320){if((units-=3)>-1)bytes.push(239,191,189);leadSurrogate=codePoint;continue}codePoint=(leadSurrogate-55296<<10|codePoint-56320)+65536}else if(leadSurrogate){if((units-=3)>-1)bytes.push(239,191,189)}leadSurrogate=null;if(codePoint<128){if((units-=1)<0)break;bytes.push(codePoint)}else if(codePoint<2048){if((units-=2)<0)break;bytes.push(codePoint>>6|192,codePoint&63|128)}else if(codePoint<65536){if((units-=3)<0)break;bytes.push(codePoint>>12|224,codePoint>>6&63|128,codePoint&63|128)}else if(codePoint<1114112){if((units-=4)<0)break;bytes.push(codePoint>>18|240,codePoint>>12&63|128,codePoint>>6&63|128,codePoint&63|128)}else{throw new Error("Invalid code point")}}return bytes}function asciiToBytes(str){var byteArray=[];for(var i=0;i>8;lo=c%256;byteArray.push(lo);byteArray.push(hi)}return byteArray}function base64ToBytes(str){return base64.toByteArray(base64clean(str))}function blitBuffer(src,dst,offset,length){for(var i=0;i=dst.length||i>=src.length)break;dst[i+offset]=src[i]}return i}function isnan(val){return val!==val}}).call(this,typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{})},{"base64-js":2,ieee754:37,isarray:40}],6:[function(require,module,exports){(function(Buffer){function isArray(arg){if(Array.isArray){return Array.isArray(arg)}return objectToString(arg)==="[object Array]"}exports.isArray=isArray;function isBoolean(arg){return typeof arg==="boolean"}exports.isBoolean=isBoolean;function isNull(arg){return arg===null}exports.isNull=isNull;function isNullOrUndefined(arg){return arg==null}exports.isNullOrUndefined=isNullOrUndefined;function isNumber(arg){return typeof arg==="number"}exports.isNumber=isNumber;function isString(arg){return typeof 
arg==="string"}exports.isString=isString;function isSymbol(arg){return typeof arg==="symbol"}exports.isSymbol=isSymbol;function isUndefined(arg){return arg===void 0}exports.isUndefined=isUndefined;function isRegExp(re){return objectToString(re)==="[object RegExp]"}exports.isRegExp=isRegExp;function isObject(arg){return typeof arg==="object"&&arg!==null}exports.isObject=isObject;function isDate(d){return objectToString(d)==="[object Date]"}exports.isDate=isDate;function isError(e){return objectToString(e)==="[object Error]"||e instanceof Error}exports.isError=isError;function isFunction(arg){return typeof arg==="function"}exports.isFunction=isFunction;function isPrimitive(arg){return arg===null||typeof arg==="boolean"||typeof arg==="number"||typeof arg==="string"||typeof arg==="symbol"||typeof arg==="undefined"}exports.isPrimitive=isPrimitive;exports.isBuffer=Buffer.isBuffer;function objectToString(o){return Object.prototype.toString.call(o)}}).call(this,{isBuffer:require("../../is-buffer/index.js")})},{"../../is-buffer/index.js":39}],7:[function(require,module,exports){var ElementType=require("domelementtype");var entities=require("entities");var booleanAttributes={__proto__:null,allowfullscreen:true,async:true,autofocus:true,autoplay:true,checked:true,controls:true,default:true,defer:true,disabled:true,hidden:true,ismap:true,loop:true,multiple:true,muted:true,open:true,readonly:true,required:true,reversed:true,scoped:true,seamless:true,selected:true,typemustmatch:true};var unencodedElements={__proto__:null,style:true,script:true,xmp:true,iframe:true,noembed:true,noframes:true,plaintext:true,noscript:true};function formatAttrs(attributes,opts){if(!attributes)return;var output="",value;for(var key in attributes){value=attributes[key];if(output){output+=" "}if(!value&&booleanAttributes[key]){output+=key}else{output+=key+'="'+(opts.decodeEntities?entities.encodeXML(value):value)+'"'}}return output}var singleTag={__proto__:null,area:true,base:true,basefont:true,br:true,col:true,command:true,embed:true,frame:true,hr:true,img:true,input:true,isindex:true,keygen:true,link:true,meta:true,param:true,source:true,track:true,wbr:true};var render=module.exports=function(dom,opts){if(!Array.isArray(dom)&&!dom.cheerio)dom=[dom];opts=opts||{};var output="";for(var i=0;i"}else{tag+=">";if(elem.children){tag+=render(elem.children,opts)}if(!singleTag[elem.name]||opts.xmlMode){tag+=""}}return tag}function renderDirective(elem){return"<"+elem.data+">"}function renderText(elem,opts){var data=elem.data||"";if(opts.decodeEntities&&!(elem.parent&&elem.parent.name in unencodedElements)){data=entities.encodeXML(data)}return data}function renderCdata(elem){return""}function renderComment(elem){return""}},{domelementtype:8,entities:20}],8:[function(require,module,exports){module.exports={Text:"text",Directive:"directive",Comment:"comment",Script:"script",Style:"style",Tag:"tag",CDATA:"cdata",isTag:function(elem){return elem.type==="tag"||elem.type==="script"||elem.type==="style"}}},{}],9:[function(require,module,exports){module.exports={Text:"text",Directive:"directive",Comment:"comment",Script:"script",Style:"style",Tag:"tag",CDATA:"cdata",Doctype:"doctype",isTag:function(elem){return elem.type==="tag"||elem.type==="script"||elem.type==="style"}}},{}],10:[function(require,module,exports){var ElementType=require("domelementtype");var re_whitespace=/\s+/g;var NodePrototype=require("./lib/node");var ElementPrototype=require("./lib/element");function DomHandler(callback,options,elementCB){if(typeof 
callback==="object"){elementCB=options;options=callback;callback=null}else if(typeof options==="function"){elementCB=options;options=defaultOpts}this._callback=callback;this._options=options||defaultOpts;this._elementCB=elementCB;this.dom=[];this._done=false;this._tagStack=[];this._parser=this._parser||null}var defaultOpts={normalizeWhitespace:false,withStartIndices:false};DomHandler.prototype.onparserinit=function(parser){this._parser=parser};DomHandler.prototype.onreset=function(){DomHandler.call(this,this._callback,this._options,this._elementCB)};DomHandler.prototype.onend=function(){if(this._done)return;this._done=true;this._parser=null;this._handleCallback(null)};DomHandler.prototype._handleCallback=DomHandler.prototype.onerror=function(error){if(typeof this._callback==="function"){this._callback(error,this.dom)}else{if(error)throw error}};DomHandler.prototype.onclosetag=function(){var elem=this._tagStack.pop();if(this._elementCB)this._elementCB(elem)};DomHandler.prototype._addDomElement=function(element){var parent=this._tagStack[this._tagStack.length-1];var siblings=parent?parent.children:this.dom;var previousSibling=siblings[siblings.length-1];element.next=null;if(this._options.withStartIndices){element.startIndex=this._parser.startIndex}if(this._options.withDomLvl1){element.__proto__=element.type==="tag"?ElementPrototype:NodePrototype}if(previousSibling){element.prev=previousSibling;previousSibling.next=element}else{element.prev=null}siblings.push(element);element.parent=parent||null};DomHandler.prototype.onopentag=function(name,attribs){var element={type:name==="script"?ElementType.Script:name==="style"?ElementType.Style:ElementType.Tag,name:name,attribs:attribs,children:[]};this._addDomElement(element);this._tagStack.push(element)};DomHandler.prototype.ontext=function(data){var normalize=this._options.normalizeWhitespace||this._options.ignoreWhitespace;var lastTag;if(!this._tagStack.length&&this.dom.length&&(lastTag=this.dom[this.dom.length-1]).type===ElementType.Text){if(normalize){lastTag.data=(lastTag.data+data).replace(re_whitespace," ")}else{lastTag.data+=data}}else{if(this._tagStack.length&&(lastTag=this._tagStack[this._tagStack.length-1])&&(lastTag=lastTag.children[lastTag.children.length-1])&&lastTag.type===ElementType.Text){if(normalize){lastTag.data=(lastTag.data+data).replace(re_whitespace," ")}else{lastTag.data+=data}}else{if(normalize){data=data.replace(re_whitespace," ")}this._addDomElement({data:data,type:ElementType.Text})}}};DomHandler.prototype.oncomment=function(data){var lastTag=this._tagStack[this._tagStack.length-1];if(lastTag&&lastTag.type===ElementType.Comment){lastTag.data+=data;return}var element={data:data,type:ElementType.Comment};this._addDomElement(element);this._tagStack.push(element)};DomHandler.prototype.oncdatastart=function(){var element={children:[{data:"",type:ElementType.Text}],type:ElementType.CDATA};this._addDomElement(element);this._tagStack.push(element)};DomHandler.prototype.oncommentend=DomHandler.prototype.oncdataend=function(){this._tagStack.pop()};DomHandler.prototype.onprocessinginstruction=function(name,data){this._addDomElement({name:name,data:data,type:ElementType.Directive})};module.exports=DomHandler},{"./lib/element":11,"./lib/node":12,domelementtype:9}],11:[function(require,module,exports){var NodePrototype=require("./node");var ElementPrototype=module.exports=Object.create(NodePrototype);var domLvl1={tagName:"name"};Object.keys(domLvl1).forEach(function(key){var 
shorthand=domLvl1[key];Object.defineProperty(ElementPrototype,key,{get:function(){return this[shorthand]||null},set:function(val){this[shorthand]=val;return val}})})},{"./node":12}],12:[function(require,module,exports){var NodePrototype=module.exports={get firstChild(){var children=this.children;return children&&children[0]||null},get lastChild(){var children=this.children;return children&&children[children.length-1]||null},get nodeType(){return nodeTypes[this.type]||nodeTypes.element}};var domLvl1={tagName:"name",childNodes:"children",parentNode:"parent",previousSibling:"prev",nextSibling:"next",nodeValue:"data"};var nodeTypes={element:1,text:3,cdata:4,comment:8};Object.keys(domLvl1).forEach(function(key){var shorthand=domLvl1[key];Object.defineProperty(NodePrototype,key,{get:function(){return this[shorthand]||null},set:function(val){this[shorthand]=val;return val}})})},{}],13:[function(require,module,exports){var DomUtils=module.exports;[require("./lib/stringify"),require("./lib/traversal"),require("./lib/manipulation"),require("./lib/querying"),require("./lib/legacy"),require("./lib/helpers")].forEach(function(ext){Object.keys(ext).forEach(function(key){DomUtils[key]=ext[key].bind(DomUtils)})})},{"./lib/helpers":14,"./lib/legacy":15,"./lib/manipulation":16,"./lib/querying":17,"./lib/stringify":18,"./lib/traversal":19}],14:[function(require,module,exports){exports.removeSubsets=function(nodes){var idx=nodes.length,node,ancestor,replace;while(--idx>-1){node=ancestor=nodes[idx];nodes[idx]=null;replace=true;while(ancestor){if(nodes.indexOf(ancestor)>-1){replace=false;nodes.splice(idx,1);break}ancestor=ancestor.parent}if(replace){nodes[idx]=node}}return nodes};var POSITION={DISCONNECTED:1,PRECEDING:2,FOLLOWING:4,CONTAINS:8,CONTAINED_BY:16};var comparePos=exports.compareDocumentPosition=function(nodeA,nodeB){var aParents=[];var bParents=[];var current,sharedParent,siblings,aSibling,bSibling,idx;if(nodeA===nodeB){return 0}current=nodeA;while(current){aParents.unshift(current);current=current.parent}current=nodeB;while(current){bParents.unshift(current);current=current.parent}idx=0;while(aParents[idx]===bParents[idx]){idx++}if(idx===0){return POSITION.DISCONNECTED}sharedParent=aParents[idx-1];siblings=sharedParent.children;aSibling=aParents[idx];bSibling=bParents[idx];if(siblings.indexOf(aSibling)>siblings.indexOf(bSibling)){if(sharedParent===nodeB){return POSITION.FOLLOWING|POSITION.CONTAINED_BY}return POSITION.FOLLOWING}else{if(sharedParent===nodeA){return POSITION.PRECEDING|POSITION.CONTAINS}return POSITION.PRECEDING}};exports.uniqueSort=function(nodes){var idx=nodes.length,node,position;nodes=nodes.slice();while(--idx>-1){node=nodes[idx];position=nodes.indexOf(node);if(position>-1&&position0){childs=find(test,childs,recurse,limit);result=result.concat(childs);limit-=childs.length;if(limit<=0)break}}return result}function findOneChild(test,elems){for(var i=0,l=elems.length;i0){elem=findOne(test,elems[i].children)}}return elem}function existsOne(test,elems){for(var i=0,l=elems.length;i0&&existsOne(test,elems[i].children))){return true}}return false}function findAll(test,elems){var result=[];for(var i=0,j=elems.length;i0){result=result.concat(findAll(test,elems[i].children))}}return result}},{domelementtype:9}],18:[function(require,module,exports){var ElementType=require("domelementtype"),getOuterHTML=require("dom-serializer"),isTag=ElementType.isTag;module.exports={getInnerHTML:getInnerHTML,getOuterHTML:getOuterHTML,getText:getText};function getInnerHTML(elem,opts){return 
elem.children?elem.children.map(function(elem){return getOuterHTML(elem,opts)}).join(""):""}function getText(elem){if(Array.isArray(elem))return elem.map(getText).join("");if(isTag(elem)||elem.type===ElementType.CDATA)return getText(elem.children);if(elem.type===ElementType.Text)return elem.data;return""}},{"dom-serializer":7,domelementtype:9}],19:[function(require,module,exports){var getChildren=exports.getChildren=function(elem){return elem.children};var getParent=exports.getParent=function(elem){return elem.parent};exports.getSiblings=function(elem){var parent=getParent(elem);return parent?getChildren(parent):[elem]};exports.getAttributeValue=function(elem,name){return elem.attribs&&elem.attribs[name]};exports.hasAttrib=function(elem,name){return!!elem.attribs&&hasOwnProperty.call(elem.attribs,name)};exports.getName=function(elem){return elem.name}},{}],20:[function(require,module,exports){var encode=require("./lib/encode.js"),decode=require("./lib/decode.js");exports.decode=function(data,level){return(!level||level<=0?decode.XML:decode.HTML)(data)};exports.decodeStrict=function(data,level){return(!level||level<=0?decode.XML:decode.HTMLStrict)(data)};exports.encode=function(data,level){return(!level||level<=0?encode.XML:encode.HTML)(data)};exports.encodeXML=encode.XML;exports.encodeHTML4=exports.encodeHTML5=exports.encodeHTML=encode.HTML;exports.decodeXML=exports.decodeXMLStrict=decode.XML;exports.decodeHTML4=exports.decodeHTML5=exports.decodeHTML=decode.HTML;exports.decodeHTML4Strict=exports.decodeHTML5Strict=exports.decodeHTMLStrict=decode.HTMLStrict;exports.escape=encode.escape},{"./lib/decode.js":21,"./lib/encode.js":23}],21:[function(require,module,exports){var entityMap=require("../maps/entities.json"),legacyMap=require("../maps/legacy.json"),xmlMap=require("../maps/xml.json"),decodeCodePoint=require("./decode_codepoint.js");var decodeXMLStrict=getStrictDecoder(xmlMap),decodeHTMLStrict=getStrictDecoder(entityMap);function getStrictDecoder(map){var keys=Object.keys(map).join("|"),replace=getReplacer(map);keys+="|#[xX][\\da-fA-F]+|#\\d+";var re=new RegExp("&(?:"+keys+");","g");return function(str){return String(str).replace(re,replace)}}var decodeHTML=function(){var legacy=Object.keys(legacyMap).sort(sorter);var keys=Object.keys(entityMap).sort(sorter);for(var i=0,j=0;i=55296&&codePoint<=57343||codePoint>1114111){return"�"}if(codePoint in decodeMap){codePoint=decodeMap[codePoint]}var output="";if(codePoint>65535){codePoint-=65536;output+=String.fromCharCode(codePoint>>>10&1023|55296);codePoint=56320|codePoint&1023}output+=String.fromCharCode(codePoint);return output}},{"../maps/decode.json":24}],23:[function(require,module,exports){var inverseXML=getInverseObj(require("../maps/xml.json")),xmlReplacer=getInverseReplacer(inverseXML);exports.XML=getInverse(inverseXML,xmlReplacer);var inverseHTML=getInverseObj(require("../maps/entities.json")),htmlReplacer=getInverseReplacer(inverseHTML);exports.HTML=getInverse(inverseHTML,htmlReplacer);function getInverseObj(obj){return Object.keys(obj).sort().reduce(function(inverse,name){inverse[obj[name]]="&"+name+";";return inverse},{})}function getInverseReplacer(inverse){var single=[],multiple=[];Object.keys(inverse).forEach(function(k){if(k.length===1){single.push("\\"+k)}else{multiple.push(k)}});multiple.unshift("["+single.join("")+"]");return new RegExp(multiple.join("|"),"g")}var re_nonASCII=/[^\0-\x7F]/g,re_astralSymbols=/[\uD800-\uDBFF][\uDC00-\uDFFF]/g;function 
singleCharReplacer(c){return"&#x"+c.charCodeAt(0).toString(16).toUpperCase()+";"}function astralReplacer(c){var high=c.charCodeAt(0);var low=c.charCodeAt(1);var codePoint=(high-55296)*1024+low-56320+65536;return"&#x"+codePoint.toString(16).toUpperCase()+";"}function getInverse(inverse,re){function func(name){return inverse[name]}return function(data){return data.replace(re,func).replace(re_astralSymbols,astralReplacer).replace(re_nonASCII,singleCharReplacer)}}var re_xmlChars=getInverseReplacer(inverseXML);function escapeXML(data){return data.replace(re_xmlChars,singleCharReplacer).replace(re_astralSymbols,astralReplacer).replace(re_nonASCII,singleCharReplacer)}exports.escape=escapeXML},{"../maps/entities.json":25,"../maps/xml.json":27}],24:[function(require,module,exports){module.exports={0:65533,128:8364,130:8218,131:402,132:8222,133:8230,134:8224,135:8225,136:710,137:8240,138:352,139:8249,140:338,142:381,145:8216,146:8217,147:8220,148:8221,149:8226,150:8211,151:8212,152:732,153:8482,154:353,155:8250,156:339,158:382,159:376}},{}],25:[function(require,module,exports){module.exports={Aacute:"Á",aacute:"á",Abreve:"Ă",abreve:"ă",ac:"∾",acd:"∿",acE:"∾̳",Acirc:"Â",acirc:"â",acute:"´",Acy:"А",acy:"а",AElig:"Æ",aelig:"æ",af:"⁡",Afr:"𝔄",afr:"𝔞",Agrave:"À",agrave:"à",alefsym:"ℵ",aleph:"ℵ",Alpha:"Α",alpha:"α",Amacr:"Ā",amacr:"ā",amalg:"⨿",amp:"&",AMP:"&",andand:"⩕",And:"⩓",and:"∧",andd:"⩜",andslope:"⩘",andv:"⩚",ang:"∠",ange:"⦤",angle:"∠",angmsdaa:"⦨",angmsdab:"⦩",angmsdac:"⦪",angmsdad:"⦫",angmsdae:"⦬",angmsdaf:"⦭",angmsdag:"⦮",angmsdah:"⦯",angmsd:"∡",angrt:"∟",angrtvb:"⊾",angrtvbd:"⦝",angsph:"∢",angst:"Å",angzarr:"⍼",Aogon:"Ą",aogon:"ą",Aopf:"𝔸",aopf:"𝕒",apacir:"⩯",ap:"≈",apE:"⩰",ape:"≊",apid:"≋",apos:"'",ApplyFunction:"⁡",approx:"≈",approxeq:"≊",Aring:"Å",aring:"å",Ascr:"𝒜",ascr:"𝒶",Assign:"≔",ast:"*",asymp:"≈",asympeq:"≍",Atilde:"Ã",atilde:"ã",Auml:"Ä",auml:"ä",awconint:"∳",awint:"⨑",backcong:"≌",backepsilon:"϶",backprime:"‵",backsim:"∽",backsimeq:"⋍",Backslash:"∖",Barv:"⫧",barvee:"⊽",barwed:"⌅",Barwed:"⌆",barwedge:"⌅",bbrk:"⎵",bbrktbrk:"⎶",bcong:"≌",Bcy:"Б",bcy:"б",bdquo:"„",becaus:"∵",because:"∵",Because:"∵",bemptyv:"⦰",bepsi:"϶",bernou:"ℬ",Bernoullis:"ℬ",Beta:"Β",beta:"β",beth:"ℶ",between:"≬",Bfr:"𝔅",bfr:"𝔟",bigcap:"⋂",bigcirc:"◯",bigcup:"⋃",bigodot:"⨀",bigoplus:"⨁",bigotimes:"⨂",bigsqcup:"⨆",bigstar:"★",bigtriangledown:"▽",bigtriangleup:"△",biguplus:"⨄",bigvee:"⋁",bigwedge:"⋀",bkarow:"⤍",blacklozenge:"⧫",blacksquare:"▪",blacktriangle:"▴",blacktriangledown:"▾",blacktriangleleft:"◂",blacktriangleright:"▸",blank:"␣",blk12:"▒",blk14:"░",blk34:"▓",block:"█",bne:"=⃥",bnequiv:"≡⃥",bNot:"⫭",bnot:"⌐",Bopf:"𝔹",bopf:"𝕓",bot:"⊥",bottom:"⊥",bowtie:"⋈",boxbox:"⧉",boxdl:"┐",boxdL:"╕",boxDl:"╖",boxDL:"╗",boxdr:"┌",boxdR:"╒",boxDr:"╓",boxDR:"╔",boxh:"─",boxH:"═",boxhd:"┬",boxHd:"╤",boxhD:"╥",boxHD:"╦",boxhu:"┴",boxHu:"╧",boxhU:"╨",boxHU:"╩",boxminus:"⊟",boxplus:"⊞",boxtimes:"⊠",boxul:"┘",boxuL:"╛",boxUl:"╜",boxUL:"╝",boxur:"└",boxuR:"╘",boxUr:"╙",boxUR:"╚",boxv:"│",boxV:"║",boxvh:"┼",boxvH:"╪",boxVh:"╫",boxVH:"╬",boxvl:"┤",boxvL:"╡",boxVl:"╢",boxVL:"╣",boxvr:"├",boxvR:"╞",boxVr:"╟",boxVR:"╠",bprime:"‵",breve:"˘",Breve:"˘",brvbar:"¦",bscr:"𝒷",Bscr:"ℬ",bsemi:"⁏",bsim:"∽",bsime:"⋍",bsolb:"⧅",bsol:"\\",bsolhsub:"⟈",bull:"•",bullet:"•",bump:"≎",bumpE:"⪮",bumpe:"≏",Bumpeq:"≎",bumpeq:"≏",Cacute:"Ć",cacute:"ć",capand:"⩄",capbrcup:"⩉",capcap:"⩋",cap:"∩",Cap:"⋒",capcup:"⩇",capdot:"⩀",CapitalDifferentialD:"ⅅ",caps:"∩︀",caret:"⁁",caron:"ˇ",Cayleys:"ℭ",ccaps:"⩍",Ccaron:"Č",ccaron:"č",Ccedil:"Ç",ccedil:"ç",Ccirc:"Ĉ",ccirc:"ĉ
",Cconint:"∰",ccups:"⩌",ccupssm:"⩐",Cdot:"Ċ",cdot:"ċ",cedil:"¸",Cedilla:"¸",cemptyv:"⦲",cent:"¢",centerdot:"·",CenterDot:"·",cfr:"𝔠",Cfr:"ℭ",CHcy:"Ч",chcy:"ч",check:"✓",checkmark:"✓",Chi:"Χ",chi:"χ",circ:"ˆ",circeq:"≗",circlearrowleft:"↺",circlearrowright:"↻",circledast:"⊛",circledcirc:"⊚",circleddash:"⊝",CircleDot:"⊙",circledR:"®",circledS:"Ⓢ",CircleMinus:"⊖",CirclePlus:"⊕",CircleTimes:"⊗",cir:"○",cirE:"⧃",cire:"≗",cirfnint:"⨐",cirmid:"⫯",cirscir:"⧂",ClockwiseContourIntegral:"∲",CloseCurlyDoubleQuote:"”",CloseCurlyQuote:"’",clubs:"♣",clubsuit:"♣",colon:":",Colon:"∷",Colone:"⩴",colone:"≔",coloneq:"≔",comma:",",commat:"@",comp:"∁",compfn:"∘",complement:"∁",complexes:"ℂ",cong:"≅",congdot:"⩭",Congruent:"≡",conint:"∮",Conint:"∯",ContourIntegral:"∮",copf:"𝕔",Copf:"ℂ",coprod:"∐",Coproduct:"∐",copy:"©",COPY:"©",copysr:"℗",CounterClockwiseContourIntegral:"∳",crarr:"↵",cross:"✗",Cross:"⨯",Cscr:"𝒞",cscr:"𝒸",csub:"⫏",csube:"⫑",csup:"⫐",csupe:"⫒",ctdot:"⋯",cudarrl:"⤸",cudarrr:"⤵",cuepr:"⋞",cuesc:"⋟",cularr:"↶",cularrp:"⤽",cupbrcap:"⩈",cupcap:"⩆",CupCap:"≍",cup:"∪",Cup:"⋓",cupcup:"⩊",cupdot:"⊍",cupor:"⩅",cups:"∪︀",curarr:"↷",curarrm:"⤼",curlyeqprec:"⋞",curlyeqsucc:"⋟",curlyvee:"⋎",curlywedge:"⋏",curren:"¤",curvearrowleft:"↶",curvearrowright:"↷",cuvee:"⋎",cuwed:"⋏",cwconint:"∲",cwint:"∱",cylcty:"⌭",dagger:"†",Dagger:"‡",daleth:"ℸ",darr:"↓",Darr:"↡",dArr:"⇓",dash:"‐",Dashv:"⫤",dashv:"⊣",dbkarow:"⤏",dblac:"˝",Dcaron:"Ď",dcaron:"ď",Dcy:"Д",dcy:"д",ddagger:"‡",ddarr:"⇊",DD:"ⅅ",dd:"ⅆ",DDotrahd:"⤑",ddotseq:"⩷",deg:"°",Del:"∇",Delta:"Δ",delta:"δ",demptyv:"⦱",dfisht:"⥿",Dfr:"𝔇",dfr:"𝔡",dHar:"⥥",dharl:"⇃",dharr:"⇂",DiacriticalAcute:"´",DiacriticalDot:"˙",DiacriticalDoubleAcute:"˝",DiacriticalGrave:"`",DiacriticalTilde:"˜",diam:"⋄",diamond:"⋄",Diamond:"⋄",diamondsuit:"♦",diams:"♦",die:"¨",DifferentialD:"ⅆ",digamma:"ϝ",disin:"⋲",div:"÷",divide:"÷",divideontimes:"⋇",divonx:"⋇",DJcy:"Ђ",djcy:"ђ",dlcorn:"⌞",dlcrop:"⌍",dollar:"$",Dopf:"𝔻",dopf:"𝕕",Dot:"¨",dot:"˙",DotDot:"⃜",doteq:"≐",doteqdot:"≑",DotEqual:"≐",dotminus:"∸",dotplus:"∔",dotsquare:"⊡",doublebarwedge:"⌆",DoubleContourIntegral:"∯",DoubleDot:"¨",DoubleDownArrow:"⇓",DoubleLeftArrow:"⇐",DoubleLeftRightArrow:"⇔",DoubleLeftTee:"⫤",DoubleLongLeftArrow:"⟸",DoubleLongLeftRightArrow:"⟺",DoubleLongRightArrow:"⟹",DoubleRightArrow:"⇒",DoubleRightTee:"⊨",DoubleUpArrow:"⇑",DoubleUpDownArrow:"⇕",DoubleVerticalBar:"∥",DownArrowBar:"⤓",downarrow:"↓",DownArrow:"↓",Downarrow:"⇓",DownArrowUpArrow:"⇵",DownBreve:"̑",downdownarrows:"⇊",downharpoonleft:"⇃",downharpoonright:"⇂",DownLeftRightVector:"⥐",DownLeftTeeVector:"⥞",DownLeftVectorBar:"⥖",DownLeftVector:"↽",DownRightTeeVector:"⥟",DownRightVectorBar:"⥗",DownRightVector:"⇁",DownTeeArrow:"↧",DownTee:"⊤",drbkarow:"⤐",drcorn:"⌟",drcrop:"⌌",Dscr:"𝒟",dscr:"𝒹",DScy:"Ѕ",dscy:"ѕ",dsol:"⧶",Dstrok:"Đ",dstrok:"đ",dtdot:"⋱",dtri:"▿",dtrif:"▾",duarr:"⇵",duhar:"⥯",dwangle:"⦦",DZcy:"Џ",dzcy:"џ",dzigrarr:"⟿",Eacute:"É",eacute:"é",easter:"⩮",Ecaron:"Ě",ecaron:"ě",Ecirc:"Ê",ecirc:"ê",ecir:"≖",ecolon:"≕",Ecy:"Э",ecy:"э",eDDot:"⩷",Edot:"Ė",edot:"ė",eDot:"≑",ee:"ⅇ",efDot:"≒",Efr:"𝔈",efr:"𝔢",eg:"⪚",Egrave:"È",egrave:"è",egs:"⪖",egsdot:"⪘",el:"⪙",Element:"∈",elinters:"⏧",ell:"ℓ",els:"⪕",elsdot:"⪗",Emacr:"Ē",emacr:"ē",empty:"∅",emptyset:"∅",EmptySmallSquare:"◻",emptyv:"∅",EmptyVerySmallSquare:"▫",emsp13:" ",emsp14:" ",emsp:" ",ENG:"Ŋ",eng:"ŋ",ensp:" 
",Eogon:"Ę",eogon:"ę",Eopf:"𝔼",eopf:"𝕖",epar:"⋕",eparsl:"⧣",eplus:"⩱",epsi:"ε",Epsilon:"Ε",epsilon:"ε",epsiv:"ϵ",eqcirc:"≖",eqcolon:"≕",eqsim:"≂",eqslantgtr:"⪖",eqslantless:"⪕",Equal:"⩵",equals:"=",EqualTilde:"≂",equest:"≟",Equilibrium:"⇌",equiv:"≡",equivDD:"⩸",eqvparsl:"⧥",erarr:"⥱",erDot:"≓",escr:"ℯ",Escr:"ℰ",esdot:"≐",Esim:"⩳",esim:"≂",Eta:"Η",eta:"η",ETH:"Ð",eth:"ð",Euml:"Ë",euml:"ë",euro:"€",excl:"!",exist:"∃",Exists:"∃",expectation:"ℰ",exponentiale:"ⅇ",ExponentialE:"ⅇ",fallingdotseq:"≒",Fcy:"Ф",fcy:"ф",female:"♀",ffilig:"ffi",fflig:"ff",ffllig:"ffl",Ffr:"𝔉",ffr:"𝔣",filig:"fi",FilledSmallSquare:"◼",FilledVerySmallSquare:"▪",fjlig:"fj",flat:"♭",fllig:"fl",fltns:"▱",fnof:"ƒ",Fopf:"𝔽",fopf:"𝕗",forall:"∀",ForAll:"∀",fork:"⋔",forkv:"⫙",Fouriertrf:"ℱ",fpartint:"⨍",frac12:"½",frac13:"⅓",frac14:"¼",frac15:"⅕",frac16:"⅙",frac18:"⅛",frac23:"⅔",frac25:"⅖",frac34:"¾",frac35:"⅗",frac38:"⅜",frac45:"⅘",frac56:"⅚",frac58:"⅝",frac78:"⅞",frasl:"⁄",frown:"⌢",fscr:"𝒻",Fscr:"ℱ",gacute:"ǵ",Gamma:"Γ",gamma:"γ",Gammad:"Ϝ",gammad:"ϝ",gap:"⪆",Gbreve:"Ğ",gbreve:"ğ",Gcedil:"Ģ",Gcirc:"Ĝ",gcirc:"ĝ",Gcy:"Г",gcy:"г",Gdot:"Ġ",gdot:"ġ",ge:"≥",gE:"≧",gEl:"⪌",gel:"⋛",geq:"≥",geqq:"≧",geqslant:"⩾",gescc:"⪩",ges:"⩾",gesdot:"⪀",gesdoto:"⪂",gesdotol:"⪄",gesl:"⋛︀",gesles:"⪔",Gfr:"𝔊",gfr:"𝔤",gg:"≫",Gg:"⋙",ggg:"⋙",gimel:"ℷ",GJcy:"Ѓ",gjcy:"ѓ",gla:"⪥",gl:"≷",glE:"⪒",glj:"⪤",gnap:"⪊",gnapprox:"⪊",gne:"⪈",gnE:"≩",gneq:"⪈",gneqq:"≩",gnsim:"⋧",Gopf:"𝔾",gopf:"𝕘",grave:"`",GreaterEqual:"≥",GreaterEqualLess:"⋛",GreaterFullEqual:"≧",GreaterGreater:"⪢",GreaterLess:"≷",GreaterSlantEqual:"⩾",GreaterTilde:"≳",Gscr:"𝒢",gscr:"ℊ",gsim:"≳",gsime:"⪎",gsiml:"⪐",gtcc:"⪧",gtcir:"⩺",gt:">",GT:">",Gt:"≫",gtdot:"⋗",gtlPar:"⦕",gtquest:"⩼",gtrapprox:"⪆",gtrarr:"⥸",gtrdot:"⋗",gtreqless:"⋛",gtreqqless:"⪌",gtrless:"≷",gtrsim:"≳",gvertneqq:"≩︀",gvnE:"≩︀",Hacek:"ˇ",hairsp:" 
",half:"½",hamilt:"ℋ",HARDcy:"Ъ",hardcy:"ъ",harrcir:"⥈",harr:"↔",hArr:"⇔",harrw:"↭",Hat:"^",hbar:"ℏ",Hcirc:"Ĥ",hcirc:"ĥ",hearts:"♥",heartsuit:"♥",hellip:"…",hercon:"⊹",hfr:"𝔥",Hfr:"ℌ",HilbertSpace:"ℋ",hksearow:"⤥",hkswarow:"⤦",hoarr:"⇿",homtht:"∻",hookleftarrow:"↩",hookrightarrow:"↪",hopf:"𝕙",Hopf:"ℍ",horbar:"―",HorizontalLine:"─",hscr:"𝒽",Hscr:"ℋ",hslash:"ℏ",Hstrok:"Ħ",hstrok:"ħ",HumpDownHump:"≎",HumpEqual:"≏",hybull:"⁃",hyphen:"‐",Iacute:"Í",iacute:"í",ic:"⁣",Icirc:"Î",icirc:"î",Icy:"И",icy:"и",Idot:"İ",IEcy:"Е",iecy:"е",iexcl:"¡",iff:"⇔",ifr:"𝔦",Ifr:"ℑ",Igrave:"Ì",igrave:"ì",ii:"ⅈ",iiiint:"⨌",iiint:"∭",iinfin:"⧜",iiota:"℩",IJlig:"IJ",ijlig:"ij",Imacr:"Ī",imacr:"ī",image:"ℑ",ImaginaryI:"ⅈ",imagline:"ℐ",imagpart:"ℑ",imath:"ı",Im:"ℑ",imof:"⊷",imped:"Ƶ",Implies:"⇒",incare:"℅",in:"∈",infin:"∞",infintie:"⧝",inodot:"ı",intcal:"⊺",int:"∫",Int:"∬",integers:"ℤ",Integral:"∫",intercal:"⊺",Intersection:"⋂",intlarhk:"⨗",intprod:"⨼",InvisibleComma:"⁣",InvisibleTimes:"⁢",IOcy:"Ё",iocy:"ё",Iogon:"Į",iogon:"į",Iopf:"𝕀",iopf:"𝕚",Iota:"Ι",iota:"ι",iprod:"⨼",iquest:"¿",iscr:"𝒾",Iscr:"ℐ",isin:"∈",isindot:"⋵",isinE:"⋹",isins:"⋴",isinsv:"⋳",isinv:"∈",it:"⁢",Itilde:"Ĩ",itilde:"ĩ",Iukcy:"І",iukcy:"і",Iuml:"Ï",iuml:"ï",Jcirc:"Ĵ",jcirc:"ĵ",Jcy:"Й",jcy:"й",Jfr:"𝔍",jfr:"𝔧",jmath:"ȷ",Jopf:"𝕁",jopf:"𝕛",Jscr:"𝒥",jscr:"𝒿",Jsercy:"Ј",jsercy:"ј",Jukcy:"Є",jukcy:"є",Kappa:"Κ",kappa:"κ",kappav:"ϰ",Kcedil:"Ķ",kcedil:"ķ",Kcy:"К",kcy:"к",Kfr:"𝔎",kfr:"𝔨",kgreen:"ĸ",KHcy:"Х",khcy:"х",KJcy:"Ќ",kjcy:"ќ",Kopf:"𝕂",kopf:"𝕜",Kscr:"𝒦",kscr:"𝓀",lAarr:"⇚",Lacute:"Ĺ",lacute:"ĺ",laemptyv:"⦴",lagran:"ℒ",Lambda:"Λ",lambda:"λ",lang:"⟨",Lang:"⟪",langd:"⦑",langle:"⟨",lap:"⪅",Laplacetrf:"ℒ",laquo:"«",larrb:"⇤",larrbfs:"⤟",larr:"←",Larr:"↞",lArr:"⇐",larrfs:"⤝",larrhk:"↩",larrlp:"↫",larrpl:"⤹",larrsim:"⥳",larrtl:"↢",latail:"⤙",lAtail:"⤛",lat:"⪫",late:"⪭",lates:"⪭︀",lbarr:"⤌",lBarr:"⤎",lbbrk:"❲",lbrace:"{",lbrack:"[",lbrke:"⦋",lbrksld:"⦏",lbrkslu:"⦍",Lcaron:"Ľ",lcaron:"ľ",Lcedil:"Ļ",lcedil:"ļ",lceil:"⌈",lcub:"{",Lcy:"Л",lcy:"л",ldca:"⤶",ldquo:"“",ldquor:"„",ldrdhar:"⥧",ldrushar:"⥋",ldsh:"↲",le:"≤",lE:"≦",LeftAngleBracket:"⟨",LeftArrowBar:"⇤",leftarrow:"←",LeftArrow:"←",Leftarrow:"⇐",LeftArrowRightArrow:"⇆",leftarrowtail:"↢",LeftCeiling:"⌈",LeftDoubleBracket:"⟦",LeftDownTeeVector:"⥡",LeftDownVectorBar:"⥙",LeftDownVector:"⇃",LeftFloor:"⌊",leftharpoondown:"↽",leftharpoonup:"↼",leftleftarrows:"⇇",leftrightarrow:"↔",LeftRightArrow:"↔",Leftrightarrow:"⇔",leftrightarrows:"⇆",leftrightharpoons:"⇋",leftrightsquigarrow:"↭",LeftRightVector:"⥎",LeftTeeArrow:"↤",LeftTee:"⊣",LeftTeeVector:"⥚",leftthreetimes:"⋋",LeftTriangleBar:"⧏",LeftTriangle:"⊲",LeftTriangleEqual:"⊴",LeftUpDownVector:"⥑",LeftUpTeeVector:"⥠",LeftUpVectorBar:"⥘",LeftUpVector:"↿",LeftVectorBar:"⥒",LeftVector:"↼",lEg:"⪋",leg:"⋚",leq:"≤",leqq:"≦",leqslant:"⩽",lescc:"⪨",les:"⩽",lesdot:"⩿",lesdoto:"⪁",lesdotor:"⪃",lesg:"⋚︀",lesges:"⪓",lessapprox:"⪅",lessdot:"⋖",lesseqgtr:"⋚",lesseqqgtr:"⪋",LessEqualGreater:"⋚",LessFullEqual:"≦",LessGreater:"≶",lessgtr:"≶",LessLess:"⪡",lesssim:"≲",LessSlantEqual:"⩽",LessTilde:"≲",lfisht:"⥼",lfloor:"⌊",Lfr:"𝔏",lfr:"𝔩",lg:"≶",lgE:"⪑",lHar:"⥢",lhard:"↽",lharu:"↼",lharul:"⥪",lhblk:"▄",LJcy:"Љ",ljcy:"љ",llarr:"⇇",ll:"≪",Ll:"⋘",llcorner:"⌞",Lleftarrow:"⇚",llhard:"⥫",lltri:"◺",Lmidot:"Ŀ",lmidot:"ŀ",lmoustache:"⎰",lmoust:"⎰",lnap:"⪉",lnapprox:"⪉",lne:"⪇",lnE:"≨",lneq:"⪇",lneqq:"≨",lnsim:"⋦",loang:"⟬",loarr:"⇽",lobrk:"⟦",longleftarrow:"⟵",LongLeftArrow:"⟵",Longleftarrow:"⟸",longleftrightarrow:"⟷",LongLeftRightArrow:"⟷",Longleftrightarrow:"⟺",longmapsto:"⟼",longrightarrow:"⟶",Lo
ngRightArrow:"⟶",Longrightarrow:"⟹",looparrowleft:"↫",looparrowright:"↬",lopar:"⦅",Lopf:"𝕃",lopf:"𝕝",loplus:"⨭",lotimes:"⨴",lowast:"∗",lowbar:"_",LowerLeftArrow:"↙",LowerRightArrow:"↘",loz:"◊",lozenge:"◊",lozf:"⧫",lpar:"(",lparlt:"⦓",lrarr:"⇆",lrcorner:"⌟",lrhar:"⇋",lrhard:"⥭",lrm:"‎",lrtri:"⊿",lsaquo:"‹",lscr:"𝓁",Lscr:"ℒ",lsh:"↰",Lsh:"↰",lsim:"≲",lsime:"⪍",lsimg:"⪏",lsqb:"[",lsquo:"‘",lsquor:"‚",Lstrok:"Ł",lstrok:"ł",ltcc:"⪦",ltcir:"⩹",lt:"<",LT:"<",Lt:"≪",ltdot:"⋖",lthree:"⋋",ltimes:"⋉",ltlarr:"⥶",ltquest:"⩻",ltri:"◃",ltrie:"⊴",ltrif:"◂",ltrPar:"⦖",lurdshar:"⥊",luruhar:"⥦",lvertneqq:"≨︀",lvnE:"≨︀",macr:"¯",male:"♂",malt:"✠",maltese:"✠",Map:"⤅",map:"↦",mapsto:"↦",mapstodown:"↧",mapstoleft:"↤",mapstoup:"↥",marker:"▮",mcomma:"⨩",Mcy:"М",mcy:"м",mdash:"—",mDDot:"∺",measuredangle:"∡",MediumSpace:" ",Mellintrf:"ℳ",Mfr:"𝔐",mfr:"𝔪",mho:"℧",micro:"µ",midast:"*",midcir:"⫰",mid:"∣",middot:"·",minusb:"⊟",minus:"−",minusd:"∸",minusdu:"⨪",MinusPlus:"∓",mlcp:"⫛",mldr:"…",mnplus:"∓",models:"⊧",Mopf:"𝕄",mopf:"𝕞",mp:"∓",mscr:"𝓂",Mscr:"ℳ",mstpos:"∾",Mu:"Μ",mu:"μ",multimap:"⊸",mumap:"⊸",nabla:"∇",Nacute:"Ń",nacute:"ń",nang:"∠⃒",nap:"≉",napE:"⩰̸",napid:"≋̸",napos:"ʼn",napprox:"≉",natural:"♮",naturals:"ℕ",natur:"♮",nbsp:" ",nbump:"≎̸",nbumpe:"≏̸",ncap:"⩃",Ncaron:"Ň",ncaron:"ň",Ncedil:"Ņ",ncedil:"ņ",ncong:"≇",ncongdot:"⩭̸",ncup:"⩂",Ncy:"Н",ncy:"н",ndash:"–",nearhk:"⤤",nearr:"↗",neArr:"⇗",nearrow:"↗",ne:"≠",nedot:"≐̸",NegativeMediumSpace:"​",NegativeThickSpace:"​",NegativeThinSpace:"​",NegativeVeryThinSpace:"​",nequiv:"≢",nesear:"⤨",nesim:"≂̸",NestedGreaterGreater:"≫",NestedLessLess:"≪",NewLine:"\n",nexist:"∄",nexists:"∄",Nfr:"𝔑",nfr:"𝔫",ngE:"≧̸",nge:"≱",ngeq:"≱",ngeqq:"≧̸",ngeqslant:"⩾̸",nges:"⩾̸",nGg:"⋙̸",ngsim:"≵",nGt:"≫⃒",ngt:"≯",ngtr:"≯",nGtv:"≫̸",nharr:"↮",nhArr:"⇎",nhpar:"⫲",ni:"∋",nis:"⋼",nisd:"⋺",niv:"∋",NJcy:"Њ",njcy:"њ",nlarr:"↚",nlArr:"⇍",nldr:"‥",nlE:"≦̸",nle:"≰",nleftarrow:"↚",nLeftarrow:"⇍",nleftrightarrow:"↮",nLeftrightarrow:"⇎",nleq:"≰",nleqq:"≦̸",nleqslant:"⩽̸",nles:"⩽̸",nless:"≮",nLl:"⋘̸",nlsim:"≴",nLt:"≪⃒",nlt:"≮",nltri:"⋪",nltrie:"⋬",nLtv:"≪̸",nmid:"∤",NoBreak:"⁠",NonBreakingSpace:" 
",nopf:"𝕟",Nopf:"ℕ",Not:"⫬",not:"¬",NotCongruent:"≢",NotCupCap:"≭",NotDoubleVerticalBar:"∦",NotElement:"∉",NotEqual:"≠",NotEqualTilde:"≂̸",NotExists:"∄",NotGreater:"≯",NotGreaterEqual:"≱",NotGreaterFullEqual:"≧̸",NotGreaterGreater:"≫̸",NotGreaterLess:"≹",NotGreaterSlantEqual:"⩾̸",NotGreaterTilde:"≵",NotHumpDownHump:"≎̸",NotHumpEqual:"≏̸",notin:"∉",notindot:"⋵̸",notinE:"⋹̸",notinva:"∉",notinvb:"⋷",notinvc:"⋶",NotLeftTriangleBar:"⧏̸",NotLeftTriangle:"⋪",NotLeftTriangleEqual:"⋬",NotLess:"≮",NotLessEqual:"≰",NotLessGreater:"≸",NotLessLess:"≪̸",NotLessSlantEqual:"⩽̸",NotLessTilde:"≴",NotNestedGreaterGreater:"⪢̸",NotNestedLessLess:"⪡̸",notni:"∌",notniva:"∌",notnivb:"⋾",notnivc:"⋽",NotPrecedes:"⊀",NotPrecedesEqual:"⪯̸",NotPrecedesSlantEqual:"⋠",NotReverseElement:"∌",NotRightTriangleBar:"⧐̸",NotRightTriangle:"⋫",NotRightTriangleEqual:"⋭",NotSquareSubset:"⊏̸",NotSquareSubsetEqual:"⋢",NotSquareSuperset:"⊐̸",NotSquareSupersetEqual:"⋣",NotSubset:"⊂⃒",NotSubsetEqual:"⊈",NotSucceeds:"⊁",NotSucceedsEqual:"⪰̸",NotSucceedsSlantEqual:"⋡",NotSucceedsTilde:"≿̸",NotSuperset:"⊃⃒",NotSupersetEqual:"⊉",NotTilde:"≁",NotTildeEqual:"≄",NotTildeFullEqual:"≇",NotTildeTilde:"≉",NotVerticalBar:"∤",nparallel:"∦",npar:"∦",nparsl:"⫽⃥",npart:"∂̸",npolint:"⨔",npr:"⊀",nprcue:"⋠",nprec:"⊀",npreceq:"⪯̸",npre:"⪯̸",nrarrc:"⤳̸",nrarr:"↛",nrArr:"⇏",nrarrw:"↝̸",nrightarrow:"↛",nRightarrow:"⇏",nrtri:"⋫",nrtrie:"⋭",nsc:"⊁",nsccue:"⋡",nsce:"⪰̸",Nscr:"𝒩",nscr:"𝓃",nshortmid:"∤",nshortparallel:"∦",nsim:"≁",nsime:"≄",nsimeq:"≄",nsmid:"∤",nspar:"∦",nsqsube:"⋢",nsqsupe:"⋣",nsub:"⊄",nsubE:"⫅̸",nsube:"⊈",nsubset:"⊂⃒",nsubseteq:"⊈",nsubseteqq:"⫅̸",nsucc:"⊁",nsucceq:"⪰̸",nsup:"⊅",nsupE:"⫆̸",nsupe:"⊉",nsupset:"⊃⃒",nsupseteq:"⊉",nsupseteqq:"⫆̸",ntgl:"≹",Ntilde:"Ñ",ntilde:"ñ",ntlg:"≸",ntriangleleft:"⋪",ntrianglelefteq:"⋬",ntriangleright:"⋫",ntrianglerighteq:"⋭",Nu:"Ν",nu:"ν",num:"#",numero:"№",numsp:" 
",nvap:"≍⃒",nvdash:"⊬",nvDash:"⊭",nVdash:"⊮",nVDash:"⊯",nvge:"≥⃒",nvgt:">⃒",nvHarr:"⤄",nvinfin:"⧞",nvlArr:"⤂",nvle:"≤⃒",nvlt:"<⃒",nvltrie:"⊴⃒",nvrArr:"⤃",nvrtrie:"⊵⃒",nvsim:"∼⃒",nwarhk:"⤣",nwarr:"↖",nwArr:"⇖",nwarrow:"↖",nwnear:"⤧",Oacute:"Ó",oacute:"ó",oast:"⊛",Ocirc:"Ô",ocirc:"ô",ocir:"⊚",Ocy:"О",ocy:"о",odash:"⊝",Odblac:"Ő",odblac:"ő",odiv:"⨸",odot:"⊙",odsold:"⦼",OElig:"Œ",oelig:"œ",ofcir:"⦿",Ofr:"𝔒",ofr:"𝔬",ogon:"˛",Ograve:"Ò",ograve:"ò",ogt:"⧁",ohbar:"⦵",ohm:"Ω",oint:"∮",olarr:"↺",olcir:"⦾",olcross:"⦻",oline:"‾",olt:"⧀",Omacr:"Ō",omacr:"ō",Omega:"Ω",omega:"ω",Omicron:"Ο",omicron:"ο",omid:"⦶",ominus:"⊖",Oopf:"𝕆",oopf:"𝕠",opar:"⦷",OpenCurlyDoubleQuote:"“",OpenCurlyQuote:"‘",operp:"⦹",oplus:"⊕",orarr:"↻",Or:"⩔",or:"∨",ord:"⩝",order:"ℴ",orderof:"ℴ",ordf:"ª",ordm:"º",origof:"⊶",oror:"⩖",orslope:"⩗",orv:"⩛",oS:"Ⓢ",Oscr:"𝒪",oscr:"ℴ",Oslash:"Ø",oslash:"ø",osol:"⊘",Otilde:"Õ",otilde:"õ",otimesas:"⨶",Otimes:"⨷",otimes:"⊗",Ouml:"Ö",ouml:"ö",ovbar:"⌽",OverBar:"‾",OverBrace:"⏞",OverBracket:"⎴",OverParenthesis:"⏜",para:"¶",parallel:"∥",par:"∥",parsim:"⫳",parsl:"⫽",part:"∂",PartialD:"∂",Pcy:"П",pcy:"п",percnt:"%",period:".",permil:"‰",perp:"⊥",pertenk:"‱",Pfr:"𝔓",pfr:"𝔭",Phi:"Φ",phi:"φ",phiv:"ϕ",phmmat:"ℳ",phone:"☎",Pi:"Π",pi:"π",pitchfork:"⋔",piv:"ϖ",planck:"ℏ",planckh:"ℎ",plankv:"ℏ",plusacir:"⨣",plusb:"⊞",pluscir:"⨢",plus:"+",plusdo:"∔",plusdu:"⨥",pluse:"⩲",PlusMinus:"±",plusmn:"±",plussim:"⨦",plustwo:"⨧",pm:"±",Poincareplane:"ℌ",pointint:"⨕",popf:"𝕡",Popf:"ℙ",pound:"£",prap:"⪷",Pr:"⪻",pr:"≺",prcue:"≼",precapprox:"⪷",prec:"≺",preccurlyeq:"≼",Precedes:"≺",PrecedesEqual:"⪯",PrecedesSlantEqual:"≼",PrecedesTilde:"≾",preceq:"⪯",precnapprox:"⪹",precneqq:"⪵",precnsim:"⋨",pre:"⪯",prE:"⪳",precsim:"≾",prime:"′",Prime:"″",primes:"ℙ",prnap:"⪹",prnE:"⪵",prnsim:"⋨",prod:"∏",Product:"∏",profalar:"⌮",profline:"⌒",profsurf:"⌓",prop:"∝",Proportional:"∝",Proportion:"∷",propto:"∝",prsim:"≾",prurel:"⊰",Pscr:"𝒫",pscr:"𝓅",Psi:"Ψ",psi:"ψ",puncsp:" 
",Qfr:"𝔔",qfr:"𝔮",qint:"⨌",qopf:"𝕢",Qopf:"ℚ",qprime:"⁗",Qscr:"𝒬",qscr:"𝓆",quaternions:"ℍ",quatint:"⨖",quest:"?",questeq:"≟",quot:'"',QUOT:'"',rAarr:"⇛",race:"∽̱",Racute:"Ŕ",racute:"ŕ",radic:"√",raemptyv:"⦳",rang:"⟩",Rang:"⟫",rangd:"⦒",range:"⦥",rangle:"⟩",raquo:"»",rarrap:"⥵",rarrb:"⇥",rarrbfs:"⤠",rarrc:"⤳",rarr:"→",Rarr:"↠",rArr:"⇒",rarrfs:"⤞",rarrhk:"↪",rarrlp:"↬",rarrpl:"⥅",rarrsim:"⥴",Rarrtl:"⤖",rarrtl:"↣",rarrw:"↝",ratail:"⤚",rAtail:"⤜",ratio:"∶",rationals:"ℚ",rbarr:"⤍",rBarr:"⤏",RBarr:"⤐",rbbrk:"❳",rbrace:"}",rbrack:"]",rbrke:"⦌",rbrksld:"⦎",rbrkslu:"⦐",Rcaron:"Ř",rcaron:"ř",Rcedil:"Ŗ",rcedil:"ŗ",rceil:"⌉",rcub:"}",Rcy:"Р",rcy:"р",rdca:"⤷",rdldhar:"⥩",rdquo:"”",rdquor:"”",rdsh:"↳",real:"ℜ",realine:"ℛ",realpart:"ℜ",reals:"ℝ",Re:"ℜ",rect:"▭",reg:"®",REG:"®",ReverseElement:"∋",ReverseEquilibrium:"⇋",ReverseUpEquilibrium:"⥯",rfisht:"⥽",rfloor:"⌋",rfr:"𝔯",Rfr:"ℜ",rHar:"⥤",rhard:"⇁",rharu:"⇀",rharul:"⥬",Rho:"Ρ",rho:"ρ",rhov:"ϱ",RightAngleBracket:"⟩",RightArrowBar:"⇥",rightarrow:"→",RightArrow:"→",Rightarrow:"⇒",RightArrowLeftArrow:"⇄",rightarrowtail:"↣",RightCeiling:"⌉",RightDoubleBracket:"⟧",RightDownTeeVector:"⥝",RightDownVectorBar:"⥕",RightDownVector:"⇂",RightFloor:"⌋",rightharpoondown:"⇁",rightharpoonup:"⇀",rightleftarrows:"⇄",rightleftharpoons:"⇌",rightrightarrows:"⇉",rightsquigarrow:"↝",RightTeeArrow:"↦",RightTee:"⊢",RightTeeVector:"⥛",rightthreetimes:"⋌",RightTriangleBar:"⧐",RightTriangle:"⊳",RightTriangleEqual:"⊵",RightUpDownVector:"⥏",RightUpTeeVector:"⥜",RightUpVectorBar:"⥔",RightUpVector:"↾",RightVectorBar:"⥓",RightVector:"⇀",ring:"˚",risingdotseq:"≓",rlarr:"⇄",rlhar:"⇌",rlm:"‏",rmoustache:"⎱",rmoust:"⎱",rnmid:"⫮",roang:"⟭",roarr:"⇾",robrk:"⟧",ropar:"⦆",ropf:"𝕣",Ropf:"ℝ",roplus:"⨮",rotimes:"⨵",RoundImplies:"⥰",rpar:")",rpargt:"⦔",rppolint:"⨒",rrarr:"⇉",Rrightarrow:"⇛",rsaquo:"›",rscr:"𝓇",Rscr:"ℛ",rsh:"↱",Rsh:"↱",rsqb:"]",rsquo:"’",rsquor:"’",rthree:"⋌",rtimes:"⋊",rtri:"▹",rtrie:"⊵",rtrif:"▸",rtriltri:"⧎",RuleDelayed:"⧴",ruluhar:"⥨",rx:"℞",Sacute:"Ś",sacute:"ś",sbquo:"‚",scap:"⪸",Scaron:"Š",scaron:"š",Sc:"⪼",sc:"≻",sccue:"≽",sce:"⪰",scE:"⪴",Scedil:"Ş",scedil:"ş",Scirc:"Ŝ",scirc:"ŝ",scnap:"⪺",scnE:"⪶",scnsim:"⋩",scpolint:"⨓",scsim:"≿",Scy:"С",scy:"с",sdotb:"⊡",sdot:"⋅",sdote:"⩦",searhk:"⤥",searr:"↘",seArr:"⇘",searrow:"↘",sect:"§",semi:";",seswar:"⤩",setminus:"∖",setmn:"∖",sext:"✶",Sfr:"𝔖",sfr:"𝔰",sfrown:"⌢",sharp:"♯",SHCHcy:"Щ",shchcy:"щ",SHcy:"Ш",shcy:"ш",ShortDownArrow:"↓",ShortLeftArrow:"←",shortmid:"∣",shortparallel:"∥",ShortRightArrow:"→",ShortUpArrow:"↑",shy:"­",Sigma:"Σ",sigma:"σ",sigmaf:"ς",sigmav:"ς",sim:"∼",simdot:"⩪",sime:"≃",simeq:"≃",simg:"⪞",simgE:"⪠",siml:"⪝",simlE:"⪟",simne:"≆",simplus:"⨤",simrarr:"⥲",slarr:"←",SmallCircle:"∘",smallsetminus:"∖",smashp:"⨳",smeparsl:"⧤",smid:"∣",smile:"⌣",smt:"⪪",smte:"⪬",smtes:"⪬︀",SOFTcy:"Ь",softcy:"ь",solbar:"⌿",solb:"⧄",sol:"/",Sopf:"𝕊",sopf:"𝕤",spades:"♠",spadesuit:"♠",spar:"∥",sqcap:"⊓",sqcaps:"⊓︀",sqcup:"⊔",sqcups:"⊔︀",Sqrt:"√",sqsub:"⊏",sqsube:"⊑",sqsubset:"⊏",sqsubseteq:"⊑",sqsup:"⊐",sqsupe:"⊒",sqsupset:"⊐",sqsupseteq:"⊒",square:"□",Square:"□",SquareIntersection:"⊓",SquareSubset:"⊏",SquareSubsetEqual:"⊑",SquareSuperset:"⊐",SquareSupersetEqual:"⊒",SquareUnion:"⊔",squarf:"▪",squ:"□",squf:"▪",srarr:"→",Sscr:"𝒮",sscr:"𝓈",ssetmn:"∖",ssmile:"⌣",sstarf:"⋆",Star:"⋆",star:"☆",starf:"★",straightepsilon:"ϵ",straightphi:"ϕ",strns:"¯",sub:"⊂",Sub:"⋐",subdot:"⪽",subE:"⫅",sube:"⊆",subedot:"⫃",submult:"⫁",subnE:"⫋",subne:"⊊",subplus:"⪿",subrarr:"⥹",subset:"⊂",Subset:"⋐",subseteq:"⊆",subseteqq:"⫅",SubsetEqual:"⊆",subsetneq:"⊊",subsetneqq:"⫋",
subsim:"⫇",subsub:"⫕",subsup:"⫓",succapprox:"⪸",succ:"≻",succcurlyeq:"≽",Succeeds:"≻",SucceedsEqual:"⪰",SucceedsSlantEqual:"≽",SucceedsTilde:"≿",succeq:"⪰",succnapprox:"⪺",succneqq:"⪶",succnsim:"⋩",succsim:"≿",SuchThat:"∋",sum:"∑",Sum:"∑",sung:"♪",sup1:"¹",sup2:"²",sup3:"³",sup:"⊃",Sup:"⋑",supdot:"⪾",supdsub:"⫘",supE:"⫆",supe:"⊇",supedot:"⫄",Superset:"⊃",SupersetEqual:"⊇",suphsol:"⟉",suphsub:"⫗",suplarr:"⥻",supmult:"⫂",supnE:"⫌",supne:"⊋",supplus:"⫀",supset:"⊃",Supset:"⋑",supseteq:"⊇",supseteqq:"⫆",supsetneq:"⊋",supsetneqq:"⫌",supsim:"⫈",supsub:"⫔",supsup:"⫖",swarhk:"⤦",swarr:"↙",swArr:"⇙",swarrow:"↙",swnwar:"⤪",szlig:"ß",Tab:"\t",target:"⌖",Tau:"Τ",tau:"τ",tbrk:"⎴",Tcaron:"Ť",tcaron:"ť",Tcedil:"Ţ",tcedil:"ţ",Tcy:"Т",tcy:"т",tdot:"⃛",telrec:"⌕",Tfr:"𝔗",tfr:"𝔱",there4:"∴",therefore:"∴",Therefore:"∴",Theta:"Θ",theta:"θ",thetasym:"ϑ",thetav:"ϑ",thickapprox:"≈",thicksim:"∼",ThickSpace:"  ",ThinSpace:" ",thinsp:" ",thkap:"≈",thksim:"∼",THORN:"Þ",thorn:"þ",tilde:"˜",Tilde:"∼",TildeEqual:"≃",TildeFullEqual:"≅",TildeTilde:"≈",timesbar:"⨱",timesb:"⊠",times:"×",timesd:"⨰",tint:"∭",toea:"⤨",topbot:"⌶",topcir:"⫱",top:"⊤",Topf:"𝕋",topf:"𝕥",topfork:"⫚",tosa:"⤩",tprime:"‴",trade:"™",TRADE:"™",triangle:"▵",triangledown:"▿",triangleleft:"◃",trianglelefteq:"⊴",triangleq:"≜",triangleright:"▹",trianglerighteq:"⊵",tridot:"◬",trie:"≜",triminus:"⨺",TripleDot:"⃛",triplus:"⨹",trisb:"⧍",tritime:"⨻",trpezium:"⏢",Tscr:"𝒯",tscr:"𝓉",TScy:"Ц",tscy:"ц",TSHcy:"Ћ",tshcy:"ћ",Tstrok:"Ŧ",tstrok:"ŧ",twixt:"≬",twoheadleftarrow:"↞",twoheadrightarrow:"↠",Uacute:"Ú",uacute:"ú",uarr:"↑",Uarr:"↟",uArr:"⇑",Uarrocir:"⥉",Ubrcy:"Ў",ubrcy:"ў",Ubreve:"Ŭ",ubreve:"ŭ",Ucirc:"Û",ucirc:"û",Ucy:"У",ucy:"у",udarr:"⇅",Udblac:"Ű",udblac:"ű",udhar:"⥮",ufisht:"⥾",Ufr:"𝔘",ufr:"𝔲",Ugrave:"Ù",ugrave:"ù",uHar:"⥣",uharl:"↿",uharr:"↾",uhblk:"▀",ulcorn:"⌜",ulcorner:"⌜",ulcrop:"⌏",ultri:"◸",Umacr:"Ū",umacr:"ū",uml:"¨",UnderBar:"_",UnderBrace:"⏟",UnderBracket:"⎵",UnderParenthesis:"⏝",Union:"⋃",UnionPlus:"⊎",Uogon:"Ų",uogon:"ų",Uopf:"𝕌",uopf:"𝕦",UpArrowBar:"⤒",uparrow:"↑",UpArrow:"↑",Uparrow:"⇑",UpArrowDownArrow:"⇅",updownarrow:"↕",UpDownArrow:"↕",Updownarrow:"⇕",UpEquilibrium:"⥮",upharpoonleft:"↿",upharpoonright:"↾",uplus:"⊎",UpperLeftArrow:"↖",UpperRightArrow:"↗",upsi:"υ",Upsi:"ϒ",upsih:"ϒ",Upsilon:"Υ",upsilon:"υ",UpTeeArrow:"↥",UpTee:"⊥",upuparrows:"⇈",urcorn:"⌝",urcorner:"⌝",urcrop:"⌎",Uring:"Ů",uring:"ů",urtri:"◹",Uscr:"𝒰",uscr:"𝓊",utdot:"⋰",Utilde:"Ũ",utilde:"ũ",utri:"▵",utrif:"▴",uuarr:"⇈",Uuml:"Ü",uuml:"ü",uwangle:"⦧",vangrt:"⦜",varepsilon:"ϵ",varkappa:"ϰ",varnothing:"∅",varphi:"ϕ",varpi:"ϖ",varpropto:"∝",varr:"↕",vArr:"⇕",varrho:"ϱ",varsigma:"ς",varsubsetneq:"⊊︀",varsubsetneqq:"⫋︀",varsupsetneq:"⊋︀",varsupsetneqq:"⫌︀",vartheta:"ϑ",vartriangleleft:"⊲",vartriangleright:"⊳",vBar:"⫨",Vbar:"⫫",vBarv:"⫩",Vcy:"В",vcy:"в",vdash:"⊢",vDash:"⊨",Vdash:"⊩",VDash:"⊫",Vdashl:"⫦",veebar:"⊻",vee:"∨",Vee:"⋁",veeeq:"≚",vellip:"⋮",verbar:"|",Verbar:"‖",vert:"|",Vert:"‖",VerticalBar:"∣",VerticalLine:"|",VerticalSeparator:"❘",VerticalTilde:"≀",VeryThinSpace:" 
",Vfr:"𝔙",vfr:"𝔳",vltri:"⊲",vnsub:"⊂⃒",vnsup:"⊃⃒",Vopf:"𝕍",vopf:"𝕧",vprop:"∝",vrtri:"⊳",Vscr:"𝒱",vscr:"𝓋",vsubnE:"⫋︀",vsubne:"⊊︀",vsupnE:"⫌︀",vsupne:"⊋︀",Vvdash:"⊪",vzigzag:"⦚",Wcirc:"Ŵ",wcirc:"ŵ",wedbar:"⩟",wedge:"∧",Wedge:"⋀",wedgeq:"≙",weierp:"℘",Wfr:"𝔚",wfr:"𝔴",Wopf:"𝕎",wopf:"𝕨",wp:"℘",wr:"≀",wreath:"≀",Wscr:"𝒲",wscr:"𝓌",xcap:"⋂",xcirc:"◯",xcup:"⋃",xdtri:"▽",Xfr:"𝔛",xfr:"𝔵",xharr:"⟷",xhArr:"⟺",Xi:"Ξ",xi:"ξ",xlarr:"⟵",xlArr:"⟸",xmap:"⟼",xnis:"⋻",xodot:"⨀",Xopf:"𝕏",xopf:"𝕩",xoplus:"⨁",xotime:"⨂",xrarr:"⟶",xrArr:"⟹",Xscr:"𝒳",xscr:"𝓍",xsqcup:"⨆",xuplus:"⨄",xutri:"△",xvee:"⋁",xwedge:"⋀",Yacute:"Ý",yacute:"ý",YAcy:"Я",yacy:"я",Ycirc:"Ŷ",ycirc:"ŷ",Ycy:"Ы",ycy:"ы",yen:"¥",Yfr:"𝔜",yfr:"𝔶",YIcy:"Ї",yicy:"ї",Yopf:"𝕐",yopf:"𝕪",Yscr:"𝒴",yscr:"𝓎",YUcy:"Ю",yucy:"ю",yuml:"ÿ",Yuml:"Ÿ",Zacute:"Ź",zacute:"ź",Zcaron:"Ž",zcaron:"ž",Zcy:"З",zcy:"з",Zdot:"Ż",zdot:"ż",zeetrf:"ℨ",ZeroWidthSpace:"​",Zeta:"Ζ",zeta:"ζ",zfr:"𝔷",Zfr:"ℨ",ZHcy:"Ж",zhcy:"ж",zigrarr:"⇝",zopf:"𝕫",Zopf:"ℤ",Zscr:"𝒵",zscr:"𝓏",zwj:"‍",zwnj:"‌"}},{}],26:[function(require,module,exports){module.exports={Aacute:"Á",aacute:"á",Acirc:"Â",acirc:"â",acute:"´",AElig:"Æ",aelig:"æ",Agrave:"À",agrave:"à",amp:"&",AMP:"&",Aring:"Å",aring:"å",Atilde:"Ã",atilde:"ã",Auml:"Ä",auml:"ä",brvbar:"¦",Ccedil:"Ç",ccedil:"ç",cedil:"¸",cent:"¢",copy:"©",COPY:"©",curren:"¤",deg:"°",divide:"÷",Eacute:"É",eacute:"é",Ecirc:"Ê",ecirc:"ê",Egrave:"È",egrave:"è",ETH:"Ð",eth:"ð",Euml:"Ë",euml:"ë",frac12:"½",frac14:"¼",frac34:"¾",gt:">",GT:">",Iacute:"Í",iacute:"í",Icirc:"Î",icirc:"î",iexcl:"¡",Igrave:"Ì",igrave:"ì",iquest:"¿",Iuml:"Ï",iuml:"ï",laquo:"«",lt:"<",LT:"<",macr:"¯",micro:"µ",middot:"·",nbsp:" ",not:"¬",Ntilde:"Ñ",ntilde:"ñ",Oacute:"Ó",oacute:"ó",Ocirc:"Ô",ocirc:"ô",Ograve:"Ò",ograve:"ò",ordf:"ª",ordm:"º",Oslash:"Ø",oslash:"ø",Otilde:"Õ",otilde:"õ",Ouml:"Ö",ouml:"ö",para:"¶",plusmn:"±",pound:"£",quot:'"',QUOT:'"',raquo:"»",reg:"®",REG:"®",sect:"§",shy:"­",sup1:"¹",sup2:"²",sup3:"³",szlig:"ß",THORN:"Þ",thorn:"þ",times:"×",Uacute:"Ú",uacute:"ú",Ucirc:"Û",ucirc:"û",Ugrave:"Ù",ugrave:"ù",uml:"¨",Uuml:"Ü",uuml:"ü",Yacute:"Ý",yacute:"ý",yen:"¥",yuml:"ÿ"}},{}],27:[function(require,module,exports){module.exports={amp:"&",apos:"'",gt:">",lt:"<",quot:'"'}},{}],28:[function(require,module,exports){function EventEmitter(){this._events=this._events||{};this._maxListeners=this._maxListeners||undefined}module.exports=EventEmitter;EventEmitter.EventEmitter=EventEmitter;EventEmitter.prototype._events=undefined;EventEmitter.prototype._maxListeners=undefined;EventEmitter.defaultMaxListeners=10;EventEmitter.prototype.setMaxListeners=function(n){if(!isNumber(n)||n<0||isNaN(n))throw TypeError("n must be a positive number");this._maxListeners=n;return this};EventEmitter.prototype.emit=function(type){var er,handler,len,args,i,listeners;if(!this._events)this._events={};if(type==="error"){if(!this._events.error||isObject(this._events.error)&&!this._events.error.length){er=arguments[1];if(er instanceof Error){throw er}else{var err=new Error('Uncaught, unspecified "error" event. 
('+er+")");err.context=er;throw err}}}handler=this._events[type];if(isUndefined(handler))return false;if(isFunction(handler)){switch(arguments.length){case 1:handler.call(this);break;case 2:handler.call(this,arguments[1]);break;case 3:handler.call(this,arguments[1],arguments[2]);break;default:args=Array.prototype.slice.call(arguments,1);handler.apply(this,args)}}else if(isObject(handler)){args=Array.prototype.slice.call(arguments,1);listeners=handler.slice();len=listeners.length;for(i=0;i0&&this._events[type].length>m){this._events[type].warned=true;console.error("(node) warning: possible EventEmitter memory "+"leak detected. %d listeners added. "+"Use emitter.setMaxListeners() to increase limit.",this._events[type].length);if(typeof console.trace==="function"){console.trace()}}}return this};EventEmitter.prototype.on=EventEmitter.prototype.addListener;EventEmitter.prototype.once=function(type,listener){if(!isFunction(listener))throw TypeError("listener must be a function");var fired=false;function g(){this.removeListener(type,g);if(!fired){fired=true;listener.apply(this,arguments)}}g.listener=listener;this.on(type,g);return this};EventEmitter.prototype.removeListener=function(type,listener){var list,position,length,i;if(!isFunction(listener))throw TypeError("listener must be a function");if(!this._events||!this._events[type])return this;list=this._events[type];length=list.length;position=-1;if(list===listener||isFunction(list.listener)&&list.listener===listener){delete this._events[type];if(this._events.removeListener)this.emit("removeListener",type,listener)}else if(isObject(list)){for(i=length;i-- >0;){if(list[i]===listener||list[i].listener&&list[i].listener===listener){position=i;break}}if(position<0)return this;if(list.length===1){list.length=0;delete this._events[type]}else{list.splice(position,1); -}if(this._events.removeListener)this.emit("removeListener",type,listener)}return this};EventEmitter.prototype.removeAllListeners=function(type){var key,listeners;if(!this._events)return this;if(!this._events.removeListener){if(arguments.length===0)this._events={};else if(this._events[type])delete this._events[type];return this}if(arguments.length===0){for(key in this._events){if(key==="removeListener")continue;this.removeAllListeners(key)}this.removeAllListeners("removeListener");this._events={};return this}listeners=this._events[type];if(isFunction(listeners)){this.removeListener(type,listeners)}else if(listeners){while(listeners.length)this.removeListener(type,listeners[listeners.length-1])}delete this._events[type];return this};EventEmitter.prototype.listeners=function(type){var ret;if(!this._events||!this._events[type])ret=[];else if(isFunction(this._events[type]))ret=[this._events[type]];else ret=this._events[type].slice();return ret};EventEmitter.prototype.listenerCount=function(type){if(this._events){var evlistener=this._events[type];if(isFunction(evlistener))return 1;else if(evlistener)return evlistener.length}return 0};EventEmitter.listenerCount=function(emitter,type){return emitter.listenerCount(type)};function isFunction(arg){return typeof arg==="function"}function isNumber(arg){return typeof arg==="number"}function isObject(arg){return typeof arg==="object"&&arg!==null}function isUndefined(arg){return arg===void 0}},{}],29:[function(require,module,exports){module.exports=CollectingHandler;function CollectingHandler(cbs){this._cbs=cbs||{};this.events=[]}var 
EVENTS=require("./").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){name="on"+name;CollectingHandler.prototype[name]=function(){this.events.push([name]);if(this._cbs[name])this._cbs[name]()}}else if(EVENTS[name]===1){name="on"+name;CollectingHandler.prototype[name]=function(a){this.events.push([name,a]);if(this._cbs[name])this._cbs[name](a)}}else if(EVENTS[name]===2){name="on"+name;CollectingHandler.prototype[name]=function(a,b){this.events.push([name,a,b]);if(this._cbs[name])this._cbs[name](a,b)}}else{throw Error("wrong number of arguments")}});CollectingHandler.prototype.onreset=function(){this.events=[];if(this._cbs.onreset)this._cbs.onreset()};CollectingHandler.prototype.restart=function(){if(this._cbs.onreset)this._cbs.onreset();for(var i=0,len=this.events.length;i0;this._cbs.onclosetag(this._stack[--i]));}if(this._cbs.onend)this._cbs.onend()};Parser.prototype.reset=function(){if(this._cbs.onreset)this._cbs.onreset();this._tokenizer.reset();this._tagname="";this._attribname="";this._attribs=null;this._stack=[];if(this._cbs.onparserinit)this._cbs.onparserinit(this)};Parser.prototype.parseComplete=function(data){this.reset();this.end(data)};Parser.prototype.write=function(chunk){this._tokenizer.write(chunk)};Parser.prototype.end=function(chunk){this._tokenizer.end(chunk)};Parser.prototype.pause=function(){this._tokenizer.pause()};Parser.prototype.resume=function(){this._tokenizer.resume()};Parser.prototype.parseChunk=Parser.prototype.write;Parser.prototype.done=Parser.prototype.end;module.exports=Parser},{"./Tokenizer.js":34,events:28,inherits:38}],32:[function(require,module,exports){module.exports=ProxyHandler;function ProxyHandler(cbs){this._cbs=cbs||{}}var EVENTS=require("./").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){name="on"+name;ProxyHandler.prototype[name]=function(){if(this._cbs[name])this._cbs[name]()}}else if(EVENTS[name]===1){name="on"+name;ProxyHandler.prototype[name]=function(a){if(this._cbs[name])this._cbs[name](a)}}else if(EVENTS[name]===2){name="on"+name;ProxyHandler.prototype[name]=function(a,b){if(this._cbs[name])this._cbs[name](a,b)}}else{throw Error("wrong number of arguments")}})},{"./":36}],33:[function(require,module,exports){module.exports=Stream;var Parser=require("./WritableStream.js");function Stream(options){Parser.call(this,new Cbs(this),options)}require("inherits")(Stream,Parser);Stream.prototype.readable=true;function Cbs(scope){this.scope=scope}var EVENTS=require("../").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){Cbs.prototype["on"+name]=function(){this.scope.emit(name)}}else if(EVENTS[name]===1){Cbs.prototype["on"+name]=function(a){this.scope.emit(name,a)}}else if(EVENTS[name]===2){Cbs.prototype["on"+name]=function(a,b){this.scope.emit(name,a,b)}}else{throw Error("wrong number of arguments!")}})},{"../":36,"./WritableStream.js":35,inherits:38}],34:[function(require,module,exports){module.exports=Tokenizer;var 
decodeCodePoint=require("entities/lib/decode_codepoint.js"),entityMap=require("entities/maps/entities.json"),legacyMap=require("entities/maps/legacy.json"),xmlMap=require("entities/maps/xml.json"),i=0,TEXT=i++,BEFORE_TAG_NAME=i++,IN_TAG_NAME=i++,IN_SELF_CLOSING_TAG=i++,BEFORE_CLOSING_TAG_NAME=i++,IN_CLOSING_TAG_NAME=i++,AFTER_CLOSING_TAG_NAME=i++,BEFORE_ATTRIBUTE_NAME=i++,IN_ATTRIBUTE_NAME=i++,AFTER_ATTRIBUTE_NAME=i++,BEFORE_ATTRIBUTE_VALUE=i++,IN_ATTRIBUTE_VALUE_DQ=i++,IN_ATTRIBUTE_VALUE_SQ=i++,IN_ATTRIBUTE_VALUE_NQ=i++,BEFORE_DECLARATION=i++,IN_DECLARATION=i++,IN_PROCESSING_INSTRUCTION=i++,BEFORE_COMMENT=i++,IN_COMMENT=i++,AFTER_COMMENT_1=i++,AFTER_COMMENT_2=i++,BEFORE_CDATA_1=i++,BEFORE_CDATA_2=i++,BEFORE_CDATA_3=i++,BEFORE_CDATA_4=i++,BEFORE_CDATA_5=i++,BEFORE_CDATA_6=i++,IN_CDATA=i++,AFTER_CDATA_1=i++,AFTER_CDATA_2=i++,BEFORE_SPECIAL=i++,BEFORE_SPECIAL_END=i++,BEFORE_SCRIPT_1=i++,BEFORE_SCRIPT_2=i++,BEFORE_SCRIPT_3=i++,BEFORE_SCRIPT_4=i++,BEFORE_SCRIPT_5=i++,AFTER_SCRIPT_1=i++,AFTER_SCRIPT_2=i++,AFTER_SCRIPT_3=i++,AFTER_SCRIPT_4=i++,AFTER_SCRIPT_5=i++,BEFORE_STYLE_1=i++,BEFORE_STYLE_2=i++,BEFORE_STYLE_3=i++,BEFORE_STYLE_4=i++,AFTER_STYLE_1=i++,AFTER_STYLE_2=i++,AFTER_STYLE_3=i++,AFTER_STYLE_4=i++,BEFORE_ENTITY=i++,BEFORE_NUMERIC_ENTITY=i++,IN_NAMED_ENTITY=i++,IN_NUMERIC_ENTITY=i++,IN_HEX_ENTITY=i++,j=0,SPECIAL_NONE=j++,SPECIAL_SCRIPT=j++,SPECIAL_STYLE=j++;function whitespace(c){return c===" "||c==="\n"||c==="\t"||c==="\f"||c==="\r"}function characterState(char,SUCCESS){return function(c){if(c===char)this._state=SUCCESS}}function ifElseState(upper,SUCCESS,FAILURE){var lower=upper.toLowerCase();if(upper===lower){return function(c){if(c===lower){this._state=SUCCESS}else{this._state=FAILURE;this._index--}}}else{return function(c){if(c===lower||c===upper){this._state=SUCCESS}else{this._state=FAILURE;this._index--}}}}function consumeSpecialNameChar(upper,NEXT_STATE){var lower=upper.toLowerCase();return function(c){if(c===lower||c===upper){this._state=NEXT_STATE}else{this._state=IN_TAG_NAME;this._index--}}}function Tokenizer(options,cbs){this._state=TEXT;this._buffer="";this._sectionStart=0;this._index=0;this._bufferOffset=0;this._baseState=TEXT;this._special=SPECIAL_NONE;this._cbs=cbs;this._running=true;this._ended=false;this._xmlMode=!!(options&&options.xmlMode);this._decodeEntities=!!(options&&options.decodeEntities)}Tokenizer.prototype._stateText=function(c){if(c==="<"){if(this._index>this._sectionStart){this._cbs.ontext(this._getSection())}this._state=BEFORE_TAG_NAME;this._sectionStart=this._index}else if(this._decodeEntities&&this._special===SPECIAL_NONE&&c==="&"){if(this._index>this._sectionStart){this._cbs.ontext(this._getSection())}this._baseState=TEXT;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeTagName=function(c){if(c==="/"){this._state=BEFORE_CLOSING_TAG_NAME}else if(c==="<"){this._cbs.ontext(this._getSection());this._sectionStart=this._index}else if(c===">"||this._special!==SPECIAL_NONE||whitespace(c)){this._state=TEXT}else if(c==="!"){this._state=BEFORE_DECLARATION;this._sectionStart=this._index+1}else 
if(c==="?"){this._state=IN_PROCESSING_INSTRUCTION;this._sectionStart=this._index+1}else{this._state=!this._xmlMode&&(c==="s"||c==="S")?BEFORE_SPECIAL:IN_TAG_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInTagName=function(c){if(c==="/"||c===">"||whitespace(c)){this._emitToken("onopentagname");this._state=BEFORE_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateBeforeCloseingTagName=function(c){if(whitespace(c));else if(c===">"){this._state=TEXT}else if(this._special!==SPECIAL_NONE){if(c==="s"||c==="S"){this._state=BEFORE_SPECIAL_END}else{this._state=TEXT;this._index--}}else{this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInCloseingTagName=function(c){if(c===">"||whitespace(c)){this._emitToken("onclosetag");this._state=AFTER_CLOSING_TAG_NAME;this._index--}};Tokenizer.prototype._stateAfterCloseingTagName=function(c){if(c===">"){this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateBeforeAttributeName=function(c){if(c===">"){this._cbs.onopentagend();this._state=TEXT;this._sectionStart=this._index+1}else if(c==="/"){this._state=IN_SELF_CLOSING_TAG}else if(!whitespace(c)){this._state=IN_ATTRIBUTE_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInSelfClosingTag=function(c){if(c===">"){this._cbs.onselfclosingtag();this._state=TEXT;this._sectionStart=this._index+1}else if(!whitespace(c)){this._state=BEFORE_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateInAttributeName=function(c){if(c==="="||c==="/"||c===">"||whitespace(c)){this._cbs.onattribname(this._getSection());this._sectionStart=-1;this._state=AFTER_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateAfterAttributeName=function(c){if(c==="="){this._state=BEFORE_ATTRIBUTE_VALUE}else if(c==="/"||c===">"){this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME;this._index--}else if(!whitespace(c)){this._cbs.onattribend();this._state=IN_ATTRIBUTE_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeAttributeValue=function(c){if(c==='"'){this._state=IN_ATTRIBUTE_VALUE_DQ;this._sectionStart=this._index+1}else if(c==="'"){this._state=IN_ATTRIBUTE_VALUE_SQ;this._sectionStart=this._index+1}else if(!whitespace(c)){this._state=IN_ATTRIBUTE_VALUE_NQ;this._sectionStart=this._index;this._index--}};Tokenizer.prototype._stateInAttributeValueDoubleQuotes=function(c){if(c==='"'){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME}else if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateInAttributeValueSingleQuotes=function(c){if(c==="'"){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME}else if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateInAttributeValueNoQuotes=function(c){if(whitespace(c)||c===">"){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME;this._index--}else 
if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeDeclaration=function(c){this._state=c==="["?BEFORE_CDATA_1:c==="-"?BEFORE_COMMENT:IN_DECLARATION};Tokenizer.prototype._stateInDeclaration=function(c){if(c===">"){this._cbs.ondeclaration(this._getSection());this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateInProcessingInstruction=function(c){if(c===">"){this._cbs.onprocessinginstruction(this._getSection());this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateBeforeComment=function(c){if(c==="-"){this._state=IN_COMMENT;this._sectionStart=this._index+1}else{this._state=IN_DECLARATION}};Tokenizer.prototype._stateInComment=function(c){if(c==="-")this._state=AFTER_COMMENT_1};Tokenizer.prototype._stateAfterComment1=function(c){if(c==="-"){this._state=AFTER_COMMENT_2}else{this._state=IN_COMMENT}};Tokenizer.prototype._stateAfterComment2=function(c){if(c===">"){this._cbs.oncomment(this._buffer.substring(this._sectionStart,this._index-2));this._state=TEXT;this._sectionStart=this._index+1}else if(c!=="-"){this._state=IN_COMMENT}};Tokenizer.prototype._stateBeforeCdata1=ifElseState("C",BEFORE_CDATA_2,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata2=ifElseState("D",BEFORE_CDATA_3,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata3=ifElseState("A",BEFORE_CDATA_4,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata4=ifElseState("T",BEFORE_CDATA_5,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata5=ifElseState("A",BEFORE_CDATA_6,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata6=function(c){if(c==="["){this._state=IN_CDATA;this._sectionStart=this._index+1}else{this._state=IN_DECLARATION;this._index--}};Tokenizer.prototype._stateInCdata=function(c){if(c==="]")this._state=AFTER_CDATA_1};Tokenizer.prototype._stateAfterCdata1=characterState("]",AFTER_CDATA_2);Tokenizer.prototype._stateAfterCdata2=function(c){if(c===">"){this._cbs.oncdata(this._buffer.substring(this._sectionStart,this._index-2));this._state=TEXT;this._sectionStart=this._index+1}else if(c!=="]"){this._state=IN_CDATA}};Tokenizer.prototype._stateBeforeSpecial=function(c){if(c==="c"||c==="C"){this._state=BEFORE_SCRIPT_1}else if(c==="t"||c==="T"){this._state=BEFORE_STYLE_1}else{this._state=IN_TAG_NAME;this._index--}};Tokenizer.prototype._stateBeforeSpecialEnd=function(c){if(this._special===SPECIAL_SCRIPT&&(c==="c"||c==="C")){this._state=AFTER_SCRIPT_1}else if(this._special===SPECIAL_STYLE&&(c==="t"||c==="T")){this._state=AFTER_STYLE_1}else 
this._state=TEXT};Tokenizer.prototype._stateBeforeScript1=consumeSpecialNameChar("R",BEFORE_SCRIPT_2);Tokenizer.prototype._stateBeforeScript2=consumeSpecialNameChar("I",BEFORE_SCRIPT_3);Tokenizer.prototype._stateBeforeScript3=consumeSpecialNameChar("P",BEFORE_SCRIPT_4);Tokenizer.prototype._stateBeforeScript4=consumeSpecialNameChar("T",BEFORE_SCRIPT_5);Tokenizer.prototype._stateBeforeScript5=function(c){if(c==="/"||c===">"||whitespace(c)){this._special=SPECIAL_SCRIPT}this._state=IN_TAG_NAME;this._index--};Tokenizer.prototype._stateAfterScript1=ifElseState("R",AFTER_SCRIPT_2,TEXT);Tokenizer.prototype._stateAfterScript2=ifElseState("I",AFTER_SCRIPT_3,TEXT);Tokenizer.prototype._stateAfterScript3=ifElseState("P",AFTER_SCRIPT_4,TEXT);Tokenizer.prototype._stateAfterScript4=ifElseState("T",AFTER_SCRIPT_5,TEXT);Tokenizer.prototype._stateAfterScript5=function(c){if(c===">"||whitespace(c)){this._special=SPECIAL_NONE;this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index-6;this._index--}else this._state=TEXT};Tokenizer.prototype._stateBeforeStyle1=consumeSpecialNameChar("Y",BEFORE_STYLE_2);Tokenizer.prototype._stateBeforeStyle2=consumeSpecialNameChar("L",BEFORE_STYLE_3);Tokenizer.prototype._stateBeforeStyle3=consumeSpecialNameChar("E",BEFORE_STYLE_4);Tokenizer.prototype._stateBeforeStyle4=function(c){if(c==="/"||c===">"||whitespace(c)){this._special=SPECIAL_STYLE}this._state=IN_TAG_NAME;this._index--};Tokenizer.prototype._stateAfterStyle1=ifElseState("Y",AFTER_STYLE_2,TEXT);Tokenizer.prototype._stateAfterStyle2=ifElseState("L",AFTER_STYLE_3,TEXT);Tokenizer.prototype._stateAfterStyle3=ifElseState("E",AFTER_STYLE_4,TEXT);Tokenizer.prototype._stateAfterStyle4=function(c){if(c===">"||whitespace(c)){this._special=SPECIAL_NONE;this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index-5;this._index--}else this._state=TEXT};Tokenizer.prototype._stateBeforeEntity=ifElseState("#",BEFORE_NUMERIC_ENTITY,IN_NAMED_ENTITY);Tokenizer.prototype._stateBeforeNumericEntity=ifElseState("X",IN_HEX_ENTITY,IN_NUMERIC_ENTITY);Tokenizer.prototype._parseNamedEntityStrict=function(){if(this._sectionStart+16)limit=6;while(limit>=2){var entity=this._buffer.substr(start,limit);if(legacyMap.hasOwnProperty(entity)){this._emitPartial(legacyMap[entity]);this._sectionStart+=limit+1;return}else{limit--}}};Tokenizer.prototype._stateInNamedEntity=function(c){if(c===";"){this._parseNamedEntityStrict();if(this._sectionStart+1"z")&&(c<"A"||c>"Z")&&(c<"0"||c>"9")){if(this._xmlMode);else if(this._sectionStart+1===this._index);else if(this._baseState!==TEXT){if(c!=="="){this._parseNamedEntityStrict()}}else{this._parseLegacyEntity()}this._state=this._baseState;this._index--}};Tokenizer.prototype._decodeNumericEntity=function(offset,base){var sectionStart=this._sectionStart+offset;if(sectionStart!==this._index){var entity=this._buffer.substring(sectionStart,this._index);var parsed=parseInt(entity,base);this._emitPartial(decodeCodePoint(parsed));this._sectionStart=this._index}else{this._sectionStart--}this._state=this._baseState};Tokenizer.prototype._stateInNumericEntity=function(c){if(c===";"){this._decodeNumericEntity(2,10);this._sectionStart++}else if(c<"0"||c>"9"){if(!this._xmlMode){this._decodeNumericEntity(2,10)}else{this._state=this._baseState}this._index--}};Tokenizer.prototype._stateInHexEntity=function(c){if(c===";"){this._decodeNumericEntity(3,16);this._sectionStart++}else 
if((c<"a"||c>"f")&&(c<"A"||c>"F")&&(c<"0"||c>"9")){if(!this._xmlMode){this._decodeNumericEntity(3,16)}else{this._state=this._baseState}this._index--}};Tokenizer.prototype._cleanup=function(){if(this._sectionStart<0){this._buffer="";this._index=0;this._bufferOffset+=this._index}else if(this._running){if(this._state===TEXT){if(this._sectionStart!==this._index){this._cbs.ontext(this._buffer.substr(this._sectionStart))}this._buffer="";this._bufferOffset+=this._index;this._index=0}else if(this._sectionStart===this._index){this._buffer="";this._bufferOffset+=this._index;this._index=0}else{this._buffer=this._buffer.substr(this._sectionStart);this._index-=this._sectionStart;this._bufferOffset+=this._sectionStart}this._sectionStart=0}};Tokenizer.prototype.write=function(chunk){if(this._ended)this._cbs.onerror(Error(".write() after done!"));this._buffer+=chunk;this._parse()};Tokenizer.prototype._parse=function(){while(this._index>1;var nBits=-7;var i=isLE?nBytes-1:0;var d=isLE?-1:1;var s=buffer[offset+i];i+=d;e=s&(1<<-nBits)-1;s>>=-nBits;nBits+=eLen;for(;nBits>0;e=e*256+buffer[offset+i],i+=d,nBits-=8){}m=e&(1<<-nBits)-1;e>>=-nBits;nBits+=mLen;for(;nBits>0;m=m*256+buffer[offset+i],i+=d,nBits-=8){}if(e===0){e=1-eBias}else if(e===eMax){return m?NaN:(s?-1:1)*Infinity}else{m=m+Math.pow(2,mLen);e=e-eBias}return(s?-1:1)*m*Math.pow(2,e-mLen)};exports.write=function(buffer,value,offset,isLE,mLen,nBytes){var e,m,c;var eLen=nBytes*8-mLen-1;var eMax=(1<>1;var rt=mLen===23?Math.pow(2,-24)-Math.pow(2,-77):0;var i=isLE?0:nBytes-1;var d=isLE?1:-1;var s=value<0||value===0&&1/value<0?1:0;value=Math.abs(value);if(isNaN(value)||value===Infinity){m=isNaN(value)?1:0;e=eMax}else{e=Math.floor(Math.log(value)/Math.LN2);if(value*(c=Math.pow(2,-e))<1){e--;c*=2}if(e+eBias>=1){value+=rt/c}else{value+=rt*Math.pow(2,1-eBias)}if(value*c>=2){e++;c/=2}if(e+eBias>=eMax){m=0;e=eMax}else if(e+eBias>=1){m=(value*c-1)*Math.pow(2,mLen);e=e+eBias}else{m=value*Math.pow(2,eBias-1)*Math.pow(2,mLen);e=0}}for(;mLen>=8;buffer[offset+i]=m&255,i+=d,m/=256,mLen-=8){}e=e<0;buffer[offset+i]=e&255,i+=d,e/=256,eLen-=8){}buffer[offset+i-d]|=s*128}},{}],38:[function(require,module,exports){if(typeof Object.create==="function"){module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;ctor.prototype=Object.create(superCtor.prototype,{constructor:{value:ctor,enumerable:false,writable:true,configurable:true}})}}else{module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;var TempCtor=function(){};TempCtor.prototype=superCtor.prototype;ctor.prototype=new TempCtor;ctor.prototype.constructor=ctor}}},{}],39:[function(require,module,exports){module.exports=function(obj){return obj!=null&&(isBuffer(obj)||isSlowBuffer(obj)||!!obj._isBuffer)};function isBuffer(obj){return!!obj.constructor&&typeof obj.constructor.isBuffer==="function"&&obj.constructor.isBuffer(obj)}function isSlowBuffer(obj){return typeof obj.readFloatLE==="function"&&typeof obj.slice==="function"&&isBuffer(obj.slice(0,0))}},{}],40:[function(require,module,exports){var toString={}.toString;module.exports=Array.isArray||function(arr){return toString.call(arr)=="[object Array]"}},{}],41:[function(require,module,exports){(function(process){"use strict";if(!process.version||process.version.indexOf("v0.")===0||process.version.indexOf("v1.")===0&&process.version.indexOf("v1.8.")!==0){module.exports=nextTick}else{module.exports=process.nextTick}function nextTick(fn,arg1,arg2,arg3){if(typeof fn!=="function"){throw new TypeError('"callback" argument must be a function')}var 
len=arguments.length;var args,i;switch(len){case 0:case 1:return process.nextTick(fn);case 2:return process.nextTick(function afterTickOne(){fn.call(null,arg1)});case 3:return process.nextTick(function afterTickTwo(){fn.call(null,arg1,arg2)});case 4:return process.nextTick(function afterTickThree(){fn.call(null,arg1,arg2,arg3)});default:args=new Array(len-1);i=0;while(i1){for(var i=1;i0){if(state.ended&&!addToFront){var e=new Error("stream.push() after EOF");stream.emit("error",e)}else if(state.endEmitted&&addToFront){var _e=new Error("stream.unshift() after end event");stream.emit("error",_e)}else{var skipAdd;if(state.decoder&&!addToFront&&!encoding){chunk=state.decoder.write(chunk);skipAdd=!state.objectMode&&chunk.length===0}if(!addToFront)state.reading=false;if(!skipAdd){if(state.flowing&&state.length===0&&!state.sync){stream.emit("data",chunk);stream.read(0)}else{state.length+=state.objectMode?1:chunk.length;if(addToFront)state.buffer.unshift(chunk);else state.buffer.push(chunk);if(state.needReadable)emitReadable(stream)}}maybeReadMore(stream,state)}}else if(!addToFront){state.reading=false}return needMoreData(state)}function needMoreData(state){return!state.ended&&(state.needReadable||state.length=MAX_HWM){n=MAX_HWM}else{n--;n|=n>>>1;n|=n>>>2;n|=n>>>4;n|=n>>>8;n|=n>>>16;n++}return n}function howMuchToRead(n,state){if(n<=0||state.length===0&&state.ended)return 0;if(state.objectMode)return 1;if(n!==n){if(state.flowing&&state.length)return state.buffer.head.data.length;else return state.length}if(n>state.highWaterMark)state.highWaterMark=computeNewHighWaterMark(n);if(n<=state.length)return n;if(!state.ended){state.needReadable=true;return 0}return state.length}Readable.prototype.read=function(n){debug("read",n);n=parseInt(n,10);var state=this._readableState;var nOrig=n;if(n!==0)state.emittedReadable=false;if(n===0&&state.needReadable&&(state.length>=state.highWaterMark||state.ended)){debug("read: emitReadable",state.length,state.ended);if(state.length===0&&state.ended)endReadable(this);else emitReadable(this);return null}n=howMuchToRead(n,state);if(n===0&&state.ended){if(state.length===0)endReadable(this);return null}var doRead=state.needReadable;debug("need readable",doRead);if(state.length===0||state.length-n0)ret=fromList(n,state);else ret=null;if(ret===null){state.needReadable=true;n=0}else{state.length-=n}if(state.length===0){if(!state.ended)state.needReadable=true;if(nOrig!==n&&state.ended)endReadable(this)}if(ret!==null)this.emit("data",ret);return ret};function chunkInvalid(state,chunk){var er=null;if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==null&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}return er}function onEofChunk(stream,state){if(state.ended)return;if(state.decoder){var chunk=state.decoder.end();if(chunk&&chunk.length){state.buffer.push(chunk);state.length+=state.objectMode?1:chunk.length}}state.ended=true;emitReadable(stream)}function emitReadable(stream){var state=stream._readableState;state.needReadable=false;if(!state.emittedReadable){debug("emitReadable",state.flowing);state.emittedReadable=true;if(state.sync)processNextTick(emitReadable_,stream);else emitReadable_(stream)}}function emitReadable_(stream){debug("emit readable");stream.emit("readable");flow(stream)}function maybeReadMore(stream,state){if(!state.readingMore){state.readingMore=true;processNextTick(maybeReadMore_,stream,state)}}function maybeReadMore_(stream,state){var 
len=state.length;while(!state.reading&&!state.flowing&&!state.ended&&state.length1&&indexOf(state.pipes,dest)!==-1)&&!cleanedUp){debug("false write response, pause",src._readableState.awaitDrain);src._readableState.awaitDrain++;increasedAwaitDrain=true}src.pause()}}function onerror(er){debug("onerror",er);unpipe();dest.removeListener("error",onerror);if(EElistenerCount(dest,"error")===0)dest.emit("error",er)}prependListener(dest,"error",onerror);function onclose(){dest.removeListener("finish",onfinish);unpipe()}dest.once("close",onclose);function onfinish(){debug("onfinish");dest.removeListener("close",onclose);unpipe()}dest.once("finish",onfinish);function unpipe(){debug("unpipe");src.unpipe(dest)}dest.emit("pipe",src);if(!state.flowing){debug("pipe resume");src.resume()}return dest};function pipeOnDrain(src){return function(){var state=src._readableState;debug("pipeOnDrain",state.awaitDrain);if(state.awaitDrain)state.awaitDrain--;if(state.awaitDrain===0&&EElistenerCount(src,"data")){state.flowing=true;flow(src)}}}Readable.prototype.unpipe=function(dest){var state=this._readableState;if(state.pipesCount===0)return this;if(state.pipesCount===1){if(dest&&dest!==state.pipes)return this;if(!dest)dest=state.pipes;state.pipes=null;state.pipesCount=0;state.flowing=false;if(dest)dest.emit("unpipe",this);return this}if(!dest){var dests=state.pipes;var len=state.pipesCount;state.pipes=null;state.pipesCount=0;state.flowing=false;for(var _i=0;_i=state.length){if(state.decoder)ret=state.buffer.join("");else if(state.buffer.length===1)ret=state.buffer.head.data;else ret=state.buffer.concat(state.length);state.buffer.clear()}else{ret=fromListPartial(n,state.buffer,state.decoder)}return ret}function fromListPartial(n,list,hasStrings){var ret;if(nstr.length?str.length:n;if(nb===str.length)ret+=str;else ret+=str.slice(0,n);n-=nb;if(n===0){if(nb===str.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=str.slice(nb)}break}++c}list.length-=c;return ret}function copyFromBuffer(n,list){var ret=bufferShim.allocUnsafe(n);var p=list.head;var c=1;p.data.copy(ret);n-=p.data.length;while(p=p.next){var buf=p.data;var nb=n>buf.length?buf.length:n;buf.copy(ret,ret.length-n,0,nb);n-=nb;if(n===0){if(nb===buf.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=buf.slice(nb)}break}++c}list.length-=c;return ret}function endReadable(stream){var state=stream._readableState;if(state.length>0)throw new Error('"endReadable()" called on non-empty stream');if(!state.endEmitted){state.ended=true;processNextTick(endReadableNT,state,stream)}}function endReadableNT(state,stream){if(!state.endEmitted&&state.length===0){state.endEmitted=true;stream.readable=false;stream.emit("end")}}function forEach(xs,f){for(var i=0,l=xs.length;i-1?setImmediate:processNextTick;Writable.WritableState=WritableState;var util=require("core-util-is");util.inherits=require("inherits");var internalUtil={deprecate:require("util-deprecate")};var Stream;(function(){try{Stream=require("st"+"ream")}catch(_){}finally{if(!Stream)Stream=require("events").EventEmitter}})();var Buffer=require("buffer").Buffer;var bufferShim=require("buffer-shims");util.inherits(Writable,Stream);function nop(){}function WriteReq(chunk,encoding,cb){this.chunk=chunk;this.encoding=encoding;this.callback=cb;this.next=null}var Duplex;function WritableState(options,stream){Duplex=Duplex||require("./_stream_duplex");options=options||{};this.objectMode=!!options.objectMode;if(stream instanceof 
Duplex)this.objectMode=this.objectMode||!!options.writableObjectMode;var hwm=options.highWaterMark;var defaultHwm=this.objectMode?16:16*1024;this.highWaterMark=hwm||hwm===0?hwm:defaultHwm;this.highWaterMark=~~this.highWaterMark;this.needDrain=false;this.ending=false;this.ended=false;this.finished=false;var noDecode=options.decodeStrings===false;this.decodeStrings=!noDecode;this.defaultEncoding=options.defaultEncoding||"utf8";this.length=0;this.writing=false;this.corked=0;this.sync=true;this.bufferProcessing=false;this.onwrite=function(er){onwrite(stream,er)};this.writecb=null;this.writelen=0;this.bufferedRequest=null;this.lastBufferedRequest=null;this.pendingcb=0;this.prefinished=false;this.errorEmitted=false;this.bufferedRequestCount=0;this.corkedRequestsFree=new CorkedRequest(this)}WritableState.prototype.getBuffer=function writableStateGetBuffer(){var current=this.bufferedRequest;var out=[];while(current){out.push(current);current=current.next}return out};(function(){try{Object.defineProperty(WritableState.prototype,"buffer",{get:internalUtil.deprecate(function(){return this.getBuffer()},"_writableState.buffer is deprecated. Use _writableState.getBuffer "+"instead.")})}catch(_){}})();var Duplex;function Writable(options){Duplex=Duplex||require("./_stream_duplex");if(!(this instanceof Writable)&&!(this instanceof Duplex))return new Writable(options);this._writableState=new WritableState(options,this);this.writable=true;if(options){if(typeof options.write==="function")this._write=options.write;if(typeof options.writev==="function")this._writev=options.writev}Stream.call(this)}Writable.prototype.pipe=function(){this.emit("error",new Error("Cannot pipe, not readable"))};function writeAfterEnd(stream,cb){var er=new Error("write after end");stream.emit("error",er);processNextTick(cb,er)}function validChunk(stream,state,chunk,cb){var valid=true;var er=false;if(chunk===null){er=new TypeError("May not write null values to stream")}else if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}if(er){stream.emit("error",er);processNextTick(cb,er);valid=false}return valid}Writable.prototype.write=function(chunk,encoding,cb){var state=this._writableState;var ret=false;if(typeof encoding==="function"){cb=encoding;encoding=null}if(Buffer.isBuffer(chunk))encoding="buffer";else if(!encoding)encoding=state.defaultEncoding;if(typeof cb!=="function")cb=nop;if(state.ended)writeAfterEnd(this,cb);else if(validChunk(this,state,chunk,cb)){ +}if(this._events.removeListener)this.emit("removeListener",type,listener)}return this};EventEmitter.prototype.removeAllListeners=function(type){var key,listeners;if(!this._events)return this;if(!this._events.removeListener){if(arguments.length===0)this._events={};else if(this._events[type])delete this._events[type];return this}if(arguments.length===0){for(key in this._events){if(key==="removeListener")continue;this.removeAllListeners(key)}this.removeAllListeners("removeListener");this._events={};return this}listeners=this._events[type];if(isFunction(listeners)){this.removeListener(type,listeners)}else if(listeners){while(listeners.length)this.removeListener(type,listeners[listeners.length-1])}delete this._events[type];return this};EventEmitter.prototype.listeners=function(type){var ret;if(!this._events||!this._events[type])ret=[];else if(isFunction(this._events[type]))ret=[this._events[type]];else ret=this._events[type].slice();return 
ret};EventEmitter.prototype.listenerCount=function(type){if(this._events){var evlistener=this._events[type];if(isFunction(evlistener))return 1;else if(evlistener)return evlistener.length}return 0};EventEmitter.listenerCount=function(emitter,type){return emitter.listenerCount(type)};function isFunction(arg){return typeof arg==="function"}function isNumber(arg){return typeof arg==="number"}function isObject(arg){return typeof arg==="object"&&arg!==null}function isUndefined(arg){return arg===void 0}},{}],29:[function(require,module,exports){module.exports=CollectingHandler;function CollectingHandler(cbs){this._cbs=cbs||{};this.events=[]}var EVENTS=require("./").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){name="on"+name;CollectingHandler.prototype[name]=function(){this.events.push([name]);if(this._cbs[name])this._cbs[name]()}}else if(EVENTS[name]===1){name="on"+name;CollectingHandler.prototype[name]=function(a){this.events.push([name,a]);if(this._cbs[name])this._cbs[name](a)}}else if(EVENTS[name]===2){name="on"+name;CollectingHandler.prototype[name]=function(a, b){this.events.push([name,a,b]);if(this._cbs[name])this._cbs[name](a,b)}}else{throw Error("wrong number of arguments")}});CollectingHandler.prototype.onreset=function(){this.events=[];if(this._cbs.onreset)this._cbs.onreset()};CollectingHandler.prototype.restart=function(){if(this._cbs.onreset)this._cbs.onreset();for(var i=0,len=this.events.length;i0;this._cbs.onclosetag(this._stack[--i]));}if(this._cbs.onend)this._cbs.onend()};Parser.prototype.reset=function(){if(this._cbs.onreset)this._cbs.onreset();this._tokenizer.reset();this._tagname="";this._attribname="";this._attribs=null;this._stack=[];if(this._cbs.onparserinit)this._cbs.onparserinit(this)};Parser.prototype.parseComplete=function(data){this.reset();this.end(data)};Parser.prototype.write=function(chunk){this._tokenizer.write(chunk)};Parser.prototype.end=function(chunk){this._tokenizer.end(chunk)};Parser.prototype.pause=function(){this._tokenizer.pause()};Parser.prototype.resume=function(){this._tokenizer.resume()};Parser.prototype.parseChunk=Parser.prototype.write;Parser.prototype.done=Parser.prototype.end;module.exports=Parser},{"./Tokenizer.js":34,events:28,inherits:38}],32:[function(require, module, exports){module.exports=ProxyHandler;function ProxyHandler(cbs){this._cbs=cbs||{}}var EVENTS=require("./").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){name="on"+name;ProxyHandler.prototype[name]=function(){if(this._cbs[name])this._cbs[name]()}}else if(EVENTS[name]===1){name="on"+name;ProxyHandler.prototype[name]=function(a){if(this._cbs[name])this._cbs[name](a)}}else if(EVENTS[name]===2){name="on"+name;ProxyHandler.prototype[name]=function(a, b){if(this._cbs[name])this._cbs[name](a,b)}}else{throw Error("wrong number of arguments")}})},{"./":36}],33:[function(require, module, exports){module.exports=Stream;var Parser=require("./WritableStream.js");function Stream(options){Parser.call(this,new Cbs(this),options)}require("inherits")(Stream,Parser);Stream.prototype.readable=true;function Cbs(scope){this.scope=scope}var EVENTS=require("./.").EVENTS;Object.keys(EVENTS).forEach(function(name){if(EVENTS[name]===0){Cbs.prototype["on"+name]=function(){this.scope.emit(name)}}else if(EVENTS[name]===1){Cbs.prototype["on"+name]=function(a){this.scope.emit(name,a)}}else if(EVENTS[name]===2){Cbs.prototype["on"+name]=function(a, b){this.scope.emit(name,a,b)}}else{throw Error("wrong number of 
arguments!")}})},{"../":36,"./WritableStream.js":35,inherits:38}],34:[function(require, module, exports){module.exports=Tokenizer;var decodeCodePoint=require("entities/lib/decode_codepoint.js"),entityMap=require("entities/maps/entities.json"),legacyMap=require("entities/maps/legacy.json"),xmlMap=require("entities/maps/xml.json"),i=0,TEXT=i++,BEFORE_TAG_NAME=i++,IN_TAG_NAME=i++,IN_SELF_CLOSING_TAG=i++,BEFORE_CLOSING_TAG_NAME=i++,IN_CLOSING_TAG_NAME=i++,AFTER_CLOSING_TAG_NAME=i++,BEFORE_ATTRIBUTE_NAME=i++,IN_ATTRIBUTE_NAME=i++,AFTER_ATTRIBUTE_NAME=i++,BEFORE_ATTRIBUTE_VALUE=i++,IN_ATTRIBUTE_VALUE_DQ=i++,IN_ATTRIBUTE_VALUE_SQ=i++,IN_ATTRIBUTE_VALUE_NQ=i++,BEFORE_DECLARATION=i++,IN_DECLARATION=i++,IN_PROCESSING_INSTRUCTION=i++,BEFORE_COMMENT=i++,IN_COMMENT=i++,AFTER_COMMENT_1=i++,AFTER_COMMENT_2=i++,BEFORE_CDATA_1=i++,BEFORE_CDATA_2=i++,BEFORE_CDATA_3=i++,BEFORE_CDATA_4=i++,BEFORE_CDATA_5=i++,BEFORE_CDATA_6=i++,IN_CDATA=i++,AFTER_CDATA_1=i++,AFTER_CDATA_2=i++,BEFORE_SPECIAL=i++,BEFORE_SPECIAL_END=i++,BEFORE_SCRIPT_1=i++,BEFORE_SCRIPT_2=i++,BEFORE_SCRIPT_3=i++,BEFORE_SCRIPT_4=i++,BEFORE_SCRIPT_5=i++,AFTER_SCRIPT_1=i++,AFTER_SCRIPT_2=i++,AFTER_SCRIPT_3=i++,AFTER_SCRIPT_4=i++,AFTER_SCRIPT_5=i++,BEFORE_STYLE_1=i++,BEFORE_STYLE_2=i++,BEFORE_STYLE_3=i++,BEFORE_STYLE_4=i++,AFTER_STYLE_1=i++,AFTER_STYLE_2=i++,AFTER_STYLE_3=i++,AFTER_STYLE_4=i++,BEFORE_ENTITY=i++,BEFORE_NUMERIC_ENTITY=i++,IN_NAMED_ENTITY=i++,IN_NUMERIC_ENTITY=i++,IN_HEX_ENTITY=i++,j=0,SPECIAL_NONE=j++,SPECIAL_SCRIPT=j++,SPECIAL_STYLE=j++;function whitespace(c){return c===" "||c==="\n"||c==="\t"||c==="\f"||c==="\r"}function characterState(char,SUCCESS){return function(c){if(c===char)this._state=SUCCESS}}function ifElseState(upper,SUCCESS,FAILURE){var lower=upper.toLowerCase();if(upper===lower){return function(c){if(c===lower){this._state=SUCCESS}else{this._state=FAILURE;this._index--}}}else{return function(c){if(c===lower||c===upper){this._state=SUCCESS}else{this._state=FAILURE;this._index--}}}}function consumeSpecialNameChar(upper,NEXT_STATE){var lower=upper.toLowerCase();return function(c){if(c===lower||c===upper){this._state=NEXT_STATE}else{this._state=IN_TAG_NAME;this._index--}}}function Tokenizer(options,cbs){this._state=TEXT;this._buffer="";this._sectionStart=0;this._index=0;this._bufferOffset=0;this._baseState=TEXT;this._special=SPECIAL_NONE;this._cbs=cbs;this._running=true;this._ended=false;this._xmlMode=!!(options&&options.xmlMode);this._decodeEntities=!!(options&&options.decodeEntities)}Tokenizer.prototype._stateText=function(c){if(c==="<"){if(this._index>this._sectionStart){this._cbs.ontext(this._getSection())}this._state=BEFORE_TAG_NAME;this._sectionStart=this._index}else if(this._decodeEntities&&this._special===SPECIAL_NONE&&c==="&"){if(this._index>this._sectionStart){this._cbs.ontext(this._getSection())}this._baseState=TEXT;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeTagName=function(c){if(c==="/"){this._state=BEFORE_CLOSING_TAG_NAME}else if(c==="<"){this._cbs.ontext(this._getSection());this._sectionStart=this._index}else if(c===">"||this._special!==SPECIAL_NONE||whitespace(c)){this._state=TEXT}else if(c==="!"){this._state=BEFORE_DECLARATION;this._sectionStart=this._index+1}else 
if(c==="?"){this._state=IN_PROCESSING_INSTRUCTION;this._sectionStart=this._index+1}else{this._state=!this._xmlMode&&(c==="s"||c==="S")?BEFORE_SPECIAL:IN_TAG_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInTagName=function(c){if(c==="/"||c===">"||whitespace(c)){this._emitToken("onopentagname");this._state=BEFORE_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateBeforeCloseingTagName=function(c){if(whitespace(c));else if(c===">"){this._state=TEXT}else if(this._special!==SPECIAL_NONE){if(c==="s"||c==="S"){this._state=BEFORE_SPECIAL_END}else{this._state=TEXT;this._index--}}else{this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInCloseingTagName=function(c){if(c===">"||whitespace(c)){this._emitToken("onclosetag");this._state=AFTER_CLOSING_TAG_NAME;this._index--}};Tokenizer.prototype._stateAfterCloseingTagName=function(c){if(c===">"){this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateBeforeAttributeName=function(c){if(c===">"){this._cbs.onopentagend();this._state=TEXT;this._sectionStart=this._index+1}else if(c==="/"){this._state=IN_SELF_CLOSING_TAG}else if(!whitespace(c)){this._state=IN_ATTRIBUTE_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateInSelfClosingTag=function(c){if(c===">"){this._cbs.onselfclosingtag();this._state=TEXT;this._sectionStart=this._index+1}else if(!whitespace(c)){this._state=BEFORE_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateInAttributeName=function(c){if(c==="="||c==="/"||c===">"||whitespace(c)){this._cbs.onattribname(this._getSection());this._sectionStart=-1;this._state=AFTER_ATTRIBUTE_NAME;this._index--}};Tokenizer.prototype._stateAfterAttributeName=function(c){if(c==="="){this._state=BEFORE_ATTRIBUTE_VALUE}else if(c==="/"||c===">"){this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME;this._index--}else if(!whitespace(c)){this._cbs.onattribend();this._state=IN_ATTRIBUTE_NAME;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeAttributeValue=function(c){if(c==='"'){this._state=IN_ATTRIBUTE_VALUE_DQ;this._sectionStart=this._index+1}else if(c==="'"){this._state=IN_ATTRIBUTE_VALUE_SQ;this._sectionStart=this._index+1}else if(!whitespace(c)){this._state=IN_ATTRIBUTE_VALUE_NQ;this._sectionStart=this._index;this._index--}};Tokenizer.prototype._stateInAttributeValueDoubleQuotes=function(c){if(c==='"'){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME}else if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateInAttributeValueSingleQuotes=function(c){if(c==="'"){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME}else if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateInAttributeValueNoQuotes=function(c){if(whitespace(c)||c===">"){this._emitToken("onattribdata");this._cbs.onattribend();this._state=BEFORE_ATTRIBUTE_NAME;this._index--}else 
if(this._decodeEntities&&c==="&"){this._emitToken("onattribdata");this._baseState=this._state;this._state=BEFORE_ENTITY;this._sectionStart=this._index}};Tokenizer.prototype._stateBeforeDeclaration=function(c){this._state=c==="["?BEFORE_CDATA_1:c==="-"?BEFORE_COMMENT:IN_DECLARATION};Tokenizer.prototype._stateInDeclaration=function(c){if(c===">"){this._cbs.ondeclaration(this._getSection());this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateInProcessingInstruction=function(c){if(c===">"){this._cbs.onprocessinginstruction(this._getSection());this._state=TEXT;this._sectionStart=this._index+1}};Tokenizer.prototype._stateBeforeComment=function(c){if(c==="-"){this._state=IN_COMMENT;this._sectionStart=this._index+1}else{this._state=IN_DECLARATION}};Tokenizer.prototype._stateInComment=function(c){if(c==="-")this._state=AFTER_COMMENT_1};Tokenizer.prototype._stateAfterComment1=function(c){if(c==="-"){this._state=AFTER_COMMENT_2}else{this._state=IN_COMMENT}};Tokenizer.prototype._stateAfterComment2=function(c){if(c===">"){this._cbs.oncomment(this._buffer.substring(this._sectionStart,this._index-2));this._state=TEXT;this._sectionStart=this._index+1}else if(c!=="-"){this._state=IN_COMMENT}};Tokenizer.prototype._stateBeforeCdata1=ifElseState("C",BEFORE_CDATA_2,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata2=ifElseState("D",BEFORE_CDATA_3,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata3=ifElseState("A",BEFORE_CDATA_4,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata4=ifElseState("T",BEFORE_CDATA_5,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata5=ifElseState("A",BEFORE_CDATA_6,IN_DECLARATION);Tokenizer.prototype._stateBeforeCdata6=function(c){if(c==="["){this._state=IN_CDATA;this._sectionStart=this._index+1}else{this._state=IN_DECLARATION;this._index--}};Tokenizer.prototype._stateInCdata=function(c){if(c==="]")this._state=AFTER_CDATA_1};Tokenizer.prototype._stateAfterCdata1=characterState("]",AFTER_CDATA_2);Tokenizer.prototype._stateAfterCdata2=function(c){if(c===">"){this._cbs.oncdata(this._buffer.substring(this._sectionStart,this._index-2));this._state=TEXT;this._sectionStart=this._index+1}else if(c!=="]"){this._state=IN_CDATA}};Tokenizer.prototype._stateBeforeSpecial=function(c){if(c==="c"||c==="C"){this._state=BEFORE_SCRIPT_1}else if(c==="t"||c==="T"){this._state=BEFORE_STYLE_1}else{this._state=IN_TAG_NAME;this._index--}};Tokenizer.prototype._stateBeforeSpecialEnd=function(c){if(this._special===SPECIAL_SCRIPT&&(c==="c"||c==="C")){this._state=AFTER_SCRIPT_1}else if(this._special===SPECIAL_STYLE&&(c==="t"||c==="T")){this._state=AFTER_STYLE_1}else 
this._state=TEXT};Tokenizer.prototype._stateBeforeScript1=consumeSpecialNameChar("R",BEFORE_SCRIPT_2);Tokenizer.prototype._stateBeforeScript2=consumeSpecialNameChar("I",BEFORE_SCRIPT_3);Tokenizer.prototype._stateBeforeScript3=consumeSpecialNameChar("P",BEFORE_SCRIPT_4);Tokenizer.prototype._stateBeforeScript4=consumeSpecialNameChar("T",BEFORE_SCRIPT_5);Tokenizer.prototype._stateBeforeScript5=function(c){if(c==="/"||c===">"||whitespace(c)){this._special=SPECIAL_SCRIPT}this._state=IN_TAG_NAME;this._index--};Tokenizer.prototype._stateAfterScript1=ifElseState("R",AFTER_SCRIPT_2,TEXT);Tokenizer.prototype._stateAfterScript2=ifElseState("I",AFTER_SCRIPT_3,TEXT);Tokenizer.prototype._stateAfterScript3=ifElseState("P",AFTER_SCRIPT_4,TEXT);Tokenizer.prototype._stateAfterScript4=ifElseState("T",AFTER_SCRIPT_5,TEXT);Tokenizer.prototype._stateAfterScript5=function(c){if(c===">"||whitespace(c)){this._special=SPECIAL_NONE;this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index-6;this._index--}else this._state=TEXT};Tokenizer.prototype._stateBeforeStyle1=consumeSpecialNameChar("Y",BEFORE_STYLE_2);Tokenizer.prototype._stateBeforeStyle2=consumeSpecialNameChar("L",BEFORE_STYLE_3);Tokenizer.prototype._stateBeforeStyle3=consumeSpecialNameChar("E",BEFORE_STYLE_4);Tokenizer.prototype._stateBeforeStyle4=function(c){if(c==="/"||c===">"||whitespace(c)){this._special=SPECIAL_STYLE}this._state=IN_TAG_NAME;this._index--};Tokenizer.prototype._stateAfterStyle1=ifElseState("Y",AFTER_STYLE_2,TEXT);Tokenizer.prototype._stateAfterStyle2=ifElseState("L",AFTER_STYLE_3,TEXT);Tokenizer.prototype._stateAfterStyle3=ifElseState("E",AFTER_STYLE_4,TEXT);Tokenizer.prototype._stateAfterStyle4=function(c){if(c===">"||whitespace(c)){this._special=SPECIAL_NONE;this._state=IN_CLOSING_TAG_NAME;this._sectionStart=this._index-5;this._index--}else this._state=TEXT};Tokenizer.prototype._stateBeforeEntity=ifElseState("#",BEFORE_NUMERIC_ENTITY,IN_NAMED_ENTITY);Tokenizer.prototype._stateBeforeNumericEntity=ifElseState("X",IN_HEX_ENTITY,IN_NUMERIC_ENTITY);Tokenizer.prototype._parseNamedEntityStrict=function(){if(this._sectionStart+16)limit=6;while(limit>=2){var entity=this._buffer.substr(start,limit);if(legacyMap.hasOwnProperty(entity)){this._emitPartial(legacyMap[entity]);this._sectionStart+=limit+1;return}else{limit--}}};Tokenizer.prototype._stateInNamedEntity=function(c){if(c===";"){this._parseNamedEntityStrict();if(this._sectionStart+1"z")&&(c<"A"||c>"Z")&&(c<"0"||c>"9")){if(this._xmlMode);else if(this._sectionStart+1===this._index);else if(this._baseState!==TEXT){if(c!=="="){this._parseNamedEntityStrict()}}else{this._parseLegacyEntity()}this._state=this._baseState;this._index--}};Tokenizer.prototype._decodeNumericEntity=function(offset,base){var sectionStart=this._sectionStart+offset;if(sectionStart!==this._index){var entity=this._buffer.substring(sectionStart,this._index);var parsed=parseInt(entity,base);this._emitPartial(decodeCodePoint(parsed));this._sectionStart=this._index}else{this._sectionStart--}this._state=this._baseState};Tokenizer.prototype._stateInNumericEntity=function(c){if(c===";"){this._decodeNumericEntity(2,10);this._sectionStart++}else if(c<"0"||c>"9"){if(!this._xmlMode){this._decodeNumericEntity(2,10)}else{this._state=this._baseState}this._index--}};Tokenizer.prototype._stateInHexEntity=function(c){if(c===";"){this._decodeNumericEntity(3,16);this._sectionStart++}else 
if((c<"a"||c>"f")&&(c<"A"||c>"F")&&(c<"0"||c>"9")){if(!this._xmlMode){this._decodeNumericEntity(3,16)}else{this._state=this._baseState}this._index--}};Tokenizer.prototype._cleanup=function(){if(this._sectionStart<0){this._buffer="";this._index=0;this._bufferOffset+=this._index}else if(this._running){if(this._state===TEXT){if(this._sectionStart!==this._index){this._cbs.ontext(this._buffer.substr(this._sectionStart))}this._buffer="";this._bufferOffset+=this._index;this._index=0}else if(this._sectionStart===this._index){this._buffer="";this._bufferOffset+=this._index;this._index=0}else{this._buffer=this._buffer.substr(this._sectionStart);this._index-=this._sectionStart;this._bufferOffset+=this._sectionStart}this._sectionStart=0}};Tokenizer.prototype.write=function(chunk){if(this._ended)this._cbs.onerror(Error(".write() after done!"));this._buffer+=chunk;this._parse()};Tokenizer.prototype._parse=function(){while(this._index>1;var nBits=-7;var i=isLE?nBytes-1:0;var d=isLE?-1:1;var s=buffer[offset+i];i+=d;e=s&(1<<-nBits)-1;s>>=-nBits;nBits+=eLen;for(;nBits>0;e=e*256+buffer[offset+i],i+=d,nBits-=8){}m=e&(1<<-nBits)-1;e>>=-nBits;nBits+=mLen;for(;nBits>0;m=m*256+buffer[offset+i],i+=d,nBits-=8){}if(e===0){e=1-eBias}else if(e===eMax){return m?NaN:(s?-1:1)*Infinity}else{m=m+Math.pow(2,mLen);e=e-eBias}return(s?-1:1)*m*Math.pow(2,e-mLen)};exports.write=function(buffer,value,offset,isLE,mLen,nBytes){var e,m,c;var eLen=nBytes*8-mLen-1;var eMax=(1<>1;var rt=mLen===23?Math.pow(2,-24)-Math.pow(2,-77):0;var i=isLE?0:nBytes-1;var d=isLE?1:-1;var s=value<0||value===0&&1/value<0?1:0;value=Math.abs(value);if(isNaN(value)||value===Infinity){m=isNaN(value)?1:0;e=eMax}else{e=Math.floor(Math.log(value)/Math.LN2);if(value*(c=Math.pow(2,-e))<1){e--;c*=2}if(e+eBias>=1){value+=rt/c}else{value+=rt*Math.pow(2,1-eBias)}if(value*c>=2){e++;c/=2}if(e+eBias>=eMax){m=0;e=eMax}else if(e+eBias>=1){m=(value*c-1)*Math.pow(2,mLen);e=e+eBias}else{m=value*Math.pow(2,eBias-1)*Math.pow(2,mLen);e=0}}for(;mLen>=8;buffer[offset+i]=m&255,i+=d,m/=256,mLen-=8){}e=e<0;buffer[offset+i]=e&255,i+=d,e/=256,eLen-=8){}buffer[offset+i-d]|=s*128}},{}],38:[function(require,module,exports){if(typeof Object.create==="function"){module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;ctor.prototype=Object.create(superCtor.prototype,{constructor:{value:ctor,enumerable:false,writable:true,configurable:true}})}}else{module.exports=function inherits(ctor,superCtor){ctor.super_=superCtor;var TempCtor=function(){};TempCtor.prototype=superCtor.prototype;ctor.prototype=new TempCtor;ctor.prototype.constructor=ctor}}},{}],39:[function(require,module,exports){module.exports=function(obj){return obj!=null&&(isBuffer(obj)||isSlowBuffer(obj)||!!obj._isBuffer)};function isBuffer(obj){return!!obj.constructor&&typeof obj.constructor.isBuffer==="function"&&obj.constructor.isBuffer(obj)}function isSlowBuffer(obj){return typeof obj.readFloatLE==="function"&&typeof obj.slice==="function"&&isBuffer(obj.slice(0,0))}},{}],40:[function(require,module,exports){var toString={}.toString;module.exports=Array.isArray||function(arr){return toString.call(arr)=="[object Array]"}},{}],41:[function(require,module,exports){(function(process){"use strict";if(!process.version||process.version.indexOf("v0.")===0||process.version.indexOf("v1.")===0&&process.version.indexOf("v1.8.")!==0){module.exports=nextTick}else{module.exports=process.nextTick}function nextTick(fn,arg1,arg2,arg3){if(typeof fn!=="function"){throw new TypeError('"callback" argument must be a function')}var 
len=arguments.length;var args,i;switch(len){case 0:case 1:return process.nextTick(fn);case 2:return process.nextTick(function afterTickOne(){fn.call(null,arg1)});case 3:return process.nextTick(function afterTickTwo(){fn.call(null,arg1,arg2)});case 4:return process.nextTick(function afterTickThree(){fn.call(null,arg1,arg2,arg3)});default:args=new Array(len-1);i=0;while(i1){for(var i=1;i0){if(state.ended&&!addToFront){var e=new Error("stream.push() after EOF");stream.emit("error",e)}else if(state.endEmitted&&addToFront){var _e=new Error("stream.unshift() after end event");stream.emit("error",_e)}else{var skipAdd;if(state.decoder&&!addToFront&&!encoding){chunk=state.decoder.write(chunk);skipAdd=!state.objectMode&&chunk.length===0}if(!addToFront)state.reading=false;if(!skipAdd){if(state.flowing&&state.length===0&&!state.sync){stream.emit("data",chunk);stream.read(0)}else{state.length+=state.objectMode?1:chunk.length;if(addToFront)state.buffer.unshift(chunk);else state.buffer.push(chunk);if(state.needReadable)emitReadable(stream)}}maybeReadMore(stream,state)}}else if(!addToFront){state.reading=false}return needMoreData(state)}function needMoreData(state){return!state.ended&&(state.needReadable||state.length=MAX_HWM){n=MAX_HWM}else{n--;n|=n>>>1;n|=n>>>2;n|=n>>>4;n|=n>>>8;n|=n>>>16;n++}return n}function howMuchToRead(n, state){if(n<=0||state.length===0&&state.ended)return 0;if(state.objectMode)return 1;if(n!==n){if(state.flowing&&state.length)return state.buffer.head.data.length;else return state.length}if(n>state.highWaterMark)state.highWaterMark=computeNewHighWaterMark(n);if(n<=state.length)return n;if(!state.ended){state.needReadable=true;return 0}return state.length}Readable.prototype.read=function(n){debug("read",n);n=parseInt(n,10);var state=this._readableState;var nOrig=n;if(n!==0)state.emittedReadable=false;if(n===0&&state.needReadable&&(state.length>=state.highWaterMark||state.ended)){debug("read: emitReadable",state.length,state.ended);if(state.length===0&&state.ended)endReadable(this);else emitReadable(this);return null}n=howMuchToRead(n,state);if(n===0&&state.ended){if(state.length===0)endReadable(this);return null}var doRead=state.needReadable;debug("need readable",doRead);if(state.length===0||state.length-n0)ret=fromList(n,state);else ret=null;if(ret===null){state.needReadable=true;n=0}else{state.length-=n}if(state.length===0){if(!state.ended)state.needReadable=true;if(nOrig!==n&&state.ended)endReadable(this)}if(ret!==null)this.emit("data",ret);return ret};function chunkInvalid(state,chunk){var er=null;if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==null&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}return er}function onEofChunk(stream,state){if(state.ended)return;if(state.decoder){var chunk=state.decoder.end();if(chunk&&chunk.length){state.buffer.push(chunk);state.length+=state.objectMode?1:chunk.length}}state.ended=true;emitReadable(stream)}function emitReadable(stream){var state=stream._readableState;state.needReadable=false;if(!state.emittedReadable){debug("emitReadable",state.flowing);state.emittedReadable=true;if(state.sync)processNextTick(emitReadable_,stream);else emitReadable_(stream)}}function emitReadable_(stream){debug("emit readable");stream.emit("readable");flow(stream)}function maybeReadMore(stream,state){if(!state.readingMore){state.readingMore=true;processNextTick(maybeReadMore_,stream,state)}}function maybeReadMore_(stream,state){var 
len=state.length;while(!state.reading&&!state.flowing&&!state.ended&&state.length1&&indexOf(state.pipes,dest)!==-1)&&!cleanedUp){debug("false write response, pause",src._readableState.awaitDrain);src._readableState.awaitDrain++;increasedAwaitDrain=true}src.pause()}}function onerror(er){debug("onerror",er);unpipe();dest.removeListener("error",onerror);if(EElistenerCount(dest,"error")===0)dest.emit("error",er)}prependListener(dest,"error",onerror);function onclose(){dest.removeListener("finish",onfinish);unpipe()}dest.once("close",onclose);function onfinish(){debug("onfinish");dest.removeListener("close",onclose);unpipe()}dest.once("finish",onfinish);function unpipe(){debug("unpipe");src.unpipe(dest)}dest.emit("pipe",src);if(!state.flowing){debug("pipe resume");src.resume()}return dest};function pipeOnDrain(src){return function(){var state=src._readableState;debug("pipeOnDrain",state.awaitDrain);if(state.awaitDrain)state.awaitDrain--;if(state.awaitDrain===0&&EElistenerCount(src,"data")){state.flowing=true;flow(src)}}}Readable.prototype.unpipe=function(dest){var state=this._readableState;if(state.pipesCount===0)return this;if(state.pipesCount===1){if(dest&&dest!==state.pipes)return this;if(!dest)dest=state.pipes;state.pipes=null;state.pipesCount=0;state.flowing=false;if(dest)dest.emit("unpipe",this);return this}if(!dest){var dests=state.pipes;var len=state.pipesCount;state.pipes=null;state.pipesCount=0;state.flowing=false;for(var _i=0;_i=state.length){if(state.decoder)ret=state.buffer.join("");else if(state.buffer.length===1)ret=state.buffer.head.data;else ret=state.buffer.concat(state.length);state.buffer.clear()}else{ret=fromListPartial(n,state.buffer,state.decoder)}return ret}function fromListPartial(n,list,hasStrings){var ret;if(nstr.length?str.length:n;if(nb===str.length)ret+=str;else ret+=str.slice(0,n);n-=nb;if(n===0){if(nb===str.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=str.slice(nb)}break}++c}list.length-=c;return ret}function copyFromBuffer(n,list){var ret=bufferShim.allocUnsafe(n);var p=list.head;var c=1;p.data.copy(ret);n-=p.data.length;while(p=p.next){var buf=p.data;var nb=n>buf.length?buf.length:n;buf.copy(ret,ret.length-n,0,nb);n-=nb;if(n===0){if(nb===buf.length){++c;if(p.next)list.head=p.next;else list.head=list.tail=null}else{list.head=p;p.data=buf.slice(nb)}break}++c}list.length-=c;return ret}function endReadable(stream){var state=stream._readableState;if(state.length>0)throw new Error('"endReadable()" called on non-empty stream');if(!state.endEmitted){state.ended=true;processNextTick(endReadableNT,state,stream)}}function endReadableNT(state,stream){if(!state.endEmitted&&state.length===0){state.endEmitted=true;stream.readable=false;stream.emit("end")}}function forEach(xs,f){for(var i=0,l=xs.length;i-1?setImmediate:processNextTick;Writable.WritableState=WritableState;var util=require("core-util-is");util.inherits=require("inherits");var internalUtil={deprecate:require("util-deprecate")};var Stream;(function(){try{Stream=require("st"+"ream")}catch(_){}finally{if(!Stream)Stream=require("events").EventEmitter}})();var Buffer=require("buffer").Buffer;var bufferShim=require("buffer-shims");util.inherits(Writable,Stream);function nop(){}function WriteReq(chunk,encoding,cb){this.chunk=chunk;this.encoding=encoding;this.callback=cb;this.next=null}var Duplex;function WritableState(options,stream){Duplex=Duplex||require("./_stream_duplex");options=options||{};this.objectMode=!!options.objectMode;if(stream instanceof 
Duplex)this.objectMode=this.objectMode||!!options.writableObjectMode;var hwm=options.highWaterMark;var defaultHwm=this.objectMode?16:16*1024;this.highWaterMark=hwm||hwm===0?hwm:defaultHwm;this.highWaterMark=~~this.highWaterMark;this.needDrain=false;this.ending=false;this.ended=false;this.finished=false;var noDecode=options.decodeStrings===false;this.decodeStrings=!noDecode;this.defaultEncoding=options.defaultEncoding||"utf8";this.length=0;this.writing=false;this.corked=0;this.sync=true;this.bufferProcessing=false;this.onwrite=function(er){onwrite(stream,er)};this.writecb=null;this.writelen=0;this.bufferedRequest=null;this.lastBufferedRequest=null;this.pendingcb=0;this.prefinished=false;this.errorEmitted=false;this.bufferedRequestCount=0;this.corkedRequestsFree=new CorkedRequest(this)}WritableState.prototype.getBuffer=function writableStateGetBuffer(){var current=this.bufferedRequest;var out=[];while(current){out.push(current);current=current.next}return out};(function(){try{Object.defineProperty(WritableState.prototype,"buffer",{get:internalUtil.deprecate(function(){return this.getBuffer()},"_writableState.buffer is deprecated. Use _writableState.getBuffer "+"instead.")})}catch(_){}})();var Duplex;function Writable(options){Duplex=Duplex||require("./_stream_duplex");if(!(this instanceof Writable)&&!(this instanceof Duplex))return new Writable(options);this._writableState=new WritableState(options,this);this.writable=true;if(options){if(typeof options.write==="function")this._write=options.write;if(typeof options.writev==="function")this._writev=options.writev}Stream.call(this)}Writable.prototype.pipe=function(){this.emit("error",new Error("Cannot pipe, not readable"))};function writeAfterEnd(stream,cb){var er=new Error("write after end");stream.emit("error",er);processNextTick(cb,er)}function validChunk(stream,state,chunk,cb){var valid=true;var er=false;if(chunk===null){er=new TypeError("May not write null values to stream")}else if(!Buffer.isBuffer(chunk)&&typeof chunk!=="string"&&chunk!==undefined&&!state.objectMode){er=new TypeError("Invalid non-string/buffer chunk")}if(er){stream.emit("error",er);processNextTick(cb,er);valid=false}return valid}Writable.prototype.write=function(chunk,encoding,cb){var state=this._writableState;var ret=false;if(typeof encoding==="function"){cb=encoding;encoding=null}if(Buffer.isBuffer(chunk))encoding="buffer";else if(!encoding)encoding=state.defaultEncoding;if(typeof cb!=="function")cb=nop;if(state.ended)writeAfterEnd(this,cb);else if(validChunk(this,state,chunk,cb)){ state.pendingcb++;ret=writeOrBuffer(this,state,chunk,encoding,cb)}return ret};Writable.prototype.cork=function(){var state=this._writableState;state.corked++};Writable.prototype.uncork=function(){var state=this._writableState;if(state.corked){state.corked--;if(!state.writing&&!state.corked&&!state.finished&&!state.bufferProcessing&&state.bufferedRequest)clearBuffer(this,state)}};Writable.prototype.setDefaultEncoding=function setDefaultEncoding(encoding){if(typeof encoding==="string")encoding=encoding.toLowerCase();if(!(["hex","utf8","utf-8","ascii","binary","base64","ucs2","ucs-2","utf16le","utf-16le","raw"].indexOf((encoding+"").toLowerCase())>-1))throw new TypeError("Unknown encoding: "+encoding);this._writableState.defaultEncoding=encoding;return this};function decodeChunk(state,chunk,encoding){if(!state.objectMode&&state.decodeStrings!==false&&typeof chunk==="string"){chunk=bufferShim.from(chunk,encoding)}return chunk}function 
writeOrBuffer(stream,state,chunk,encoding,cb){chunk=decodeChunk(state,chunk,encoding);if(Buffer.isBuffer(chunk))encoding="buffer";var len=state.objectMode?1:chunk.length;state.length+=len;var ret=state.length0)this.tail.next=entry;else this.head=entry;this.tail=entry;++this.length};BufferList.prototype.unshift=function(v){var entry={data:v,next:this.head};if(this.length===0)this.tail=entry;this.head=entry;++this.length};BufferList.prototype.shift=function(){if(this.length===0)return;var ret=this.head.data;if(this.length===1)this.head=this.tail=null;else this.head=this.head.next;--this.length;return ret};BufferList.prototype.clear=function(){this.head=this.tail=null;this.length=0};BufferList.prototype.join=function(s){if(this.length===0)return"";var p=this.head;var ret=""+p.data;while(p=p.next){ret+=s+p.data}return ret};BufferList.prototype.concat=function(n){if(this.length===0)return bufferShim.alloc(0);if(this.length===1)return this.head.data;var ret=bufferShim.allocUnsafe(n>>>0);var p=this.head;var i=0;while(p){p.data.copy(ret,i);i+=p.data.length;p=p.next}return ret}},{buffer:5,"buffer-shims":4}],50:[function(require,module,exports){module.exports=require("./lib/_stream_passthrough.js")},{"./lib/_stream_passthrough.js":45}],51:[function(require,module,exports){(function(process){var Stream=function(){try{return require("st"+"ream")}catch(_){}}();exports=module.exports=require("./lib/_stream_readable.js");exports.Stream=Stream||exports;exports.Readable=exports;exports.Writable=require("./lib/_stream_writable.js");exports.Duplex=require("./lib/_stream_duplex.js");exports.Transform=require("./lib/_stream_transform.js");exports.PassThrough=require("./lib/_stream_passthrough.js");if(!process.browser&&process.env.READABLE_STREAM==="disable"&&Stream){module.exports=Stream}}).call(this,require("_process"))},{"./lib/_stream_duplex.js":44,"./lib/_stream_passthrough.js":45,"./lib/_stream_readable.js":46,"./lib/_stream_transform.js":47,"./lib/_stream_writable.js":48,_process:42}],52:[function(require,module,exports){module.exports=require("./lib/_stream_transform.js")},{"./lib/_stream_transform.js":47}],53:[function(require,module,exports){module.exports=require("./lib/_stream_writable.js")},{"./lib/_stream_writable.js":48}],54:[function(require,module,exports){module.exports=function(string){return string.replace(/[-\\^$*+?.()|[\]{}]/g,"\\$&")}},{}],55:[function(require,module,exports){module.exports=Stream;var EE=require("events").EventEmitter;var inherits=require("inherits");inherits(Stream,EE);Stream.Readable=require("readable-stream/readable.js");Stream.Writable=require("readable-stream/writable.js");Stream.Duplex=require("readable-stream/duplex.js");Stream.Transform=require("readable-stream/transform.js");Stream.PassThrough=require("readable-stream/passthrough.js");Stream.Stream=Stream;function Stream(){EE.call(this)}Stream.prototype.pipe=function(dest,options){var source=this;function ondata(chunk){if(dest.writable){if(false===dest.write(chunk)&&source.pause){source.pause()}}}source.on("data",ondata);function ondrain(){if(source.readable&&source.resume){source.resume()}}dest.on("drain",ondrain);if(!dest._isStdio&&(!options||options.end!==false)){source.on("end",onend);source.on("close",onclose)}var didOnEnd=false;function onend(){if(didOnEnd)return;didOnEnd=true;dest.end()}function onclose(){if(didOnEnd)return;didOnEnd=true;if(typeof dest.destroy==="function")dest.destroy()}function onerror(er){cleanup();if(EE.listenerCount(this,"error")===0){throw 
er}}source.on("error",onerror);dest.on("error",onerror);function cleanup(){source.removeListener("data",ondata);dest.removeListener("drain",ondrain);source.removeListener("end",onend);source.removeListener("close",onclose);source.removeListener("error",onerror);dest.removeListener("error",onerror);source.removeListener("end",cleanup);source.removeListener("close",cleanup);dest.removeListener("close",cleanup)}source.on("end",cleanup);source.on("close",cleanup);dest.on("close",cleanup);dest.emit("pipe",source);return dest}},{events:28,inherits:38,"readable-stream/duplex.js":43,"readable-stream/passthrough.js":50,"readable-stream/readable.js":51,"readable-stream/transform.js":52,"readable-stream/writable.js":53}],56:[function(require,module,exports){var Buffer=require("buffer").Buffer;var isBufferEncoding=Buffer.isEncoding||function(encoding){switch(encoding&&encoding.toLowerCase()){case"hex":case"utf8":case"utf-8":case"ascii":case"binary":case"base64":case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":case"raw":return true;default:return false}};function assertEncoding(encoding){if(encoding&&!isBufferEncoding(encoding)){throw new Error("Unknown encoding: "+encoding)}}var StringDecoder=exports.StringDecoder=function(encoding){this.encoding=(encoding||"utf8").toLowerCase().replace(/[-_]/,"");assertEncoding(encoding);switch(this.encoding){case"utf8":this.surrogateSize=3;break;case"ucs2":case"utf16le":this.surrogateSize=2;this.detectIncompleteChar=utf16DetectIncompleteChar;break;case"base64":this.surrogateSize=3;this.detectIncompleteChar=base64DetectIncompleteChar;break;default:this.write=passThroughWrite;return}this.charBuffer=new Buffer(6);this.charReceived=0;this.charLength=0};StringDecoder.prototype.write=function(buffer){var charStr="";while(this.charLength){var available=buffer.length>=this.charLength-this.charReceived?this.charLength-this.charReceived:buffer.length;buffer.copy(this.charBuffer,this.charReceived,0,available);this.charReceived+=available;if(this.charReceived=55296&&charCode<=56319){this.charLength+=this.surrogateSize;charStr="";continue}this.charReceived=this.charLength=0;if(buffer.length===0){return charStr}break}this.detectIncompleteChar(buffer);var end=buffer.length;if(this.charLength){buffer.copy(this.charBuffer,0,buffer.length-this.charReceived,end);end-=this.charReceived}charStr+=buffer.toString(this.encoding,0,end);var end=charStr.length-1;var charCode=charStr.charCodeAt(end);if(charCode>=55296&&charCode<=56319){var size=this.surrogateSize;this.charLength+=size;this.charReceived+=size;this.charBuffer.copy(this.charBuffer,size,0,size);buffer.copy(this.charBuffer,0,0,size);return charStr.substring(0,end)}return charStr};StringDecoder.prototype.detectIncompleteChar=function(buffer){var i=buffer.length>=3?3:buffer.length;for(;i>0;i--){var c=buffer[buffer.length-i];if(i==1&&c>>5==6){this.charLength=2;break}if(i<=2&&c>>4==14){this.charLength=3;break}if(i<=3&&c>>3==30){this.charLength=4;break}}this.charReceived=i};StringDecoder.prototype.end=function(buffer){var res="";if(buffer&&buffer.length)res=this.write(buffer);if(this.charReceived){var cr=this.charReceived;var buf=this.charBuffer;var enc=this.encoding;res+=buf.slice(0,cr).toString(enc)}return res};function passThroughWrite(buffer){return buffer.toString(this.encoding)}function utf16DetectIncompleteChar(buffer){this.charReceived=buffer.length%2;this.charLength=this.charReceived?2:0}function 
base64DetectIncompleteChar(buffer){this.charReceived=buffer.length%3;this.charLength=this.charReceived?3:0}},{buffer:5}],57:[function(require,module,exports){(function(global){module.exports=deprecate;function deprecate(fn,msg){if(config("noDeprecation")){return fn}var warned=false;function deprecated(){if(!warned){if(config("throwDeprecation")){throw new Error(msg)}else if(config("traceDeprecation")){console.trace(msg)}else{console.warn(msg)}warned=true}return fn.apply(this,arguments)}return deprecated}function config(name){try{if(!global.localStorage)return false}catch(_){return false}var val=global.localStorage[name];if(null==val)return false;return String(val).toLowerCase()==="true"}}).call(this,typeof global!=="undefined"?global:typeof self!=="undefined"?self:typeof window!=="undefined"?window:{})},{}],58:[function(require,module,exports){module.exports=extend;var hasOwnProperty=Object.prototype.hasOwnProperty;function extend(){var target={};for(var i=0;i 0 { + err = service.repository.UpdateJobOutput(executionContext.ExecutionID, executionContext.Output) + logger.LogErrors(err, "update execution context output", executionContext) + } +} + +func (service *executionService) watchProcess(context model.ExecutionContext) { + logger.Info("Start Watch Process for Job With Context ID: ", context.ExecutionID, ", name: ", context.Name, ", and Status: "+context.Status) + + waitTime := config.Config().KubeLogProcessWaitTime * time.Second + err := service.kubernetesClient.WaitForReadyJob(context.Name, waitTime) + + if err != nil { + context.Status = status.JobCreationFailed + return + } + + context.Status = status.JobReady + logger.Info("Job Ready for ", context.ExecutionID) + + pod, err := service.kubernetesClient.WaitForReadyPod(context.Name, waitTime) + logger.LogErrors(err, "wait for ready pod", pod) + if err != nil { + context.Status = status.PodCreationFailed + return + } + + if pod.Status.Phase == v1.PodFailed { + context.Status = status.PodFailed + logger.Info("Pod Failed for ", context.ExecutionID, " reason: ", pod.Status.Reason, " message: ", pod.Status.Message) + } else { + context.Status = status.PodReady + logger.Info("Pod Ready for ", context.ExecutionID) + } + + podLog, err := service.kubernetesClient.GetPodLogs(pod) + if err != nil { + context.Status = status.FetchPodLogFailed + return + } + + scanner := bufio.NewScanner(podLog) + scanner.Split(bufio.ScanLines) + + var buffer bytes.Buffer + for scanner.Scan() { + buffer.WriteString(scanner.Text() + "\n") + } + + output := types.GzippedText(buffer.Bytes()) + + context.Output = output + logger.Info("Execution Output Produced ", context.ExecutionID, " with length ", len(output)) + + if context.Status == status.PodReady { + context.Status = status.Finished + } + + defer service.update(context) + + return +} + +func mergeArgs(argsOne, argsTwo map[string]string) map[string]string { + result := make(map[string]string) + + for k, v := range argsOne { + result[k] = v + } + for k, v := range argsTwo { + result[k] = v + } + return result +} diff --git a/internal/app/service/execution/service/execution_integration_test.go b/internal/app/service/execution/service/execution_integration_test.go new file mode 100644 index 00000000..e5b88515 --- /dev/null +++ b/internal/app/service/execution/service/execution_integration_test.go @@ -0,0 +1,121 @@ +package service + +import ( + "bufio" + fake "github.com/brianvoe/gofakeit" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + "os" + 
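+ // Project-internal dependencies: the integration suite drives a real Kubernetes client and a Postgres-backed repository, with only the metadata and secret repositories mocked.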
"proctor/internal/app/service/execution/repository" + "proctor/internal/app/service/execution/status" + "proctor/internal/app/service/infra/config" + "proctor/internal/app/service/infra/db/postgresql" + "proctor/internal/app/service/infra/kubernetes" + "proctor/internal/app/service/infra/kubernetes/http" + svcMetadataRepository "proctor/internal/app/service/metadata/repository" + svcSecretRepository "proctor/internal/app/service/secret/repository" + "proctor/internal/pkg/model/metadata" + "proctor/internal/pkg/model/metadata/env" + "testing" + "time" +) + +type TestExecutionIntegrationSuite struct { + suite.Suite + service ExecutionService + kubernetesClient kubernetes.KubernetesClient + repository repository.ExecutionContextRepository + mockMetadataRepository *svcMetadataRepository.MockMetadataRepository + mockSecretRepository *svcSecretRepository.MockSecretRepository +} + +func (suite *TestExecutionIntegrationSuite) SetupTest() { + httpClient, _ := http.NewClient() + suite.kubernetesClient = kubernetes.NewKubernetesClient(httpClient) + pgClient := postgresql.NewClient() + suite.repository = repository.NewExecutionContextRepository(pgClient) + suite.mockMetadataRepository = &svcMetadataRepository.MockMetadataRepository{} + suite.mockSecretRepository = &svcSecretRepository.MockSecretRepository{} + suite.service = NewExecutionService( + suite.kubernetesClient, + suite.repository, + suite.mockMetadataRepository, + suite.mockSecretRepository, + ) +} + +func (suite *TestExecutionIntegrationSuite) TestExecuteJobSuccess() { + t := suite.T() + jobName := fake.Username() + userEmail := fake.Email() + mapKey := fake.FirstName() + mapValue := fake.LastName() + + jobArgs := map[string]string{ + mapKey: mapValue, + } + + imageName := "ubuntu" + fakeMetadata := &metadata.Metadata{ + ImageName: imageName, + Author: "bimo.horizon", + Description: fake.HackerIngverb(), + Organization: fake.BuzzWord(), + AuthorizedGroups: []string{}, + EnvVars: env.Vars{ + Args: []env.VarMetadata{ + { + Name: fake.BeerYeast(), + Description: fake.JobDescriptor(), + }, + }, + Secrets: []env.VarMetadata{}, + }, + } + + suite.mockMetadataRepository.On("GetByName", jobName).Return(fakeMetadata, nil).Once() + suite.mockSecretRepository.On("GetByJobName", jobName).Return(map[string]string{}, nil).Once() + + context, _, err := suite.service.ExecuteWithCommand(jobName, userEmail, jobArgs, []string{"bash", "-c", "for run in {1..10}; do sleep 1 && echo bimo; done"}) + assert.NoError(t, err) + assert.NotNil(t, context) + + time.Sleep(30 * time.Second) + expectedContext, err := suite.repository.GetById(context.ExecutionID) + assert.NoError(t, err) + assert.NotNil(t, expectedContext) + assert.Equal(t, status.Finished, expectedContext.Status) + assert.NotNil(t, expectedContext.Output) +} + +func (suite *TestExecutionIntegrationSuite) TestStreamLogsSuccess() { + t := suite.T() + + _ = os.Setenv("PROCTOR_JOB_POD_ANNOTATIONS", "{\"key.one\":\"true\"}") + envVarsForContainer := map[string]string{"SAMPLE_ARG": "sample-value"} + sampleImageName := "busybox" + + executedJobname, err := suite.kubernetesClient.ExecuteJobWithCommand(sampleImageName, envVarsForContainer, []string{"echo", "Bimo Horizon"}) + assert.NoError(t, err) + + waitTime := config.Config().KubeLogProcessWaitTime * time.Second + logStream, err := suite.service.StreamJobLogs(executedJobname, waitTime) + assert.NoError(t, err) + + defer logStream.Close() + + bufioReader := bufio.NewReader(logStream) + + jobLogSingleLine, _, err := bufioReader.ReadLine() + assert.NoError(t, err) + 
+ assert.Equal(t, "Bimo Horizon", string(jobLogSingleLine[:])) + +} + +func TestExecutionIntegrationSuiteTest(t *testing.T) { + value, available := os.LookupEnv("ENABLE_INTEGRATION_TEST") + if available == true && value == "true" { + suite.Run(t, new(TestExecutionIntegrationSuite)) + } +} diff --git a/internal/app/service/execution/service/execution_mock.go b/internal/app/service/execution/service/execution_mock.go new file mode 100644 index 00000000..60c7b86d --- /dev/null +++ b/internal/app/service/execution/service/execution_mock.go @@ -0,0 +1,35 @@ +package service + +import ( + "github.com/stretchr/testify/mock" + "io" + "proctor/internal/app/service/execution/model" + "time" +) + +type MockExecutionService struct { + mock.Mock +} + +func (mockService *MockExecutionService) Execute(jobName string, userEmail string, args map[string]string) (*model.ExecutionContext, string, error) { + arguments := mockService.Called(jobName, userEmail, args) + return arguments.Get(0).(*model.ExecutionContext), arguments.String(1), arguments.Error(2) +} + +func (mockService *MockExecutionService) ExecuteWithCommand(jobName string, userEmail string, args map[string]string, commands []string) (*model.ExecutionContext, string, error) { + arguments := mockService.Called(jobName, userEmail, args, commands) + return arguments.Get(0).(*model.ExecutionContext), arguments.String(1), arguments.Error(2) +} + +func (mockService *MockExecutionService) update(executionContext model.ExecutionContext) { + mockService.Called(executionContext) +} + +func (mockService *MockExecutionService) insertContext(executionContext model.ExecutionContext) { + mockService.Called(executionContext) +} + +func (mockService *MockExecutionService) StreamJobLogs(executionName string, waitTime time.Duration) (io.ReadCloser, error) { + args := mockService.Called(executionName, waitTime) + return args.Get(0).(io.ReadCloser), args.Error(1) +} diff --git a/internal/app/service/execution/service/execution_test.go b/internal/app/service/execution/service/execution_test.go new file mode 100644 index 00000000..bedc6f28 --- /dev/null +++ b/internal/app/service/execution/service/execution_test.go @@ -0,0 +1,160 @@ +package service + +import ( + "bytes" + "io/ioutil" + "strings" + "testing" + + fake "github.com/brianvoe/gofakeit" + "github.com/docker/docker/pkg/testutil/assert" + "github.com/jmoiron/sqlx/types" + "github.com/pkg/errors" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + v1 "k8s.io/api/core/v1" + + "proctor/internal/app/service/execution/repository" + "proctor/internal/app/service/execution/status" + "proctor/internal/app/service/infra/kubernetes" + svcMetadataRepository "proctor/internal/app/service/metadata/repository" + svcSecretRepository "proctor/internal/app/service/secret/repository" + "proctor/internal/pkg/model/metadata" +) + +type TestExecutionServiceSuite struct { + suite.Suite + service ExecutionService + mockKubernetesClient *kubernetes.MockKubernetesClient + mockRepository *repository.MockExecutionContextRepository + mockMetadataRepository *svcMetadataRepository.MockMetadataRepository + mockSecretRepository *svcSecretRepository.MockSecretRepository +} + +func (suite *TestExecutionServiceSuite) SetupTest() { + suite.mockKubernetesClient = &kubernetes.MockKubernetesClient{} + suite.mockRepository = &repository.MockExecutionContextRepository{} + suite.mockMetadataRepository = &svcMetadataRepository.MockMetadataRepository{} + suite.mockSecretRepository = &svcSecretRepository.MockSecretRepository{} + 
suite.service = NewExecutionService( + suite.mockKubernetesClient, + suite.mockRepository, + suite.mockMetadataRepository, + suite.mockSecretRepository, + ) +} + +func (suite *TestExecutionServiceSuite) TestExecuteMetadataNotFound() { + t := suite.T() + jobName := fake.Username() + userEmail := fake.Email() + mapKey := fake.FirstName() + mapValue := fake.LastName() + + jobArgs := map[string]string{ + mapKey: mapValue, + } + + suite.mockMetadataRepository.On("GetByName", jobName).Return(&metadata.Metadata{}, errors.New("metadataNotFound")).Once() + suite.mockRepository.On("Insert", mock.Anything).Return(0, nil).Once() + + context, _, err := suite.service.Execute(jobName, userEmail, jobArgs) + + assert.Error(t, err, "metadata not found") + assert.NotNil(t, context) + assert.Equal(t, context.Status, status.RequirementNotMet) +} + +func (suite *TestExecutionServiceSuite) TestExecuteSecretNotFound() { + t := suite.T() + jobName := fake.Username() + userEmail := fake.Email() + mapKey := fake.FirstName() + mapValue := fake.LastName() + + jobArgs := map[string]string{ + mapKey: mapValue, + } + + suite.mockMetadataRepository.On("GetByName", jobName).Return(&metadata.Metadata{}, nil).Once() + suite.mockSecretRepository.On("GetByJobName", jobName).Return(map[string]string{}, errors.New("secret not found")).Once() + suite.mockRepository.On("Insert", mock.Anything).Return(0, nil).Once() + + context, _, err := suite.service.Execute(jobName, userEmail, jobArgs) + assert.Error(t, err, "secret not found") + assert.NotNil(t, context) + assert.Equal(t, context.Status, status.RequirementNotMet) +} + +func (suite *TestExecutionServiceSuite) TestExecuteJobFailed() { + t := suite.T() + jobName := fake.Username() + userEmail := fake.Email() + mapKey := fake.FirstName() + mapValue := fake.LastName() + + jobArgs := map[string]string{ + mapKey: mapValue, + } + + imageName := fake.BeerYeast() + fakeMetadata := &metadata.Metadata{ + ImageName: imageName, + } + + suite.mockMetadataRepository.On("GetByName", jobName).Return(fakeMetadata, nil).Once() + suite.mockSecretRepository.On("GetByJobName", jobName).Return(map[string]string{}, nil).Once() + suite.mockRepository.On("Insert", mock.Anything).Return(0, nil).Once() + suite.mockKubernetesClient.On("ExecuteJobWithCommand", imageName, mock.Anything, []string{}).Return("", errors.New("Execution Failed")) + + context, _, err := suite.service.Execute(jobName, userEmail, jobArgs) + assert.Error(t, err, "error when executing image") + assert.NotNil(t, context) + assert.Equal(t, context.Status, status.CreationFailed) +} + +func (suite *TestExecutionServiceSuite) TestExecuteJobSuccess() { + t := suite.T() + jobName := fake.Username() + userEmail := fake.Email() + mapKey := fake.FirstName() + mapValue := fake.LastName() + + jobArgs := map[string]string{ + mapKey: mapValue, + } + + imageName := fake.BeerYeast() + fakeMetadata := &metadata.Metadata{ + ImageName: imageName, + } + + // This is needed because #NopCloser adds additional foreign character + // at the end of the input string + logBuf := new(bytes.Buffer) + mockLog := ioutil.NopCloser(strings.NewReader("hello world")) + logBuf.ReadFrom(mockLog) + + suite.mockMetadataRepository.On("GetByName", jobName).Return(fakeMetadata, nil).Once() + suite.mockSecretRepository.On("GetByJobName", jobName).Return(map[string]string{}, nil).Once() + suite.mockRepository.On("Insert", mock.Anything).Return(0, nil).Times(3) + suite.mockRepository.On("UpdateStatus", mock.Anything, status.Finished).Return(nil).Once() + 
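+ // The watch on the created job is expected to persist the gzipped pod log exactly once alongside the Finished status update.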
suite.mockRepository.On("UpdateJobOutput", mock.Anything, types.GzippedText(logBuf.String())).Return(nil).Once() + suite.mockRepository.On("GetById", mock.Anything).Return(0, nil).Times(3) + + executionName := "execution-name" + suite.mockKubernetesClient.On("ExecuteJobWithCommand", imageName, mock.Anything, []string{}).Return(executionName, nil) + suite.mockKubernetesClient.On("WaitForReadyJob", executionName, mock.Anything).Return(nil) + podDetail := &v1.Pod{} + suite.mockKubernetesClient.On("WaitForReadyPod", executionName, mock.Anything).Return(podDetail, nil) + suite.mockKubernetesClient.On("GetPodLogs", podDetail).Return(mockLog, nil) + + context, _, err := suite.service.Execute(jobName, userEmail, jobArgs) + assert.NilError(t, err) + assert.NotNil(t, context) + assert.Equal(t, context.Status, status.Created) +} + +func TestExecutionServiceSuiteTest(t *testing.T) { + suite.Run(t, new(TestExecutionServiceSuite)) +} diff --git a/internal/app/service/execution/status/execution.go b/internal/app/service/execution/status/execution.go new file mode 100644 index 00000000..422556ed --- /dev/null +++ b/internal/app/service/execution/status/execution.go @@ -0,0 +1,17 @@ +package status + +type ExecutionStatus string + +const ( + Received ExecutionStatus = "RECEIVED" + RequirementNotMet ExecutionStatus = "REQUIREMENT_NOT_MET" + Created ExecutionStatus = "CREATED" + CreationFailed ExecutionStatus = "CREATION_FAILED" + JobCreationFailed ExecutionStatus = "JOB_CREATION_FAILED" + JobReady ExecutionStatus = "JOB_READY" + PodCreationFailed ExecutionStatus = "POD_CREATION_FAILED" + PodReady ExecutionStatus = "POD_READY" + PodFailed ExecutionStatus = "POD_FAILED" + FetchPodLogFailed ExecutionStatus = "FETCH_POD_LOG_FAILED" + Finished ExecutionStatus = "FINISHED" +) diff --git a/internal/app/service/infra/config/config.go b/internal/app/service/infra/config/config.go new file mode 100644 index 00000000..3bf033df --- /dev/null +++ b/internal/app/service/infra/config/config.go @@ -0,0 +1,199 @@ +package config + +import ( + "encoding/json" + "fmt" + "github.com/spf13/viper" + "os" + "strings" + "sync" + "sync/atomic" + "time" +) + +func GetStringDefault(viper *viper.Viper, key string, defaultValue string) string { + viper.SetDefault(key, defaultValue) + return viper.GetString(key) +} + +func GetArrayString(viper *viper.Viper, key string) []string { + return strings.Split(viper.GetString(key), ",") +} + +func GetArrayStringDefault(viper *viper.Viper, key string, defaultValue []string) []string { + viper.SetDefault(key, strings.Join(defaultValue, ",")) + return strings.Split(viper.GetString(key), ",") +} + +func GetBoolDefault(viper *viper.Viper, key string, defaultValue bool) bool { + viper.SetDefault(key, defaultValue) + return viper.GetBool(key) +} + +func GetInt64Ref(viper *viper.Viper, key string) *int64 { + value := viper.GetInt64(key) + return &value +} + +func GetInt32Ref(viper *viper.Viper, key string) *int32 { + value := viper.GetInt32(key) + return &value +} + +func GetMapFromJson(viper *viper.Viper, key string) map[string]string { + var jsonStr = []byte(viper.GetString(key)) + var annotations map[string]string + + err := json.Unmarshal(jsonStr, &annotations) + if err != nil { + _ = fmt.Errorf("invalid Value for key %s, errors %v", key, err.Error()) + } + + return annotations +} + +var once sync.Once +var config ProctorConfig + +type ProctorConfig struct { + viper *viper.Viper + KubeConfig string + KubeContext string + LogLevel string + AppPort string + DefaultNamespace string + RedisAddress 
string + RedisPassword string + LogsStreamReadBufferSize int + RedisMaxActiveConnections int + LogsStreamWriteBufferSize int + KubeWaitForResourcePollCount int + KubeLogProcessWaitTime time.Duration + KubeJobActiveDeadlineSeconds *int64 + KubeJobRetries *int32 + KubeServiceAccountName string + PostgresUser string + PostgresPassword string + PostgresHost string + PostgresPort int + AuthPluginExported string + PostgresDatabase string + PostgresMaxConnections int + PostgresConnectionMaxLifetime int + NewRelicAppName string + NewRelicLicenseKey string + MinClientVersion string + ScheduledJobsFetchIntervalInMins int + MailUsername string + MailServerHost string + MailPassword string + MailServerPort string + JobPodAnnotations map[string]string + DocsPath string + AuthPluginBinary string + AuthEnabled bool + NotificationPluginBinary []string + NotificationPluginExported []string + AuthRequiredAdminGroup []string +} + +func load() ProctorConfig { + fang := viper.New() + + fang.SetEnvPrefix("PROCTOR") + fang.SetEnvKeyReplacer(strings.NewReplacer(".", "_")) + fang.AutomaticEnv() + + fang.SetConfigName("config") + fang.AddConfigPath(".") + fang.AddConfigPath("$HOME/.proctor") + value, available := os.LookupEnv("CONFIG_LOCATION") + if available { + fang.AddConfigPath(value) + } + _ = fang.ReadInConfig() + + proctorConfig := ProctorConfig{ + viper: fang, + KubeConfig: fang.GetString("kube.config"), + KubeContext: GetStringDefault(fang, "kube.context", "default"), + LogLevel: GetStringDefault(fang, "log.level", "DEBUG"), + AppPort: GetStringDefault(fang, "app.port", "5001"), + DefaultNamespace: fang.GetString("default.namespace"), + RedisAddress: fang.GetString("redis.address"), + RedisPassword: fang.GetString("redis.password"), + RedisMaxActiveConnections: fang.GetInt("redis.max.active.connections"), + LogsStreamReadBufferSize: fang.GetInt("logs.stream.read.buffer.size"), + LogsStreamWriteBufferSize: fang.GetInt("logs.stream.write.buffer.size"), + KubeWaitForResourcePollCount: fang.GetInt("kube.wait.for.resource.poll.count"), + KubeLogProcessWaitTime: time.Duration(fang.GetInt("kube.log.process.wait.time")), + KubeJobActiveDeadlineSeconds: GetInt64Ref(fang, "kube.job.active.deadline.seconds"), + KubeJobRetries: GetInt32Ref(fang, "kube.job.retries"), + KubeServiceAccountName: fang.GetString("kube.service.account.name"), + PostgresUser: fang.GetString("postgres.user"), + PostgresPassword: fang.GetString("postgres.password"), + PostgresHost: fang.GetString("postgres.host"), + PostgresPort: fang.GetInt("postgres.port"), + PostgresDatabase: fang.GetString("postgres.database"), + PostgresMaxConnections: fang.GetInt("postgres.max.connections"), + PostgresConnectionMaxLifetime: fang.GetInt("postgres.connections.max.lifetime"), + NewRelicAppName: fang.GetString("new.relic.app.name"), + NewRelicLicenseKey: fang.GetString("new.relic.licence.key"), + MinClientVersion: fang.GetString("min.client.version"), + ScheduledJobsFetchIntervalInMins: fang.GetInt("scheduled.jobs.fetch.interval.in.mins"), + MailUsername: fang.GetString("mail.username"), + MailServerHost: fang.GetString("mail.server.host"), + MailPassword: fang.GetString("mail.password"), + MailServerPort: fang.GetString("mail.server.port"), + JobPodAnnotations: GetMapFromJson(fang, "job.pod.annotations"), + DocsPath: fang.GetString("docs.path"), + AuthPluginBinary: fang.GetString("auth.plugin.binary"), + AuthPluginExported: GetStringDefault(fang, "auth.plugin.exported", "Auth"), + AuthEnabled: GetBoolDefault(fang, "auth.enabled", false), + 
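+ // Plugin settings are read as comma-separated values, allowing more than one notification plugin to be configured.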
NotificationPluginBinary: GetArrayString(fang, "notification.plugin.binary"), + NotificationPluginExported: GetArrayString(fang, "notification.plugin.exported"), + AuthRequiredAdminGroup: GetArrayStringDefault(fang, "auth.required.admin.group", []string{"proctor.admin"}), + } + + return proctorConfig +} + +type AtomBool struct{ flag int32 } + +func (b *AtomBool) Set(value bool) { + var i int32 = 0 + if value { + i = 1 + } + atomic.StoreInt32(&(b.flag), int32(i)) +} + +func (b *AtomBool) Get() bool { + if atomic.LoadInt32(&(b.flag)) != 0 { + return true + } + return false +} + +var reset = new(AtomBool) + +func init() { + reset.Set(false) +} + +func Reset() { + reset.Set(true) +} + +func Config() ProctorConfig { + once.Do(func() { + config = load() + }) + + if reset.Get() { + config = load() + reset.Set(false) + } + + return config +} diff --git a/internal/app/service/infra/config/config_test.go b/internal/app/service/infra/config/config_test.go new file mode 100644 index 00000000..2aa62c47 --- /dev/null +++ b/internal/app/service/infra/config/config_test.go @@ -0,0 +1,234 @@ +package config + +import ( + fake "github.com/brianvoe/gofakeit" + "github.com/stretchr/testify/assert" + "os" + "strconv" + "testing" +) + +func TestEnvironment(t *testing.T) { + fake.Seed(0) + value := fake.FirstName() + _ = os.Setenv("PROCTOR_KUBE_CONFIG", value) + + loadedConfig := load() + assert.Equal(t, value, loadedConfig.KubeConfig) +} + +func TestLogLevel(t *testing.T) { + fake.Seed(0) + value := fake.FirstName() + _ = os.Setenv("PROCTOR_LOG_LEVEL", value) + + assert.Equal(t, value, load().LogLevel) +} + +func TestAppPort(t *testing.T) { + fake.Seed(0) + value := strconv.FormatInt(int64(fake.Number(1000, 4000)), 10) + _ = os.Setenv("PROCTOR_APP_PORT", value) + + assert.Equal(t, value, load().AppPort) +} + +func TestDefaultNamespace(t *testing.T) { + fake.Seed(0) + value := fake.FirstName() + _ = os.Setenv("PROCTOR_DEFAULT_NAMESPACE", value) + + assert.Equal(t, value, load().DefaultNamespace) +} + +func TestRedisAddress(t *testing.T) { + fake.Seed(0) + value := fake.FirstName() + _ = os.Setenv("PROCTOR_REDIS_ADDRESS", value) + + assert.Equal(t, value, load().RedisAddress) +} + +func TestRedisPassword(t *testing.T) { + fake.Seed(0) + value := fake.FirstName() + _ = os.Setenv("PROCTOR_REDIS_PASSWORD", value) + + assert.Equal(t, value, load().RedisPassword) +} + +func TestRedisMaxActiveConnections(t *testing.T) { + fake.Seed(0) + number := fake.Number(10, 90) + value := strconv.FormatInt(int64(number), 10) + _ = os.Setenv("PROCTOR_REDIS_MAX_ACTIVE_CONNECTIONS", value) + + assert.Equal(t, number, load().RedisMaxActiveConnections) +} + +func TestLogsStreamReadBufferSize(t *testing.T) { + _ = os.Setenv("PROCTOR_LOGS_STREAM_READ_BUFFER_SIZE", "140") + + assert.Equal(t, 140, load().LogsStreamReadBufferSize) +} + +func TestLogsStreamWriteBufferSize(t *testing.T) { + _ = os.Setenv("PROCTOR_LOGS_STREAM_WRITE_BUFFER_SIZE", "4096") + + assert.Equal(t, 4096, load().LogsStreamWriteBufferSize) +} + +func TestKubeJobActiveDeadlineSeconds(t *testing.T) { + _ = os.Setenv("PROCTOR_KUBE_JOB_ACTIVE_DEADLINE_SECONDS", "900") + + expectedValue := int64(900) + assert.Equal(t, &expectedValue, load().KubeJobActiveDeadlineSeconds) +} + +func TestKubeJobRetries(t *testing.T) { + _ = os.Setenv("PROCTOR_KUBE_JOB_RETRIES", "0") + + expectedValue := int32(0) + assert.Equal(t, &expectedValue, load().KubeJobRetries) +} + +func TestKubeServiceName(t *testing.T) { + _ = os.Setenv("PROCTOR_KUBE_SERVICE_ACCOUNT_NAME", "proctor") + + expectedValue 
:= "proctor" + assert.Equal(t, expectedValue, load().KubeServiceAccountName) +} + +func TestPostgresUser(t *testing.T) { + _ = os.Setenv("PROCTOR_POSTGRES_USER", "postgres") + + assert.Equal(t, "postgres", load().PostgresUser) +} + +func TestPostgresPassword(t *testing.T) { + _ = os.Setenv("PROCTOR_POSTGRES_PASSWORD", "ipsum-lorem") + + assert.Equal(t, "ipsum-lorem", load().PostgresPassword) +} + +func TestPostgresHost(t *testing.T) { + _ = os.Setenv("PROCTOR_POSTGRES_HOST", "localhost") + + assert.Equal(t, "localhost", load().PostgresHost) +} + +func TestPostgresPort(t *testing.T) { + _ = os.Setenv("PROCTOR_POSTGRES_PORT", "5432") + + assert.Equal(t, 5432, load().PostgresPort) +} + +func TestPostgresDatabase(t *testing.T) { + _ = os.Setenv("PROCTOR_POSTGRES_DATABASE", "proctord_development") + + assert.Equal(t, "proctord_development", load().PostgresDatabase) +} + +func TestPostgresMaxConnections(t *testing.T) { + _ = os.Setenv("PROCTOR_POSTGRES_MAX_CONNECTIONS", "50") + + assert.Equal(t, 50, load().PostgresMaxConnections) +} + +func TestPostgresConnectionMaxLifetime(t *testing.T) { + _ = os.Setenv("PROCTOR_POSTGRES_CONNECTIONS_MAX_LIFETIME", "30") + + assert.Equal(t, 30, load().PostgresConnectionMaxLifetime) +} + +func TestNewRelicAppName(t *testing.T) { + _ = os.Setenv("PROCTOR_NEW_RELIC_APP_NAME", "PROCTORD") + + assert.Equal(t, "PROCTORD", load().NewRelicAppName) +} + +func TestNewRelicLicenceKey(t *testing.T) { + _ = os.Setenv("PROCTOR_NEW_RELIC_LICENCE_KEY", "nrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnr") + + assert.Equal(t, "nrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnrnr", load().NewRelicLicenseKey) +} + +func TestMinClientVersion(t *testing.T) { + _ = os.Setenv("PROCTOR_MIN_CLIENT_VERSION", "0.2.0") + + assert.Equal(t, "0.2.0", load().MinClientVersion) +} + +func TestScheduledJobsFetchIntervalInMins(t *testing.T) { + _ = os.Setenv("PROCTOR_SCHEDULED_JOBS_FETCH_INTERVAL_IN_MINS", "5") + + assert.Equal(t, 5, load().ScheduledJobsFetchIntervalInMins) +} + +func TestMailUsername(t *testing.T) { + _ = os.Setenv("PROCTOR_MAIL_USERNAME", "foo@bar.com") + + assert.Equal(t, "foo@bar.com", load().MailUsername) +} + +func TestMailPassword(t *testing.T) { + _ = os.Setenv("PROCTOR_MAIL_PASSWORD", "password") + + assert.Equal(t, "password", load().MailPassword) +} + +func TestMailServerHost(t *testing.T) { + _ = os.Setenv("PROCTOR_MAIL_SERVER_HOST", "127.0.0.1") + + assert.Equal(t, "127.0.0.1", load().MailServerHost) +} + +func TestMailServerPort(t *testing.T) { + _ = os.Setenv("PROCTOR_MAIL_SERVER_PORT", "123") + + assert.Equal(t, "123", load().MailServerPort) +} + +func TestJobPodAnnotations(t *testing.T) { + _ = os.Setenv("PROCTOR_JOB_POD_ANNOTATIONS", "{\"key.one\":\"true\"}") + + assert.Equal(t, map[string]string{"key.one": "true"}, load().JobPodAnnotations) +} + +func TestDocsPath(t *testing.T) { + _ = os.Setenv("PROCTOR_DOCS_PATH", "path1") + + assert.Equal(t, "path1", load().DocsPath) +} + +func TestAuthPluginBinary(t *testing.T) { + _ = os.Setenv("PROCTOR_AUTH_PLUGIN_BINARY", "path1") + + assert.Equal(t, "path1", load().AuthPluginBinary) +} + +func TestAuthPluginExported(t *testing.T) { + _ = os.Setenv("PROCTOR_AUTH_PLUGIN_EXPORTED", "path1") + + assert.Equal(t, "path1", load().AuthPluginExported) +} + +func TestAuthEnabled(t *testing.T) { + _ = os.Setenv("PROCTOR_AUTH_ENABLED", "false") + + assert.Equal(t, false, load().AuthEnabled) +} + +func TestNotificationPluginBinary(t *testing.T) { + _ = os.Setenv("PROCTOR_NOTIFICATION_PLUGIN_BINARY", "path-notification,second-path") + + expected := 
[]string{"path-notification", "second-path"} + assert.Equal(t, expected, load().NotificationPluginBinary) +} + +func TestNotificationPluginExported(t *testing.T) { + _ = os.Setenv("PROCTOR_NOTIFICATION_PLUGIN_EXPORTED", "plugin-notification,second-plugin") + + expected := []string{"plugin-notification", "second-plugin"} + assert.Equal(t, expected, load().NotificationPluginExported) +} diff --git a/proctord/storage/postgres/migrations.go b/internal/app/service/infra/db/migration/migrations.go similarity index 72% rename from proctord/storage/postgres/migrations.go rename to internal/app/service/infra/db/migration/migrations.go index d5f5c50e..2fc1758c 100644 --- a/proctord/storage/postgres/migrations.go +++ b/internal/app/service/infra/db/migration/migrations.go @@ -1,10 +1,10 @@ -package postgres +package migration import ( "fmt" + "proctor/internal/app/service/infra/config" + "proctor/internal/app/service/infra/logger" - "proctor/proctord/config" - "proctor/proctord/logger" "github.com/mattes/migrate" //postgres driver _ "github.com/mattes/migrate/database/postgres" @@ -15,7 +15,7 @@ import ( var migrationsPath, postgresConnectionURL string func init() { - postgresConnectionURL = fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=disable", config.PostgresUser(), config.PostgresPassword(), config.PostgresHost(), config.PostgresPort(), config.PostgresDatabase()) + postgresConnectionURL = fmt.Sprintf("postgres://%s:%s@%s:%d/%s?sslmode=disable", config.Config().PostgresUser, config.Config().PostgresPassword, config.Config().PostgresHost, config.Config().PostgresPort, config.Config().PostgresDatabase) migrationsPath = "file://./migrations" } diff --git a/proctord/storage/postgres/client.go b/internal/app/service/infra/db/postgresql/client.go similarity index 69% rename from proctord/storage/postgres/client.go rename to internal/app/service/infra/db/postgresql/client.go index 465bd99d..3fecb24a 100644 --- a/proctord/storage/postgres/client.go +++ b/internal/app/service/infra/db/postgresql/client.go @@ -1,11 +1,11 @@ -package postgres +package postgresql import ( "fmt" + "proctor/internal/app/service/infra/config" + "proctor/internal/app/service/infra/logger" "time" - "proctor/proctord/config" - "proctor/proctord/logger" "github.com/jmoiron/sqlx" //postgres driver _ "github.com/lib/pq" @@ -23,16 +23,16 @@ type client struct { } func NewClient() Client { - dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.PostgresDatabase(), config.PostgresUser(), config.PostgresPassword(), config.PostgresHost()) + dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.Config().PostgresDatabase, config.Config().PostgresUser, config.Config().PostgresPassword, config.Config().PostgresHost) db, err := sqlx.Connect("postgres", dataSourceName) if err != nil { panic(err.Error()) } - db.SetMaxIdleConns(config.PostgresMaxConnections()) - db.SetMaxOpenConns(config.PostgresMaxConnections()) - db.SetConnMaxLifetime(time.Duration(config.PostgresConnectionMaxLifetime()) * time.Minute) + db.SetMaxIdleConns(config.Config().PostgresMaxConnections) + db.SetMaxOpenConns(config.Config().PostgresMaxConnections) + db.SetConnMaxLifetime(time.Duration(config.Config().PostgresConnectionMaxLifetime) * time.Minute) return &client{ db: db, diff --git a/proctord/storage/postgres/client_mock.go b/internal/app/service/infra/db/postgresql/client_mock.go similarity index 97% rename from proctord/storage/postgres/client_mock.go rename to 
internal/app/service/infra/db/postgresql/client_mock.go index 0babc2ef..6f4a2988 100644 --- a/proctord/storage/postgres/client_mock.go +++ b/internal/app/service/infra/db/postgresql/client_mock.go @@ -1,4 +1,4 @@ -package postgres +package postgresql import ( "github.com/jmoiron/sqlx" diff --git a/internal/app/service/infra/db/postgresql/client_test.go b/internal/app/service/infra/db/postgresql/client_test.go new file mode 100644 index 00000000..531dd9d9 --- /dev/null +++ b/internal/app/service/infra/db/postgresql/client_test.go @@ -0,0 +1,125 @@ +package postgresql + +import ( + "fmt" + "proctor/internal/app/service/infra/id" + "testing" + + "github.com/jmoiron/sqlx" + "github.com/stretchr/testify/assert" + + executionContextModel "proctor/internal/app/service/execution/model" + executionContextStatus "proctor/internal/app/service/execution/status" + "proctor/internal/app/service/infra/config" +) + +func TestNamedExec(t *testing.T) { + dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.Config().PostgresDatabase, config.Config().PostgresUser, config.Config().PostgresPassword, config.Config().PostgresHost) + + db, err := sqlx.Connect("postgres", dataSourceName) + assert.NoError(t, err) + + postgresClient := &client{db: db} + defer postgresClient.db.Close() + + executionContext := &executionContextModel.ExecutionContext{ + JobName: "test-job-name", + ImageTag: "test-image-name", + ExecutionID: uint64(1), + Args: map[string]string{"foo": "bar"}, + Status: executionContextStatus.Finished, + } + + _, err = postgresClient.NamedExec("INSERT INTO execution_context (id, job_name, image_tag, args, status) VALUES (:id, :job_name, :image_tag, :args, :status)", executionContext) + assert.NoError(t, err) + + var persistedExecutionContext executionContextModel.ExecutionContext + err = postgresClient.db.Get(&persistedExecutionContext, `SELECT id, job_name, image_tag, args, status FROM execution_context WHERE job_name='test-job-name'`) + assert.NoError(t, err) + + assert.Equal(t, executionContext.JobName, persistedExecutionContext.JobName) + assert.Equal(t, executionContext.ImageTag, persistedExecutionContext.ImageTag) + assert.Equal(t, executionContext.ExecutionID, persistedExecutionContext.ExecutionID) + assert.Equal(t, executionContext.Args, persistedExecutionContext.Args) + assert.Equal(t, executionContext.Status, persistedExecutionContext.Status) + + _, err = postgresClient.db.Exec("DELETE FROM execution_context WHERE job_name='test-job-name'") + assert.NoError(t, err) +} + +func TestSelect(t *testing.T) { + dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.Config().PostgresDatabase, config.Config().PostgresUser, config.Config().PostgresPassword, config.Config().PostgresHost) + + db, err := sqlx.Connect("postgres", dataSourceName) + assert.NoError(t, err) + + postgresClient := &client{db: db} + defer postgresClient.db.Close() + jobName := "test-job-name" + + snowflakeID, _ := id.NextID() + executionContext := &executionContextModel.ExecutionContext{ + ExecutionID: snowflakeID, + JobName: jobName, + ImageTag: "test-image-name", + Args: map[string]string{"foo": "bar"}, + Status: executionContextStatus.Finished, + } + + _, err = postgresClient.NamedExec("INSERT INTO execution_context (id,job_name, image_tag, args, status) VALUES (:id, :job_name, :image_tag, :args, :status)", executionContext) + assert.NoError(t, err) + + executionContextResult := []executionContextModel.ExecutionContext{} + err = 
postgresClient.Select(&executionContextResult, "SELECT status from execution_context where job_name = $1", jobName) + assert.NoError(t, err) + + assert.Equal(t, executionContext.Status, executionContextResult[0].Status) + + _, err = postgresClient.db.Exec("DELETE FROM execution_context WHERE job_name='test-job-name'") + assert.NoError(t, err) +} + +func TestSelectForNoRows(t *testing.T) { + dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.Config().PostgresDatabase, config.Config().PostgresUser, config.Config().PostgresPassword, config.Config().PostgresHost) + + db, err := sqlx.Connect("postgres", dataSourceName) + assert.NoError(t, err) + + postgresClient := &client{db: db} + defer postgresClient.db.Close() + jobName := "test-job-name" + + executionContextResult := []executionContextModel.ExecutionContext{} + err = postgresClient.Select(&executionContextResult, "SELECT status from execution_context where job_name = $1", jobName) + assert.NoError(t, err) + + assert.Equal(t, 0, len(executionContextResult)) + + assert.NoError(t, err) +} + +func TestClose(t *testing.T) { + dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.Config().PostgresDatabase, config.Config().PostgresUser, config.Config().PostgresPassword, config.Config().PostgresHost) + + db, err := sqlx.Connect("postgres", dataSourceName) + assert.NoError(t, err) + + postgresClient := &client{db: db} + err = postgresClient.Close() + defer postgresClient.db.Close() + + assert.NoError(t, err) +} + +func TestGetDB(t *testing.T) { + dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.Config().PostgresDatabase, config.Config().PostgresUser, config.Config().PostgresPassword, config.Config().PostgresHost) + + db, err := sqlx.Connect("postgres", dataSourceName) + assert.NoError(t, err) + + postgresClient := &client{db: db} + defer postgresClient.db.Close() + + assert.Equal(t, db, postgresClient.GetDB()) + assert.NoError(t, err) +} diff --git a/proctord/redis/client.go b/internal/app/service/infra/db/redis/client.go similarity index 57% rename from proctord/redis/client.go rename to internal/app/service/infra/db/redis/client.go index dda4f083..53be04e1 100644 --- a/proctord/redis/client.go +++ b/internal/app/service/infra/db/redis/client.go @@ -2,6 +2,8 @@ package redis import ( "github.com/garyburd/redigo/redis" + "proctor/internal/app/service/infra/config" + "time" ) type Client interface { @@ -24,6 +26,31 @@ func NewClient() Client { return &redisClient{connPool} } +func newPool() (*redis.Pool, error) { + dialPassword := redis.DialPassword(config.Config().RedisPassword) + dialAddress := config.Config().RedisAddress + pool := &redis.Pool{ + MaxIdle: config.Config().RedisMaxActiveConnections / 2, + MaxActive: config.Config().RedisMaxActiveConnections, + IdleTimeout: 5 * time.Second, + Dial: func() (redis.Conn, error) { return redis.Dial("tcp", dialAddress, dialPassword) }, + TestOnBorrow: func(c redis.Conn, t time.Time) error { + if time.Since(t) < time.Minute { + return nil + } + _, err := c.Do("PING") + return err + }, + Wait: true, + } + + conn := pool.Get() + defer conn.Close() + + _, err := conn.Do("PING") + return pool, err +} + func (c *redisClient) GET(key string) ([]byte, error) { conn := c.connPool.Get() defer conn.Close() diff --git a/proctord/redis/client_mock.go b/internal/app/service/infra/db/redis/client_mock.go similarity index 100% rename from proctord/redis/client_mock.go rename to 
internal/app/service/infra/db/redis/client_mock.go diff --git a/proctord/redis/client_test.go b/internal/app/service/infra/db/redis/client_test.go similarity index 98% rename from proctord/redis/client_test.go rename to internal/app/service/infra/db/redis/client_test.go index 77f7d495..692ce29f 100644 --- a/proctord/redis/client_test.go +++ b/internal/app/service/infra/db/redis/client_test.go @@ -99,7 +99,7 @@ func (s *RedisClientTestSuite) TestMGET() { } func (s *RedisClientTestSuite) TearDownSuite() { - s.testRedisConn.Close() + _ = s.testRedisConn.Close() } func TestRedisClientTestSuite(t *testing.T) { diff --git a/internal/app/service/infra/db/types/base64_map.go b/internal/app/service/infra/db/types/base64_map.go new file mode 100644 index 00000000..fcc8a47a --- /dev/null +++ b/internal/app/service/infra/db/types/base64_map.go @@ -0,0 +1,41 @@ +package types + +import ( + "database/sql/driver" + "encoding/base64" + "encoding/json" + "errors" +) + +type Base64Map map[string]string + +// Value implements the driver.Valuer interface, convert map into json and encode it as Base64 +func (g Base64Map) Value() (driver.Value, error) { + jsonByte, err := json.Marshal(g) + if err != nil { + return nil, err + } + + return base64.StdEncoding.EncodeToString(jsonByte), nil +} + +func (g *Base64Map) Scan(src interface{}) error { + var source string + switch src.(type) { + case string: + source = src.(string) + default: + return errors.New("incompatible type for Base64Map") + } + + jsonByte, err := base64.StdEncoding.DecodeString(source) + if err != nil { + return err + } + + err = json.Unmarshal(jsonByte, g) + if err != nil { + return err + } + return nil +} diff --git a/internal/app/service/infra/db/types/base64_map_test.go b/internal/app/service/infra/db/types/base64_map_test.go new file mode 100644 index 00000000..d0102583 --- /dev/null +++ b/internal/app/service/infra/db/types/base64_map_test.go @@ -0,0 +1,28 @@ +package types + +import ( + "github.com/stretchr/testify/assert" + "testing" +) + +func TestBase64Map(t *testing.T) { + maps := Base64Map{ + "name": "Bimo.zx", + "address": "Royal Orchid", + "age": "17", + } + v, err := maps.Value() + if err != nil { + t.Errorf("Was not expecting an error") + } + + expectedMaps := Base64Map{} + err = (&expectedMaps).Scan(v) + if err != nil { + t.Errorf("Was not expecting an error") + } + + assert.Equal(t, "Bimo.zx", expectedMaps["name"]) + assert.Equal(t, "Royal Orchid", expectedMaps["address"]) + assert.Equal(t, "17", expectedMaps["age"]) +} diff --git a/internal/app/service/infra/id/snowflake.go b/internal/app/service/infra/id/snowflake.go new file mode 100644 index 00000000..e522859f --- /dev/null +++ b/internal/app/service/infra/id/snowflake.go @@ -0,0 +1,26 @@ +package id + +import ( + "github.com/sony/sonyflake" + "math/rand" + "time" +) + +var snowflake *sonyflake.Sonyflake +var snowflakeSetting sonyflake.Settings + +func init() { + rand.Seed(time.Now().UnixNano()) + snowflakeSetting.MachineID = func() (machineId uint16, e error) { + return uint16(rand.Uint64()), nil + } +} + +func NextID() (uint64, error) { + snowflake = sonyflake.NewSonyflake(snowflakeSetting) + return snowflake.NextID() +} + +func Extract(id uint64) map[string]uint64 { + return sonyflake.Decompose(id) +} diff --git a/internal/app/service/infra/kubernetes/client.go b/internal/app/service/infra/kubernetes/client.go new file mode 100644 index 00000000..3bb7fcc9 --- /dev/null +++ b/internal/app/service/infra/kubernetes/client.go @@ -0,0 +1,343 @@ +package kubernetes + +import ( + 
"errors" + "fmt" + "io" + "net/http" + "os" + "path/filepath" + "time" + + uuid "github.com/satori/go.uuid" + batch "k8s.io/api/batch/v1" + "k8s.io/api/core/v1" + meta "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/watch" + "k8s.io/client-go/kubernetes" + "proctor/internal/pkg/constant" + //Package needed for kubernetes cluster in google cloud + _ "k8s.io/client-go/plugin/pkg/client/auth/gcp" + kubeRestClient "k8s.io/client-go/rest" + "k8s.io/client-go/tools/clientcmd" + + "proctor/internal/app/service/infra/config" + "proctor/internal/app/service/infra/logger" +) + +var typeMeta meta.TypeMeta +var namespace string +var timeoutError = errors.New("timeout when waiting job to be available") + +func init() { + typeMeta = meta.TypeMeta{ + Kind: "Job", + APIVersion: "batch/v1", + } + namespace = config.Config().DefaultNamespace +} + +type KubernetesClient interface { + ExecuteJobWithCommand(imageName string, args map[string]string, commands []string) (string, error) + ExecuteJob(imageName string, args map[string]string) (string, error) + JobExecutionStatus(executionName string) (string, error) + WaitForReadyJob(executionName string, waitTime time.Duration) error + WaitForReadyPod(executionName string, waitTime time.Duration) (*v1.Pod, error) + GetPodLogs(pod *v1.Pod) (io.ReadCloser, error) +} + +type kubernetesClient struct { + clientSet kubernetes.Interface + httpClient *http.Client +} + +func NewClientSet() (*kubernetes.Clientset, error) { + var kubeConfig *kubeRestClient.Config + if config.Config().KubeConfig == "out-of-cluster" { + logger.Info("service is running outside kube cluster") + home := os.Getenv("HOME") + + kubeConfigPath := filepath.Join(home, ".kube", "config") + + configOverrides := &clientcmd.ConfigOverrides{} + if config.Config().KubeContext != "default" { + configOverrides.CurrentContext = config.Config().KubeContext + } + + var err error + kubeConfig, err = clientcmd.NewNonInteractiveDeferredLoadingClientConfig( + &clientcmd.ClientConfigLoadingRules{ExplicitPath: kubeConfigPath}, + configOverrides).ClientConfig() + if err != nil { + return nil, err + } + + } else { + var err error + kubeConfig, err = kubeRestClient.InClusterConfig() + if err != nil { + return nil, err + } + } + + clientSet, err := kubernetes.NewForConfig(kubeConfig) + if err != nil { + return nil, err + } + return clientSet, nil +} + +func NewKubernetesClient(httpClient *http.Client) KubernetesClient { + newClient := &kubernetesClient{ + httpClient: httpClient, + } + + var err error + newClient.clientSet, err = NewClientSet() + if err != nil { + panic(err.Error()) + } + + return newClient +} + +func getEnvVars(envMap map[string]string) []v1.EnvVar { + var envVars []v1.EnvVar + for k, v := range envMap { + envVar := v1.EnvVar{ + Name: k, + Value: v, + } + envVars = append(envVars, envVar) + } + return envVars +} + +func uniqueName() string { + return "proctor" + "-" + uuid.NewV4().String() +} + +func jobLabel(executionName string) map[string]string { + return map[string]string{ + "job": executionName, + } +} + +func jobLabelSelector(executionName string) string { + return fmt.Sprintf("job=%s", executionName) +} + +func (client *kubernetesClient) ExecuteJob(imageName string, envMap map[string]string) (string, error) { + return client.ExecuteJobWithCommand(imageName, envMap, []string{}) +} + +func (client *kubernetesClient) ExecuteJobWithCommand(imageName string, envMap map[string]string, command []string) (string, error) { + executionName := uniqueName() + label := jobLabel(executionName) + 
+ batchV1 := client.clientSet.BatchV1() + kubernetesJobs := batchV1.Jobs(namespace) + + container := v1.Container{ + Name: executionName, + Image: imageName, + Env: getEnvVars(envMap), + } + + if len(command) != 0 { + container.Command = command + } + + podSpec := v1.PodSpec{ + Containers: []v1.Container{container}, + RestartPolicy: v1.RestartPolicyNever, + ServiceAccountName: config.Config().KubeServiceAccountName, + } + + objectMeta := meta.ObjectMeta{ + Name: executionName, + Labels: label, + Annotations: config.Config().JobPodAnnotations, + } + + template := v1.PodTemplateSpec{ + ObjectMeta: objectMeta, + Spec: podSpec, + } + + jobSpec := batch.JobSpec{ + Template: template, + ActiveDeadlineSeconds: config.Config().KubeJobActiveDeadlineSeconds, + BackoffLimit: config.Config().KubeJobRetries, + } + + jobToRun := batch.Job{ + TypeMeta: typeMeta, + ObjectMeta: objectMeta, + Spec: jobSpec, + } + + _, err := kubernetesJobs.Create(&jobToRun) + if err != nil { + return "", err + } + return executionName, nil +} + +func (client *kubernetesClient) WaitForReadyJob(executionName string, waitTime time.Duration) error { + batchV1 := client.clientSet.BatchV1() + jobs := batchV1.Jobs(namespace) + listOptions := meta.ListOptions{ + TypeMeta: typeMeta, + LabelSelector: jobLabelSelector(executionName), + } + + var err error + for i := 0; i < config.Config().KubeWaitForResourcePollCount; i += 1 { + watchJob, watchErr := jobs.Watch(listOptions) + if watchErr != nil { + err = watchErr + continue + } + + timeoutChan := time.After(waitTime) + resultChan := watchJob.ResultChan() + + var job *batch.Job + for { + select { + case event := <-resultChan: + if event.Type == watch.Error { + err = watcherError("job", listOptions) + break + } + + // Ignore empty events + if event.Object == nil { + continue + } + + job = event.Object.(*batch.Job) + if job.Status.Active >= 1 || job.Status.Succeeded >= 1 || job.Status.Failed >= 1 { + watchJob.Stop() + return nil + } + case <-timeoutChan: + err = timeoutError + break + } + if err != nil { + watchJob.Stop() + break + } + } + } + + return err +} + +func (client *kubernetesClient) WaitForReadyPod(executionName string, waitTime time.Duration) (*v1.Pod, error) { + coreV1 := client.clientSet.CoreV1() + kubernetesPods := coreV1.Pods(namespace) + listOptions := meta.ListOptions{ + LabelSelector: jobLabelSelector(executionName), + } + + var err error + for i := 0; i < config.Config().KubeWaitForResourcePollCount; i += 1 { + watchJob, watchErr := kubernetesPods.Watch(listOptions) + if watchErr != nil { + err = watchErr + continue + } + + timeoutChan := time.After(waitTime) + resultChan := watchJob.ResultChan() + + var pod *v1.Pod + for { + select { + case event := <-resultChan: + if event.Type == watch.Error { + err = watcherError("pod", listOptions) + watchJob.Stop() + break + } + + // Ignore empty events + if event.Object == nil { + continue + } + + pod = event.Object.(*v1.Pod) + if pod.Status.Phase == v1.PodRunning || pod.Status.Phase == v1.PodSucceeded || pod.Status.Phase == v1.PodFailed { + watchJob.Stop() + return pod, nil + } + case <-timeoutChan: + err = timeoutError + watchJob.Stop() + break + } + if err != nil { + watchJob.Stop() + break + } + } + } + + logger.Info("Wait for ready pod return pod ", nil, " and error ", err) + return nil, err +} + +func (client *kubernetesClient) JobExecutionStatus(executionName string) (string, error) { + batchV1 := client.clientSet.BatchV1() + kubernetesJobs := batchV1.Jobs(namespace) + listOptions := meta.ListOptions{ + TypeMeta: 
typeMeta, + LabelSelector: jobLabelSelector(executionName), + } + + watchJob, err := kubernetesJobs.Watch(listOptions) + if err != nil { + return constant.JobFailed, err + } + + resultChan := watchJob.ResultChan() + defer watchJob.Stop() + var event watch.Event + var jobEvent *batch.Job + + for event = range resultChan { + if event.Type == watch.Error { + return constant.JobExecutionStatusFetchError, nil + } + + jobEvent = event.Object.(*batch.Job) + if jobEvent.Status.Succeeded >= int32(1) { + return constant.JobSucceeded, nil + } else if jobEvent.Status.Failed >= int32(1) { + return constant.JobFailed, nil + } + } + + return constant.NoDefinitiveJobExecutionStatusFound, nil +} + +func (client *kubernetesClient) GetPodLogs(pod *v1.Pod) (io.ReadCloser, error) { + logger.Debug("reading pod logs for: ", pod.Name) + podLogOpts := v1.PodLogOptions{ + Follow: true, + } + request := client.clientSet.CoreV1().Pods(pod.Namespace).GetLogs(pod.Name, &podLogOpts) + response, err := request.Stream() + + if err != nil { + return nil, err + } + return response, nil +} + +func watcherError(resource string, listOptions meta.ListOptions) error { + return fmt.Errorf("watch error when waiting for %s with list option %v", resource, listOptions) +} diff --git a/internal/app/service/infra/kubernetes/client_integration_test.go b/internal/app/service/infra/kubernetes/client_integration_test.go new file mode 100644 index 00000000..bb2c6084 --- /dev/null +++ b/internal/app/service/infra/kubernetes/client_integration_test.go @@ -0,0 +1,97 @@ +package kubernetes + +import ( + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + "k8s.io/api/core/v1" + meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/client-go/kubernetes" + "os" + "proctor/internal/app/service/infra/config" + kubeHTTPClient "proctor/internal/app/service/infra/kubernetes/http" + "proctor/internal/pkg/constant" + "testing" +) + +type IntegrationTestSuite struct { + suite.Suite + testClient KubernetesClient + clientSet kubernetes.Interface +} + +func (suite *IntegrationTestSuite) SetupTest() { + t := suite.T() + kubeHTTPClient, err := kubeHTTPClient.NewClient() + assert.NoError(t, err) + suite.testClient = NewKubernetesClient(kubeHTTPClient) + suite.clientSet, err = NewClientSet() + assert.NoError(t, err) +} + +func (suite *IntegrationTestSuite) TestJobExecution() { + t := suite.T() + _ = os.Setenv("PROCTOR_JOB_POD_ANNOTATIONS", "{\"key.one\":\"true\"}") + _ = os.Setenv("PROCTOR_KUBE_SERVICE_ACCOUNT_NAME", "default") + config.Reset() + envVarsForContainer := map[string]string{"SAMPLE_ARG": "sample-value"} + sampleImageName := "busybox" + + executedJobname, err := suite.testClient.ExecuteJobWithCommand(sampleImageName, envVarsForContainer, []string{"echo", "Bimo Horizon"}) + assert.NoError(t, err) + + typeMeta := meta_v1.TypeMeta{ + Kind: "Job", + APIVersion: "batch/v1", + } + + listOptions := meta_v1.ListOptions{ + TypeMeta: typeMeta, + LabelSelector: jobLabelSelector(executedJobname), + } + + namespace := config.Config().DefaultNamespace + listOfJobs, err := suite.clientSet.BatchV1().Jobs(namespace).List(listOptions) + assert.NoError(t, err) + executedJob := listOfJobs.Items[0] + + assert.Equal(t, executedJobname, executedJob.ObjectMeta.Name) + assert.Equal(t, executedJobname, executedJob.Spec.Template.ObjectMeta.Name) + + expectedLabel := jobLabel(executedJobname) + assert.Equal(t, expectedLabel, executedJob.ObjectMeta.Labels) + assert.Equal(t, map[string]string{"key.one": "true"}, executedJob.Spec.Template.Annotations) + 
assert.Equal(t, "default", executedJob.Spec.Template.Spec.ServiceAccountName) + + assert.Equal(t, config.Config().KubeJobActiveDeadlineSeconds, executedJob.Spec.ActiveDeadlineSeconds) + assert.Equal(t, config.Config().KubeJobRetries, executedJob.Spec.BackoffLimit) + + assert.Equal(t, v1.RestartPolicyNever, executedJob.Spec.Template.Spec.RestartPolicy) + + container := executedJob.Spec.Template.Spec.Containers[0] + assert.Equal(t, executedJobname, container.Name) + + assert.Equal(t, sampleImageName, container.Image) + + expectedEnvVars := getEnvVars(envVarsForContainer) + assert.Equal(t, expectedEnvVars, container.Env) +} + +func (suite *IntegrationTestSuite) TestJobExecutionStatus() { + t := suite.T() + _ = os.Setenv("PROCTOR_JOB_POD_ANNOTATIONS", "{\"key.one\":\"true\"}") + envVarsForContainer := map[string]string{"SAMPLE_ARG": "sample-value"} + sampleImageName := "busybox" + + executedJobname, err := suite.testClient.ExecuteJobWithCommand(sampleImageName, envVarsForContainer, []string{"echo", "Bimo Horizon"}) + assert.NoError(t, err) + + status, err := suite.testClient.JobExecutionStatus(executedJobname) + assert.Equal(t, status, constant.JobSucceeded) +} + +func TestIntegrationTestSuite(t *testing.T) { + value, available := os.LookupEnv("ENABLE_INTEGRATION_TEST") + if available == true && value == "true" { + suite.Run(t, new(IntegrationTestSuite)) + } +} diff --git a/internal/app/service/infra/kubernetes/client_mock.go b/internal/app/service/infra/kubernetes/client_mock.go new file mode 100644 index 00000000..e8c55879 --- /dev/null +++ b/internal/app/service/infra/kubernetes/client_mock.go @@ -0,0 +1,43 @@ +package kubernetes + +import ( + "io" + v1 "k8s.io/api/core/v1" + "time" + + "github.com/stretchr/testify/mock" +) + +type MockKubernetesClient struct { + mock.Mock +} + +func (m *MockKubernetesClient) ExecuteJob(jobName string, envMap map[string]string) (string, error) { + args := m.Called(jobName, envMap) + return args.String(0), args.Error(1) +} + +func (m *MockKubernetesClient) ExecuteJobWithCommand(jobName string, envMap map[string]string, command []string) (string, error) { + args := m.Called(jobName, envMap, command) + return args.String(0), args.Error(1) +} + +func (m *MockKubernetesClient) JobExecutionStatus(executionName string) (string, error) { + args := m.Called(executionName) + return args.String(0), args.Error(1) +} + +func (m *MockKubernetesClient) WaitForReadyJob(executionName string, waitTime time.Duration) error { + args := m.Called(executionName, waitTime) + return args.Error(0) +} + +func (m *MockKubernetesClient) WaitForReadyPod(executionName string, waitTime time.Duration) (*v1.Pod, error) { + args := m.Called(executionName, waitTime) + return args.Get(0).(*v1.Pod), args.Error(1) +} + +func (m *MockKubernetesClient) GetPodLogs(pod *v1.Pod) (io.ReadCloser, error) { + args := m.Called(pod) + return args.Get(0).(io.ReadCloser), args.Error(1) +} diff --git a/internal/app/service/infra/kubernetes/client_test.go b/internal/app/service/infra/kubernetes/client_test.go new file mode 100644 index 00000000..872f80b3 --- /dev/null +++ b/internal/app/service/infra/kubernetes/client_test.go @@ -0,0 +1,409 @@ +package kubernetes + +import ( + "fmt" + "net/http" + "os" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/suite" + batchV1 "k8s.io/api/batch/v1" + v1 "k8s.io/api/core/v1" + meta "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/watch" + fakeclientset "k8s.io/client-go/kubernetes/fake" + batch 
"k8s.io/client-go/kubernetes/typed/batch/v1" + testing_kubernetes "k8s.io/client-go/testing" + + "proctor/internal/app/service/infra/config" + "proctor/internal/pkg/constant" +) + +type ClientTestSuite struct { + suite.Suite + testClient KubernetesClient + testKubernetesJobs batch.JobInterface + fakeClientSet *fakeclientset.Clientset + jobName string + podName string + fakeClientSetStreaming *fakeclientset.Clientset + fakeHTTPClient *http.Client + testClientStreaming KubernetesClient +} + +func (suite *ClientTestSuite) SetupTest() { + suite.fakeClientSet = fakeclientset.NewSimpleClientset() + suite.testClient = &kubernetesClient{ + clientSet: suite.fakeClientSet, + } + suite.jobName = "job1" + suite.podName = "pod1" + namespace := config.Config().DefaultNamespace + suite.fakeClientSetStreaming = fakeclientset.NewSimpleClientset(&v1.Pod{ + TypeMeta: meta.TypeMeta{ + Kind: "Pod", + APIVersion: "v1", + }, + ObjectMeta: meta.ObjectMeta{ + Name: suite.podName, + Namespace: namespace, + Labels: map[string]string{ + "tag": "", + "job": suite.jobName, + }, + }, + Status: v1.PodStatus{ + Phase: v1.PodSucceeded, + }, + }) + + suite.fakeHTTPClient = &http.Client{} + suite.testClientStreaming = &kubernetesClient{ + clientSet: suite.fakeClientSetStreaming, + httpClient: suite.fakeHTTPClient, + } +} + +func (suite *ClientTestSuite) TestJobExecution() { + t := suite.T() + _ = os.Setenv("PROCTOR_JOB_POD_ANNOTATIONS", "{\"key.one\":\"true\"}") + _ = os.Setenv("PROCTOR_KUBE_SERVICE_ACCOUNT_NAME", "default") + config.Reset() + envVarsForContainer := map[string]string{"SAMPLE_ARG": "sample-value"} + sampleImageName := "img1" + + executedJobname, err := suite.testClient.ExecuteJob(sampleImageName, envVarsForContainer) + assert.NoError(t, err) + + typeMeta := meta.TypeMeta{ + Kind: "Job", + APIVersion: "batch/v1", + } + + listOptions := meta.ListOptions{ + TypeMeta: typeMeta, + LabelSelector: jobLabelSelector(executedJobname), + } + namespace := config.Config().DefaultNamespace + listOfJobs, err := suite.fakeClientSet.BatchV1().Jobs(namespace).List(listOptions) + assert.NoError(t, err) + executedJob := listOfJobs.Items[0] + + assert.Equal(t, typeMeta, executedJob.TypeMeta) + + assert.Equal(t, executedJobname, executedJob.ObjectMeta.Name) + assert.Equal(t, executedJobname, executedJob.Spec.Template.ObjectMeta.Name) + + expectedLabel := jobLabel(executedJobname) + assert.Equal(t, expectedLabel, executedJob.ObjectMeta.Labels) + assert.Equal(t, expectedLabel, executedJob.Spec.Template.ObjectMeta.Labels) + assert.Equal(t, map[string]string{"key.one": "true"}, executedJob.Spec.Template.Annotations) + assert.Equal(t, "default", executedJob.Spec.Template.Spec.ServiceAccountName) + + assert.Equal(t, config.Config().KubeJobActiveDeadlineSeconds, executedJob.Spec.ActiveDeadlineSeconds) + assert.Equal(t, config.Config().KubeJobRetries, executedJob.Spec.BackoffLimit) + + assert.Equal(t, v1.RestartPolicyNever, executedJob.Spec.Template.Spec.RestartPolicy) + + container := executedJob.Spec.Template.Spec.Containers[0] + assert.Equal(t, executedJobname, container.Name) + + assert.Equal(t, sampleImageName, container.Image) + + expectedEnvVars := getEnvVars(envVarsForContainer) + assert.Equal(t, expectedEnvVars, container.Env) +} + +func (suite *ClientTestSuite) TestWaitForReadyJob() { + t := suite.T() + + var testJob batchV1.Job + uniqueJobName := "proctor-job-1" + label := jobLabel(uniqueJobName) + objectMeta := meta.ObjectMeta{ + Name: uniqueJobName, + Labels: label, + } + testJob.ObjectMeta = objectMeta + waitTime := 
config.Config().KubeLogProcessWaitTime * time.Second + + watcher := watch.NewFake() + suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + go func() { + testJob.Status.Succeeded = 1 + watcher.Modify(&testJob) + + watcher.Stop() + }() + + err := suite.testClient.WaitForReadyJob(uniqueJobName, waitTime) + assert.NoError(t, err) +} + +func (suite *ClientTestSuite) TestWaitForReadyJobWatcherError() { + t := suite.T() + + var testJob batchV1.Job + uniqueJobName := "proctor-job-2" + label := jobLabel(uniqueJobName) + objectMeta := meta.ObjectMeta{ + Name: uniqueJobName, + Labels: label, + } + testJob.ObjectMeta = objectMeta + listOptions := meta.ListOptions{ + TypeMeta: typeMeta, + LabelSelector: jobLabelSelector(uniqueJobName), + } + config.Reset() + waitTime := config.Config().KubeLogProcessWaitTime * time.Second + + watcher := watch.NewRaceFreeFake() + suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + go func() { + watcher.Error(&testJob) + watcher.Error(&testJob) + watcher.Error(&testJob) + watcher.Error(&testJob) + watcher.Error(&testJob) + }() + + err := suite.testClient.WaitForReadyJob(uniqueJobName, waitTime) + assert.EqualError(t, err, fmt.Sprintf("watch error when waiting for job with list option %v", listOptions)) +} + +func (suite *ClientTestSuite) TestWaitForReadyJobTimeoutError() { + t := suite.T() + + var testJob batchV1.Job + uniqueJobName := "proctor-job-3" + label := jobLabel(uniqueJobName) + objectMeta := meta.ObjectMeta{ + Name: uniqueJobName, + Labels: label, + } + testJob.ObjectMeta = objectMeta + waitTime := time.Millisecond * 100 + + watcher := watch.NewRaceFreeFake() + suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + err := suite.testClient.WaitForReadyJob(uniqueJobName, waitTime) + assert.EqualError(t, err, "timeout when waiting job to be available") +} + +func (suite *ClientTestSuite) TestWaitForReadyPod() { + t := suite.T() + + var testPod v1.Pod + uniquePodName := "proctor-pod-1" + label := jobLabel(uniquePodName) + objectMeta := meta.ObjectMeta{ + Name: uniquePodName, + Labels: label, + } + testPod.ObjectMeta = objectMeta + config.Reset() + waitTime := config.Config().KubeLogProcessWaitTime * time.Second + + watcher := watch.NewFake() + suite.fakeClientSet.PrependWatchReactor("pods", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + go func() { + testPod.Status.Phase = v1.PodSucceeded + watcher.Modify(&testPod) + + watcher.Stop() + }() + + pod, err := suite.testClient.WaitForReadyPod(uniquePodName, waitTime) + assert.NoError(t, err) + assert.NotNil(t, pod) + assert.Equal(t, pod.Name, uniquePodName) +} + +func (suite *ClientTestSuite) TestWaitForReadyPodWatcherError() { + t := suite.T() + + var testPod v1.Pod + uniquePodName := "proctor-pod-2" + label := jobLabel(uniquePodName) + objectMeta := meta.ObjectMeta{ + Name: uniquePodName, + Labels: label, + } + testPod.ObjectMeta = objectMeta + listOptions := meta.ListOptions{ + LabelSelector: jobLabelSelector(uniquePodName), + } + config.Reset() + waitTime := config.Config().KubeLogProcessWaitTime * time.Second + + watcher := watch.NewRaceFreeFake() + suite.fakeClientSet.PrependWatchReactor("pods", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + go func() { + watcher.Error(&testPod) + watcher.Error(&testPod) + watcher.Error(&testPod) + watcher.Error(&testPod) + watcher.Error(&testPod) + }() + + _, err := 
suite.testClient.WaitForReadyPod(uniquePodName, waitTime) + assert.EqualError(t, err, fmt.Sprintf("watch error when waiting for pod with list option %v", listOptions)) +} + +func (suite *ClientTestSuite) TestWaitForReadyPodTimeoutError() { + t := suite.T() + + var testPod v1.Pod + uniquePodName := "proctor-pod-3" + label := jobLabel(uniquePodName) + objectMeta := meta.ObjectMeta{ + Name: uniquePodName, + Labels: label, + } + testPod.ObjectMeta = objectMeta + waitTime := time.Millisecond * 100 + + watcher := watch.NewFake() + suite.fakeClientSet.PrependWatchReactor("pods", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + _, err := suite.testClient.WaitForReadyPod(uniquePodName, waitTime) + assert.EqualError(t, err, "timeout when waiting job to be available") +} + +func (suite *ClientTestSuite) TestShouldReturnSuccessJobExecutionStatus() { + t := suite.T() + + watcher := watch.NewFake() + suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + var activeJob batchV1.Job + var succeededJob batchV1.Job + uniqueJobName := "proctor-job-4" + label := jobLabel(uniqueJobName) + objectMeta := meta.ObjectMeta{ + Name: uniqueJobName, + Labels: label, + } + activeJob.ObjectMeta = objectMeta + succeededJob.ObjectMeta = objectMeta + + go func() { + activeJob.Status.Active = 1 + watcher.Modify(&activeJob) + + succeededJob.Status.Active = 0 + succeededJob.Status.Succeeded = 1 + watcher.Modify(&succeededJob) + + time.Sleep(time.Second * 1) + watcher.Stop() + }() + + jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) + assert.NoError(t, err) + + assert.Equal(t, constant.JobSucceeded, jobExecutionStatus, "Should return SUCCEEDED") +} + +func (suite *ClientTestSuite) TestShouldReturnFailedJobExecutionStatus() { + t := suite.T() + + watcher := watch.NewFake() + suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + var activeJob batchV1.Job + var failedJob batchV1.Job + uniqueJobName := "proctor-job-5" + label := jobLabel(uniqueJobName) + objectMeta := meta.ObjectMeta{ + Name: uniqueJobName, + Labels: label, + } + activeJob.ObjectMeta = objectMeta + failedJob.ObjectMeta = objectMeta + + go func() { + activeJob.Status.Active = 1 + watcher.Modify(&activeJob) + failedJob.Status.Active = 0 + failedJob.Status.Failed = 1 + watcher.Modify(&failedJob) + + time.Sleep(time.Second * 1) + watcher.Stop() + }() + + jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) + assert.NoError(t, err) + + assert.Equal(t, constant.JobFailed, jobExecutionStatus, "Should return FAILED") +} + +func (suite *ClientTestSuite) TestJobExecutionStatusForNonDefinitiveStatus() { + t := suite.T() + + watcher := watch.NewFake() + suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + var testJob batchV1.Job + uniqueJobName := "proctor-job-6" + label := jobLabel(uniqueJobName) + objectMeta := meta.ObjectMeta{ + Name: uniqueJobName, + Labels: label, + } + testJob.ObjectMeta = objectMeta + + go func() { + testJob.Status.Active = 1 + watcher.Modify(&testJob) + + time.Sleep(time.Second * 1) + watcher.Stop() + }() + + jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) + assert.NoError(t, err) + + assert.Equal(t, constant.NoDefinitiveJobExecutionStatusFound, jobExecutionStatus, "Should return NO_DEFINITIVE_JOB_EXECUTION_STATUS_FOUND") +} + +func (suite *ClientTestSuite) TestShouldReturnJobExecutionStatusFetchError() { + 
t := suite.T() + + watcher := watch.NewFake() + suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) + + var testJob batchV1.Job + uniqueJobName := "proctor-job-7" + label := jobLabel(uniqueJobName) + objectMeta := meta.ObjectMeta{ + Name: uniqueJobName, + Labels: label, + } + testJob.ObjectMeta = objectMeta + + go func() { + watcher.Error(&testJob) + + time.Sleep(time.Second * 1) + watcher.Stop() + }() + + jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) + assert.NoError(t, err) + + assert.Equal(t, constant.JobExecutionStatusFetchError, jobExecutionStatus, "Should return JOB_EXECUTION_STATUS_FETCH_ERROR") +} + +func TestClientTestSuite(t *testing.T) { + suite.Run(t, new(ClientTestSuite)) +} diff --git a/internal/app/service/infra/kubernetes/http/client.go b/internal/app/service/infra/kubernetes/http/client.go new file mode 100644 index 00000000..1cf9e8a6 --- /dev/null +++ b/internal/app/service/infra/kubernetes/http/client.go @@ -0,0 +1,10 @@ +package http + +import ( + "net/http" +) + +func NewClient() (*http.Client, error) { + httpClient := &http.Client{} + return httpClient, nil +} diff --git a/internal/app/service/infra/kubernetes/http/client_test.go b/internal/app/service/infra/kubernetes/http/client_test.go new file mode 100644 index 00000000..0e2e1c52 --- /dev/null +++ b/internal/app/service/infra/kubernetes/http/client_test.go @@ -0,0 +1,13 @@ +package http + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestNewClient(t *testing.T) { + httpClient, err := NewClient() + assert.NotNil(t, httpClient) + assert.NoError(t, err) +} diff --git a/proctord/logger/logrus.go b/internal/app/service/infra/logger/logrus.go similarity index 71% rename from proctord/logger/logrus.go rename to internal/app/service/infra/logger/logrus.go index 0fcbc5bf..d1e4e352 100644 --- a/proctord/logger/logrus.go +++ b/internal/app/service/infra/logger/logrus.go @@ -2,8 +2,7 @@ package logger import ( "os" - - "proctor/proctord/config" + "proctor/internal/app/service/infra/config" log "github.com/sirupsen/logrus" ) @@ -18,7 +17,7 @@ func Setup() { log.SetFormatter(&log.JSONFormatter{}) log.SetOutput(os.Stdout) - logLevel, err := log.ParseLevel(config.LogLevel()) + logLevel, err := log.ParseLevel(config.Config().LogLevel) if err != nil { log.Panic(err) } @@ -61,3 +60,11 @@ func Fatal(args ...interface{}) { func Panic(args ...interface{}) { log.Panic(args...) 
} + +func LogErrors(err error, action string, args ...interface{}) { + if err != nil { + Error("Failed to ", action, " with errors ", err, " and data ", args) + } else { + Debug("Success to ", action, " with data ", args) + } +} diff --git a/internal/app/service/infra/mail/mailer.go b/internal/app/service/infra/mail/mailer.go new file mode 100644 index 00000000..0bee1fb5 --- /dev/null +++ b/internal/app/service/infra/mail/mailer.go @@ -0,0 +1,59 @@ +package mail + +import ( + "bytes" + "fmt" + "net/smtp" + "strings" + + executionContextModel "proctor/internal/app/service/execution/model" + "proctor/internal/app/service/infra/config" + scheduleModel "proctor/internal/app/service/schedule/model" +) + +type Mailer interface { + Send(executionContext executionContextModel.ExecutionContext, schedule scheduleModel.Schedule) error +} + +type mailer struct { + from string + addr string + auth smtp.Auth +} + +func New(mailServerHost, mailServerPort string) Mailer { + auth := smtp.PlainAuth("", config.Config().MailUsername, config.Config().MailPassword, mailServerHost) + addr := mailServerHost + ":" + mailServerPort + + return &mailer{ + from: config.Config().MailUsername, + addr: addr, + auth: auth, + } +} + +func (mailer *mailer) Send(executionContext executionContextModel.ExecutionContext, schedule scheduleModel.Schedule) error { + message := constructMessage(executionContext.JobName, executionContext.ExecutionID, string(executionContext.Status), executionContext.Args) + recipients := strings.Split(schedule.NotificationEmails, ",") + return smtp.SendMail(mailer.addr, mailer.auth, mailer.from, recipients, message) +} + +func constructMessage(jobName string, executionID uint64, executionStatus string, executionArgs map[string]string) []byte { + subject := "Subject: " + jobName + " | scheduled execution " + executionStatus + body := "Proc execution details:\n" + + "\nName:\t" + jobName + + "\nArgs:\t" + MapToString(executionArgs) + + "\nID:\t" + fmt.Sprint(executionID) + + "\nStatus:\t" + executionStatus + + "\n\n\nThis is an auto-generated email" + + return []byte(subject + "\n\n" + body) +} + +func MapToString(someMap map[string]string) string { + b := new(bytes.Buffer) + for key, value := range someMap { + _, _ = fmt.Fprintf(b, "%s=\"%s\",", key, value) + } + return strings.TrimRight(b.String(), ",") +} diff --git a/internal/app/service/infra/mail/mailer_mock.go b/internal/app/service/infra/mail/mailer_mock.go new file mode 100644 index 00000000..572464e0 --- /dev/null +++ b/internal/app/service/infra/mail/mailer_mock.go @@ -0,0 +1,17 @@ +package mail + +import ( + "github.com/stretchr/testify/mock" + + executionContextModel "proctor/internal/app/service/execution/model" + scheduleModel "proctor/internal/app/service/schedule/model" +) + +type MockMailer struct { + mock.Mock +} + +func (m *MockMailer) Send(executionContext executionContextModel.ExecutionContext, schedule scheduleModel.Schedule) error { + args := m.Called(executionContext, schedule) + return args.Error(0) +} diff --git a/proctord/mail/mailer_test.go b/internal/app/service/infra/mail/mailer_test.go similarity index 66% rename from proctord/mail/mailer_test.go rename to internal/app/service/infra/mail/mailer_test.go index bf609647..618a5d5d 100644 --- a/proctord/mail/mailer_test.go +++ b/internal/app/service/infra/mail/mailer_test.go @@ -9,8 +9,10 @@ import ( "strings" "testing" - "proctor/proctord/config" - "proctor/proctord/utility" + executionContextModel "proctor/internal/app/service/execution/model" + executionStatus 
"proctor/internal/app/service/execution/status" + "proctor/internal/app/service/infra/config" + scheduleModel "proctor/internal/app/service/schedule/model" ) func TestSendMail(t *testing.T) { @@ -74,12 +76,17 @@ func TestSendMail(t *testing.T) { }(strings.Split(server, "\r\n")) mailer := New(strings.Split(l.Addr().String(), ":")[0], strings.Split(l.Addr().String(), ":")[1]) - jobName := "proc-name" - jobExecutionID := "some-id" - jobExecutionStatus := "SUCCEEDED" - jobArgs := map[string]string{"ARG_ONE": "foo"} - recipients := []string{"foo@bar.com", "goo@bar.com"} - err = mailer.Send(jobName, jobExecutionID, jobExecutionStatus, jobArgs, recipients) + executionContext := executionContextModel.ExecutionContext{ + JobName: "proc-name", + ExecutionID: uint64(1), + Status: executionStatus.Finished, + Args: map[string]string{"ARG_ONE": "foo"}, + } + schedule := scheduleModel.Schedule{ + NotificationEmails: "foo@bar.com,goo@bar.com", + } + recipients := strings.Split(schedule.NotificationEmails, ",") + err = mailer.Send(executionContext, schedule) if err != nil { t.Errorf("%v", err) } @@ -90,21 +97,21 @@ func TestSendMail(t *testing.T) { receivedMail := cmdbuf.String() - stringifiedJobArgs := utility.MapToString(jobArgs) + stringifiedJobArgs := MapToString(executionContext.Args) var sendMailClient = `EHLO localhost HELO localhost -MAIL FROM:<` + config.MailUsername() + `> +MAIL FROM:<` + config.Config().MailUsername + `> RCPT TO:<` + recipients[0] + `> RCPT TO:<` + recipients[1] + `> DATA -Subject: ` + jobName + ` | scheduled execution ` + jobExecutionStatus + ` +Subject: ` + executionContext.JobName + ` | scheduled execution ` + string(executionContext.Status) + ` Proc execution details: -Name: ` + jobName + ` +Name: ` + executionContext.JobName + ` Args: ` + stringifiedJobArgs + ` -ID: ` + jobExecutionID + ` -Status: ` + jobExecutionStatus + ` +ID: ` + fmt.Sprint(executionContext.ExecutionID) + ` +Status: ` + string(executionContext.Status) + ` This is an auto-generated email diff --git a/internal/app/service/infra/metrics/prometheus.go b/internal/app/service/infra/metrics/prometheus.go new file mode 100644 index 00000000..35988115 --- /dev/null +++ b/internal/app/service/infra/metrics/prometheus.go @@ -0,0 +1,13 @@ +package metrics + +import ( + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" +) + +var ( + ExecutionCounter = promauto.NewCounter(prometheus.CounterOpts{ + Name: "execution_total", + Help: "The total number of executions", + }) +) diff --git a/internal/app/service/infra/plugin/plugin.go b/internal/app/service/infra/plugin/plugin.go new file mode 100644 index 00000000..33b03603 --- /dev/null +++ b/internal/app/service/infra/plugin/plugin.go @@ -0,0 +1,32 @@ +package plugin + +import ( + "fmt" + "plugin" + "proctor/internal/app/service/infra/logger" +) + +type GoPlugin interface { + Load(pluginBinary string, exportedName string) (plugin.Symbol, error) +} + +type goPlugin struct{} + +func (g *goPlugin) Load(pluginBinary string, exportedName string) (plugin.Symbol, error) { + binary, err := plugin.Open(pluginBinary) + logger.LogErrors(err, "load auth plugin binary from location: ", pluginBinary) + if err != nil { + return nil, fmt.Errorf("failed to load plugin binary from location: %s", pluginBinary) + } + + raw, err := binary.Lookup(exportedName) + logger.LogErrors(err, "Lookup ", pluginBinary, " for ", exportedName) + if err != nil { + return nil, fmt.Errorf("failed to Lookup plugin binary from location: %s with Exported 
Name: %s", pluginBinary, exportedName) + } + return raw, nil +} + +func NewGoPlugin() GoPlugin { + return &goPlugin{} +} diff --git a/internal/app/service/infra/plugin/plugin_integration_test.go b/internal/app/service/infra/plugin/plugin_integration_test.go new file mode 100644 index 00000000..c2051a54 --- /dev/null +++ b/internal/app/service/infra/plugin/plugin_integration_test.go @@ -0,0 +1,93 @@ +package plugin + +import ( + "fmt" + "os" + "testing" + + "github.com/stretchr/testify/assert" + + "proctor/internal/app/service/infra/config" +) + +type context interface { + setUp(t *testing.T) + tearDown() + instance() *testContext +} + +type testContext struct { + goPlugin GoPlugin +} + +func (context *testContext) setUp(t *testing.T) { + value, available := os.LookupEnv("ENABLE_INTEGRATION_TEST") + if available != true || value != "true" { + t.SkipNow() + } + + context.goPlugin = NewGoPlugin() + assert.NotNil(t, context.goPlugin) +} + +func (context *testContext) tearDown() { +} + +func (context *testContext) instance() *testContext { + return context +} + +func newContext() context { + ctx := &testContext{} + return ctx +} + +func TestGoPlugin_LoadPluginFailed(t *testing.T) { + ctx := newContext() + ctx.setUp(t) + + binary := "non-existing-binary" + raw, err := ctx.instance().goPlugin.Load(binary, config.Config().AuthPluginExported) + assert.EqualError(t, err, fmt.Sprintf("failed to load plugin binary from location: %s", binary)) + assert.Nil(t, raw) +} + +func TestGoPlugin_LoadExportedFailed(t *testing.T) { + ctx := newContext() + ctx.setUp(t) + + exportedName := "non-existing-exported" + raw, err := ctx.instance().goPlugin.Load(config.Config().AuthPluginBinary, exportedName) + assert.EqualError(t, err, fmt.Sprintf("failed to Lookup plugin binary from location: %s with Exported Name: %s", config.Config().AuthPluginBinary, exportedName)) + assert.Nil(t, raw) +} + +func TestGoPlugin_LoadSuccessfully(t *testing.T) { + ctx := newContext() + ctx.setUp(t) + + raw, err := ctx.instance().goPlugin.Load(config.Config().AuthPluginBinary, config.Config().AuthPluginExported) + assert.NoError(t, err) + assert.NotNil(t, raw) +} + +func TestGoPlugin_LoadNotificationSuccessfully(t *testing.T) { + ctx := newContext() + ctx.setUp(t) + + pluginsBinary := config.Config().NotificationPluginBinary + pluginsExported := config.Config().NotificationPluginExported + for idx, pluginBinary := range pluginsBinary { + pluginExported := pluginsExported[idx] + raw, err := ctx.instance().goPlugin.Load(pluginBinary, pluginExported) + assert.NoError(t, err) + assert.NotNil(t, raw) + } +} + +func TestGoPlugin_ShitShit(t *testing.T) { + ctx := newContext() + ctx.setUp(t) + + assert.True(t, true) +} diff --git a/internal/app/service/infra/plugin/plugin_mock.go b/internal/app/service/infra/plugin/plugin_mock.go new file mode 100644 index 00000000..31efb947 --- /dev/null +++ b/internal/app/service/infra/plugin/plugin_mock.go @@ -0,0 +1,15 @@ +package plugin + +import ( + "github.com/stretchr/testify/mock" + "plugin" +) + +type GoPluginMock struct { + mock.Mock +} + +func (g *GoPluginMock) Load(pluginBinary string, exportedName string) (plugin.Symbol, error) { + args := g.Called(pluginBinary, exportedName) + return args.Get(0).(plugin.Symbol), args.Error(1) +} diff --git a/internal/app/service/metadata/handler/http.go b/internal/app/service/metadata/handler/http.go new file mode 100644 index 00000000..3069842a --- /dev/null +++ b/internal/app/service/metadata/handler/http.go @@ -0,0 +1,98 @@ +package handler + +import ( + 
"encoding/json" + "net/http" + "proctor/internal/app/service/infra/logger" + "proctor/internal/app/service/metadata/repository" + "proctor/internal/app/service/security/middleware" + "proctor/internal/pkg/constant" + modelMetadata "proctor/internal/pkg/model/metadata" + "proctor/pkg/auth" +) + +type metadataHTTPHandler struct { + repository repository.MetadataRepository +} + +type MetadataHTTPHandler interface { + Post() http.HandlerFunc + GetAll() http.HandlerFunc +} + +func NewMetadataHTTPHandler(repository repository.MetadataRepository) MetadataHTTPHandler { + return &metadataHTTPHandler{ + repository: repository, + } +} + +func (handler *metadataHTTPHandler) Post() http.HandlerFunc { + return func(response http.ResponseWriter, request *http.Request) { + var metadata []modelMetadata.Metadata + err := json.NewDecoder(request.Body).Decode(&metadata) + defer request.Body.Close() + if err != nil { + logger.Error("Error parsing request body", err.Error()) + + response.WriteHeader(http.StatusBadRequest) + _, _ = response.Write([]byte(constant.ClientError)) + return + } + + for _, metadata := range metadata { + err = handler.repository.Save(metadata) + if err != nil { + logger.Error("updating metadata to storage, failed", err.Error()) + + response.WriteHeader(http.StatusInternalServerError) + _, _ = response.Write([]byte(constant.ServerError)) + return + } + } + + response.WriteHeader(http.StatusCreated) + } +} + +func (handler *metadataHTTPHandler) GetAll() http.HandlerFunc { + return func(w http.ResponseWriter, req *http.Request) { + var metadataSlice []modelMetadata.Metadata + var err error + authEnabled, ok := req.Context().Value(middleware.ContextAuthEnabled).(bool) + if ok && authEnabled { + userDetailContext := req.Context().Value(middleware.ContextUserDetailKey) + if userDetailContext == nil { + w.WriteHeader(http.StatusUnauthorized) + return + } + + userDetail, ok := userDetailContext.(*auth.UserDetail) + if !ok { + w.WriteHeader(http.StatusUnauthorized) + return + } + + metadataSlice, err = handler.repository.GetAllByGroups(userDetail.Groups) + } else { + metadataSlice, err = handler.repository.GetAll() + } + if err != nil { + logger.Error("Error fetching metadata", err.Error()) + + w.WriteHeader(http.StatusInternalServerError) + _, _ = w.Write([]byte(constant.ServerError)) + return + } + + metadataByte, err := json.Marshal(metadataSlice) + if err != nil { + logger.Error("Error marshalling jobs metadata in json", err.Error()) + + w.WriteHeader(http.StatusInternalServerError) + _, _ = w.Write([]byte(constant.ServerError)) + return + } + + _, _ = w.Write(metadataByte) + } +} diff --git a/internal/app/service/metadata/handler/http_test.go b/internal/app/service/metadata/handler/http_test.go new file mode 100644 index 00000000..5ecc8056 --- /dev/null +++ b/internal/app/service/metadata/handler/http_test.go @@ -0,0 +1,206 @@ +package handler + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "net/http" + "net/http/httptest" + metadataRepository "proctor/internal/app/service/metadata/repository" + "proctor/internal/app/service/security/middleware" + "proctor/pkg/auth" + "testing" + + "proctor/internal/pkg/model/metadata/env" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "proctor/internal/pkg/constant" + modelMetadata "proctor/internal/pkg/model/metadata" +) + +type MetadataHandlerTestSuite struct { + suite.Suite + mockRepository *metadataRepository.MockMetadataRepository + metadataHTTPHandler 
MetadataHTTPHandler + serverError string +} + +func (s *MetadataHandlerTestSuite) SetupTest() { + s.mockRepository = &metadataRepository.MockMetadataRepository{} + + s.metadataHTTPHandler = NewMetadataHTTPHandler(s.mockRepository) + + s.serverError = "Something went wrong" +} + +func (s *MetadataHandlerTestSuite) TestSuccessfulMetadataSubmission() { + t := s.T() + + secrets := []env.VarMetadata{ + { + Name: "SAMPLE_SECRET", + Description: "description of secret", + }, + } + args := []env.VarMetadata{ + { + Name: "SAMPLE_ARG", + Description: "description of arg", + }, + } + envVars := env.Vars{ + Secrets: secrets, + Args: args, + } + metadata := modelMetadata.Metadata{ + Name: "run-sample", + Description: "This is a hello world script", + ImageName: "proctor-jobs-run-sample", + EnvVars: envVars, + AuthorizedGroups: []string{"group_one", "group_two"}, + Author: "Test User", + Contributors: "Test User", + Organization: "Test Org", + } + + jobsMetadata := []modelMetadata.Metadata{metadata} + + metadataSubmissionRequestBody, err := json.Marshal(jobsMetadata) + assert.NoError(t, err) + req := httptest.NewRequest("PUT", "/metadata", bytes.NewReader(metadataSubmissionRequestBody)) + responseRecorder := httptest.NewRecorder() + + s.mockRepository.On("Save", metadata).Return(nil).Once() + + s.metadataHTTPHandler.Post()(responseRecorder, req) + + s.mockRepository.AssertExpectations(t) + + assert.Equal(t, http.StatusCreated, responseRecorder.Code) +} + +func (s *MetadataHandlerTestSuite) TestJobMetadataSubmissionMalformedRequest() { + t := s.T() + + jobMetadataSubmissionRequest := fmt.Sprintf("{ some-malformed-reque") + req := httptest.NewRequest("PUT", "/metadata", bytes.NewReader([]byte(jobMetadataSubmissionRequest))) + responseRecorder := httptest.NewRecorder() + + s.metadataHTTPHandler.Post()(responseRecorder, req) + + s.mockRepository.AssertNotCalled(t, "Save", mock.Anything) + + assert.Equal(t, http.StatusBadRequest, responseRecorder.Code) + assert.Equal(t, constant.ClientError, responseRecorder.Body.String()) +} + +func (s *MetadataHandlerTestSuite) TestJobMetadataSubmissionForStoreFailure() { + t := s.T() + + metadata := modelMetadata.Metadata{} + + jobMetadata := []modelMetadata.Metadata{metadata} + + metadataSubmissionRequestBody, err := json.Marshal(jobMetadata) + assert.NoError(t, err) + req := httptest.NewRequest("PUT", "/metadata", bytes.NewReader(metadataSubmissionRequestBody)) + responseRecorder := httptest.NewRecorder() + + s.mockRepository.On("Save", metadata).Return(errors.New("error")).Once() + + s.metadataHTTPHandler.Post()(responseRecorder, req) + + s.mockRepository.AssertExpectations(t) + + assert.Equal(t, http.StatusInternalServerError, responseRecorder.Code) + assert.Equal(t, constant.ServerError, responseRecorder.Body.String()) +} + +func (s *MetadataHandlerTestSuite) TestHandleBulkDisplay() { + t := s.T() + + req := httptest.NewRequest("GET", "/metadata", bytes.NewReader([]byte{})) + groups := []string{"admin", "migratior"} + userDetail := &auth.UserDetail{ + Name: "jasoet", + Email: "jasoet@ambyar.com", + Active: true, + Groups: groups, + } + + ctx := context.WithValue(req.Context(), middleware.ContextUserDetailKey, userDetail) + ctx = context.WithValue(ctx, middleware.ContextAuthEnabled, true) + req = req.WithContext(ctx) + responseRecorder := httptest.NewRecorder() + + var jobsMetadata []modelMetadata.Metadata + s.mockRepository.On("GetAllByGroups", groups).Return(jobsMetadata, nil).Once() + + s.metadataHTTPHandler.GetAll()(responseRecorder, req) + + 
s.mockRepository.AssertExpectations(t) + + assert.Equal(t, http.StatusOK, responseRecorder.Code) + + expectedJobDetails, err := json.Marshal(jobsMetadata) + assert.NoError(t, err) + assert.Equal(t, expectedJobDetails, responseRecorder.Body.Bytes()) +} + +func (s *MetadataHandlerTestSuite) TestHandleBulkDisplayWithoutAuth() { + t := s.T() + + req := httptest.NewRequest("GET", "/metadata", bytes.NewReader([]byte{})) + ctx := context.WithValue(req.Context(), middleware.ContextAuthEnabled, false) + req = req.WithContext(ctx) + responseRecorder := httptest.NewRecorder() + + var jobsMetadata []modelMetadata.Metadata + s.mockRepository.On("GetAll").Return(jobsMetadata, nil).Once() + + s.metadataHTTPHandler.GetAll()(responseRecorder, req) + + s.mockRepository.AssertExpectations(t) + + assert.Equal(t, http.StatusOK, responseRecorder.Code) + + expectedJobDetails, err := json.Marshal(jobsMetadata) + assert.NoError(t, err) + assert.Equal(t, expectedJobDetails, responseRecorder.Body.Bytes()) +} + +func (s *MetadataHandlerTestSuite) TestHandleBulkDisplayStoreFailure() { + t := s.T() + + req := httptest.NewRequest("GET", "/metadata", bytes.NewReader([]byte{})) + groups := []string{"admin", "migratior"} + userDetail := &auth.UserDetail{ + Name: "jasoet", + Email: "jasoet@ambyar.com", + Active: true, + Groups: groups, + } + + ctx := context.WithValue(req.Context(), middleware.ContextUserDetailKey, userDetail) + ctx = context.WithValue(ctx, middleware.ContextAuthEnabled, true) + req = req.WithContext(ctx) + responseRecorder := httptest.NewRecorder() + + jobsMetadata := []modelMetadata.Metadata{} + s.mockRepository.On("GetAllByGroups", groups).Return(jobsMetadata, errors.New("error")).Once() + + s.metadataHTTPHandler.GetAll()(responseRecorder, req) + + s.mockRepository.AssertExpectations(t) + + assert.Equal(t, http.StatusInternalServerError, responseRecorder.Code) + assert.Equal(t, constant.ServerError, responseRecorder.Body.String()) +} + +func TestMetadataHandlerTestSuite(t *testing.T) { + suite.Run(t, new(MetadataHandlerTestSuite)) +} diff --git a/internal/app/service/metadata/repository/metadata.go b/internal/app/service/metadata/repository/metadata.go new file mode 100644 index 00000000..2cca83ee --- /dev/null +++ b/internal/app/service/metadata/repository/metadata.go @@ -0,0 +1,134 @@ +package repository + +import ( + "encoding/json" + "proctor/internal/app/service/infra/db/redis" + "proctor/internal/pkg/model/metadata" +) + +const KeySuffix = "-metadata" + +type MetadataRepository interface { + Save(metadata metadata.Metadata) error + GetAll() ([]metadata.Metadata, error) + GetAllByGroups(groups []string) ([]metadata.Metadata, error) + GetByName(name string) (*metadata.Metadata, error) +} + +type metadataRepository struct { + redisClient redis.Client +} + +func applySuffix(name string) string { + return name + KeySuffix +} + +func NewMetadataRepository(client redis.Client) MetadataRepository { + return &metadataRepository{ + redisClient: client, + } +} + +func (repository *metadataRepository) Save(metadata metadata.Metadata) error { + key := applySuffix(metadata.Name) + + jsonMetadata, err := json.Marshal(metadata) + if err != nil { + return err + } + + return repository.redisClient.SET(key, jsonMetadata) +} + +func (repository *metadataRepository) GetAll() ([]metadata.Metadata, error) { + searchKey := "*" + KeySuffix + + keys, err := repository.redisClient.KEYS(searchKey) + if err != nil { + return nil, err + } + + availableKeys := make([]interface{}, len(keys)) + for i := range keys { + 
availableKeys[i] = keys[i] + } + + values, err := repository.redisClient.MGET(availableKeys...) + if err != nil { + return nil, err + } + + metadataSlice := make([]metadata.Metadata, len(values)) + for i := range values { + err = json.Unmarshal(values[i], &metadataSlice[i]) + if err != nil { + return nil, err + } + } + + return metadataSlice, nil +} + +func (repository *metadataRepository) GetAllByGroups(groups []string) ([]metadata.Metadata, error) { + searchKey := "*" + KeySuffix + + keys, err := repository.redisClient.KEYS(searchKey) + if err != nil { + return nil, err + } + + availableKeys := make([]interface{}, len(keys)) + for i := range keys { + availableKeys[i] = keys[i] + } + + values, err := repository.redisClient.MGET(availableKeys...) + if err != nil { + return nil, err + } + + metadataSlice := make([]metadata.Metadata, len(values)) + for i := range values { + err = json.Unmarshal(values[i], &metadataSlice[i]) + if err != nil { + return nil, err + } + } + + var filteredMetadata []metadata.Metadata + for _, meta := range metadataSlice { + if len(meta.AuthorizedGroups) == 0 { + filteredMetadata = append(filteredMetadata, meta) + } else if duplicateItemExists(meta.AuthorizedGroups, groups) { + filteredMetadata = append(filteredMetadata, meta) + } + } + + return filteredMetadata, nil +} + +func duplicateItemExists(first []string, second []string) bool { + for _, firstString := range first { + for _, secondString := range second { + if firstString == secondString { + return true + } + } + } + return false +} + +func (repository *metadataRepository) GetByName(name string) (*metadata.Metadata, error) { + binaryMetadata, err := repository.redisClient.GET(applySuffix(name)) + if err != nil { + return nil, err + } + + var jobMetadata metadata.Metadata + err = json.Unmarshal(binaryMetadata, &jobMetadata) + if err != nil { + return nil, err + } + + return &jobMetadata, nil +} diff --git a/internal/app/service/metadata/repository/metadata_mock.go b/internal/app/service/metadata/repository/metadata_mock.go new file mode 100644 index 00000000..65252887 --- /dev/null +++ b/internal/app/service/metadata/repository/metadata_mock.go @@ -0,0 +1,30 @@ +package repository + +import ( + "github.com/stretchr/testify/mock" + modelMetadata "proctor/internal/pkg/model/metadata" +) + +type MockMetadataRepository struct { + mock.Mock +} + +func (m *MockMetadataRepository) Save(metadata modelMetadata.Metadata) error { + args := m.Called(metadata) + return args.Error(0) +} + +func (m *MockMetadataRepository) GetAll() ([]modelMetadata.Metadata, error) { + args := m.Called() + return args.Get(0).([]modelMetadata.Metadata), args.Error(1) +} + +func (m *MockMetadataRepository) GetAllByGroups(group []string) ([]modelMetadata.Metadata, error) { + args := m.Called(group) + return args.Get(0).([]modelMetadata.Metadata), args.Error(1) +} + +func (m *MockMetadataRepository) GetByName(name string) (*modelMetadata.Metadata, error) { + args := m.Called(name) + return args.Get(0).(*modelMetadata.Metadata), args.Error(1) +} diff --git a/internal/app/service/metadata/repository/metadata_test.go b/internal/app/service/metadata/repository/metadata_test.go new file mode 100644 index 00000000..bc0fdab9 --- /dev/null +++ b/internal/app/service/metadata/repository/metadata_test.go @@ -0,0 +1,237 @@ +package repository + +import ( + "encoding/json" + "errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "proctor/internal/app/service/infra/db/redis" + 
modelMetadata "proctor/internal/pkg/model/metadata" + "testing" +) + +type MetadataRepositoryTestSuite struct { + suite.Suite + mockRedisClient *redis.MockClient + testMetadataStore MetadataRepository +} + +func (s *MetadataRepositoryTestSuite) SetupTest() { + s.mockRedisClient = &redis.MockClient{} + + s.testMetadataStore = NewMetadataRepository(s.mockRedisClient) +} + +func (s *MetadataRepositoryTestSuite) TestSave() { + t := s.T() + + metadata := modelMetadata.Metadata{ + Name: "any-name", + ImageName: "any-image-name", + Description: "any-description", + Author: "Test User", + Contributors: "Test User, Test Admin", + Organization: "Test Org", + } + + jsonData, err := json.Marshal(metadata) + assert.NoError(t, err) + + s.mockRedisClient.On("SET", "any-name-metadata", jsonData).Return(nil).Once() + + err = s.testMetadataStore.Save(metadata) + assert.NoError(t, err) + s.mockRedisClient.AssertExpectations(t) +} + +func (s *MetadataRepositoryTestSuite) TestSaveFailure() { + t := s.T() + + metadata := modelMetadata.Metadata{} + + expectedError := errors.New("any-error") + s.mockRedisClient.On("SET", mock.Anything, mock.Anything).Return(expectedError).Once() + + err := s.testMetadataStore.Save(metadata) + assert.EqualError(t, err, "any-error") + s.mockRedisClient.AssertExpectations(t) +} + +func (s *MetadataRepositoryTestSuite) TestGetAll() { + t := s.T() + + metadata1 := modelMetadata.Metadata{ + Name: "job1", + ImageName: "job1-image-name", + Description: "desc1", + Author: "Test User", - Contributors: "Test User", - Organization: "Test Org", - } - - jobsMetadata := []Metadata{metadata} - - metadataSubmissionRequestBody, err := json.Marshal(jobsMetadata) - assert.NoError(t, err) - req := httptest.NewRequest("PUT", "/jobs/metadata", bytes.NewReader(metadataSubmissionRequestBody)) - responseRecorder := httptest.NewRecorder() - - s.mockStore.On("CreateOrUpdateJobMetadata", metadata).Return(nil).Once() - - s.testMetadataHandler.HandleSubmission()(responseRecorder, req) - - s.mockStore.AssertExpectations(t) - - assert.Equal(t, http.StatusCreated, responseRecorder.Code) -} - -func (s *MetadataHandlerTestSuite) TestJobMetadataSubmissionMalformedRequest() { - t := s.T() - - jobMetadataSubmissionRequest := fmt.Sprintf("{ some-malformed-reque") - req := httptest.NewRequest("PUT", "/jobs/metadata", bytes.NewReader([]byte(jobMetadataSubmissionRequest))) - responseRecorder := httptest.NewRecorder() - - s.testMetadataHandler.HandleSubmission()(responseRecorder, req) - - s.mockStore.AssertNotCalled(t, "CreateOrUpdateJobMetadata", mock.Anything) - - assert.Equal(t, http.StatusBadRequest, responseRecorder.Code) - assert.Equal(t, utility.ClientError, responseRecorder.Body.String()) -} - -func (s *MetadataHandlerTestSuite) TestJobMetadataSubmissionForStoreFailure() { - t := s.T() - - metadata := Metadata{} - - jobMetadata := []Metadata{metadata} - - metadataSubmissionRequestBody, err := json.Marshal(jobMetadata) - assert.NoError(t, err) - req := httptest.NewRequest("PUT", "/jobs/metadata", bytes.NewReader(metadataSubmissionRequestBody)) - responseRecorder := httptest.NewRecorder() - - s.mockStore.On("CreateOrUpdateJobMetadata", metadata).Return(errors.New("error")).Once() - - s.testMetadataHandler.HandleSubmission()(responseRecorder, req) - - s.mockStore.AssertExpectations(t) - - assert.Equal(t, http.StatusInternalServerError, responseRecorder.Code) - assert.Equal(t, utility.ServerError, responseRecorder.Body.String()) -} - -func (s *MetadataHandlerTestSuite) TestHandleBulkDisplay() { - t := s.T() - - req := 
httptest.NewRequest("GET", "/jobs/metadata", bytes.NewReader([]byte{})) - responseRecorder := httptest.NewRecorder() - - jobsMetadata := []Metadata{} - s.mockStore.On("GetAllJobsMetadata").Return(jobsMetadata, nil).Once() - - s.testMetadataHandler.HandleBulkDisplay()(responseRecorder, req) - - s.mockStore.AssertExpectations(t) - - assert.Equal(t, http.StatusOK, responseRecorder.Code) - - expectedJobDetails, err := json.Marshal(jobsMetadata) - assert.NoError(t, err) - assert.Equal(t, expectedJobDetails, responseRecorder.Body.Bytes()) -} - -func (s *MetadataHandlerTestSuite) TestHandleBulkDisplayStoreFailure() { - t := s.T() - - req := httptest.NewRequest("GET", "/jobs/metadata", bytes.NewReader([]byte{})) - responseRecorder := httptest.NewRecorder() - - jobsMetadata := []Metadata{} - s.mockStore.On("GetAllJobsMetadata").Return(jobsMetadata, errors.New("error")).Once() - - s.testMetadataHandler.HandleBulkDisplay()(responseRecorder, req) - - s.mockStore.AssertExpectations(t) - - assert.Equal(t, http.StatusInternalServerError, responseRecorder.Code) - assert.Equal(t, utility.ServerError, responseRecorder.Body.String()) -} - -func TestMetadataHandlerTestSuite(t *testing.T) { - suite.Run(t, new(MetadataHandlerTestSuite)) -} diff --git a/proctord/jobs/metadata/store.go b/proctord/jobs/metadata/store.go deleted file mode 100644 index 9aef873d..00000000 --- a/proctord/jobs/metadata/store.go +++ /dev/null @@ -1,83 +0,0 @@ -package metadata - -import ( - "encoding/json" - - "proctor/proctord/redis" -) - -const JobNameKeySuffix = "-metadata" - -type Store interface { - CreateOrUpdateJobMetadata(metadata Metadata) error - GetAllJobsMetadata() ([]Metadata, error) - GetJobMetadata(jobName string) (*Metadata, error) -} - -type store struct { - redisClient redis.Client -} - -func NewStore(redisClient redis.Client) Store { - return &store{ - redisClient: redisClient, - } -} - -func jobMetadataKey(jobName string) string { - return jobName + JobNameKeySuffix -} - -func (store *store) CreateOrUpdateJobMetadata(metadata Metadata) error { - jobNameKey := jobMetadataKey(metadata.Name) - - binaryJobMetadata, err := json.Marshal(metadata) - if err != nil { - return err - } - - return store.redisClient.SET(jobNameKey, binaryJobMetadata) -} - -func (store *store) GetAllJobsMetadata() ([]Metadata, error) { - jobNameKeyRegex := "*" + JobNameKeySuffix - - keys, err := store.redisClient.KEYS(jobNameKeyRegex) - if err != nil { - return nil, err - } - - jobKeys := make([]interface{}, len(keys)) - for i := range keys { - jobKeys[i] = keys[i] - } - values, err := store.redisClient.MGET(jobKeys...) 
- if err != nil { - return nil, err - } - - jobsMetadata := make([]Metadata, len(values)) - for i := range values { - err = json.Unmarshal(values[i], &jobsMetadata[i]) - if err != nil { - return nil, err - } - } - - return jobsMetadata, nil -} - -func (store *store) GetJobMetadata(jobName string) (*Metadata, error) { - binaryJobMetadata, err := store.redisClient.GET(jobMetadataKey(jobName)) - if err != nil { - return nil, err - } - - var jobMetadata Metadata - err = json.Unmarshal(binaryJobMetadata, &jobMetadata) - if err != nil { - return nil, err - } - - return &jobMetadata, nil -} diff --git a/proctord/jobs/metadata/store_mock.go b/proctord/jobs/metadata/store_mock.go deleted file mode 100644 index 74dbcadb..00000000 --- a/proctord/jobs/metadata/store_mock.go +++ /dev/null @@ -1,24 +0,0 @@ -package metadata - -import ( - "github.com/stretchr/testify/mock" -) - -type MockStore struct { - mock.Mock -} - -func (m *MockStore) CreateOrUpdateJobMetadata(metadata Metadata) error { - args := m.Called(metadata) - return args.Error(0) -} - -func (m *MockStore) GetAllJobsMetadata() ([]Metadata, error) { - args := m.Called() - return args.Get(0).([]Metadata), args.Error(1) -} - -func (m *MockStore) GetJobMetadata(jobName string) (*Metadata, error) { - args := m.Called(jobName) - return args.Get(0).(*Metadata), args.Error(1) -} diff --git a/proctord/jobs/metadata/store_test.go b/proctord/jobs/metadata/store_test.go deleted file mode 100644 index af9fd973..00000000 --- a/proctord/jobs/metadata/store_test.go +++ /dev/null @@ -1,158 +0,0 @@ -package metadata - -import ( - "encoding/json" - "errors" - "testing" - - "proctor/proctord/redis" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" -) - -type MetadataStoreTestSuite struct { - suite.Suite - mockRedisClient *redis.MockClient - testMetadataStore Store -} - -func (s *MetadataStoreTestSuite) SetupTest() { - s.mockRedisClient = &redis.MockClient{} - - s.testMetadataStore = NewStore(s.mockRedisClient) -} - -func (s *MetadataStoreTestSuite) TestCreateOrUpdateJobMetadata() { - t := s.T() - - metadata := Metadata{ - Name: "any-name", - ImageName: "any-image-name", - Description: "any-description", - Author: "Test User", - Contributors: "Test User, Test Admin", - Organization: "Test Org", - } - - binaryJobMetadata, err := json.Marshal(metadata) - assert.NoError(t, err) - - s.mockRedisClient.On("SET", "any-name-metadata", binaryJobMetadata).Return(nil).Once() - - err = s.testMetadataStore.CreateOrUpdateJobMetadata(metadata) - assert.NoError(t, err) - s.mockRedisClient.AssertExpectations(t) -} - -func (s *MetadataStoreTestSuite) TestCreateOrUpdateJobMetadataForRedisClientFailure() { - t := s.T() - - metadata := Metadata{} - - expectedError := errors.New("any-error") - s.mockRedisClient.On("SET", mock.Anything, mock.Anything).Return(expectedError).Once() - - err := s.testMetadataStore.CreateOrUpdateJobMetadata(metadata) - assert.EqualError(t, err, "any-error") - s.mockRedisClient.AssertExpectations(t) -} - -func (s *MetadataStoreTestSuite) TestGetAllJobsMetadata() { - t := s.T() - - metadata1 := Metadata{ - Name: "job1", - ImageName: "job1-image-name", - Description: "desc1", - Author: "Test User 0 { - podJob := listOfPods.Items[0] - if podJob.Status.Phase == v1.PodRunning || podJob.Status.Phase == v1.PodSucceeded || podJob.Status.Phase == v1.PodFailed { - return client.getLogsStreamReaderFor(podJob.ObjectMeta.Name) - } - watchPod, err := kubernetesPods.Watch(listOptions) - if err != nil { - 
return nil, fmt.Errorf("Error watching kubernetes Pods %v", err) - } - - resultChan := watchPod.ResultChan() - defer watchPod.Stop() - - waitingForKubePods := make(chan bool) - go func() { - defer close(waitingForKubePods) - time.Sleep(time.Duration(config.KubePodsListWaitTime()) * time.Second) - waitingForKubePods <- true - }() - - select { - case <-resultChan: - continue - case <-waitingForKubePods: - return nil, fmt.Errorf("Pod didn't reach active state after waiting for %d minutes", config.KubePodsListWaitTime()) - } - - } else { - batchV1 := client.clientSet.BatchV1() - kubernetesJobs := batchV1.Jobs(namespace) - - watchJob, err := kubernetesJobs.Watch(listOptions) - if err != nil { - return nil, fmt.Errorf("Error watching kubernetes Jobs %v", err) - } - - resultChan := watchJob.ResultChan() - defer watchJob.Stop() - - waitingForKubeJobs := make(chan bool) - go func() { - defer close(waitingForKubeJobs) - time.Sleep(time.Duration(config.KubePodsListWaitTime()) * time.Second) - waitingForKubeJobs <- true - }() - - select { - case <-resultChan: - continue - case <-waitingForKubeJobs: - return nil, fmt.Errorf("Couldn't find a pod for job's given list options %v after waiting for %d minutes", listOptions, config.KubePodsListWaitTime()) - } - } - } -} - -func (client *client) JobExecutionStatus(jobExecutionID string) (string, error) { - batchV1 := client.clientSet.BatchV1() - kubernetesJobs := batchV1.Jobs(namespace) - listOptions := meta_v1.ListOptions{ - TypeMeta: typeMeta, - LabelSelector: jobLabelSelector(jobExecutionID), - } - - watchJob, err := kubernetesJobs.Watch(listOptions) - if err != nil { - return utility.JobFailed, err - } - - resultChan := watchJob.ResultChan() - defer watchJob.Stop() - var event watch.Event - var jobEvent *batch_v1.Job - - for event = range resultChan { - if event.Type == watch.Error { - return utility.JobExecutionStatusFetchError, nil - } - - jobEvent = event.Object.(*batch_v1.Job) - if jobEvent.Status.Succeeded >= int32(1) { - return utility.JobSucceeded, nil - } else if jobEvent.Status.Failed >= int32(1) { - return utility.JobFailed, nil - } - } - - return utility.NoDefinitiveJobExecutionStatusFound, nil -} - -func (client *client) getLogsStreamReaderFor(podName string) (io.ReadCloser, error) { - logger.Debug("reading pod logs for: ", podName) - - req, err := http.NewRequest("GET", "https://"+config.KubeClusterHostName()+"/api/v1/namespaces/"+namespace+"/pods/"+podName+"/log?follow=true", nil) - if err != nil { - return nil, err - } - req.Header.Set("Authorization", "Basic "+config.KubeBasicAuthEncoded()) - resp, err := client.httpClient.Do(req) - if err != nil { - return nil, err - } - return resp.Body, err -} diff --git a/proctord/kubernetes/client_mock.go b/proctord/kubernetes/client_mock.go deleted file mode 100644 index 3851fe94..00000000 --- a/proctord/kubernetes/client_mock.go +++ /dev/null @@ -1,27 +0,0 @@ -package kubernetes - -import ( - "io" - - "proctor/proctord/utility" - "github.com/stretchr/testify/mock" -) - -type MockClient struct { - mock.Mock -} - -func (m *MockClient) ExecuteJob(jobName string, envMap map[string]string) (string, error) { - args := m.Called(jobName, envMap) - return args.String(0), args.Error(1) -} - -func (m *MockClient) StreamJobLogs(jobName string) (io.ReadCloser, error) { - args := m.Called(jobName) - return args.Get(0).(*utility.Buffer), args.Error(1) -} - -func (m *MockClient) JobExecutionStatus(jobExecutionID string) (string, error) { - args := m.Called(jobExecutionID) - return args.String(0), args.Error(1) -} 
diff --git a/proctord/kubernetes/client_test.go b/proctord/kubernetes/client_test.go deleted file mode 100644 index 8389a2cb..00000000 --- a/proctord/kubernetes/client_test.go +++ /dev/null @@ -1,276 +0,0 @@ -package kubernetes - -import ( - "bufio" - "net/http" - "os" - "testing" - "time" - - "proctor/proctord/config" - "proctor/proctord/utility" - "github.com/jarcoal/httpmock" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/suite" - "k8s.io/api/core/v1" - meta_v1 "k8s.io/apimachinery/pkg/apis/meta/v1" - "k8s.io/apimachinery/pkg/watch" - batch_v1 "k8s.io/client-go/kubernetes/typed/batch/v1" - - batchV1 "k8s.io/api/batch/v1" - fakeclientset "k8s.io/client-go/kubernetes/fake" - testing_kubernetes "k8s.io/client-go/testing" -) - -type ClientTestSuite struct { - suite.Suite - testClient Client - testKubernetesJobs batch_v1.JobInterface - fakeClientSet *fakeclientset.Clientset - jobName string - podName string - fakeClientSetStreaming *fakeclientset.Clientset - fakeHttpClient *http.Client - testClientStreaming Client -} - -func (suite *ClientTestSuite) SetupTest() { - suite.fakeClientSet = fakeclientset.NewSimpleClientset() - suite.testClient = &client{ - clientSet: suite.fakeClientSet, - } - suite.jobName = "job1" - suite.podName = "pod1" - namespace := config.DefaultNamespace() - suite.fakeClientSetStreaming = fakeclientset.NewSimpleClientset(&v1.Pod{ - TypeMeta: meta_v1.TypeMeta{ - Kind: "Pod", - APIVersion: "v1", - }, - ObjectMeta: meta_v1.ObjectMeta{ - Name: suite.podName, - Namespace: namespace, - Labels: map[string]string{ - "tag": "", - "job": suite.jobName, - }, - }, - Status: v1.PodStatus{ - Phase: v1.PodSucceeded, - }, - }) - - suite.fakeHttpClient = &http.Client{} - suite.testClientStreaming = &client{ - clientSet: suite.fakeClientSetStreaming, - httpClient: suite.fakeHttpClient, - } -} - -func (suite *ClientTestSuite) TestJobExecution() { - t := suite.T() - os.Setenv("PROCTOR_JOB_POD_ANNOTATIONS", "{\"key.one\":\"true\"}") - envVarsForContainer := map[string]string{"SAMPLE_ARG": "samle-value"} - sampleImageName := "img1" - - executedJobname, err := suite.testClient.ExecuteJob(sampleImageName, envVarsForContainer) - assert.NoError(t, err) - - typeMeta := meta_v1.TypeMeta{ - Kind: "Job", - APIVersion: "batch/v1", - } - - listOptions := meta_v1.ListOptions{ - TypeMeta: typeMeta, - LabelSelector: jobLabelSelector(executedJobname), - } - namespace := config.DefaultNamespace() - listOfJobs, err := suite.fakeClientSet.BatchV1().Jobs(namespace).List(listOptions) - assert.NoError(t, err) - executedJob := listOfJobs.Items[0] - - assert.Equal(t, typeMeta, executedJob.TypeMeta) - - assert.Equal(t, executedJobname, executedJob.ObjectMeta.Name) - assert.Equal(t, executedJobname, executedJob.Spec.Template.ObjectMeta.Name) - - expectedLabel := jobLabel(executedJobname) - assert.Equal(t, expectedLabel, executedJob.ObjectMeta.Labels) - assert.Equal(t, expectedLabel, executedJob.Spec.Template.ObjectMeta.Labels) - assert.Equal(t, map[string]string{"key.one":"true"}, executedJob.Spec.Template.Annotations) - - assert.Equal(t, config.KubeJobActiveDeadlineSeconds(), executedJob.Spec.ActiveDeadlineSeconds) - assert.Equal(t, config.KubeJobRetries(), executedJob.Spec.BackoffLimit) - - assert.Equal(t, v1.RestartPolicyNever, executedJob.Spec.Template.Spec.RestartPolicy) - - container := executedJob.Spec.Template.Spec.Containers[0] - assert.Equal(t, executedJobname, container.Name) - - assert.Equal(t, sampleImageName, container.Image) - - expectedEnvVars := getEnvVars(envVarsForContainer) 
- assert.Equal(t, expectedEnvVars, container.Env) -} - -func (suite *ClientTestSuite) TestStreamLogsSuccess() { - t := suite.T() - - httpmock.ActivateNonDefault(suite.fakeHttpClient) - defer httpmock.DeactivateAndReset() - - namespace := config.DefaultNamespace() - httpmock.RegisterResponder("GET", "https://"+config.KubeClusterHostName()+"/api/v1/namespaces/"+namespace+"/pods/"+suite.podName+"/log?follow=true", - httpmock.NewStringResponder(200, "logs are streaming")) - - logStream, err := suite.testClientStreaming.StreamJobLogs(suite.jobName) - assert.NoError(t, err) - - defer logStream.Close() - - bufioReader := bufio.NewReader(logStream) - - jobLogSingleLine, _, err := bufioReader.ReadLine() - assert.NoError(t, err) - - assert.Equal(t, "logs are streaming", string(jobLogSingleLine[:])) - -} - -func (suite *ClientTestSuite) TestStreamLogsPodNotFoundFailure() { - t := suite.T() - - _, err := suite.testClientStreaming.StreamJobLogs("unknown-job") - assert.Error(t, err) -} - -func (suite *ClientTestSuite) TestShouldReturnSuccessJobExecutionStatus() { - t := suite.T() - - watcher := watch.NewFake() - suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) - - var activeJob batchV1.Job - var succeededJob batchV1.Job - uniqueJobName := "proctor-job-2" - label := jobLabel(uniqueJobName) - objectMeta := meta_v1.ObjectMeta{ - Name: uniqueJobName, - Labels: label, - } - activeJob.ObjectMeta = objectMeta - succeededJob.ObjectMeta = objectMeta - - go func() { - activeJob.Status.Active = 1 - watcher.Modify(&activeJob) - - succeededJob.Status.Active = 0 - succeededJob.Status.Succeeded = 1 - watcher.Modify(&succeededJob) - - time.Sleep(time.Second * 1) - watcher.Stop() - }() - - jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) - assert.NoError(t, err) - - assert.Equal(t, utility.JobSucceeded, jobExecutionStatus, "Should return SUCCEEDED") -} - -func (suite *ClientTestSuite) TestShouldReturnFailedJobExecutionStatus() { - t := suite.T() - - watcher := watch.NewFake() - suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) - - var activeJob batchV1.Job - var failedJob batchV1.Job - uniqueJobName := "proctor-job-1" - label := jobLabel(uniqueJobName) - objectMeta := meta_v1.ObjectMeta{ - Name: uniqueJobName, - Labels: label, - } - activeJob.ObjectMeta = objectMeta - failedJob.ObjectMeta = objectMeta - - go func() { - activeJob.Status.Active = 1 - watcher.Modify(&activeJob) - failedJob.Status.Active = 0 - failedJob.Status.Failed = 1 - watcher.Modify(&failedJob) - - time.Sleep(time.Second * 1) - watcher.Stop() - }() - - jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) - assert.NoError(t, err) - - assert.Equal(t, utility.JobFailed, jobExecutionStatus, "Should return FAILED") -} - -func (suite *ClientTestSuite) TestJobExecutionStatusForNonDefinitiveStatus() { - t := suite.T() - - watcher := watch.NewFake() - suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) - - var testJob batchV1.Job - uniqueJobName := "proctor-job-1" - label := jobLabel(uniqueJobName) - objectMeta := meta_v1.ObjectMeta{ - Name: uniqueJobName, - Labels: label, - } - testJob.ObjectMeta = objectMeta - - go func() { - testJob.Status.Active = 1 - watcher.Modify(&testJob) - - time.Sleep(time.Second * 1) - watcher.Stop() - }() - - jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) - assert.NoError(t, err) - - 
assert.Equal(t, utility.NoDefinitiveJobExecutionStatusFound, jobExecutionStatus, "Should return NO_DEFINITIVE_JOB_EXECUTION_STATUS_FOUND") -} - -func (suite *ClientTestSuite) TestShouldReturnJobExecutionStatusFetchError() { - t := suite.T() - - watcher := watch.NewFake() - suite.fakeClientSet.PrependWatchReactor("jobs", testing_kubernetes.DefaultWatchReactor(watcher, nil)) - - var testJob batchV1.Job - uniqueJobName := "proctor-job-3" - label := jobLabel(uniqueJobName) - objectMeta := meta_v1.ObjectMeta{ - Name: uniqueJobName, - Labels: label, - } - testJob.ObjectMeta = objectMeta - - go func() { - watcher.Error(&testJob) - - time.Sleep(time.Second * 1) - watcher.Stop() - }() - - jobExecutionStatus, err := suite.testClient.JobExecutionStatus(uniqueJobName) - assert.NoError(t, err) - - assert.Equal(t, utility.JobExecutionStatusFetchError, jobExecutionStatus, "Should return JOB_EXECUTION_STATUS_FETCH_ERROR") -} - -func TestClientTestSuite(t *testing.T) { - suite.Run(t, new(ClientTestSuite)) -} diff --git a/proctord/kubernetes/utils.go b/proctord/kubernetes/utils.go deleted file mode 100644 index 3957c5f4..00000000 --- a/proctord/kubernetes/utils.go +++ /dev/null @@ -1,25 +0,0 @@ -package kubernetes - -import ( - "flag" - "os" - "path/filepath" - - "proctor/proctord/config" - "proctor/proctord/logger" -) - -func KubeConfig() string { - if config.KubeConfig() == "out-of-cluster" { - logger.Info("service is running outside kube cluster") - home := os.Getenv("HOME") - - kubeConfig := flag.String("kubeconfig", filepath.Join(home, ".kube", "config"), "(optional) absolute path to the kubeconfig file") - flag.Parse() - - return *kubeConfig - } - logger.Info("Assuming service is running inside kube cluster") - logger.Info("Kube config provided is:", config.KubeConfig()) - return "" -} diff --git a/proctord/mail/mailer.go b/proctord/mail/mailer.go deleted file mode 100644 index ed913b7b..00000000 --- a/proctord/mail/mailer.go +++ /dev/null @@ -1,46 +0,0 @@ -package mail - -import ( - "net/smtp" - - "proctor/proctord/config" - "proctor/proctord/utility" -) - -type Mailer interface { - Send(string, string, string, map[string]string, []string) error -} - -type mailer struct { - from string - addr string - auth smtp.Auth -} - -func New(mailServerHost, mailServerPort string) Mailer { - auth := smtp.PlainAuth("", config.MailUsername(), config.MailPassword(), mailServerHost) - addr := mailServerHost + ":" + mailServerPort - - return &mailer{ - from: config.MailUsername(), - addr: addr, - auth: auth, - } -} - -func (mailer *mailer) Send(jobName, jobExecutionID, jobExecutionStatus string, jobArgs map[string]string, recipients []string) error { - message := constructMessage(jobName, jobExecutionID, jobExecutionStatus, jobArgs) - return smtp.SendMail(mailer.addr, mailer.auth, mailer.from, recipients, message) -} - -func constructMessage(jobName, jobExecutionID, jobExecutionStatus string, jobArgs map[string]string) []byte { - subject := "Subject: " + jobName + " | scheduled execution " + jobExecutionStatus - body := "Proc execution details:\n" + - "\nName:\t" + jobName + - "\nArgs:\t" + utility.MapToString(jobArgs) + - "\nID:\t" + jobExecutionID + - "\nStatus:\t" + jobExecutionStatus + - "\n\n\nThis is an auto-generated email" - - return []byte(subject + "\n\n" + body) -} diff --git a/proctord/mail/mailer_mock.go b/proctord/mail/mailer_mock.go deleted file mode 100644 index 013c91e6..00000000 --- a/proctord/mail/mailer_mock.go +++ /dev/null @@ -1,14 +0,0 @@ -package mail - -import ( - 
"github.com/stretchr/testify/mock" -) - -type MockMailer struct { - mock.Mock -} - -func (m *MockMailer) Send(jobName, jobExecutionID, jobExecutionStatus string, jobArgs map[string]string, recipients []string) error { - args := m.Called(jobName, jobExecutionID, jobExecutionStatus, jobArgs, recipients) - return args.Error(0) -} diff --git a/proctord/middleware/validate_client_version.go b/proctord/middleware/validate_client_version.go deleted file mode 100644 index 742941c3..00000000 --- a/proctord/middleware/validate_client_version.go +++ /dev/null @@ -1,38 +0,0 @@ -package middleware - -import ( - "fmt" - "proctor/proctord/config" - "proctor/proctord/utility" - "github.com/hashicorp/go-version" - "proctor/proctord/logger" - "net/http" -) - -func ValidateClientVersion(next http.HandlerFunc) http.HandlerFunc { - return func(w http.ResponseWriter, r *http.Request) { - - requestHeaderClientVersion := r.Header.Get(utility.ClientVersionHeaderKey) - - if requestHeaderClientVersion != "" { - clientVersion, err := version.NewVersion(requestHeaderClientVersion) - if err != nil { - logger.Error("Error while creating requestHeaderClientVersion", err.Error()) - } - - minClientVersion, err := version.NewVersion(config.MinClientVersion()) - if err != nil { - logger.Error("Error while creating minClientVersion", err.Error()) - } - - if clientVersion.LessThan(minClientVersion) { - w.WriteHeader(400) - w.Write([]byte(fmt.Sprintf(utility.ClientOutdatedErrorMessage, clientVersion))) - return - } - next.ServeHTTP(w, r) - } else { - next.ServeHTTP(w, r) - } - } -} diff --git a/proctord/redis/pool.go b/proctord/redis/pool.go deleted file mode 100644 index df891921..00000000 --- a/proctord/redis/pool.go +++ /dev/null @@ -1,32 +0,0 @@ -package redis - -import ( - "time" - - "proctor/proctord/config" - - "github.com/garyburd/redigo/redis" -) - -func newPool() (*redis.Pool, error) { - pool := &redis.Pool{ - MaxIdle: config.RedisMaxActiveConnections() / 2, - MaxActive: config.RedisMaxActiveConnections(), - IdleTimeout: 5 * time.Second, - Dial: func() (redis.Conn, error) { return redis.Dial("tcp", config.RedisAddress()) }, - TestOnBorrow: func(c redis.Conn, t time.Time) error { - if time.Since(t) < time.Minute { - return nil - } - _, err := c.Do("PING") - return err - }, - Wait: true, - } - - conn := pool.Get() - defer conn.Close() - - _, err := conn.Do("PING") - return pool, err -} diff --git a/proctord/scheduler/scheduler.go b/proctord/scheduler/scheduler.go deleted file mode 100644 index 0b0218f5..00000000 --- a/proctord/scheduler/scheduler.go +++ /dev/null @@ -1,53 +0,0 @@ -package scheduler - -import ( - "fmt" - "os" - "time" - - "proctor/proctord/audit" - "proctor/proctord/config" - http_client "proctor/proctord/http" - "proctor/proctord/jobs/execution" - "proctor/proctord/jobs/metadata" - "proctor/proctord/jobs/schedule" - "proctor/proctord/jobs/secrets" - "proctor/proctord/kubernetes" - "proctor/proctord/mail" - "proctor/proctord/redis" - "proctor/proctord/storage" - "proctor/proctord/storage/postgres" -) - -func Start() error { - fmt.Println("started scheduler") - - postgresClient := postgres.NewClient() - redisClient := redis.NewClient() - - store := storage.New(postgresClient) - metadataStore := metadata.NewStore(redisClient) - secretsStore := secrets.NewStore(redisClient) - - httpClient, err := http_client.NewClient() - if err != nil { - return err - } - kubeConfig := kubernetes.KubeConfig() - kubeClient := kubernetes.NewClient(kubeConfig, httpClient) - - jobExecutioner := 
execution.NewExecutioner(kubeClient, metadataStore, secretsStore) - - auditor := audit.New(store, kubeClient) - - mailer := mail.New(config.MailServerHost(), config.MailServerPort()) - - worker := schedule.NewWorker(store, jobExecutioner, auditor, mailer) - - ticker := time.NewTicker(time.Duration(config.ScheduledJobsFetchIntervalInMins()) * time.Minute) - signalsChan := make(chan os.Signal, 1) - worker.Run(ticker.C, signalsChan) - - postgresClient.Close() - return nil -} diff --git a/proctord/server/api.go b/proctord/server/api.go deleted file mode 100644 index c56a518c..00000000 --- a/proctord/server/api.go +++ /dev/null @@ -1,35 +0,0 @@ -package server - -import ( - "proctor/proctord/instrumentation" - "time" - - "proctor/proctord/config" - "proctor/proctord/logger" - - "github.com/tylerb/graceful" - "github.com/urfave/negroni" -) - -func Start() error { - err := instrumentation.InitNewRelic() - if err != nil { - logger.Fatal(err) - } - appPort := ":" + config.AppPort() - - server := negroni.New(negroni.NewRecovery()) - router, err := NewRouter() - if err != nil { - return err - } - server.UseHandler(router) - - logger.Info("Starting server on port", appPort) - - graceful.Run(appPort, 2*time.Second, server) - - postgresClient.Close() - logger.Info("Stopped server gracefully") - return nil -} diff --git a/proctord/server/router.go b/proctord/server/router.go deleted file mode 100644 index 3fe96839..00000000 --- a/proctord/server/router.go +++ /dev/null @@ -1,77 +0,0 @@ -package server - -import ( - "fmt" - "proctor/proctord/audit" - "proctor/proctord/config" - "proctor/proctord/docs" - http_client "proctor/proctord/http" - "proctor/proctord/jobs/execution" - "proctor/proctord/jobs/logs" - "proctor/proctord/jobs/metadata" - "proctor/proctord/jobs/schedule" - "proctor/proctord/jobs/secrets" - "proctor/proctord/kubernetes" - "proctor/proctord/middleware" - "proctor/proctord/redis" - "proctor/proctord/storage" - "proctor/proctord/storage/postgres" - "net/http" - "path" - - "proctor/proctord/instrumentation" - "github.com/gorilla/mux" -) - -var postgresClient postgres.Client - -func NewRouter() (*mux.Router, error) { - router := mux.NewRouter() - - redisClient := redis.NewClient() - postgresClient = postgres.NewClient() - - store := storage.New(postgresClient) - metadataStore := metadata.NewStore(redisClient) - secretsStore := secrets.NewStore(redisClient) - - httpClient, err := http_client.NewClient() - if err != nil { - return router, err - } - kubeConfig := kubernetes.KubeConfig() - kubeClient := kubernetes.NewClient(kubeConfig, httpClient) - - auditor := audit.New(store, kubeClient) - jobExecutioner := execution.NewExecutioner(kubeClient, metadataStore, secretsStore) - jobExecutionHandler := execution.NewExecutionHandler(auditor, store, jobExecutioner) - jobLogger := logs.NewLogger(kubeClient) - jobMetadataHandler := metadata.NewHandler(metadataStore) - jobSecretsHandler := secrets.NewHandler(secretsStore) - - scheduledJobsHandler := schedule.NewScheduler(store, metadataStore) - - router.HandleFunc("/ping", func(w http.ResponseWriter, req *http.Request) { - fmt.Fprintf(w, "pong") - }) - - - router.HandleFunc("/docs", docs.APIDocHandler) - router.PathPrefix("/docs/").Handler(http.StripPrefix("/docs/", http.FileServer(http.Dir(config.DocsPath())))) - router.HandleFunc("/swagger.yml", func(w http.ResponseWriter, r *http.Request) { - http.ServeFile(w, r, path.Join(config.DocsPath(), "swagger.yml")) - }) - - router.HandleFunc(instrumentation.Wrap("/jobs/execute", 
middleware.ValidateClientVersion(jobExecutionHandler.Handle()))).Methods("POST") - router.HandleFunc(instrumentation.Wrap("/jobs/execute/{name}/status", middleware.ValidateClientVersion(jobExecutionHandler.Status()))).Methods("GET") - router.HandleFunc(instrumentation.Wrap("/jobs/logs", middleware.ValidateClientVersion(jobLogger.Stream()))).Methods("GET") - router.HandleFunc(instrumentation.Wrap("/jobs/metadata", middleware.ValidateClientVersion(jobMetadataHandler.HandleSubmission()))).Methods("POST") - router.HandleFunc(instrumentation.Wrap("/jobs/metadata", middleware.ValidateClientVersion(jobMetadataHandler.HandleBulkDisplay()))).Methods("GET") - router.HandleFunc(instrumentation.Wrap("/jobs/secrets", middleware.ValidateClientVersion(jobSecretsHandler.HandleSubmission()))).Methods("POST") - router.HandleFunc(instrumentation.Wrap("/jobs/schedule", middleware.ValidateClientVersion(scheduledJobsHandler.Schedule()))).Methods("POST") - router.HandleFunc(instrumentation.Wrap("/jobs/schedule", middleware.ValidateClientVersion(scheduledJobsHandler.GetScheduledJobs()))).Methods("GET") - router.HandleFunc(instrumentation.Wrap("/jobs/schedule/{id}", middleware.ValidateClientVersion(scheduledJobsHandler.GetScheduledJob()))).Methods("GET") - router.HandleFunc(instrumentation.Wrap("/jobs/schedule/{id}", middleware.ValidateClientVersion(scheduledJobsHandler.RemoveScheduledJob()))).Methods("DELETE") - - return router, nil -} diff --git a/proctord/storage/postgres/client_test.go b/proctord/storage/postgres/client_test.go deleted file mode 100644 index d6ffb29c..00000000 --- a/proctord/storage/postgres/client_test.go +++ /dev/null @@ -1,97 +0,0 @@ -package postgres - -import ( - "fmt" - "testing" - - "proctor/proctord/config" - "github.com/jmoiron/sqlx" - "github.com/stretchr/testify/assert" -) - -func TestNamedExec(t *testing.T) { - dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.PostgresDatabase(), config.PostgresUser(), config.PostgresPassword(), config.PostgresHost()) - - db, err := sqlx.Connect("postgres", dataSourceName) - assert.NoError(t, err) - - postgresClient := &client{db: db} - defer postgresClient.db.Close() - - jobsExecutionAuditLog := &JobsExecutionAuditLog{ - JobName: "test-job-name", - ImageName: "test-image-name", - ExecutionID: StringToSQLString("test-submission-name"), - JobArgs: "test-job-args", - JobSubmissionStatus: "test-job-status", - JobExecutionStatus: "test-job-execution-status", - } - - _, err = postgresClient.NamedExec("INSERT INTO jobs_execution_audit_log (job_name, image_name, job_name_submitted_for_execution, job_args, job_submission_status, job_execution_status) VALUES (:job_name, :image_name, :job_name_submitted_for_execution, :job_args, :job_submission_status, :job_execution_status)", jobsExecutionAuditLog) - assert.NoError(t, err) - - var persistedJobsExecutionAuditLog JobsExecutionAuditLog - err = postgresClient.db.Get(&persistedJobsExecutionAuditLog, `SELECT job_name, image_name, job_name_submitted_for_execution, job_args, job_submission_status, job_execution_status FROM jobs_execution_audit_log WHERE job_name='test-job-name'`) - assert.NoError(t, err) - - assert.Equal(t, jobsExecutionAuditLog.JobName, persistedJobsExecutionAuditLog.JobName) - assert.Equal(t, jobsExecutionAuditLog.ImageName, persistedJobsExecutionAuditLog.ImageName) - assert.Equal(t, jobsExecutionAuditLog.ExecutionID.String, persistedJobsExecutionAuditLog.ExecutionID.String) - assert.Equal(t, jobsExecutionAuditLog.JobArgs, 
persistedJobsExecutionAuditLog.JobArgs) - assert.Equal(t, jobsExecutionAuditLog.JobSubmissionStatus, persistedJobsExecutionAuditLog.JobSubmissionStatus) - assert.Equal(t, jobsExecutionAuditLog.JobExecutionStatus, persistedJobsExecutionAuditLog.JobExecutionStatus) - - _, err = postgresClient.db.Exec("DELETE FROM jobs_execution_audit_log WHERE job_name='test-job-name'") - assert.NoError(t, err) -} - -func TestSelect(t *testing.T) { - dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.PostgresDatabase(), config.PostgresUser(), config.PostgresPassword(), config.PostgresHost()) - - db, err := sqlx.Connect("postgres", dataSourceName) - assert.NoError(t, err) - - postgresClient := &client{db: db} - defer postgresClient.db.Close() - jobName := "test-job-name" - - jobsExecutionAuditLog := &JobsExecutionAuditLog{ - JobName: jobName, - ImageName: "test-image-name", - ExecutionID: StringToSQLString("test-submission-name"), - JobArgs: "test-job-args", - JobSubmissionStatus: "test-job-status", - JobExecutionStatus: "test-job-execution-status", - } - - _, err = postgresClient.NamedExec("INSERT INTO jobs_execution_audit_log (job_name, image_name, job_name_submitted_for_execution, job_args, job_submission_status, job_execution_status) VALUES (:job_name, :image_name, :job_name_submitted_for_execution, :job_args, :job_submission_status, :job_execution_status)", jobsExecutionAuditLog) - assert.NoError(t, err) - - jobsExecutionAuditLogResult := []JobsExecutionAuditLog{} - err = postgresClient.db.Select(&jobsExecutionAuditLogResult, "SELECT job_execution_status from jobs_execution_audit_log where job_name = $1", jobName) - assert.NoError(t, err) - - assert.Equal(t, jobsExecutionAuditLog.JobExecutionStatus, jobsExecutionAuditLogResult[0].JobExecutionStatus) - - _, err = postgresClient.db.Exec("DELETE FROM jobs_execution_audit_log WHERE job_name='test-job-name'") - assert.NoError(t, err) -} - -func TestSelectForNoRows(t *testing.T) { - dataSourceName := fmt.Sprintf("dbname=%s user=%s password=%s host=%s sslmode=disable", config.PostgresDatabase(), config.PostgresUser(), config.PostgresPassword(), config.PostgresHost()) - - db, err := sqlx.Connect("postgres", dataSourceName) - assert.NoError(t, err) - - postgresClient := &client{db: db} - defer postgresClient.db.Close() - jobName := "test-job-name" - - jobsExecutionAuditLogResult := []JobsExecutionAuditLog{} - err = postgresClient.db.Select(&jobsExecutionAuditLogResult, "SELECT job_execution_status from jobs_execution_audit_log where job_name = $1", jobName) - assert.NoError(t, err) - - assert.Equal(t, 0, len(jobsExecutionAuditLogResult)) - - assert.NoError(t, err) -} diff --git a/proctord/storage/postgres/schema.go b/proctord/storage/postgres/schema.go deleted file mode 100644 index 4ac2ebaf..00000000 --- a/proctord/storage/postgres/schema.go +++ /dev/null @@ -1,51 +0,0 @@ -package postgres - -import ( - "database/sql" - "encoding/base64" - "encoding/json" - "time" - - "proctor/proctord/logger" -) - -type JobsExecutionAuditLog struct { - JobName string `db:"job_name"` - UserEmail string `db:"user_email"` - ImageName string `db:"image_name"` - ExecutionID sql.NullString `db:"job_name_submitted_for_execution"` - JobArgs string `db:"job_args"` - JobSubmissionStatus string `db:"job_submission_status"` - Errors string `db:"errors"` - JobExecutionStatus string `db:"job_execution_status"` - CreatedAt time.Time `db:"created_at"` - UpdatedAt time.Time `db:"updated_at"` -} - -func (j *JobsExecutionAuditLog) AddJobArgs(jobArgs 
map[string]string) { - jsonEncodedArgs, err := json.Marshal(jobArgs) - if err != nil { - logger.Error("Error marshaling job args: ", err.Error()) - return - } - - j.JobArgs = base64.StdEncoding.EncodeToString(jsonEncodedArgs) -} - -func (j *JobsExecutionAuditLog) AddExecutionID(jobExecutionID string) { - j.ExecutionID = StringToSQLString(jobExecutionID) -} - -type JobsSchedule struct { - ID string `db:"id"` - Name string `db:"name"` - Args string `db:"args"` - Tags string `db:"tags"` - Time string `db:"time"` - NotificationEmails string `db:"notification_emails"` - UserEmail string `db:"user_email"` - Group string `db:"group_name"` - Enabled bool `db:"enabled"` - CreatedAt time.Time `db:"created_at"` - UpdatedAt time.Time `db:"updated_at"` -} diff --git a/proctord/storage/postgres/utils.go b/proctord/storage/postgres/utils.go deleted file mode 100644 index ced778e4..00000000 --- a/proctord/storage/postgres/utils.go +++ /dev/null @@ -1,13 +0,0 @@ -package postgres - -import "database/sql" - -func StringToSQLString(str string) sql.NullString { - if len(str) == 0 { - return sql.NullString{} - } - return sql.NullString{ - String: str, - Valid: true, - } -} diff --git a/proctord/storage/postgres/utils_test.go b/proctord/storage/postgres/utils_test.go deleted file mode 100644 index d6029b18..00000000 --- a/proctord/storage/postgres/utils_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package postgres - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestStringToSQLStringForEmptyString(t *testing.T) { - sqlString := StringToSQLString("") - assert.Equal(t, false, sqlString.Valid) -} - -func TestStringToSQLStringForNonEmptyString(t *testing.T) { - sqlString := StringToSQLString("any") - - assert.Equal(t, true, sqlString.Valid) - assert.Equal(t, "any", sqlString.String) -} diff --git a/proctord/storage/store.go b/proctord/storage/store.go deleted file mode 100644 index 8cc16e27..00000000 --- a/proctord/storage/store.go +++ /dev/null @@ -1,113 +0,0 @@ -package storage - -import ( - "encoding/base64" - "encoding/json" - "time" - - "proctor/proctord/storage/postgres" - "github.com/satori/go.uuid" -) - -type Store interface { - AuditJobsExecution(*postgres.JobsExecutionAuditLog) error - UpdateJobsExecutionAuditLog(string, string) error - GetJobExecutionStatus(string) (string, error) - InsertScheduledJob(string, string, string, string, string, string, map[string]string) (string, error) - GetScheduledJobs() ([]postgres.JobsSchedule, error) - GetEnabledScheduledJobs() ([]postgres.JobsSchedule, error) - GetScheduledJob(string) ([]postgres.JobsSchedule, error) - RemoveScheduledJob(string) (int64, error) -} - -type store struct { - postgresClient postgres.Client -} - -func New(postgresClient postgres.Client) Store { - return &store{ - postgresClient: postgresClient, - } -} - -func (store *store) AuditJobsExecution(jobsExecutionAuditLog *postgres.JobsExecutionAuditLog) error { - _, err := store.postgresClient.NamedExec("INSERT INTO jobs_execution_audit_log (job_name, user_email, image_name, job_name_submitted_for_execution, job_args, job_submission_status,"+ - " job_execution_status) VALUES (:job_name, :user_email, :image_name, :job_name_submitted_for_execution, :job_args, :job_submission_status, :job_execution_status)", - &jobsExecutionAuditLog) - return err -} - -func (store *store) UpdateJobsExecutionAuditLog(jobExecutionID, jobExecutionStatus string) error { - jobsExecutionAuditLog := postgres.JobsExecutionAuditLog{ - JobExecutionStatus: jobExecutionStatus, - ExecutionID: 
postgres.StringToSQLString(jobExecutionID), - UpdatedAt: time.Now(), - } - - _, err := store.postgresClient.NamedExec("UPDATE jobs_execution_audit_log SET job_execution_status = :job_execution_status, updated_at = :updated_at where job_name_submitted_for_execution = "+ - ":job_name_submitted_for_execution", &jobsExecutionAuditLog) - return err -} - -func (store *store) GetJobExecutionStatus(JobNameSubmittedForExecution string) (string, error) { - jobsExecutionAuditLogResult := []postgres.JobsExecutionAuditLog{} - err := store.postgresClient.Select(&jobsExecutionAuditLogResult, "SELECT job_execution_status from jobs_execution_audit_log where job_name_submitted_for_execution = $1", JobNameSubmittedForExecution) - if err != nil { - return "", err - } - - if len(jobsExecutionAuditLogResult) == 0 { - return "", nil - } - - return jobsExecutionAuditLogResult[0].JobExecutionStatus, nil -} - -func (store *store) InsertScheduledJob(name, tags, time, notificationEmails, userEmail, groupName string, args map[string]string) (string, error) { - jsonEncodedArgs, err := json.Marshal(args) - if err != nil { - return "", err - } - - jobsSchedule := postgres.JobsSchedule{ - ID: uuid.NewV4().String(), - Name: name, - Args: base64.StdEncoding.EncodeToString(jsonEncodedArgs), - Tags: tags, - Time: time, - NotificationEmails: notificationEmails, - UserEmail: userEmail, - Group: groupName, - Enabled: true, - } - _, err = store.postgresClient.NamedExec("INSERT INTO jobs_schedule (id, name, tags, time, notification_emails, user_email, group_name, args, enabled) "+ - "VALUES (:id, :name, :tags, :time, :notification_emails, :user_email, :group_name, :args, :enabled)", &jobsSchedule) - return jobsSchedule.ID, err -} - -func (store *store) GetScheduledJobs() ([]postgres.JobsSchedule, error) { - scheduledJobs := []postgres.JobsSchedule{} - err := store.postgresClient.Select(&scheduledJobs, "SELECT id, name, args, time, notification_emails, group_name, enabled from jobs_schedule") - return scheduledJobs, err -} - -func (store *store) GetEnabledScheduledJobs() ([]postgres.JobsSchedule, error) { - scheduledJobs := []postgres.JobsSchedule{} - err := store.postgresClient.Select(&scheduledJobs, "SELECT id, name, args, time, tags, notification_emails,group_name from jobs_schedule where enabled = 't'") - return scheduledJobs, err -} - -func (store *store) GetScheduledJob(jobID string) ([]postgres.JobsSchedule, error) { - scheduledJob := []postgres.JobsSchedule{} - err := store.postgresClient.Select(&scheduledJob, "SELECT id, name, args, time, tags, notification_emails,group_name from jobs_schedule where id = $1 and enabled = 't'", jobID) - return scheduledJob, err -} - -func (store *store) RemoveScheduledJob(jobID string) (int64, error) { - job := postgres.JobsSchedule{ - ID: jobID, - UpdatedAt: time.Now(), - } - rowsAffected, err := store.postgresClient.NamedExec("UPDATE jobs_schedule set enabled = 'f', updated_at = :updated_at where id = :id and enabled = 't'", &job) - return rowsAffected, err -} diff --git a/proctord/storage/store_mock.go b/proctord/storage/store_mock.go deleted file mode 100644 index b7e1d3b9..00000000 --- a/proctord/storage/store_mock.go +++ /dev/null @@ -1,50 +0,0 @@ -package storage - -import ( - "proctor/proctord/storage/postgres" - "github.com/stretchr/testify/mock" -) - -type MockStore struct { - mock.Mock -} - -func (m *MockStore) AuditJobsExecution(jobsExecutionAuditLog *postgres.JobsExecutionAuditLog) error { - args := m.Called(jobsExecutionAuditLog) - return args.Error(0) -} - -func (m 
*MockStore) UpdateJobsExecutionAuditLog(JobNameSubmittedForExecution, status string) error { - args := m.Called(JobNameSubmittedForExecution, status) - return args.Error(0) -} - -func (m *MockStore) GetJobExecutionStatus(jobName string) (string, error) { - args := m.Called(jobName) - return args.String(0), args.Error(1) -} - -func (m *MockStore) InsertScheduledJob(jobName, tags, time, notificationEmails, userEmail, groupName string, jobArgs map[string]string) (string, error) { - args := m.Called(jobName, tags, time, notificationEmails, userEmail, groupName, jobArgs) - return args.String(0), args.Error(1) -} - -func (m *MockStore) GetScheduledJobs() ([]postgres.JobsSchedule, error) { - args := m.Called() - return args.Get(0).([]postgres.JobsSchedule), args.Error(1) -} - -func (m *MockStore) GetEnabledScheduledJobs() ([]postgres.JobsSchedule, error) { - args := m.Called() - return args.Get(0).([]postgres.JobsSchedule), args.Error(1) -} - -func (m *MockStore) GetScheduledJob(jobID string) ([]postgres.JobsSchedule, error) { - args := m.Called(jobID) - return args.Get(0).([]postgres.JobsSchedule), args.Error(1) -} - -func (m *MockStore) RemoveScheduledJob(jobID string) (int64, error) { - args := m.Called(jobID) - return args.Get(0).(int64), args.Error(1) -} diff --git a/proctord/storage/store_test.go b/proctord/storage/store_test.go deleted file mode 100644 index 2d37ebf0..00000000 --- a/proctord/storage/store_test.go +++ /dev/null @@ -1,254 +0,0 @@ -package storage - -import ( - "bytes" - "encoding/gob" - "errors" - "proctor/proctord/storage/postgres" - "proctor/proctord/utility" - "github.com/satori/go.uuid" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - "testing" -) - -func TestJobsExecutionAuditLog(t *testing.T) { - mockPostgresClient := &postgres.ClientMock{} - testStore := New(mockPostgresClient) - - jobExecutionAuditLog := &postgres.JobsExecutionAuditLog{ - JobName: "sample-job", - ImageName: "any-image", - UserEmail: "mrproctor@example.com", - } - - var encodedJobArgs bytes.Buffer - enc := gob.NewEncoder(&encodedJobArgs) - err := enc.Encode(map[string]string{}) - assert.NoError(t, err) - - mockPostgresClient.On("NamedExec", - "INSERT INTO jobs_execution_audit_log (job_name, user_email, image_name, job_name_submitted_for_execution, job_args, job_submission_status, job_execution_status) VALUES (:job_name, :user_email, :image_name, :job_name_submitted_for_execution, :job_args, :job_submission_status, :job_execution_status)", mock.Anything).Run(func(args mock.Arguments) { - }).Return(int64(1), nil).Once() - - err = testStore.AuditJobsExecution(jobExecutionAuditLog) - - assert.NoError(t, err) - mockPostgresClient.AssertExpectations(t) -} - -func TestJobsExecutionAuditLogPostgresClientFailure(t *testing.T) { - mockPostgresClient := &postgres.ClientMock{} - testStore := New(mockPostgresClient) - - jobExecutionAuditLog := &postgres.JobsExecutionAuditLog{ - JobName: "sample-job", - } - - var encodedJobArgs bytes.Buffer - enc := gob.NewEncoder(&encodedJobArgs) - err := enc.Encode(map[string]string{}) - assert.NoError(t, err) - - mockPostgresClient.On("NamedExec", - "INSERT INTO jobs_execution_audit_log (job_name, user_email, image_name, job_name_submitted_for_execution, job_args, job_submission_status, job_execution_status) VALUES (:job_name, :user_email, :image_name, :job_name_submitted_for_execution, :job_args, :job_submission_status, :job_execution_status)", - mock.Anything). - Return(int64(0), errors.New("error")). 
- Once() - - err = testStore.AuditJobsExecution(jobExecutionAuditLog) - - assert.Error(t, err) - mockPostgresClient.AssertExpectations(t) -} - -func TestUpdateJobsExecutionAuditLog(t *testing.T) { - mockPostgresClient := &postgres.ClientMock{} - testStore := New(mockPostgresClient) - - executionID := "any-submission" - jobExecutionStatus := "updated-status" - - mockPostgresClient.On("NamedExec", - "UPDATE jobs_execution_audit_log SET job_execution_status = :job_execution_status, updated_at = :updated_at where job_name_submitted_for_execution = :job_name_submitted_for_execution", - mock.Anything). - Run(func(args mock.Arguments) { - data := args.Get(1).(*postgres.JobsExecutionAuditLog) - - assert.Equal(t, postgres.StringToSQLString(executionID), data.ExecutionID) - assert.Equal(t, jobExecutionStatus, data.JobExecutionStatus) - }). - Return(int64(1), nil). - Once() - - err := testStore.UpdateJobsExecutionAuditLog(executionID, jobExecutionStatus) - - assert.NoError(t, err) - mockPostgresClient.AssertExpectations(t) -} - -func TestGetJobsStatusWhenJobIsPresent(t *testing.T) { - mockPostgresClient := &postgres.ClientMock{} - testStore := New(mockPostgresClient) - jobName := "any-job" - - dest := []postgres.JobsExecutionAuditLog{} - - mockPostgresClient.On("Select", - &dest, - "SELECT job_execution_status from jobs_execution_audit_log where job_name_submitted_for_execution = $1", - jobName). - Return(nil). - Run(func(args mock.Arguments) { - jobsExecutionAuditLogResult := args.Get(0).(*[]postgres.JobsExecutionAuditLog) - *jobsExecutionAuditLogResult = append(*jobsExecutionAuditLogResult, postgres.JobsExecutionAuditLog{ - JobExecutionStatus: utility.JobSucceeded, - }) - }). - Once() - - status, err := testStore.GetJobExecutionStatus(jobName) - assert.NoError(t, err) - - assert.Equal(t, utility.JobSucceeded, status) - - mockPostgresClient.AssertExpectations(t) -} - -func TestGetJobsStatusWhenJobIsNotPresent(t *testing.T) { - mockPostgresClient := &postgres.ClientMock{} - testStore := New(mockPostgresClient) - jobName := "any-job" - - dest := []postgres.JobsExecutionAuditLog{} - - mockPostgresClient.On("Select", - &dest, - "SELECT job_execution_status from jobs_execution_audit_log where job_name_submitted_for_execution = $1", - jobName). - Return(nil). - Once() - - status, err := testStore.GetJobExecutionStatus(jobName) - assert.NoError(t, err) - - assert.Equal(t, "", status) - - mockPostgresClient.AssertExpectations(t) -} - -func TestGetJobsStatusWhenError(t *testing.T) { - mockPostgresClient := &postgres.ClientMock{} - testStore := New(mockPostgresClient) - jobName := "any-job" - - dest := []postgres.JobsExecutionAuditLog{} - - mockPostgresClient.On("Select", - &dest, - "SELECT job_execution_status from jobs_execution_audit_log where job_name_submitted_for_execution = $1", - jobName). - Return(errors.New("error")). 
-		Once()
-
-	_, err := testStore.GetJobExecutionStatus(jobName)
-	assert.Error(t, err, "error")
-}
-
-func TestJobsScheduleInsertionSuccessfull(t *testing.T) {
-	postgresClient := postgres.NewClient()
-	testStore := New(postgresClient)
-
-	scheduledJobID, err := testStore.InsertScheduledJob("job-name", "tag-one", "* * 3 * *", "foo@bar.com", "ms@proctor.com","group1", map[string]string{})
-	assert.NoError(t, err)
-	_, err = uuid.FromString(scheduledJobID)
-	assert.NoError(t, err)
-
-	_, err = postgresClient.GetDB().Exec("truncate table jobs_schedule;")
-	assert.NoError(t, err)
-}
-
-func TestJobsScheduleInsertionFailed(t *testing.T) {
-	mockPostgresClient := &postgres.ClientMock{}
-	testStore := New(mockPostgresClient)
-
-	jobName := "job-name"
-	tag := "tag-one1"
-	time := "* * 3 * *"
-	notificationEmail := "foo@bar.com"
-	userEmail := "ms@proctor.com"
-	groupName := "group1"
-
-	mockPostgresClient.On("NamedExec",
-		"INSERT INTO jobs_schedule (id, name, tags, time, notification_emails, user_email, group_name, args, enabled) "+
-			"VALUES (:id, :name, :tags, :time, :notification_emails, :user_email, :group_name, :args, :enabled)",
-		mock.AnythingOfType("*postgres.JobsSchedule")).Run(func(args mock.Arguments) {
-	}).Return(int64(0), errors.New("any-error")).
-		Once()
-
-	_, err := testStore.InsertScheduledJob(jobName, tag, time, notificationEmail, userEmail,groupName, map[string]string{})
-
-	assert.Error(t, err)
-
-	mockPostgresClient.AssertExpectations(t)
-}
-
-func TestGetScheduledJobByID(t *testing.T) {
-	postgresClient := postgres.NewClient()
-	testStore := New(postgresClient)
-
-	jobID, err := testStore.InsertScheduledJob("job-name", "tag-one", "* * 3 * *", "foo@bar.com", "ms@proctor.com","group1", map[string]string{})
-	assert.NoError(t, err)
-
-	resultJob, err := testStore.GetScheduledJob(jobID)
-	assert.NoError(t, err)
-	assert.Equal(t, "job-name", resultJob[0].Name)
-	assert.Equal(t, "tag-one", resultJob[0].Tags)
-	assert.Equal(t, "* * 3 * *", resultJob[0].Time)
-
-	_, err = postgresClient.GetDB().Exec("truncate table jobs_schedule;")
-	assert.NoError(t, err)
-}
-
-func TestGetScheduledJobByIDReturnErrorIfIDnotFound(t *testing.T) {
-	postgresClient := postgres.NewClient()
-	testStore := New(postgresClient)
-
-	resultJob, err := testStore.GetScheduledJob("86A7963B-3621-492D-8D6C-33076242256B")
-	assert.NoError(t, err)
-	assert.Equal(t, []postgres.JobsSchedule{}, resultJob)
-}
-
-func TestRemoveScheduledJobByID(t *testing.T) {
-	postgresClient := postgres.NewClient()
-	testStore := New(postgresClient)
-
-	jobID, err := testStore.InsertScheduledJob("job-name", "tag-one", "* * 3 * *", "foo@bar.com", "ms@proctor.com","group1", map[string]string{})
-	assert.NoError(t, err)
-
-	removedJobsCount, err := testStore.RemoveScheduledJob(jobID)
-	assert.NoError(t, err)
-	assert.Equal(t, int64(1), removedJobsCount)
-
-	_, err = postgresClient.GetDB().Exec("truncate table jobs_schedule;")
-	assert.NoError(t, err)
-}
-
-func TestRemoveScheduledJobByIDReturnErrorIfIDnotFound(t *testing.T) {
-	postgresClient := postgres.NewClient()
-	testStore := New(postgresClient)
-
-	removedJobsCount, err := testStore.RemoveScheduledJob("86A7963B-3621-492D-8D6C-33076242256B")
-	assert.NoError(t, err)
-	assert.Equal(t, int64(0), removedJobsCount)
-}
-
-func TestRemoveScheduledJobByIDReturnErrorIfIDIsInvalid(t *testing.T) {
-	postgresClient := postgres.NewClient()
-	testStore := New(postgresClient)
-
-	removedJobsCount, err := testStore.RemoveScheduledJob("86A7963B")
-	assert.Error(t, err)
-	assert.Contains(t, err.Error(), "invalid input syntax")
-	assert.Equal(t, int64(0), removedJobsCount)
-}
diff --git a/proctord/utility/buffer.go b/proctord/utility/buffer.go
deleted file mode 100644
index 94fc3c24..00000000
--- a/proctord/utility/buffer.go
+++ /dev/null
@@ -1,43 +0,0 @@
-package utility
-
-import (
-	"bytes"
-	"sync"
-)
-
-type Buffer struct {
-	mutex     sync.Mutex
-	wasClosed bool
-	buffer    bytes.Buffer
-}
-
-func NewBuffer() *Buffer {
-	return &Buffer{
-		wasClosed: false,
-	}
-}
-
-func (b *Buffer) Close() error {
-	b.mutex.Lock()
-	defer b.mutex.Unlock()
-	b.wasClosed = true
-	return nil
-}
-
-func (b *Buffer) WasClosed() bool {
-	b.mutex.Lock()
-	defer b.mutex.Unlock()
-	return b.wasClosed
-}
-
-func (b *Buffer) Read(p []byte) (n int, err error) {
-	b.mutex.Lock()
-	defer b.mutex.Unlock()
-	return b.buffer.Read(p)
-}
-
-func (b *Buffer) Write(p []byte) (n int, err error) {
-	b.mutex.Lock()
-	defer b.mutex.Unlock()
-	return b.buffer.Write(p)
-}
diff --git a/scripts/proctor.rb.tpl b/scripts/proctor.rb.tpl
index f1291166..a325294c 100644
--- a/scripts/proctor.rb.tpl
+++ b/scripts/proctor.rb.tpl
@@ -1,10 +1,10 @@
 class Proctor < Formula
   desc "Proctor CLI"
-  homepage "https://github.com/gojektech/proctor"
-  url "https://github.com/gojektech/proctor/releases/download/v{{ .Tag }}/proctor_{{ .Tag }}_Darwin_x86_64.tar.gz"
+  homepage "https://github.com/gojek/proctor"
+  url "https://github.com/gojek/proctor/releases/download/v{{ .Tag }}/proctor_{{ .Tag }}_Darwin_x86_64.tar.gz"
   version "{{ .Tag }}"
   sha256 "{{ .SHA }}"
-  head "https://github.com/gojektech/proctor.git"
+  head "https://github.com/gojek/proctor.git"
 
   bottle :unneeded
diff --git a/scripts/release.sh b/scripts/release.sh
index 1a92a453..f9145f1d 100755
--- a/scripts/release.sh
+++ b/scripts/release.sh
@@ -6,13 +6,13 @@
 if [ -n "$TRAVIS_TAG" ]; then
     curl -sL https://git.io/goreleaser | bash
     SHA=$(cat dist/checksums.txt | grep Darwin_x86_64 | awk '{ print $1}')
    go run scripts/proctor_template.go $TRAVIS_TAG $SHA
-    rm -rf homebrew-gojek
-    git clone "https://$GITHUB_TOKEN:@github.com/gojek/homebrew-gojek.git"
+    rm -rf homebrew-tap
+    git clone "https://$GITHUB_TOKEN:@github.com/gojek/homebrew-tap.git"
     cp scripts/proctor.rb homebrew-gojek/Formula/proctor.rb
-    cd homebrew-gojek
+    cd homebrew-tap
     git add .
     git commit -m "[TravisCI] updating brew formula for release $TRAVIS_TAG"
-    git push --force --quiet "https://$GITHUB_TOKEN:@github.com/gojektech/homebrew-gojek.git"
+    git push --force --quiet "https://$GITHUB_TOKEN:@github.com/gojek/homebrew-tap.git"
 fi
diff --git a/test/config/proctor.yaml b/test/config/proctor.yaml
new file mode 100644
index 00000000..0e0d200d
--- /dev/null
+++ b/test/config/proctor.yaml
@@ -0,0 +1,3 @@
+PROCTOR_HOST: localhost:5000
+EMAIL_ID: deny.prasetyo@go-pay.co.id
+ACCESS_TOKEN: PROCTORACCESSTOKENHERE
diff --git a/test/package_procs.rb b/test/package_procs.rb
new file mode 100644
index 00000000..77591e2d
--- /dev/null
+++ b/test/package_procs.rb
@@ -0,0 +1,45 @@
+#!/usr/bin/env ruby
+
+require 'json'
+
+hub_username = ENV['DOCKERHUB_USERNAME']
+hub_password = ENV['DOCKERHUB_PASSWORD']
+jobs_path = ENV['PROCTOR_JOBS_PATH']
+team_name = ENV['PROCTOR_JOB_TEAM_NAME'] || "test"
+container_registry = ENV["PROCTOR_CONTAINER_REGISTRY"] || "docker.io/proctorscripts"
+metadata_file_name = ENV['PROCTOR_METADATA_FILE_NAME'] || "metadata.json"
+
+def run_cmd(cmd)
+  puts cmd
+  result = system(cmd)
+  if !result
+    puts "#{cmd} exited with non-zero code"
+    exit 1
+  end
+end
+
+def login(username, password)
+  run_cmd("docker login -u #{username} -p #{password}")
+end
+
+for dir in Dir["#{jobs_path}/*/"]
+  metadata_file = dir + '/' + metadata_file_name
+
+  login(hub_username, hub_password)
+
+  if File.exist?(metadata_file)
+    file = File.read(metadata_file)
+    data_hash = JSON.parse(file)
+    image_name = "#{container_registry}/#{team_name}-#{data_hash['name']}:latest"
+
+    Dir.chdir(dir) {
+      puts "===== build and push image ====="
+      run_cmd("docker build -t #{image_name} .")
+      run_cmd("docker push #{image_name}")
+    }
+  else
+    puts "#{dir} doesn't have metadata_file"
+  end
+
+end
+
diff --git a/procs/README.md b/test/procs/README.md
similarity index 100%
rename from procs/README.md
rename to test/procs/README.md
diff --git a/procs/say-hello-world/Dockerfile b/test/procs/say-hello-world/Dockerfile
similarity index 100%
rename from procs/say-hello-world/Dockerfile
rename to test/procs/say-hello-world/Dockerfile
diff --git a/procs/say-hello-world/metadata.json b/test/procs/say-hello-world/metadata.json
similarity index 94%
rename from procs/say-hello-world/metadata.json
rename to test/procs/say-hello-world/metadata.json
index 36ff8423..0b7598df 100644
--- a/procs/say-hello-world/metadata.json
+++ b/test/procs/say-hello-world/metadata.json
@@ -1,4 +1,5 @@
 {
+  "name": "say-hello-world",
   "description":"This proc says hello world",
   "env_vars":{
     "secrets": [
diff --git a/test/procs/say-hello-world/say_hello_world.sh b/test/procs/say-hello-world/say_hello_world.sh
new file mode 100755
index 00000000..4322d212
--- /dev/null
+++ b/test/procs/say-hello-world/say_hello_world.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+echo "Hello World!
+  I received secrets: $SAMPLE_SECRET_ONE and $SAMPLE_SECRET_TWO
+  I received arguments: $SAMPLE_ARG_ONE and $SAMPLE_ARG_TWO"
+
diff --git a/test/procs/say-hello-world/say_hello_world.yaml b/test/procs/say-hello-world/say_hello_world.yaml
new file mode 100644
index 00000000..99d012ef
--- /dev/null
+++ b/test/procs/say-hello-world/say_hello_world.yaml
@@ -0,0 +1,2 @@
+SAMPLE_ARG_ONE: foo
+SAMPLE_ARG_TWO: bar
diff --git a/test/update_metadata.rb b/test/update_metadata.rb
new file mode 100644
index 00000000..b6940f49
--- /dev/null
+++ b/test/update_metadata.rb
@@ -0,0 +1,50 @@
+#!/usr/bin/env ruby
+
+require 'json'
+require 'net/http'
+require 'uri'
+
+proctor_uri = ENV['PROCTOR_URI']
+
+jobs_path = ENV['PROCTOR_JOBS_PATH']
+team_name = ENV['PROCTOR_JOB_TEAM_NAME'] || "test"
+container_registry = ENV["PROCTOR_CONTAINER_REGISTRY"] || "docker.io/proctorscripts"
+metadata_file_name = ENV['PROCTOR_METADATA_FILE_NAME'] || "metadata.json"
+
+sleep(2)
+
+jobs = []
+
+for dir in Dir["#{jobs_path}/*/"]
+  metadata_file = dir + metadata_file_name
+  puts "Processing #{metadata_file}"
+
+  if File.exist?(metadata_file)
+    file = File.read(metadata_file)
+    data_hash = JSON.parse(file)
+    image_name = "#{container_registry}/#{team_name}-#{data_hash['name']}:latest"
+    data_hash['image_name'] = image_name
+    jobs << data_hash
+  end
+end
+
+uri = URI.parse(proctor_uri)
+header = {"Content-Type" => "application/json"}
+
+# Create the HTTP objects
+http = Net::HTTP.new(uri.host, uri.port)
+request = Net::HTTP::Post.new(uri.request_uri, header)
+request.body = jobs.to_json
+
+# Send the request
+puts "making req with body #{request.body}"
+response = http.request(request)
+
+if response.code == "201"
+  puts 'Updated proctor metadata'
+else
+  puts 'Something went wrong while updating proctor metadata! Response from proctor:'
+  puts response
+  exit 1
+end
+