diff --git a/lib/bolt_server/base_config.rb b/lib/bolt_server/base_config.rb index ae2a818c1b..50c66e1dfe 100644 --- a/lib/bolt_server/base_config.rb +++ b/lib/bolt_server/base_config.rb @@ -8,7 +8,7 @@ class BaseConfig def config_keys %w[host port ssl-cert ssl-key ssl-ca-cert ssl-cipher-suites loglevel logfile allowlist - projects-dir environments-codedir + environments-codedir environmentpath basemodulepath] end diff --git a/lib/bolt_server/config.rb b/lib/bolt_server/config.rb index 45c1efbe71..0572e4f803 100644 --- a/lib/bolt_server/config.rb +++ b/lib/bolt_server/config.rb @@ -8,7 +8,7 @@ module BoltServer class Config < BoltServer::BaseConfig def config_keys super + %w[concurrency cache-dir file-server-conn-timeout - file-server-uri projects-dir environments-codedir + file-server-uri environments-codedir environmentpath basemodulepath builtin-content-dir] end @@ -30,7 +30,7 @@ def defaults end def required_keys - super + %w[file-server-uri projects-dir] + super + %w[file-server-uri] end def service_name diff --git a/lib/bolt_server/file_cache.rb b/lib/bolt_server/file_cache.rb deleted file mode 100644 index 241842572c..0000000000 --- a/lib/bolt_server/file_cache.rb +++ /dev/null @@ -1,200 +0,0 @@ -# frozen_string_literal: true - -require 'concurrent/atomic/read_write_lock' -require 'concurrent/executor/single_thread_executor' -require 'concurrent/promise' -require 'concurrent/timer_task' -require 'digest' -require 'fileutils' -require 'net/http' -require 'logging' -require 'timeout' - -require 'bolt/error' - -module BoltServer - class FileCache - class Error < Bolt::Error - def initialize(msg) - super(msg, 'bolt-server/file-cache-error') - end - end - - PURGE_TIMEOUT = 60 * 60 - PURGE_INTERVAL = 24 * PURGE_TIMEOUT - PURGE_TTL = 7 * PURGE_INTERVAL - - def initialize(config, - executor: Concurrent::SingleThreadExecutor.new, - purge_interval: PURGE_INTERVAL, - purge_timeout: PURGE_TIMEOUT, - purge_ttl: PURGE_TTL, - cache_dir_mutex: Concurrent::ReadWriteLock.new, - do_purge: true) - @executor = executor - @cache_dir = config['cache-dir'] - @config = config - @logger = Bolt::Logger.logger(self) - @cache_dir_mutex = cache_dir_mutex - - if do_purge - @purge = Concurrent::TimerTask.new(execution_interval: purge_interval, - run_now: true) { expire(purge_ttl, purge_timeout) } - @purge.execute - end - end - - def tmppath - File.join(@cache_dir, 'tmp') - end - - def setup - FileUtils.mkdir_p(@cache_dir) - FileUtils.mkdir_p(tmppath) - self - end - - def ssl_cert - @ssl_cert ||= File.read(@config['ssl-cert']) - end - - def ssl_key - @ssl_key ||= File.read(@config['ssl-key']) - end - - def client - # rubocop:disable Naming/VariableNumber - @client ||= begin - uri = URI(@config['file-server-uri']) - https = Net::HTTP.new(uri.host, uri.port) - https.use_ssl = true - https.ssl_version = :TLSv1_2 - https.ca_file = @config['ssl-ca-cert'] - https.cert = OpenSSL::X509::Certificate.new(ssl_cert) - https.key = OpenSSL::PKey::RSA.new(ssl_key) - https.verify_mode = OpenSSL::SSL::VERIFY_PEER - https.open_timeout = @config['file-server-conn-timeout'] - https - end - # rubocop:enable Naming/VariableNumber - end - - def request_file(path, params, file) - uri = "#{@config['file-server-uri'].chomp('/')}#{path}" - uri = URI(uri) - uri.query = URI.encode_www_form(params) - - req = Net::HTTP::Get.new(uri) - - begin - client.request(req) do |resp| - if resp.code != "200" - msg = "Failed to download file: #{resp.body}" - @logger.warn resp.body - raise Error, msg - end - resp.read_body do |chunk| - file.write(chunk) - 
end - end - rescue StandardError => e - if e.is_a?(Bolt::Error) - raise e - else - @logger.warn e - raise Error, "Failed to download file: #{e.message}" - end - end - ensure - file.close - end - - def check_file(file_path, sha) - File.exist?(file_path) && Digest::SHA256.file(file_path) == sha - end - - def serial_execute(&block) - promise = Concurrent::Promise.new(executor: @executor, &block).execute.wait - raise promise.reason if promise.rejected? - promise.value - end - - # Create a cache dir if necessary and update it's last write time. Returns the dir. - # Acquires @cache_dir_mutex to ensure we don't try to purge the directory at the same time. - # Uses the directory mtime because it's simpler to ensure the directory exists and update - # mtime in a single place than with a file in a directory that may not exist. - def create_cache_dir(sha) - file_dir = File.join(@cache_dir, sha) - @cache_dir_mutex.with_read_lock do - # mkdir_p doesn't error if the file exists - FileUtils.mkdir_p(file_dir, mode: 0o750) - FileUtils.touch(file_dir) - end - file_dir - end - - def download_file(file_path, sha, uri) - if check_file(file_path, sha) - @logger.debug("File was downloaded while queued: #{file_path}") - return file_path - end - - @logger.debug("Downloading file: #{file_path}") - - tmpfile = Tempfile.new(sha, tmppath) - request_file(uri['path'], uri['params'], tmpfile) - - if Digest::SHA256.file(tmpfile.path) == sha - # mv doesn't error if the file exists - FileUtils.mv(tmpfile.path, file_path) - @logger.debug("Downloaded file: #{file_path}") - file_path - else - msg = "Downloaded file did not match checksum for: #{file_path}" - @logger.warn msg - raise Error, msg - end - end - - # If the file doesn't exist or is invalid redownload it - # This downloads, validates and moves into place - def update_file(file_data) - sha = file_data['sha256'] - file_dir = create_cache_dir(file_data['sha256']) - file_path = File.join(file_dir, File.basename(file_data['filename'])) - if check_file(file_path, sha) - @logger.debug("Using prexisting file: #{file_path}") - return file_path - end - - @logger.debug("Queueing download for: #{file_path}") - serial_execute { download_file(file_path, sha, file_data['uri']) } - end - - def expire(purge_ttl, purge_timeout) - expired_time = Time.now - purge_ttl - Timeout.timeout(purge_timeout) do - @cache_dir_mutex.with_write_lock do - Dir.glob(File.join(@cache_dir, '*')).select { |f| File.directory?(f) }.each do |dir| - if (mtime = File.mtime(dir)) < expired_time && dir != tmppath - @logger.debug("Removing #{dir}, last used at #{mtime}") - FileUtils.remove_dir(dir) - end - end - end - end - end - - def get_cached_project_file(versioned_project, file_name) - file_dir = create_cache_dir(versioned_project) - file_path = File.join(file_dir, file_name) - serial_execute { File.read(file_path) if File.exist?(file_path) } - end - - def cache_project_file(versioned_project, file_name, data) - file_dir = create_cache_dir(versioned_project) - file_path = File.join(file_dir, file_name) - serial_execute { File.open(file_path, 'w') { |f| f.write(data) } } - end - end -end diff --git a/lib/bolt_server/plugin.rb b/lib/bolt_server/plugin.rb deleted file mode 100644 index 26914b8165..0000000000 --- a/lib/bolt_server/plugin.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -require 'bolt/error' - -module BoltServer - class Plugin - class PluginNotSupported < Bolt::Error - def initialize(msg, plugin_name) - super(msg, 'bolt/plugin-not-supported', { "plugin_name" => plugin_name }) - 
end - end - end -end diff --git a/lib/bolt_server/plugin/puppet_connect_data.rb b/lib/bolt_server/plugin/puppet_connect_data.rb deleted file mode 100644 index dc9d16b751..0000000000 --- a/lib/bolt_server/plugin/puppet_connect_data.rb +++ /dev/null @@ -1,37 +0,0 @@ -# frozen_string_literal: true - -module BoltServer - class Plugin - class PuppetConnectData - def initialize(data, **_opts) - @data = data - end - - def name - 'puppet_connect_data' - end - - def hooks - %i[resolve_reference validate_resolve_reference] - end - - def resolve_reference(opts) - key = opts['key'] - - @data.dig(key, 'value') - end - - def validate_resolve_reference(opts) - unless opts['key'] - raise Bolt::ValidationError, - "puppet_connect_data plugin requires that 'key' be specified" - end - - unless @data.key?(opts['key']) - raise Bolt::ValidationError, - "puppet_connect_data plugin tried to lookup key '#{opts['key']}' but no value was found" - end - end - end - end -end diff --git a/lib/bolt_server/schemas/action-apply.json b/lib/bolt_server/schemas/action-apply.json deleted file mode 100644 index 9f91195a4a..0000000000 --- a/lib/bolt_server/schemas/action-apply.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "apply request", - "description": "POST /apply request schema", - "type": "object", - "properties": { - "versioned_project": { - "type": "string", - "description": "Project from which to load code" - }, - "parameters": { - "type": "object", - "properties": { - "catalog": { - "type": "object", - "description": "Compiled catalog to apply" - }, - "apply_options": { - "type": "object", - "description": "Options for application of a catalog" - } - } - }, - "job_id": { - "type": "integer", - "description": "job-id associated with request" - }, - "target": { "$ref": "partial:target-any" } - }, - "required": ["target", "versioned_project", "parameters", "job_id"], - "additionalProperties": false -} diff --git a/lib/bolt_server/schemas/action-apply_prep.json b/lib/bolt_server/schemas/action-apply_prep.json deleted file mode 100644 index a12c95c354..0000000000 --- a/lib/bolt_server/schemas/action-apply_prep.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "apply_prep request", - "description": "POST /apply_prep request schema", - "type": "object", - "properties": { - "versioned_project": { - "type": "String", - "description": "Project from which to load code" - }, - "target": { "$ref": "partial:target-any" }, - "job_id": { - "type": "integer", - "description": "job-id associated with request" - } - }, - "required": ["target", "versioned_project", "job_id"], - "additionalProperties": false -} diff --git a/lib/bolt_server/schemas/partials/task.json b/lib/bolt_server/schemas/partials/task.json index 1de4aa4ff6..a5e48e3034 100644 --- a/lib/bolt_server/schemas/partials/task.json +++ b/lib/bolt_server/schemas/partials/task.json @@ -63,24 +63,9 @@ "environment": { "description": "Environment the task is in", "type": "string" - }, - "versioned_project": { - "description": "Project the task is in", - "type": "string" } }, - "oneOf": [ - { - "required": [ - "environment" - ] - }, - { - "required": [ - "versioned_project" - ] - } - ], + "required": ["environment"], "additionalProperties": true } }, diff --git a/lib/bolt_server/transport_app.rb b/lib/bolt_server/transport_app.rb index c2b576eb8e..7abbcfb07b 100644 --- a/lib/bolt_server/transport_app.rb +++ b/lib/bolt_server/transport_app.rb @@ -5,10 +5,7 @@ require 
'bolt' require 'bolt/error' require 'bolt/inventory' -require 'bolt/project' require 'bolt/target' -require 'bolt_server/file_cache' -require 'bolt_server/plugin' require 'bolt_server/request_error' require 'bolt/task/puppet_server' require 'json' @@ -17,13 +14,6 @@ # These are only needed for the `/plans` endpoint. require 'puppet' -# Needed by the `/project_file_metadatas` endpoint -require 'puppet/file_serving/fileset' - -# Needed by the 'project_facts_plugin_tarball' endpoint -require 'minitar' -require 'zlib' - module BoltServer class TransportApp < Sinatra::Base # This disables Sinatra's error page generation @@ -70,17 +60,9 @@ def initialize(config) @executor = Bolt::Executor.new(0) - @file_cache = BoltServer::FileCache.new(@config).setup - # This is needed until the PAL is threadsafe. @pal_mutex = Mutex.new - # Avoid redundant plugin tarbal construction - @plugin_mutex = Mutex.new - - # Avoid redundant project_task metadata construction - @task_metadata_mutex = Mutex.new - @logger = Bolt::Logger.logger(self) super(nil) @@ -170,94 +152,6 @@ def run_task(target, body) task_helper(target, task, body['parameters'] || {}, body['timeout']) end - def extract_install_task(target) - unless target.plugin_hooks['puppet_library']['task'] - raise BoltServer::RequestError, - "Target must have 'task' plugin hook" - end - install_task = target.plugin_hooks['puppet_library']['task'].split('::', 2) - install_task << 'init' if install_task.count == 1 - install_task - end - - # This helper is responsible for computing or retrieving from the cache a plugin tarball. There are - # two supported plugin types 'fact_plugins', and 'all_plugins'. Note that this is cached based on - # versioned_project as there are no plugins in the "builtin content" directory - def plugin_tarball(versioned_project, tarball_type) - tarball_types = %w[fact_plugins all_plugins] - unless tarball_types.include?(tarball_type) - raise ArgumentError, - "tarball_type must be one of: #{tarball_types.join(', ')}" - end - # lock this so that in the case an apply/apply_prep with multiple targets hits this endpoint - # the tarball computation only happens once (all the other targets will just need to read the cached data) - @plugin_mutex.synchronize do - if (tarball = @file_cache.get_cached_project_file(versioned_project, tarball_type)) - tarball - else - new_tarball = build_project_plugins_tarball(versioned_project) do |mod| - search_dirs = [] - search_dirs << mod.plugins if mod.plugins? - search_dirs << mod.pluginfacts if mod.pluginfacts? - if tarball_type == 'all_plugins' - search_dirs << mod.files if mod.files? - search_dirs << mod.scripts if mod.scripts? - type_files = "#{mod.path}/types" - search_dirs << type_files if File.exist?(type_files) - end - search_dirs - end - @file_cache.cache_project_file(versioned_project, tarball_type, new_tarball) - new_tarball - end - end - end - - # This helper is responsible for computing or retrieving task metadata for a project. - # It expects task name in segments and uses the combination of task name and versioned_project - # as a unique identifier for caching in addition to the job_id. 
The job id is added to protect against - # a case where the buildtin content is update (where the apply_helpers would be managed) - def project_task_metadata(versioned_project, task_name_segments, job_id) - cached_file_name = "#{task_name_segments.join('_')}_#{job_id}" - # lock this so that in the case an apply/apply_prep with multiple targets hits this endpoint the - # metadata computation will only be computed once, then the cache will be read. - @task_metadata_mutex.synchronize do - if (metadata = @file_cache.get_cached_project_file(versioned_project, cached_file_name)) - JSON.parse(metadata) - else - new_metadata = in_bolt_project(versioned_project) do |context| - ps_parameters = { - 'versioned_project' => versioned_project - } - pe_task_info(context[:pal], *task_name_segments, ps_parameters) - end - @file_cache.cache_project_file(versioned_project, cached_file_name, new_metadata.to_json) - new_metadata - end - end - end - - def apply_prep(target, body) - validate_schema(@schemas["action-apply_prep"], body) - plugins_tarball = plugin_tarball(body['versioned_project'], 'fact_plugins') - install_task_segments = extract_install_task(target.first) - task_data = project_task_metadata(body['versioned_project'], install_task_segments, body["job_id"]) - task = Bolt::Task::PuppetServer.new(task_data['name'], task_data['metadata'], task_data['files'], @file_cache) - install_task_result = task_helper(target, task, target.first.plugin_hooks['puppet_library']['parameters'] || {}) - return install_task_result unless install_task_result.ok - task_data = project_task_metadata(body['versioned_project'], %w[apply_helpers custom_facts], body["job_id"]) - task = Bolt::Task::PuppetServer.new(task_data['name'], task_data['metadata'], task_data['files'], @file_cache) - task_helper(target, task, { 'plugins' => plugins_tarball }) - end - - def apply(target, body) - validate_schema(@schemas["action-apply"], body) - plugins_tarball = plugin_tarball(body['versioned_project'], 'all_plugins') - task_data = project_task_metadata(body['versioned_project'], %w[apply_helpers apply_catalog], body["job_id"]) - task = Bolt::Task::PuppetServer.new(task_data['name'], task_data['metadata'], task_data['files'], @file_cache) - task_helper(target, task, body['parameters'].merge({ 'plugins' => plugins_tarball })) - end - def run_command(target, body) validate_schema(@schemas["action-run_command"], body) command = body['command'] @@ -375,37 +269,6 @@ def in_pe_pal_env(environment) end end - def config_from_project(versioned_project) - project_dir = File.join(@config['projects-dir'], versioned_project) - unless Dir.exist?(project_dir) - raise BoltServer::RequestError, - "versioned_project: '#{project_dir}' does not exist" - end - project = Bolt::Project.create_project(project_dir) - Bolt::Config.from_project(project, { log: { 'bolt-debug.log' => 'disable' } }) - end - - def pal_from_project_bolt_config(bolt_config) - modulepath_object = Bolt::Config::Modulepath.new( - bolt_config.modulepath, - boltlib_path: [PE_BOLTLIB_PATH, Bolt::Config::Modulepath::BOLTLIB_PATH], - builtin_content_path: @config['builtin-content-dir'] - ) - Bolt::PAL.new(modulepath_object, nil, nil, nil, nil, nil, bolt_config.project) - end - - def in_bolt_project(versioned_project) - @pal_mutex.synchronize do - bolt_config = config_from_project(versioned_project) - pal = pal_from_project_bolt_config(bolt_config) - context = { - pal: pal, - config: bolt_config - } - yield context - end - end - def pe_plan_info(pal, module_name, plan_name) # Handle case where plan 
name is simply module name with special `init.pp` plan plan_name = if plan_name == 'init' || plan_name.nil? @@ -482,107 +345,6 @@ def plan_list(pal) plans.map { |plan_name| { 'name' => plan_name } } end - def file_metadatas(versioned_project, module_name, file) - result = @pal_mutex.synchronize do - bolt_config = config_from_project(versioned_project) - pal = pal_from_project_bolt_config(bolt_config) - pal.in_bolt_compiler do - mod = Puppet.lookup(:current_environment).module(module_name) - raise BoltServer::RequestError, "module_name: '#{module_name}' does not exist" unless mod - # First, look in the 'old' location /files/. - # If not found, and the path starts with `files` or `scripts`, munge - # the path and look inside that directory. - if (abs_path = mod.file(file)) - { abs_file_path: abs_path, puppetserver_root: "modules/#{module_name}/#{file}" } - else - subdir, relative_path = file.split(File::SEPARATOR, 2) - abs_path, mount = case subdir - when 'files' - [mod.file(relative_path), 'modules'] - when 'scripts' - [mod.script(relative_path), 'scripts'] - end - next nil unless abs_path - { abs_file_path: abs_path, puppetserver_root: "#{mount}/#{module_name}/#{relative_path}" } - end - end - end - - unless result - raise BoltServer::RequestError, - "file: '#{file}' does not exist inside #{module_name} 'files' or 'scripts' directories" - end - - abs_file_path = result[:abs_file_path] - puppetserver_root = result[:puppetserver_root] - - fileset = Puppet::FileServing::Fileset.new(abs_file_path, 'recurse' => 'yes') - Puppet::FileServing::Fileset.merge(fileset).collect do |relative_file_path, base_path| - metadata = Puppet::FileServing::Metadata.new(base_path, relative_path: relative_file_path) - metadata.checksum_type = 'sha256' - metadata.links = 'follow' - metadata.collect - metadata.to_data_hash.merge(puppetserver_root: puppetserver_root) - end - end - - # The provided block takes a module object and returns the list - # of directories to search through. This is similar to - # Bolt::Applicator.build_plugin_tarball. 
- def build_project_plugins_tarball(versioned_project, &block) - start_time = Time.now - - # Fetch the plugin files - plugin_files = in_bolt_project(versioned_project) do |context| - files = {} - - # Bolt also sets plugin_modulepath to user modulepath so do it here too for - # consistency - plugin_modulepath = context[:pal].user_modulepath - Puppet.lookup(:current_environment).override_with(modulepath: plugin_modulepath).modules.each do |mod| - search_dirs = block.call(mod) - - files[mod] ||= [] - Find.find(*search_dirs).each do |file| - files[mod] << file if File.file?(file) - end - end - - files - end - - # Pack the plugin files - sio = StringIO.new - begin - output = Minitar::Output.new(Zlib::GzipWriter.new(sio)) - - plugin_files.each do |mod, files| - tar_dir = Pathname.new(mod.name) - mod_dir = Pathname.new(mod.path) - - files.each do |file| - tar_path = tar_dir + Pathname.new(file).relative_path_from(mod_dir) - stat = File.stat(file) - content = File.binread(file) - output.tar.add_file_simple( - tar_path.to_s, - data: content, - size: content.size, - mode: stat.mode & 0o777, - mtime: stat.mtime - ) - end - end - - duration = Time.now - start_time - @logger.trace("Packed plugins in #{duration * 1000} ms") - ensure - output.close - end - - Base64.encode64(sio.string) - end - get '/' do 200 end @@ -638,8 +400,7 @@ def make_ssh_target(target_hash) 'config' => { 'transport' => 'ssh', 'ssh' => opts.slice(*Bolt::Config::Transport::SSH.options) - }, - 'plugin_hooks' => target_hash['plugin_hooks'] + } } inventory = Bolt::Inventory.empty @@ -677,8 +438,7 @@ def make_winrm_target(target_hash) 'config' => { 'transport' => 'winrm', 'winrm' => opts.slice(*Bolt::Config::Transport::WinRM.options) - }, - 'plugin_hooks' => target_hash['plugin_hooks'] + } } inventory = Bolt::Inventory.empty @@ -713,18 +473,6 @@ def make_winrm_target(target_hash) end end - # Fetches the metadata for a single plan - # - # @param versioned_project [String] the project to fetch the plan from - get '/project_plans/:module_name/:plan_name' do - raise BoltServer::RequestError, "'versioned_project' is a required argument" if params['versioned_project'].nil? - in_bolt_project(params['versioned_project']) do |context| - plan_info = pe_plan_info(context[:pal], params[:module_name], params[:plan_name]) - plan_info = allowed_helper(context[:pal], plan_info, context[:config].project.plans) - [200, plan_info.to_json] - end - end - # Fetches the metadata for a single task # # @param environment [String] the environment to fetch the task from @@ -738,21 +486,6 @@ def make_winrm_target(target_hash) end end - # Fetches the metadata for a single task - # - # @param bolt_versioned_project [String] the reference to the bolt-project directory to load task metadata from - get '/project_tasks/:module_name/:task_name' do - raise BoltServer::RequestError, "'versioned_project' is a required argument" if params['versioned_project'].nil? 
- in_bolt_project(params['versioned_project']) do |context| - ps_parameters = { - 'versioned_project' => params['versioned_project'] - } - task_info = pe_task_info(context[:pal], params[:module_name], params[:task_name], ps_parameters) - task_info = allowed_helper(context[:pal], task_info, context[:config].project.tasks) - [200, task_info.to_json] - end - end - # Fetches the list of plans for an environment, optionally fetching all metadata for each plan # # @param environment [String] the environment to fetch the list of plans from @@ -777,25 +510,6 @@ def make_winrm_target(target_hash) end end - # Fetches the list of plans for a project - # - # @param versioned_project [String] the project to fetch the list of plans from - get '/project_plans' do - raise BoltServer::RequestError, "'versioned_project' is a required argument" if params['versioned_project'].nil? - in_bolt_project(params['versioned_project']) do |context| - plans_response = plan_list(context[:pal]) - - # Dig in context for the allowlist of plans from project object - plans_response.map! { |metadata| allowed_helper(context[:pal], metadata, context[:config].project.plans) } - - # We structure this array of plans to be an array of hashes so that it matches the structure - # returned by the puppetserver API that serves data like this. Structuring the output this way - # makes switching between puppetserver and bolt-server easier, which makes changes to switch - # to bolt-server smaller/simpler. - [200, plans_response.to_json] - end - end - # Fetches the list of tasks for an environment # # @param environment [String] the environment to fetch the list of tasks from @@ -811,62 +525,6 @@ def make_winrm_target(target_hash) end end - # Fetches the list of tasks for a bolt-project - # - # @param versioned_project [String] the project to fetch the list of tasks from - get '/project_tasks' do - raise BoltServer::RequestError, "'versioned_project' is a required argument" if params['versioned_project'].nil? - in_bolt_project(params['versioned_project']) do |context| - tasks_response = task_list(context[:pal]) - - # Dig in context for the allowlist of tasks from project object - tasks_response.map! { |metadata| allowed_helper(context[:pal], metadata, context[:config].project.tasks) } - - # We structure this array of tasks to be an array of hashes so that it matches the structure - # returned by the puppetserver API that serves data like this. Structuring the output this way - # makes switching between puppetserver and bolt-server easier, which makes changes to switch - # to bolt-server smaller/simpler. - [200, tasks_response.to_json] - end - end - - # Implements puppetserver's file_metadatas endpoint for projects. - # - # @param versioned_project [String] the versioned_project to fetch the file metadatas from - get '/project_file_metadatas/:module_name/*' do - raise BoltServer::RequestError, "'versioned_project' is a required argument" if params['versioned_project'].nil? - file = params[:splat].first - metadatas = file_metadatas(params['versioned_project'], params[:module_name], file) - [200, metadatas.to_json] - rescue ArgumentError => e - [500, e.message] - end - - # Returns the base64 encoded tar archive of plugin code that is needed to calculate - # custom facts - # - # @param versioned_project [String] the versioned_project to build the plugin tarball from - get '/project_facts_plugin_tarball' do - raise BoltServer::RequestError, "'versioned_project' is a required argument" if params['versioned_project'].nil? 
- content_type :json - - plugins_tarball = plugin_tarball(params['versioned_project'], 'fact_plugins') - - [200, plugins_tarball.to_json] - end - - # Returns the base64 encoded tar archive of _all_ plugin code for a project - # - # @param versioned_project [String] the versioned_project to build the plugin tarball from - get '/project_plugin_tarball' do - raise BoltServer::RequestError, "'versioned_project' is a required argument" if params['versioned_project'].nil? - content_type :json - - plugins_tarball = plugin_tarball(params['versioned_project'], 'all_plugins') - - [200, plugins_tarball.to_json] - end - error 404 do err = Bolt::Error.new("Could not find route #{request.path}", 'boltserver/not-found') diff --git a/spec/bolt_server/app_integration_spec.rb b/spec/bolt_server/app_integration_spec.rb index ff68f9c40d..34f2684dc1 100644 --- a/spec/bolt_server/app_integration_spec.rb +++ b/spec/bolt_server/app_integration_spec.rb @@ -176,191 +176,6 @@ def app end end - describe 'apply_prep' do - let(:path) { "/ssh/apply_prep" } - - it 'apply_prep runs install task configured in plugin_hooks and gathers custom facts' do - # Target a spec container that already has an agent on it - inventory = Bolt::Inventory.empty - puppet_6_agent_container = inventory.get_target(conn_uri('ssh', include_password: true, override_port: 20024)) - target = { - hostname: puppet_6_agent_container.host, - user: puppet_6_agent_container.user, - password: puppet_6_agent_container.password, - port: puppet_6_agent_container.port, - plugin_hooks: { 'puppet_library' => { 'plugin' => 'task', 'task' => 'fake_puppet_agent::install', - 'parameters' => {} } } - } - body = { - versioned_project: 'bolt_server_test_project', - target: target, - job_id: 1 - } - post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json') - expect(last_response).to be_ok - expect(last_response.status).to eq(200) - result = JSON.parse(last_response.body) - expect(result['status']).to eq('success') - expect(result['value'].keys).to include('os') - end - - it 'apply_prep fails when install task fails' do - # Target a spec container that already has an agent on it - inventory = Bolt::Inventory.empty - puppet_6_agent_container = inventory.get_target(conn_uri('ssh', include_password: true, override_port: 20024)) - target = { - hostname: puppet_6_agent_container.host, - user: puppet_6_agent_container.user, - password: puppet_6_agent_container.password, - port: puppet_6_agent_container.port, - plugin_hooks: { 'puppet_library' => { 'plugin' => 'task', 'task' => 'fake_puppet_agent::install', - 'parameters' => { 'fail' => true } } } - } - body = { - versioned_project: 'bolt_server_test_project', - target: target, - job_id: 1 - } - post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json') - expect(last_response).to be_ok - expect(last_response.status).to eq(200) - result = JSON.parse(last_response.body) - expect(result['status']).to eq('failure') - expect(result['object']).to eq('fake_puppet_agent::install') - end - - it 'apply_prep fails when target does not define suitable puppet_library plugin_hook' do - # Target a spec container that already has an agent on it - inventory = Bolt::Inventory.empty - puppet_6_agent_container = inventory.get_target(conn_uri('ssh', include_password: true, override_port: 20024)) - target = { - hostname: puppet_6_agent_container.host, - user: puppet_6_agent_container.user, - password: puppet_6_agent_container.password, - port: puppet_6_agent_container.port - } - body = { - versioned_project: 'bolt_server_test_project', - 
target: target, - job_id: 1 - } - post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json') - expect(last_response.status).to eq(400) - result = JSON.parse(last_response.body) - expect(result['kind']).to eq('bolt-server/request-error') - expect(result['msg']).to eq("Target must have 'task' plugin hook") - end - end - - describe "apply" do - def cross_platform_catalog(certname) - { - "catalog" => { - "tags" => [ - "settings" - ], - "name" => certname, - "version" => 1581636379, - "code_id" => nil, - "catalog_uuid" => "5a4372c6-253f-46df-be99-3c40c9922423", - "catalog_format" => 1, - "environment" => 'bolt_catalog', - "resources" => [ - { - "type" => "Stage", - "title" => "main", - "tags" => %w[ - stage - class - ], - "exported" => false, - "parameters" => { - "name" => "main" - } - }, - { - "type" => "Class", - "title" => "Settings", - "tags" => %w[ - class - settings - ], - "exported" => false - }, - { - "type" => "Class", - "title" => "main", - "tags" => [ - "class" - ], - "exported" => false, - "parameters" => { - "name" => "main" - } - }, - { - "type" => "Notify", - "title" => "hello world", - "tags" => %w[ - notify - class - ], - "line" => 1, - "exported" => false - } - ], - "edges" => [ - { - "source" => "Stage[main]", - "target" => "Class[Settings]" - }, - { - "source" => "Stage[main]", - "target" => "Class[main]" - }, - { - "source" => "Class[main]", - "target" => "Notify[hello world]" - } - ], - "classes" => [ - "settings" - ] - } - } - end - - it 'applies a catalog' do - # Target a spec container that already has an agent on it - path = 'ssh/apply' - inventory = Bolt::Inventory.empty - puppet_6_agent_container = inventory.get_target(conn_uri('ssh', include_password: true, override_port: 20024)) - target = { - hostname: puppet_6_agent_container.host, - user: puppet_6_agent_container.user, - password: puppet_6_agent_container.password, - port: puppet_6_agent_container.port, - plugin_hooks: { 'puppet_library' => { 'plugin' => 'task', 'task' => 'fake_puppet_agent::install', - 'parameters' => {} } } - } - body = { - versioned_project: 'bolt_server_test_project', - target: target, - parameters: { - catalog: cross_platform_catalog(target[:hostname])['catalog'], - apply_settings: {} - }, - job_id: 1 - } - post(path, JSON.generate(body), 'CONTENT_TYPE' => 'text/json') - expect(last_response).to be_ok - expect(last_response.status).to eq(200) - result = JSON.parse(last_response.body) - expect(result['status']).to eq('success') - expect(result['value']['resource_statuses'].keys).to include('Notify[hello world]') - end - end - describe "run_script" do let(:path) { '/ssh/run_script' } diff --git a/spec/bolt_server/file_cache_spec.rb b/spec/bolt_server/file_cache_spec.rb deleted file mode 100644 index a83ef43bc5..0000000000 --- a/spec/bolt_server/file_cache_spec.rb +++ /dev/null @@ -1,173 +0,0 @@ -# frozen_string_literal: true - -require 'fileutils' -require 'spec_helper' -require 'bolt_spec/conn' -require 'bolt_spec/bolt_server' -require 'bolt_server/config' -require 'bolt_server/file_cache' -require 'json' -require 'rack/test' - -describe BoltServer::FileCache, puppetserver: true do - include BoltSpec::BoltServer - - before(:all) do - wait_until_available(timeout: 120, interval: 1) - end - - before(:each) do - FileUtils.rm_rf(default_config['cache-dir']) - end - - let(:config) do - BoltServer::Config.new(default_config) - end - - let(:file_cache) do - cache = BoltServer::FileCache.new(config) - cache.setup - cache - end - - it 'gets files from puppetserver' do - data = 
get_task_data('sample::echo')['files'].first - - path = file_cache.update_file(data) - expect(path).to eq(File.join(default_config['cache-dir'], data['sha256'], data['filename'])) - expect(File.read(path)).to eq("#!/bin/sh\n\necho $(hostname) got passed the message: $PT_message\n") - end - - it 'overwrites bad files' do - data = get_task_data('sample::echo')['files'].first - expected_path = File.join(default_config['cache-dir'], data['sha256'], data['filename']) - FileUtils.mkdir_p(File.dirname(expected_path)) - File.write(expected_path, "bad-data") - - path = file_cache.update_file(data) - expect(path).to eq(expected_path) - expect(File.read(path)).to eq("#!/bin/sh\n\necho $(hostname) got passed the message: $PT_message\n") - end - - it 'does not download existing files' do - data = get_task_data('sample::echo')['files'].first - file_content = 'good-data' - data['sha256'] = Digest::SHA256.hexdigest(file_content) - expected_dir = File.join(default_config['cache-dir'], data['sha256']) - expected_path = File.join(expected_dir, data['filename']) - FileUtils.mkdir_p(expected_dir) - File.write(expected_path, file_content) - mtime = File.mtime(expected_dir) - sleep(1) - - path = file_cache.update_file(data) - expect(path).to eq(expected_path) - expect(File.read(path)).to eq(file_content) - expect(File.mtime(expected_dir)).not_to eq(mtime) - end - - it 'purges old files on startup' do - file_content = 'good-data' - sha = Digest::SHA256.hexdigest(file_content) - expected_dir = File.join(default_config['cache-dir'], sha) - FileUtils.mkdir_p(expected_dir) - File.write(File.join(expected_dir, 'task'), file_content) - FileUtils.touch(expected_dir, mtime: Time.now - (BoltServer::FileCache::PURGE_TTL + 1)) - FileUtils.touch(file_cache.tmppath, mtime: Time.now - (BoltServer::FileCache::PURGE_TTL + 1)) - - expect(file_cache).to be - gone = 10.times do - sleep 0.1 - break true unless File.exist?(expected_dir) - end - expect(gone).to eq(true) - expect(File.exist?(file_cache.tmppath)).to eq(true) - end - - it 'purges old files after the purge_interval' do - file_content = 'good-data' - sha = Digest::SHA256.hexdigest(file_content) - expected_dir = File.join(default_config['cache-dir'], sha) - FileUtils.mkdir_p(expected_dir) - File.write(File.join(expected_dir, 'task'), file_content) - - cache = BoltServer::FileCache.new(config, purge_interval: 1) - cache.setup - - FileUtils.touch(expected_dir, mtime: Time.now - (BoltServer::FileCache::PURGE_TTL + 1)) - FileUtils.touch(file_cache.tmppath, mtime: Time.now - (BoltServer::FileCache::PURGE_TTL + 1)) - gone = 10.times do - sleep 0.5 - break true unless File.exist?(expected_dir) - end - expect(gone).to eq(true) - expect(File.exist?(file_cache.tmppath)).to eq(true) - end - - it 'fails when the downloaded file is invalid' do - data = get_task_data('sample::echo')['files'].first - data['sha256'] = Digest::SHA256.hexdigest('bad-data') - - expect { file_cache.update_file(data) }.to raise_error(/did not match checksum/) - end - - context 'When do_purge is false' do - let(:file_cache) do - BoltServer::FileCache.new(config, do_purge: false) - end - - it 'will not set up purge timer' do - expect(file_cache.instance_variable_get(:@purge)).to be_nil - end - end - - context 'When do_purge is true' do - let(:file_cache) do - BoltServer::FileCache.new(config, do_purge: true) - end - - it 'will create and run the purge timer' do - expect(file_cache.instance_variable_get(:@purge)).to be_a(Concurrent::TimerTask) - end - end - - context 'When do_purge is true and cache_dir_mutex is 
specified' do - let(:other_mutex) { double('other_mutex') } - let(:file_cache) do - BoltServer::FileCache.new(config, - purge_interval: 1, - purge_timeout: 1, - purge_ttl: 1, - cache_dir_mutex: other_mutex, - do_purge: true) - end - - it 'will create and run the purge timer' do - expect(file_cache.instance_variable_get(:@purge)).to be_a(Concurrent::TimerTask) - expect(file_cache.instance_variable_get(:@cache_dir_mutex)).to eq(other_mutex) - - file_cache - end - end - - context "with project file caching" do - it 'properly manages reading and writing data from multiple threads' do - num_threads = 10 - file_chars = 10000 - file_content = 'a' * file_chars - # Prime the cache (even though the thread that writes is started first, there is no guarantee it executes first) - file_cache.cache_project_file('foo_bar', 'baz', file_content) - threads = num_threads.times.map do |i| - if i.even? - Thread.new { file_cache.cache_project_file('foo_bar', 'baz', file_content) } - else - Thread.new { file_cache.get_cached_project_file('foo_bar', 'baz') } - end - end - threads.each_with_index do |t, i| - expected = i.even? ? file_chars : file_content - expect(t.join.value).to eq(expected) - end - end - end -end diff --git a/spec/bolt_server/plugin/puppet_connect_data_spec.rb b/spec/bolt_server/plugin/puppet_connect_data_spec.rb deleted file mode 100644 index afd8055b2f..0000000000 --- a/spec/bolt_server/plugin/puppet_connect_data_spec.rb +++ /dev/null @@ -1,49 +0,0 @@ -# frozen_string_literal: true - -require 'bolt_server/plugin/puppet_connect_data' -require 'spec_helper' - -describe BoltServer::Plugin::PuppetConnectData do - let(:data) do - { 'mykey' => { 'value' => 'somevalue' } } - end - - subject { described_class.new(data) } - context 'initializing the plugin' do - it 'defines the correct plugin name' do - expect(subject.name).to eq('puppet_connect_data') - end - - it 'defines resolve_reference hooks' do - expect(subject.hooks).to include(:resolve_reference, :validate_resolve_reference) - end - end - - context 'validating references' do - it 'fails if no key is specified' do - reference = { '_plugin' => 'puppet_connect_data' } - expect { subject.validate_resolve_reference(reference) }.to raise_error( - Bolt::ValidationError, /requires.*key/ - ) - end - - it 'fails if no value exists for the key' do - reference = { '_plugin' => 'puppet_connect_data', 'key' => 'nosuchkey' } - expect { subject.validate_resolve_reference(reference) }.to raise_error( - Bolt::ValidationError, /tried to lookup key 'nosuchkey'/ - ) - end - - it 'succeeds if a value exists for the key' do - reference = { '_plugin' => 'puppet_connect_data', 'key' => 'mykey' } - expect { subject.validate_resolve_reference(reference) }.not_to raise_error - end - end - - context 'looking up data' do - it 'returns the "value" field for the key' do - reference = { '_plugin' => 'puppet_connect_data', 'key' => 'mykey' } - expect(subject.resolve_reference(reference)).to eq('somevalue') - end - end -end diff --git a/spec/bolt_server/transport_app_spec.rb b/spec/bolt_server/transport_app_spec.rb index 4a10caef87..9f6583a8eb 100644 --- a/spec/bolt_server/transport_app_spec.rb +++ b/spec/bolt_server/transport_app_spec.rb @@ -3,7 +3,6 @@ require 'spec_helper' require 'bolt_spec/bolt_server' require 'bolt_spec/conn' -require 'bolt_spec/file_cache' require 'bolt_spec/files' require 'bolt_server/config' require 'bolt_server/transport_app' @@ -12,7 +11,6 @@ require 'puppet/environments' require 'digest' require 'pathname' -require 'puppet/module_tool/tar' describe 
"BoltServer::TransportApp" do include BoltSpec::BoltServer @@ -23,14 +21,13 @@ let(:basedir) { fixtures_path('bolt_server') } let(:environment_dir) { File.join(basedir, 'environments', 'production') } - let(:project_dir) { File.join(basedir, 'projects') } def app # The moduledir and mock file cache are used in the tests for task # execution tests. Everything else uses the fixtures above. moduledir = fixtures_path('modules') mock_file_cache(moduledir) - config = BoltServer::Config.new({ 'projects-dir' => project_dir, 'environments-codedir' => basedir }) + config = BoltServer::Config.new({ 'environments-codedir' => basedir }) BoltServer::TransportApp.new(config) end @@ -44,23 +41,6 @@ def file_data(file) 'size' => File.size(file) } end - def with_project(bolt_project, inventory_content, inventory_name = 'inventory.yaml') - Dir.mktmpdir(nil, project_dir) do |tmpdir| - File.write(File.join(tmpdir, 'bolt-project.yaml'), bolt_project.to_yaml) - File.write(File.join(tmpdir, inventory_name), inventory_content.to_yaml) unless inventory_content.nil? - yield tmpdir - end - end - - def unpack_tarball(base64_encoding, tmpdir) - plugins = File.join(tmpdir, "plugins.tar.gz") - File.binwrite(plugins, Base64.decode64(base64_encoding)) - user = Etc.getpwuid.nil? ? Etc.getlogin : Etc.getpwuid.name - moduledir = File.join(tmpdir, "modules") - Puppet::ModuleTool::Tar.instance.unpack(plugins, moduledir, user) - moduledir - end - # Returns all files under dir relative to dir def list_all_files(dir) parent = Pathname.new(dir) @@ -218,108 +198,7 @@ def mock_plan_info(full_name) end end - describe '/project_plans/:module_name/:plan_name' do - context 'with module_name::plan_name' do - let(:path) { "/project_plans/bolt_server_test_project/simple_plan?versioned_project=bolt_server_test_project" } - let(:expected_response) { - { - 'name' => 'bolt_server_test_project::simple_plan', - 'description' => 'Simple plan testing', - 'parameters' => { 'foo' => { 'sensitive' => false, 'type' => 'String' } }, - 'allowed' => false, - 'private' => false, - 'summary' => nil, - 'docstring' => 'Simple plan testing' - } - } - it '/project_plans/:module_name/:plan_name handles module::plan_name' do - get(path) - resp = JSON.parse(last_response.body) - expect(resp).to eq(expected_response) - end - end - - context 'with module_name' do - let(:init_plan) { "/project_plans/bolt_server_test_project/init?versioned_project=bolt_server_test_project" } - let(:expected_response) { - { - 'name' => 'bolt_server_test_project', - 'description' => 'Project plan testing', - 'parameters' => { 'foo' => { 'sensitive' => false, 'type' => 'String' } }, - 'allowed' => true, - 'private' => false, - 'summary' => nil, - 'docstring' => 'Project plan testing' - } - } - it '/project_plans/:module_name/:plan_name handles plan name = module name (init.pp) plan' do - get(init_plan) - resp = JSON.parse(last_response.body) - expect(resp).to eq(expected_response) - end - end - - context 'allowlist contains a glob' do - let(:plan) do - "/project_plans/bolt_server_test_project/allowlist_glob?versioned_project=bolt_server_test_project" - end - let(:expected_response) { - { - 'name' => 'bolt_server_test_project::allowlist_glob', - 'description' => 'Project plan testing', - 'parameters' => { 'foo' => { 'sensitive' => false, 'type' => 'String' } }, - 'allowed' => true, - 'private' => false, - 'summary' => nil, - 'docstring' => 'Project plan testing' - } - } - it 'properly filters allowed and disallowed plans' do - get(plan) - resp = JSON.parse(last_response.body) - 
expect(resp).to eq(expected_response) - end - end - - context 'with non-existent plan' do - let(:path) { "/project_plans/foo/bar?versioned_project=bolt_server_test_project" } - it 'returns 404 if an unknown plan error is thrown' do - get(path) - expect(last_response.status).to eq(404) - err = JSON.parse(last_response.body) - expect(err['kind']).to eq('bolt-server/request-error') - expect(err['msg']).to eq("Could not find a plan named 'foo::bar'") - end - end - end - - describe '/project_plans' do - describe 'when requesting plan list' do - context 'with an existing project' do - let(:path) { "/project_plans?versioned_project=bolt_server_test_project" } - it 'returns the plans and filters based on allowlist in bolt-project.yaml' do - get(path) - metadata = JSON.parse(last_response.body) - expect(metadata).to include( - { 'name' => 'bolt_server_test_project', 'allowed' => true }, - { 'name' => 'bolt_server_test_project::simple_plan', 'allowed' => false }, - { 'name' => 'bolt_server_test_project::allowlist_glob', 'allowed' => true } - ) - end - end - - context 'with a non existent project' do - let(:path) { "/project_plans/foo/bar?versioned_project=not_a_real_project" } - it 'returns 400 if an versioned_project not found error is thrown' do - get(path) - error = JSON.parse(last_response.body) - expect(error['msg']).to match(/not_a_real_project' does not exist/) - expect(last_response.status).to eq(400) - end - end - end - end - + describe '/tasks' do context 'with a non existent environment' do let(:path) { "/tasks?environment=production" } @@ -350,21 +229,6 @@ def mock_plan_info(full_name) end end - describe '/project_tasks' do - context 'with an existing project' do - let(:path) { "/project_tasks?versioned_project=bolt_server_test_project" } - it 'returns the tasks and filters based on allowlist in bolt-project.yaml' do - get(path) - metadata = JSON.parse(last_response.body) - expect(metadata).to include( - { 'name' => 'bolt_server_test_project', 'allowed' => true }, - { 'name' => 'bolt_server_test_project::hidden', 'allowed' => false }, - { 'name' => 'bolt_server_test_project::allowlist_glob', 'allowed' => true } - ) - end - end - end - describe '/tasks/:module_name/:task_name' do context 'with module_name::task_name' do let(:path) { '/tasks/bolt_server_test/simple_task?environment=production' } @@ -443,86 +307,6 @@ def mock_plan_info(full_name) end end - describe '/project_tasks/:module_name/:task_name' do - context 'with module_name::task_name' do - let(:path) { "/project_tasks/bolt_server_test_project/hidden?versioned_project=bolt_server_test_project" } - let(:expected_response) { - { - "metadata" => { "description" => "Project task testing" }, - "name" => "bolt_server_test_project::hidden", - "files" => [ - { - "filename" => "hidden.sh", - "sha256" => Digest::SHA256.hexdigest( - File.read( - File.join(project_dir, 'bolt_server_test_project', 'tasks', 'hidden.sh') - ) - ), - "size_bytes" => File.size( - File.join(project_dir, 'bolt_server_test_project', 'tasks', 'hidden.sh') - ), - "uri" => { - "path" => "/puppet/v3/file_content/tasks/bolt_server_test_project/hidden.sh", - "params" => { "versioned_project" => 'bolt_server_test_project' } - } - } - ], - "allowed" => false - } - } - it '/project_tasks/:module_name/:task_name handles module::task_name' do - get(path) - resp = JSON.parse(last_response.body) - expect(resp).to eq(expected_response) - end - end - - context 'with module_name' do - let(:path) { 
"/project_tasks/bolt_server_test_project/init?versioned_project=bolt_server_test_project" } - let(:expected_response) { - { - "metadata" => { "description" => "Project task testing" }, - "name" => "bolt_server_test_project", - "files" => [ - { - "filename" => "init.sh", - "sha256" => Digest::SHA256.hexdigest( - File.read( - File.join(project_dir, 'bolt_server_test_project', 'tasks', 'init.sh') - ) - ), - "size_bytes" => File.size( - File.join(project_dir, 'bolt_server_test_project', 'tasks', 'init.sh') - ), - "uri" => { - "path" => "/puppet/v3/file_content/tasks/bolt_server_test_project/init.sh", - "params" => { "versioned_project" => 'bolt_server_test_project' } - } - } - ], - "allowed" => true - } - } - - it '/prject_tasks/:module_name/:task_name handles task name = module name (init.rb) task' do - get(path) - resp = JSON.parse(last_response.body) - expect(resp).to eq(expected_response) - end - end - - context 'with non-existent task' do - let(:path) { "/project_tasks/foo/bar?versioned_project=bolt_server_test_project" } - it 'returns 404 if an unknown task error is thrown' do - get(path) - expect(last_response.status).to eq(404) - err = JSON.parse(last_response.body) - expect(err['kind']).to eq('bolt-server/request-error') - expect(err['msg']).to eq("Could not find a task named 'foo::bar'") - end - end - end - describe '/ssh/*' do let(:path) { "/ssh/#{action}" } let(:target) { conn_info('ssh') } @@ -957,339 +741,5 @@ def post_over_transport(transport, action, body_content, multiple: false) end end end - - describe '/project_file_metadatas/:module_name/:file' do - let(:versioned_project) { 'bolt_server_test_project' } - let(:mod) { 'project_module' } - let(:modpath) { - File.join(project_dir, versioned_project, 'modules', 'project_module') - } - - it 'returns 400 if versioned_project is not specified' do - get('/project_file_metadatas/foo_module/foo_file') - expect(last_response.status).to eq(400) - error = JSON.parse(last_response.body) - expect(error['msg']).to match("'versioned_project' is a required argument") - end - - it 'returns 400 if versioned_project does not exist' do - get("/project_file_metadatas/bar/foo?versioned_project=not_a_real_project") - expect(last_response.status).to eq(400) - error = JSON.parse(last_response.body) - expect(error['msg']).to match(/not_a_real_project' does not exist/) - end - - it 'returns 400 if module_name does not exist' do - get("/project_file_metadatas/bar/foo?versioned_project=#{versioned_project}") - expect(last_response.status).to eq(400) - error = JSON.parse(last_response.body) - expect(error['msg']).to match(/bar' does not exist/) - end - - it 'returns 400 if file does not exist in the module' do - get("/project_file_metadatas/project_module/not_a_real_file?versioned_project=#{versioned_project}") - expect(last_response.status).to eq(400) - error = JSON.parse(last_response.body) - expect(error['msg']).to match(/not_a_real_file' does not exist/) - end - - it 'returns 400 if the file includes scripts/ and does not exist' do - get("/project_file_metadatas/project_module/scripts/not_a_real_file?versioned_project=#{versioned_project}") - expect(last_response.status).to eq(400) - error = JSON.parse(last_response.body) - expect(error['msg']).to match(/not_a_real_file' does not exist/) - end - - shared_examples 'valid data' do - context "with a valid filepath to one file", ssh: true do - let(:file_checksum) { Digest::SHA256.hexdigest(File.read(test_file)) } - let(:expected_response) { - [ - { - "path" => test_file, - "relative_path" => ".", - 
"links" => "follow", - "owner" => File.stat(test_file).uid, - "group" => File.stat(test_file).gid, - "checksum" => { - "type" => "sha256", - "value" => "{sha256}#{file_checksum}" - }, - "type" => "file", - "destination" => nil, - "puppetserver_root" => test_file_puppetserver_root - } - ] - } - - it 'returns the file metadata of the file, prefering the files directory' do - get("/project_file_metadatas/project_module/#{file_request}?versioned_project=#{versioned_project}") - file_metadatas = JSON.parse(last_response.body) - # I don't know why the mode returned by puppet is not the same as the mode returned - # from ruby's File.stat(test_file) function. But these tests probably don't need to - # cover the specifics of what puppet returns, plus we don't use this metadata in - # orch anyway, so ignore the mode part of the respose. - # - Sean P. McDonald 10/15/2020 - file_metadatas.each do |entry| - entry.delete("mode") - end - expect(file_metadatas).to eq(expected_response) - expect(last_response.status).to eq(200) - end - end - - context "with a directory", ssh: true do - let(:file_checksum) { Digest::SHA256.hexdigest(File.read(file_in_dir)) } - let(:expected_response) { - [ - { - "path" => test_dir, - "relative_path" => ".", - "links" => "follow", - "owner" => File.stat(test_dir).uid, - "group" => File.stat(test_dir).gid, - "checksum" => { - "type" => "ctime", - "value" => "{ctime}#{File.ctime(test_dir)}" - }, - "type" => "directory", - "destination" => nil, - "puppetserver_root" => test_dir_puppetserver_root - }, - { - "path" => test_dir, - "relative_path" => File.basename(file_in_dir), - "links" => "follow", - "owner" => File.stat(file_in_dir).uid, - "group" => File.stat(file_in_dir).gid, - "checksum" => { - "type" => "sha256", - "value" => "{sha256}#{file_checksum}" - }, - "type" => "file", - "destination" => nil, - "puppetserver_root" => test_dir_puppetserver_root - } - ] - } - - it 'returns the file metadata of the directory and all its children' do - get("/project_file_metadatas/project_module/#{dir_request}?versioned_project=#{versioned_project}") - file_metadatas = JSON.parse(last_response.body) - # I don't know why the mode returned by puppet is not the same as the mode returned - # from ruby's File.stat(test_file) function. But these tests probably don't need to - # cover the specifics of what puppet returns, plus we don't use this metadata in - # orch anyway, so ignore the mode part of the respose. - # - Sean P. 
McDonald 10/15/2020
-          file_metadatas.each do |entry|
-            entry.delete("mode")
-          end
-          expect(file_metadatas).to eq(expected_response)
-          expect(last_response.status).to eq(200)
-        end
-      end
-    end
-
-    context "using nonspecific Puppet file ref (/)" do
-      let(:test_file) { File.join(modpath, 'files', 'test_file') }
-      let(:file_request) { 'test_file' }
-      let(:test_file_puppetserver_root) { "modules/#{mod}/test_file" }
-      let(:test_dir) { File.join(modpath, 'files', 'test_dir') }
-      let(:dir_request) { 'test_dir' }
-      let(:test_dir_puppetserver_root) { "modules/#{mod}/test_dir" }
-      let(:file_in_dir) { File.join(test_dir, 'test_dir_file') }
-
-      include_examples 'valid data'
-
-      context "when the file path contains '/'", ssh: true do
-        let(:file_in_dir) { File.join(modpath, 'files', 'test_dir', 'test_dir_file') }
-        let(:file_checksum) { Digest::SHA256.hexdigest(File.read(file_in_dir)) }
-        let(:expected_response) {
-          [
-            {
-              "path" => file_in_dir,
-              "relative_path" => ".",
-              "links" => "follow",
-              "owner" => File.stat(file_in_dir).uid,
-              "group" => File.stat(file_in_dir).gid,
-              "checksum" => {
-                "type" => "sha256",
-                "value" => "{sha256}#{file_checksum}"
-              },
-              "type" => "file",
-              "destination" => nil,
-              "puppetserver_root" => "modules/#{mod}/test_dir/test_dir_file"
-            }
-          ]
-        }
-
-        it 'returns the file metadata of the file' do
-          get("/project_file_metadatas/project_module/test_dir/test_dir_file?versioned_project=#{versioned_project}")
-          file_metadatas = JSON.parse(last_response.body)
-          # I don't know why the mode returned by puppet is not the same as the mode returned
-          # from ruby's File.stat(file_in_dir) function. But these tests probably don't need to
-          # cover the specifics of what puppet returns, plus we don't use this metadata in
-          # orch anyway, so ignore the mode part of the respose.
-          #   - Sean P. McDonald 10/15/2020
-          file_metadatas.each do |entry|
-            entry.delete("mode")
-          end
-          expect(file_metadatas).to eq(expected_response)
-          expect(last_response.status).to eq(200)
-        end
-      end
-    end
-
-    context "getting files from the 'scripts/' directory" do
-      let(:test_file) { File.join(modpath, 'scripts', 'script.sh') }
-      let(:file_request) { 'scripts/script.sh' }
-      let(:test_file_puppetserver_root) { "scripts/#{mod}/script.sh" }
-      let(:test_dir) { File.join(modpath, 'scripts', 'test_dir') }
-      let(:dir_request) { 'scripts/test_dir/' }
-      let(:test_dir_puppetserver_root) { "scripts/#{mod}/test_dir/" }
-      let(:file_in_dir) { File.join(test_dir, 'dir_script.sh') }
-
-      include_examples 'valid data'
-    end
-
-    context "getting files from the 'files/' directory with specific ref" do
-      let(:test_file) { File.join(modpath, 'files', 'test_file') }
-      let(:file_request) { 'files/test_file' }
-      let(:test_file_puppetserver_root) { "modules/#{mod}/test_file" }
-      let(:test_dir) { File.join(modpath, 'files', 'test_dir') }
-      let(:dir_request) { 'files/test_dir' }
-      let(:test_dir_puppetserver_root) { "modules/#{mod}/test_dir" }
-      let(:file_in_dir) { File.join(test_dir, 'test_dir_file') }
-
-      include_examples 'valid data'
-    end
-
-    it 'without specific Puppet file reference does not find script' do
-      get("/project_file_metadatas/project_module/script.sh?versioned_project=#{versioned_project}")
-      expect(last_response.status).to eq(400)
-      error = JSON.parse(last_response.body)
-      expect(error['msg']).to match(/'script.sh' does not exist/)
-    end
-
-    it 'prefers loading from files/files/ when file also exists in files/' do
-      abs_path = File.join(modpath, 'files', 'files', 'duplicate')
-      get("/project_file_metadatas/project_module/files/duplicate?versioned_project=#{versioned_project}")
-      file_metadatas = JSON.parse(last_response.body)
-      expect(file_metadatas.first['path']).to eq(abs_path)
-      expect(file_metadatas.first['puppetserver_root']).to eq('modules/project_module/files/duplicate')
-      expect(last_response.status).to eq(200)
-    end
-  end
-
-  describe '/project_facts_plugin_tarball' do
-    let(:versioned_project) { 'bolt_server_test_project' }
-
-    it 'returns 400 if versioned_project is not specified' do
-      get('/project_facts_plugin_tarball')
-      expect(last_response.status).to eq(400)
-      error = JSON.parse(last_response.body)
-      expect(error['msg']).to match("'versioned_project' is a required argument")
-    end
-
-    it 'returns 400 if versioned_project does not exist' do
-      get("/project_facts_plugin_tarball?versioned_project=not_a_real_project")
-      expect(last_response.status).to eq(400)
-      error = JSON.parse(last_response.body)
-      expect(error['msg']).to match(/not_a_real_project' does not exist/)
-    end
-
-    it "returns a base64 encoded tar archive of the project's plugin code for custom facts" do
-      get("/project_facts_plugin_tarball?versioned_project=#{versioned_project}")
-      expect(last_response.status).to eq(200)
-      Dir.mktmpdir("project_facts_plugin_tarball_test") do |tmpdir|
-        unpacked_pluginsdir = unpack_tarball(JSON.parse(last_response.body), tmpdir)
-
-        unpacked_plugin_files = list_all_files(unpacked_pluginsdir)
-        expected_plugin_files = [
-          'plugin_module/lib/puppet/functions/some_function.rb',
-          'pluginfacts_module/facts.d/external_fact.sh'
-        ]
-        expect(unpacked_plugin_files.sort).to eql(expected_plugin_files.sort)
-
-        # Make sure the contents also match
-        unpacked_plugin_files.each do |plugin_file|
-          file_fixture_path = File.join(
-            project_dir,
-            versioned_project,
-            'modules',
-            plugin_file
-          )
-          file_unpacked_path = File.join(
-            unpacked_pluginsdir,
-            plugin_file
-          )
-
-          expected_content = File.read(file_fixture_path)
-          actual_content = File.read(file_unpacked_path)
-
-          expect(expected_content).to eql(actual_content)
-        end
-      end
-    end
-  end
-
-  describe '/project_plugin_tarball' do
-    let(:versioned_project) { 'bolt_server_test_project' }
-
-    it 'returns 400 if versioned_project is not specified' do
-      get('/project_plugin_tarball')
-      expect(last_response.status).to eq(400)
-      error = JSON.parse(last_response.body)
-      expect(error['msg']).to match("'versioned_project' is a required argument")
-    end
-
-    it 'returns 400 if versioned_project does not exist' do
-      get("/project_plugin_tarball?versioned_project=not_a_real_project")
-      expect(last_response.status).to eq(400)
-      error = JSON.parse(last_response.body)
-      expect(error['msg']).to match(/not_a_real_project' does not exist/)
-    end
-
-    it "returns a base64 encoded tar archive of the project's plugin code" do
-      get("/project_plugin_tarball?versioned_project=#{versioned_project}")
-      expect(last_response.status).to eq(200)
-      Dir.mktmpdir("project_plugin_tarball_test") do |tmpdir|
-        unpacked_pluginsdir = unpack_tarball(JSON.parse(last_response.body), tmpdir)
-
-        unpacked_plugin_files = list_all_files(unpacked_pluginsdir)
-        expected_plugin_files = [
-          'plugin_module/lib/puppet/functions/some_function.rb',
-          'plugin_module/types/some_alias.pp',
-          'pluginfacts_module/facts.d/external_fact.sh',
-          'project_module/files/test_dir/test_dir_file',
-          'project_module/files/test_file',
-          'project_module/files/duplicate',
-          'project_module/files/files/duplicate',
-          'project_module/scripts/script.sh',
-          'project_module/scripts/test_dir/dir_script.sh'
-        ]
-        expect(unpacked_plugin_files.sort).to eql(expected_plugin_files.sort)
-
-        # Make sure the contents also match
-        unpacked_plugin_files.each do |plugin_file|
-          file_fixture_path = File.join(
-            project_dir,
-            versioned_project,
-            'modules',
-            plugin_file
-          )
-          file_unpacked_path = File.join(
-            unpacked_pluginsdir,
-            plugin_file
-          )
-
-          expected_content = File.read(file_fixture_path)
-          actual_content = File.read(file_unpacked_path)
-
-          expect(expected_content).to eql(actual_content)
-        end
-      end
-    end
-  end
   end
 end
diff --git a/spec/fixtures/api_server_configs/global-bolt-server.conf b/spec/fixtures/api_server_configs/global-bolt-server.conf
index b47c545b83..d2e6e3156b 100644
--- a/spec/fixtures/api_server_configs/global-bolt-server.conf
+++ b/spec/fixtures/api_server_configs/global-bolt-server.conf
@@ -12,5 +12,4 @@ bolt-server: {
   logfile: /var/log/global
   allowlist: [a]
   concurrency: 12
-  projects-dir: "/tmp/foo"
 }
diff --git a/spec/fixtures/api_server_configs/required-bolt-server.conf b/spec/fixtures/api_server_configs/required-bolt-server.conf
index b1dddf07ad..7607e01af3 100644
--- a/spec/fixtures/api_server_configs/required-bolt-server.conf
+++ b/spec/fixtures/api_server_configs/required-bolt-server.conf
@@ -3,5 +3,4 @@ bolt-server: {
   ssl-key: "spec/fixtures/ssl/key.pem"
   ssl-ca-cert: "spec/fixtures/ssl/ca.pem"
   file-server-uri: "https://localhost:8140"
-  projects-dir: "/tmp/foo"
 }
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/bolt-project.yaml b/spec/fixtures/bolt_server/projects/bolt_server_test_project/bolt-project.yaml
deleted file mode 100644
index e784483c7d..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/bolt-project.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-name: 'bolt_server_test_project'
-plans:
-  - 'bolt_server_test_project'
-  - '*allowlist*'
-tasks:
-  - 'bolt_server_test_project'
-  - '*allowlist*'
\ No newline at end of file
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/apply_catalog.json b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/apply_catalog.json
deleted file mode 100644
index e4ff7e661f..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/apply_catalog.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
-  "description": "Helper to apply a catalog from Bolt (for internal use only)",
-  "input_method": "stdin",
-  "private": true,
-  "supports_noop": true,
-  "implementations": [
-    { "name": "apply_catalog.rb" },
-    { "name": "apply_catalog.rb", "remote": true}
-  ],
-  "parameters": {
-    "catalog": {
-      "description": "Catalog to be applied",
-      "type": "Data",
-      "sensitive": true
-    },
-    "plugins": {
-      "description": "Plugin bundle to use when applying resources",
-      "type": "String",
-      "sensitive": true
-    },
-    "apply_settings": {
-      "description": "Puppet Settings to use during catalog application for example `show_diff`",
-      "type": "Optional[Hash]",
-      "sensitive": true
-    }
-  }
-}
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/apply_catalog.rb b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/apply_catalog.rb
deleted file mode 100755
index ccf6d62232..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/apply_catalog.rb
+++ /dev/null
@@ -1,117 +0,0 @@
-#! /opt/puppetlabs/puppet/bin/ruby
-# frozen_string_literal: true
-
-require 'fileutils'
-require 'json'
-require 'puppet'
-require 'puppet/configurer'
-require 'puppet/module_tool/tar'
-require 'securerandom'
-require 'tempfile'
-
-args = JSON.parse(ARGV[0] ? File.read(ARGV[0]) : $stdin.read)
-
-# Create temporary directories for all core Puppet settings so we don't clobber
-# existing state or read from puppet.conf. Also create a temporary modulepath.
-# Additionally include rundir, which gets its own initialization.
-puppet_root = Dir.mktmpdir
-moduledir = File.join(puppet_root, 'modules')
-Dir.mkdir(moduledir)
-cli = (Puppet::Settings::REQUIRED_APP_SETTINGS + [:rundir]).flat_map do |setting|
-  ["--#{setting}", File.join(puppet_root, setting.to_s.chomp('dir'))]
-end
-cli << '--modulepath' << moduledir
-Puppet.initialize_settings(cli)
-
-# Avoid extraneous output
-Puppet[:report] = false
-
-# Make sure to apply the catalog
-Puppet[:noop] = args['_noop'] || false
-args['apply_settings'].each do |setting, value|
-  Puppet[setting.to_sym] = value
-end
-
-Puppet[:default_file_terminus] = :file_server
-
-exit_code = 0
-begin
-  # This happens implicitly when running the Configurer, but we make it explicit here. It creates the
-  # directories we configured earlier.
-  Puppet.settings.use(:main)
-
-  Tempfile.open('plugins.tar.gz') do |plugins|
-    File.binwrite(plugins, Base64.decode64(args['plugins']))
-    user = Etc.getpwuid.nil? ? Etc.getlogin : Etc.getpwuid.name
-    Puppet::ModuleTool::Tar.instance.unpack(plugins, moduledir, user)
-  end
-
-  env = Puppet.lookup(:environments).get('production')
-  # Needed to ensure features are loaded
-  env.each_plugin_directory do |dir|
-    $LOAD_PATH << dir unless $LOAD_PATH.include?(dir)
-  end
-
-  if (conn_info = args['_target'])
-    unless (type = conn_info['remote-transport'])
-      puts "Cannot execute a catalog for a remote target without knowing it's the remote-transport type."
-      exit 1
-    end
-
-    begin
-      require 'puppet/resource_api/transport'
-    rescue LoadError
-      msg = "Could not load 'puppet/resource_api/transport', puppet-resource_api "\
-            "gem version 1.8.0 or greater is required on the proxy target"
-      puts msg
-      exit 1
-    end
-
-    # Transport.connect will modify this hash!
-    transport_conn_info = conn_info.transform_keys(&:to_sym)
-
-    transport = Puppet::ResourceApi::Transport.connect(type, transport_conn_info)
-    Puppet::ResourceApi::Transport.inject_device(type, transport)
-
-    Puppet[:facts_terminus] = :network_device
-    Puppet[:certname] = conn_info['name']
-  end
-
-  # Ensure custom facts are available for provider suitability tests
-  facts = Puppet::Node::Facts.indirection.find(SecureRandom.uuid, environment: env)
-
-  report = if Puppet::Util::Package.versioncmp(Puppet.version, '5.0.0') > 0
-             Puppet::Transaction::Report.new
-           else
-             Puppet::Transaction::Report.new('apply')
-           end
-
-  overrides = { current_environment: env,
-                loaders: Puppet::Pops::Loaders.new(env) }
-  overrides[:network_device] = true if args['_target']
-
-  Puppet.override(overrides) do
-    catalog = Puppet::Resource::Catalog.from_data_hash(args['catalog'])
-    catalog.environment = env.name.to_s
-    catalog.environment_instance = env
-    if defined?(Puppet::Pops::Evaluator::DeferredResolver)
-      # Only available in Puppet 6
-      Puppet::Pops::Evaluator::DeferredResolver.resolve_and_replace(facts, catalog)
-    end
-    catalog = catalog.to_ral
-
-    configurer = Puppet::Configurer.new
-    configurer.run(catalog: catalog, report: report, pluginsync: false)
-  end
-
-  puts JSON.pretty_generate(report.to_data_hash)
-  exit_code = report.exit_status != 1
-ensure
-  begin
-    FileUtils.remove_dir(puppet_root)
-  rescue Errno::ENOTEMPTY => e
-    $stderr.puts("Could not cleanup temporary directory: #{e}")
-  end
-end
-
-exit exit_code
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/custom_facts.json b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/custom_facts.json
deleted file mode 100644
index 28c498c628..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/custom_facts.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-  "description": "Helper to retrieve custom facts from Bolt (for internal use only)",
-  "input_method": "stdin",
-  "private": true,
-  "supports_noop": true,
-  "parameters": {
-    "plugins": {
-      "description": "Custom facts bundle to use when applying resources",
-      "type": "String",
-      "sensitive": true
-    }
-  }
-}
-
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/custom_facts.rb b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/custom_facts.rb
deleted file mode 100755
index db65e93c37..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/apply_helpers/tasks/custom_facts.rb
+++ /dev/null
@@ -1,63 +0,0 @@
-#! /opt/puppetlabs/puppet/bin/ruby
-# frozen_string_literal: true
-
-require 'json'
-require 'puppet'
-require 'puppet/module_tool/tar'
-require 'tempfile'
-
-args = JSON.parse($stdin.read)
-
-Dir.mktmpdir do |puppet_root|
-  # Create temporary directories for all core Puppet settings so we don't clobber
-  # existing state or read from puppet.conf. Also create a temporary modulepath.
-  moduledir = File.join(puppet_root, 'modules')
-  Dir.mkdir(moduledir)
-  cli = Puppet::Settings::REQUIRED_APP_SETTINGS.flat_map do |setting|
-    ["--#{setting}", File.join(puppet_root, setting.to_s.chomp('dir'))]
-  end
-  cli << '--modulepath' << moduledir
-  Puppet.initialize_settings(cli)
-
-  Tempfile.open('plugins.tar.gz') do |plugins|
-    File.binwrite(plugins, Base64.decode64(args['plugins']))
-    user = Etc.getpwuid.nil? ? Etc.getlogin : Etc.getpwuid.name
-    Puppet::ModuleTool::Tar.instance.unpack(plugins, moduledir, user)
-  end
-
-  env = Puppet.lookup(:environments).get('production')
-  env.each_plugin_directory do |dir|
-    $LOAD_PATH << dir unless $LOAD_PATH.include?(dir)
-  end
-
-  if (conn_info = args['_target'])
-    unless (type = conn_info['remote-transport'])
-      puts "Cannot collect facts for a remote target without knowing the remote-transport type."
-      exit 1
-    end
-
-    begin
-      require 'puppet/resource_api/transport'
-    rescue LoadError
-      msg = "Could not load 'puppet/resource_api/transport', puppet-resource_api "\
-            "gem version 1.8.0 or greater is required on the proxy target"
-      puts msg
-      exit 1
-    end
-
-    # Transport.connect will modify this hash!
-    transport_conn_info = conn_info.transform_keys(&:to_sym)
-    transport = Puppet::ResourceApi::Transport.connect(type, transport_conn_info)
-    Puppet::ResourceApi::Transport.inject_device(type, transport)
-
-    Puppet[:facts_terminus] = :network_device
-    Puppet[:certname] = conn_info['name']
-  end
-
-  facts = Puppet::Node::Facts.indirection.find(SecureRandom.uuid, environment: env)
-
-  facts.name = facts.values['clientcert']
-  puts(facts.values.to_json)
-end
-
-exit 0
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/fake_puppet_agent/tasks/install.rb b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/fake_puppet_agent/tasks/install.rb
deleted file mode 100755
index 2b657f2cbe..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/fake_puppet_agent/tasks/install.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/opt/puppetlabs/puppet/bin/ruby
-# frozen_string_literal: true
-
-require 'json'
-
-params = JSON.parse($stdin.read)
-
-if params['fail']
-  exit 1
-else
-  output = { 'installed' => 'agent' }
-  puts output.to_json
-end
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/plugin_module/lib/puppet/functions/some_function.rb b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/plugin_module/lib/puppet/functions/some_function.rb
deleted file mode 100644
index c9a4ddec17..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/plugin_module/lib/puppet/functions/some_function.rb
+++ /dev/null
@@ -1,2 +0,0 @@
-# frozen_string_literal: true
-# some function
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/plugin_module/types/some_alias.pp b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/plugin_module/types/some_alias.pp
deleted file mode 100644
index 8e01d238ef..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/plugin_module/types/some_alias.pp
+++ /dev/null
@@ -1 +0,0 @@
-type Alias = Integer
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/pluginfacts_module/facts.d/external_fact.sh b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/pluginfacts_module/facts.d/external_fact.sh
deleted file mode 100644
index 64a727314c..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/pluginfacts_module/facts.d/external_fact.sh
+++ /dev/null
@@ -1 +0,0 @@
-# external fact
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/duplicate b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/duplicate
deleted file mode 100644
index 4561119d1c..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/duplicate
+++ /dev/null
@@ -1 +0,0 @@
-Watermelon sugar
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/files/duplicate b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/files/duplicate
deleted file mode 100644
index ea431b0f3c..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/files/duplicate
+++ /dev/null
@@ -1 +0,0 @@
-high
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/test_dir/test_dir_file b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/test_dir/test_dir_file
deleted file mode 100644
index 31e446dbb4..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/test_dir/test_dir_file
+++ /dev/null
@@ -1 +0,0 @@
-foobarbaz
\ No newline at end of file
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/test_file b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/test_file
deleted file mode 100644
index f6ea049518..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/files/test_file
+++ /dev/null
@@ -1 +0,0 @@
-foobar
\ No newline at end of file
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/scripts/script.sh b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/scripts/script.sh
deleted file mode 100644
index 0522f66644..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/scripts/script.sh
+++ /dev/null
@@ -1 +0,0 @@
-echo 'Watermelon'
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/scripts/test_dir/dir_script.sh b/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/scripts/test_dir/dir_script.sh
deleted file mode 100644
index 3e6cc1fbbe..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/modules/project_module/scripts/test_dir/dir_script.sh
+++ /dev/null
@@ -1 +0,0 @@
-echo 'sugar'
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/allowlist_glob.pp b/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/allowlist_glob.pp
deleted file mode 100644
index 68cd174351..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/allowlist_glob.pp
+++ /dev/null
@@ -1,7 +0,0 @@
-# Project plan testing
-plan bolt_server_test_project::allowlist_glob(
-  String $foo
-) {
-  return $foo
-}
-
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/init.pp b/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/init.pp
deleted file mode 100644
index 193386fd03..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/init.pp
+++ /dev/null
@@ -1,7 +0,0 @@
-# Project plan testing
-plan bolt_server_test_project(
-  String $foo
-) {
-  return $foo
-}
-
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/simple_plan.pp b/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/simple_plan.pp
deleted file mode 100644
index 18eff26b1d..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/plans/simple_plan.pp
+++ /dev/null
@@ -1,4 +0,0 @@
-# Simple plan testing
-plan bolt_server_test_project::simple_plan(String $foo) {
-  return $foo
-}
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/allowlist_glob.sh b/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/allowlist_glob.sh
deleted file mode 100644
index 42a5c5226f..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/allowlist_glob.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env sh
-
-whoami
\ No newline at end of file
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/hidden.json b/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/hidden.json
deleted file mode 100644
index 3de4ac6b6c..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/hidden.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "description": "Project task testing"
-}
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/hidden.sh b/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/hidden.sh
deleted file mode 100644
index 42a5c5226f..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/hidden.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env sh
-
-whoami
\ No newline at end of file
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/init.json b/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/init.json
deleted file mode 100644
index 3de4ac6b6c..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/init.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "description": "Project task testing"
-}
diff --git a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/init.sh b/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/init.sh
deleted file mode 100644
index 42a5c5226f..0000000000
--- a/spec/fixtures/bolt_server/projects/bolt_server_test_project/tasks/init.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env sh
-
-whoami
\ No newline at end of file
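Note on the removed tarball endpoints: the deleted specs above exercise /project_plugin_tarball and /project_facts_plugin_tarball only through the unpack_tarball spec helper, so the payload shape is easy to miss when reading the diff. The sketch below is illustrative only and is not part of this change. It assumes, based on those specs and on the plugins.tar.gz handling in the deleted apply_catalog.rb and custom_facts.rb tasks, that each endpoint returned a JSON string wrapping a base64-encoded, gzipped tar archive; the helper name unpack_plugin_tarball and the destination handling are invented for the example.

# Illustrative sketch (not part of this diff): unpacking the payload the removed
# tarball endpoints returned. Assumes a JSON-encoded base64 string wrapping a
# gzipped tarball, as the deleted specs and fixture tasks above suggest.
require 'base64'
require 'fileutils'
require 'json'
require 'rubygems/package'
require 'stringio'
require 'zlib'

# Hypothetical helper, named for this example only.
def unpack_plugin_tarball(response_body, dest_dir)
  # The deleted specs call JSON.parse(last_response.body) and treat the result
  # as a base64 string, so do the same here before decoding.
  tarball = Base64.decode64(JSON.parse(response_body))
  Zlib::GzipReader.wrap(StringIO.new(tarball)) do |gz|
    Gem::Package::TarReader.new(gz) do |tar|
      tar.each do |entry|
        next unless entry.file?
        path = File.join(dest_dir, entry.full_name)
        FileUtils.mkdir_p(File.dirname(path))
        File.binwrite(path, entry.read)
      end
    end
  end
end

This is essentially the reverse of what the deleted fixture tasks do: they Base64.decode64 their 'plugins' parameter and untar it into a temporary modulepath before running Puppet.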