Compare commits
26 Commits
2.4.1.dev+...main

| Author | SHA1 | Date |
|---|---|---|
|  | 115c4a7f05 |  |
|  | 30a97de0c4 |  |
|  | f2e0d3b2f3 |  |
|  | f4596baa11 |  |
|  | d8fdc500b7 |  |
|  | c239e37b2f |  |
|  | f2092af607 |  |
|  | 7d8b5e45fc |  |
|  | d5a28aea95 |  |
|  | 720b68f59f |  |
|  | 600eef07c5 |  |
|  | 3550c9b427 |  |
|  | f7185bcd02 |  |
|  | 1f7c959938 |  |
|  | 35e823ca12 |  |
|  | 66c39b70f5 |  |
|  | d7ec3bffe9 |  |
|  | 5a7fd6b302 |  |
|  | 4ca49e5776 |  |
|  | 3f00495a52 |  |
|  | 55ec0d4391 |  |
|  | 2b7a200383 |  |
|  | 8809fb6b68 |  |
|  | 0a8f99df52 |  |
|  | 886aab2ff9 |  |
|  | 606f825eaa |  |
.github/workflows/codeql.yml (vendored, new file, 84 lines)
@@ -0,0 +1,84 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]
  schedule:
    - cron: '20 19 * * 0'

jobs:
  analyze:
    name: Analyze
    # Runner size impacts CodeQL analysis time. To learn more, please see:
    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
    #   - https://gh.io/supported-runners-and-hardware-resources
    #   - https://gh.io/using-larger-runners
    # Consider using larger runners for possible analysis time improvements.
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
    permissions:
      # required for all workflows
      security-events: write

      # only required for workflows in private repositories
      actions: read
      contents: read

    strategy:
      fail-fast: false
      matrix:
        language: [ 'go', 'ruby' ]
        # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
        # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
        # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v4

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v3
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.

        # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
        # queries: security-extended,security-and-quality


    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v3

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

    #   If the Autobuild fails above, remove it and uncomment the following three lines.
    #   modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

    # - run: |
    #     echo "Run, Build Application using script"
    #     ./location_of_script_within_repo/buildscript.sh

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v3
      with:
        category: "/language:${{matrix.language}}"

.github/workflows/criticality_score.yml (vendored, new file, 39 lines)
@@ -0,0 +1,39 @@
name: "criticality score"
on:
  push:
    branches:
      - main

  workflow_dispatch:
    inputs:
      github_auth_token:
        description: 'github auth token'
        required: true

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Installing go
        uses: actions/setup-go@v4
        with:
          go-version: '1.21'

      - name: Installing criticality score
        run: |
          go install github.com/ossf/criticality_score/cmd/criticality_score@latest

      - name: Generate criticality score
        run: |
          export GITHUB_TOKEN=$INPUT_TOKEN
          criticality_score -depsdev-disable -format json https://github.com/${{ github.repository }}
        env:
          INPUT_TOKEN: ${{ github.event.inputs.github_auth_token }}

.github/workflows/go-spectest-skipped.yml (vendored, 2 lines changed)
@@ -15,7 +15,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        ruby: ['3.0', '3.1', '3.2']
+        ruby: ['3.0', '3.1', '3.2', '3.3']
     name: Vagrant acceptance tests (Ruby ${{ matrix.ruby }})
     steps:
       - name: Stubbed for skip

.github/workflows/go-spectest.yml (vendored, 2 lines changed)
@@ -19,7 +19,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        ruby: ['3.0', '3.1', '3.2']
+        ruby: ['3.0', '3.1', '3.2', '3.3']
     name: Vagrant acceptance tests (Ruby ${{ matrix.ruby }})
     steps:
       - name: Code Checkout

.github/workflows/go-testing-skipped.yml (vendored, 2 lines changed)
@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        ruby: ['3.0', '3.1', '3.2']
+        ruby: ['3.0', '3.1', '3.2', '3.3']
     name: Vagrant unit tests on Go (Ruby ${{ matrix.ruby }})
     steps:
       - name: Stubbed for skip

.github/workflows/go-testing.yml (vendored, 2 lines changed)
@@ -26,7 +26,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        ruby: ['3.0', '3.1', '3.2']
+        ruby: ['3.0', '3.1', '3.2', '3.3']
     name: Vagrant unit tests on Go (Ruby ${{ matrix.ruby }})
     steps:
       - name: Code Checkout

.github/workflows/microsoft-sbom.yml (vendored, new file, 34 lines)
@@ -0,0 +1,34 @@
name: "Microsft SBOM"
on:
  push:
    branches: [ "main" ]
  pull_request:
    branches: [ "main" ]

jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    permissions:
      actions: read
      contents: read

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Installing SBOM
        run: |
          curl -Lo $RUNNER_TEMP/sbom-tool https://github.com/microsoft/sbom-tool/releases/latest/download/sbom-tool-linux-x64
          chmod +x $RUNNER_TEMP/sbom-tool

      - name: Generate SBOM
        run: |
          mkdir SBOMOUTPUT
          $RUNNER_TEMP/sbom-tool generate -b SBOMOUTPUT/ -bc . -pn ${{ github.repository }} -pv alpha -ps wipro -nsb https://github.com/hashicorp/vagrant

      - name: Upload SBOM artifact
        uses: actions/upload-artifact@v3
        with:
          name: sbom
          path: SBOMOUTPUT/_manifest/spdx_2.2/manifest.spdx.json

.github/workflows/scorecard.yml (vendored, new file, 70 lines)
@@ -0,0 +1,70 @@
# This workflow uses actions that are not certified by GitHub. They are provided
# by a third-party and are governed by separate terms of service, privacy
# policy, and support documentation.

name: Scorecard supply-chain security
on:
  # For Branch-Protection check. Only the default branch is supported. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
  branch_protection_rule:
  # To guarantee Maintained check is occasionally updated. See
  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
  schedule:
    - cron: '36 19 * * 6'

# Declare default permissions as read only.
permissions: read-all

jobs:
  analysis:
    name: Scorecard analysis
    runs-on: ubuntu-latest
    permissions:
      # Needed to upload the results to code-scanning dashboard.
      security-events: write
      # Needed to publish results and get a badge (see publish_results below).
      id-token: write
      # Uncomment the permissions below if installing in a private repository.
      # contents: read
      # actions: read

    steps:
      - name: "Checkout code"
        uses: actions/checkout@93ea575cb5d8a053eaa0ac8fa3b40d7e05a33cc8 # v3.1.0
        with:
          persist-credentials: false

      - name: "Run analysis"
        uses: ossf/scorecard-action@e38b1902ae4f44df626f11ba0734b14fb91f8f86 # v2.1.2
        with:
          results_file: results.sarif
          results_format: sarif
          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
          # - you want to enable the Branch-Protection check on a *public* repository, or
          # - you are installing Scorecard on a *private* repository
          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
          # repo_token: ${{ secrets.SCORECARD_TOKEN }}

          # Public repositories:
          #   - Publish results to OpenSSF REST API for easy access by consumers
          #   - Allows the repository to include the Scorecard badge.
          #   - See https://github.com/ossf/scorecard-action#publishing-results.
          # For private repositories:
          #   - `publish_results` will always be set to `false`, regardless
          #     of the value entered here.
          publish_results: true

      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
      # format to the repository Actions tab.
      - name: "Upload artifact"
        uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0
        with:
          name: SARIF file
          path: results.sarif
          retention-days: 5

      # Upload the results to GitHub's code scanning dashboard.
      - name: "Upload to code-scanning"
        uses: github/codeql-action/upload-sarif@17573ee1cc1b9d061760f3a006fc4aac4f944fd5 # v2.2.4
        with:
          sarif_file: results.sarif

.github/workflows/testing-skipped.yml (vendored, 2 lines changed)
@@ -19,7 +19,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        ruby: [ '3.0', '3.1', '3.2' ]
+        ruby: [ '3.0', '3.1', '3.2', '3.3' ]
     name: Vagrant unit tests on Ruby ${{ matrix.ruby }}
     steps:
       - name: Stubbed for skip

.github/workflows/testing.yml (vendored, 2 lines changed)
@@ -32,7 +32,7 @@ jobs:
     continue-on-error: true
     strategy:
       matrix:
-        ruby: [ '3.0', '3.1', '3.2' ]
+        ruby: [ '3.0', '3.1', '3.2', '3.3' ]
     name: Vagrant unit tests on Ruby ${{ matrix.ruby }}
     steps:
       - name: Code Checkout

CHANGELOG.md (19 lines changed)
@@ -1,15 +1,32 @@
-## 2.4.1.dev (UNRELEASED)
+## 2.4.2.dev (UNRELEASED)
 
+FEATURES:
+
+IMPROVEMENTS:
+
+BUG FIXES:
+
+- provider/docker: Prevent error if network configuration data is missing [GH-13337]
+
+VAGRANT-GO:
+
+## 2.4.1 (January 19, 2024)
+
 FEATURES:
 
 IMPROVEMENTS:
 
 - communicator/ssh: Support ECDSA type keys for insecure key replacement [GH-13327]
+- communicator/ssh: Inspect guest for supported key types [GH-13334]
+- core: Update Ruby constraint to allow Ruby 3.3 [GH-13335]
+- core/bundler: Force strict dependencies for default gems [GH-13336]
 - provisioner/ansible: Support pip installation for RHEL >= 8 [GH-13326]
 - util/keypair: Add support for ECDSA keys [GH-13327]
 
 BUG FIXES:
 
+- command/plugin: Fix plugin extension installation on Windows [GH-13328]
+- communicator/ssh: Fix private key writing on Windows [GH-13329]
 - core: Fix Vagrant SSL helper detection on macOS [GH-13277]
 - core: Fix box collection sorting [GH-#13320]
 - util/platform: Fix architecture mapping for Windows [GH-13278]

LICENSE (4 lines changed)
@@ -4,11 +4,11 @@ License text copyright (c) 2020 MariaDB Corporation Ab, All Rights Reserved.
 Parameters
 
 Licensor:             HashiCorp, Inc.
-Licensed Work:        Vagrant 2.4.1.dev or later. The Licensed Work is (c) 2024 HashiCorp, Inc.
+Licensed Work:        Vagrant 2.4.2.dev or later. The Licensed Work is (c) %YEAR% HashiCorp, Inc.
 Additional Use Grant: You may make production use of the Licensed Work, provided
                       Your use does not include offering the Licensed Work to third
                       parties on a hosted or embedded basis in order to compete with
-                      HashiCorp's paid version(s) of the Licensed Work. For purposes
+                      HashiCorp’s paid version(s) of the Licensed Work. For purposes
                       of this license:
 
 A "competitive offering" is a Product that is offered to third

@@ -1,11 +1,12 @@
 # Copyright (c) HashiCorp, Inc.
 # SPDX-License-Identifier: BUSL-1.1
 
 require "log4r"
 
 # Add patches to log4r to support trace level
 require "vagrant/patches/log4r"
 require "vagrant/patches/net-ssh"
+require "vagrant/patches/rubygems"
 
 # Set our log levels and include trace
 require 'log4r/configurator'
 Log4r::Configurator.custom_levels(*(["TRACE"] + Log4r::Log4rConfig::LogLevels))

@@ -523,10 +523,20 @@ module Vagrant
         if Vagrant.strict_dependency_enforcement
           @logger.debug("Enabling strict dependency enforcement")
           plugin_deps += vagrant_internal_specs.map do |spec|
-            next if system_plugins.include?(spec.name)
-            # If this spec is for a default plugin included in
-            # the ruby stdlib, ignore it
-            next if spec.default_gem?
+            # NOTE: When working within bundler, skip any system plugins and
+            # default gems. However, when not within bundler (in the installer)
+            # include them as strict dependencies to prevent the resolver from
+            # attempting to create a solution with a newer version. The request
+            # set does allow for resolving conservatively but it can't be set
+            # from the public API (requires an instance variable set on the resolver
+            # instance) so strict dependencies are used instead.
+            if Vagrant.in_bundler?
+              next if system_plugins.include?(spec.name)
+              # # If this spec is for a default plugin included in
+              # # the ruby stdlib, ignore it
+              next if spec.default_gem?
+            end
+
             # If we are not running within the installer and
             # we are not within a bundler environment then we
             # only want activated specs

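The NOTE comment above is the substance of this change: inside a bundler environment default gems are still skipped, but in the installer they are turned into strict dependencies so the resolver cannot choose a newer release. As a rough sketch of what "strict" means in RubyGems terms (illustrative only, not the actual bundler.rb code; the gem name is just an example):

```ruby
# Sketch: a "strict" dependency pins a spec to its exact version with an "="
# requirement, leaving the resolver no room to select a newer release.
require "rubygems"

def strict_dependency_for(spec)
  Gem::Dependency.new(spec.name, "= #{spec.version}")
end

# "json" ships as a default gem, so a spec for it is always available.
spec = Gem::Specification.find_by_name("json")
puts strict_dependency_for(spec)
# e.g. => json (= 2.6.3), depending on the installed Ruby
```
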
lib/vagrant/patches/builder/mkmf.rb (new file, 116 lines)
@@ -0,0 +1,116 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

# This custom mkmf.rb file is used on Windows platforms
# to handle common path related build failures where
# a space is included in the path. The default installation
# location being in Program Files results in most many
# extensions failing to build. These patches will attempt
# to find unquoted paths in flags and quote them prior to
# usage.

# Start with locating the real mkmf.rb file and
# loading it
mkmf_paths = $LOAD_PATH.find_all { |x|
  !x.start_with?(__dir__) &&
    File.exist?(File.join(x, "mkmf.rb"))
}.uniq

# At this point the path collection should only consist
# of a single entry. If there's more than one, load all
# of them but include a warning message that more than
# one was encountered. If none are found, then something
# bad is going on so just bail.
if mkmf_paths.size > 1
  $stderr.puts "WARNING: Multiple mkmf.rb files located: #{mkmf_paths.inspect}"
elsif mkmf_paths.empty?
  raise "Failed to locate mkmf.rb file"
end

mkmf_paths.each do |mpath|
  require File.join(mpath, "mkmf.rb")
end

# Attempt to detect and quote Windos paths found within
# the given string of flags
#
# @param [String] flags Compiler/linker flags
# @return [String] flags with paths quoted
def flag_cleaner(flags)
  parts = flags.split(" -")
  parts.map! do |p|
    if p !~ %r{[A-Za-z]:(/|\\)}
      next p
    elsif p =~ %r{"[A-Za-z]:(/|\\).+"$}
      next p
    end

    p.gsub(%r{([A-Za-z]:(/|\\).+)$}, '"\1"')
  end

  parts.join(" -")
end

# Check values defined for CFLAGS, CPPFLAGS, LDFLAGS,
# and INCFLAGS for unquoted Windows paths and quote
# them.
def clean_flags!
  $CFLAGS = flag_cleaner($CFLAGS)
  $CPPFLAGS = flag_cleaner($CPPFLAGS)
  $LDFLAGS = flag_cleaner($LDFLAGS)
  $INCFLAGS = flag_cleaner($INCFLAGS)
end

# Since mkmf loads the MakeMakefile module directly into the
# current scope, apply patches directly in the scope
def vagrant_create_makefile(*args)
  clean_flags!

  ruby_create_makefile(*args)
end
alias :ruby_create_makefile :create_makefile
alias :create_makefile :vagrant_create_makefile

def vagrant_append_cflags(*args)
  result = ruby_append_cflags(*args)
  clean_flags!
  result
end
alias :ruby_append_cflags :append_cflags
alias :append_cflags :vagrant_append_cflags

def vagrant_append_cppflags(*args)
  result = ruby_append_cppflags(*args)
  clean_flags!
  result
end
alias :ruby_append_cppflags :append_cppflags
alias :append_cppflags :vagrant_append_cppflags

def vagrant_append_ldflags(*args)
  result = ruby_append_ldflags(*args)
  clean_flags!
  result
end
alias :ruby_append_ldflags :append_ldflags
alias :append_ldflags :vagrant_append_ldflags

def vagrant_cc_config(*args)
  clean_flags!
  ruby_cc_config(*args)
end
alias :ruby_cc_config :cc_config
alias :cc_config :vagrant_cc_config

def vagrant_link_config(*args)
  clean_flags!
  ruby_link_config(*args)
end
alias :ruby_link_config :link_config
alias :link_config :vagrant_link_config

# Finally, always append the flags that Vagrant has
# defined via the environment
append_cflags(ENV["CFLAGS"]) if ENV["CFLAGS"]
append_cppflags(ENV["CPPFLAGS"]) if ENV["CPPFLAGS"]
append_ldflags(ENV["LDFLAGS"]) if ENV["LDFLAGS"]

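Since the whole point of this patch is the quoting performed by `flag_cleaner`, here is that function run standalone on a made-up flag string containing unquoted `Program Files` paths (the function body is copied from the file above; the flags and paths are invented for illustration):

```ruby
# Copy of flag_cleaner from the patch above, exercised on sample flags.
def flag_cleaner(flags)
  parts = flags.split(" -")
  parts.map! do |p|
    next p if p !~ %r{[A-Za-z]:(/|\\)}        # no Windows drive path in this flag
    next p if p =~ %r{"[A-Za-z]:(/|\\).+"$}   # path is already quoted
    p.gsub(%r{([A-Za-z]:(/|\\).+)$}, '"\1"')  # wrap the unquoted path
  end
  parts.join(" -")
end

flags = '-O2 -IC:/Program Files/Vagrant/embedded/include -LC:/Program Files/Vagrant/embedded/lib'
puts flag_cleaner(flags)
# => -O2 -I"C:/Program Files/Vagrant/embedded/include" -L"C:/Program Files/Vagrant/embedded/lib"
```
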
lib/vagrant/patches/rubygems.rb (new file, 23 lines)
@@ -0,0 +1,23 @@
# Copyright (c) HashiCorp, Inc.
# SPDX-License-Identifier: BUSL-1.1

# This allows for effective monkey patching of the MakeMakefile
# module when building gem extensions. When gem extensions are
# built, the extconf.rb file is executed as a separate process.
# To support monkey patching the MakeMakefile module, the ruby
# executable path is adjusted to add a custom load path allowing
# a customized mkmf.rb file to load the proper mkmf.rb file, and
# then applying the proper patches.
if Gem.win_platform?
  Gem.class_eval do
    class << self
      def vagrant_ruby
        cmd = ruby_ruby
        "#{cmd} -I\"#{Vagrant.source_root.join("lib/vagrant/patches/builder")}\""
      end

      alias_method :ruby_ruby, :ruby
      alias_method :ruby, :vagrant_ruby
    end
  end
end

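The effect of this patch is that, on Windows, `Gem.ruby` returns the interpreter command with an extra `-I` flag pointing at the patched builder directory, so an `extconf.rb` subprocess loads the custom mkmf.rb above before the stock one. A minimal sketch of the same alias-based wrapping, using a stand-in module and made-up paths rather than Vagrant's real objects:

```ruby
# Stand-in module demonstrating the alias_method wrapping used by the patch.
module FakeGem
  class << self
    def ruby
      "C:/Program Files/Vagrant/embedded/mingw64/bin/ruby.exe"
    end
  end
end

FakeGem.class_eval do
  class << self
    def vagrant_ruby
      # Prepend the patch directory to the subprocess load path via -I
      "#{ruby_ruby} -I\"C:/Program Files/Vagrant/embedded/gems/patches/builder\""
    end

    alias_method :ruby_ruby, :ruby      # keep a handle on the original method
    alias_method :ruby, :vagrant_ruby   # route callers through the wrapper
  end
end

puts FakeGem.ruby
# => C:/Program Files/Vagrant/embedded/mingw64/bin/ruby.exe -I"C:/Program Files/Vagrant/embedded/gems/patches/builder"
```
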
@@ -251,7 +251,7 @@ module VagrantPlugins
 
         # Write out the private key in the data dir so that the
         # machine automatically picks it up.
-        @machine.data_dir.join("private_key").open("w+") do |f|
+        @machine.data_dir.join("private_key").open("wb+") do |f|
           f.write(priv)
         end
 

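The only change in this hunk is opening the key file in binary mode. On Windows, text-mode writes translate each "\n" into "\r\n", which corrupts an OpenSSH private key; "wb+" writes the bytes untouched. A small self-contained illustration of that guarantee (temporary path only):

```ruby
# With "wb+" the bytes on disk match the source exactly on every platform;
# in text mode on Windows each "\n" would be expanded to "\r\n".
require "tmpdir"

key_material = "-----BEGIN OPENSSH PRIVATE KEY-----\nAAAA...\n-----END OPENSSH PRIVATE KEY-----\n"

Dir.mktmpdir do |dir|
  path = File.join(dir, "private_key")
  File.open(path, "wb+") { |f| f.write(key_material) }

  puts File.binread(path).bytesize == key_material.bytesize # => true
end
```
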
@@ -838,26 +838,59 @@ module VagrantPlugins
       end
 
       def supported_key_types
+        return @supported_key_types if @supported_key_types
+
         if @connection.nil?
           raise Vagrant::Errors::SSHNotReady
         end
 
-        server_data = @connection.
-          transport&.
-          algorithms&.
-          instance_variable_get(:@server_data)
-        if server_data.nil?
-          @logger.warn("No server data available for key type support check")
-          raise ServerDataError, "no data available"
-        end
-        if !server_data.is_a?(Hash)
-          @logger.warn("Server data is not expected type (expecting Hash, got #{server_data.class})")
-          raise ServerDataError, "unexpected type encountered (expecting Hash, got #{server_data.class})"
+        list = ""
+        result = sudo("sshd -T | grep key", {error_check: false}) do |type, data|
+          list << data
         end
 
-        @logger.debug("server supported key type list: #{server_data[:host_key]}")
-        server_data[:host_key]
+        # If the command failed, attempt to extract some supported
+        # key information from within net-ssh
+        if result != 0
+          server_data = @connection.
+            transport&.
+            algorithms&.
+            instance_variable_get(:@server_data)
+          if server_data.nil?
+            @logger.warn("No server data available for key type support check")
+            raise ServerDataError, "no data available"
+          end
+          if !server_data.is_a?(Hash)
+            @logger.warn("Server data is not expected type (expecting Hash, got #{server_data.class})")
+            raise ServerDataError, "unexpected type encountered (expecting Hash, got #{server_data.class})"
+          end
+
+          @logger.debug("server supported key type list (extracted from connection server info using host key): #{server_data[:host_key]}")
+          return @supported_key_types = server_data[:host_key]
+        end
+
+        # Convert the options into a Hash for easy access
+        opts = Hash[*list.split("\n").map{|line| line.split(" ", 2)}.flatten]
+
+        # Define the option names to check for in preferred order
+        # NOTE: pubkeyacceptedkeytypes has been renamed to pubkeyacceptedalgorithms
+        # ref: https://github.com/openssh/openssh-portable/commit/ee9c0da8035b3168e8e57c1dedc2d1b0daf00eec
+        ["pubkeyacceptedalgorithms", "pubkeyacceptedkeytypes", "hostkeyalgorithms"].each do |opt_name|
+          next if !opts.key?(opt_name)
+
+          @supported_key_types = opts[opt_name].split(",")
+          @logger.debug("server supported key type list (using #{opt_name}): #{@supported_key_types}")
+
+          return @supported_key_types
+        end
+
+        # Still here means unable to determine key types
+        # so log what information was returned and toss
+        # and error
+        @logger.warn("failed to determine supported key types from remote inspection")
+        @logger.debug("data returned for supported key types remote inspection: #{list.inspect}")
+
+        raise ServerDataError, "no data available"
       end
     end
   end

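The new inspection path boils down to parsing `sshd -T` output (one `option value` pair per line) into a Hash and taking the first matching option in preference order. Here is that parsing step run standalone on a trimmed, made-up sample of sshd output; the `filter_map` lookup is a condensed equivalent of the loop in the method above:

```ruby
# Parse a trimmed sample of `sshd -T` output the same way the communicator does.
list = <<~SSHD
  hostkeyalgorithms ssh-ed25519,rsa-sha2-512,rsa-sha2-256,ssh-rsa
  pubkeyacceptedalgorithms rsa-sha2-512,rsa-sha2-256,ssh-rsa
  authorizedkeysfile .ssh/authorized_keys
SSHD

# Each line becomes an "option" => "value" pair.
opts = Hash[*list.split("\n").map { |line| line.split(" ", 2) }.flatten]

# Check the options in preferred order and use the first one present.
key_types = ["pubkeyacceptedalgorithms", "pubkeyacceptedkeytypes", "hostkeyalgorithms"].
  filter_map { |opt| opts[opt]&.split(",") }.
  first

p key_types # => ["rsa-sha2-512", "rsa-sha2-256", "ssh-rsa"]
```
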
@@ -348,7 +348,7 @@ module VagrantPlugins
 
           network_info = inspect_network(all_networks)
           network_info.each do |network|
-            config = network["IPAM"]["Config"]
+            config = Array(network["IPAM"]["Config"])
             if (config.size > 0 &&
                 config.first["Subnet"] == subnet_string)
               @logger.debug("Found existing network #{network["Name"]} already configured with #{subnet_string}")

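The fix works because `Kernel#Array` turns a missing (`nil`) IPAM `Config` entry into an empty array, so the size check simply fails instead of raising `NoMethodError` on `nil`. A quick illustration with made-up network records:

```ruby
# Docker can report "IPAM" => {"Config" => nil} for some networks; Array()
# normalizes that to [] so the subnet comparison is skipped cleanly.
network_with_config    = { "IPAM" => { "Config" => [{ "Subnet" => "172.20.0.0/16" }] } }
network_missing_config = { "IPAM" => { "Config" => nil } }

[network_with_config, network_missing_config].each do |network|
  config = Array(network["IPAM"]["Config"])
  if config.size > 0 && config.first["Subnet"] == "172.20.0.0/16"
    puts "subnet already defined"
  else
    puts "no matching subnet configuration"
  end
end
# => subnet already defined
# => no matching subnet configuration
```
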
@@ -69,7 +69,8 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
   let(:command_stderr_data) { '' }
   # Mock for net-ssh scp
   let(:scp) { double("scp") }
+  # Value returned from remote ssh supported key check
+  let(:sudo_supported_key_list) { "pubkeyacceptedalgorithms ssh-rsa" }
 
   # Setup for commands using the net-ssh connection. This can be reused where needed
   # by providing to `before`
@@ -93,13 +94,16 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
       and_yield(nil, exit_data)
     # Return mocked net-ssh connection during setup
     allow(communicator).to receive(:retryable).and_return(connection)
+    # Stub in a response for supported key types check
+    allow(communicator).to receive(:sudo).with("sshd -T | grep key", any_args).
+      and_yield(:stdout, sudo_supported_key_list).and_return(0)
   end
 
   before do
     allow(host).to receive(:capability?).and_return(false)
   end
 
-  describe ".wait_for_ready" do
+  describe "#wait_for_ready" do
     before(&connection_setup)
     context "with no static config (default scenario)" do
       context "when ssh_info requires a multiple tries before it is ready" do

@@ -162,7 +166,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe "reset!" do
+  describe "#reset!" do
     let(:connection) { double("connection") }
 
     before do
@@ -182,7 +186,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".ready?" do
+  describe "#ready?" do
     before(&connection_setup)
     it "returns true if shell test is successful" do
       expect(communicator.ready?).to be(true)

@@ -248,8 +252,6 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     let(:path_joiner){ double("path_joiner") }
     let(:algorithms) { double(:algorithms) }
     let(:transport) { double(:transport, algorithms: algorithms) }
-    let(:valid_key_types) { [] }
-    let(:server_data) { { host_key: valid_key_types} }
 
     before do
       allow(Vagrant::Util::Keypair).to receive(:create).
@@ -264,7 +266,6 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
       allow(guest).to receive(:capability).with(:insert_public_key)
       allow(guest).to receive(:capability).with(:remove_public_key)
       allow(connection).to receive(:transport).and_return(transport)
-      allow(algorithms).to receive(:instance_variable_get).with(:@server_data).and_return(server_data)
       allow(communicator).to receive(:supported_key_types).and_raise(described_class.const_get(:ServerDataError))
     end
 
@@ -297,7 +298,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
 
     context "with server algorithm support data" do
       before do
-        allow(communicator).to receive(:supported_key_types).and_call_original
+        allow(communicator).to receive(:supported_key_types).and_return(valid_key_types)
      end
 
      context "when rsa is the only match" do

@@ -371,8 +372,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
 
     context "when an error is encountered getting server data" do
       before do
-        expect(communicator).to receive(:supported_key_types).and_call_original
-        expect(connection).to receive(:transport).and_raise(StandardError)
+        expect(communicator).to receive(:supported_key_types).and_raise(StandardError)
       end
 
       it "should default to rsa key" do
@@ -385,7 +385,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".execute" do
+  describe "#execute" do
     before(&connection_setup)
     it "runs valid command and returns successful status code" do
       expect(command_channel).to receive(:send_data).with(/ls \/\n/)

@@ -579,7 +579,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".test" do
+  describe "#test" do
     before(&connection_setup)
     context "with exit code as zero" do
       it "returns true" do
@@ -598,7 +598,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".upload" do
+  describe "#upload" do
     before do
       expect(communicator).to receive(:scp_connect).and_yield(scp)
       allow(communicator).to receive(:create_remote_directory)
@@ -704,7 +704,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".download" do
+  describe "#download" do
     before do
       expect(communicator).to receive(:scp_connect).and_yield(scp)
     end
@@ -715,7 +715,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".connect" do
+  describe "#connect" do
 
     it "cannot be called directly" do
       expect{ communicator.connect }.to raise_error(NoMethodError)
@@ -1030,7 +1030,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".insecure_key?" do
+  describe "#insecure_key?" do
     let(:key_data) { "" }
     let(:key_file) {
       if !@key_file
@@ -1069,7 +1069,7 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
     end
   end
 
-  describe ".generate_environment_export" do
+  describe "#generate_environment_export" do
     it "should generate bourne shell compatible export" do
       expect(communicator.send(:generate_environment_export, "TEST", "value")).to eq("export TEST=\"value\"\n")
     end

@@ -1082,4 +1082,129 @@ describe VagrantPlugins::CommunicatorSSH::Communicator do
       end
     end
   end
+
+  describe "#supported_key_types" do
+    let(:sudo_result) { 0 }
+    let(:sudo_data) { "" }
+    let(:server_data_error) { VagrantPlugins::CommunicatorSSH::Communicator::ServerDataError }
+    let(:transport) { double("transport", algorithms: algorithms) }
+    let(:algorithms) { double("algorithms") }
+
+    before do
+      allow(communicator).to receive(:ready?).and_return(true)
+      expect(communicator).to receive(:sudo).
+        with("sshd -T | grep key", any_args).
+        and_yield(:stdout, sudo_data).
+        and_return(sudo_result)
+      # The @connection value is checked to determine if supported key types
+      # can be checked. To facilitate this, set it to a non-nil value
+      communicator.instance_variable_set(:@connection, connection)
+      allow(connection).to receive(:transport).and_return(transport)
+    end
+
+    it "should raise an error when no data is returned" do
+      expect { communicator.send(:supported_key_types) }.to raise_error(server_data_error)
+    end
+
+    context "when sudo command is unsuccessful" do
+      let(:sudo_result) { 1 }
+
+      it "should inspect the net-ssh connection" do
+        expect(algorithms).to receive(:instance_variable_get).
+          with(:@server_data).and_return({})
+        communicator.send(:supported_key_types)
+      end
+    end
+
+    context "when data includes pubkeyacceptedalgorithms" do
+      let(:sudo_data) do
+        "pubkeyauthentication yes
+gssapikeyexchange no
+gssapistorecredentialsonrekey no
+trustedusercakeys none
+revokedkeys none
+authorizedkeyscommand none
+authorizedkeyscommanduser none
+hostkeyagent none
+hostbasedacceptedkeytypes ecdsa-sha2-nistp521,ssh-ed25519,rsa-sha2-512,rsa-sha2-256,ssh-rsa
+hostkeyalgorithms ssh-ed25519,rsa-sha2-512,rsa-sha2-256,ssh-rsa
+pubkeyacceptedalgorithms rsa-sha2-512,rsa-sha2-256,ssh-rsa
+authorizedkeysfile .ssh/authorized_keys
+hostkey /etc/ssh/ssh_host_rsa_key
+rekeylimit 0 0"
+      end
+
+      it "should return expected values" do
+        expect(communicator.send(:supported_key_types)).to eq(["rsa-sha2-512", "rsa-sha2-256", "ssh-rsa"])
+      end
+    end
+
+    context "when data includes pubkeyacceptedkeytypes" do
+      let(:sudo_data) do
+        "pubkeyauthentication yes
+gssapikeyexchange no
+gssapistorecredentialsonrekey no
+trustedusercakeys none
+revokedkeys none
+authorizedkeyscommand none
+authorizedkeyscommanduser none
+hostkeyagent none
+hostbasedacceptedkeytypes ecdsa-sha2-nistp521,ssh-ed25519,rsa-sha2-512,rsa-sha2-256,ssh-rsa
+hostkeyalgorithms ssh-ed25519,rsa-sha2-512,rsa-sha2-256,ssh-rsa
+pubkeyacceptedkeytypes rsa-sha2-512,rsa-sha2-256,ssh-rsa
+authorizedkeysfile .ssh/authorized_keys
+hostkey /etc/ssh/ssh_host_rsa_key
+rekeylimit 0 0"
+      end
+
+      it "should return expected values" do
+        expect(communicator.send(:supported_key_types)).
+          to eq(["rsa-sha2-512", "rsa-sha2-256", "ssh-rsa"])
+      end
+    end
+
+    context "when data does not include pubkeyacceptedalgorithms or pubkeyacceptedkeytypes" do
+      let(:sudo_data) do
+        "pubkeyauthentication yes
+gssapikeyexchange no
+gssapistorecredentialsonrekey no
+trustedusercakeys none
+revokedkeys none
+authorizedkeyscommand none
+authorizedkeyscommanduser none
+hostkeyagent none
+hostbasedacceptedkeytypes ecdsa-sha2-nistp521,ssh-ed25519,rsa-sha2-512,rsa-sha2-256,ssh-rsa
+hostkeyalgorithms ssh-ed25519,rsa-sha2-512,rsa-sha2-256,ssh-rsa
+authorizedkeysfile .ssh/authorized_keys
+hostkey /etc/ssh/ssh_host_rsa_key
+rekeylimit 0 0"
+      end
+
+      it "should use hostkeyalgorithms" do
+        expect(communicator.send(:supported_key_types)).
+          to eq(["ssh-ed25519", "rsa-sha2-512", "rsa-sha2-256", "ssh-rsa"])
+      end
+    end
+
+    context "when data does not include defined config options" do
+      let(:sudo_data) do
+        "pubkeyauthentication yes
+gssapikeyexchange no
+gssapistorecredentialsonrekey no
+trustedusercakeys none
+revokedkeys none
+authorizedkeyscommand none
+authorizedkeyscommanduser none
+hostkeyagent none
+authorizedkeysfile .ssh/authorized_keys
+hostkey /etc/ssh/ssh_host_rsa_key
+rekeylimit 0 0"
+      end
+
+      it "should raise error" do
+        expect { communicator.send(:supported_key_types) }.
+          to raise_error(server_data_error)
+      end
+    end
+  end
 end

@@ -667,21 +667,68 @@ describe VagrantPlugins::DockerProvider::Driver do
     let(:subnet_string) { "172.20.0.0/16" }
     let(:network_names) { ["vagrant_network_172.20.0.0/16", "bridge", "null" ] }
 
-    it "returns network name if defined" do
+    before do
       allow(subject).to receive(:list_network_names).and_return(network_names)
       allow(subject).to receive(:inspect_network).and_return(JSON.load(docker_network_struct))
+    end
 
+    it "returns network name if defined" do
       network_name = subject.network_defined?(subnet_string)
       expect(network_name).to eq("vagrant_network_172.20.0.0/16")
     end
 
     it "returns nil name if not defined" do
-      allow(subject).to receive(:list_network_names).and_return(network_names)
-      allow(subject).to receive(:inspect_network).and_return(JSON.load(docker_network_struct))
-
       network_name = subject.network_defined?("120.20.0.0/24")
       expect(network_name).to eq(nil)
     end
 
+    context "when config information is missing" do
+      let(:docker_network_struct) do
+        [
+          {
+            "Name": "bridge",
+            "Id": "ae74f6cc18bbcde86326937797070b814cc71bfc4a6d8e3e8cf3b2cc5c7f4a7d",
+            "Created": "2019-03-20T14:10:06.313314662-07:00",
+            "Scope": "local",
+            "Driver": "bridge",
+            "EnableIPv6": false,
+            "IPAM": {
+              "Driver": "default",
+              "Options": nil,
+            },
+            "Internal": false,
+            "Attachable": false,
+            "Ingress": false,
+            "ConfigFrom": {
+              "Network": ""
+            },
+            "ConfigOnly": false,
+            "Containers": {
+              "a1ee9b12bcea8268495b1f43e8d1285df1925b7174a695075f6140adb9415d87": {
+                "Name": "vagrant-sandbox_docker-1_1553116237",
+                "EndpointID": "fc1b0ed6e4f700cf88bb26a98a0722655191542e90df3e3492461f4d1f3c0cae",
+                "MacAddress": "02:42:ac:11:00:02",
+                "IPv4Address": "172.17.0.2/16",
+                "IPv6Address": ""
+              },
+              "Options": {
+                "com.docker.network.bridge.default_bridge": "true",
+                "com.docker.network.bridge.enable_icc": "true",
+                "com.docker.network.bridge.enable_ip_masquerade": "true",
+                "com.docker.network.bridge.host_binding_ipv4": "0.0.0.0",
+                "com.docker.network.bridge.name": "docker0",
+                "com.docker.network.driver.mtu": "1500"
+              },
+              "Labels": {}
+            },
+          }
+        ].to_json
+      end
+
+      it "should not raise an error" do
+        expect { subject.network_defined?(subnet_string) }.not_to raise_error
+      end
+    end
   end
 
   describe '#network_containing_address' do

@@ -12,7 +12,7 @@ Gem::Specification.new do |s|
   s.summary = "Build and distribute virtualized development environments."
   s.description = "Vagrant is a tool for building and distributing virtualized development environments."
 
-  s.required_ruby_version = ">= 3.0", "< 3.3"
+  s.required_ruby_version = ">= 3.0", "< 3.4"
   s.required_rubygems_version = ">= 1.3.6"
 
   s.add_dependency "bcrypt_pbkdf", "~> 1.1"
@@ -34,7 +34,7 @@ Gem::Specification.new do |s|
   s.add_dependency "rexml", "~> 3.2"
   s.add_dependency "rgl", "~> 0.5.10"
   s.add_dependency "rubyzip", "~> 2.3.2"
-  s.add_dependency "vagrant_cloud", "~> 3.1.0"
+  s.add_dependency "vagrant_cloud", "~> 3.1.1"
   s.add_dependency "wdm", "~> 0.1.1"
   s.add_dependency "winrm", ">= 2.3.6", "< 3.0"
   s.add_dependency "winrm-elevated", ">= 1.2.3", "< 2.0"

@@ -1 +1 @@
-2.4.1.dev
+2.4.2.dev

@@ -1,4 +1,4 @@
 {
-  "VERSION": "2.4.0",
+  "VERSION": "2.4.1",
   "VMWARE_UTILITY_VERSION": "1.0.22"
 }