package assets
import "embed" // used for embedding KICS libraries
//go:embed libraries/*.rego
var embeddedLibraries embed.FS
//go:embed libraries/*.json
var embeddedLibraryData embed.FS
//go:embed queries/common/passwords_and_secrets/metadata.json
var SecretsQueryMetadataJSON string
//go:embed queries/common/passwords_and_secrets/regex_rules.json
var SecretsQueryRegexRulesJSON string
// GetEmbeddedLibrary returns the embedded library.rego for the platform passed in the argument
func GetEmbeddedLibrary(platform string) (string, error) {
content, err := embeddedLibraries.ReadFile("libraries/" + platform + ".rego")
return string(content), err
}
// GetEmbeddedLibraryData returns the embedded library data (.json) for the platform passed in the argument
func GetEmbeddedLibraryData(platform string) (string, error) {
content, err := embeddedLibraryData.ReadFile("libraries/" + platform + ".json")
return string(content), err
}
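// Editor's note: the following is an illustrative sketch, not part of KICS.
// It shows a minimal caller for the helpers above, assuming this package is
// importable as github.com/Checkmarx/kics/v2/assets and that a Terraform
// library is embedded under libraries/.
package main

import (
	"fmt"
	"log"

	"github.com/Checkmarx/kics/v2/assets"
)

func main() {
	// load the embedded Rego library for the Terraform platform
	lib, err := assets.GetEmbeddedLibrary("terraform")
	if err != nil {
		log.Fatalf("loading embedded library: %v", err)
	}
	fmt.Printf("terraform library: %d bytes of Rego\n", len(lib))
}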
package main
import (
"context"
"os"
"path"
"path/filepath"
"github.com/Checkmarx/kics/v2/pkg/builder/engine"
"github.com/Checkmarx/kics/v2/pkg/builder/writer"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
func main() {
var (
inPath string
outPath string
)
ctx := context.Background()
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stdout})
zerolog.SetGlobalLevel(zerolog.DebugLevel)
rootCmd := &cobra.Command{
Use: "inspect",
Short: "Tool to build new query from example file",
RunE: func(cmd *cobra.Command, args []string) error {
content, err := os.ReadFile(filepath.Clean(inPath))
if err != nil {
return err
}
rules, err := engine.Run(content, path.Base(inPath))
if err != nil {
return err
}
regoWriter, err := writer.NewRegoWriter()
if err != nil {
return err
}
outContent, err := regoWriter.Render(rules)
if err != nil {
return err
}
return saveFile(outPath, outContent)
},
}
rootCmd.Flags().StringVarP(&inPath, "in", "i", "", "path for in file")
rootCmd.Flags().StringVarP(&outPath, "out", "o", "", "path for out file")
if err := rootCmd.MarkFlagRequired("in"); err != nil {
log.Err(err).Msg("Failed to add command required flags")
}
if err := rootCmd.MarkFlagRequired("out"); err != nil {
log.Err(err).Msg("Failed to add command required flags")
}
if err := rootCmd.ExecuteContext(ctx); err != nil {
os.Exit(-1)
}
}
func saveFile(filePath string, content []byte) error {
f, err := os.OpenFile(filepath.Clean(filePath), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, os.ModePerm)
if err != nil {
return err
}
defer func() {
if err := f.Close(); err != nil {
log.Err(err).Msgf("failed to close '%s'", filePath)
}
}()
if _, err := f.Write(content); err != nil {
return err
}
return nil
}
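// Editor's note (illustrative usage; file names are hypothetical): with the
// flags defined above, the builder is invoked as
//
//	inspect --in positive.tf --out query.rego
//
// reading an example input file and writing the generated Rego query.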
package main
import (
"os"
"github.com/Checkmarx/kics/v2/internal/console"
"github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/internal/constants"
)
func main() {
if err := console.Execute(); err != nil {
if helpers.ShowError("errors") {
os.Exit(constants.EngineErrorCode)
}
}
}
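// Editor's note: constants.EngineErrorCode is 126 (see the constants package
// below), so engine failures surface as exit code 126 unless the
// --ignore-on-exit argument suppresses errors via helpers.ShowError.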
package console
import (
_ "embed" // Embed kics CLI img and analyze-flags
"encoding/json"
"os"
"path/filepath"
"github.com/Checkmarx/kics/v2/internal/console/flags"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/analyzer"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
var (
//go:embed assets/analyze-flags.json
analyzeFlagsListContent string
)
const (
perms = 0640
)
// NewAnalyzeCmd creates a new instance of the analyze Command
func NewAnalyzeCmd() *cobra.Command {
return &cobra.Command{
Use: "analyze",
Short: "Determines the detected platforms of a certain project",
RunE: func(cmd *cobra.Command, args []string) error {
return analyze()
},
}
}
func initAnalyzeCmd(analyzeCmd *cobra.Command) error {
if err := flags.InitJSONFlags(
analyzeCmd,
analyzeFlagsListContent,
false,
source.ListSupportedPlatforms(),
source.ListSupportedCloudProviders()); err != nil {
return err
}
if err := analyzeCmd.MarkFlagRequired(flags.AnalyzePath); err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: "Failed to add command required flags",
Err: err,
Location: "func initAnalyzeCmd()",
}, true)
log.Err(err).Msg("Failed to add command required flags")
}
return nil
}
func analyze() error {
// save the analyze parameters into the analyzer.Parameters struct
analyzeParams := getAnalyzeParameters()
return executeAnalyze(analyzeParams)
}
func getAnalyzeParameters() *analyzer.Parameters {
analyzeParams := analyzer.Parameters{
Path: flags.GetMultiStrFlag(flags.AnalyzePath),
Results: flags.GetStrFlag(flags.AnalyzeResults),
MaxFileSize: flags.GetIntFlag(flags.MaxFileSizeFlag),
}
return &analyzeParams
}
func executeAnalyze(analyzeParams *analyzer.Parameters) error {
log.Debug().Msg("console.analyze()")
for _, warn := range warnings {
log.Warn().Msgf("%s", warn)
}
console := newConsole()
console.preScan()
analyzerStruct := &analyzer.Analyzer{
Paths: analyzeParams.Path,
Types: []string{""},
ExcludeTypes: []string{""},
Exc: []string{""},
ExcludeGitIgnore: false,
GitIgnoreFileName: "",
MaxFileSize: analyzeParams.MaxFileSize,
}
analyzedPaths, err := analyzer.Analyze(analyzerStruct)
if err != nil {
log.Err(err).Msg("failed to analyze paths")
return err
}
err = writeToFile(analyzeParams.Results, analyzedPaths)
if err != nil {
log.Err(err).Msg("failed to write analyzer results")
return err
}
return nil
}
func writeToFile(resultsPath string, analyzerResults model.AnalyzedPaths) error {
err := os.MkdirAll(filepath.Dir(resultsPath), perms)
if err != nil {
return err
}
f, err := os.Create(resultsPath)
if err != nil {
return err
}
defer f.Close()
content, err := json.Marshal(analyzerResults)
if err != nil {
return err
}
_, err = f.Write(content)
if err != nil {
return err
}
return nil
}
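// Editor's note: a standalone sketch of driving the analyzer directly, not
// part of KICS. It mirrors executeAnalyze above; the project path and
// file-size limit are made-up values.
package main

import (
	"fmt"
	"log"

	"github.com/Checkmarx/kics/v2/pkg/analyzer"
)

func main() {
	a := &analyzer.Analyzer{
		Paths:        []string{"./project"}, // hypothetical path
		Types:        []string{""},
		ExcludeTypes: []string{""},
		Exc:          []string{""},
		MaxFileSize:  5, // assumed to be MB, matching the scan flag
	}
	analyzedPaths, err := analyzer.Analyze(a)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("analyzer results: %+v\n", analyzedPaths)
}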
package flags
import (
"encoding/json"
"regexp"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
var (
flagsMultiStrReferences = make(map[string]*[]string)
flagsStrReferences = make(map[string]*string)
flagsBoolReferences = make(map[string]*bool)
flagsIntReferences = make(map[string]*int)
validations = make(map[string][]string)
)
type flagJSON struct {
FlagType string
ShorthandFlag string
DefaultValue *string
Usage string
Hidden bool
Deprecated bool
DeprecatedInfo string
Validation string
}
func evalUsage(usage string, supportedPlatforms, supportedCloudProviders []string) string {
variables := map[string]string{
"sliceInstructions": "can be provided multiple times or as a comma separated string",
"supportedLogLevels": strings.Join(constants.AvailableLogLevels, ","),
"supportedPlatforms": strings.Join(supportedPlatforms, ", "),
"supportedProviders": strings.Join(supportedCloudProviders, ", "),
"supportedReports": strings.Join(append([]string{"all"}, helpers.ListReportFormats()...), ", "),
"defaultLogFile": constants.DefaultLogFile,
"logFormatPretty": constants.LogFormatPretty,
"logFormatJSON": constants.LogFormatJSON,
}
variableRegex := regexp.MustCompile(`\$\{(\w+)\}`)
match := variableRegex.FindAllStringSubmatch(usage, -1)
for i := range match {
usage = strings.ReplaceAll(usage, "${"+match[i][1]+"}", variables[match[i][1]])
}
return usage
}
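// Editor's note (worked example): given the substitution above,
//
//	evalUsage("types: ${supportedPlatforms}", []string{"terraform", "ansible"}, nil)
//
// returns "types: terraform, ansible", since ${supportedPlatforms} expands to
// the platform list joined with ", ".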
func checkHiddenAndDeprecated(flagSet *pflag.FlagSet, flagName string, flagProps flagJSON) error { //nolint:gocritic
if flagProps.Hidden {
err := flagSet.MarkHidden(flagName)
if err != nil {
log.Err(err).Msg("Loading flags: could not mark flag as hidden")
return err
}
}
if flagProps.Deprecated {
err := flagSet.MarkDeprecated(flagName, flagProps.DeprecatedInfo)
if err != nil {
log.Err(err).Msg("Loading flags: could not mark flag as deprecated")
return err
}
}
return nil
}
// InitJSONFlags initializes cobra flags from a JSON flag definition list
func InitJSONFlags(
cmd *cobra.Command,
flagsListContent string,
persistentFlag bool,
supportedPlatforms,
supportedCloudProviders []string) error {
var flagsList map[string]flagJSON
err := json.Unmarshal([]byte(flagsListContent), &flagsList)
if err != nil {
log.Err(err).Msg("Loading flags: could not unmarshal flags")
return err
}
flagSet := cmd.Flags()
if persistentFlag {
flagSet = cmd.PersistentFlags()
}
for flagName, flagProps := range flagsList {
flagProps.Usage = evalUsage(flagProps.Usage, supportedPlatforms, supportedCloudProviders)
switch flagProps.FlagType {
case "multiStr":
var flag []string
flagsMultiStrReferences[flagName] = &flag
defaultValues := make([]string, 0)
if flagProps.DefaultValue != nil {
defaultValues = strings.Split(*flagProps.DefaultValue, ",")
}
flagSet.StringSliceVarP(flagsMultiStrReferences[flagName], flagName, flagProps.ShorthandFlag, defaultValues, flagProps.Usage)
case "str":
var flag string
flagsStrReferences[flagName] = &flag
flagSet.StringVarP(flagsStrReferences[flagName], flagName, flagProps.ShorthandFlag, *flagProps.DefaultValue, flagProps.Usage)
case "bool":
var flag bool
flagsBoolReferences[flagName] = &flag
defaultValue, err := strconv.ParseBool(*flagProps.DefaultValue)
if err != nil {
log.Err(err).Msg("Loading flags: could not convert default values")
return err
}
flagSet.BoolVarP(flagsBoolReferences[flagName], flagName, flagProps.ShorthandFlag, defaultValue, flagProps.Usage)
case "int":
var flag int
flagsIntReferences[flagName] = &flag
defaultValue, err := strconv.Atoi(*flagProps.DefaultValue)
if err != nil {
log.Err(err).Msg("Loading flags: could not convert default values")
return err
}
flagSet.IntVarP(flagsIntReferences[flagName], flagName, flagProps.ShorthandFlag, defaultValue, flagProps.Usage)
default:
log.Error().Msgf("Flag %s has unknown type %s", flagName, flagProps.FlagType)
}
err := checkHiddenAndDeprecated(flagSet, flagName, flagProps)
if err != nil {
return err
}
if flagProps.Validation != "" {
validations[flagName] = strings.Split(flagProps.Validation, ",")
}
}
return nil
}
// GetStrFlag gets a string flag by its name
func GetStrFlag(flagName string) string {
if value, ok := flagsStrReferences[flagName]; ok {
return *value
}
log.Debug().Msgf("Could not find string flag %s", flagName)
return ""
}
// GetMultiStrFlag gets a slice of strings flag by its name
func GetMultiStrFlag(flagName string) []string {
if value, ok := flagsMultiStrReferences[flagName]; ok {
return *value
}
log.Debug().Msgf("Could not find string slice flag %s", flagName)
return []string{}
}
// GetBoolFlag gets a boolean flag by its name
func GetBoolFlag(flagName string) bool {
if value, ok := flagsBoolReferences[flagName]; ok {
return *value
}
log.Debug().Msgf("Could not find boolean flag %s", flagName)
return false
}
// GetIntFlag gets an integer flag by its name
func GetIntFlag(flagName string) int {
if value, ok := flagsIntReferences[flagName]; ok {
return *value
}
log.Debug().Msgf("Could not find integer flag %s", flagName)
return -1
}
// SetStrFlag set a string flag using its name
func SetStrFlag(flagName, value string) {
if _, ok := flagsStrReferences[flagName]; ok {
*flagsStrReferences[flagName] = value
} else {
log.Debug().Msgf("Could not set string flag %s", flagName)
}
}
// SetMultiStrFlag set a slice of strings flag using its name
func SetMultiStrFlag(flagName string, value []string) {
if _, ok := flagsMultiStrReferences[flagName]; ok {
*flagsMultiStrReferences[flagName] = value
} else {
log.Debug().Msgf("Could not set string slice flag %s", flagName)
}
}
// GetAllFlags returns all flags values
func GetAllFlags() map[string]interface{} {
flags := make(map[string]interface{})
for flag, value := range flagsBoolReferences {
flags[flag] = value
}
for flag, value := range flagsIntReferences {
flags[flag] = value
}
for flag, value := range flagsMultiStrReferences {
flags[flag] = value
}
for flag, value := range flagsStrReferences {
flags[flag] = value
}
return flags
}
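// Editor's note: a hedged sketch of wiring InitJSONFlags, not part of KICS.
// The JSON keys follow the flagJSON struct above (matched case-insensitively
// by encoding/json). Since internal/console/flags is an internal package,
// this only compiles inside the KICS module.
package main

import (
	"fmt"

	"github.com/Checkmarx/kics/v2/internal/console/flags"
	"github.com/spf13/cobra"
)

const demoFlags = `{
	"demo-name": {
		"flagType": "str",
		"shorthandFlag": "",
		"defaultValue": "hello",
		"usage": "a demo string flag"
	}
}`

func main() {
	cmd := &cobra.Command{Use: "demo"}
	if err := flags.InitJSONFlags(cmd, demoFlags, false, nil, nil); err != nil {
		panic(err)
	}
	fmt.Println(flags.GetStrFlag("demo-name")) // prints "hello"
}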
package flags
import (
"fmt"
"strings"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/spf13/viper"
)
// FormatNewError builds an error stating that flag1 and flag2 cannot be used simultaneously
func FormatNewError(flag1, flag2 string) error {
return errors.Errorf("can't provide '%s' and '%s' flags simultaneously",
flag1,
flag2)
}
// ValidateQuerySelectionFlags returns an error when include and exclude query flags are used simultaneously
func ValidateQuerySelectionFlags() error {
if len(GetMultiStrFlag(IncludeQueriesFlag)) > 0 && len(GetMultiStrFlag(ExcludeQueriesFlag)) > 0 {
return FormatNewError(IncludeQueriesFlag, ExcludeQueriesFlag)
}
if len(GetMultiStrFlag(IncludeQueriesFlag)) > 0 && len(GetMultiStrFlag(ExcludeCategoriesFlag)) > 0 {
return FormatNewError(IncludeQueriesFlag, ExcludeCategoriesFlag)
}
return nil
}
// ValidateTypeSelectionFlags returns an error when include and exclude type flags are used simultaneously
func ValidateTypeSelectionFlags() error {
if len(GetMultiStrFlag(TypeFlag)) > 1 && len(GetMultiStrFlag(ExcludeTypeFlag)) > 1 {
return FormatNewError(TypeFlag, ExcludeTypeFlag)
}
if GetMultiStrFlag(TypeFlag)[0] != "" && GetMultiStrFlag(ExcludeTypeFlag)[0] != "" {
return FormatNewError(TypeFlag, ExcludeTypeFlag)
}
return nil
}
// BindFlags fills flag values from the config file or environment variables
func BindFlags(cmd *cobra.Command, v *viper.Viper) error {
log.Debug().Msg("console.bindFlags()")
settingsMap := v.AllSettings()
cmd.Flags().VisitAll(func(f *pflag.Flag) {
settingsMap[f.Name] = true
if strings.Contains(f.Name, "-") {
envVarSuffix := strings.ToUpper(strings.ReplaceAll(f.Name, "-", "_"))
variableName := fmt.Sprintf("%s_%s", "KICS", envVarSuffix)
if err := v.BindEnv(f.Name, variableName); err != nil {
log.Err(err).Msg("Failed to bind Viper flags")
}
}
if !f.Changed && v.IsSet(f.Name) {
val := v.Get(f.Name)
setBoundFlags(f.Name, val, cmd)
}
})
for key, val := range settingsMap {
if val != true {
return fmt.Errorf("unknown configuration key: '%s'\nShowing help for '%s' command", key, cmd.Name())
}
}
return nil
}
func setBoundFlags(flagName string, val interface{}, cmd *cobra.Command) {
switch t := val.(type) {
case []interface{}:
var paramSlice []string
for _, param := range t {
paramSlice = append(paramSlice, param.(string))
}
valStr := strings.Join(paramSlice, ",")
if err := cmd.Flags().Set(flagName, valStr); err != nil {
log.Err(err).Msg("Failed to set Viper flags")
}
default:
if err := cmd.Flags().Set(flagName, fmt.Sprintf("%v", val)); err != nil {
log.Err(err).Msg("Failed to set Viper flags")
}
}
}
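// Editor's note (usage sketch): with the binding above, any flag whose name
// contains a dash can also be set through the environment with a KICS_
// prefix; for example, a hypothetical invocation
//
//	KICS_LOG_LEVEL=debug kics scan -p ./project
//
// maps KICS_LOG_LEVEL onto the log-level flag.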
package flags
import "regexp"
type flagValidationFuncsMap map[string]func(flagName string) error
var flagValidationFuncs = flagValidationFuncsMap{
"sliceFlagsShouldNotStartWithFlags": sliceFlagsShouldNotStartWithFlags,
"validateMultiStrEnum": validateMultiStrEnum,
"validateStrEnum": validateStrEnum,
"allQueriesID": allQueriesID,
"validateWorkersFlag": validateWorkersFlag,
"validatePath": validatePath,
}
func isQueryID(id string) bool {
re := regexp.MustCompile(`^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$`)
return re.MatchString(id)
}
func convertSliceToDummyMap(slice []string) map[string]string {
returnMap := make(map[string]string, len(slice))
for _, element := range slice {
returnMap[element] = ""
}
return returnMap
}
// Validate checks whether the flag values are valid and returns an error if not
func Validate() error {
for validation, validationFuncs := range validations {
for _, validationFunc := range validationFuncs {
if function, ok := flagValidationFuncs[validationFunc]; ok {
if err := function(validation); err != nil {
return err
}
}
}
}
return nil
}
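// Editor's note (illustration): a flag whose JSON definition sets
// "validation": "validateStrEnum,validatePath" ends up in the validations
// map as ["validateStrEnum", "validatePath"], and Validate runs each of
// those functions (looked up in flagValidationFuncs) against the flag name.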
package flags
import (
"fmt"
"regexp"
"runtime"
"strings"
"time"
"github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/utils"
)
var validMultiStrEnums = map[string]map[string]string{
CloudProviderFlag: constants.AvailableCloudProviders,
ExcludeCategoriesFlag: constants.AvailableCategories,
ExcludeSeveritiesFlag: convertSliceToDummyMap(constants.AvailableSeverities),
FailOnFlag: convertSliceToDummyMap(constants.AvailableSeverities),
ReportFormatsFlag: convertSliceToDummyMap(append([]string{"all"}, helpers.ListReportFormats()...)),
TypeFlag: constants.AvailablePlatforms,
ExcludeTypeFlag: constants.AvailablePlatforms,
}
func sliceFlagsShouldNotStartWithFlags(flagName string) error {
values := GetMultiStrFlag(flagName)
re := regexp.MustCompile(`^--[a-z-]+$`)
if len(values) > 0 {
firstArg := values[0]
if (strings.HasPrefix(firstArg, "-") && len(firstArg) == 2) || re.MatchString(firstArg) {
return fmt.Errorf("flag needs an argument: %s", flagName)
}
}
return nil
}
func allQueriesID(flagName string) error {
queriesID := GetMultiStrFlag(flagName)
for _, queryID := range queriesID {
if !isQueryID(queryID) {
return fmt.Errorf("invalid argument --%s: %s is not a valid query ID", flagName, queryID)
}
}
return nil
}
func validateMultiStrEnum(flagName string) error {
enums := GetMultiStrFlag(flagName)
invalidEnum := make([]string, 0)
caseInsensitiveMap := make(map[string]string)
for key, value := range validMultiStrEnums[flagName] {
caseInsensitiveMap[strings.ToLower(key)] = value
}
for _, enum := range enums {
if _, ok := caseInsensitiveMap[strings.ToLower(enum)]; enum != "" && !ok {
invalidEnum = append(invalidEnum, enum)
}
}
validEnumsValues := utils.SortedKeys(validMultiStrEnums[flagName])
if len(invalidEnum) > 0 {
return fmt.Errorf(
"unknown argument(s) for --%s: %s\nvalid arguments:\n %s",
flagName,
strings.Join(invalidEnum, ", "),
strings.Join(validEnumsValues, "\n "),
)
}
return nil
}
func validateWorkersFlag(flagName string) error {
workers := GetIntFlag(flagName)
if workers < 0 {
return fmt.Errorf("invalid argument --%s: value must be greater or equal to 0", flagName)
}
if workers > runtime.GOMAXPROCS(-1) {
now := time.Now()
timeStr := now.Format("03:04PM")
fmt.Println("\x1b[90m" + timeStr + " \x1b[31mWRN\x1b[0m Number of workers is greater than the number of logical CPUs")
return nil
}
return nil
}
package flags
import (
"errors"
"regexp"
)
func validatePath(flagName string) error {
relPath := `^(?:\.\.\\|\.\\|\.\.\/|\.\/|\\|\/)?(?:[^<>:"\/\\|?*]+[\\\/])*[^<>:"\/\\|?*]+(\/|\\)?$`
absPath := `^[a-zA-Z]:[\\\/](?:[^<>:"\/\\|?*]+[\\\/])*[^<>:"\/\\|?*]+(?:\/|\\)?$`
regex := regexp.MustCompile(relPath + `|` + absPath)
path := GetStrFlag(flagName)
isValid := regex.MatchString(path) || path == ""
if !isValid {
errorMsg := "the directory name you provided for the " + flagName + " flag contains invalid characters"
return errors.New(errorMsg)
}
return nil
}
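// Editor's note (worked examples): under the pattern above, values such as
// "./results" or `C:\kics\output` are accepted, while a value containing one
// of the excluded characters, e.g. "out|put", is rejected.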
package flags
import (
"fmt"
"strings"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/utils"
)
var validStrEnums = map[string]map[string]string{
LogLevelFlag: convertSliceToDummyMap(constants.AvailableLogLevels),
}
func validateStrEnum(flagName string) error {
value := GetStrFlag(flagName)
caseInsensitiveMap := make(map[string]string)
for key, value := range validStrEnums[flagName] {
caseInsensitiveMap[strings.ToLower(key)] = value
}
validEnumsValues := utils.SortedKeys(validStrEnums[flagName])
if _, ok := caseInsensitiveMap[strings.ToLower(value)]; value != "" && !ok {
return fmt.Errorf(
"unknown argument for --%s: %s\nvalid arguments:\n %s",
flagName,
value,
strings.Join(validEnumsValues, "\n "),
)
}
return nil
}
package console
import (
"fmt"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
// NewGenerateIDCmd creates a new instance of the generate-id Command
func NewGenerateIDCmd() *cobra.Command {
return &cobra.Command{
Use: "generate-id",
Short: "Generates uuid for query",
RunE: func(cmd *cobra.Command, args []string) error {
_, err := fmt.Fprintln(cmd.OutOrStdout(), uuid.New().String())
if err != nil {
log.Err(err).Msg("failed to get uuid")
}
return err
},
}
}
package helpers
import (
"fmt"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
)
var shouldIgnore string
var shouldFail map[string]struct{}
// ResultsExitCode calculates the exit code based on the severity of the results; returns 0 if no results were reported
func ResultsExitCode(summary *model.Summary) int {
// severityArr is needed to make sure 'for' cycle is made in an ordered fashion
severityArr := []model.Severity{"CRITICAL", "HIGH", "MEDIUM", "LOW", "INFO", "TRACE"}
codeMap := map[model.Severity]int{"CRITICAL": 60, "HIGH": 50, "MEDIUM": 40, "LOW": 30, "INFO": 20, "TRACE": 0}
exitMap := summary.SeveritySummary.SeverityCounters
for _, severity := range severityArr {
if _, reportSeverity := shouldFail[strings.ToLower(string(severity))]; !reportSeverity {
continue
}
if exitMap[severity] > 0 {
return codeMap[severity]
}
}
return 0
}
// InitShouldIgnoreArg initializes which kinds of failures (results, errors) should be ignored when computing the exit code
func InitShouldIgnoreArg(arg string) error {
validArgs := []string{"none", "all", "results", "errors"}
for _, validArg := range validArgs {
if strings.EqualFold(validArg, arg) {
shouldIgnore = strings.ToLower(arg)
return nil
}
}
return fmt.Errorf("unknown argument for --ignore-on-exit: %s\nvalid arguments:\n %s", arg, strings.Join(validArgs, "\n "))
}
// InitShouldFailArg initializes which vulnerability severities should change the exit code
func InitShouldFailArg(args []string) error {
possibleArgs := map[string]struct{}{
"critical": {},
"high": {},
"medium": {},
"low": {},
"info": {},
}
if len(args) == 0 {
shouldFail = possibleArgs
return nil
}
argsConverted := make(map[string]struct{})
for _, arg := range args {
if _, ok := possibleArgs[strings.ToLower(arg)]; !ok {
validArgs := []string{"critical", "high", "medium", "low", "info"}
return fmt.Errorf("unknown argument for --fail-on: %s\nvalid arguments:\n %s", arg, strings.Join(validArgs, "\n "))
}
argsConverted[strings.ToLower(arg)] = struct{}{}
}
shouldFail = argsConverted
return nil
}
// ShowError returns true if should show error, otherwise returns false
func ShowError(kind string) bool {
return strings.EqualFold(shouldIgnore, "none") || (!strings.EqualFold(shouldIgnore, "all") && !strings.EqualFold(shouldIgnore, kind))
}
// RemediateExitCode calculates the exit code based on the difference between the selected and the applied remediations
func RemediateExitCode(selectedRemediationNumber, actualRemediationDoneNumber int) int {
statusCode := 70
if selectedRemediationNumber != actualRemediationDoneNumber {
// KICS AR was not able to remediate all the selected remediation
return statusCode
}
return 0
}
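// Editor's note (worked example): with the maps above, a scan whose summary
// counts at least one HIGH result exits with code 50 when "high" is in the
// --fail-on set, while a scan reporting only TRACE results exits with 0.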
package helpers
import (
"bufio"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"runtime"
"sort"
"strconv"
"strings"
"github.com/BurntSushi/toml"
"github.com/Checkmarx/kics/v2/internal/metrics"
"github.com/Checkmarx/kics/v2/pkg/progress"
"github.com/Checkmarx/kics/v2/pkg/report"
"github.com/hashicorp/hcl"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"gopkg.in/yaml.v3"
)
const divisor = float32(100000)
var reportGenerators = map[string]func(path, filename string, body interface{}) error{
"json": report.PrintJSONReport,
"sarif": report.PrintSarifReport,
"html": report.PrintHTMLReport,
"glsast": report.PrintGitlabSASTReport,
"pdf": report.PrintPdfReport,
"sonarqube": report.PrintSonarQubeReport,
"cyclonedx": report.PrintCycloneDxReport,
"junit": report.PrintJUnitReport,
"asff": report.PrintASFFReport,
"csv": report.PrintCSVReport,
"codeclimate": report.PrintCodeClimateReport,
}
// CustomConsoleWriter creates an output writer used to print logs to a file
func CustomConsoleWriter(fileLogger *zerolog.ConsoleWriter) zerolog.ConsoleWriter {
fileLogger.FormatLevel = func(i interface{}) string {
return strings.ToUpper(fmt.Sprintf("| %-6s|", i))
}
fileLogger.FormatFieldName = func(i interface{}) string {
return fmt.Sprintf("%s:", i)
}
fileLogger.FormatErrFieldName = func(i interface{}) string {
return "ERROR:"
}
fileLogger.FormatFieldValue = func(i interface{}) string {
return fmt.Sprintf("%s", i)
}
return *fileLogger
}
// FileAnalyzer determines the format of the passed config file (json, yaml, toml, or hcl) from its content
func FileAnalyzer(path string) (string, error) {
ostat, err := os.Open(filepath.Clean(path))
if err != nil {
return "", err
}
rc, err := io.ReadAll(ostat)
if err != nil {
return "", err
}
var temp map[string]interface{}
// try JSON
if err := json.Unmarshal(rc, &temp); err == nil {
return "json", nil
}
// try YAML
if err := yaml.Unmarshal(rc, &temp); err == nil {
return "yaml", nil
}
// try TOML
if _, err := toml.Decode(string(rc), &temp); err == nil {
return "toml", nil
}
// try HCL
if c, err := hcl.Parse(string(rc)); err == nil {
if err = hcl.DecodeObject(&temp, c); err == nil {
return "hcl", nil
}
}
return "", errors.New("invalid configuration file format")
}
// GenerateReport executes each report generator for the requested formats
func GenerateReport(path, filename string, body interface{}, formats []string, proBarBuilder progress.PbBuilder) error {
log.Debug().Msg("helpers.GenerateReport()")
metrics.Metric.Start("generate_report")
progressBar := proBarBuilder.BuildCircle("Generating Reports: ")
var err error
go progressBar.Start()
defer progressBar.Close()
for _, format := range formats {
format = strings.ToLower(format)
if err = reportGenerators[format](path, filename, body); err != nil {
log.Error().Msgf("Failed to generate %s report", format)
break
}
}
metrics.Metric.Stop()
return err
}
// GetExecutableDirectory - returns the path to the directory containing the KICS executable
func GetExecutableDirectory() string {
log.Debug().Msg("helpers.GetExecutableDirectory()")
path, err := os.Executable()
if err != nil {
log.Err(err).Msg("failed to get executable path")
}
return filepath.Dir(path)
}
// GetDefaultQueryPath - returns the default query path
func GetDefaultQueryPath(queriesPath string) (string, error) {
log.Debug().Msg("helpers.GetDefaultQueryPath()")
queriesDirectory, err := GetFullPath(queriesPath)
if err != nil {
return "", err
}
log.Debug().Msgf("Queries found in %s", queriesDirectory)
return queriesDirectory, nil
}
// GetFullPath returns the full path of a partial path used for queries or experimental queries JSON path
func GetFullPath(partialPath string) (string, error) {
executableDirPath := GetExecutableDirectory()
fullPath := filepath.Join(executableDirPath, partialPath)
if _, err := os.Stat(fullPath); os.IsNotExist(err) {
currentWorkDir, err := os.Getwd()
if err != nil {
return "", err
}
idx := strings.Index(currentWorkDir, "kics")
if idx != -1 {
currentWorkDir = currentWorkDir[:strings.LastIndex(currentWorkDir, "kics")] + "kics"
}
fullPath = filepath.Join(currentWorkDir, partialPath)
if _, err := os.Stat(fullPath); os.IsNotExist(err) {
return "", err
}
}
return fullPath, nil
}
// ListReportFormats returns a slice with all supported report formats
func ListReportFormats() []string {
supportedFormats := make([]string, 0, len(reportGenerators))
for reportFormats := range reportGenerators {
supportedFormats = append(supportedFormats, reportFormats)
}
sort.Strings(supportedFormats)
return supportedFormats
}
// GetNumCPU returns the number of CPUs available
func GetNumCPU() float32 {
// Check if application is running inside docker
_, err := os.Stat("/.dockerenv")
if err == nil {
numCPU, err := getCPUFromQuotaUS()
if err == nil {
return numCPU
}
numCPU, err = getCPUFromCPUMax()
if err == nil {
return numCPU
}
}
return float32(runtime.NumCPU())
}
func getCPUFromQuotaUS() (float32, error) {
f, err := os.Open("/sys/fs/cgroup/cpu/cpu.cfs_quota_us")
if err != nil {
return -1, err
}
defer func() {
if err := f.Close(); err != nil {
log.Err(err).Msg("failed to close '/sys/fs/cgroup/cpu/cpu.cfs_quota_us'")
}
}()
scanner := bufio.NewScanner(f)
if scanner.Scan() {
text := scanner.Text()
cpus, err := strconv.Atoi(text)
if err != nil {
return float32(cpus) / divisor, err
}
if cpus != -1 {
return float32(cpus) / divisor, nil
}
return float32(runtime.NumCPU()), nil
}
return float32(runtime.NumCPU()), nil
}
func getCPUFromCPUMax() (float32, error) {
f, err := os.Open("/sys/fs/cgroup/cpu.max")
if err != nil {
return -1, err
}
defer func() {
if err := f.Close(); err != nil {
log.Err(err).Msg("failed to close '/sys/fs/cgroup/cpu.max'")
}
}()
scanner := bufio.NewScanner(f)
if scanner.Scan() {
text := scanner.Text()
stringCpus := strings.Split(text, " ")[0]
cpus, err := strconv.Atoi(stringCpus)
if err != nil {
return float32(cpus) / divisor, err
}
if cpus != -1 {
return float32(cpus) / divisor, nil
}
return float32(runtime.NumCPU()), nil
}
return float32(runtime.NumCPU()), nil
}
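// Editor's note (worked example): with divisor = 100000, a container whose
// cpu.cfs_quota_us contains "200000" is reported as 200000/100000 = 2.0
// CPUs, while a quota of -1 (unlimited) falls back to runtime.NumCPU().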
package console
import (
"context"
_ "embed" // Embed kics flags
"os"
"time"
"github.com/Checkmarx/kics/v2/internal/console/flags"
"github.com/Checkmarx/kics/v2/internal/constants"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/printer"
"github.com/getsentry/sentry-go"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
const (
scanID = "console"
)
var (
//go:embed assets/kics-flags.json
kicsFlagsListContent string
// warnings - a buffer to accumulate warnings before the printer gets initialized
warnings = make([]string, 0)
ctx = context.Background()
)
// NewKICSCmd creates a new instance of the kics Command
func NewKICSCmd() *cobra.Command {
return &cobra.Command{
Use: "kics",
Short: constants.Fullname,
}
}
func initialize(rootCmd *cobra.Command) error {
scanCmd := NewScanCmd()
remediateCmd := NewRemediateCmd()
analyzeCmd := NewAnalyzeCmd()
rootCmd.AddCommand(NewVersionCmd())
rootCmd.AddCommand(NewGenerateIDCmd())
rootCmd.AddCommand(scanCmd)
rootCmd.AddCommand(NewListPlatformsCmd())
rootCmd.AddCommand(remediateCmd)
rootCmd.AddCommand(analyzeCmd)
rootCmd.CompletionOptions.DisableDefaultCmd = true
if err := flags.InitJSONFlags(
rootCmd,
kicsFlagsListContent,
true,
source.ListSupportedPlatforms(),
source.ListSupportedCloudProviders()); err != nil {
return err
}
if err := viper.BindPFlags(rootCmd.PersistentFlags()); err != nil {
return err
}
if err := initRemediateCmd(remediateCmd); err != nil {
return err
}
if err := initAnalyzeCmd(analyzeCmd); err != nil {
return err
}
return initScanCmd(scanCmd)
}
// Execute starts kics execution
func Execute() error {
zerolog.SetGlobalLevel(zerolog.InfoLevel)
enableCrashReport()
rootCmd := NewKICSCmd()
if err := initialize(rootCmd); err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: "Failed to initialize CLI",
Err: err,
Location: "func Execute()",
}, true)
return err
}
if err := rootCmd.ExecuteContext(ctx); err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: "Failed to run application",
Err: err,
Location: "func Execute()",
}, printer.IsInitialized())
return err
}
return nil
}
func enableCrashReport() {
disableCrashReport, found := os.LookupEnv("DISABLE_CRASH_REPORT")
if found && (disableCrashReport == "1" || disableCrashReport == "true") {
initSentry("")
} else {
initSentry(constants.SentryDSN)
}
}
func initSentry(dsn string) {
var err error
if dsn == "" {
warnings = append(warnings, "KICS crash report disabled")
err = sentry.Init(sentry.ClientOptions{
Release: constants.GetRelease(),
})
} else {
err = sentry.Init(sentry.ClientOptions{
Dsn: dsn,
Release: constants.GetRelease(),
})
}
if err != nil {
log.Err(err).Msg("Failed to initialize sentry")
}
sentry.Flush(constants.SentryRefreshRate * time.Second)
}
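// Editor's note (usage sketch, matching the environment check above): crash
// reporting can be switched off per run with
//
//	DISABLE_CRASH_REPORT=1 kics scan -p ./project
//
// where the scan arguments are illustrative.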
package console
import (
"fmt"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/spf13/cobra"
)
// NewListPlatformsCmd creates a new instance of the list-platforms Command
func NewListPlatformsCmd() *cobra.Command {
return &cobra.Command{
Use: "list-platforms",
Short: "List supported platforms",
RunE: func(cmd *cobra.Command, args []string) error {
for _, v := range source.ListSupportedPlatforms() {
fmt.Fprintf(cmd.OutOrStdout(), "%s\n", v)
}
return nil
},
}
}
package console
import (
_ "embed" // Embed kics CLI img and scan-flags
"fmt"
"os"
"path/filepath"
"runtime"
"strings"
"code.cloudfoundry.org/bytefmt"
"github.com/Checkmarx/kics/v2/internal/console/flags"
consoleHelpers "github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/internal/metrics"
internalPrinter "github.com/Checkmarx/kics/v2/pkg/printer"
"github.com/Checkmarx/kics/v2/pkg/progress"
"github.com/mackerelio/go-osstat/memory"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
"github.com/spf13/viper"
)
func preRun(cmd *cobra.Command) error {
err := initializeConfig(cmd)
if err != nil {
return errors.New(initError + err.Error())
}
err = flags.Validate()
if err != nil {
return err
}
err = flags.ValidateQuerySelectionFlags()
if err != nil {
return err
}
err = flags.ValidateTypeSelectionFlags()
if err != nil {
return err
}
err = internalPrinter.SetupPrinter(cmd.InheritedFlags())
if err != nil {
return errors.New(initError + err.Error())
}
err = metrics.InitializeMetrics(flags.GetStrFlag(flags.ProfilingFlag), flags.GetBoolFlag(flags.CIFlag))
if err != nil {
return errors.New(initError + err.Error())
}
return nil
}
func setupConfigFile() (bool, error) {
if flags.GetStrFlag(flags.ConfigFlag) == "" {
path := flags.GetMultiStrFlag(flags.PathFlag)
if len(path) == 0 {
return true, nil
}
if len(path) > 1 {
warnings = append(warnings, "Any kics.config file will be ignored, please use --config if kics.config is wanted")
return true, nil
}
configPath := path[0]
info, err := os.Stat(configPath)
if err != nil {
return true, nil
}
if !info.IsDir() {
configPath = filepath.Dir(configPath)
}
_, err = os.Stat(filepath.ToSlash(filepath.Join(configPath, constants.DefaultConfigFilename)))
if err != nil {
if os.IsNotExist(err) {
return true, nil
}
return true, err
}
flags.SetStrFlag(flags.ConfigFlag, filepath.ToSlash(filepath.Join(configPath, constants.DefaultConfigFilename)))
}
return false, nil
}
func initializeConfig(cmd *cobra.Command) error {
log.Debug().Msg("console.initializeConfig()")
v := viper.New()
v.SetEnvPrefix("KICS")
v.AutomaticEnv()
errBind := flags.BindFlags(cmd, v)
if errBind != nil {
return errBind
}
exit, err := setupConfigFile()
if err != nil {
return err
}
if exit {
return nil
}
base := filepath.Base(flags.GetStrFlag(flags.ConfigFlag))
v.SetConfigName(base)
v.AddConfigPath(filepath.Dir(flags.GetStrFlag(flags.ConfigFlag)))
ext, err := consoleHelpers.FileAnalyzer(flags.GetStrFlag(flags.ConfigFlag))
if err != nil {
return err
}
v.SetConfigType(ext)
if err := v.ReadInConfig(); err != nil {
return err
}
errBind = flags.BindFlags(cmd, v)
if errBind != nil {
return errBind
}
return nil
}
type console struct {
Printer *internalPrinter.Printer
ProBarBuilder *progress.PbBuilder
}
func newConsole() *console {
return &console{}
}
// preScan is responsible for scan preparation
func (console *console) preScan() {
log.Debug().Msg("console.preScan()")
for _, warn := range warnings {
log.Warn().Msgf("%s", warn)
}
printer := internalPrinter.NewPrinter(flags.GetBoolFlag(flags.MinimalUIFlag))
printer.Success.Printf("\n%s\n", banner)
versionMsg := fmt.Sprintf("\nScanning with %s\n\n", constants.GetVersion())
fmt.Println(versionMsg)
log.Info().Msgf("%s", strings.ReplaceAll(versionMsg, "\n", ""))
log.Info().Msgf("Operating system: %s", runtime.GOOS)
mem, err := memory.Get()
if err != nil {
log.Err(err).Msg("failed to get memory info")
} else {
log.Info().Msgf("Total memory: %s", bytefmt.ByteSize(mem.Total))
}
cpu := consoleHelpers.GetNumCPU()
log.Info().Msgf("CPU: %.1f", cpu)
log.Info().Msgf("Max file size permitted for scanning: %d MB", flags.GetIntFlag(flags.MaxFileSizeFlag))
log.Info().Msgf("Max resolver depth permitted for resolving files: %d", flags.GetIntFlag(flags.MaxResolverDepth))
noProgress := flags.GetBoolFlag(flags.NoProgressFlag)
if strings.EqualFold(flags.GetStrFlag(flags.LogLevelFlag), "debug") {
noProgress = true
}
proBarBuilder := progress.InitializePbBuilder(
noProgress,
flags.GetBoolFlag(flags.CIFlag),
flags.GetBoolFlag(flags.SilentFlag))
console.Printer = printer
console.ProBarBuilder = proBarBuilder
}
package console
import (
_ "embed" // Embed remediate flags
"encoding/json"
"fmt"
"os"
"path/filepath"
"github.com/Checkmarx/kics/v2/internal/console/flags"
consoleHelpers "github.com/Checkmarx/kics/v2/internal/console/helpers"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
internalPrinter "github.com/Checkmarx/kics/v2/pkg/printer"
"github.com/Checkmarx/kics/v2/pkg/remediation"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
var (
//go:embed assets/remediate-flags.json
remediateFlagsListContent string
)
// NewRemediateCmd creates a new instance of the remediate Command
func NewRemediateCmd() *cobra.Command {
return &cobra.Command{
Use: "remediate",
Short: "Auto remediates the project",
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
return preRemediate(cmd)
},
RunE: func(cmd *cobra.Command, args []string) error {
return remediate()
},
}
}
func initRemediateCmd(remediateCmd *cobra.Command) error {
if err := flags.InitJSONFlags(
remediateCmd,
remediateFlagsListContent,
false,
source.ListSupportedPlatforms(),
source.ListSupportedCloudProviders()); err != nil {
return err
}
if err := remediateCmd.MarkFlagRequired(flags.Results); err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: "Failed to add command required flags",
Err: err,
Location: "func initScanCmd()",
}, true)
log.Err(err).Msg("Failed to add command required flags")
}
return nil
}
func preRemediate(cmd *cobra.Command) error {
err := flags.Validate()
if err != nil {
return err
}
err = flags.ValidateQuerySelectionFlags()
if err != nil {
return err
}
err = internalPrinter.SetupPrinter(cmd.InheritedFlags())
if err != nil {
return errors.New(initError + err.Error())
}
return err
}
func remediate() error {
resultsPath := flags.GetStrFlag(flags.Results)
include := flags.GetMultiStrFlag(flags.IncludeIds)
openAPIResolveReferences := flags.GetBoolFlag(flags.OpenAPIReferencesFlag)
maxResolverDepth := flags.GetIntFlag(flags.MaxResolverDepth)
resultsPath = filepath.Clean(resultsPath)
content, err := os.ReadFile(resultsPath)
if err != nil {
log.Error().Msgf("failed to read file: %s", err)
return err
}
results := remediation.Report{}
err = json.Unmarshal(content, &results)
if err != nil {
log.Error().Msgf("failed to unmarshal file: %s", err)
return err
}
summary := &remediation.Summary{
SelectedRemediationNumber: 0,
ActualRemediationDoneNumber: 0,
}
// get all the remediationSets related to each filePath
remediationSets := summary.GetRemediationSets(results, include)
for filePath := range remediationSets {
fix := remediationSets[filePath].(remediation.Set)
err = summary.RemediateFile(filePath, fix, openAPIResolveReferences, maxResolverDepth)
if err != nil {
return err
}
}
fmt.Printf("\nSelected remediation: %d\n", summary.SelectedRemediationNumber)
fmt.Printf("Remediation done: %d\n", summary.ActualRemediationDoneNumber)
exitCode := consoleHelpers.RemediateExitCode(summary.SelectedRemediationNumber, summary.ActualRemediationDoneNumber)
if exitCode != 0 {
os.Exit(exitCode)
}
return nil
}
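// Editor's note (hedged usage sketch): assuming the Results and IncludeIds
// flag constants map to --results and --include-ids on the CLI, a typical
// remediation run looks like
//
//	kics remediate --results ./output/results.json --include-ids <similarity-id>
//
// with the paths being illustrative.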
package console
import (
_ "embed" // Embed kics CLI img and scan-flags
"os"
"os/signal"
"path/filepath"
"strings"
"syscall"
"github.com/Checkmarx/kics/v2/internal/console/flags"
consoleHelpers "github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/internal/constants"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/scan"
"github.com/rs/zerolog/log"
"github.com/spf13/cobra"
)
var (
//go:embed assets/kics-console
banner string
//go:embed assets/scan-flags.json
scanFlagsListContent string
)
const (
scanCommandStr = "scan"
initError = "initialization error - "
)
// NewScanCmd creates a new instance of the scan Command
func NewScanCmd() *cobra.Command {
return &cobra.Command{
Use: scanCommandStr,
Short: "Executes a scan analysis",
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
return preRun(cmd)
},
RunE: func(cmd *cobra.Command, args []string) error {
return run(cmd)
},
}
}
func initScanCmd(scanCmd *cobra.Command) error {
if err := flags.InitJSONFlags(
scanCmd,
scanFlagsListContent,
false,
source.ListSupportedPlatforms(),
source.ListSupportedCloudProviders()); err != nil {
return err
}
if err := scanCmd.MarkFlagRequired(flags.PathFlag); err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: "Failed to add command required flags",
Err: err,
Location: "func initScanCmd()",
}, true)
log.Err(err).Msg("Failed to add command required flags")
}
return nil
}
func run(cmd *cobra.Command) error {
changedDefaultQueryPath := cmd.Flags().Lookup(flags.QueriesPath).Changed
changedDefaultLibrariesPath := cmd.Flags().Lookup(flags.LibrariesPath).Changed
if err := consoleHelpers.InitShouldIgnoreArg(flags.GetStrFlag(flags.IgnoreOnExitFlag)); err != nil {
return err
}
if err := consoleHelpers.InitShouldFailArg(flags.GetMultiStrFlag(flags.FailOnFlag)); err != nil {
return err
}
if flags.GetStrFlag(flags.OutputPathFlag) != "" {
updateReportFormats()
flags.SetStrFlag(flags.OutputNameFlag, filepath.Base(flags.GetStrFlag(flags.OutputNameFlag)))
if filepath.Ext(flags.GetStrFlag(flags.OutputPathFlag)) != "" {
flags.SetStrFlag(flags.OutputPathFlag, filepath.Join(flags.GetStrFlag(flags.OutputPathFlag), string(os.PathSeparator)))
}
if err := os.MkdirAll(flags.GetStrFlag(flags.OutputPathFlag), os.ModePerm); err != nil {
return err
}
}
if flags.GetStrFlag(flags.PayloadPathFlag) != "" && filepath.Dir(flags.GetStrFlag(flags.PayloadPathFlag)) != "." {
if err := os.MkdirAll(filepath.Dir(flags.GetStrFlag(flags.PayloadPathFlag)), os.ModePerm); err != nil {
return err
}
}
gracefulShutdown()
// save the scan parameters into the scan.Parameters struct
scanParams := getScanParameters(changedDefaultQueryPath, changedDefaultLibrariesPath)
return executeScan(scanParams)
}
func updateReportFormats() {
for _, format := range flags.GetMultiStrFlag(flags.ReportFormatsFlag) {
if strings.EqualFold(format, "all") {
flags.SetMultiStrFlag(flags.ReportFormatsFlag, consoleHelpers.ListReportFormats())
break
}
}
}
func getScanParameters(changedDefaultQueryPath, changedDefaultLibrariesPath bool) *scan.Parameters {
scanParams := scan.Parameters{
CloudProvider: flags.GetMultiStrFlag(flags.CloudProviderFlag),
DisableFullDesc: flags.GetBoolFlag(flags.DisableFullDescFlag),
ExcludeCategories: flags.GetMultiStrFlag(flags.ExcludeCategoriesFlag),
ExcludePaths: flags.GetMultiStrFlag(flags.ExcludePathsFlag),
ExcludeQueries: flags.GetMultiStrFlag(flags.ExcludeQueriesFlag),
ExcludeResults: flags.GetMultiStrFlag(flags.ExcludeResultsFlag),
ExcludeSeverities: flags.GetMultiStrFlag(flags.ExcludeSeveritiesFlag),
ExperimentalQueries: flags.GetBoolFlag(flags.ExperimentalQueriesFlag),
IncludeQueries: flags.GetMultiStrFlag(flags.IncludeQueriesFlag),
InputData: flags.GetStrFlag(flags.InputDataFlag),
OutputName: flags.GetStrFlag(flags.OutputNameFlag),
OutputPath: flags.GetStrFlag(flags.OutputPathFlag),
Path: flags.GetMultiStrFlag(flags.PathFlag),
PayloadPath: flags.GetStrFlag(flags.PayloadPathFlag),
PreviewLines: flags.GetIntFlag(flags.PreviewLinesFlag),
QueriesPath: flags.GetMultiStrFlag(flags.QueriesPath),
LibrariesPath: flags.GetStrFlag(flags.LibrariesPath),
ReportFormats: flags.GetMultiStrFlag(flags.ReportFormatsFlag),
Platform: flags.GetMultiStrFlag(flags.TypeFlag),
ExcludePlatform: flags.GetMultiStrFlag(flags.ExcludeTypeFlag),
TerraformVarsPath: flags.GetStrFlag(flags.TerraformVarsPathFlag),
QueryExecTimeout: flags.GetIntFlag(flags.QueryExecTimeoutFlag),
LineInfoPayload: flags.GetBoolFlag(flags.LineInfoPayloadFlag),
DisableSecrets: flags.GetBoolFlag(flags.DisableSecretsFlag),
SecretsRegexesPath: flags.GetStrFlag(flags.SecretsRegexesPathFlag),
ScanID: scanID,
ChangedDefaultLibrariesPath: changedDefaultLibrariesPath,
ChangedDefaultQueryPath: changedDefaultQueryPath,
BillOfMaterials: flags.GetBoolFlag(flags.BomFlag),
ExcludeGitIgnore: flags.GetBoolFlag(flags.ExcludeGitIgnore),
OpenAPIResolveReferences: flags.GetBoolFlag(flags.OpenAPIReferencesFlag),
ParallelScanFlag: flags.GetIntFlag(flags.ParallelScanFile),
MaxFileSizeFlag: flags.GetIntFlag(flags.MaxFileSizeFlag),
UseOldSeverities: flags.GetBoolFlag(flags.UseOldSeveritiesFlag),
MaxResolverDepth: flags.GetIntFlag(flags.MaxResolverDepth),
KicsComputeNewSimID: flags.GetBoolFlag(flags.KicsComputeNewSimIDFlag),
}
return &scanParams
}
func executeScan(scanParams *scan.Parameters) error {
log.Debug().Msg("console.scan()")
console := newConsole()
console.preScan()
client, err := scan.NewClient(scanParams, console.ProBarBuilder, console.Printer)
if err != nil {
log.Err(err).Msg("failed to create scan client")
return err
}
err = client.PerformScan(ctx)
if err != nil {
log.Err(err).Msg("failed to perform scan")
return err
}
return nil
}
// gracefulShutdown catches interrupt signals and exits with the appropriate exit code
func gracefulShutdown() {
// use a buffered channel of size 1, as recommended by the signal.Notify documentation
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt, syscall.SIGTERM)
showErrors := consoleHelpers.ShowError("errors")
interruptCode := constants.SignalInterruptCode
go func(showErrors bool, interruptCode int) {
<-c
if showErrors {
os.Exit(interruptCode)
}
}(showErrors, interruptCode)
}
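// Editor's note (hedged usage sketch): assuming PathFlag, OutputPathFlag and
// ReportFormatsFlag map to --path, --output-path and --report-formats on the
// CLI, a typical scan reads
//
//	kics scan --path ./project --output-path ./output --report-formats json,sarif
//
// which exercises the output-path and report-format handling in run above.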
package console
import (
"fmt"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/spf13/cobra"
)
// NewVersionCmd creates a new instance of the version Command
func NewVersionCmd() *cobra.Command {
return &cobra.Command{
Use: "version",
Short: "Displays the current version",
RunE: func(cmd *cobra.Command, args []string) error {
fmt.Fprintf(cmd.OutOrStdout(), "%s\n", constants.GetVersion())
return nil
},
}
}
package constants
import (
"fmt"
"math"
"os"
"path/filepath"
)
var (
// Version - current KICS version
Version = "development"
// SCMCommit - Source control management commit identifier
SCMCommit = "NOCOMMIT"
// SentryDSN - sentry DSN, unset for disabling
SentryDSN = ""
// BaseURL - Descriptions endpoint URL
BaseURL = ""
// APIScanner - API scanner feature switch
APIScanner = ""
// AvailableCategories - All categories and its identifies
AvailableCategories = map[string]string{
"Access Control": "CAT001",
"Availability": "CAT002",
"Backup": "CAT003",
"Best Practices": "CAT004",
"Build Process": "CAT005",
"Encryption": "CAT006",
"Insecure Configurations": "CAT007",
"Insecure Defaults": "CAT008",
"Networking and Firewall": "CAT009",
"Observability": "CAT010",
"Resource Management": "CAT011",
"Secret Management": "CAT012",
"Supply-Chain": "CAT013",
"Structure and Semantics": "CAT014",
"Bill Of Materials": "CAT015",
}
// AvailablePlatforms - All platforms available
AvailablePlatforms = map[string]string{
"Ansible": "ansible",
"CICD": "cicd",
"CloudFormation": "cloudFormation",
"Crossplane": "crossplane",
"Dockerfile": "dockerfile",
"DockerCompose": "dockerCompose",
"Knative": "knative",
"Kubernetes": "k8s",
"OpenAPI": "openAPI",
"Terraform": "terraform",
"AzureResourceManager": "azureResourceManager",
"Bicep": "bicep",
"GoogleDeploymentManager": "googleDeploymentManager",
"GRPC": "grpc",
"Buildah": "buildah",
"Pulumi": "pulumi",
"ServerlessFW": "serverlessFW",
}
// AvailableSeverities - All severities available
AvailableSeverities = []string{
"critical",
"high",
"medium",
"low",
"info",
"trace",
}
// AvailableLogLevels - All log levels available
AvailableLogLevels = []string{
"TRACE",
"DEBUG",
"INFO",
"WARN",
"ERROR",
"FATAL",
}
// AvailableCloudProviders - All cloud providers available
AvailableCloudProviders = map[string]string{
"alicloud": "",
"aws": "",
"azure": "",
"gcp": "",
"nifcloud": "",
"tencentcloud": "",
}
)
const (
// Fullname - KICS fullname
Fullname = "Keeping Infrastructure as Code Secure"
// URL - KICS url
URL = "https://www.kics.io/"
// DefaultLogFile - logfile name
DefaultLogFile = "info.log"
// DefaultConfigFilename - default configuration filename
DefaultConfigFilename = "kics.config"
// MinimumPreviewLines - default minimum preview lines number
MinimumPreviewLines = 1
// MaximumPreviewLines - default maximum preview lines number
MaximumPreviewLines = 30
// EngineErrorCode - Exit Status code for error in engine
EngineErrorCode = 126
// SignalInterruptCode - Exit Status code for a signal interrupt
SignalInterruptCode = 130
// MaxInteger - max possible integer in golang
MaxInteger = math.MaxInt64
// SentryRefreshRate - sentry crash report refresh rate
SentryRefreshRate = 2
// LogFormatJSON - print log as json
LogFormatJSON = "json"
// LogFormatPretty - print log more readable
LogFormatPretty = "pretty"
)
// GetRelease - returns the current release in the format 'kics@version' to be used by sentry
func GetRelease() string {
return fmt.Sprintf("kics@%s", Version)
}
// GetVersion - returns the current version in the format 'Keeping Infrastructure as Code Secure <version>'
func GetVersion() string {
return fmt.Sprintf("%s %s", Fullname, Version)
}
// GetDefaultLogPath - returns the path where the default log file is located
func GetDefaultLogPath() (string, error) {
currentWorkDir, err := os.Getwd()
if err != nil {
return "", err
}
return filepath.Join(currentWorkDir, DefaultLogFile), nil
}
package metrics
import (
"bytes"
"runtime/pprof"
"time"
"github.com/rs/zerolog/log"
)
type cpuMetric struct {
close func()
writer *bytes.Buffer
idx int
typeMap map[string]float64
}
var cpuMap = map[string]float64{
"ns": float64(time.Nanosecond),
"us": float64(time.Microsecond),
"ms": float64(time.Millisecond),
"s": float64(time.Second),
"hrs": float64(time.Hour),
}
func (c *cpuMetric) getDefault() string {
return "ms"
}
// start - starts gathering metrics for CPU usage
func (c *cpuMetric) start() {
c.idx = 1
c.typeMap = cpuMap
c.writer = bytes.NewBuffer([]byte{})
if err := pprof.StartCPUProfile(c.writer); err != nil {
log.Error().Msgf("failed to write cpu profile")
}
c.close = func() {
pprof.StopCPUProfile()
}
}
// stop - stops gathering metrics for CPU usage
func (c *cpuMetric) stop() {
c.close()
}
// getWriter returns the profile buffer
func (c *cpuMetric) getWriter() *bytes.Buffer {
return c.writer
}
// getIndex returns the cpu sample index
func (c *cpuMetric) getIndex() int {
return c.idx
}
// getMap returns the map used to format total value
func (c *cpuMetric) getMap() map[string]float64 {
return c.typeMap
}
package metrics
import (
"bytes"
"runtime"
"runtime/pprof"
"github.com/rs/zerolog/log"
)
type memMetric struct {
close func()
writer *bytes.Buffer
idx int
typeMap map[string]float64
}
var (
b = 1
kb = 10
mb = 20
gb = 30
tb = 40
pb = 50
)
var memoryMap = map[string]float64{
"B": float64(b),
"kB": float64(b << kb),
"MB": float64(b << mb),
"GB": float64(b << gb),
"TB": float64(b << tb),
"PB": float64(b << pb),
}
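// Editor's note (worked example): the shifts above are powers of two, so
// "kB" maps to 1<<10 = 1024 bytes and "MB" to 1<<20 = 1048576 bytes; the
// metrics formatter divides a raw byte total by the largest unit that keeps
// the value at or above 1.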
// start - starts gathering metrics for memory usage
func (c *memMetric) start() {
c.idx = 3
c.typeMap = memoryMap
old := runtime.MemProfileRate
runtime.MemProfileRate = 4096 // sample every 4096 bytes allocated for a finer-grained profile
c.writer = bytes.NewBuffer([]byte{})
c.close = func() {
if err := pprof.Lookup("heap").WriteTo(c.writer, 0); err != nil {
log.Error().Msgf("failed to write mem profile")
}
runtime.MemProfileRate = old
}
}
func (c *memMetric) getDefault() string {
return "B"
}
// stop - stops gathering metrics for memory usage
func (c *memMetric) stop() {
c.close()
}
// getWriter returns the profile buffer
func (c *memMetric) getWriter() *bytes.Buffer {
return c.writer
}
// getIndex returns the memory sample index
func (c *memMetric) getIndex() int {
return c.idx
}
// getMap returns the map used to format total value
func (c *memMetric) getMap() map[string]float64 {
return c.typeMap
}
package metrics
import (
"bytes"
"fmt"
"math"
"strings"
"github.com/google/pprof/profile"
"github.com/rs/zerolog/log"
)
var (
// Metric is the global metrics object
Metric = &Metrics{
Disable: true,
}
)
// metricType abstracts a profiler: start begins gathering metrics for the
// selected type, stop finishes gathering, and the getters expose the collected profile
type metricType interface {
start()
stop()
getWriter() *bytes.Buffer
getIndex() int
getMap() map[string]float64
getDefault() string
}
// Metrics - structure to keep information relevant to the metrics calculation
// Disable - disables metric calculations
type Metrics struct {
metric metricType
metricsID string
location string
Disable bool
total int64
ci bool
}
// InitializeMetrics - configures the global Metric object based on the metric type specified
func InitializeMetrics(metric string, ci bool) error {
var err error
switch strings.ToLower(metric) {
case "cpu":
Metric.Disable = false
Metric.metric = &cpuMetric{}
Metric.total = 0
case "mem":
Metric.total = 0
Metric.metric = &memMetric{}
Metric.Disable = false
case "":
Metric.total = 0
Metric.Disable = true
default:
Metric.total = 0
Metric.Disable = true
err = fmt.Errorf("unknown metric: %s (available metrics: CPU, MEM)", metric)
}
// record the metric identifier and CI mode when metrics are enabled
if !Metric.Disable {
Metric.metricsID = metric
Metric.ci = ci
}
return err
}
// Start - starts gathering metrics for the location specified
func (m *Metrics) Start(location string) {
if m.Disable {
return
}
log.Debug().Msgf("Started %s profiling for %s", m.metricsID, location)
m.location = location
m.metric.start()
}
// Stop - stops gathering metrics and logs the result
func (m *Metrics) Stop() {
if m.Disable {
return
}
log.Debug().Msgf("Stopped %s profiling for %s", m.metricsID, m.location)
m.metric.stop()
p, err := profile.Parse(m.metric.getWriter())
if err != nil {
log.Error().Msgf("failed to parse profile on %s: %s", m.location, err)
}
if err := p.CheckValid(); err != nil {
log.Error().Msgf("invalid profile on %s: %s", m.location, err)
}
total := getTotal(p, m.metric.getIndex())
log.Info().
Msgf("Total %s usage for %s: %s", strings.ToUpper(m.metricsID),
m.location, m.formatTotal(total, m.metric.getMap(), m.metric.getDefault()))
m.total = total
}
// getTotal goes through the profile samples summing their values according to
// the type of profile
func getTotal(prof *profile.Profile, idx int) int64 {
var total, diffTotal int64
for _, sample := range prof.Sample {
v := sample.Value[idx]
if v < 0 {
v = -v
}
total += v
if sample.DiffBaseSample() {
diffTotal += v
}
}
if diffTotal > 0 {
total = diffTotal
}
return total
}
// formatTotal formats the total value in a human-readable way
func (m *Metrics) formatTotal(b int64, typeMap map[string]float64, defaultMetric string) string {
value := float64(b)
var formatter float64
var measure string
if m.ci {
metric := value / typeMap[defaultMetric]
if math.IsNaN(metric) {
metric = 0
}
return fmt.Sprintf("%.f%s", metric, defaultMetric)
}
for k, u := range typeMap {
if u >= formatter && (value/u) >= 1.0 {
formatter = u
measure = k
}
}
metric := value / formatter
if math.IsNaN(metric) {
metric = 0
}
return fmt.Sprintf("%.2f%s", metric, measure)
}
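// Editor's note: a minimal sketch of profiling a section of work with the
// global Metric object, mirroring how GenerateReport uses it above; the
// location label and workload are hypothetical.
//
//	if err := metrics.InitializeMetrics("mem", false); err != nil {
//		log.Err(err).Msg("metrics disabled")
//	}
//	metrics.Metric.Start("expensive_step")
//	doExpensiveWork() // hypothetical workload
//	metrics.Metric.Stop()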
package sentry
import (
"encoding/json"
"github.com/Checkmarx/kics/v2/internal/console/flags"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/getsentry/sentry-go"
"github.com/rs/zerolog/log"
)
// Report is the struct containing necessary information to send to sentry
type Report struct {
Location string `json:"location"`
Flags map[string]interface{} `json:"flags"`
FileName string `json:"file_name"`
Query string `json:"query_name"`
Platform string `json:"platform"`
Kind model.FileKind `json:"kind"`
Metadata map[string]interface{} `json:"query_metadata"`
Message string `json:"message"`
Err error `json:"error"`
AdditionalValues map[string]interface{} `json:"additional_values"`
}
// ReportSentry sends a new issue with the necessary information to Sentry
// and logs it when shouldLog is true
func ReportSentry(report *Report, shouldLog bool) {
sentry.WithScope(func(scope *sentry.Scope) {
report.Flags = flags.GetAllFlags()
value := make(map[string]interface{})
value["report"] = report
scope.SetContext("Issue", value)
sentry.CaptureException(report.Err)
})
if shouldLog {
log.Err(report.Err).Msgf("%s", report.Message)
log.Debug().Msgf("Error Report: \n%+v\n", report.string())
}
}
func (r *Report) string() string {
stringified, err := json.MarshalIndent(&r, "", " ")
if err != nil {
log.Err(err).Msg("Failed to marshal sentry report")
}
return string(stringified)
}
package storage
import (
"context"
"fmt"
"sync"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog/log"
)
var (
memoryMu sync.Mutex
)
// MemoryStorage is the in-memory representation of scan results
type MemoryStorage struct {
vulnerabilities []model.Vulnerability
allFiles model.FileMetadatas
}
// SaveFile adds a new file metadata to files collection
func (m *MemoryStorage) SaveFile(_ context.Context, metadata *model.FileMetadata) error {
m.allFiles = append(m.allFiles, *metadata)
return nil
}
// GetFiles returns a collection of files saved on MemoryStorage
func (m *MemoryStorage) GetFiles(_ context.Context, _ string) (model.FileMetadatas, error) {
return m.allFiles, nil
}
// SaveVulnerabilities adds a list of vulnerabilities to vulnerabilities collection
func (m *MemoryStorage) SaveVulnerabilities(_ context.Context, vulnerabilities []model.Vulnerability) error {
memoryMu.Lock()
defer memoryMu.Unlock()
m.vulnerabilities = append(m.vulnerabilities, vulnerabilities...)
return nil
}
// GetVulnerabilities returns a collection of vulnerabilities saved on MemoryStorage
func (m *MemoryStorage) GetVulnerabilities(_ context.Context, _ string) ([]model.Vulnerability, error) {
return m.getUniqueVulnerabilities(), nil
}
func (m *MemoryStorage) getUniqueVulnerabilities() []model.Vulnerability {
vulnDictionary := make(map[string]model.Vulnerability)
for i := range m.vulnerabilities {
key := fmt.Sprintf("%s:%s:%d:%s:%s:%s",
m.vulnerabilities[i].QueryID,
m.vulnerabilities[i].FileName,
m.vulnerabilities[i].Line,
m.vulnerabilities[i].SimilarityID,
m.vulnerabilities[i].SearchKey,
m.vulnerabilities[i].KeyActualValue,
)
vulnDictionary[key] = m.vulnerabilities[i]
}
var uniqueVulnerabilities []model.Vulnerability
for key := range vulnDictionary {
uniqueVulnerabilities = append(uniqueVulnerabilities, vulnDictionary[key])
}
if len(uniqueVulnerabilities) == 0 {
return m.vulnerabilities
}
return uniqueVulnerabilities
}
// GetScanSummary is not supported by MemoryStorage
func (m *MemoryStorage) GetScanSummary(_ context.Context, _ []string) ([]model.SeveritySummary, error) {
return nil, nil
}
// NewMemoryStorage creates a new MemoryStorage empty and returns it
func NewMemoryStorage() *MemoryStorage {
log.Debug().Msg("storage.NewMemoryStorage()")
return &MemoryStorage{
allFiles: make(model.FileMetadatas, 0),
vulnerabilities: make([]model.Vulnerability, 0),
}
}
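// Hypothetical usage sketch: saving the same vulnerability twice and reading
// it back deduplicated through GetVulnerabilities (the scan ID argument is
// ignored by MemoryStorage).
func exampleMemoryStorage(ctx context.Context, v model.Vulnerability) {
st := NewMemoryStorage()
_ = st.SaveVulnerabilities(ctx, []model.Vulnerability{v, v})
unique, _ := st.GetVulnerabilities(ctx, "scanID")
log.Debug().Msgf("stored %d unique vulnerabilities", len(unique))
}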
package tracker
import (
"fmt"
"sync"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
)
var (
trackerMu sync.Mutex
)
// CITracker contains information on how many queries were loaded and executed
// and how many files were found and parsed
type CITracker struct {
ExecutingQueries int
ExecutedQueries int
FoundFiles int
FailedSimilarityID int
FailedOldSimilarityID int
LoadedQueries int
ParsedFiles int
ScanSecrets int
ScanPaths int
lines int
FoundCountLines int
ParsedCountLines int
IgnoreCountLines int
Version model.Version
BagOfFilesParse map[string]int
BagOfFilesFound map[string]int
syncFileMutex sync.Mutex
}
// NewTracker creates a new tracker instance with the number of lines to display in the results output;
// the number of lines must be between constants.MinimumPreviewLines and constants.MaximumPreviewLines
func NewTracker(previewLines int) (*CITracker, error) {
if previewLines < constants.MinimumPreviewLines || previewLines > constants.MaximumPreviewLines {
return &CITracker{},
fmt.Errorf("output lines minimum is %v and maximum is %v", constants.MinimumPreviewLines, constants.MaximumPreviewLines)
}
return &CITracker{
lines: previewLines,
BagOfFilesParse: make(map[string]int),
BagOfFilesFound: make(map[string]int),
}, nil
}
// GetOutputLines returns the number of lines to display in results output
func (c *CITracker) GetOutputLines() int {
return c.lines
}
// TrackQueryLoad adds a loaded query
func (c *CITracker) TrackQueryLoad(queryAggregation int) {
c.LoadedQueries += queryAggregation
}
// TrackQueryExecuting adds to the count of executing queries
func (c *CITracker) TrackQueryExecuting(queryAggregation int) {
c.ExecutingQueries += queryAggregation
}
// TrackQueryExecution adds to the count of executed queries
func (c *CITracker) TrackQueryExecution(queryAggregation int) {
trackerMu.Lock()
defer trackerMu.Unlock()
c.ExecutedQueries += queryAggregation
}
// TrackFileFound adds a found file to be scanned
func (c *CITracker) TrackFileFound(path string) {
c.syncFileMutex.Lock()
defer c.syncFileMutex.Unlock()
count, ok := c.BagOfFilesFound[path]
if !ok {
c.BagOfFilesFound[path] = 1
c.FoundFiles++
} else {
c.BagOfFilesFound[path] = count + 1
}
}
// TrackFileParse adds a successfully parsed file to be scanned
func (c *CITracker) TrackFileParse(path string) {
c.syncFileMutex.Lock()
defer c.syncFileMutex.Unlock()
count, ok := c.BagOfFilesParse[path]
if !ok {
c.BagOfFilesParse[path] = 1
c.ParsedFiles++
} else {
c.BagOfFilesParse[path] = count + 1
}
}
// FailedDetectLine - queries that fail to detect the line are discounted from the executed queries count
func (c *CITracker) FailedDetectLine() {
c.ExecutedQueries--
}
// FailedComputeSimilarityID - queries that failed to compute similarity ID
func (c *CITracker) FailedComputeSimilarityID() {
c.FailedSimilarityID++
}
// FailedComputeOldSimilarityID - queries that failed to compute old similarity ID
func (c *CITracker) FailedComputeOldSimilarityID() {
c.FailedOldSimilarityID++
}
// TrackScanSecret - add to secrets scanned
func (c *CITracker) TrackScanSecret() {
c.ScanSecrets++
}
// TrackScanPath - adds a path on which to perform the scan
func (c *CITracker) TrackScanPath() {
c.ScanPaths++
}
// TrackVersion - information if current version is latest
func (c *CITracker) TrackVersion(retrievedVersion model.Version) {
c.Version = retrievedVersion
}
// TrackFileFoundCountLines - information about the lines of the scanned files
func (c *CITracker) TrackFileFoundCountLines(countLines int) {
c.FoundCountLines += countLines
}
// TrackFileParseCountLines - information about the lines of the parsed files
func (c *CITracker) TrackFileParseCountLines(countLines int) {
c.ParsedCountLines += countLines
}
// TrackFileIgnoreCountLines - information about the lines ignored of the parsed files
func (c *CITracker) TrackFileIgnoreCountLines(countLines int) {
c.IgnoreCountLines += countLines
}
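// Hypothetical usage sketch: creating a tracker with a 3-line preview
// (assuming 3 sits within the configured minimum/maximum bounds) and
// recording one found and one parsed file.
func exampleTracker() {
t, err := NewTracker(3)
if err != nil {
return
}
t.TrackFileFound("main.tf")
t.TrackFileParse("main.tf")
_ = t.GetOutputLines() // 3
}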
package analyzer
import (
"fmt"
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"sync"
"github.com/Checkmarx/kics/v2/internal/metrics"
"github.com/Checkmarx/kics/v2/pkg/engine/provider"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
ignore "github.com/sabhiram/go-gitignore"
yamlParser "gopkg.in/yaml.v3"
)
// The OpenAPI regexes are exported so they can also be used on file.go
// openAPIRegex - Regex that finds OpenAPI defining property "openapi" or "swagger"
// openAPIRegexInfo - Regex that finds OpenAPI defining property "info"
// openAPIRegexPath - Regex that finds OpenAPI defining property "paths", "components", or "webhooks" (from 3.1.0)
// cloudRegex - Regex that finds CloudFormation defining property "Resources"
// k8sRegex - Regex that finds Kubernetes defining property "apiVersion"
// k8sRegexKind - Regex that finds Kubernetes defining property "kind"
// k8sRegexMetadata - Regex that finds Kubernetes defining property "metadata"
// k8sRegexSpec - Regex that finds Kubernetes defining property "spec"
var (
OpenAPIRegex = regexp.MustCompile(`("(openapi|swagger)"|(openapi|swagger))\s*:`)
OpenAPIRegexInfo = regexp.MustCompile(`("info"|info)\s*:`)
OpenAPIRegexPath = regexp.MustCompile(`("(paths|components|webhooks)"|(paths|components|webhooks))\s*:`)
armRegexContentVersion = regexp.MustCompile(`"contentVersion"\s*:`)
armRegexResources = regexp.MustCompile(`"resources"\s*:`)
cloudRegex = regexp.MustCompile(`("Resources"|Resources)\s*:`)
k8sRegex = regexp.MustCompile(`("apiVersion"|apiVersion)\s*:`)
k8sRegexKind = regexp.MustCompile(`("kind"|kind)\s*:`)
tfPlanRegexPV = regexp.MustCompile(`"planned_values"\s*:`)
tfPlanRegexRC = regexp.MustCompile(`"resource_changes"\s*:`)
tfPlanRegexConf = regexp.MustCompile(`"configuration"\s*:`)
tfPlanRegexTV = regexp.MustCompile(`"terraform_version"\s*:`)
cdkTfRegexMetadata = regexp.MustCompile(`"metadata"\s*:`)
cdkTfRegexStackName = regexp.MustCompile(`"stackName"\s*:`)
cdkTfRegexTerraform = regexp.MustCompile(`"terraform"\s*:`)
artifactsRegexKind = regexp.MustCompile(`("kind"|kind)\s*:`)
artifactsRegexProperties = regexp.MustCompile(`("properties"|properties)\s*:`)
artifactsRegexParameters = regexp.MustCompile(`("parameters"|parameters)\s*:`)
policyAssignmentArtifactRegexPolicyDefinitionID = regexp.MustCompile(`("policyDefinitionId"|policyDefinitionId)\s*:`)
roleAssignmentArtifactRegexPrincipalIds = regexp.MustCompile(`("principalIds"|principalIds)\s*:`)
roleAssignmentArtifactRegexRoleDefinitionID = regexp.MustCompile(`("roleDefinitionId"|roleDefinitionId)\s*:`)
templateArtifactRegexParameters = regexp.MustCompile(`("template"|template)\s*:`)
blueprintRegexTargetScope = regexp.MustCompile(`("targetScope"|targetScope)\s*:`)
blueprintRegexProperties = regexp.MustCompile(`("properties"|properties)\s*:`)
buildahRegex = regexp.MustCompile(`buildah\s*from\s*\w+`)
dockerComposeServicesRegex = regexp.MustCompile(`services\s*:[\w\W]+(image|build)\s*:`)
crossPlaneRegex = regexp.MustCompile(`"?apiVersion"?\s*:\s*(\w+\.)+crossplane\.io/v\w+\s*`)
knativeRegex = regexp.MustCompile(`"?apiVersion"?\s*:\s*(\w+\.)+knative\.dev/v\w+\s*`)
pulumiNameRegex = regexp.MustCompile(`name\s*:`)
pulumiRuntimeRegex = regexp.MustCompile(`runtime\s*:`)
pulumiResourcesRegex = regexp.MustCompile(`resources\s*:`)
serverlessServiceRegex = regexp.MustCompile(`service\s*:`)
serverlessProviderRegex = regexp.MustCompile(`(^|\n)provider\s*:`)
cicdOnRegex = regexp.MustCompile(`\s*on:\s*`)
cicdJobsRegex = regexp.MustCompile(`\s*jobs:\s*`)
cicdStepsRegex = regexp.MustCompile(`\s*steps:\s*`)
queryRegexPathsAnsible = regexp.MustCompile(fmt.Sprintf(`^.*?%s(?:group|host)_vars%s.*$`, regexp.QuoteMeta(string(os.PathSeparator)), regexp.QuoteMeta(string(os.PathSeparator)))) //nolint:lll
)
var (
listKeywordsGoogleDeployment = []string{"resources"}
armRegexTypes = []string{"blueprint", "templateArtifact", "roleAssignmentArtifact", "policyAssignmentArtifact"}
possibleFileTypes = map[string]bool{
".yml": true,
".yaml": true,
".json": true,
".dockerfile": true,
"Dockerfile": true,
"possibleDockerfile": true,
".debian": true,
".ubi8": true,
".tf": true,
"tfvars": true,
".proto": true,
".sh": true,
".cfg": true,
".conf": true,
".ini": true,
".bicep": true,
}
supportedRegexes = map[string][]string{
"azureresourcemanager": append(armRegexTypes, arm),
"buildah": {"buildah"},
"cicd": {"cicd"},
"cloudformation": {"cloudformation"},
"crossplane": {"crossplane"},
"dockercompose": {"dockercompose"},
"knative": {"knative"},
"kubernetes": {"kubernetes"},
"openapi": {"openapi"},
"terraform": {"terraform", "cdkTf"},
"pulumi": {"pulumi"},
"serverlessfw": {"serverlessfw"},
}
listKeywordsAnsible = []string{"name", "gather_facts",
"hosts", "tasks", "become", "with_items", "with_dict",
"when", "become_pass", "become_exe", "become_flags"}
playBooks = "playbooks"
ansibleHost = []string{"all", "ungrouped"}
listKeywordsAnsibleHosts = []string{"hosts", "children"}
)
const (
yml = ".yml"
yaml = ".yaml"
json = ".json"
sh = ".sh"
arm = "azureresourcemanager"
bicep = "bicep"
kubernetes = "kubernetes"
terraform = "terraform"
gdm = "googledeploymentmanager"
ansible = "ansible"
grpc = "grpc"
dockerfile = "dockerfile"
crossplane = "crossplane"
knative = "knative"
sizeMb = 1048576
)
type Parameters struct {
Results string
Path []string
MaxFileSize int
}
// regexSlice is a struct to contain a slice of regex
type regexSlice struct {
regex []*regexp.Regexp
}
type analyzerInfo struct {
typesFlag []string
excludeTypesFlag []string
filePath string
}
// Analyzer keeps all the relevant info for the function Analyze
type Analyzer struct {
Paths []string
Types []string
ExcludeTypes []string
Exc []string
GitIgnoreFileName string
ExcludeGitIgnore bool
MaxFileSize int
}
// types is a map that contains the regex by type
var types = map[string]regexSlice{
"openapi": {
regex: []*regexp.Regexp{
OpenAPIRegex,
OpenAPIRegexInfo,
OpenAPIRegexPath,
},
},
"kubernetes": {
regex: []*regexp.Regexp{
k8sRegex,
k8sRegexKind,
},
},
"crossplane": {
regex: []*regexp.Regexp{
crossPlaneRegex,
k8sRegexKind,
},
},
"knative": {
regex: []*regexp.Regexp{
knativeRegex,
k8sRegexKind,
},
},
"cloudformation": {
regex: []*regexp.Regexp{
cloudRegex,
},
},
"azureresourcemanager": {
[]*regexp.Regexp{
armRegexContentVersion,
armRegexResources,
},
},
"terraform": {
[]*regexp.Regexp{
tfPlanRegexConf,
tfPlanRegexPV,
tfPlanRegexRC,
tfPlanRegexTV,
},
},
"cdkTf": {
[]*regexp.Regexp{
cdkTfRegexMetadata,
cdkTfRegexStackName,
cdkTfRegexTerraform,
},
},
"policyAssignmentArtifact": {
[]*regexp.Regexp{
artifactsRegexKind,
artifactsRegexProperties,
artifactsRegexParameters,
policyAssignmentArtifactRegexPolicyDefinitionID,
},
},
"roleAssignmentArtifact": {
[]*regexp.Regexp{
artifactsRegexKind,
artifactsRegexProperties,
roleAssignmentArtifactRegexPrincipalIds,
roleAssignmentArtifactRegexRoleDefinitionID,
},
},
"templateArtifact": {
[]*regexp.Regexp{
artifactsRegexKind,
artifactsRegexProperties,
artifactsRegexParameters,
templateArtifactRegexParameters,
},
},
"blueprint": {
[]*regexp.Regexp{
blueprintRegexTargetScope,
blueprintRegexProperties,
},
},
"buildah": {
[]*regexp.Regexp{
buildahRegex,
},
},
"dockercompose": {
[]*regexp.Regexp{
dockerComposeServicesRegex,
},
},
"pulumi": {
[]*regexp.Regexp{
pulumiNameRegex,
pulumiRuntimeRegex,
pulumiResourcesRegex,
},
},
"serverlessfw": {
[]*regexp.Regexp{
serverlessServiceRegex,
serverlessProviderRegex,
},
},
"cicd": {
[]*regexp.Regexp{
cicdOnRegex,
cicdJobsRegex,
cicdStepsRegex,
},
},
}
var defaultConfigFiles = []string{"pnpm-lock.yaml"}
// Analyze will go through the slice of paths given and determine which types of queries
// should be loaded based on the extension and content of each file
func Analyze(a *Analyzer) (model.AnalyzedPaths, error) {
// start metrics for file analyzer
metrics.Metric.Start("file_type_analyzer")
returnAnalyzedPaths := model.AnalyzedPaths{
Types: make([]string, 0),
Exc: make([]string, 0),
ExpectedLOC: 0,
}
var files []string
var wg sync.WaitGroup
// results is the channel shared by the workers that contains the types found
results := make(chan string)
locCount := make(chan int)
ignoreFiles := make([]string, 0)
projectConfigFiles := make([]string, 0)
done := make(chan bool)
hasGitIgnoreFile, gitIgnore := shouldConsiderGitIgnoreFile(a.Paths[0], a.GitIgnoreFileName, a.ExcludeGitIgnore)
// get all the files inside the given paths
for _, path := range a.Paths {
if _, err := os.Stat(path); err != nil {
return returnAnalyzedPaths, errors.Wrap(err, "failed to analyze path")
}
if err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
ext, errExt := utils.GetExtension(path)
if errExt == nil {
trimmedPath := strings.ReplaceAll(path, a.Paths[0], filepath.Base(a.Paths[0]))
ignoreFiles = a.checkIgnore(info.Size(), hasGitIgnoreFile, gitIgnore, path, trimmedPath, ignoreFiles)
if isConfigFile(path, defaultConfigFiles) {
projectConfigFiles = append(projectConfigFiles, path)
a.Exc = append(a.Exc, path)
}
if _, ok := possibleFileTypes[ext]; ok && !isExcludedFile(path, a.Exc) {
files = append(files, path)
}
}
return nil
}); err != nil {
log.Error().Msgf("failed to analyze path %s: %s", path, err)
}
}
// unwanted is the channel shared by the workers that contains the unwanted files that the parser will ignore
unwanted := make(chan string, len(files))
a.Types, a.ExcludeTypes = typeLower(a.Types, a.ExcludeTypes)
// Start the workers
for _, file := range files {
wg.Add(1)
// analyze the files concurrently
info := &analyzerInfo{
typesFlag: a.Types,
excludeTypesFlag: a.ExcludeTypes,
filePath: file,
}
go info.worker(results, unwanted, locCount, &wg)
}
go func() {
// close channel results when the worker has finished writing into it
defer func() {
close(unwanted)
close(results)
close(locCount)
}()
wg.Wait()
done <- true
}()
availableTypes, unwantedPaths, loc := computeValues(results, unwanted, locCount, done)
multiPlatformTypeCheck(&availableTypes)
unwantedPaths = append(unwantedPaths, ignoreFiles...)
unwantedPaths = append(unwantedPaths, projectConfigFiles...)
returnAnalyzedPaths.Types = availableTypes
returnAnalyzedPaths.Exc = unwantedPaths
returnAnalyzedPaths.ExpectedLOC = loc
// stop metrics for file analyzer
metrics.Metric.Stop()
return returnAnalyzedPaths, nil
}
// worker determines the type of the file by ext (dockerfile and terraform)/content and
// writes the answer to the results channel
// if no types were found, the worker will write the path of the file in the unwanted channel
func (a *analyzerInfo) worker(results, unwanted chan<- string, locCount chan<- int, wg *sync.WaitGroup) { //nolint: gocyclo
defer wg.Done()
ext, errExt := utils.GetExtension(a.filePath)
if errExt == nil {
linesCount, _ := utils.LineCounter(a.filePath)
switch ext {
// Dockerfile (direct identification)
case ".dockerfile", "Dockerfile":
if a.isAvailableType(dockerfile) {
results <- dockerfile
locCount <- linesCount
}
// Dockerfile (indirect identification)
case "possibleDockerfile", ".ubi8", ".debian":
if a.isAvailableType(dockerfile) && isDockerfile(a.filePath) {
results <- dockerfile
locCount <- linesCount
} else {
unwanted <- a.filePath
}
// Terraform
case ".tf", "tfvars":
if a.isAvailableType(terraform) {
results <- terraform
locCount <- linesCount
}
// Bicep
case ".bicep":
if a.isAvailableType(bicep) {
results <- bicep
locCount <- linesCount
}
// GRPC
case ".proto":
if a.isAvailableType(grpc) {
results <- grpc
locCount <- linesCount
}
// It could be Ansible Config or Ansible Inventory
case ".cfg", ".conf", ".ini":
if a.isAvailableType(ansible) {
results <- ansible
locCount <- linesCount
}
/* It could be Ansible, Buildah, CICD, CloudFormation, Crossplane, OpenAPI, Azure Resource Manager
Docker Compose, Knative, Kubernetes, Pulumi, ServerlessFW or Google Deployment Manager*/
case yaml, yml, json, sh:
a.checkContent(results, unwanted, locCount, linesCount, ext)
}
}
}
func isDockerfile(path string) bool {
content, err := os.ReadFile(filepath.Clean(path))
if err != nil {
log.Error().Msgf("failed to analyze file: %s", err)
return false
}
regexes := []*regexp.Regexp{
regexp.MustCompile(`\s*FROM\s*`),
regexp.MustCompile(`\s*RUN\s*`),
}
check := true
for _, regex := range regexes {
if !regex.Match(content) {
check = false
break
}
}
return check
}
// needsOverride overrides a kubernetes match when all regexes pass for the azureresourcemanager key and the extension is json,
// or for the knative/crossplane keys when the extension is yaml/yml
func needsOverride(check bool, returnType, key, ext string) bool {
if check && returnType == kubernetes && key == arm && ext == json {
return true
} else if check && returnType == kubernetes && (key == knative || key == crossplane) && (ext == yaml || ext == yml) {
return true
}
return false
}
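// Illustrative calls (using the constants above):
// needsOverride(true, kubernetes, arm, json) returns true
// needsOverride(true, kubernetes, knative, yaml) returns true
// needsOverride(true, kubernetes, terraform, json) returns false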
// checkContent determines the file type by content when the worker was unable to
// determine it by extension; if no type is determined, the file path is sent to the unwanted channel
func (a *analyzerInfo) checkContent(results, unwanted chan<- string, locCount chan<- int, linesCount int, ext string) {
typesFlag := a.typesFlag
excludeTypesFlag := a.excludeTypesFlag
// get file content
content, err := os.ReadFile(a.filePath)
if err != nil {
log.Error().Msgf("failed to analyze file: %s", err)
return
}
returnType := ""
// Sort the keys so that CloudFormation (the type with the fewest required matches) goes last
keys := make([]string, 0, len(types))
for k := range types {
keys = append(keys, k)
}
if typesFlag[0] != "" {
keys = getKeysFromTypesFlag(typesFlag)
} else if excludeTypesFlag[0] != "" {
keys = getKeysFromExcludeTypesFlag(excludeTypesFlag)
}
sort.Sort(sort.Reverse(sort.StringSlice(keys)))
for _, key := range keys {
check := true
for _, typeRegex := range types[key].regex {
if !typeRegex.Match(content) {
check = false
break
}
}
// If all regexes passed and there wasn't a type already assigned
if check && returnType == "" {
returnType = key
} else if needsOverride(check, returnType, key, ext) {
returnType = key
}
}
returnType = checkReturnType(a.filePath, returnType, ext, content)
if returnType != "" {
if a.isAvailableType(returnType) {
results <- returnType
locCount <- linesCount
return
}
}
// No type was determined (ignore on parser)
unwanted <- a.filePath
}
func checkReturnType(path, returnType, ext string, content []byte) string {
if returnType != "" {
if returnType == "cdkTf" {
return terraform
}
if utils.Contains(returnType, armRegexTypes) {
return arm
}
} else if ext == yaml || ext == yml {
if checkHelm(path) {
return kubernetes
}
platform := checkYamlPlatform(content, path)
if platform != "" {
return platform
}
}
return returnType
}
func checkHelm(path string) bool {
_, err := os.Stat(filepath.Join(filepath.Dir(path), "Chart.yaml"))
if errors.Is(err, os.ErrNotExist) {
return false
} else if err != nil {
log.Error().Msgf("failed to check helm: %s", err)
}
return true
}
func checkYamlPlatform(content []byte, path string) string {
content = utils.DecryptAnsibleVault(content, os.Getenv("ANSIBLE_VAULT_PASSWORD_FILE"))
var yamlContent model.Document
if err := yamlParser.Unmarshal(content, &yamlContent); err != nil {
log.Warn().Msgf("failed to parse yaml file (%s): %s", path, err)
}
// check if it is google deployment manager platform
for _, keyword := range listKeywordsGoogleDeployment {
if _, ok := yamlContent[keyword]; ok {
return gdm
}
}
// check if the file contains some keywords related with Ansible
if checkForAnsible(yamlContent) {
return ansible
}
// check if the file contains some keywords related with Ansible Host
if checkForAnsibleHost(yamlContent) {
return ansible
}
// check if the yaml file lives under an Ansible-related path (group_vars, host_vars)
if checkForAnsibleByPaths(path) {
return ansible
}
return ""
}
func checkForAnsibleByPaths(path string) bool {
return queryRegexPathsAnsible.MatchString(path)
}
func checkForAnsible(yamlContent model.Document) bool {
isAnsible := false
if play := yamlContent[playBooks]; play != nil {
if listOfPlayBooks, ok := play.([]interface{}); ok {
for _, value := range listOfPlayBooks {
castingValue, ok := value.(map[string]interface{})
if ok {
for _, keyword := range listKeywordsAnsible {
if _, ok := castingValue[keyword]; ok {
isAnsible = true
}
}
}
}
}
}
return isAnsible
}
func checkForAnsibleHost(yamlContent model.Document) bool {
isAnsible := false
for _, ansibleDefault := range ansibleHost {
if hosts := yamlContent[ansibleDefault]; hosts != nil {
if listHosts, ok := hosts.(map[string]interface{}); ok {
for _, value := range listKeywordsAnsibleHosts {
if host := listHosts[value]; host != nil {
isAnsible = true
}
}
}
}
}
return isAnsible
}
// computeValues computes expected Lines of Code to be scanned from locCount channel
// and creates the types and unwanted slices from the channels removing any duplicates
func computeValues(types, unwanted chan string, locCount chan int, done chan bool) (typesS, unwantedS []string, locTotal int) {
var val int
unwantedSlice := make([]string, 0)
typeSlice := make([]string, 0)
for {
select {
case i := <-locCount:
val += i
case i := <-unwanted:
if !utils.Contains(i, unwantedSlice) {
unwantedSlice = append(unwantedSlice, i)
}
case i := <-types:
if !utils.Contains(i, typeSlice) {
typeSlice = append(typeSlice, i)
}
case <-done:
return typeSlice, unwantedSlice, val
}
}
}
// getKeysFromTypesFlag gets all the regex keys related to the types flag
func getKeysFromTypesFlag(typesFlag []string) []string {
ks := make([]string, 0, len(types))
for i := range typesFlag {
t := typesFlag[i]
if regexes, ok := supportedRegexes[t]; ok {
ks = append(ks, regexes...)
}
}
return ks
}
// getKeysFromExcludeTypesFlag gets all the regex keys except those ruled out by the exclude-types flag
func getKeysFromExcludeTypesFlag(excludeTypesFlag []string) []string {
ks := make([]string, 0, len(types))
for k := range supportedRegexes {
if !utils.Contains(k, excludeTypesFlag) {
if regexes, ok := supportedRegexes[k]; ok {
ks = append(ks, regexes...)
}
}
}
return ks
}
// isExcludedFile verifies if the path is covered by the --exclude-paths flag
func isExcludedFile(path string, exc []string) bool {
for i := range exc {
exclude, err := provider.GetExcludePaths(exc[i])
if err != nil {
log.Err(err).Msg("failed to get exclude paths")
}
for j := range exclude {
if exclude[j] == path {
log.Info().Msgf("Excluded file %s from analyzer", path)
return true
}
}
}
return false
}
// isDeadSymlink returns true when the path cannot be stat'ed, as is the case for dangling symlinks
func isDeadSymlink(path string) bool {
fileInfo, _ := os.Stat(path)
return fileInfo == nil
}
func isConfigFile(path string, exc []string) bool {
for i := range exc {
exclude, err := provider.GetExcludePaths(exc[i])
if err != nil {
log.Err(err).Msg("failed to get exclude paths")
}
for j := range exclude {
fileInfo, _ := os.Stat(path)
if fileInfo != nil && fileInfo.IsDir() {
continue
}
if len(path)-len(exclude[j]) > 0 && path[len(path)-len(exclude[j]):] == exclude[j] && exclude[j] != "" {
log.Info().Msgf("Excluded file %s from analyzer", path)
return true
}
}
}
return false
}
// shouldConsiderGitIgnoreFile verifies if the scan should exclude the files according to the .gitignore file
func shouldConsiderGitIgnoreFile(path, gitIgnore string, excludeGitIgnoreFile bool) (hasGitIgnoreFileRes bool,
gitIgnoreRes *ignore.GitIgnore) {
gitIgnorePath := filepath.ToSlash(filepath.Join(path, gitIgnore))
_, err := os.Stat(gitIgnorePath)
if !excludeGitIgnoreFile && err == nil && gitIgnore != "" {
gitIgnore, _ := ignore.CompileIgnoreFile(gitIgnorePath)
if gitIgnore != nil {
log.Info().Msgf(".gitignore file was found in '%s' and it will be used to automatically exclude paths", path)
return true, gitIgnore
}
}
return false, nil
}
func multiPlatformTypeCheck(typesSelected *[]string) {
if utils.Contains("serverlessfw", *typesSelected) && !utils.Contains("cloudformation", *typesSelected) {
*typesSelected = append(*typesSelected, "cloudformation")
}
if utils.Contains("knative", *typesSelected) && !utils.Contains("kubernetes", *typesSelected) {
*typesSelected = append(*typesSelected, "kubernetes")
}
}
func (a *analyzerInfo) isAvailableType(typeName string) bool {
// no flag is set
if len(a.typesFlag) == 1 && a.typesFlag[0] == "" && len(a.excludeTypesFlag) == 1 && a.excludeTypesFlag[0] == "" {
return true
} else if len(a.typesFlag) > 1 || a.typesFlag[0] != "" {
// type flag is set
return utils.Contains(typeName, a.typesFlag)
} else if len(a.excludeTypesFlag) > 1 || a.excludeTypesFlag[0] != "" {
// exclude type flag is set
return !utils.Contains(typeName, a.excludeTypesFlag)
}
// no valid behavior detected
return false
}
func (a *Analyzer) checkIgnore(fileSize int64, hasGitIgnoreFile bool,
gitIgnore *ignore.GitIgnore,
fullPath string, trimmedPath string, ignoreFiles []string) []string {
exceededFileSize := a.MaxFileSize >= 0 && float64(fileSize)/float64(sizeMb) > float64(a.MaxFileSize)
if (hasGitIgnoreFile && gitIgnore.MatchesPath(trimmedPath)) || isDeadSymlink(fullPath) || exceededFileSize {
ignoreFiles = append(ignoreFiles, fullPath)
a.Exc = append(a.Exc, fullPath)
if exceededFileSize {
log.Error().Msgf("file %s exceeds maximum file size of %d Mb", fullPath, a.MaxFileSize)
}
}
return ignoreFiles
}
func typeLower(types, exclTypes []string) (typesRes, exclTypesRes []string) {
for i := range types {
types[i] = strings.ToLower(types[i])
}
for i := range exclTypes {
exclTypes[i] = strings.ToLower(exclTypes[i])
}
return types, exclTypes
}
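// Hypothetical usage sketch: analyzing a directory with default settings.
// The Types and ExcludeTypes slices mirror how the CLI passes flags (a single
// empty string means the flag is not set), and a negative MaxFileSize
// disables the file size check.
func exampleAnalyze() {
paths, err := Analyze(&Analyzer{
Paths: []string{"./testdata"},
Types: []string{""},
ExcludeTypes: []string{""},
Exc: []string{},
GitIgnoreFileName: ".gitignore",
ExcludeGitIgnore: false,
MaxFileSize: -1,
})
if err != nil {
return
}
log.Info().Msgf("detected types %v, expected LOC %d", paths.Types, paths.ExpectedLOC)
}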
package engine
import (
"fmt"
"strings"
build "github.com/Checkmarx/kics/v2/pkg/builder/model"
commentParser "github.com/Checkmarx/kics/v2/pkg/builder/parser/comment"
tagParser "github.com/Checkmarx/kics/v2/pkg/builder/parser/tag"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/rs/zerolog/log"
"github.com/zclconf/go-cty/cty"
ctyConvert "github.com/zclconf/go-cty/cty/convert"
)
const resourceLabelsCount = 2
// Engine contains the rule conditions and the comment parser used to locate them
type Engine struct {
commentParser *commentParser.Parser
conditions []build.Condition
}
// Run parses the file and executes Engine.Run on its body
func Run(src []byte, filename string) ([]build.Rule, error) {
cp, err := commentParser.NewParser(src, filename)
if err != nil {
return nil, err
}
file, diags := hclsyntax.ParseConfig(src, filename, hcl.Pos{Byte: 0, Line: 1, Column: 1})
if diags != nil && diags.HasErrors() {
return nil, diags.Errs()[0]
}
if file == nil {
return nil, fmt.Errorf("invalid parse result")
}
e := &Engine{
commentParser: cp,
}
return e.Run(file.Body.(*hclsyntax.Body))
}
// Run walks the body, groups the collected conditions into rules, and returns them
func (e *Engine) Run(body *hclsyntax.Body) ([]build.Rule, error) {
e.conditions = make([]build.Condition, 0)
if err := e.walkBody(body, []build.PathItem{}); err != nil {
return nil, err
}
rules := make([]build.Rule, 0)
conditionGroups := make(map[string][]build.Condition)
for _, condition := range e.conditions {
group, ok := condition.AttrAsString("group")
if !ok {
rules = append(rules, build.Rule{
Conditions: []build.Condition{condition},
})
continue
}
conditionGroups[group] = append(conditionGroups[group], condition)
}
for _, conditionGroup := range conditionGroups {
rules = append(rules, build.Rule{
Conditions: conditionGroup,
})
}
return rules, nil
}
func (e *Engine) walkBody(body *hclsyntax.Body, walkHistory []build.PathItem) error {
for _, attribute := range body.Attributes {
if err := e.walkAttribute(attribute, walkHistory); err != nil {
return err
}
}
for _, block := range body.Blocks {
if err := e.walkBlock(block, walkHistory); err != nil {
return err
}
}
return nil
}
func (e *Engine) walkBlock(block *hclsyntax.Block, walkHistory []build.PathItem) error {
if len(block.Labels) == resourceLabelsCount {
walkHistory = append(walkHistory,
build.PathItem{Type: build.PathTypeResource, Name: block.Type},
build.PathItem{Type: build.PathTypeResourceType, Name: block.Labels[0]},
build.PathItem{Type: build.PathTypeResourceName, Name: block.Labels[1]},
)
} else {
walkHistory = append(walkHistory, build.PathItem{Type: build.PathTypeDefault, Name: block.Type})
}
e.checkComment(block.Range(), walkHistory, nil)
return e.walkBody(block.Body, walkHistory)
}
func (e *Engine) walkAttribute(attr *hclsyntax.Attribute, walkHistory []build.PathItem) error {
walkHistory = append(walkHistory, build.PathItem{Type: build.PathTypeDefault, Name: attr.Name})
switch exp := attr.Expr.(type) {
case *hclsyntax.TemplateExpr,
*hclsyntax.TemplateWrapExpr,
*hclsyntax.LiteralValueExpr,
*hclsyntax.ScopeTraversalExpr:
v, err := e.ExpToString(attr.Expr)
if err != nil {
return err
}
e.checkComment(attr.Range(), walkHistory, &v)
case *hclsyntax.ObjectConsExpr:
e.checkComment(attr.Range(), walkHistory, nil)
for _, item := range exp.Items {
if err := e.walkConstantItem(item, walkHistory); err != nil {
return err
}
}
default:
e.checkComment(attr.Range(), walkHistory, nil)
}
return nil
}
// ExpToString converts an expression into a string
func (e *Engine) ExpToString(expr hclsyntax.Expression) (string, error) {
switch t := expr.(type) {
case *hclsyntax.LiteralValueExpr:
s, err := ctyConvert.Convert(t.Val, cty.String)
if err != nil {
return "", err
}
return s.AsString(), nil
case *hclsyntax.TemplateExpr:
if t.IsStringLiteral() {
v, err := t.Value(nil)
if err != nil {
return "", err
}
return v.AsString(), nil
}
builderString, err := e.buildString(t.Parts)
if err != nil {
return "", err
}
return builderString, nil
case *hclsyntax.TemplateWrapExpr:
return e.ExpToString(t.Wrapped)
case *hclsyntax.ObjectConsKeyExpr:
return e.ExpToString(t.Wrapped)
case *hclsyntax.ScopeTraversalExpr:
items := evaluateScopeTraversalExpr(t.Traversal)
return strings.Join(items, "."), nil
}
return "", fmt.Errorf("can't convert expression %T to string", expr)
}
func (e *Engine) buildString(parts []hclsyntax.Expression) (string, error) {
builder := &strings.Builder{}
for _, part := range parts {
s, err := e.ExpToString(part)
if err != nil {
return "", err
}
builder.WriteString(s)
}
return builder.String(), nil
}
func (e *Engine) walkConstantItem(item hclsyntax.ObjectConsItem, walkHistory []build.PathItem) error {
k, err := e.ExpToString(item.KeyExpr)
if err != nil {
return err
}
walkHistory = append(walkHistory, build.PathItem{Type: build.PathTypeDefault, Name: k})
v, err := e.ExpToString(item.ValueExpr)
if err != nil {
return err
}
e.checkComment(item.ValueExpr.Range(), walkHistory, &v)
return nil
}
func (e *Engine) checkComment(rg hcl.Range, walkHistory []build.PathItem, actualValue *string) {
leadComment, endLineComment := e.commentParser.ParseCommentsForNode(rg)
if !leadComment.IsEmpty() {
e.addRule(walkHistory, leadComment, actualValue)
}
if !endLineComment.IsEmpty() {
e.addRule(walkHistory, endLineComment, actualValue)
}
}
func (e *Engine) addRule(walkHistory []build.PathItem, comment commentParser.Comment, actualValue *string) {
tags, err := tagParser.Parse(comment.Value(), model.AllIssueTypesAsString)
if err != nil {
log.Err(err).Msgf("Line %d: failed to parse comment '%s'", comment.Line(), comment.Value())
return
}
if len(tags) == 0 {
return
}
cp := make([]build.PathItem, len(walkHistory))
copy(cp, walkHistory)
for _, t := range tags {
e.conditions = append(e.conditions, build.Condition{
Line: comment.Line(),
IssueType: model.IssueType(t.Name),
Path: cp,
Value: actualValue,
Attributes: t.Attributes,
})
}
}
func evaluateScopeTraversalExpr(t hcl.Traversal) []string {
items := make([]string, 0)
for _, part := range t {
switch tt := part.(type) {
case hcl.TraverseAttr:
items = append(items, tt.Name)
case hcl.TraverseRoot:
items = append(items, tt.Name)
case hcl.TraverseIndex:
switch tt.Key.Type() {
case cty.Number:
items = append(items, tt.Key.AsBigFloat().String())
case cty.String:
items = append(items, tt.Key.AsString())
}
}
}
return items
}
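// Hypothetical usage sketch: building rules from an annotated snippet. The
// tag name in the comment is assumed to be one of the issue types accepted by
// the tag parser (the exact names come from model.AllIssueTypesAsString).
func exampleEngineRun() {
src := []byte(`resource "aws_s3_bucket" "example" {
acl = "private" // IncorrectValue:"expected=private"
}`)
rules, err := Run(src, "example.tf")
if err != nil {
return
}
log.Info().Msgf("built %d rule(s)", len(rules))
}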
package model
import "github.com/Checkmarx/kics/v2/pkg/model"
// PathItemType represents the type of path an item belongs to in the json representation
type PathItemType string
// Constants for kinds of PathItemTypes
const (
PathTypeDefault PathItemType = "DEFAULT"
PathTypeResource PathItemType = "RESOURCE"
PathTypeResourceType PathItemType = "RESOURCE_TYPE"
PathTypeResourceName PathItemType = "RESOURCE_NAME"
)
// PathItem represents json's element name and type
type PathItem struct {
Name string
Type PathItemType
}
// Condition represents a condition from a rule that should be checked
type Condition struct {
Line int
IssueType model.IssueType
Path []PathItem
Value interface{}
Attributes map[string]interface{}
}
// Rule represents a list of conditions to validate a rule
type Rule struct {
Conditions []Condition
}
// Attr returns the attribute with the given name and whether it exists
func (c Condition) Attr(name string) (interface{}, bool) {
v, ok := c.Attributes[name]
if !ok {
return nil, false
}
return v, true
}
// AttrAsString gets Attr and converts to string
func (c Condition) AttrAsString(name string) (string, bool) {
v, ok := c.Attributes[name]
if !ok {
return "", false
}
if vv, ok := v.(string); ok {
return vv, true
}
return "", false
}
package comment
import (
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
)
// Parser represents a list of code tokens
type Parser struct {
tokens hclsyntax.Tokens
}
// NewParser parses the content of a file and returns a parser with its tokens
func NewParser(src []byte, filename string) (*Parser, error) {
tokens, diags := hclsyntax.LexConfig(src, filename, hcl.Pos{Line: 0, Column: 0})
if diags != nil && diags.HasErrors() {
return nil, diags.Errs()[0]
}
return &Parser{
tokens: tokens,
}, nil
}
// ParseCommentsForNode returns the lead comment and the end-line comment for the range given in rg, if they exist
func (p *Parser) ParseCommentsForNode(rg hcl.Range) (leadComment, endLineComment Comment) {
start, end := p.rangePosition(rg)
startLeadComment := p.leadCommentStarts(start)
endLineComment := p.lineCommentEnds(end)
return p.createCommentFromRange(startLeadComment, start), p.createCommentFromRange(end, endLineComment)
}
func (p *Parser) rangePosition(rng hcl.Range) (start, end int) {
for i := 0; ; i++ {
if i >= len(p.tokens) {
return len(p.tokens), len(p.tokens)
}
if p.tokens[i].Range.Start.Byte >= rng.Start.Byte {
start = i
break
}
}
for i := start; ; i++ {
if i >= len(p.tokens) {
return start, len(p.tokens)
}
if p.tokens[i].Range.Start.Byte >= rng.End.Byte {
end = i
break
}
}
return start, end
}
func (p *Parser) leadCommentStarts(before int) (i int) {
defer func() {
if i != before && i-1 >= 0 && p.tokens[i-1].Type != hclsyntax.TokenNewline {
i++
}
}()
for i = before - 1; i >= 0; i-- {
if p.tokens[i].Type != hclsyntax.TokenComment {
return i + 1
}
}
return 0
}
func (p *Parser) lineCommentEnds(after int) int {
for i := after; i < len(p.tokens); i++ {
tok := p.tokens[i]
if tok.Type != hclsyntax.TokenComment {
return i
}
if len(tok.Bytes) > 0 && tok.Bytes[len(tok.Bytes)-1] == '\n' {
return i + 1
}
}
return len(p.tokens)
}
// Comment - struct with comment value and its position on file
type Comment struct {
pos hcl.Pos
value string
}
// IsEmpty returns true if comment is empty, otherwise returns false
func (c Comment) IsEmpty() bool {
return c.value == ""
}
// Value returns comment value
func (c Comment) Value() string {
return c.value
}
// Line returns the line where the comment starts
func (c Comment) Line() int {
return c.pos.Line + 1
}
func (p *Parser) createCommentFromRange(start, end int) Comment {
s := ""
for i := start; i < end; i++ {
s += string(p.tokens[i].Bytes)
}
return Comment{
pos: p.tokens[start].Range.Start,
value: s,
}
}
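// Hypothetical usage sketch (byte offsets are illustrative): lexing a small
// snippet and asking for the comments surrounding the attribute's range.
func exampleCommentParser() {
src := []byte("// lead comment\nacl = \"private\" // end-line comment\n")
p, err := NewParser(src, "example.tf")
if err != nil {
return
}
rg := hcl.Range{Start: hcl.Pos{Byte: 16}, End: hcl.Pos{Byte: 31}}
leadComment, endLineComment := p.ParseCommentsForNode(rg)
_ = leadComment.Value()
_ = endLineComment.Value()
}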
package tag
import (
"bytes"
"errors"
"fmt"
"strconv"
"strings"
"text/scanner"
)
const (
base = 10
bitSize64 = 64
)
// Tag contains the tag name reference and its attributes
type Tag struct {
Name string
Attributes map[string]interface{}
}
// Parse parses tags of the following form:
// name1:"expected=private,test=false" name2:"attr=1"
func Parse(s string, supportedNames []string) ([]Tag, error) {
s = strings.TrimLeft(strings.TrimLeft(strings.TrimSpace(s), "/"), " ")
var tags []Tag
for _, si := range strings.Split(s, " ") {
cleanSi := strings.TrimSpace(si)
if cleanSi == "" {
continue
}
for _, supportedName := range supportedNames {
if !strings.HasPrefix(cleanSi, supportedName) {
continue
}
tag, err := parseTag(cleanSi, supportedName)
if err != nil {
return nil, err
}
tags = append(tags, tag)
}
}
return tags, nil
}
func parseTag(s, name string) (Tag, error) {
t := Tag{
Name: name,
Attributes: make(map[string]interface{}),
}
attributePart := strings.TrimPrefix(s, name)
attributePart = strings.TrimPrefix(attributePart, ":")
attributePart = strings.TrimPrefix(attributePart, "\"")
attributePart = strings.TrimSuffix(attributePart, "\"")
if attributePart == "" {
return t, nil
}
sc := &scanner.Scanner{}
sc.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings
sc.Init(strings.NewReader(attributePart))
for {
tok := sc.Scan()
switch tok {
case scanner.EOF:
return t, nil
case scanner.Ident:
ident := sc.TokenText()
switch sc.Peek() {
case '=':
sc.Next()
value, err := parseValue(sc)
if err != nil {
return Tag{}, err
}
t.Attributes[ident] = value
case '[':
sc.Next()
arg, err := parseArgs(sc)
if err != nil {
return Tag{}, err
}
t.Attributes[ident] = arg
case ',':
sc.Next()
t.Attributes[ident] = nil
case scanner.EOF:
t.Attributes[ident] = nil
}
case ',':
// NOP
default:
return Tag{}, fmt.Errorf("invalid token: %s", sc.TokenText())
}
}
}
func parseArray(sc *scanner.Scanner) ([]interface{}, error) {
var result []interface{}
for {
value, err := parseValue(sc)
if err != nil {
return result, err
}
result = append(result, value)
next := sc.Next()
if next == ']' {
return result, nil
}
if next == ',' {
continue
}
return result, fmt.Errorf(", expected but got %s", string(next))
}
}
func parseValue(sc *scanner.Scanner) (interface{}, error) {
switch sc.Peek() {
case '\'':
sc.Next()
return parseString(sc)
case '*':
r := sc.Next()
return string(r), nil
case '<', '>':
r := sc.Next()
if sc.Peek() == '=' {
sc.Next()
return string(r) + "=", nil
}
return string(r), nil
case '!':
sc.Next()
if sc.Peek() == '=' {
sc.Next()
return "!=", nil
}
return nil, fmt.Errorf("invalid value: %s", sc.TokenText())
case '[':
sc.Next()
return parseArray(sc)
default:
tok := sc.Scan()
switch tok {
case scanner.Ident:
return checkType(sc.TokenText()), nil
case scanner.String, scanner.Int, scanner.Float:
if tok == scanner.String {
str := sc.TokenText()
return str[1 : len(str)-1], nil
} else if tok == scanner.Int {
return strconv.ParseInt(sc.TokenText(), base, bitSize64)
} else if tok == scanner.Float {
return strconv.ParseFloat(sc.TokenText(), bitSize64)
}
default:
return nil, fmt.Errorf("invalid value: %s", sc.TokenText())
}
}
return nil, errors.New("invalid value")
}
func parseArgs(sc *scanner.Scanner) (map[string]interface{}, error) {
result := map[string]interface{}{}
for {
tok := sc.Scan()
if tok != scanner.Ident {
return result, fmt.Errorf("invalid attribute name: %s", sc.TokenText())
}
name := sc.TokenText()
eq := sc.Next()
if eq != '=' {
return result, fmt.Errorf("= expected but got %s", string(eq))
}
value, err := parseValue(sc)
if err != nil {
return result, err
}
result[name] = value
next := sc.Next()
if next == ']' {
return result, nil
}
if next == ',' {
continue
}
return result, fmt.Errorf(") or , expected but got %s", string(next))
}
}
func parseString(sc *scanner.Scanner) (string, error) {
var buf bytes.Buffer
ch := sc.Next()
for ch != '\'' {
if ch == '\n' || ch == '\r' || ch < 0 {
return "", errors.New("unterminated string")
}
if ch == '\\' {
s, err := parseEscape(sc)
if err != nil {
return "", err
}
buf.WriteString(s)
} else {
buf.WriteRune(ch)
}
ch = sc.Next()
}
return buf.String(), nil
}
func parseEscape(sc *scanner.Scanner) (string, error) {
ch := sc.Next()
switch ch {
case 'a':
return "\a", nil
case 'b':
return "\b", nil
case 'f':
return "\f", nil
case 'n':
return "\n", nil
case 'r':
return "\r", nil
case 't':
return "\t", nil
case 'v':
return "\v", nil
case '\\':
return "\\", nil
case '"':
return "\"", nil
case '\'':
return "'", nil
}
return "", fmt.Errorf("invalid escape sequence: %s", string(ch))
}
func checkType(s string) interface{} {
switch s {
case "true", "TRUE":
return true
case "false", "FALSE":
return false
default:
if i, err := strconv.ParseInt(s, base, bitSize64); err == nil {
return i
}
if f, err := strconv.ParseFloat(s, bitSize64); err == nil {
return f
}
return s
}
}
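// Hypothetical usage sketch: parsing two tags from one comment string. The
// supported-names slice normally comes from model.AllIssueTypesAsString; the
// names used here are assumed.
func exampleTagParse() {
tags, err := Parse(`IncorrectValue:"expected=private" MissingAttribute`,
[]string{"IncorrectValue", "MissingAttribute"})
if err != nil {
return
}
for _, t := range tags {
fmt.Printf("%s -> %v\n", t.Name, t.Attributes)
}
}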
package writer
import (
"bytes"
"fmt"
"html/template"
"strconv"
"strings"
build "github.com/Checkmarx/kics/v2/pkg/builder/model"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// RegoWriter represents the template for a Rego rule
type RegoWriter struct {
tmpl *template.Template
}
// Block represents a json block of a file for scan
type Block struct {
Name string
All bool
List []string
}
// RegoRule contains a block to be scanned and a rule to be applied
type RegoRule struct {
Block Block
build.Rule
}
const (
stringValue = "\"%s\""
prec = 6
bitSize32 = 32
bitSize64 = 64
)
// NewRegoWriter initializes a default RegoWriter using builder template
func NewRegoWriter() (*RegoWriter, error) {
tmpl, err := template.New("template.gorego").
Funcs(template.FuncMap{
"condition": condition,
"regoValue": regoValueToString,
"lastCondition": func(r RegoRule) build.Condition {
return r.Conditions[len(r.Conditions)-1]
},
"unescape": func(v string) template.HTML {
return template.HTML(v) //nolint:gosec
},
"innerKey": func(r RegoRule) template.HTML {
condition := r.Conditions[len(r.Conditions)-1]
return template.HTML(conditionKey(r.Block, condition, false, true)) //nolint:gosec
},
"searchKey": func(r RegoRule) template.HTML {
format := "%%s[%%s].%s"
condition := r.Conditions[len(r.Conditions)-1]
var vars []string
if v, ok := condition.Attr("resource"); ok && v == "*" {
vars = append(vars, "blockType")
} else {
vars = append(vars, "blockTypes[blockIndex]")
}
vars = append(vars, "name")
if _, ok := condition.Attr("any_key"); ok {
format += ".%%s"
vars = append(vars, "key")
}
format = fmt.Sprintf(format, conditionKey(r.Block, condition, false, true))
return template.HTML(fmt.Sprintf("sprintf(\"%s\", [%s])", format, strings.Join(vars, ", "))) //nolint
},
}).
ParseFiles("./pkg/builder/writer/template.gorego")
if err != nil {
return nil, err
}
return &RegoWriter{tmpl: tmpl}, nil
}
// Render executes the Rego template over the rules list passed as parameter
func (w *RegoWriter) Render(rules []build.Rule) ([]byte, error) {
wr := bytes.NewBuffer(nil)
if err := w.tmpl.Execute(wr, format(rules)); err != nil {
return nil, errors.Wrap(err, "failed to render")
}
return wr.Bytes(), nil
}
func condition(r Block, c build.Condition) string {
key := conditionKey(r, c, true, false)
if c.IssueType == model.IssueTypeRedundantAttribute {
return key
}
if c.IssueType == model.IssueTypeMissingAttribute {
return fmt.Sprintf("not %s", key)
}
if _, ok := c.Attr("upper"); ok {
key = fmt.Sprintf("upper(%s)", key)
}
if _, ok := c.Attr("lower"); ok {
key = fmt.Sprintf("lower(%s)", key)
}
if reg, ok := c.Attr("regex"); ok {
return fmt.Sprintf("re_match(%q, %s)", reg, key)
}
condition := "=="
if v, ok := c.AttrAsString("condition"); ok {
condition = v
}
if value, ok := c.AttrAsString("val"); ok {
return fmt.Sprintf("%s %s %s", key, condition, regoValueToString(value))
}
return fmt.Sprintf("%s %s %s", key, condition, regoValueToString(c.Value))
}
func regoValueToString(i interface{}) string {
switch v := i.(type) {
case bool:
if v {
return "true"
}
return "false"
case int64:
return strconv.FormatInt(v, 10)
case int32:
return strconv.Itoa(int(v))
case int:
return strconv.Itoa(v)
case float64:
return strconv.FormatFloat(v, 'f', prec, bitSize64)
case float32:
return strconv.FormatFloat(float64(v), 'f', prec, bitSize32)
case string:
return fmt.Sprintf(stringValue, v)
case *string:
if v == nil {
return "\"\""
}
return fmt.Sprintf(stringValue, *v)
case []string:
sts := make([]string, 0, len(v))
for _, vi := range v {
sts = append(sts, fmt.Sprintf(stringValue, vi))
}
return fmt.Sprintf("{%s}", strings.Join(sts, ", "))
default:
log.Warn().Msgf("Can't convert value, %T to string", i)
return ""
}
}
func conditionKey(block Block, c build.Condition, withBlockPrefix, pathOnly bool) string {
key := ""
if withBlockPrefix {
key = "block"
}
for i, pathItem := range c.Path {
switch pathItem.Type {
case build.PathTypeResourceType:
if pathOnly {
continue
} else if block.All {
key += "[blockType]"
} else {
key += "[blockTypes[blockIndex]]"
}
case build.PathTypeResourceName:
if !pathOnly {
key += "[name]"
}
case build.PathTypeDefault:
key = buildDefaultType(c, i, pathOnly, pathItem, key)
}
}
return key
}
func buildDefaultType(c build.Condition, i int, pathOnly bool, pathItem build.PathItem, key string) string {
if _, ok := c.Attr("any_key"); ok && i == len(c.Path)-1 {
if !pathOnly {
key += "[key]"
}
return key
}
if key != "" {
key += "."
}
key += pathItem.Name
return key
}
func format(rules []build.Rule) []RegoRule {
res := make([]RegoRule, len(rules))
for i, r := range rules {
res[i] = RegoRule{
Rule: r,
Block: createBlock(r),
}
}
return res
}
func createBlock(rule build.Rule) Block {
result := Block{}
result = resultName(rule, result)
resources := make(map[string]struct{}, len(rule.Conditions))
for _, condition := range rule.Conditions {
if len(condition.Path) == 0 {
continue
}
v, ok := condition.Attr("resource")
if !ok {
for _, pathItem := range condition.Path {
if pathItem.Type == build.PathTypeResourceType {
resources[pathItem.Name] = struct{}{}
}
}
continue
}
resources, result = switchFunction(v, result, resources)
}
result.List = make([]string, 0, len(resources))
for resource := range resources {
result.List = append(result.List, resource)
}
return result
}
func switchFunction(v interface{}, result Block, resources map[string]struct{}) (map[string]struct{}, Block) {
switch vv := v.(type) {
case string:
if vv == "*" {
result.All = true
}
resources[vv] = struct{}{}
case []string:
for _, vi := range vv {
resources[vi] = struct{}{}
}
case []interface{}:
for _, vi := range vv {
if vvi, ok := vi.(string); ok {
resources[vvi] = struct{}{}
}
}
}
return resources, result
}
func resultName(rule build.Rule, result Block) Block {
for _, pathItem := range rule.Conditions[len(rule.Conditions)-1].Path {
if pathItem.Type == build.PathTypeResource {
result.Name = pathItem.Name
break
}
}
return result
}
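// Hypothetical usage sketch: rendering rules produced by the builder engine
// into Rego. NewRegoWriter loads its template from a path relative to the
// working directory, so this only works when run from the repository root.
func exampleRender(rules []build.Rule) ([]byte, error) {
w, err := NewRegoWriter()
if err != nil {
return nil, err
}
return w.Render(rules)
}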
package descriptions
import (
"bytes"
"encoding/base64"
"encoding/json"
"fmt"
"io"
"net/http"
"os"
"time"
"github.com/Checkmarx/kics/v2/internal/constants"
descModel "github.com/Checkmarx/kics/v2/pkg/descriptions/model"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog/log"
)
var (
// ***************************************************
// * HARDCODED authKey is NOT FOR SECURITY PURPOSES *
// ***************************************************
authKey = []rune{67, 101, 110, 116, 101, 114, 95, 102, 111, 114, 95, 73, 110, 116, 101, 114, 110, 101,
116, 95, 83, 101, 99, 117, 114, 105, 116, 121, 95, 80, 114, 111, 112, 114, 105, 101, 116, 97, 114, 121,
95, 67, 111, 110, 116, 101, 110, 116, 95, 99, 105, 115, 101, 99, 117, 114, 105, 116, 121, 46, 111, 114, 103}
tr = &http.Transport{
Proxy: http.ProxyFromEnvironment,
MaxIdleConns: 10,
IdleConnTimeout: 30 * time.Second,
DisableCompression: true,
}
// HTTPRequestClient - http client to use for requests
HTTPRequestClient HTTPClient = &http.Client{
Transport: tr,
Timeout: 20 * time.Second,
}
)
// HTTPClient - http client to use for requests
type HTTPClient interface {
Do(req *http.Request) (*http.Response, error)
}
// HTTPDescription - HTTP client interface to use for requesting descriptions
type HTTPDescription interface {
CheckConnection() error
RequestDescriptions(descriptionIDs []string) (map[string]descModel.CISDescriptions, error)
CheckLatestVersion(version string) (model.Version, error)
}
// Client - client for making descriptions requests
type Client struct {
}
// CheckConnection - checks if the endpoint is reachable
func (c *Client) CheckConnection() error {
baseURL, err := getBaseURL()
if err != nil {
return err
}
endpointURL := fmt.Sprintf("%s/api/", baseURL)
req, err := http.NewRequest(http.MethodGet, endpointURL, http.NoBody) //nolint
if err != nil {
return err
}
resp, err := doRequest(req)
if err != nil {
return err
}
defer func() {
if closeErr := resp.Body.Close(); closeErr != nil {
log.Err(closeErr).Msg("Error closing file")
}
}()
return err
}
// CheckLatestVersion - Check if using KICS latest version from endpoint
func (c *Client) CheckLatestVersion(version string) (model.Version, error) {
baseURL, err := getBaseURL()
if err != nil {
return model.Version{}, err
}
endpointURL := fmt.Sprintf("%s/api/%s", baseURL, "version")
versionRequest := descModel.VersionRequest{
Version: version,
}
requestBody, err := json.Marshal(versionRequest)
if err != nil {
return model.Version{}, err
}
req, err := http.NewRequest(http.MethodPost, endpointURL, bytes.NewReader(requestBody)) //nolint
if err != nil {
return model.Version{}, err
}
req.Header.Add("Content-Type", "application/json")
req.Header.Add("Authorization", fmt.Sprintf("Basic %s", base64.StdEncoding.EncodeToString([]byte(getBasicAuth()))))
resp, err := doRequest(req)
if err != nil {
return model.Version{}, err
}
defer func() {
if closeErr := resp.Body.Close(); closeErr != nil {
log.Err(closeErr).Msg("Error closing file")
}
}()
b, err := io.ReadAll(resp.Body)
if err != nil {
return model.Version{}, err
}
var VersionResponse model.Version
err = json.Unmarshal(b, &VersionResponse)
if err != nil {
return model.Version{}, err
}
return VersionResponse, nil
}
// RequestDescriptions - gets descriptions from endpoint
func (c *Client) RequestDescriptions(descriptionIDs []string) (map[string]descModel.CISDescriptions, error) {
baseURL, err := getBaseURL()
if err != nil {
log.Debug().Msg("Unable to get baseURL")
return nil, err
}
endpointURL := fmt.Sprintf("%s/api/%s", baseURL, "descriptions")
descriptionRequest := descModel.DescriptionRequest{
Version: constants.Version,
DescriptionIDs: descriptionIDs,
}
requestBody, err := json.Marshal(descriptionRequest)
if err != nil {
log.Err(err).Msg("Unable to marshal request body")
return nil, err
}
req, err := http.NewRequest(http.MethodPost, endpointURL, bytes.NewReader(requestBody)) //nolint
if err != nil {
return nil, err
}
req.Header.Add("Content-Type", "application/json")
req.Header.Add("Authorization", fmt.Sprintf("Basic %s", base64.StdEncoding.EncodeToString([]byte(getBasicAuth()))))
log.Debug().Msgf("HTTP POST to descriptions endpoint")
startTime := time.Now()
resp, err := doRequest(req)
if err != nil {
log.Err(err).Msgf("Unable to POST to descriptions endpoint")
return nil, err
}
defer func() {
if closeErr := resp.Body.Close(); closeErr != nil {
log.Err(closeErr).Msg("Error closing file")
}
}()
endTime := time.Since(startTime)
log.Debug().Msgf("HTTP Status: %d %s %v", resp.StatusCode, http.StatusText(resp.StatusCode), endTime)
b, err := io.ReadAll(resp.Body)
if err != nil {
log.Err(err).Msg("Unable to read response body")
return nil, err
}
var getDescriptionsResponse descModel.DescriptionResponse
err = json.Unmarshal(b, &getDescriptionsResponse)
if err != nil {
log.Err(err).Msg("Unable to unmarshal response body")
return nil, err
}
return getDescriptionsResponse.Descriptions, nil
}
// doRequest - make HTTP request
func doRequest(request *http.Request) (*http.Response, error) {
return HTTPRequestClient.Do(request)
}
func getBaseURL() (string, error) {
var rtnBaseURL string
urlFromEnv := os.Getenv("KICS_DESCRIPTIONS_ENDPOINT")
if constants.BaseURL == "" && urlFromEnv == "" {
return "", fmt.Errorf("the BaseURL or KICS_DESCRIPTIONS_ENDPOINT environment variable not set")
}
if urlFromEnv != "" {
rtnBaseURL = urlFromEnv
} else {
rtnBaseURL = constants.BaseURL
}
return rtnBaseURL, nil
}
func getBasicAuth() string {
auth := os.Getenv("KICS_BASIC_AUTH_PASS")
if auth == "" {
auth = string(authKey)
}
return auth
}
package descriptions
import (
"fmt"
"github.com/Checkmarx/kics/v2/pkg/model"
)
var (
descClient HTTPDescription = &Client{}
)
// RequestAndOverrideDescriptions - Requests descriptions and overrides default descriptions
func RequestAndOverrideDescriptions(summary *model.Summary) error {
descriptionIDs := make([]string, 0)
for idx := range summary.Queries {
descriptionIDs = append(descriptionIDs, summary.Queries[idx].DescriptionID)
}
if err := descClient.CheckConnection(); err != nil {
return err
}
descriptionMap, err := descClient.RequestDescriptions(descriptionIDs)
if err != nil {
return err
}
for idx := range summary.Queries {
if descriptionMap[summary.Queries[idx].DescriptionID].DescriptionID == "" &&
descriptionMap[summary.Queries[idx].DescriptionID].RationaleText == "" {
continue
}
descriptionID := summary.Queries[idx].DescriptionID
summary.Queries[idx].CISDescriptionID = descriptionMap[descriptionID].DescriptionID
summary.Queries[idx].CISDescriptionTitle = descriptionMap[descriptionID].DescriptionTitle
summary.Queries[idx].CISDescriptionText = descriptionMap[descriptionID].DescriptionText
summary.Queries[idx].CISRationaleText = descriptionMap[descriptionID].RationaleText
summary.Queries[idx].CISBenchmarkName = descriptionMap[descriptionID].BenchmarkName
summary.Queries[idx].CISBenchmarkVersion = descriptionMap[descriptionID].BenchmarkVersion
summary.Queries[idx].CISDescriptionIDFormatted = fmt.Sprintf(
"Security - %s v%s - Rule %s",
descriptionMap[descriptionID].BenchmarkName,
descriptionMap[descriptionID].BenchmarkVersion,
descriptionMap[descriptionID].DescriptionID,
)
summary.Queries[idx].CISDescriptionTextFormatted = fmt.Sprintf(
"%s\n%s",
descriptionMap[descriptionID].DescriptionText,
descriptionMap[descriptionID].RationaleText,
)
}
return nil
}
package mockclient
import (
"net/http"
"github.com/Checkmarx/kics/v2/pkg/descriptions/model"
genModel "github.com/Checkmarx/kics/v2/pkg/model"
)
// MockHTTPClient - the mock http client
type MockHTTPClient struct {
DoFunc func(req *http.Request) (*http.Response, error)
}
// Do - mock client's Do function
func (m *MockHTTPClient) Do(req *http.Request) (*http.Response, error) {
return GetDoFunc(req)
}
// MockDescriptionsClient - the mock descriptions client
type MockDescriptionsClient struct {
RequestDescriptionsFunc func(descriptionIDs []string) (map[string]model.CISDescriptions, error)
}
// RequestDescriptions - mock descriptions client request descriptions function
func (m *MockDescriptionsClient) RequestDescriptions(descriptionIDs []string) (map[string]model.CISDescriptions, error) {
return GetDescriptions(descriptionIDs)
}
// CheckConnection - mock descriptions client check connection function
func (m *MockDescriptionsClient) CheckConnection() error {
return CheckConnection()
}
// CheckLatestVersion - mock client request version function
func (m *MockDescriptionsClient) CheckLatestVersion(version string) (genModel.Version, error) {
return CheckVersion(version)
}
var (
// GetDoFunc - mock client's `Do` func
GetDoFunc func(req *http.Request) (*http.Response, error)
// CheckConnection - mock client's `CheckConnection` func
CheckConnection func() error
// GetDescriptions - mock client's `RequestDescriptions` func
GetDescriptions func(descriptionIDs []string) (map[string]model.CISDescriptions, error)
// CheckVersion mock client's `CheckLatestVersion` func
CheckVersion func(version string) (genModel.Version, error)
)
// MockRequestBody - mock request body
type MockRequestBody struct {
Descriptions []string `json:"descriptions"`
}
// MockResponseBody - mock response body
type MockResponseBody struct {
Descriptions map[string]string `json:"descriptions"`
}
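// Hypothetical test sketch: stubbing GetDoFunc so that Do never reaches the
// network; http.NoBody keeps the response body readable but empty.
func exampleMockHTTPClient() {
GetDoFunc = func(req *http.Request) (*http.Response, error) {
return &http.Response{StatusCode: http.StatusOK, Body: http.NoBody}, nil
}
client := &MockHTTPClient{}
resp, _ := client.Do(&http.Request{})
_ = resp
}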
package descriptions
import (
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/internal/tracker"
"github.com/Checkmarx/kics/v2/pkg/model"
)
// CheckVersion - checks if using the latest version and saves that information in the tracker
func CheckVersion(t *tracker.CITracker) {
baseVersionInfo := model.Version{
Latest: true,
}
if err := descClient.CheckConnection(); err != nil {
t.TrackVersion(baseVersionInfo)
return
}
versionInfo, err := descClient.CheckLatestVersion(constants.Version)
if err != nil {
t.TrackVersion(baseVersionInfo)
return
}
t.TrackVersion(versionInfo)
}
package detector
import (
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog"
)
const (
undetectedVulnerabilityLine = -1
)
type defaultDetectLine struct {
}
// DetectLine searches for the vulnerability line; it is used when the file kind has no specific kindDetectLine registered
func (d defaultDetectLine) DetectLine(file *model.FileMetadata, searchKey string,
outputLines int, logwithfields *zerolog.Logger) model.VulnerabilityLines {
detector := &DefaultDetectLineResponse{
CurrentLine: 0,
IsBreak: false,
FoundAtLeastOne: false,
ResolvedFile: file.FilePath,
ResolvedFiles: d.prepareResolvedFiles(file.ResolvedFiles),
}
var extractedString [][]string
extractedString = GetBracketValues(searchKey, extractedString, "")
sanitizedSubstring := searchKey
for idx, str := range extractedString {
sanitizedSubstring = strings.Replace(sanitizedSubstring, str[0], `{{`+strconv.Itoa(idx)+`}}`, -1)
}
lines := *file.LinesOriginalData
splitSanitized := strings.Split(sanitizedSubstring, ".")
for index, split := range splitSanitized {
if strings.Contains(split, "$ref") {
splitSanitized[index] = strings.Join(splitSanitized[index:], ".")
splitSanitized = splitSanitized[:index+1]
break
}
}
for _, key := range splitSanitized {
substr1, substr2 := GenerateSubstrings(key, extractedString)
// BICEP-specific tweaks in order to make bicep files compatible with ARM queries
if file.Kind == "BICEP" {
substr1 = strings.ReplaceAll(substr1, "resources", "resource")
substr1 = strings.ReplaceAll(substr1, "parameters", "param")
substr1 = strings.ReplaceAll(substr1, "variables", "variable")
}
detector, lines = detector.DetectCurrentLine(substr1, substr2, 0, lines)
if detector.IsBreak {
break
}
}
if detector.FoundAtLeastOne {
return model.VulnerabilityLines{
Line: detector.CurrentLine + 1,
VulnLines: GetAdjacentVulnLines(detector.CurrentLine, outputLines, lines),
ResolvedFile: detector.ResolvedFile,
}
}
var filePathSplit = strings.Split(file.FilePath, "/")
logwithfields.Warn().Msgf("Failed to detect line associated with identified result in file %s\n", filePathSplit[len(filePathSplit)-1])
return model.VulnerabilityLines{
Line: undetectedVulnerabilityLine,
VulnLines: &[]model.CodeLine{},
ResolvedFile: detector.ResolvedFile,
}
}
func (d defaultDetectLine) prepareResolvedFiles(resFiles map[string]model.ResolvedFile) map[string]model.ResolvedFileSplit {
resolvedFiles := make(map[string]model.ResolvedFileSplit)
for f, res := range resFiles {
resolvedFiles[f] = model.ResolvedFileSplit{
Path: res.Path,
Lines: *res.LinesContent,
}
}
return resolvedFiles
}
package detector
import (
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog"
)
type kindDetectLine interface {
DetectLine(file *model.FileMetadata, searchKey string, outputLines int, logWithFields *zerolog.Logger) model.VulnerabilityLines
}
// DetectLine is a struct that associates a kindDetectLine to its FileKind
type DetectLine struct {
detectors map[model.FileKind]kindDetectLine
outputLines int
logWithFields *zerolog.Logger
defaultDetector kindDetectLine
}
// NewDetectLine creates a new DetectLine's reference
func NewDetectLine(outputLines int) *DetectLine {
return &DetectLine{
detectors: make(map[model.FileKind]kindDetectLine),
logWithFields: &zerolog.Logger{},
outputLines: outputLines,
defaultDetector: defaultDetectLine{},
}
}
// SetupLogs will change the logger field to be used in the kindDetectLine DetectLine method
func (d *DetectLine) SetupLogs(logger *zerolog.Logger) {
d.logWithFields = logger
}
// Add adds a new kindDetectLine to the caller and returns it
func (d *DetectLine) Add(detector kindDetectLine, kind model.FileKind) *DetectLine {
d.detectors[kind] = detector
return d
}
// DetectLine will use the correct kindDetectLine according to the file's kind;
// if the file kind has no registered detector, the default detect line is used
func (d *DetectLine) DetectLine(file *model.FileMetadata, searchKey string, logWithFields *zerolog.Logger) model.VulnerabilityLines {
if det, ok := d.detectors[file.Kind]; ok {
return det.DetectLine(file, searchKey, d.outputLines, logWithFields)
}
return d.defaultDetector.DetectLine(file, searchKey, d.outputLines, logWithFields)
}
// GetAdjacent finds and returns the lines adjacent to the line containing the vulnerability
func (d *DetectLine) GetAdjacent(file *model.FileMetadata, line int) model.VulnerabilityLines {
return model.VulnerabilityLines{
Line: line,
VulnLines: GetAdjacentVulnLines(line-1, d.outputLines, *file.LinesOriginalData),
ResolvedFile: file.FilePath,
}
}
package docker
import (
"regexp"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/detector"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog"
)
// DetectKindLine defines a kindDetectLine type
type DetectKindLine struct {
}
const (
undetectedVulnerabilityLine = -1
)
var (
nameRegexDockerFileML = regexp.MustCompile(`.+\s+\\$`)
commentRegex = regexp.MustCompile(`^\s*#.*`)
splitRegex = regexp.MustCompile(`\s\\`)
)
// DetectLine searches vulnerability line in docker files
func (d DetectKindLine) DetectLine(file *model.FileMetadata, searchKey string,
outputLines int, logwithfields *zerolog.Logger) model.VulnerabilityLines {
det := &detector.DefaultDetectLineResponse{
CurrentLine: 0,
IsBreak: false,
FoundAtLeastOne: false,
ResolvedFile: file.FilePath,
ResolvedFiles: make(map[string]model.ResolvedFileSplit),
}
var extractedString [][]string
extractedString = detector.GetBracketValues(searchKey, extractedString, "")
sKey := searchKey
for idx, str := range extractedString {
sKey = strings.Replace(sKey, str[0], `{{`+strconv.Itoa(idx)+`}}`, -1)
}
unchangedText := make([]string, len(*file.LinesOriginalData))
copy(unchangedText, *file.LinesOriginalData)
for _, key := range strings.Split(sKey, ".") {
substr1, substr2 := detector.GenerateSubstrings(key, extractedString)
det, _ = det.DetectCurrentLine(substr1, substr2, 0, prepareDockerFileLines(*file.LinesOriginalData))
if det.IsBreak {
break
}
}
if det.FoundAtLeastOne {
return model.VulnerabilityLines{
Line: det.CurrentLine + 1,
VulnLines: detector.GetAdjacentVulnLines(det.CurrentLine, outputLines, unchangedText),
ResolvedFile: file.FilePath,
}
}
logwithfields.Warn().Msgf("Failed to detect Docker line, query response %s", sKey)
return model.VulnerabilityLines{
Line: undetectedVulnerabilityLine,
VulnLines: &[]model.CodeLine{},
ResolvedFile: file.FilePath,
}
}
func prepareDockerFileLines(text []string) []string {
for idx, key := range text {
if !commentRegex.MatchString(key) {
text[idx] = multiLineSpliter(text, key, idx)
}
}
return text
}
func multiLineSpliter(textSplit []string, key string, idx int) string {
if nameRegexDockerFileML.MatchString(key) {
i := idx + 1
if i >= len(textSplit) {
return textSplit[idx]
}
for textSplit[i] == "" {
i++
if i >= len(textSplit) {
return textSplit[idx]
}
}
if commentRegex.MatchString(textSplit[i]) {
textSplit[i] += " \\"
}
textSplit[idx] = splitRegex.ReplaceAllLiteralString(textSplit[idx], " "+textSplit[i])
textSplit[i] = ""
textSplit[idx] = multiLineSpliter(textSplit, textSplit[idx], idx)
}
return textSplit[idx]
}
package helm
import (
"fmt"
"sort"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/detector"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/agnivade/levenshtein"
"github.com/rs/zerolog"
)
// DetectKindLine defines a kindDetectLine type
type DetectKindLine struct {
}
type detectCurlLine struct {
foundRes bool
lineRes int
breakRes bool
lastUnique dupHistory
}
// dupHistory keeps the history of uniques
type dupHistory struct {
unique bool
lastUniqueLine int
}
const (
undetectedVulnerabilityLine = -1
)
// DetectLine is used to detect the line on the helm template,
// it looks only at the keys of the template and makes use of the auxiliary added
// lines (ex: "# KICS_HELM_ID_")
func (d DetectKindLine) DetectLine(file *model.FileMetadata, searchKey string,
outputLines int, logWithFields *zerolog.Logger) model.VulnerabilityLines {
searchKey = fmt.Sprintf("%s.%s", strings.TrimRight(strings.TrimLeft(file.HelmID, "# "), ":"), searchKey)
lines := make([]string, len(*file.LinesOriginalData))
copy(lines, *file.LinesOriginalData)
curLineRes := detectCurlLine{
foundRes: false,
lineRes: 0,
breakRes: false,
}
var extractedString [][]string
extractedString = detector.GetBracketValues(searchKey, extractedString, "")
sanitizedSubstring := searchKey
for idx, str := range extractedString {
sanitizedSubstring = strings.Replace(sanitizedSubstring, str[0], `{{`+strconv.Itoa(idx)+`}}`, -1)
}
helmID, err := strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(file.HelmID, "# KICS_HELM_ID_"), ":"))
if err != nil {
helmID = -1
}
// Since we are only looking at keys we can ignore the second value passed through '=' and '[]'
for _, key := range strings.Split(sanitizedSubstring, ".") {
substr1, _ := detector.GenerateSubstrings(key, extractedString)
curLineRes = curLineRes.detectCurrentLine(lines, fmt.Sprintf("%s:", substr1), "", true, file.IDInfo, helmID)
if curLineRes.breakRes {
break
}
}
// Look at dupHistory to see if the last element was duplicate, if so
// change the line to the last unique key
if !curLineRes.lastUnique.unique {
curLineRes.lineRes = curLineRes.lastUnique.lastUniqueLine
}
if curLineRes.foundRes {
lineRemove := make(map[int]int)
count := 0
for i, line := range lines { // Remove auxiliary lines
if strings.Contains(line, "# KICS_HELM_ID_") {
count++
lineRemove[i] = count
lines = append(lines[:i], lines[i+1:]...)
}
}
// Update found line
curLineRes.lineRes = removeLines(curLineRes.lineRes, lineRemove)
return model.VulnerabilityLines{
Line: curLineRes.lineRes + 1,
VulnLines: detector.GetAdjacentVulnLines(curLineRes.lineRes, outputLines, lines),
LineWithVulnerability: strings.Split(lines[curLineRes.lineRes], ": ")[0],
ResolvedFile: file.FilePath,
}
}
var filePathSplit = strings.Split(file.FilePath, "/")
logWithFields.Warn().Msgf("Failed to detect line associated with identified result in file %s\n", filePathSplit[len(filePathSplit)-1])
return model.VulnerabilityLines{
Line: undetectedVulnerabilityLine,
VulnLines: &[]model.CodeLine{},
ResolvedFile: file.FilePath,
}
}
// removeLines is used to update the vulnerability line after removing the "# KICS_HELM_ID_"
func removeLines(current int, lineRemove map[int]int) int {
orderByKey := make([]int, len(lineRemove))
i := 0
for k := range lineRemove {
orderByKey[i] = k
i++
}
remove := 0
sort.Ints(orderByKey)
for _, k := range orderByKey {
if current > k {
remove = lineRemove[k]
} else {
break
}
}
current -= remove
return current
}
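// exampleRemoveLines is an illustrative sketch, not part of the original
// source. If auxiliary "# KICS_HELM_ID_" markers were removed at indexes 2
// and 5 (the first and second removals), a hit originally found at line 10
// shifts up by two, to line 8.
func exampleRemoveLines() int {
return removeLines(10, map[int]int{2: 1, 5: 2}) // returns 8
}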
func (d detectCurlLine) detectCurrentLine(lines []string, str1,
str2 string, byKey bool, idInfo map[int]interface{}, id int) detectCurlLine {
distances := make(map[int]int)
for i := d.lineRes; i < len(lines); i++ {
if str1 != "" && str2 != "" {
if strings.Contains(lines[i], str1) && strings.Contains(lines[i], str2) {
distances[i] = levenshtein.ComputeDistance(detector.ExtractLineFragment(lines[i], str2, byKey), str2)
}
} else if str1 != "" {
if strings.Contains(lines[i], str1) {
distances[i] = levenshtein.ComputeDistance(
detector.ExtractLineFragment(strings.TrimSpace(lines[i]), str1, byKey), str1)
}
}
}
lastSingle := d.lastUnique.lastUniqueLine
if len(distances) == 0 {
return detectCurlLine{
foundRes: d.foundRes,
lineRes: d.lineRes,
breakRes: true,
lastUnique: dupHistory{
lastUniqueLine: lastSingle,
unique: d.lastUnique.unique,
},
}
}
lineResponse := detector.SelectLineWithMinimumDistance(distances, d.lineRes)
// if lineResponse is unique
unique := detectLastSingle(lineResponse, distances, idInfo, id)
if unique {
lastSingle = lineResponse
}
return detectCurlLine{
foundRes: true,
lineRes: lineResponse,
breakRes: false,
lastUnique: dupHistory{
unique: unique,
lastUniqueLine: lastSingle,
},
}
}
// detectLastSingle checks if the line is unique or a duplicate
func detectLastSingle(line int, dis map[int]int, idInfo map[int]interface{}, id int) bool {
if idInfo == nil {
return true
}
for key, value := range dis {
if value == dis[line] && key != line {
// check if we are only looking at original data equivalent to the vulnerability
if ok := idInfo[id].(map[int]int)[key]; ok != 0 {
return false
}
}
}
return true
}
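// Illustrative sketch, not part of the original source: how the kind-specific
// detectors are registered on a DetectLine instance, mirroring the wiring done
// in engine.NewInspector. The package name and the output-lines value are
// assumptions for the example.
package example
import (
"github.com/Checkmarx/kics/v2/pkg/detector"
"github.com/Checkmarx/kics/v2/pkg/detector/docker"
"github.com/Checkmarx/kics/v2/pkg/detector/helm"
"github.com/Checkmarx/kics/v2/pkg/model"
)
// newLineDetector registers the helm and docker detectors; any other file kind
// falls back to the package's default detector.
func newLineDetector() *detector.DetectLine {
return detector.NewDetectLine(3).
Add(helm.DetectKindLine{}, model.KindHELM).
Add(docker.DetectKindLine{}, model.KindDOCKER).
Add(docker.DetectKindLine{}, model.KindBUILDAH)
}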
package detector
import (
"fmt"
"regexp"
"strconv"
"strings"
"github.com/agnivade/levenshtein"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog/log"
)
var (
nameRegex = regexp.MustCompile(`^([A-Za-z\d-_]+)\[([A-Za-z\d-_{}]+)]$`)
nameRegexDocker = regexp.MustCompile(`{{(.*?)}}`)
)
const (
namePartsLength = 3
valuePartsLength = 2
)
// DefaultDetectLineResponse is the default response for struct DetectLine
type DefaultDetectLineResponse struct {
CurrentLine int
IsBreak bool
FoundAtLeastOne bool
ResolvedFile string
ResolvedFiles map[string]model.ResolvedFileSplit
}
// GetBracketValues gets values inside "{{ }}" ignoring any "{{" or "}}" inside
func GetBracketValues(expr string, list [][]string, restOfString string) [][]string {
var tempList []string
firstOpen := strings.Index(expr, "{{")
firstClose := strings.Index(expr, "}}")
for firstOpen > firstClose && firstClose != -1 {
firstClose = strings.Index(expr[firstOpen:], "}}") + firstOpen
}
// in case we have '}}}' we need to advance one position to get the close
for firstClose+2 < len(expr) && string(expr[firstClose+2]) == `}` && firstClose != -1 {
firstClose++
}
switch t := firstClose - firstOpen; t >= 0 {
case true:
if t == 0 && expr != "" {
tempList = append(tempList, fmt.Sprintf("{{%s}}", expr), expr)
list = append(list, tempList)
}
if t == 0 && restOfString == "" {
return list // if there is no more string to read from return value of list
}
if t > 0 && firstOpen+2 <= firstClose {
list = GetBracketValues(expr[firstOpen+2:firstClose], list, expr[firstClose+2:])
} else {
list = GetBracketValues(restOfString, list, "") // recursive call to the rest of the string
}
case false:
nextClose := strings.Index(restOfString, "}}")
tempNextClose := nextClose + 2
if tempNextClose == len(restOfString) {
tempNextClose = nextClose
}
tempList = append(tempList, fmt.Sprintf("{{%s}}%s}}", expr, restOfString[:tempNextClose]),
fmt.Sprintf("%s}}%s", expr, restOfString[:tempNextClose]))
list = append(list, tempList)
list = GetBracketValues(restOfString[nextClose+2:], list, "") // recursive call to the rest of the string
}
return list
}
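// exampleGetBracketValues is an illustrative sketch, not part of the original
// source. For the lone expression "{{web_app}}" the function yields a single
// pair: the bracketed form and its inner value. Longer keys also produce
// entries for trailing non-bracketed segments, but those never match during
// the caller's placeholder substitution, so they are harmless.
func exampleGetBracketValues() [][]string {
var extracted [][]string
extracted = GetBracketValues("{{web_app}}", extracted, "")
// extracted is now [][]string{{"{{web_app}}", "web_app"}}
return extracted
}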
// GenerateSubstrings returns the substrings used for line searching depending on the search key
// '.' separates keys found on different lines
// '=' marks a value on the same line as its key
// '[]' marks an index on the same line as its key
func GenerateSubstrings(key string, extractedString [][]string) (substr1Res, substr2Res string) {
var substr1, substr2 string
if parts := nameRegex.FindStringSubmatch(key); len(parts) == namePartsLength {
substr1, substr2 = getKeyWithCurlyBrackets(key, extractedString, parts)
} else if parts := strings.Split(key, "="); len(parts) == valuePartsLength {
substr1, substr2 = getKeyWithCurlyBrackets(key, extractedString, parts)
} else {
parts := []string{key, ""}
substr1, substr2 = getKeyWithCurlyBrackets(key, extractedString, parts)
}
return substr1, substr2
}
func getKeyWithCurlyBrackets(key string, extractedString [][]string, parts []string) (substr1Res, substr2Res string) {
var substr1, substr2 string
extractedPart := nameRegexDocker.FindStringSubmatch(key)
if len(extractedPart) == valuePartsLength {
for idx, key := range parts {
if extractedPart[0] == key {
switch idx {
case len(parts) - 2:
i, err := strconv.Atoi(extractedPart[1])
if err != nil {
log.Error().Msgf("failed to extract curly brackets substring")
}
if len(extractedString) > i {
if extractedString[i][1] != "" {
substr1 = extractedString[i][1]
}
}
case len(parts) - 1:
i, err := strconv.Atoi(extractedPart[1])
if err != nil {
log.Error().Msgf("failed to extract curly brackets substring")
}
if len(extractedString) > i {
if extractedString[i][1] != "" {
substr2 = extractedString[i][1]
}
}
}
} else {
substr1 = generateSubstr(substr1, parts, valuePartsLength)
substr2 = generateSubstr(substr2, parts, 1)
}
}
} else {
substr1 = parts[len(parts)-2]
substr2 = parts[len(parts)-1]
}
return substr1, substr2
}
func generateSubstr(substr string, parts []string, length int) string {
if substr == "" {
substr = parts[len(parts)-length]
}
return substr
}
// GetAdjacentVulnLines is used to get the lines adjacent to the line that contains the vulnerability
// adj is the number of lines wanted
func GetAdjacentVulnLines(idx, adj int, lines []string) *[]model.CodeLine {
var endPos int
var startPos int
if adj <= len(lines) {
endPos = idx + adj/2 + 1 // if adj lines passes the number of lines in file
if len(lines) < endPos {
endPos = len(lines)
}
startAdj := adj
if adj%2 == 0 {
startAdj--
}
startPos = idx - startAdj/2 // if adj lines passes the first line in the file
if startPos < 0 {
startPos = 0
}
} else { // in case adj is bigger than number of lines in file
adj = len(lines)
endPos = len(lines)
startPos = 0
}
switch idx {
case 0:
// case vulnerability is the first line of the file
return createVulnLines(1, lines[:adj])
case len(lines) - 1:
// case vulnerability is the last line of the file
return createVulnLines(len(lines)-adj+1, lines[len(lines)-adj:])
default:
// case vulnerability is in the middle of the file
return createVulnLines(startPos+1, lines[startPos:endPos])
}
}
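// exampleAdjacentWindow is an illustrative sketch, not part of the original
// source. With a vulnerability at zero-based index 4 and a three-line window,
// the returned snippet covers one-based lines 4 to 6: one line of context on
// each side of the finding (assuming the file has at least six lines).
func exampleAdjacentWindow(lines []string) *[]model.CodeLine {
return GetAdjacentVulnLines(4, 3, lines)
}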
// createVulnLines is the function that will generate the array that contains the line numbers
// used to alter the color of the line that contains the vulnerability
func createVulnLines(startPos int, lines []string) *[]model.CodeLine {
vulns := make([]model.CodeLine, len(lines))
for idx, line := range lines {
vulns[idx] = model.CodeLine{
Line: line,
Position: startPos,
}
startPos++
}
return &vulns
}
// SelectLineWithMinimumDistance will search a map of levenshtein distances to find the minimum distance
func SelectLineWithMinimumDistance(distances map[int]int, startingFrom int) int {
minDistance, lineOfMinDistance := constants.MaxInteger, startingFrom
for line, distance := range distances {
if distance < minDistance || distance == minDistance && line < lineOfMinDistance {
minDistance = distance
lineOfMinDistance = line
}
}
return lineOfMinDistance
}
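// exampleMinimumDistance is an illustrative sketch, not part of the original
// source. Among candidate lines 3, 7 and 9 with distances 4, 2 and 2, line 7
// is selected: it ties with line 9 on the minimum distance but appears first.
func exampleMinimumDistance() int {
distances := map[int]int{3: 4, 7: 2, 9: 2}
return SelectLineWithMinimumDistance(distances, 0) // returns 7
}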
// ExtractLineFragment will prepare substr for line detection
func ExtractLineFragment(line, substr string, key bool) string {
// If detecting line by keys only
idx := strings.Index(line, ":")
if key && idx >= 0 {
return line[:idx]
}
start := strings.Index(line, substr)
end := start + len(substr)
for start >= 0 {
if line[start] == ' ' {
break
}
start--
}
for end < len(line) {
if line[end] == ' ' {
break
}
end++
}
return removeExtras(line, start, end)
}
func removeExtras(result string, start, end int) string {
// workaround for selecting yaml keys
if result[end-1] == ':' {
end--
}
if result[end-1] == '"' {
end--
}
if result[start+1] == '"' {
start++
}
return result[start+1 : end]
}
// DetectCurrentLine uses levenshtein distance to find the most accurate line for the vulnerability
func (d *DefaultDetectLineResponse) DetectCurrentLine(str1, str2 string, recurseCount int,
lines []string) (det *DefaultDetectLineResponse, l []string) {
distances := make(map[int]int)
for i := d.CurrentLine; i < len(lines); i++ {
distances = checkLine(str1, str2, distances, lines[i], i)
}
if len(distances) == 0 {
d.IsBreak = true
return d, lines
}
d.CurrentLine = SelectLineWithMinimumDistance(distances, d.CurrentLine)
d.IsBreak = false
d.FoundAtLeastOne = true
return d, lines
}
func checkLine(str1, str2 string, distances map[int]int, line string, i int) map[int]int {
regex := regexp.MustCompile(`^\s+`)
line = regex.ReplaceAllString(line, "")
if str1 != "" && str2 != "" && strings.Contains(line, str1) {
restLine := line[strings.Index(line, str1)+len(str1):]
if strings.Contains(restLine, str2) {
distances[i] = levenshtein.ComputeDistance(ExtractLineFragment(line, str1, false), str1)
distances[i] += levenshtein.ComputeDistance(ExtractLineFragment(restLine, str2, false), str2)
}
} else if str1 != "" && strings.Contains(line, str1) {
distances[i] = levenshtein.ComputeDistance(ExtractLineFragment(line, str1, false), str1)
}
return distances
}
package detector
import (
"encoding/json"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/tidwall/gjson"
)
// searchLineDetector is the struct used to get the line from the payload with lines information
// content - payload with line information
// resolvedPath - string created from pathComponents, used to create gjson paths
// resolvedArrayPath - string created from pathComponents containing an array used to create gjson paths
// targetObj - key of the object we want the line from
type searchLineDetector struct {
content []byte
resolvedPath string
resolvedArrayPath string
targetObj string
}
// GetLineBySearchLine makes use of the gjson pkg to find the line of a key in the original file
// with its path given by a slice of strings
func GetLineBySearchLine(pathComponents []string, file *model.FileMetadata) (int, error) {
content, err := json.Marshal(file.LineInfoDocument)
if err != nil {
return -1, err
}
detector := &searchLineDetector{
content: content,
}
return detector.preparePath(pathComponents), nil
}
// preparePath resolves the path components and retrieves important information
// for the creation of the paths to search
func (d *searchLineDetector) preparePath(pathItems []string) int {
if len(pathItems) == 0 {
return 1
}
// Escaping '.' in path components so it doesn't conflict with gjson pkg
objPath := strings.ReplaceAll(pathItems[0], ".", "\\.")
ArrPath := strings.ReplaceAll(pathItems[0], ".", "\\.")
obj := pathItems[len(pathItems)-1]
arrayObject := ""
// Iterate reversely through the path components and get the key of the last array in the path
// needed for cases where the fields in the array are <"key": "value"> type and not <object>
foundArrayIdx := false
for i := len(pathItems) - 1; i >= 0; i-- {
if _, err := strconv.Atoi(pathItems[i]); err == nil {
foundArrayIdx = true
continue
}
if foundArrayIdx {
arrayObject = pathItems[i]
break
}
}
if arrayObject == objPath {
ArrPath = "_kics_lines._kics_" + arrayObject + "._kics_arr"
}
var treatedPathItems []string
if len(pathItems) > 1 {
treatedPathItems = pathItems[1 : len(pathItems)-1]
}
// Create a string based on the path components so it can be later transformed in a gjson path
for _, pathItem := range treatedPathItems {
// In case of an array present
if pathItem == arrayObject {
ArrPath += "._kics_lines._kics_" + strings.ReplaceAll(pathItem, ".", "\\.") + "._kics_arr"
} else {
ArrPath += "." + strings.ReplaceAll(pathItem, ".", "\\.")
}
objPath += "." + strings.ReplaceAll(pathItem, ".", "\\.")
}
d.resolvedPath = objPath
d.resolvedArrayPath = ArrPath
d.targetObj = obj
return d.getResult()
}
// getResult creates the paths to be used by gjson pkg to find the line in the content
func (d *searchLineDetector) getResult() int {
pathObjects := []string{
d.resolvedPath + "._kics_lines._kics_" + d.targetObj + "._kics_line",
d.resolvedPath + "." + d.targetObj + "._kics_lines._kics__default._kics_line",
d.resolvedArrayPath + "." + d.targetObj + "._kics__default._kics_line",
d.resolvedArrayPath + "._kics_" + d.targetObj + "._kics_line",
}
result := -1
// run gjson pkg
for _, pathItem := range pathObjects {
if tmpResult := gjson.GetBytes(d.content, pathItem); int(tmpResult.Int()) > 0 {
result = int(tmpResult.Int())
break
}
}
return result
}
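// exampleSearchLine is an illustrative sketch, not part of the original
// source. The payload mimics the "_kics_lines" shape the path templates in
// getResult expect: the line of key "image" inside "spec" is stored under
// spec._kics_lines._kics_image._kics_line. The JSON document is hypothetical.
func exampleSearchLine() int {
content := []byte(`{"spec": {"_kics_lines": {"_kics_image": {"_kics_line": 7}}}}`)
result := gjson.GetBytes(content, "spec._kics_lines._kics_image._kics_line")
return int(result.Int()) // returns 7
}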
package engine
import (
"bytes"
"context"
"encoding/json"
"fmt"
"runtime"
"strings"
"sync"
"time"
"github.com/Checkmarx/kics/v2/internal/metrics"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/detector"
"github.com/Checkmarx/kics/v2/pkg/detector/docker"
"github.com/Checkmarx/kics/v2/pkg/detector/helm"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/open-policy-agent/opa/ast"
"github.com/open-policy-agent/opa/cover"
"github.com/open-policy-agent/opa/rego"
"github.com/open-policy-agent/opa/storage/inmem"
"github.com/open-policy-agent/opa/topdown"
"github.com/open-policy-agent/opa/util"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// Default values for inspector
const (
UndetectedVulnerabilityLine = -1
DefaultQueryID = "Undefined"
DefaultQueryName = "Anonymous"
DefaultExperimental = false
DefaultQueryDescription = "Undefined"
DefaultQueryDescriptionID = "Undefined"
DefaultQueryURI = "https://github.com/Checkmarx/kics/"
DefaultIssueType = model.IssueTypeIncorrectValue
regoQuery = `result = data.Cx.CxPolicy`
)
// ErrNoResult - error representing when a query didn't return a result
var ErrNoResult = errors.New("query: no result")
// ErrInvalidResult - error representing invalid result
var ErrInvalidResult = errors.New("query: invalid result format")
// QueryLoader is responsible for loading the queries for the inspector
type QueryLoader struct {
commonLibrary source.RegoLibraries
platformLibraries map[string]source.RegoLibraries
querySum int
QueriesMetadata []model.QueryMetadata
}
// VulnerabilityBuilder represents a function that will build a vulnerability
type VulnerabilityBuilder func(ctx *QueryContext, tracker Tracker, v interface{},
detector *detector.DetectLine, useOldSeverities bool, kicsComputeNewSimID bool) (*model.Vulnerability, error)
// PreparedQuery includes the opaQuery and its metadata
type PreparedQuery struct {
OpaQuery rego.PreparedEvalQuery
Metadata model.QueryMetadata
}
// Inspector represents a list of compiled queries, a builder for vulnerabilities, an information tracker,
// a flag to enable coverage and the coverage report if it is enabled
type Inspector struct {
QueryLoader *QueryLoader
vb VulnerabilityBuilder
tracker Tracker
failedQueries map[string]error
excludeResults map[string]bool
detector *detector.DetectLine
enableCoverageReport bool
coverageReport cover.Report
queryExecTimeout time.Duration
useOldSeverities bool
numWorkers int
kicsComputeNewSimID bool
}
// QueryContext contains the context where the query is executed, the scan it belongs to, basic information of the query,
// the compiled query and its payload
type QueryContext struct {
Ctx context.Context
scanID string
Files map[string]model.FileMetadata
Query *PreparedQuery
payload *ast.Value
BaseScanPaths []string
}
var (
unsafeRegoFunctions = map[string]struct{}{
"http.send": {},
"opa.runtime": {},
}
)
func adjustNumWorkers(workers int) int {
// for the case in which the end user decides to use num workers as "auto-detected"
// we will set the number of workers to the number of CPUs available based on GOMAXPROCS value
if workers == 0 {
return runtime.GOMAXPROCS(-1)
}
return workers
}
// NewInspector initializes an inspector, compiling and loading queries for scan and its tracker
func NewInspector(
ctx context.Context,
queriesSource source.QueriesSource,
vb VulnerabilityBuilder,
tracker Tracker,
queryParameters *source.QueryInspectorParameters,
excludeResults map[string]bool,
queryTimeout int,
useOldSeverities bool,
needsLog bool,
numWorkers int,
kicsComputeNewSimID bool) (*Inspector, error) {
log.Debug().Msg("engine.NewInspector()")
metrics.Metric.Start("get_queries")
queries, err := queriesSource.GetQueries(queryParameters)
if err != nil {
return nil, errors.Wrap(err, "failed to get queries")
}
commonLibrary, err := queriesSource.GetQueryLibrary("common")
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Inspector failed to get library for %s platform", "common"),
Err: err,
Location: "func NewInspector()",
Platform: "common",
}, true)
return nil, errors.Wrap(err, "failed to get library")
}
platformLibraries := getPlatformLibraries(queriesSource, queries)
queryLoader := prepareQueries(queries, commonLibrary, platformLibraries, tracker)
failedQueries := make(map[string]error)
metrics.Metric.Stop()
if needsLog {
log.Info().
Msgf("Inspector initialized, number of queries=%d", queryLoader.querySum)
}
lineDetector := detector.NewDetectLine(tracker.GetOutputLines()).
Add(helm.DetectKindLine{}, model.KindHELM).
Add(docker.DetectKindLine{}, model.KindDOCKER).
Add(docker.DetectKindLine{}, model.KindBUILDAH)
queryExecTimeout := time.Duration(queryTimeout) * time.Second
if needsLog {
log.Info().Msgf("Query execution timeout=%v", queryExecTimeout)
}
return &Inspector{
QueryLoader: &queryLoader,
vb: vb,
tracker: tracker,
failedQueries: failedQueries,
excludeResults: excludeResults,
detector: lineDetector,
queryExecTimeout: queryExecTimeout,
useOldSeverities: useOldSeverities,
numWorkers: adjustNumWorkers(numWorkers),
kicsComputeNewSimID: kicsComputeNewSimID,
}, nil
}
func getPlatformLibraries(queriesSource source.QueriesSource, queries []model.QueryMetadata) map[string]source.RegoLibraries {
supportedPlatforms := make(map[string]string)
for _, query := range queries {
supportedPlatforms[query.Platform] = ""
}
platformLibraries := make(map[string]source.RegoLibraries)
for platform := range supportedPlatforms {
platformLibrary, errLoadingPlatformLib := queriesSource.GetQueryLibrary(platform)
if errLoadingPlatformLib != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Inspector failed to get library for %s platform", platform),
Err: errLoadingPlatformLib,
Location: "func getPlatformLibraries()",
Platform: platform,
}, true)
continue
}
platformLibraries[platform] = platformLibrary
}
return platformLibraries
}
type InspectionJob struct {
queryID int
}
type QueryResult struct {
vulnerabilities []model.Vulnerability
err error
queryID int
}
// createInspectionJobs creates one inspection job per query, sends them to the jobs channel, and closes it when done
func (c *Inspector) createInspectionJobs(jobs chan<- InspectionJob, queries []model.QueryMetadata) {
defer close(jobs)
for i := range queries {
jobs <- InspectionJob{queryID: i}
}
}
// performInspection consumes inspection jobs, evaluates the corresponding queries, and sends each result to the results channel
func (c *Inspector) performInspection(ctx context.Context, scanID string, files model.FileMetadatas,
astPayload ast.Value, baseScanPaths []string, currentQuery chan<- int64,
jobs <-chan InspectionJob, results chan<- QueryResult, queries []model.QueryMetadata) {
for job := range jobs {
currentQuery <- 1
queryOpa, err := c.QueryLoader.LoadQuery(ctx, &queries[job.queryID])
if err != nil {
continue
}
log.Debug().Msgf("Starting to run query %s", queries[job.queryID].Query)
queryStartTime := time.Now()
query := &PreparedQuery{
OpaQuery: *queryOpa,
Metadata: queries[job.queryID],
}
queryContext := &QueryContext{
Ctx: ctx,
scanID: scanID,
Files: files.ToMap(),
Query: query,
payload: &astPayload,
BaseScanPaths: baseScanPaths,
}
vuls, err := c.doRun(queryContext)
if err == nil {
log.Debug().Msgf("Finished to run query %s after %v", queries[job.queryID].Query, time.Since(queryStartTime))
c.tracker.TrackQueryExecution(query.Metadata.Aggregation)
}
results <- QueryResult{vulnerabilities: vuls, err: err, queryID: job.queryID}
}
}
func (c *Inspector) Inspect(
ctx context.Context,
scanID string,
files model.FileMetadatas,
baseScanPaths []string,
platforms []string,
currentQuery chan<- int64) ([]model.Vulnerability, error) {
log.Debug().Msg("engine.Inspect()")
combinedFiles := files.Combine(false)
var vulnerabilities []model.Vulnerability
vulnerabilities = make([]model.Vulnerability, 0)
var p interface{}
payload, err := json.Marshal(combinedFiles)
if err != nil {
return vulnerabilities, err
}
err = util.UnmarshalJSON(payload, &p)
if err != nil {
return vulnerabilities, err
}
astPayload, err := ast.InterfaceToValue(p)
if err != nil {
return vulnerabilities, err
}
queries := c.getQueriesByPlat(platforms)
// Create a channel to collect the results
results := make(chan QueryResult, len(queries))
// Create a channel for inspection jobs
jobs := make(chan InspectionJob, len(queries))
var wg sync.WaitGroup
// Start a goroutine for each worker
for w := 0; w < c.numWorkers; w++ {
wg.Add(1)
go func() {
// Decrement the counter when the goroutine completes
defer wg.Done()
c.performInspection(ctx, scanID, files, astPayload, baseScanPaths, currentQuery, jobs, results, queries)
}()
}
// Start a goroutine to create inspection jobs
go c.createInspectionJobs(jobs, queries)
go func() {
// Wait for all jobs to finish
wg.Wait()
// Then close the results channel
close(results)
}()
// Collect all the results
for result := range results {
if result.err != nil {
fmt.Println()
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Inspector. query executed with error, query=%s", queries[result.queryID].Query),
Err: result.err,
Location: "func Inspect()",
Platform: queries[result.queryID].Platform,
Metadata: queries[result.queryID].Metadata,
Query: queries[result.queryID].Query,
}, true)
c.failedQueries[queries[result.queryID].Query] = result.err
continue
}
vulnerabilities = append(vulnerabilities, result.vulnerabilities...)
}
return vulnerabilities, nil
}
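// exampleWorkerPool is an illustrative sketch, not part of the original
// source, of the jobs/results pattern used by Inspect: a fixed set of workers
// drains a closed jobs channel, and the results channel is closed only after
// every worker has returned, so the collecting range loop terminates.
func exampleWorkerPool(numWorkers int, work []int) []int {
jobs := make(chan int, len(work))
results := make(chan int, len(work))
var wg sync.WaitGroup
for w := 0; w < numWorkers; w++ {
wg.Add(1)
go func() {
defer wg.Done()
for j := range jobs {
results <- j * j // stand-in for evaluating one query
}
}()
}
for _, j := range work {
jobs <- j
}
close(jobs)
go func() {
wg.Wait()
close(results)
}()
collected := make([]int, 0, len(work))
for r := range results {
collected = append(collected, r)
}
return collected
}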
// LenQueriesByPlat returns the number of queries by platforms
func (c *Inspector) LenQueriesByPlat(platforms []string) int {
count := 0
for _, query := range c.QueryLoader.QueriesMetadata {
if contains(platforms, query.Platform) {
c.tracker.TrackQueryExecuting(query.Aggregation)
count++
}
}
return count
}
func (c *Inspector) getQueriesByPlat(platforms []string) []model.QueryMetadata {
queries := make([]model.QueryMetadata, 0)
for _, query := range c.QueryLoader.QueriesMetadata {
if contains(platforms, query.Platform) {
queries = append(queries, query)
}
}
return queries
}
// EnableCoverageReport enables the flag to create a coverage report
func (c *Inspector) EnableCoverageReport() {
c.enableCoverageReport = true
}
// GetCoverageReport returns the scan coverage report
func (c *Inspector) GetCoverageReport() cover.Report {
return c.coverageReport
}
// GetFailedQueries returns a map of failed queries and the associated error
func (c *Inspector) GetFailedQueries() map[string]error {
return c.failedQueries
}
func (c *Inspector) doRun(ctx *QueryContext) (vulns []model.Vulnerability, err error) {
timeoutCtx, cancel := context.WithTimeout(ctx.Ctx, c.queryExecTimeout)
defer cancel()
defer func() {
if r := recover(); r != nil {
errMessage := fmt.Sprintf("Recovered from panic during query '%s' run. ", ctx.Query.Metadata.Query)
err = fmt.Errorf("panic: %v", r)
fmt.Println()
log.Err(err).Msg(errMessage)
}
}()
options := []rego.EvalOption{rego.EvalParsedInput(*ctx.payload)}
var cov *cover.Cover
if c.enableCoverageReport {
cov = cover.New()
options = append(options, rego.EvalQueryTracer(cov))
}
results, err := ctx.Query.OpaQuery.Eval(timeoutCtx, options...)
ctx.payload = nil
if err != nil {
if topdown.IsCancel(err) {
return nil, errors.Wrap(err, "query executing timeout exited")
}
return nil, errors.Wrap(err, "failed to evaluate query")
}
if c.enableCoverageReport && cov != nil {
module, parseErr := ast.ParseModule(ctx.Query.Metadata.Query, ctx.Query.Metadata.Content)
if parseErr != nil {
return nil, errors.Wrap(parseErr, "failed to parse coverage module")
}
c.coverageReport = cov.Report(map[string]*ast.Module{
ctx.Query.Metadata.Query: module,
})
}
log.Trace().
Str("scanID", ctx.scanID).
Msgf("Inspector executed with result %+v, query=%s", results, ctx.Query.Metadata.Query)
timeoutCtxToDecode, cancelDecode := context.WithTimeout(ctx.Ctx, c.queryExecTimeout)
defer cancelDecode()
return c.DecodeQueryResults(ctx, timeoutCtxToDecode, results)
}
// DecodeQueryResults decodes the results into []model.Vulnerability
func (c *Inspector) DecodeQueryResults(
ctx *QueryContext,
ctxTimeout context.Context,
results rego.ResultSet) ([]model.Vulnerability, error) {
if len(results) == 0 {
return nil, ErrNoResult
}
result := results[0].Bindings
queryResult, ok := result["result"]
if !ok {
return nil, ErrNoResult
}
queryResultItems, ok := queryResult.([]interface{})
if !ok {
return nil, ErrInvalidResult
}
vulnerabilities := make([]model.Vulnerability, 0, len(queryResultItems))
failedDetectLine := false
timeOut := false
resultsLoop:
for _, queryResultItem := range queryResultItems {
select {
case <-ctxTimeout.Done():
timeOut = true
// a bare "break" here would only exit the select, not the loop
break resultsLoop
default:
vulnerability, aux := getVulnerabilitiesFromQuery(ctx, c, queryResultItem)
if aux {
failedDetectLine = aux
}
if vulnerability != nil && !aux {
vulnerabilities = append(vulnerabilities, *vulnerability)
}
}
}
if timeOut {
fmt.Println()
log.Err(ctxTimeout.Err()).Msgf(
"Timeout processing the results of the query: %s %s",
ctx.Query.Metadata.Platform,
ctx.Query.Metadata.Query)
}
if failedDetectLine {
c.tracker.FailedDetectLine()
}
return vulnerabilities, nil
}
func getVulnerabilitiesFromQuery(ctx *QueryContext, c *Inspector, queryResultItem interface{}) (*model.Vulnerability, bool) {
vulnerability, err := c.vb(ctx, c.tracker, queryResultItem, c.detector, c.useOldSeverities, c.kicsComputeNewSimID)
if err != nil && err.Error() == ErrNoResult.Error() {
// Ignoring bad results
return nil, false
}
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Inspector can't save vulnerability, query=%s", ctx.Query.Metadata.Query),
Err: err,
Location: "func decodeQueryResults()",
Platform: ctx.Query.Metadata.Platform,
Metadata: ctx.Query.Metadata.Metadata,
Query: ctx.Query.Metadata.Query,
}, true)
if _, ok := c.failedQueries[ctx.Query.Metadata.Query]; !ok {
c.failedQueries[ctx.Query.Metadata.Query] = err
}
return nil, false
}
file := ctx.Files[vulnerability.FileID]
if ShouldSkipVulnerability(file.Commands, vulnerability.QueryID) {
log.Debug().Msgf("Skipping vulnerability in file %s for query '%s':%s", file.FilePath, vulnerability.QueryName, vulnerability.QueryID)
return nil, false
}
if vulnerability.Line == UndetectedVulnerabilityLine {
return nil, true
}
if _, ok := c.excludeResults[vulnerability.SimilarityID]; ok {
log.Debug().
Msgf("Excluding result SimilarityID: %s", vulnerability.SimilarityID)
return nil, false
} else if checkComment(vulnerability.Line, file.LinesIgnore) {
log.Debug().
Msgf("Excluding result Comment: %s", vulnerability.SimilarityID)
return nil, false
}
return vulnerability, false
}
// checkComment checks if the vulnerability should be skipped from comment
func checkComment(line int, ignoreLines []int) bool {
for _, ignoreLine := range ignoreLines {
if line == ignoreLine {
return true
}
}
return false
}
// contains checks if a slice contains an entry;
// the "common" platform always matches, and "k8s" is treated as an alias of "kubernetes"
func contains(s []string, e string) bool {
if e == "common" {
return true
}
if e == "k8s" {
e = "kubernetes"
}
for _, a := range s {
if strings.EqualFold(a, e) {
return true
}
}
return false
}
func isDisabled(queries, queryID string, output bool) bool {
for _, query := range strings.Split(queries, ",") {
if strings.EqualFold(query, queryID) {
return output
}
}
return !output
}
// ShouldSkipVulnerability verifies if the vulnerability in question should be ignored through comment commands
func ShouldSkipVulnerability(command model.CommentsCommands, queryID string) bool {
if queries, ok := command["enable"]; ok {
return isDisabled(queries, queryID, false)
}
if queries, ok := command["disable"]; ok {
return isDisabled(queries, queryID, true)
}
return false
}
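// exampleSkip is an illustrative sketch, not part of the original source, of
// the comment-command semantics: with "enable", only the listed query IDs
// run, so every other query is skipped; with "disable", the listed queries
// are skipped instead. The query IDs are hypothetical.
func exampleSkip() (bool, bool) {
enableOnly := model.CommentsCommands{"enable": "1234abcd"}
disabled := model.CommentsCommands{"disable": "1234abcd"}
// first: "ffff0000" is not in the enable list, so it is skipped (true)
// second: "1234abcd" is explicitly disabled, so it is skipped (true)
return ShouldSkipVulnerability(enableOnly, "ffff0000"), ShouldSkipVulnerability(disabled, "1234abcd")
}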
func prepareQueries(queries []model.QueryMetadata, commonLibrary source.RegoLibraries,
platformLibraries map[string]source.RegoLibraries, tracker Tracker) QueryLoader {
// track queries loaded
sum := 0
for _, metadata := range queries {
tracker.TrackQueryLoad(metadata.Aggregation)
sum += metadata.Aggregation
}
return QueryLoader{
commonLibrary: commonLibrary,
platformLibraries: platformLibraries,
querySum: sum,
QueriesMetadata: queries,
}
}
// LoadQuery loads the query into memory so it can be freed when not used anymore
func (q QueryLoader) LoadQuery(ctx context.Context, query *model.QueryMetadata) (*rego.PreparedEvalQuery, error) {
opaQuery := rego.PreparedEvalQuery{}
platformGeneralQuery, ok := q.platformLibraries[query.Platform]
if !ok {
return nil, errors.New("failed to get platform library")
}
select {
case <-ctx.Done():
return nil, ctx.Err()
default:
mergedInputData, err := source.MergeInputData(platformGeneralQuery.LibraryInputData, query.InputData)
if err != nil {
log.Debug().Msgf("Could not merge %s library input data", query.Platform)
}
mergedInputData, err = source.MergeInputData(q.commonLibrary.LibraryInputData, mergedInputData)
if err != nil {
log.Debug().Msg("Could not merge common library input data")
}
store := inmem.NewFromReader(bytes.NewBufferString(mergedInputData))
opaQuery, err = rego.New(
rego.Query(regoQuery),
rego.Module("Common", q.commonLibrary.LibraryCode),
rego.Module("Generic", platformGeneralQuery.LibraryCode),
rego.Module(query.Query, query.Content),
rego.Store(store),
rego.UnsafeBuiltins(unsafeRegoFunctions),
).PrepareForEval(ctx)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Inspector failed to prepare query for evaluation, query=%s", query.Query),
Err: err,
Location: "func NewInspector()",
Query: query.Query,
Metadata: query.Metadata,
Platform: query.Platform,
}, true)
return nil, err
}
return &opaQuery, nil
}
}
// Code generated by MockGen. DO NOT EDIT.
// Source: ./pkg/engine/source/source.go
// Package mock is a generated GoMock package.
package mock
import (
reflect "reflect"
source "github.com/Checkmarx/kics/v2/pkg/engine/source"
model "github.com/Checkmarx/kics/v2/pkg/model"
gomock "github.com/golang/mock/gomock"
)
// MockQueriesSource is a mock of QueriesSource interface.
type MockQueriesSource struct {
ctrl *gomock.Controller
recorder *MockQueriesSourceMockRecorder
}
// MockQueriesSourceMockRecorder is the mock recorder for MockQueriesSource.
type MockQueriesSourceMockRecorder struct {
mock *MockQueriesSource
}
// NewMockQueriesSource creates a new mock instance.
func NewMockQueriesSource(ctrl *gomock.Controller) *MockQueriesSource {
mock := &MockQueriesSource{ctrl: ctrl}
mock.recorder = &MockQueriesSourceMockRecorder{mock}
return mock
}
// EXPECT returns an object that allows the caller to indicate expected use.
func (m *MockQueriesSource) EXPECT() *MockQueriesSourceMockRecorder {
return m.recorder
}
// GetQueries mocks base method.
func (m *MockQueriesSource) GetQueries(querySelection *source.QueryInspectorParameters) ([]model.QueryMetadata, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetQueries", querySelection)
ret0, _ := ret[0].([]model.QueryMetadata)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetQueries indicates an expected call of GetQueries.
func (mr *MockQueriesSourceMockRecorder) GetQueries(querySelection interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetQueries", reflect.TypeOf((*MockQueriesSource)(nil).GetQueries), querySelection)
}
// GetQueryLibrary mocks base method.
func (m *MockQueriesSource) GetQueryLibrary(platform string) (source.RegoLibraries, error) {
m.ctrl.T.Helper()
ret := m.ctrl.Call(m, "GetQueryLibrary", platform)
ret0, _ := ret[0].(source.RegoLibraries)
ret1, _ := ret[1].(error)
return ret0, ret1
}
// GetQueryLibrary indicates an expected call of GetQueryLibrary.
func (mr *MockQueriesSourceMockRecorder) GetQueryLibrary(platform interface{}) *gomock.Call {
mr.mock.ctrl.T.Helper()
return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "GetQueryLibrary", reflect.TypeOf((*MockQueriesSource)(nil).GetQueryLibrary), platform)
}
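// Illustrative sketch, not part of the original source: driving the generated
// mock from a test. The test name, the expected values, and the import path
// of the generated mock package are assumptions.
package mock_test
import (
"testing"
"github.com/golang/mock/gomock"
mock "github.com/Checkmarx/kics/v2/mocks"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/model"
)
func TestQueriesSourceSketch(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
src := mock.NewMockQueriesSource(ctrl)
// expect exactly one GetQueries call and return an empty query set
src.EXPECT().GetQueries(gomock.Any()).Return([]model.QueryMetadata{}, nil)
queries, err := src.GetQueries(&source.QueryInspectorParameters{})
if err != nil || len(queries) != 0 {
t.Fatalf("unexpected result: %v, %v", queries, err)
}
}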
package provider
import (
"context"
"errors"
"io/fs"
"os"
"os/signal"
"path/filepath"
"sync"
"github.com/alexmullins/zip"
"github.com/Checkmarx/kics/v2/pkg/kuberneter"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/rs/zerolog/log"
"github.com/hashicorp/go-getter"
)
const (
channelLength = 2
)
// ExtractedPath is a struct that contains the paths to scan
// and the extraction map of the sources
// Path is the slice of paths to scan
// ExtractionMap is a map that correlates the temporary path to the given path
type ExtractedPath struct {
Path []string
ExtractionMap map[string]model.ExtractedPathObject
}
type getterStruct struct {
ctx context.Context
cancel context.CancelFunc
mode getter.ClientMode
pwd string
opts []getter.ClientOption
destination string
source string
}
// GetKuberneterSources uses the Kubernetes API to download runtime resources
// after the files are downloaded, KICS scans them as normal local files
func GetKuberneterSources(ctx context.Context, source []string, destinationPath string) (ExtractedPath, error) {
extrStruct := ExtractedPath{
Path: []string{},
ExtractionMap: make(map[string]model.ExtractedPathObject),
}
for _, path := range source {
exportedPath, err := kuberneter.Import(ctx, path, destinationPath)
if err != nil {
log.Error().Msgf("failed to import %s: %s", path, err)
}
extrStruct.ExtractionMap[exportedPath] = model.ExtractedPathObject{
Path: exportedPath,
LocalPath: true,
}
extrStruct.Path = append(extrStruct.Path, exportedPath)
}
return extrStruct, nil
}
// GetSources goes through the source slice and determines the source type (ex: zip, git, local).
// It then extracts the files to be scanned. If the given source is not local, a temp dir
// will be created where the files will be stored.
func GetSources(source []string) (ExtractedPath, error) {
extrStruct := ExtractedPath{
Path: []string{},
ExtractionMap: make(map[string]model.ExtractedPathObject),
}
for _, path := range source {
destination := filepath.Join(os.TempDir(), "kics-extract-"+utils.NextRandom())
mode := getter.ClientModeAny
pwd, err := os.Getwd()
if err != nil {
log.Fatal().Msgf("Error getting wd: %s", err)
}
opts := []getter.ClientOption{}
opts = append(opts, getter.WithInsecure())
ctx, cancel := context.WithCancel(context.Background())
goGetter := getterStruct{
ctx: ctx,
cancel: cancel,
mode: mode,
pwd: pwd,
opts: opts,
destination: destination,
source: path,
}
getterDst, err := getPaths(&goGetter)
if err != nil {
if ignoreDamagedFiles(path) {
continue
}
log.Error().Msgf("%s", err)
return ExtractedPath{}, err
}
tempDst, local := checkSymLink(getterDst, path)
extrStruct.ExtractionMap[getterDst] = model.ExtractedPathObject{
Path: path,
LocalPath: local,
}
extrStruct.Path = append(extrStruct.Path, tempDst)
}
return extrStruct, nil
}
func getPaths(g *getterStruct) (string, error) {
if isEncrypted(g.source) {
err := errors.New("zip encrypted files are not supported")
log.Err(err)
return "", err
}
// Build the client
client := &getter.Client{
Ctx: g.ctx,
Src: g.source,
Dst: g.destination,
Pwd: g.pwd,
Mode: g.mode,
Options: g.opts,
}
wg := sync.WaitGroup{}
wg.Add(1)
errChan := make(chan error, channelLength)
go func() {
defer wg.Done()
defer g.cancel()
if err := client.Get(); err != nil {
errChan <- err
}
}()
c := make(chan os.Signal, channelLength)
signal.Notify(c, os.Interrupt)
select {
case <-c:
signal.Reset(os.Interrupt)
g.cancel()
wg.Wait()
case <-g.ctx.Done():
wg.Wait()
case err := <-errChan:
wg.Wait()
return "", err
}
return g.destination, nil
}
// check if the dst is a symbolic link
func checkSymLink(getterDst, pathFile string) (string, bool) {
var local bool
_, err := os.Stat(pathFile)
if err == nil { // check if file exists locally
local = true
}
info, err := os.Lstat(getterDst)
if err != nil {
log.Error().Msgf("failed lstat for %s: %v", getterDst, err)
}
fileInfo := getFileInfo(info, getterDst, pathFile)
if info.Mode()&os.ModeSymlink != 0 { // if it's a symbolic Link
path, err := os.Readlink(getterDst) // get location of symbolic Link
if err != nil {
log.Error().Msgf("failed Readlink for %s: %v", getterDst, err)
}
getterDst = path // change path to local path
} else if !fileInfo.IsDir() { // symbolic links are not created for single files
if local { // check if file exists locally
getterDst = pathFile
}
}
return getterDst, local
}
func getFileInfo(info fs.FileInfo, dst, pathFile string) fs.FileInfo {
var extension = filepath.Ext(pathFile)
var path string
if extension == "" {
path = filepath.Join(dst, filepath.Base(pathFile[0:len(pathFile)-len(extension)])) // for single file
} else {
path = filepath.Join(dst, filepath.Base(pathFile)) // for directories
}
fileInfo, err := os.Lstat(path)
if err != nil {
fileInfo = info
}
return fileInfo
}
func isEncrypted(sourceFile string) bool {
if filepath.Ext(sourceFile) != ".zip" {
return false
}
zipFile, err := zip.OpenReader(sourceFile)
if err != nil {
log.Error().Msgf("failed to open %s: %v", sourceFile, err)
return false
}
defer zipFile.Close()
for _, file := range zipFile.File {
if file.IsEncrypted() {
log.Error().Msgf("file %s is encrypted", sourceFile)
return true
}
}
return false
}
package provider
import (
"context"
"fmt"
ioFs "io/fs"
"os"
"path/filepath"
"regexp"
"strings"
"sync"
"syscall"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"github.com/yargevad/filepathx"
)
// FileSystemSourceProvider provides a path to be scanned
// and a list of files which will not be scanned
type FileSystemSourceProvider struct {
paths []string
excludes map[string][]os.FileInfo
mu sync.RWMutex
}
var (
queryRegexExcludeTerraCache = regexp.MustCompile(fmt.Sprintf(`^(.*?%s)?\.terra.*`, regexp.QuoteMeta(string(os.PathSeparator))))
// ErrNotSupportedFile - error representing when a file format is not supported by KICS
ErrNotSupportedFile = errors.New("invalid file format")
)
// NewFileSystemSourceProvider initializes a FileSystemSourceProvider with paths and a list of files that will be ignored
func NewFileSystemSourceProvider(paths, excludes []string) (*FileSystemSourceProvider, error) {
log.Debug().Msgf("provider.NewFileSystemSourceProvider()")
ex := make(map[string][]os.FileInfo, len(excludes))
osPaths := make([]string, len(paths))
for idx, path := range paths {
osPaths[idx] = filepath.FromSlash(path)
}
fs := &FileSystemSourceProvider{
paths: osPaths,
excludes: ex,
}
for _, exclude := range excludes {
excludePaths, err := GetExcludePaths(exclude)
if err != nil {
return nil, err
}
if err := fs.AddExcluded(excludePaths); err != nil {
return nil, err
}
}
return fs, nil
}
// AddExcluded adds new excluded files to the File System Source Provider
func (s *FileSystemSourceProvider) AddExcluded(excludePaths []string) error {
for _, excludePath := range excludePaths {
info, err := os.Stat(excludePath)
if err != nil {
if os.IsNotExist(err) {
continue
}
if sysErr, ok := err.(*ioFs.PathError); ok {
log.Warn().Msgf("Failed getting file info for file '%s', Skipping due to: %s, Error number: %d",
excludePath, sysErr, sysErr.Err.(syscall.Errno))
continue
}
return errors.Wrap(err, "failed to open excluded file")
}
s.mu.Lock()
if _, ok := s.excludes[info.Name()]; !ok {
s.excludes[info.Name()] = make([]os.FileInfo, 0)
}
s.excludes[info.Name()] = append(s.excludes[info.Name()], info)
s.mu.Unlock()
}
return nil
}
// GetExcludePaths gets all the files that should be excluded
func GetExcludePaths(pathExpressions string) ([]string, error) {
if strings.ContainsAny(pathExpressions, "*?[") {
info, err := filepathx.Glob(pathExpressions)
if err != nil {
log.Error().Msgf("failed to get exclude path %s: %s", pathExpressions, err)
return []string{pathExpressions}, nil
}
return info, nil
}
return []string{pathExpressions}, nil
}
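// exampleExcludeGlob is an illustrative sketch, not part of the original
// source. An expression containing a glob metacharacter is expanded through
// filepathx (which also supports "**" patterns), while a plain path comes
// back verbatim in a one-element slice. The paths are hypothetical.
func exampleExcludeGlob() ([]string, []string) {
expanded, _ := GetExcludePaths("deploy/**/*.tf") // glob: expanded to matching paths
plain, _ := GetExcludePaths("deploy/main.tf") // no metacharacters: returned as-is
return expanded, plain
}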
// GetBasePaths returns base path of FileSystemSourceProvider
func (s *FileSystemSourceProvider) GetBasePaths() []string {
return s.paths
}
// ignoreDamagedFiles checks whether we should ignore a damaged file from a scan or not.
func ignoreDamagedFiles(path string) bool {
shouldIgnoreFile := false
fileInfo, err := os.Lstat(path)
if err != nil {
log.Warn().Msgf("Failed getting the file info for file '%s'", path)
return shouldIgnoreFile
}
log.Info().Msgf("No mode type bits are set( is a regular file ) for file '%s' : %t ", path, fileInfo.Mode().IsRegular())
if fileInfo.Mode()&os.ModeSymlink == os.ModeSymlink {
log.Warn().Msgf("File '%s' is a symbolic link - but seems not to be accessible", path)
shouldIgnoreFile = true
}
return shouldIgnoreFile
}
// GetSources tries to open a file or directory and executes the sink function on it
func (s *FileSystemSourceProvider) GetSources(ctx context.Context,
extensions model.Extensions, sink Sink, resolverSink ResolverSink) error {
for _, scanPath := range s.paths {
resolved := false
fileInfo, err := os.Stat(scanPath)
if err != nil {
return errors.Wrap(err, "failed to open path")
}
if !fileInfo.IsDir() {
c, openFileErr := openScanFile(scanPath, extensions)
if openFileErr != nil {
if openFileErr == ErrNotSupportedFile || ignoreDamagedFiles(scanPath) {
continue
}
return openFileErr
}
if sinkErr := sink(ctx, scanPath, c); sinkErr != nil {
return sinkErr
}
continue
}
err = s.walkDir(ctx, scanPath, resolved, sink, resolverSink, extensions)
if err != nil {
return errors.Wrap(err, "failed to walk directory")
}
continue
}
return nil
}
func (s *FileSystemSourceProvider) walkDir(ctx context.Context, scanPath string, resolved bool,
sink Sink, resolverSink ResolverSink, extensions model.Extensions) error {
return filepath.Walk(scanPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if shouldSkip, skipFolder := s.checkConditions(info, extensions, path, resolved); shouldSkip {
return skipFolder
}
// ------------------ Helm resolver --------------------------------
if info.IsDir() {
excluded, errRes := resolverSink(ctx, strings.ReplaceAll(path, "\\", "/"))
if errRes != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Filesystem files provider couldn't Resolve Directory, file=%s", info.Name()),
Err: errRes,
Location: "func walkDir()",
FileName: info.Name(),
}, true)
return nil
}
if errAdd := s.AddExcluded(excluded); errAdd != nil {
log.Err(errAdd).Msgf("Filesystem files provider couldn't exclude rendered Chart files, Chart=%s", info.Name())
}
resolved = true
return nil
}
// -----------------------------------------------------------------
c, err := os.Open(filepath.Clean(path))
if err != nil {
if ignoreDamagedFiles(filepath.Clean(path)) {
return nil
}
return errors.Wrap(err, "failed to open file")
}
defer closeFile(c, info)
err = sink(ctx, strings.ReplaceAll(path, "\\", "/"), c)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Filesystem files provider couldn't parse file, file=%s", info.Name()),
Err: err,
Location: "func walkDir()",
FileName: info.Name(),
}, true)
}
return nil
})
}
func openScanFile(scanPath string, extensions model.Extensions) (*os.File, error) {
ext, _ := utils.GetExtension(scanPath)
if !extensions.Include(ext) {
return nil, ErrNotSupportedFile
}
c, errOpenFile := os.Open(scanPath)
if errOpenFile != nil {
return nil, errors.Wrap(errOpenFile, "failed to open path")
}
return c, nil
}
func closeFile(file *os.File, info os.FileInfo) {
if err := file.Close(); err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Filesystem couldn't close file, file=%s", info.Name()),
Err: err,
Location: "func closeFile()",
FileName: info.Name(),
}, true)
}
}
func (s *FileSystemSourceProvider) checkConditions(info os.FileInfo, extensions model.Extensions,
path string, resolved bool) (bool, error) {
s.mu.RLock()
defer s.mu.RUnlock()
if info.IsDir() {
// exclude terraform cache folders
if queryRegexExcludeTerraCache.MatchString(path) {
log.Info().Msgf("Directory ignored: %s", path)
err := s.AddExcluded([]string{info.Name()})
if err != nil {
return true, err
}
return true, filepath.SkipDir
}
if f, ok := s.excludes[info.Name()]; ok && containsFile(f, info) {
log.Info().Msgf("Directory ignored: %s", path)
return true, filepath.SkipDir
}
_, err := os.Stat(filepath.Join(path, "Chart.yaml"))
if err != nil || resolved {
return true, nil
}
return false, nil
}
if f, ok := s.excludes[info.Name()]; ok && containsFile(f, info) {
log.Trace().Msgf("File ignored: %s", path)
return true, nil
}
ext, _ := utils.GetExtension(path)
if !extensions.Include(ext) {
log.Trace().Msgf("File ignored: %s", path)
return true, nil
}
return false, nil
}
func containsFile(fileList []os.FileInfo, target os.FileInfo) bool {
for _, file := range fileList {
if os.SameFile(file, target) {
return true
}
}
return false
}
package secrets
import (
"context"
_ "embed" // Embed KICS regex rules
"encoding/json"
"fmt"
"math"
"regexp"
"strings"
"sync"
"time"
"github.com/Checkmarx/kics/v2/assets"
"github.com/Checkmarx/kics/v2/pkg/detector"
"github.com/Checkmarx/kics/v2/pkg/detector/docker"
"github.com/Checkmarx/kics/v2/pkg/detector/helm"
engine "github.com/Checkmarx/kics/v2/pkg/engine"
"github.com/Checkmarx/kics/v2/pkg/engine/similarity"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog/log"
)
const (
Base64Chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="
HexChars = "1234567890abcdefABCDEF"
)
var (
SecretsQueryMetadata map[string]string
)
// SecretTracker keeps track of the secrets found by the inspector.
// It is used for masking all the secrets in the vulnerability preview across the different report formats
type SecretTracker struct {
ResolvedFilePath string
Line int
OriginalContent string
MaskedContent string
}
type Inspector struct {
ctx context.Context
tracker engine.Tracker
detector *detector.DetectLine
excludeResults map[string]bool
regexQueries []RegexQuery
allowRules []AllowRule
vulnerabilities []model.Vulnerability
queryExecutionTimeout time.Duration
foundLines []int
mu sync.RWMutex
SecretTracker []SecretTracker
}
type Entropy struct {
Group int `json:"group"`
Min float64 `json:"min"`
Max float64 `json:"max"`
}
type MultilineResult struct {
DetectLineGroup int `json:"detectLineGroup"`
}
type AllowRule struct {
Description string `json:"description"`
RegexStr string `json:"regex"`
Regex *regexp.Regexp
}
type RegexQuery struct {
ID string `json:"id"`
Name string `json:"name"`
Multiline MultilineResult `json:"multiline"`
RegexStr string `json:"regex"`
SpecialMask string `json:"specialMask"`
Entropies []Entropy `json:"entropies"`
AllowRules []AllowRule `json:"allowRules"`
Regex *regexp.Regexp
}
type RegexRuleStruct struct {
Rules []RegexQuery `json:"rules"`
AllowRules []AllowRule `json:"allowRules"`
}
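// For illustration, a minimal custom rules document in the shape RegexRuleStruct
// unmarshals from (the rule below is hypothetical, not a built-in KICS rule):
//
//    {
//      "rules": [
//        {
//          "id": "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
//          "name": "Hypothetical Token",
//          "regex": "tok_[a-z0-9]{16}",
//          "multiline": {"detectLineGroup": 0},
//          "entropies": [{"group": 0, "min": 3.0, "max": 8.0}],
//          "allowRules": []
//        }
//      ],
//      "allowRules": []
//    }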
type RuleMatch struct {
File string
RuleName string
Matches []string
Line int
Entropy float64
}
type lineVulneInfo struct {
lineContent string
lineNumber int
groups []string
}
func NewInspector(
ctx context.Context,
excludeResults map[string]bool,
tracker engine.Tracker,
queryFilter *source.QueryInspectorParameters,
disableSecretsQuery bool,
executionTimeout int,
regexRulesContent string,
isCustomSecretsRegexes bool,
) (*Inspector, error) {
passwordsAndSecretsQueryID, err := getPasswordsAndSecretsQueryID()
if err != nil {
return nil, err
}
excludeSecretsQuery := isValueInArray(passwordsAndSecretsQueryID, queryFilter.ExcludeQueries.ByIDs)
if disableSecretsQuery || excludeSecretsQuery && !isCustomSecretsRegexes {
return &Inspector{
ctx: ctx,
tracker: tracker,
excludeResults: excludeResults,
regexQueries: make([]RegexQuery, 0),
allowRules: make([]AllowRule, 0),
vulnerabilities: make([]model.Vulnerability, 0),
queryExecutionTimeout: time.Duration(executionTimeout) * time.Second,
SecretTracker: make([]SecretTracker, 0),
}, nil
}
lineDetector := detector.NewDetectLine(tracker.GetOutputLines()).
Add(helm.DetectKindLine{}, model.KindHELM).
Add(docker.DetectKindLine{}, model.KindDOCKER)
err = json.Unmarshal([]byte(assets.SecretsQueryMetadataJSON), &SecretsQueryMetadata)
if err != nil {
return nil, err
}
queryExecutionTimeout := time.Duration(executionTimeout) * time.Second
var allRegexQueries RegexRuleStruct
err = json.Unmarshal([]byte(regexRulesContent), &allRegexQueries)
if err != nil {
return nil, err
}
if isCustomSecretsRegexes {
err = validateCustomSecretsQueriesID(allRegexQueries.Rules)
if err != nil {
return nil, err
}
}
regexQueries, err := compileRegexQueries(queryFilter, allRegexQueries.Rules, isCustomSecretsRegexes, passwordsAndSecretsQueryID)
if err != nil {
return nil, err
}
allowRules, err := CompileRegex(allRegexQueries.AllowRules)
if err != nil {
return nil, err
}
return &Inspector{
ctx: ctx,
detector: lineDetector,
excludeResults: excludeResults,
tracker: tracker,
regexQueries: regexQueries,
allowRules: allowRules,
vulnerabilities: make([]model.Vulnerability, 0),
queryExecutionTimeout: queryExecutionTimeout,
foundLines: make([]int, 0),
}, nil
}
func (c *Inspector) inspectQuery(ctx context.Context, basePaths []string,
files model.FileMetadatas, i int) ([]model.Vulnerability, error) {
timeoutCtx, cancel := context.WithTimeout(ctx, c.queryExecutionTimeout)
defer cancel()
cleanFiles := cleanFiles(files)
for idx := range cleanFiles {
if _, ok := cleanFiles[idx].Commands["ignore"]; !ok {
select {
case <-timeoutCtx.Done():
return c.vulnerabilities, timeoutCtx.Err()
default:
c.checkContent(i, idx, basePaths, cleanFiles)
}
}
}
return c.vulnerabilities, nil
}
// Inspect inspects the source code for passwords & secrets and returns the list of vulnerabilities
func (c *Inspector) Inspect(ctx context.Context, basePaths []string,
files model.FileMetadatas, currentQuery chan<- int64) ([]model.Vulnerability, error) {
for i := range c.regexQueries {
currentQuery <- 1
vulns, err := c.inspectQuery(ctx, basePaths, files, i)
if err != nil {
return vulns, err
}
}
return c.vulnerabilities, nil
}
func compileRegexQueries(
queryFilter *source.QueryInspectorParameters,
allRegexQueries []RegexQuery,
isCustom bool,
passwordsAndSecretsQueryID string,
) ([]RegexQuery, error) {
var regexQueries []RegexQuery
var includeSpecificSecretQuery bool
allSecretsQueryAndCustom := false
includeAllSecretsQuery := isValueInArray(passwordsAndSecretsQueryID, queryFilter.IncludeQueries.ByIDs)
if includeAllSecretsQuery && isCustom { // merge case
var kicsRegexQueries RegexRuleStruct
err := json.Unmarshal([]byte(assets.SecretsQueryRegexRulesJSON), &kicsRegexQueries)
if err != nil {
return nil, err
}
allSecretsQueryAndCustom = true
regexQueries = kicsRegexQueries.Rules
}
for i := range allRegexQueries {
includeSpecificSecretQuery = isValueInArray(allRegexQueries[i].ID, queryFilter.IncludeQueries.ByIDs)
if len(queryFilter.IncludeQueries.ByIDs) > 0 && !allSecretsQueryAndCustom {
if includeAllSecretsQuery || includeSpecificSecretQuery {
regexQueries = append(regexQueries, allRegexQueries[i])
}
} else {
if !shouldExecuteQuery(
allRegexQueries[i].ID,
allRegexQueries[i].ID,
SecretsQueryMetadata["category"],
SecretsQueryMetadata["severity"],
queryFilter.ExcludeQueries.ByIDs,
) {
continue
}
if !shouldExecuteQuery(
SecretsQueryMetadata["category"],
allRegexQueries[i].ID,
SecretsQueryMetadata["category"],
SecretsQueryMetadata["severity"],
queryFilter.ExcludeQueries.ByCategories,
) {
continue
}
if !shouldExecuteQuery(
SecretsQueryMetadata["severity"],
allRegexQueries[i].ID,
SecretsQueryMetadata["category"],
SecretsQueryMetadata["severity"],
queryFilter.ExcludeQueries.BySeverities,
) {
continue
}
regexQueries = append(regexQueries, allRegexQueries[i])
}
}
for i := range regexQueries {
compiledRegexp, err := regexp.Compile(regexQueries[i].RegexStr)
if err != nil {
return regexQueries, err
}
regexQueries[i].Regex = compiledRegexp
for j := range regexQueries[i].AllowRules {
regexQueries[i].AllowRules[j].Regex = regexp.MustCompile(regexQueries[i].AllowRules[j].RegexStr)
}
}
return regexQueries, nil
}
// CompileRegex compiles the regex allow rules
func CompileRegex(allowRules []AllowRule) ([]AllowRule, error) {
for j := range allowRules {
compiledRegex, err := regexp.Compile(allowRules[j].RegexStr)
if err != nil {
return nil, err
}
allowRules[j].Regex = compiledRegex
}
return allowRules, nil
}
func (c *Inspector) GetQueriesLength() int {
return len(c.regexQueries)
}
func isValueInArray(value string, array []string) bool {
for i := range array {
if strings.EqualFold(value, array[i]) {
return true
}
}
return false
}
func (c *Inspector) isSecret(s string, query *RegexQuery) (isSecretRet bool, groups [][]string) {
if IsAllowRule(s, query, append(query.AllowRules, c.allowRules...)) {
return false, [][]string{}
}
groups = query.Regex.FindAllStringSubmatch(s, -1)
for _, group := range groups {
splitedText := strings.Split(s, "\n")
maxSplit := -1
for i, splited := range splitedText {
if len(group) > query.Multiline.DetectLineGroup {
if strings.Contains(splited, group[query.Multiline.DetectLineGroup]) && i > maxSplit {
maxSplit = i
}
}
}
if maxSplit == -1 {
continue
}
secret, newGroups := c.isSecret(strings.Join(append(splitedText[:maxSplit], splitedText[maxSplit+1:]...), "\n"), query)
if !secret {
continue
}
groups = append(groups, newGroups...)
}
if len(groups) > 0 {
return true, groups
}
return false, [][]string{}
}
// IsAllowRule checks whether the string matches any of the allow rules for the secret queries
func IsAllowRule(s string, query *RegexQuery, allowRules []AllowRule) bool {
regexMatch := query.Regex.FindStringIndex(s)
if regexMatch != nil {
allowRuleMatches := AllowRuleMatches(s, append(query.AllowRules, allowRules...))
for _, allowMatch := range allowRuleMatches {
allowStart, allowEnd := allowMatch[0], allowMatch[1]
regexStart, regexEnd := regexMatch[0], regexMatch[1]
if (allowStart <= regexEnd && allowStart >= regexStart) || (regexStart <= allowEnd && regexStart >= allowStart) {
return true
}
}
}
return false
}
// AllowRuleMatches returns all allow rule matches for the secret queries
func AllowRuleMatches(s string, allowRules []AllowRule) [][]int {
allowRuleMatches := [][]int{}
for i := range allowRules {
allowRuleMatches = append(allowRuleMatches, allowRules[i].Regex.FindAllStringIndex(s, -1)...)
}
return allowRuleMatches
}
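// A minimal sketch (the query and allow rule are hypothetical) of how the two
// functions above interact: a finding is suppressed when an allow-rule match
// overlaps the query match in the same string.
//
//    query := &RegexQuery{Regex: regexp.MustCompile(`token=\w+`)}
//    allow := []AllowRule{{Regex: regexp.MustCompile(`token=`)}}
//    IsAllowRule("token=abc123", query, allow)  // true: the matches overlap
//    IsAllowRule("secret=abc123", query, allow) // false: the query regex does not match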
func (c *Inspector) checkFileContent(query *RegexQuery, basePaths []string, file *model.FileMetadata) {
isSecret, groups := c.isSecret(file.OriginalData, query)
if !isSecret {
return
}
lineVulns := c.secretsDetectLine(query, file, groups)
for _, lineVuln := range lineVulns {
if len(query.Entropies) == 0 {
c.addVulnerability(
basePaths,
file,
query,
lineVuln.lineNumber,
lineVuln.lineContent,
)
}
if len(lineVuln.groups) > 0 {
for _, entropy := range query.Entropies {
// if the matched group does not exist, stop processing this file's matches
if len(lineVuln.groups) <= entropy.Group {
return
}
isMatch, entropyFloat := CheckEntropyInterval(
entropy,
lineVuln.groups[entropy.Group],
)
log.Debug().Msgf("match: %v :: %v", isMatch, fmt.Sprint(entropyFloat))
if isMatch {
c.addVulnerability(
basePaths,
file,
query,
lineVuln.lineNumber,
lineVuln.lineContent,
)
}
}
}
}
}
func (c *Inspector) secretsDetectLine(query *RegexQuery, file *model.FileMetadata, vulnGroups [][]string) []lineVulneInfo {
content := file.OriginalData
lines := *file.LinesOriginalData
lineVulneInfoSlice := make([]lineVulneInfo, 0)
realLineUpdater := 0
for _, groups := range vulnGroups {
lineVulneInfoObject := lineVulneInfo{
lineNumber: -1,
lineContent: "-",
groups: groups,
}
if len(groups) <= query.Multiline.DetectLineGroup {
log.Warn().Msgf("Unable to detect line in file %v Multiline group not found: %v", file.FilePath, query.Multiline.DetectLineGroup)
lineVulneInfoSlice = append(lineVulneInfoSlice, lineVulneInfoObject)
continue
}
contentMatchRemoved := strings.Replace(content, groups[query.Multiline.DetectLineGroup], "", 1)
text := strings.ReplaceAll(contentMatchRemoved, "\r", "")
contentMatchRemovedLines := strings.Split(text, "\n")
for i := 0; i < len(lines); i++ {
if lines[i] != contentMatchRemovedLines[i] {
lineVulneInfoObject.lineNumber = i + realLineUpdater
lineVulneInfoObject.lineContent = lines[i]
break
}
}
realLineUpdater += len(lines) - len(contentMatchRemovedLines)
content = contentMatchRemoved
lines = contentMatchRemovedLines
lineVulneInfoSlice = append(lineVulneInfoSlice, lineVulneInfoObject)
}
return lineVulneInfoSlice
}
func (c *Inspector) checkLineByLine(wg *sync.WaitGroup, query *RegexQuery,
basePaths []string, file *model.FileMetadata, lineNumber int, currentLine string) {
defer wg.Done()
isSecret, groups := c.isSecret(currentLine, query)
if !isSecret {
return
}
if len(query.Entropies) == 0 {
c.addVulnerability(
basePaths,
file,
query,
lineNumber,
currentLine,
)
}
for i := range query.Entropies {
entropy := query.Entropies[i]
// if the matched group does not exist, stop checking this line
if len(groups[0]) <= entropy.Group {
return
}
isMatch, entropyFloat := CheckEntropyInterval(
entropy,
groups[0][entropy.Group],
)
log.Debug().Msgf("match: %v :: %v", isMatch, fmt.Sprint(entropyFloat))
if isMatch {
c.addVulnerability(
basePaths,
file,
query,
lineNumber,
currentLine,
)
}
}
}
func (c *Inspector) addVulnerability(basePaths []string, file *model.FileMetadata, query *RegexQuery, lineNumber int, issueLine string) {
if engine.ShouldSkipVulnerability(file.Commands, query.ID) {
log.Debug().Msgf("Skipping vulnerability in file %s for query '%s':%s", file.FilePath, query.Name, query.ID)
return
}
simID, err := similarity.ComputeSimilarityID(
basePaths,
file.FilePath,
query.ID,
fmt.Sprintf("%d", lineNumber),
"",
)
if err != nil {
log.Error().Msg("unable to compute similarity ID")
}
c.mu.Lock()
if _, ok := c.excludeResults[engine.PtrStringToString(simID)]; !ok {
linesVuln := c.detector.GetAdjacent(file, lineNumber+1)
if !ignoreLine(linesVuln.Line, file.LinesIgnore) {
vuln := model.Vulnerability{
QueryID: query.ID,
QueryName: SecretsQueryMetadata["queryName"] + " - " + query.Name,
SimilarityID: engine.PtrStringToString(simID),
FileID: file.ID,
FileName: file.FilePath,
Line: linesVuln.Line,
VulnLines: hideSecret(&linesVuln, issueLine, query, &c.SecretTracker),
IssueType: "RedundantAttribute",
Platform: SecretsQueryMetadata["platform"],
CWE: SecretsQueryMetadata["cwe"],
Severity: model.SeverityHigh,
QueryURI: SecretsQueryMetadata["descriptionUrl"],
Category: SecretsQueryMetadata["category"],
Description: SecretsQueryMetadata["descriptionText"],
DescriptionID: SecretsQueryMetadata["descriptionID"],
KeyExpectedValue: "Hardcoded secret key should not appear in source",
KeyActualValue: "Hardcoded secret key appears in source",
CloudProvider: SecretsQueryMetadata["cloudProvider"],
}
c.vulnerabilities = append(c.vulnerabilities, vuln)
}
}
c.mu.Unlock()
}
// CheckEntropyInterval - verifies if a given token's entropy is within expected bounds
func CheckEntropyInterval(entropy Entropy, token string) (isEntropyInInterval bool, entropyLevel float64) {
base64Entropy := calculateEntropy(token, Base64Chars)
hexEntropy := calculateEntropy(token, HexChars)
highestEntropy := math.Max(base64Entropy, hexEntropy)
if insideInterval(entropy, base64Entropy) || insideInterval(entropy, hexEntropy) {
return true, highestEntropy
}
return false, highestEntropy
}
func insideInterval(entropy Entropy, floatEntropy float64) bool {
return floatEntropy >= entropy.Min && floatEntropy <= entropy.Max
}
// calculateEntropy - calculates the entropy of a string based on the Shannon formula
func calculateEntropy(token, charSet string) float64 {
if token == "" {
return 0
}
charMap := map[rune]float64{}
for _, char := range token {
if strings.Contains(charSet, string(char)) {
charMap[char]++
}
}
var freq float64
length := float64(len(token))
for _, count := range charMap {
freq += count * math.Log2(count)
}
return math.Log2(length) - freq/length
}
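// A worked sketch of CheckEntropyInterval above (the bounds are hypothetical,
// not KICS defaults): the token's Shannon entropy is computed over both the
// base64 and hex character sets, and the token passes when either value falls
// inside [Min, Max].
//
//    e := Entropy{Group: 1, Min: 3.0, Max: 8.0}
//    ok, level := CheckEntropyInterval(e, "8f3a9c1d4e7b2a6f") // random-looking hex, entropy ~3.75
//    // ok is true: level lies inside the interval
//    ok, level = CheckEntropyInterval(e, "aaaaaaaa") // a repeated character has entropy 0
//    // ok is false: 0 is below Min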
func shouldExecuteQuery(filterTarget, id, category, severity string, filter []string) bool {
if isValueInArray(filterTarget, filter) {
log.Debug().
Msgf("Excluding query ID: %s category: %s severity: %s",
id,
category,
severity)
return false
}
return true
}
func getPasswordsAndSecretsQueryID() (string, error) {
var metadata = make(map[string]string)
err := json.Unmarshal([]byte(assets.SecretsQueryMetadataJSON), &metadata)
if err != nil {
return "", err
}
return metadata["id"], nil
}
func validateCustomSecretsQueriesID(allRegexQueries []RegexQuery) error {
re := regexp.MustCompile(`^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$`)
for i := range allRegexQueries {
if !re.MatchString(allRegexQueries[i].ID) {
return fmt.Errorf("the query %s defines an invalid query ID (%s)", allRegexQueries[i].Name, allRegexQueries[i].ID)
}
}
return nil
}
func (c *Inspector) checkContent(i, idx int, basePaths []string, files model.FileMetadatas) {
// the ignore lines may contain lines from the resolved files;
// since the secrets inspector only looks at the original data, the ignore lines must be recomputed
files[idx].LinesIgnore = model.GetIgnoreLines(&files[idx])
wg := &sync.WaitGroup{}
// check file content line by line
if c.regexQueries[i].Multiline == (MultilineResult{}) {
lines := (&files[idx]).LinesOriginalData
for lineNumber, currentLine := range *lines {
wg.Add(1)
go c.checkLineByLine(wg, &c.regexQueries[i], basePaths, &files[idx], lineNumber, currentLine)
}
wg.Wait()
return
}
// check file content as a whole
c.checkFileContent(&c.regexQueries[i], basePaths, &files[idx])
}
func ignoreLine(lineNumber int, linesIgnore []int) bool {
for _, ignoreLine := range linesIgnore {
if lineNumber == ignoreLine {
return true
}
}
return false
}
// cleanFiles keeps one file per filePath
func cleanFiles(files model.FileMetadatas) model.FileMetadatas {
keys := make(map[string]bool)
cleanFiles := model.FileMetadatas{}
for i := range files {
filePath := files[i].FilePath
if _, value := keys[filePath]; !value {
keys[filePath] = true
cleanFiles = append(cleanFiles, files[i])
}
}
return cleanFiles
}
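// For example, given metadata entries whose FilePath values are
// ["a.tf", "a.tf", "b.tf"], cleanFiles above keeps the first "a.tf" entry and
// "b.tf", so each path is inspected only once.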
func hideSecret(linesVuln *model.VulnerabilityLines,
issueLine string,
query *RegexQuery,
secretTracker *[]SecretTracker) *[]model.CodeLine {
for idx := range *linesVuln.VulnLines {
if query.SpecialMask == "all" && idx != 0 {
addToSecretTracker(secretTracker, linesVuln.ResolvedFile, linesVuln.Line, (*linesVuln.VulnLines)[idx].Line, "<SECRET-MASKED-ON-PURPOSE>")
(*linesVuln.VulnLines)[idx].Line = "<SECRET-MASKED-ON-PURPOSE>"
continue
}
if (*linesVuln.VulnLines)[idx].Line == issueLine {
regex := query.RegexStr
if query.SpecialMask != "" {
regex = "(.*)" + query.SpecialMask // get key
}
var re = regexp.MustCompile(regex)
match := re.FindString(issueLine)
if query.SpecialMask != "" {
match = issueLine[len(match):] // get value
}
if match != "" {
originalCntAux := (*linesVuln.VulnLines)[idx].Line
(*linesVuln.VulnLines)[idx].Line = strings.Replace(issueLine, match, "<SECRET-MASKED-ON-PURPOSE>", 1)
addToSecretTracker(secretTracker, linesVuln.ResolvedFile, linesVuln.Line, originalCntAux, (*linesVuln.VulnLines)[idx].Line)
} else {
addToSecretTracker(secretTracker,
linesVuln.ResolvedFile,
linesVuln.Line,
(*linesVuln.VulnLines)[idx].Line,
"<SECRET-MASKED-ON-PURPOSE>")
(*linesVuln.VulnLines)[idx].Line = "<SECRET-MASKED-ON-PURPOSE>"
}
}
}
return linesVuln.VulnLines
}
func addToSecretTracker(secretTracker *[]SecretTracker, path string, line int, originalCnt, maskedCnt string) {
*secretTracker = append(*secretTracker, SecretTracker{
ResolvedFilePath: path,
Line: line,
OriginalContent: originalCnt,
MaskedContent: maskedCnt,
})
}
package similarity
import (
"crypto/sha256"
"encoding/hex"
"path/filepath"
"strings"
"github.com/rs/zerolog/log"
)
// ComputeSimilarityID computes a sha256 hash from the standardized relative file path, query ID, search key, and search value
func ComputeSimilarityID(basePaths []string, filePath, queryID, searchKey, searchValue string) (*string, error) {
basePath := ""
for _, path := range basePaths {
if strings.Contains(filepath.ToSlash(filePath), filepath.ToSlash(path)) {
basePath = filepath.ToSlash(path)
break
}
}
standardizedPath, err := standardizeToRelativePath(basePath, filePath)
if err != nil {
log.Debug().Msgf("Error while standardizing path: %s", err)
}
var stringNode = standardizedPath + queryID + searchKey + searchValue
hashSum := sha256.Sum256([]byte(stringNode))
similarity := hex.EncodeToString(hashSum[:])
return &similarity, nil
}
func standardizeToRelativePath(basePath, path string) (string, error) {
cleanPath := filepath.Clean(path)
standardPath := filepath.ToSlash(cleanPath)
basePath = filepath.ToSlash(basePath)
relativeStandardPath, err := filepath.Rel(basePath, standardPath)
if err != nil {
return "", err
}
return filepath.ToSlash(relativeStandardPath), nil
}
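// A minimal sketch of how ComputeSimilarityID derives an ID (the paths and IDs
// are hypothetical): the file path is made relative to the matching base path,
// concatenated with the query ID, search key and search value, and the sha256
// digest of that string is hex-encoded.
//
//    id, _ := ComputeSimilarityID(
//        []string{"/project"},
//        "/project/deploy/main.tf",
//        "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
//        "resource.aws_s3_bucket", "")
//    // *id == hex(sha256("deploy/main.tf" + queryID + searchKey + ""))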
package source
import (
"encoding/json"
"fmt"
"os"
"path"
"path/filepath"
"sort"
"strings"
"github.com/Checkmarx/kics/v2/assets"
"github.com/Checkmarx/kics/v2/internal/constants"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// FilesystemSource defines a struct with paths to filesystem sources of queries.
// Source holds the paths to the queries.
// Types are the types given by the '--type' flag for the query selection mechanism
type FilesystemSource struct {
Source []string
Types []string
CloudProviders []string
Library string
ExperimentalQueries bool
}
const (
// QueryFileName The default query file name
QueryFileName = "query.rego"
// MetadataFileName The default metadata file name
MetadataFileName = "metadata.json"
// LibrariesDefaultBasePath the path to rego libraries
LibrariesDefaultBasePath = "./assets/libraries"
emptyInputData = "{}"
common = "Common"
kicsDefault = "default"
)
// NewFilesystemSource initializes a FilesystemSource with the paths to the queries and the types of queries to load
func NewFilesystemSource(source, types, cloudProviders []string, libraryPath string, experimentalQueries bool) *FilesystemSource {
log.Debug().Msg("source.NewFilesystemSource()")
if len(types) == 0 {
types = []string{""}
}
if len(cloudProviders) == 0 {
cloudProviders = []string{""}
}
for s := range source {
source[s] = filepath.FromSlash(source[s])
}
return &FilesystemSource{
Source: source,
Types: types,
CloudProviders: cloudProviders,
Library: filepath.FromSlash(libraryPath),
ExperimentalQueries: experimentalQueries,
}
}
// ListSupportedPlatforms returns a list of supported platforms
func ListSupportedPlatforms() []string {
keys := make([]string, len(constants.AvailablePlatforms))
i := 0
for k := range constants.AvailablePlatforms {
keys[i] = k
i++
}
sort.Strings(keys)
return keys
}
// ListSupportedCloudProviders returns a list of supported cloud providers
func ListSupportedCloudProviders() []string {
return []string{"alicloud", "aws", "azure", "gcp", "nifcloud", "tencentcloud"}
}
func getLibraryInDir(platform, libraryDirPath string) string {
var libraryFilePath string
err := filepath.Walk(libraryDirPath, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if strings.EqualFold(filepath.Base(path), platform+".rego") { // try to find the library file <platform>.rego
libraryFilePath = path
}
return nil
})
if err != nil {
log.Error().Msgf("Failed to analyze path %s: %s", libraryDirPath, err)
}
return libraryFilePath
}
func isDefaultLibrary(libraryPath string) bool {
return filepath.FromSlash(libraryPath) == filepath.FromSlash(LibrariesDefaultBasePath)
}
// GetPathToCustomLibrary - returns the libraries path for a given platform
func GetPathToCustomLibrary(platform, libraryPathFlag string) string {
libraryFilePath := kicsDefault
if !isDefaultLibrary(libraryPathFlag) {
log.Debug().Msgf("Trying to load custom libraries from %s", libraryPathFlag)
library := getLibraryInDir(platform, libraryPathFlag)
// found a library named according to the platform
if library != "" {
libraryFilePath = library
}
}
return libraryFilePath
}
// GetQueryLibrary returns the library.rego for the platform passed in the argument
func (s *FilesystemSource) GetQueryLibrary(platform string) (RegoLibraries, error) {
library := GetPathToCustomLibrary(platform, s.Library)
customLibraryCode := ""
customLibraryData := emptyInputData
if library == "" {
return RegoLibraries{}, errors.New("unable to get libraries path")
}
if library != kicsDefault {
byteContent, err := os.ReadFile(library)
if err != nil {
return RegoLibraries{}, err
}
customLibraryCode = string(byteContent)
customLibraryData, err = readInputData(strings.TrimSuffix(library, filepath.Ext(library)) + ".json")
if err != nil {
log.Debug().Msg(err.Error())
}
} else {
log.Debug().Msgf("Custom library %s not provided. Loading embedded library instead", platform)
}
// getting embedded library
embeddedLibraryCode, errGettingEmbeddedLibrary := assets.GetEmbeddedLibrary(strings.ToLower(platform))
if errGettingEmbeddedLibrary != nil {
return RegoLibraries{}, errGettingEmbeddedLibrary
}
mergedLibraryCode, errMergeLibs := mergeLibraries(customLibraryCode, embeddedLibraryCode)
if errMergeLibs != nil {
return RegoLibraries{}, errMergeLibs
}
embeddedLibraryData, errGettingEmbeddedLibraryCode := assets.GetEmbeddedLibraryData(strings.ToLower(platform))
if errGettingEmbeddedLibraryCode != nil {
log.Debug().Msgf("Could not open embedded library data for %s platform", platform)
embeddedLibraryData = emptyInputData
}
mergedLibraryData, errMergingLibraryData := MergeInputData(embeddedLibraryData, customLibraryData)
if errMergingLibraryData != nil {
log.Debug().Msgf("Could not merge library data for %s platform", platform)
}
regoLibrary := RegoLibraries{
LibraryCode: mergedLibraryCode,
LibraryInputData: mergedLibraryData,
}
return regoLibrary, nil
}
// CheckType checks whether a query's platform matches one of the types passed via the '--type' flag, so the query should be loaded
func (s *FilesystemSource) CheckType(queryPlatform interface{}) bool {
if queryPlatform.(string) == common {
return true
}
if s.Types[0] != "" {
for _, t := range s.Types {
if strings.EqualFold(t, queryPlatform.(string)) {
return true
}
}
return false
}
return true
}
// CheckCloudProvider checks whether a query's cloud provider matches one of the providers passed via the '--cloud-provider' flag, so the query should be loaded
func (s *FilesystemSource) CheckCloudProvider(cloudProvider interface{}) bool {
if cloudProvider != nil {
if strings.EqualFold(cloudProvider.(string), common) {
return true
}
if s.CloudProviders[0] != "" {
return strings.Contains(strings.ToUpper(strings.Join(s.CloudProviders, ",")), strings.ToUpper(cloudProvider.(string)))
}
}
if s.CloudProviders[0] == "" {
return true
}
return false
}
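// A minimal usage sketch of CheckType and CheckCloudProvider above (the paths
// and flag values are hypothetical): a source restricted to terraform queries
// for aws accepts a Terraform query and the platform-agnostic "Common" kind,
// but rejects other platforms and providers.
//
//    s := NewFilesystemSource([]string{"./assets/queries"}, []string{"terraform"},
//        []string{"aws"}, LibrariesDefaultBasePath, false)
//    s.CheckType("Terraform")    // true
//    s.CheckType("Common")       // true: common queries always load
//    s.CheckType("Ansible")      // false
//    s.CheckCloudProvider("aws") // true
//    s.CheckCloudProvider("gcp") // false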
func checkQueryInclude(id interface{}, includedQueries []string) bool {
queryMetadataKey, ok := id.(string)
if !ok {
log.Warn().
Msgf("Can't cast query metadata key = %v", id)
return false
}
for _, includedQuery := range includedQueries {
if queryMetadataKey == includedQuery {
return true
}
}
return false
}
func checkQueryExcludeField(id interface{}, excludeQueries []string) bool {
queryMetadataKey, ok := id.(string)
if !ok {
log.Warn().
Msgf("Can't cast query metadata key = %v", id)
return false
}
for _, excludedQuery := range excludeQueries {
if strings.EqualFold(queryMetadataKey, excludedQuery) {
return true
}
}
return false
}
func checkQueryExclude(metadata map[string]interface{}, queryParameters *QueryInspectorParameters) bool {
return checkQueryExcludeField(metadata["id"], queryParameters.ExcludeQueries.ByIDs) ||
checkQueryExcludeField(metadata["category"], queryParameters.ExcludeQueries.ByCategories) ||
checkQueryExcludeField(metadata["severity"], queryParameters.ExcludeQueries.BySeverities) ||
(!queryParameters.BomQueries && metadata["severity"] == model.SeverityTrace)
}
// GetQueries walks the given filesystem source paths and returns all queries found
// as a slice of QueryMetadata structs
func (s *FilesystemSource) GetQueries(queryParameters *QueryInspectorParameters) ([]model.QueryMetadata, error) {
queryDirs, err := s.iterateSources()
if err != nil {
return nil, err
}
queries := s.iterateQueryDirs(queryDirs, queryParameters)
return queries, nil
}
func (s *FilesystemSource) iterateSources() ([]string, error) {
queryDirs := make([]string, 0)
for _, source := range s.Source {
err := filepath.Walk(source,
func(p string, f os.FileInfo, err error) error {
if err != nil {
return err
}
if f.IsDir() || f.Name() != QueryFileName {
return nil
}
queryDirs = append(queryDirs, filepath.Dir(p))
return nil
})
if err != nil {
return nil, errors.Wrap(err, "failed to get query Source")
}
}
return queryDirs, nil
}
// iterateQueryDirs iterates all query directories and reads the respective queries
func (s *FilesystemSource) iterateQueryDirs(queryDirs []string, queryParameters *QueryInspectorParameters) []model.QueryMetadata {
queries := make([]model.QueryMetadata, 0, len(queryDirs))
for _, queryDir := range queryDirs {
query, errRQ := ReadQuery(queryDir)
if errRQ != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Query provider failed to read query, query=%s", path.Base(queryDir)),
Err: errRQ,
Location: "func GetQueries()",
FileName: path.Base(queryDir),
}, true)
continue
}
if query.Experimental && !queryParameters.ExperimentalQueries {
continue
}
if !s.CheckType(query.Metadata["platform"]) {
continue
}
if !s.CheckCloudProvider(query.Metadata["cloudProvider"]) {
continue
}
customInputData, readInputErr := readInputData(filepath.Join(queryParameters.InputDataPath, query.Metadata["id"].(string)+".json"))
if readInputErr != nil {
log.Err(readInputErr).
Msgf("failed to read input data, query=%s", path.Base(queryDir))
continue
}
inputData, mergeError := MergeInputData(query.InputData, customInputData)
if mergeError != nil {
log.Err(mergeError).
Msgf("failed to merge input data, query=%s", path.Base(queryDir))
continue
}
query.InputData = inputData
if len(queryParameters.IncludeQueries.ByIDs) > 0 {
if checkQueryInclude(query.Metadata["id"], queryParameters.IncludeQueries.ByIDs) {
queries = append(queries, query)
}
} else {
if checkQueryExclude(query.Metadata, queryParameters) {
log.Debug().
Msgf("Excluding query ID: %s category: %s severity: %s", query.Metadata["id"], query.Metadata["category"], query.Metadata["severity"])
continue
}
queries = append(queries, query)
}
}
return queries
}
// validateMetadata prevents panics when required KICS query metadata fields are missing
func validateMetadata(metadata map[string]interface{}) (exist bool, field string) {
fields := []string{
"id",
"platform",
}
for _, field = range fields {
if _, exist = metadata[field]; !exist {
return
}
}
return
}
// ReadQuery reads a query's files for a given path and returns a QueryMetadata struct with its
// content
func ReadQuery(queryDir string) (model.QueryMetadata, error) {
queryContent, err := os.ReadFile(filepath.Clean(path.Join(queryDir, QueryFileName)))
if err != nil {
return model.QueryMetadata{}, errors.Wrapf(err, "failed to read query %s", path.Base(queryDir))
}
metadata, err := ReadMetadata(queryDir)
if err != nil {
return model.QueryMetadata{}, errors.Wrapf(err, "failed to read query %s", path.Base(queryDir))
}
if valid, missingField := validateMetadata(metadata); !valid {
return model.QueryMetadata{}, fmt.Errorf("failed to read metadata field: %s", missingField)
}
platform := getPlatform(metadata["platform"].(string))
inputData, errInputData := readInputData(filepath.Join(queryDir, "data.json"))
if errInputData != nil {
log.Err(errInputData).
Msgf("Query provider failed to read input data, query=%s", path.Base(queryDir))
}
aggregation := 1
if agg, ok := metadata["aggregation"]; ok {
aggregation = int(agg.(float64))
}
experimental := getExperimental(metadata["experimental"])
return model.QueryMetadata{
Query: path.Base(filepath.ToSlash(queryDir)),
Content: string(queryContent),
Metadata: metadata,
Platform: platform,
InputData: inputData,
Aggregation: aggregation,
Experimental: experimental,
}, nil
}
// ReadMetadata reads the query's metadata file inside the query directory
func ReadMetadata(queryDir string) (map[string]interface{}, error) {
f, err := os.Open(filepath.Clean(path.Join(queryDir, MetadataFileName)))
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Queries provider can't read metadata, query=%s", path.Base(queryDir)),
Err: err,
Location: "func ReadMetadata()",
FileName: path.Base(queryDir),
}, true)
return nil, err
}
defer func() {
if err := f.Close(); err != nil {
log.Err(err).
Msgf("Queries provider can't close file, file=%s", filepath.Clean(path.Join(queryDir, MetadataFileName)))
}
}()
var metadata map[string]interface{}
if err := json.NewDecoder(f).Decode(&metadata); err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("Queries provider can't unmarshal metadata, query=%s", path.Base(queryDir)),
Err: err,
Location: "func ReadMetadata()",
FileName: path.Base(queryDir),
}, true)
return nil, err
}
return metadata, nil
}
type supportedPlatforms map[string]string
var supPlatforms = &supportedPlatforms{
"Ansible": "ansible",
"CloudFormation": "cloudFormation",
"Common": "common",
"Crossplane": "crossplane",
"Dockerfile": "dockerfile",
"DockerCompose": "dockerCompose",
"Knative": "knative",
"Kubernetes": "k8s",
"OpenAPI": "openAPI",
"Terraform": "terraform",
"AzureResourceManager": "azureResourceManager",
"GRPC": "grpc",
"GoogleDeploymentManager": "googleDeploymentManager",
"Buildah": "buildah",
"Pulumi": "pulumi",
"ServerlessFW": "serverlessFW",
"CICD": "cicd",
}
func getPlatform(metadataPlatform string) string {
if p, ok := (*supPlatforms)[metadataPlatform]; ok {
return p
}
return "unknown"
}
func getExperimental(experimental interface{}) bool {
readExperimental, _ := experimental.(string)
return readExperimental == "true"
}
func readInputData(inputDataPath string) (string, error) {
inputData, err := os.ReadFile(filepath.Clean(inputDataPath))
if err != nil {
if os.IsNotExist(err) {
return emptyInputData, nil
}
return emptyInputData, errors.Wrapf(err, "failed to read query input data %s", path.Base(inputDataPath))
}
return string(inputData), nil
}
// Package source (go:generate go run -mod=mod github.com/golang/mock/mockgen -package mock -source=./$GOFILE -destination=../mock/$GOFILE)
package source
import (
"encoding/json"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/open-policy-agent/opa/ast"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// QueryInspectorParameters is a struct that represents the options to select queries to be executed
type QueryInspectorParameters struct {
IncludeQueries IncludeQueries
ExcludeQueries ExcludeQueries
ExperimentalQueries bool
InputDataPath string
BomQueries bool
}
// ExcludeQueries is a struct that represents the option to exclude queries by ids or by categories
type ExcludeQueries struct {
ByIDs []string
ByCategories []string
BySeverities []string
}
// IncludeQueries is a struct that represents the option to include queries by ID taking precedence over exclusion
type IncludeQueries struct {
ByIDs []string
}
// RegoLibraries is a struct that contains the library code and its input data
type RegoLibraries struct {
LibraryCode string
LibraryInputData string
}
// QueriesSource is the interface that wraps the basic methods: GetQueries and GetQueryLibrary
// GetQueries returns all queries as a QueryMetadata list
// GetQueryLibrary gets a library of rego functions given a platform's name
type QueriesSource interface {
GetQueries(querySelection *QueryInspectorParameters) ([]model.QueryMetadata, error)
GetQueryLibrary(platform string) (RegoLibraries, error)
}
// mergeLibraries returns the custom library and the embedded library merged, overriding embedded library definitions when names collide
func mergeLibraries(customLib, embeddedLib string) (string, error) {
if customLib == "" {
return embeddedLib, nil
}
statements, _, err := ast.NewParser().WithReader(strings.NewReader(customLib)).Parse()
if err != nil {
log.Err(err).Msg("Could not parse custom library")
return "", err
}
headers := make(map[string]string)
variables := make(map[string]string)
for _, st := range statements {
if rule, ok := st.(*ast.Rule); ok {
headers[string(rule.Head.Name)] = ""
}
if body, ok := st.(ast.Body); ok {
variableSet := body.Vars(ast.SafetyCheckVisitorParams)
for variable := range variableSet {
variables[variable.String()] = ""
}
}
}
statements, _, err = ast.NewParser().WithReader(strings.NewReader(embeddedLib)).Parse()
if err != nil {
log.Err(err).Msg("Could not parse default library")
return "", err
}
for _, st := range statements {
if rule, ok := st.(*ast.Rule); ok {
if _, remove := headers[string(rule.Head.Name)]; remove {
embeddedLib = strings.Replace(embeddedLib, string(rule.Location.Text), "", 1)
}
continue
}
if regoPackage, ok := st.(*ast.Package); ok {
firstHalf := strings.Join(strings.Split(embeddedLib, "\n")[:regoPackage.Location.Row-1], "\n")
secondHalf := strings.Join(strings.Split(embeddedLib, "\n")[regoPackage.Location.Row+1:], "\n")
embeddedLib = firstHalf + "\n" + secondHalf
continue
}
if body, ok := st.(ast.Body); ok {
variableSet := body.Vars(ast.SafetyCheckVisitorParams)
for variable := range variableSet {
if _, remove := variables[variable.String()]; remove {
embeddedLib = strings.Replace(embeddedLib, string(body.Loc().Text), "", 1)
break
}
}
}
}
customLib += "\n" + embeddedLib
return customLib, nil
}
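// A minimal sketch of the override behavior of mergeLibraries above (the rego
// snippets are hypothetical): a rule defined in both libraries keeps only its
// custom definition in the merged output, while embedded rules with other
// names survive, and the embedded package declaration is dropped.
//
//    custom := "package kics\n\nisService { true }\n"
//    embedded := "package kics\n\nisService { false }\n\nisResource { true }\n"
//    merged, _ := mergeLibraries(custom, embedded)
//    // merged contains the custom isService and the embedded isResource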
// MergeInputData merges the KICS default input data with user-defined custom input data; custom keys take precedence
func MergeInputData(defaultInputData, customInputData string) (string, error) {
if checkEmptyInputdata(customInputData) && checkEmptyInputdata(defaultInputData) {
return emptyInputData, nil
}
if checkEmptyInputdata(defaultInputData) {
return customInputData, nil
}
if checkEmptyInputdata(customInputData) {
return defaultInputData, nil
}
dataJSON := map[string]interface{}{}
customDataJSON := map[string]interface{}{}
if unmarshalError := json.Unmarshal([]byte(defaultInputData), &dataJSON); unmarshalError != nil {
return "", errors.Wrapf(unmarshalError, "failed to merge query input data")
}
if unmarshalError := json.Unmarshal([]byte(customInputData), &customDataJSON); unmarshalError != nil {
return "", errors.Wrapf(unmarshalError, "failed to merge query input data")
}
for key, value := range customDataJSON {
dataJSON[key] = value
}
mergedJSON, mergeErr := json.Marshal(dataJSON)
if mergeErr != nil {
return "", errors.Wrapf(mergeErr, "failed to merge query input data")
}
return string(mergedJSON), nil
}
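// A small worked example of the MergeInputData semantics above (the values are
// hypothetical): custom keys overwrite default keys at the top level and
// everything else is kept; note that re-marshaling sorts the keys alphabetically.
//
//    merged, _ := MergeInputData(`{"admin":"root","ports":[80]}`, `{"admin":"kics"}`)
//    // merged == `{"admin":"kics","ports":[80]}`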
func checkEmptyInputdata(inputData string) bool {
return inputData == emptyInputData || inputData == ""
}
package engine
import (
"encoding/json"
"strings"
dec "github.com/Checkmarx/kics/v2/pkg/detector"
"github.com/Checkmarx/kics/v2/pkg/engine/similarity"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/pkg/errors"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
)
const (
formatFloat64 = 64
searchKeyMinLen = 3
)
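// modifyVulSearchKeyReference walks the parsed document following the dot-separated
// search key segments; when it reaches a node carrying "RefMetadata", it rewrites the
// search key to reference the original $ref instead of the resolved object, returning
// the new search key and whether a rewrite happened.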
func modifyVulSearchKeyReference(doc interface{}, originalSearchKey string, stringVulList []string) (string, bool) {
for index, vulSplit := range stringVulList {
switch docTyped := doc.(type) {
case map[string]interface{}:
if strings.HasPrefix(vulSplit, "{{") && strings.HasSuffix(vulSplit, "}}") {
vulSplit = vulSplit[2 : len(vulSplit)-2]
}
if vulSplitEqual := strings.Split(vulSplit, "="); len(vulSplitEqual) != 1 {
vulSplit = vulSplitEqual[0]
}
newDoc, foundEntry := docTyped[vulSplit]
if metadataRefDoc, ok := docTyped["RefMetadata"]; ok && foundEntry && index < len(stringVulList) {
newSearchKey := strings.Join(stringVulList[:index], ".") + ".$ref=" + (metadataRefDoc.(map[string]interface{})["$ref"].(string))
return newSearchKey, true
} else if foundEntry {
doc = newDoc
} else {
return originalSearchKey, false
}
case []interface{}:
for _, listDoc := range docTyped {
if newSearchKey, modified := modifyVulSearchKeyReference(listDoc, originalSearchKey, stringVulList[index:]); modified {
return strings.Join(stringVulList[:index], ".") + "." + newSearchKey, true
}
}
return originalSearchKey, false
default:
if index != len(stringVulList)-1 {
return originalSearchKey, false
}
}
}
return originalSearchKey, false
}
// DefaultVulnerabilityBuilder defines the default vulnerability builder used to build results during a scan
var DefaultVulnerabilityBuilder = func(ctx *QueryContext,
tracker Tracker,
v interface{},
detector *dec.DetectLine,
useOldSeverities bool,
kicsComputeNewSimID bool) (*model.Vulnerability, error) {
vObj, ok := v.(map[string]interface{})
if !ok {
return &model.Vulnerability{}, ErrInvalidResult
}
vObj = mergeWithMetadata(vObj, ctx.Query.Metadata.Metadata)
var err error
var output []byte
output, err = json.Marshal(vObj)
if err != nil {
return &model.Vulnerability{}, errors.Wrap(err, "failed to marshall query output")
}
var fileID *string
fileID, err = mapKeyToString(vObj, "documentId", false)
if err != nil {
return &model.Vulnerability{}, errors.Wrap(err, "failed to recognize file id")
}
file, ok := ctx.Files[*fileID]
if !ok {
return &model.Vulnerability{}, errors.New("failed to find file from query response")
}
logWithFields := log.With().
Str("scanID", ctx.scanID).
Str("fileName", file.FilePath).
Str("queryName", ctx.Query.Metadata.Query).
Logger()
detector.SetupLogs(&logWithFields)
linesVulne := model.VulnerabilityLines{
Line: -1,
VulnLines: &[]model.CodeLine{},
}
similarityIDLineInfo := ""
searchKey := ""
if s, ok := vObj["searchKey"]; ok {
searchKey = s.(string)
similarityIDLineInfo = searchKey
intDoc := file.LineInfoDocument
vulsSplit := strings.Split(searchKey, ".")
if file.Kind == model.KindINI {
vulsSplit, searchKey = sanitizeINIKey(vulsSplit)
}
if strings.Contains(vulsSplit[len(vulsSplit)-1], "RefMetadata") {
return &model.Vulnerability{}, ErrNoResult
}
// modify the search key in cases where it should be referencing a ref instead of part of the resolved object
searchKey, _ = modifyVulSearchKeyReference(intDoc, searchKey, vulsSplit)
vObj["searchKey"] = searchKey
linesVulne = detector.DetectLine(&file, searchKey, &logWithFields)
} else {
logWithFields.Error().Msg("Saving result. failed to detect line")
}
lineNumber := 0
var similarityIDLineInfoOld = similarityIDLineInfo
if file.Kind != model.KindHELM && len(file.ResolvedFiles) == 0 {
searchLineCalc := &searchLineCalculator{
lineNr: -1,
vObj: vObj,
file: file,
detector: detector,
similarityIDLineInfo: similarityIDLineInfo,
linesVulne: linesVulne,
}
// calculate search Line if possible (default uses values of search key)
lineNumber, similarityIDLineInfoOld, linesVulne = calculeSearchLine(searchLineCalc)
}
if linesVulne.Line == -1 {
logWithFields.Warn().Msgf("Failed to detect line, query response %s", searchKey)
linesVulne.Line = 1
}
searchValue := ""
if s, ok := vObj["searchValue"]; ok {
searchValue = s.(string)
}
overrideKey := ""
if s, ok := vObj["overrideKey"]; ok {
overrideKey = s.(string)
}
queryID := getStringFromMap("id", DefaultQueryID, overrideKey, vObj, &logWithFields)
severity := getResolvedSeverity(vObj, &logWithFields, overrideKey, useOldSeverities)
issueType := DefaultIssueType
if v := mustMapKeyToString(vObj, "issueType"); v != nil {
issueType = model.IssueType(*v)
}
similarityID, oldSimilarityID := generateSimilaritiesID(ctx, linesVulne.ResolvedFile, queryID, similarityIDLineInfo, searchValue,
searchKey, similarityIDLineInfoOld, kicsComputeNewSimID, &logWithFields, tracker)
return &model.Vulnerability{
ID: 0,
SimilarityID: PtrStringToString(similarityID),
OldSimilarityID: PtrStringToString(oldSimilarityID),
ScanID: ctx.scanID,
FileID: file.ID,
FileName: linesVulne.ResolvedFile,
QueryName: getStringFromMap("queryName", DefaultQueryName, overrideKey, vObj, &logWithFields),
QueryID: queryID,
Experimental: getBoolFromMap("experimental", DefaultExperimental, overrideKey, vObj, &logWithFields),
QueryURI: getStringFromMap("descriptionUrl", DefaultQueryURI, overrideKey, vObj, &logWithFields),
Category: getStringFromMap("category", "", overrideKey, vObj, &logWithFields),
Description: getStringFromMap("descriptionText", "", overrideKey, vObj, &logWithFields),
DescriptionID: getStringFromMap("descriptionID", DefaultQueryDescriptionID, overrideKey, vObj, &logWithFields),
Severity: severity,
Platform: getStringFromMap("platform", "", overrideKey, vObj, &logWithFields),
CWE: getStringFromMap("cwe", "", overrideKey, vObj, &logWithFields),
Line: linesVulne.Line,
VulnLines: linesVulne.VulnLines,
ResourceType: PtrStringToString(mustMapKeyToString(vObj, "resourceType")),
ResourceName: PtrStringToString(mustMapKeyToString(vObj, "resourceName")),
IssueType: issueType,
SearchKey: searchKey,
SearchLine: lineNumber,
SearchValue: searchValue,
KeyExpectedValue: PtrStringToString(mustMapKeyToString(vObj, "keyExpectedValue")),
KeyActualValue: PtrStringToString(mustMapKeyToString(vObj, "keyActualValue")),
Value: mustMapKeyToString(vObj, "value"),
Output: string(output),
CloudProvider: getCloudProvider(overrideKey, vObj, &logWithFields),
Remediation: PtrStringToString(mustMapKeyToString(vObj, "remediation")),
RemediationType: PtrStringToString(mustMapKeyToString(vObj, "remediationType")),
}, nil
}
// <editor-fold desc="similarity id">
func generateSimilaritiesID(ctx *QueryContext,
resolvedFile, queryID, similarityIDLineInfo, searchValue, searchKey, similarityIDLineInfoOld string,
kicsComputeNewSimID bool,
logWithFields *zerolog.Logger,
tracker Tracker) (similarityID, oldSimilarityID *string) {
if kicsComputeNewSimID {
similarityID, err := buildSimilarityID(ctx, resolvedFile, queryID, similarityIDLineInfo, searchValue)
if err != nil {
logWithFields.Err(err).Send()
tracker.FailedComputeSimilarityID()
}
oldSimilarityID, err = oldBuildSimilarityID(ctx, resolvedFile, queryID, searchKey, similarityIDLineInfoOld, searchValue)
if err != nil {
logWithFields.Err(err).Send()
tracker.FailedComputeOldSimilarityID()
}
return similarityID, oldSimilarityID
} else {
similarityID, err := oldBuildSimilarityID(ctx, resolvedFile, queryID, searchKey, similarityIDLineInfoOld, searchValue)
if err != nil {
logWithFields.Err(err).Send()
tracker.FailedComputeSimilarityID()
}
return similarityID, oldSimilarityID
}
}
func buildSimilarityID(
ctx *QueryContext,
resolvedFile,
queryID,
searchKey,
searchValue string) (*string, error) {
return similarity.ComputeSimilarityID(ctx.BaseScanPaths, resolvedFile, queryID, searchKey, searchValue)
}
// <editor-fold desc="old similarity id">
func oldBuildSimilarityID(
ctx *QueryContext,
resolvedFile,
queryID,
searchKey,
similarityIDLineInfo,
searchValue string) (*string, error) {
if checkMinified(ctx, resolvedFile) {
return similarity.ComputeSimilarityID(ctx.BaseScanPaths, resolvedFile, queryID, searchKey, searchValue)
} else {
return similarity.ComputeSimilarityID(ctx.BaseScanPaths, resolvedFile, queryID, similarityIDLineInfo, searchValue)
}
}
func checkMinified(ctx *QueryContext, resolvedFile string) bool {
for i := range ctx.Files {
if ctx.Files[i].FilePath == resolvedFile {
return ctx.Files[i].IsMinified
}
}
return false
}
// </editor-fold>
// </editor-fold>
func getCloudProvider(overrideKey string, vObj map[string]interface{}, logWithFields *zerolog.Logger) string {
cloudProvider := ""
if _, ok := vObj["cloudProvider"]; ok {
cloudProvider = getStringFromMap("cloudProvider", "", overrideKey, vObj, logWithFields)
}
return cloudProvider
}
// calculate search Line if possible (default uses values of search key)
func calculeSearchLine(searchLineCalc *searchLineCalculator) (lineNumber int,
similarityIDLineInfo string, linesVulne model.VulnerabilityLines) {
searchLineCalc.calculate()
lineNumber = searchLineCalc.lineNr
similarityIDLineInfo = searchLineCalc.similarityIDLineInfo
linesVulne = searchLineCalc.linesVulne
return lineNumber, similarityIDLineInfo, linesVulne
}
func getResolvedSeverity(vObj map[string]interface{}, logWithFields *zerolog.Logger,
overrideKey string, useOldSeverities bool) model.Severity {
var severity model.Severity = model.SeverityInfo
s, err := mapKeyToString(vObj, "severity", false)
if err == nil {
sev := getSeverity(strings.ToUpper(*s))
if sev == "" {
logWithFields.Warn().Str("severity", *s).Msg("Saving result. invalid severity constant value")
} else {
severity = sev
overrideValue := tryOverride(overrideKey, "severity", vObj)
if overrideValue != nil {
sev = getSeverity(strings.ToUpper(*overrideValue))
if sev != "" {
severity = sev
}
} else if useOldSeverities {
oldS, errOld := mapKeyToString(vObj, "oldSeverity", false)
if errOld == nil {
oldSev := getSeverity(strings.ToUpper(*oldS))
severity = oldSev
}
}
}
} else {
logWithFields.Info().Msg("Saving result. failed to detect severity")
}
return severity
}
// sanitizeINIKey removes redundant search key elements such as "all" and "children"
func sanitizeINIKey(vulsSplit []string) (vulsRefact []string, searchKey string) {
length := len(vulsSplit)
vulsRefact = vulsSplit
if length >= searchKeyMinLen {
vulsRefact = []string{"[" + vulsSplit[2] + "]"}
if length >= searchKeyMinLen+2 {
vulsRefact = append(vulsRefact, vulsSplit[4])
if length >= searchKeyMinLen+4 {
vulsRefact = append(vulsRefact, vulsSplit[6])
}
}
}
return vulsRefact, strings.Join(vulsRefact, ".")
}
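// For instance (with a purely hypothetical search key), sanitizeINIKey above
// reduces a split of length 7 such as ["s0","s1","s2","s3","s4","s5","s6"] to
// ["[s2]", "s4", "s6"], yielding the search key "[s2].s4.s6": only the
// elements at indexes 2, 4 and 6 are kept, with the first wrapped in brackets.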
package engine
import (
"encoding/json"
"fmt"
"strconv"
dec "github.com/Checkmarx/kics/v2/pkg/detector"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
)
type searchLineCalculator struct {
lineNr int
vObj map[string]interface{}
file model.FileMetadata
detector *dec.DetectLine
similarityIDLineInfo string
linesVulne model.VulnerabilityLines
}
func (s *searchLineCalculator) calculate() {
if searchLine, ok := s.vObj["searchLine"]; ok {
line := make([]string, 0, len(searchLine.([]interface{})))
for _, strElement := range searchLine.([]interface{}) {
line = append(line, strElement.(string))
}
var err error
s.lineNr, err = dec.GetLineBySearchLine(line, &s.file)
if err != nil {
log.Error().Msgf("failed to get line information from searchLine, using searchKey")
}
if s.lineNr >= 0 {
s.similarityIDLineInfo = strconv.Itoa(s.lineNr)
s.linesVulne = s.detector.GetAdjacent(&s.file, s.lineNr)
}
}
}
func mergeWithMetadata(base, additional map[string]interface{}) map[string]interface{} {
for k, v := range additional {
if _, ok := base[k]; ok {
continue
}
base[k] = v
}
return base
}
func mustMapKeyToString(m map[string]interface{}, key string) *string {
res, err := mapKeyToString(m, key, true)
excludedFields := []string{"value", "resourceName", "resourceType", "remediation", "remediationType"}
if err != nil && !utils.Contains(key, excludedFields) {
log.Warn().
Str("reason", err.Error()).
Msgf("Failed to get key %s in map", key)
}
return res
}
func mapKeyToString(m map[string]interface{}, key string, allowNil bool) (*string, error) {
v, ok := m[key]
if !ok {
return nil, fmt.Errorf("key '%s' not found in map", key)
}
switch vv := v.(type) {
case json.Number:
return stringToPtrString(vv.String()), nil
case string:
return stringToPtrString(vv), nil
case int, int32, int64:
return stringToPtrString(fmt.Sprintf("%d", vv)), nil
case float32:
return stringToPtrString(strconv.FormatFloat(float64(vv), 'f', -1, formatFloat64)), nil
case float64:
return stringToPtrString(strconv.FormatFloat(vv, 'f', -1, formatFloat64)), nil
case nil:
if allowNil {
return nil, nil
}
return stringToPtrString("null"), nil
case bool:
return stringToPtrString(fmt.Sprintf("%v", vv)), nil
}
log.Debug().
Msg("Detecting line. can't format item to string")
if allowNil {
return nil, nil
}
return stringToPtrString(""), nil
}
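// A minimal sketch of the conversions mapKeyToString performs above (the map
// literal is hypothetical):
//
//    m := map[string]interface{}{"a": json.Number("3"), "b": 2.50, "c": nil}
//    s, _ := mapKeyToString(m, "a", false) // *s == "3"
//    s, _ = mapKeyToString(m, "b", false)  // *s == "2.5" (trailing zeros trimmed)
//    s, _ = mapKeyToString(m, "c", true)   // s == nil: nil is allowed
//    s, _ = mapKeyToString(m, "c", false)  // *s == "null"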
func stringToPtrString(v string) *string {
return &v
}
// PtrStringToString - converts a pointer to string to a string
func PtrStringToString(v *string) string {
if v == nil {
return ""
}
return *v
}
func tryOverride(overrideKey, vulnParam string, vObj map[string]interface{}) *string {
if overrideKey != "" {
if override, ok := vObj["override"].(map[string]interface{}); ok {
if overrideObject, ok := override[overrideKey].(map[string]interface{}); ok {
if _, ok := overrideObject[vulnParam]; ok {
overrideValue, err := mapKeyToString(overrideObject, vulnParam, true)
if err != nil {
return nil
} else if overrideValue != nil {
return overrideValue
}
}
}
}
}
return nil
}
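// A minimal sketch of the override payload shape tryOverride above expects
// (the keys are hypothetical): the query result carries an "override" object
// keyed by overrideKey, whose fields replace the top-level values.
//
//    vObj := map[string]interface{}{
//        "severity": "HIGH",
//        "override": map[string]interface{}{
//            "myResource": map[string]interface{}{"severity": "LOW"},
//        },
//    }
//    tryOverride("myResource", "severity", vObj) // points to "LOW"
//    tryOverride("", "severity", vObj)           // nil: no override key given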
func getStringFromMap(vulnParam, defaultParam, overrideKey string, vObj map[string]interface{}, logWithFields *zerolog.Logger) string {
ts, err := mapKeyToString(vObj, vulnParam, false)
if err != nil {
logWithFields.Err(err).
Msgf("Saving result. failed to detect %s", vulnParam)
return defaultParam
}
overrideValue := tryOverride(overrideKey, vulnParam, vObj)
if overrideValue != nil {
ts = overrideValue
}
return *ts
}
func getBoolFromMap(
vulnParam string,
defaultParam bool,
overrideKey string,
vObj map[string]interface{},
logWithFields *zerolog.Logger) bool {
ts, err := mapKeyToString(vObj, vulnParam, false)
if err != nil {
return defaultParam
}
overrideValue := tryOverride(overrideKey, vulnParam, vObj)
if overrideValue != nil {
ts = overrideValue
}
res, err := strconv.ParseBool(*ts)
if err != nil {
logWithFields.Err(err).
Msgf("Saving result. failed to detect %s", vulnParam)
return defaultParam
}
return res
}
func getSeverity(severity string) model.Severity {
for _, si := range model.AllSeverities {
if severity == string(si) {
return si
}
}
return ""
}
package kics
import (
"bytes"
"context"
"encoding/json"
"fmt"
"regexp"
"sort"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/minified"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
func (s *Service) resolverSink(
ctx context.Context,
filename, scanID string,
openAPIResolveReferences bool,
maxResolverDepth int) ([]string, error) {
kind := s.Resolver.GetType(filename)
if kind == model.KindCOMMON {
return []string{}, nil
}
resFiles, err := s.Resolver.Resolve(filename, kind)
if err != nil {
log.Err(err).Msgf("failed to render file content")
return []string{}, err
}
for _, rfile := range resFiles.File {
s.Tracker.TrackFileFound(rfile.FileName)
isMinified := minified.IsMinified(rfile.FileName, rfile.Content)
documents, err := s.Parser.Parse(rfile.FileName, rfile.Content, openAPIResolveReferences, isMinified, maxResolverDepth)
if err != nil {
if documents.Kind == "break" {
return []string{}, nil
}
log.Err(err).Msgf("failed to parse file content")
return []string{}, nil
}
if kind == model.KindHELM {
ignoreList, errorIL := s.getOriginalIgnoreLines(
rfile.FileName, rfile.OriginalData,
openAPIResolveReferences, isMinified, maxResolverDepth)
if errorIL == nil {
documents.IgnoreLines = ignoreList
// Need to ignore #KICS_HELM_ID Line
documents.CountLines = bytes.Count(rfile.OriginalData, []byte{'\n'})
}
} else {
documents.CountLines = bytes.Count(rfile.OriginalData, []byte{'\n'}) + 1
}
fileCommands := s.Parser.CommentsCommands(rfile.FileName, rfile.OriginalData)
for _, document := range documents.Docs {
_, err = json.Marshal(document)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("failed to marshal content in file: %s", rfile.FileName),
Err: err,
Location: "func resolverSink()",
FileName: rfile.FileName,
Kind: kind,
}, true)
continue
}
if len(documents.IgnoreLines) > 0 {
sort.Ints(documents.IgnoreLines)
}
file := model.FileMetadata{
ID: uuid.New().String(),
ScanID: scanID,
Document: PrepareScanDocument(document, kind),
OriginalData: string(rfile.OriginalData),
LineInfoDocument: document,
Kind: kind,
FilePath: rfile.FileName,
Content: string(rfile.Content),
HelmID: rfile.SplitID,
Commands: fileCommands,
IDInfo: rfile.IDInfo,
LinesIgnore: documents.IgnoreLines,
ResolvedFiles: documents.ResolvedFiles,
LinesOriginalData: utils.SplitLines(string(rfile.OriginalData)),
IsMinified: documents.IsMinified,
}
s.saveToFile(ctx, &file)
}
s.Tracker.TrackFileParse(rfile.FileName)
s.Tracker.TrackFileFoundCountLines(documents.CountLines)
s.Tracker.TrackFileParseCountLines(documents.CountLines - len(documents.IgnoreLines))
s.Tracker.TrackFileIgnoreCountLines(len(documents.IgnoreLines))
}
return resFiles.Excluded, nil
}
func (s *Service) getOriginalIgnoreLines(filename string,
originalFile []uint8,
openAPIResolveReferences, isMinified bool,
maxResolverDepth int) (ignoreLines []int, err error) {
refactor := regexp.MustCompile(`.*\n?.*KICS_HELM_ID.+\n`).ReplaceAll(originalFile, []uint8{})
refactor = regexp.MustCompile(`{{-\s*(.*?)\s*}}`).ReplaceAll(refactor, []uint8{})
documentsOriginal, err := s.Parser.Parse(filename, refactor, openAPIResolveReferences, isMinified, maxResolverDepth)
if err == nil {
ignoreLines = documentsOriginal.IgnoreLines
}
return
}
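// For example (with hypothetical chart content), the two regexes in
// getOriginalIgnoreLines above turn
//
//    "# KICS_HELM_ID_0:\napiVersion: v1 {{- if .Values.x }}\n"
//
// into "apiVersion: v1 \n" before re-parsing, so the ignore-line numbers refer
// to the original chart source rather than the rendered output.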
package kics
import (
"bytes"
"context"
"encoding/json"
"io"
"sync"
"github.com/Checkmarx/kics/v2/pkg/engine"
"github.com/Checkmarx/kics/v2/pkg/engine/provider"
"github.com/Checkmarx/kics/v2/pkg/engine/secrets"
"github.com/Checkmarx/kics/v2/pkg/minified"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser"
"github.com/Checkmarx/kics/v2/pkg/resolver"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
const (
mbConst = 1048576
)
// Storage is the interface that wraps the basic methods: SaveFile, SaveVulnerabilities, GetVulnerabilities and GetScanSummary
// SaveFile should append metadata to a file
// SaveVulnerabilities should append the vulnerabilities list to the current storage
// GetVulnerabilities should return all vulnerabilities associated with a scan ID
// GetScanSummary should return a list of summaries based on the given scan IDs
type Storage interface {
SaveFile(ctx context.Context, metadata *model.FileMetadata) error
SaveVulnerabilities(ctx context.Context, vulnerabilities []model.Vulnerability) error
GetVulnerabilities(ctx context.Context, scanID string) ([]model.Vulnerability, error)
GetScanSummary(ctx context.Context, scanIDs []string) ([]model.SeveritySummary, error)
}
// Tracker is the interface that wraps the basic methods used to track scanning progress, such as TrackFileFound and TrackFileParse
// TrackFileFound should increment the number of files to be scanned
// TrackFileParse should increment the number of files parsed successfully to be scanned
type Tracker interface {
TrackFileFound(path string)
TrackFileParse(path string)
TrackFileFoundCountLines(countLines int)
TrackFileParseCountLines(countLines int)
TrackFileIgnoreCountLines(countLines int)
}
// Service is a struct that contains a SourceProvider to receive sources, a storage to save and retrieve scanning information,
// a parser to parse and provide files in a format that KICS understands, an inspector that runs the scanning and a tracker to
// update scanning numbers
type Service struct {
SourceProvider provider.SourceProvider
Storage Storage
Parser *parser.Parser
Inspector *engine.Inspector
SecretsInspector *secrets.Inspector
Tracker Tracker
Resolver *resolver.Resolver
files model.FileMetadatas
MaxFileSize int
}
// PrepareSources will prepare the sources to be scanned
func (s *Service) PrepareSources(ctx context.Context,
scanID string,
openAPIResolveReferences bool,
maxResolverDepth int,
wg *sync.WaitGroup, errCh chan<- error) {
defer wg.Done()
// CxSAST query under review
data := make([]byte, mbConst)
if err := s.SourceProvider.GetSources(
ctx,
s.Parser.SupportedExtensions(),
func(ctx context.Context, filename string, rc io.ReadCloser) error {
return s.sink(ctx, filename, scanID, rc, data, openAPIResolveReferences, maxResolverDepth)
},
func(ctx context.Context, filename string) ([]string, error) { // Sink used for resolver files and templates
return s.resolverSink(ctx, filename, scanID, openAPIResolveReferences, maxResolverDepth)
},
); err != nil {
errCh <- errors.Wrap(err, "failed to read sources")
}
}
// StartScan executes the scan over the given context, using the scanID as reference
func (s *Service) StartScan(
ctx context.Context,
scanID string,
errCh chan<- error,
wg *sync.WaitGroup,
currentQuery chan<- int64) {
log.Debug().Msg("service.StartScan()")
defer wg.Done()
secretsVulnerabilities, err := s.SecretsInspector.Inspect(
ctx,
s.SourceProvider.GetBasePaths(),
s.files,
currentQuery,
)
if err != nil {
errCh <- errors.Wrap(err, "failed to inspect secrets")
}
vulnerabilities, err := s.Inspector.Inspect(
ctx,
scanID,
s.files,
s.SourceProvider.GetBasePaths(),
s.Parser.Platform,
currentQuery,
)
if err != nil {
errCh <- errors.Wrap(err, "failed to inspect files")
}
vulnerabilities = append(vulnerabilities, secretsVulnerabilities...)
updateMaskedSecrets(&vulnerabilities, s.SecretsInspector.SecretTracker)
err = s.Storage.SaveVulnerabilities(ctx, vulnerabilities)
if err != nil {
errCh <- errors.Wrap(err, "failed to save vulnerabilities")
}
}
// Content keeps the content of the file and the number of lines
type Content struct {
Content *[]byte
CountLines int
IsMinified bool
}
/*
getContent will read the passed file 1MB at a time
to prevent resource exhaustion and return its content
*/
func getContent(rc io.Reader, data []byte, maxSizeMB int, filename string) (*Content, error) {
var content []byte
countLines := 0
c := &Content{
Content: &[]byte{},
CountLines: 0,
}
for {
if maxSizeMB < 0 {
return c, errors.New("file size limit exceeded")
}
data = data[:cap(data)]
n, err := rc.Read(data)
if err != nil {
if err == io.EOF {
break
}
return c, err
}
countLines += bytes.Count(data[:n], []byte{'\n'}) + 1
content = append(content, data[:n]...)
maxSizeMB--
}
c.Content = &content
c.CountLines = countLines
c.IsMinified = minified.IsMinified(filename, content)
return c, nil
}
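// Example (illustrative): a minimal sketch of how getContent is wired, reusing a
// 1MB scratch buffer and capping the file size in megabytes. The reader contents
// and the 5MB cap below are hypothetical values for illustration.
func exampleGetContent() {
	data := make([]byte, mbConst)
	c, err := getContent(bytes.NewReader([]byte("line one\nline two\n")), data, 5, "example.txt")
	if err != nil {
		log.Err(err).Msg("failed to get content")
		return
	}
	log.Debug().Msgf("read %d bytes in %d lines (minified: %v)", len(*c.Content), c.CountLines, c.IsMinified)
}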
// GetVulnerabilities returns a list of vulnerabilities detected by the scan
func (s *Service) GetVulnerabilities(ctx context.Context, scanID string) ([]model.Vulnerability, error) {
return s.Storage.GetVulnerabilities(ctx, scanID)
}
// GetScanSummary returns how many vulnerabilities of each severity were found
func (s *Service) GetScanSummary(ctx context.Context, scanIDs []string) ([]model.SeveritySummary, error) {
return s.Storage.GetScanSummary(ctx, scanIDs)
}
func (s *Service) saveToFile(ctx context.Context, file *model.FileMetadata) {
err := s.Storage.SaveFile(ctx, file)
if err == nil {
s.files = append(s.files, *file)
}
}
// PrepareScanDocument removes _kics_lines from payload and parses json filters
func PrepareScanDocument(body map[string]interface{}, kind model.FileKind) map[string]interface{} {
var bodyMap map[string]interface{}
j, err := json.Marshal(body)
if err != nil {
log.Error().Msgf("failed to remove kics line information")
return body
}
if err := json.Unmarshal(j, &bodyMap); err != nil {
log.Error().Msgf("failed to remove kics line information: '%s'", err)
return body
}
prepareScanDocumentRoot(bodyMap, kind)
return bodyMap
}
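// Example (illustrative): the "_kics_lines" bookkeeping keys injected by the
// parsers are stripped before a document reaches the inspectors. The payload
// below is a hypothetical Terraform-like document.
func examplePrepareScanDocument() {
	doc := map[string]interface{}{
		"resource":    map[string]interface{}{"aws_s3_bucket": map[string]interface{}{"_kics_lines": nil}},
		"_kics_lines": nil,
	}
	clean := PrepareScanDocument(doc, model.KindTerraform)
	// clean no longer contains any "_kics_lines" keys, at any nesting level
	_ = clean
}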
func prepareScanDocumentRoot(body interface{}, kind model.FileKind) {
switch bodyType := body.(type) {
case map[string]interface{}:
prepareScanDocumentValue(bodyType, kind)
case []interface{}:
for _, indx := range bodyType {
prepareScanDocumentRoot(indx, kind)
}
}
}
func prepareScanDocumentValue(bodyType map[string]interface{}, kind model.FileKind) {
delete(bodyType, "_kics_lines")
for key, v := range bodyType {
switch value := v.(type) {
case map[string]interface{}:
prepareScanDocumentRoot(value, kind)
case []interface{}:
for _, indx := range value {
prepareScanDocumentRoot(indx, kind)
}
case string:
if field, ok := lines[kind]; ok && utils.Contains(key, field) {
bodyType[key] = resolveJSONFilter(value)
}
}
}
}
func updateMaskedSecrets(vulnerabilities *[]model.Vulnerability, maskedSecretsTracked []secrets.SecretTracker) {
for idx := range *vulnerabilities {
for _, secretT := range maskedSecretsTracked {
updateMaskedSecretLine(&(*vulnerabilities)[idx], secretT)
}
}
}
func updateMaskedSecretLine(vulnerability *model.Vulnerability, secretT secrets.SecretTracker) {
if vulnerability.FileName == secretT.ResolvedFilePath {
for vlidx := range *vulnerability.VulnLines {
if (*vulnerability.VulnLines)[vlidx].Position == secretT.Line {
(*vulnerability.VulnLines)[vlidx].Line = secretT.MaskedContent
}
}
}
}
package kics
import (
"context"
"encoding/json"
"fmt"
"io"
"regexp"
"sort"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser/jsonfilter/parser"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/antlr4-go/antlr/v4"
"github.com/google/uuid"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
var (
lines = map[model.FileKind][]string{
"TF": {"pattern"},
"JSON": {"FilterPattern"},
"YAML": {"filter_pattern", "FilterPattern"},
}
)
func (s *Service) sink(ctx context.Context, filename, scanID string,
rc io.Reader, data []byte,
openAPIResolveReferences bool,
maxResolverDepth int) error {
s.Tracker.TrackFileFound(filename)
log.Debug().Msgf("Starting to process file %s", filename)
	c, err := getContent(rc, data, s.MaxFileSize, filename)
	if err != nil {
		return errors.Wrapf(err, "failed to get file content: %s", filename)
	}
	*c.Content = resolveCRLFFile(*c.Content)
	content := c.Content
	s.Tracker.TrackFileFoundCountLines(c.CountLines)
documents, err := s.Parser.Parse(filename, *content, openAPIResolveReferences, c.IsMinified, maxResolverDepth)
if err != nil {
log.Err(err).Msgf("failed to parse file content: %s", filename)
return nil
}
linesResolved := 0
for _, ref := range documents.ResolvedFiles {
if ref.Path != filename {
linesResolved += len(*ref.LinesContent)
}
}
s.Tracker.TrackFileFoundCountLines(linesResolved)
fileCommands := s.Parser.CommentsCommands(filename, *content)
for _, document := range documents.Docs {
_, err = json.Marshal(document)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Message: fmt.Sprintf("failed to marshal content in file: %s", filename),
Err: err,
Location: "func sink()",
FileName: filename,
Kind: documents.Kind,
}, true)
continue
}
if len(documents.IgnoreLines) > 0 {
sort.Ints(documents.IgnoreLines)
}
file := model.FileMetadata{
ID: uuid.New().String(),
ScanID: scanID,
Document: PrepareScanDocument(document, documents.Kind),
LineInfoDocument: document,
OriginalData: documents.Content,
Kind: documents.Kind,
FilePath: filename,
Commands: fileCommands,
LinesIgnore: documents.IgnoreLines,
ResolvedFiles: documents.ResolvedFiles,
LinesOriginalData: utils.SplitLines(documents.Content),
IsMinified: documents.IsMinified,
}
s.saveToFile(ctx, &file)
}
s.Tracker.TrackFileParse(filename)
log.Debug().Msgf("Finished to process file %s", filename)
s.Tracker.TrackFileParseCountLines(documents.CountLines - len(documents.IgnoreLines))
s.Tracker.TrackFileIgnoreCountLines(len(documents.IgnoreLines))
return errors.Wrap(err, "failed to save file content")
}
func resolveCRLFFile(fileContent []byte) []byte {
regex := regexp.MustCompile(`\r\n`)
contentSTR := regex.ReplaceAllString(string(fileContent), "\n")
return []byte(contentSTR)
}
func resolveJSONFilter(jsonFilter string) string {
is := antlr.NewInputStream(jsonFilter)
// lexer build
lexer := parser.NewJSONFilterLexer(is)
lexer.RemoveErrorListeners()
stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
errorListener := parser.NewCustomErrorListener()
lexer.RemoveErrorListeners()
lexer.AddErrorListener(errorListener)
// parser build
p := parser.NewJSONFilterParser(stream)
p.RemoveErrorListeners()
p.AddErrorListener(errorListener)
p.BuildParseTrees = true
tree := p.Awsjsonfilter()
// parse
visitor := parser.NewJSONFilterPrinterVisitor()
if errorListener.HasErrors() {
return jsonFilter
}
parsed := visitor.VisitAll(tree)
parsedByte, err := json.Marshal(parsed)
if err != nil {
return jsonFilter
}
return string(parsedByte)
}
/*
Package kuberneter implements calls to the Kubernetes API in order to scan the runtime information of the resources
*/
package kuberneter
import (
"os"
b64 "encoding/base64"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"sigs.k8s.io/controller-runtime/pkg/client"
)
// K8sConfig saves the config for k8s auth
type K8sConfig struct {
Config *rest.Config
}
func getK8sClient() (client.Client, error) {
// authentication through k8s config file
if os.Getenv("K8S_CONFIG_FILE") != "" {
config, err := clientcmd.BuildConfigFromFlags("", os.Getenv("K8S_CONFIG_FILE"))
if err != nil {
log.Error().Msgf("failed to get k8s client through k8s config file: %s", err)
return nil, err
}
log.Info().Msg("auth to k8s API through k8s config file")
config.QPS = 100
config.Burst = 100
return client.New(config, client.Options{})
}
c := &K8sConfig{
Config: &rest.Config{
QPS: 100,
Burst: 100,
},
}
// authentication through k8s service account token or k8s client certificate
if os.Getenv("K8S_HOST") != "" && c.hasCertificateAuthority() {
c.Config.Host = os.Getenv("K8S_HOST")
// authentication through k8s service account token
if c.hasServiceAccountToken() {
log.Info().Msg("auth to k8s API through k8s service account token")
return client.New(c.Config, client.Options{})
}
// authentication through k8s client certificate
if c.hasClientCertificate() {
log.Info().Msg("auth to k8s API through k8s client certificate")
return client.New(c.Config, client.Options{})
}
}
log.Error().Msg("failed to get k8s client. check the k8s cluster auth information")
return nil, errors.New("failed to get k8s client")
}
func (c *K8sConfig) hasCertificateAuthority() bool {
if os.Getenv("K8S_CA_FILE") != "" {
c.Config.TLSClientConfig.CAFile = os.Getenv("K8S_CA_FILE")
return true
}
if os.Getenv("K8S_CA_DATA") != "" {
caDataDecoded, err := b64.StdEncoding.DecodeString(os.Getenv("K8S_CA_DATA"))
if err != nil {
log.Error().Msgf("failed to decode K8S_CA_DATA: %s", err)
return false
}
c.Config.TLSClientConfig.CAData = caDataDecoded
return true
}
return false
}
func (c *K8sConfig) hasServiceAccountToken() bool {
if os.Getenv("K8S_SA_TOKEN_FILE") != "" {
c.Config.BearerTokenFile = os.Getenv("K8S_SA_TOKEN_FILE")
return true
}
if os.Getenv("K8S_SA_TOKEN_DATA") != "" {
c.Config.BearerToken = os.Getenv("K8S_SA_TOKEN_DATA")
return true
}
return false
}
func (c *K8sConfig) hasClientCertificate() bool {
hasCert := false
if os.Getenv("K8S_CERT_FILE") != "" {
c.Config.TLSClientConfig.CertFile = os.Getenv("K8S_CERT_FILE")
hasCert = true
}
if os.Getenv("K8S_CERT_DATA") != "" {
certDataDecoded, err := b64.StdEncoding.DecodeString(os.Getenv("K8S_CERT_DATA"))
if err != nil {
log.Error().Msgf("failed to decode K8S_CERT_DATA: %s", err)
return false
}
c.Config.TLSClientConfig.CertData = certDataDecoded
hasCert = true
}
if hasCert {
if os.Getenv("K8S_KEY_FILE") != "" {
c.Config.TLSClientConfig.KeyFile = os.Getenv("K8S_KEY_FILE")
return true
}
if os.Getenv("K8S_KEY_DATA") != "" {
keyDataDecoded, err := b64.StdEncoding.DecodeString(os.Getenv("K8S_KEY_DATA"))
if err != nil {
log.Error().Msgf("failed to decode K8S_KEY_DATA: %s", err)
return false
}
c.Config.TLSClientConfig.KeyData = keyDataDecoded
return true
}
}
return false
}
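// Example (illustrative): a minimal sketch of configuring service account token
// authentication through the environment variables read above. The host, CA and
// token paths are hypothetical.
func exampleServiceAccountAuth() {
	_ = os.Setenv("K8S_HOST", "https://10.0.0.1:6443")
	_ = os.Setenv("K8S_CA_FILE", "/etc/kubernetes/pki/ca.crt")
	_ = os.Setenv("K8S_SA_TOKEN_FILE", "/var/run/secrets/kubernetes.io/serviceaccount/token")
	if _, err := getK8sClient(); err != nil {
		log.Error().Msgf("failed to authenticate: %s", err)
	}
}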
/*
Package kuberneter implements calls to the Kubernetes API in order to scan the runtime information of the resources
*/
package kuberneter
import (
"context"
"os"
"path/filepath"
"strings"
"sync"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"k8s.io/apimachinery/pkg/api/meta"
"sigs.k8s.io/controller-runtime/pkg/client"
)
type k8sAPICall struct {
client client.Client
options *K8sAPIOptions
ctx *context.Context
destinationPath string
}
type supportedKinds map[string]map[string]interface{}
var getK8sClientFunc = getK8sClient // for testing purposes
// Import imports the k8s cluster resources into the destination folder using the kuberneter path
func Import(ctx context.Context, kuberneterPath, destinationPath string) (string, error) {
log.Info().Msg("importing k8s cluster resources")
supportedKinds := buildSupportedKinds()
defer func() { supportedKinds = nil }()
// extract k8s API options
k8sAPIOptions, err := extractK8sAPIOptions(kuberneterPath, supportedKinds)
if err != nil {
return "", err
}
// get the k8s client
c, err := getK8sClientFunc()
if err != nil {
return "", err
}
// create folder to save k8s resources
destination, err := getDestinationFolder(destinationPath)
if err != nil {
return "", err
}
if c == nil {
return destination, errors.New("failed to get client")
}
info := &k8sAPICall{
client: c,
options: k8sAPIOptions,
ctx: &ctx,
destinationPath: destination,
}
// list and save k8s resources
for i := range k8sAPIOptions.Namespaces {
info.listK8sResources(i, supportedKinds)
}
return destination, nil
}
func (info *k8sAPICall) listK8sResources(idx int, supKinds *supportedKinds) {
var wg sync.WaitGroup
for apiVersion := range *supKinds {
kinds := (*supKinds)[apiVersion]
if isTarget(apiVersion, info.options.APIVersions) {
wg.Add(1)
go info.listKinds(apiVersion, kinds, info.options.Namespaces[idx], &wg)
}
}
wg.Wait()
}
func (info *k8sAPICall) listKinds(apiVersion string, kinds map[string]interface{}, namespace string, wg *sync.WaitGroup) {
defer wg.Done()
sb := &strings.Builder{}
apiVersionFolder := filepath.Join(info.destinationPath, apiVersion)
if err := os.MkdirAll(apiVersionFolder, os.ModePerm); err != nil {
log.Error().Msgf("unable to create folder %s: %s", apiVersionFolder, err)
return
}
for kind := range kinds {
kindList := kinds[kind]
if !isTarget(kind, info.options.Kinds) {
continue
}
		resource, ok := kindList.(client.ObjectList)
		if !ok {
			continue
		}
err := info.client.List(*info.ctx, resource, client.InNamespace(namespace))
if err != nil {
log.Info().Msgf("failed to list %s: %s", apiVersion, err)
}
objList, err := meta.ExtractList(resource)
if err != nil {
log.Info().Msgf("failed to extract list: %s", err)
}
log.Info().Msgf("KICS found %d %s(s) in %s from %s", len(objList), kind, getNamespace(namespace), apiVersion)
for i := range objList {
item := objList[i]
sb = info.getResource(item, apiVersion, kind, sb)
}
if sb.String() != "" {
info.saveK8sResources(kind, sb.String(), apiVersionFolder)
}
sb.Reset()
}
}
package kuberneter
import (
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
appsv1 "k8s.io/api/apps/v1"
appsv1beta1 "k8s.io/api/apps/v1beta1"
appsv1beta2 "k8s.io/api/apps/v1beta2"
batchv1 "k8s.io/api/batch/v1"
batchv1beta1 "k8s.io/api/batch/v1beta1"
corev1 "k8s.io/api/core/v1"
networkingv1 "k8s.io/api/networking/v1"
networkingv1beta1 "k8s.io/api/networking/v1beta1"
policyv1 "k8s.io/api/policy/v1"
policyv1beta1 "k8s.io/api/policy/v1beta1"
rbacv1 "k8s.io/api/rbac/v1"
rbacv1alpha1 "k8s.io/api/rbac/v1alpha1"
rbacv1beta1 "k8s.io/api/rbac/v1beta1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/serializer/json"
"k8s.io/client-go/kubernetes/scheme"
)
// K8sAPIOptions saves all the necessary information to list the resources
type K8sAPIOptions struct {
Namespaces []string
APIVersions []string
Kinds []string
}
const kuberneterPathLength = 3
func (info *k8sAPICall) saveK8sResources(kind, k8sResourcesContent, apiVersionFolder string) {
file := filepath.Join(apiVersionFolder, kind+"s"+".yaml")
f, err := os.OpenFile(filepath.Clean(file), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
	if err != nil {
		log.Error().Msgf("failed to open file '%s': %s", file, err)
		return
	}
if _, err = f.WriteString(k8sResourcesContent); err != nil {
log.Error().Msgf("failed to write file '%s': %s", file, err)
}
err = f.Close()
if err != nil {
log.Err(err).Msgf("failed to close file: %s", file)
}
}
func (info *k8sAPICall) getResource(o runtime.Object, apiVersion, kind string, sb *strings.Builder) *strings.Builder {
e := json.NewYAMLSerializer(json.DefaultMetaFactory, scheme.Scheme, scheme.Scheme)
begin := fmt.Sprintf("\n---\napiVersion: %s\nkind: %s\n", getAPIVersion(apiVersion), kind)
if _, err := sb.WriteString(begin); err != nil {
log.Err(err).Msg("failed to write")
}
if err := e.Encode(o, sb); err != nil {
log.Err(err).Msg("failed to encode")
}
return sb
}
func extractK8sAPIOptions(path string, supportedKinds *supportedKinds) (*K8sAPIOptions, error) {
pathInfo := strings.Split(path, ":")
if len(pathInfo) != kuberneterPathLength {
return &K8sAPIOptions{}, errors.New("wrong kuberneter path syntax")
}
k8sAPIOptions := &K8sAPIOptions{
Namespaces: strings.Split(pathInfo[0], "+"),
APIVersions: strings.Split(pathInfo[1], "+"),
Kinds: strings.Split(pathInfo[2], "+"),
}
supAPIVersions, supKinds := getSupportedOptions(supportedKinds)
for i := range k8sAPIOptions.APIVersions {
if !utils.Contains(k8sAPIOptions.APIVersions[i], *supAPIVersions) {
return &K8sAPIOptions{}, errors.New("wrong apiVersion: " + k8sAPIOptions.APIVersions[i])
}
}
for i := range k8sAPIOptions.Kinds {
if !utils.Contains(k8sAPIOptions.Kinds[i], *supKinds) {
return &K8sAPIOptions{}, errors.New("wrong kind: " + k8sAPIOptions.Kinds[i])
}
}
if k8sAPIOptions.Namespaces[0] == "*" {
k8sAPIOptions.Namespaces[0] = ""
}
return k8sAPIOptions, nil
}
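// Example (illustrative): the kuberneter path is three ':'-separated segments,
// each a '+'-separated list, with '*' acting as a wildcard. The namespaces and
// kinds below are hypothetical.
func exampleExtractK8sAPIOptions() {
	kinds := buildSupportedKinds()
	opts, err := extractK8sAPIOptions("default+kube-system:apps/v1:Deployment+Pod", kinds)
	if err != nil {
		log.Error().Msgf("invalid kuberneter path: %s", err)
		return
	}
	log.Info().Msgf("namespaces=%v apiVersions=%v kinds=%v", opts.Namespaces, opts.APIVersions, opts.Kinds)
}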
func getNamespace(namespace string) string {
if namespace == "" {
return "all namespaces"
}
return fmt.Sprintf("the namespace %s", namespace)
}
func buildSupportedKinds() *supportedKinds {
supportedKinds := &supportedKinds{
"apps/v1": {
"DaemonSet": &appsv1.DaemonSetList{},
"Deployment": &appsv1.DeploymentList{},
"ReplicaSet": &appsv1.ReplicaSetList{},
"StatefulSet": &appsv1.StatefulSetList{},
},
"core/v1": {
"LimitRange": &corev1.LimitRangeList{},
"Pod": &corev1.PodList{},
"PersistentVolume": &corev1.PersistentVolumeList{},
"PersistentVolumeClaim": &corev1.PersistentVolumeClaimList{},
"ReplicationController": &corev1.ReplicationControllerList{},
"ResourceQuota": &corev1.ResourceQuotaList{},
"Secret": &corev1.SecretList{},
"ServiceAccount": &corev1.ServiceAccountList{},
"Service": &corev1.ServiceList{},
},
"batch/v1": {
"CronJob": &batchv1.CronJobList{},
"Job": &batchv1.JobList{},
},
"networking.k8s.io/v1": {
"IngressClass": &networkingv1.IngressClassList{},
"Ingress": &networkingv1.IngressList{},
"NetworkPolicy": &networkingv1.NetworkPolicyList{},
},
"policy/v1": {
"PodDisruptionBudget": &policyv1.PodDisruptionBudgetList{},
},
"rbac.authorization.k8s.io/v1": {
"ClusterRoleBinding": &rbacv1.ClusterRoleBindingList{},
"ClusterRole": &rbacv1.ClusterRoleList{},
"RoleBinding": &rbacv1.RoleBindingList{},
"Role": &rbacv1.RoleList{},
},
"apps/v1beta1": {
"Deployment": &appsv1beta1.DeploymentList{},
"StatefulSet": &appsv1beta1.StatefulSetList{},
},
"apps/v1beta2": {
"DaemonSet": &appsv1beta2.DaemonSetList{},
"Deployment": &appsv1beta2.DeploymentList{},
"ReplicaSet": &appsv1beta2.ReplicaSetList{},
"StatefulSet": &appsv1beta2.StatefulSet{},
},
"batch/v1beta1": {
"CronJob": &batchv1beta1.CronJobList{},
},
"networking.k8s.io/v1beta1": {
"IngressClass": &networkingv1beta1.IngressClassList{},
"Ingress": &networkingv1beta1.IngressList{},
},
"policy/v1beta1": {
"PodDisruptionBudget": &policyv1beta1.PodDisruptionBudgetList{},
},
"rbac.authorization.k8s.io/v1alpha1": {
"ClusterRoleBinding": &rbacv1alpha1.ClusterRoleBindingList{},
"ClusterRole": &rbacv1alpha1.ClusterRoleList{},
"RoleBinding": &rbacv1alpha1.RoleBindingList{},
"Role": &rbacv1alpha1.RoleList{},
},
"rbac.authorization.k8s.io/v1beta1": {
"ClusterRoleBinding": &rbacv1beta1.ClusterRoleBindingList{},
"ClusterRole": &rbacv1beta1.ClusterRoleList{},
"RoleBinding": &rbacv1beta1.RoleBindingList{},
"Role": &rbacv1beta1.RoleList{},
},
}
return supportedKinds
}
func isTarget(target string, targetOptions []string) bool {
	return targetOptions[0] == "*" || utils.Contains(target, targetOptions)
}
func getDestinationFolder(destinationPath string) (string, error) {
var err error
if destinationPath == "" {
destinationPath, err = os.Getwd()
if err != nil {
return "", errors.Wrap(err, "failed to get working directory")
}
}
destFolderName := fmt.Sprintf("kics-extract-kuberneter-%s", time.Now().Format("01-02-2006"))
destination := filepath.Join(destinationPath, destFolderName)
if err := os.MkdirAll(destination, os.ModePerm); err != nil {
return "", err
}
return destination, nil
}
func getAPIVersion(apiVersion string) string {
if apiVersion == "core/v1" {
return "v1"
}
return apiVersion
}
func getSupportedOptions(supportedKinds *supportedKinds) (v, k *[]string) {
supportedAPIVersions := make([]string, 0)
supKinds := make([]string, 0)
for apiVersion := range *supportedKinds {
supportedAPIVersions = append(supportedAPIVersions, apiVersion)
for kind := range (*supportedKinds)[apiVersion] {
supKinds = append(supKinds, kind)
}
}
supportedAPIVersions = append(supportedAPIVersions, "*")
supKinds = append(supKinds, "*")
return &supportedAPIVersions, &supKinds
}
package minified
import (
"regexp"
"strings"
)
func IsMinified(filename string, content []byte) bool {
if strings.HasSuffix(filename, ".json") {
return isMinifiedJSON(string(content))
}
return false
}
func isMinifiedJSON(content string) bool {
	// Match all whitespace so it can be stripped before counting
	minifiedPattern := regexp.MustCompile(`\s+`)
	// Count the number of non-whitespace characters
	nonWhitespaceCount := len(minifiedPattern.ReplaceAllString(content, ""))
	// A document is considered minified when more than 80% of its characters are non-whitespace
	minifiedThreshold := 0.8
return float64(nonWhitespaceCount)/float64(len(content)) > minifiedThreshold
}
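// Example (illustrative): a compact JSON document is flagged as minified when
// more than 80% of its characters are non-whitespace; only .json files are checked.
func exampleIsMinified() {
	compact := []byte(`{"a":1,"b":[2,3]}`)                      // no whitespace -> ratio 1.0
	pretty := []byte("{\n  \"a\": 1,\n  \"b\": [2, 3]\n}\n")    // indentation drops the ratio below 0.8
	_ = IsMinified("compact.json", compact) // true
	_ = IsMinified("pretty.json", pretty)   // false
	_ = IsMinified("file.yaml", compact)    // false: not a .json file
}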
package model
import (
"reflect"
"strings"
"sync"
"gopkg.in/yaml.v3"
)
// comment is a string type that holds a comment's text
type comment string
// Ignore is a struct that holds the lines to ignore
type Ignore struct {
// Lines is the lines to ignore
Lines []int
}
var (
	// NewIgnore is the global Ignore instance
NewIgnore = &Ignore{}
memoryMu sync.Mutex
)
// build builds the ignore struct
func (i *Ignore) build(lines []int) {
	memoryMu.Lock()
	defer memoryMu.Unlock()
i.Lines = append(i.Lines, lines...)
}
// GetLines returns the lines to ignore
func (i *Ignore) GetLines() []int {
return RemoveDuplicates(i.Lines)
}
// Reset resets the ignore struct
func (i *Ignore) Reset() {
i.Lines = make([]int, 0)
}
// ignoreCommentsYAML sets the lines to ignore for a yaml file
func ignoreCommentsYAML(node *yaml.Node) {
linesIgnore := make([]int, 0)
if node.HeadComment != "" {
		// Sequence Node - Head Comment comes in the root node
linesIgnore = append(linesIgnore, processCommentYAML((*comment)(&node.HeadComment), 0, node, node.Kind, false)...)
NewIgnore.build(linesIgnore)
return
}
// check if comment is in the content
for i, content := range node.Content {
if content.FootComment != "" && i+2 < len(node.Content) {
linesIgnore = append(linesIgnore, processCommentYAML((*comment)(&content.FootComment), i+2, node, node.Kind, true)...)
}
if content.HeadComment == "" {
continue
}
linesIgnore = append(linesIgnore, processCommentYAML((*comment)(&content.HeadComment), i, node, node.Kind, false)...)
}
NewIgnore.build(linesIgnore)
}
// processCommentYAML returns the lines to ignore
func processCommentYAML(comment *comment, position int, content *yaml.Node, kind yaml.Kind, isFooter bool) (linesIgnore []int) {
linesIgnore = make([]int, 0)
switch com := (*comment).value(); com {
case IgnoreLine:
linesIgnore = append(linesIgnore, processLine(kind, content, position)...)
case IgnoreBlock:
linesIgnore = append(linesIgnore, processBlock(kind, content.Content, position)...)
default:
linesIgnore = append(linesIgnore, processRegularLine(string(*comment), content, position, isFooter)...)
}
return
}
func getSeqLastLine(content *yaml.Node) int {
if len(content.Content) == 0 {
return content.Line
}
return content.Content[len(content.Content)-1].Line
}
func getFootComments(comment string, content *yaml.Node, position, commentsNumber int) (linesIgnore []int) {
for { // get the right position where the comment is a foot comment
if content.Content[position].FootComment == comment {
break
}
position--
}
line := content.Content[position].Line
if content.Content[position+1].Kind == yaml.SequenceNode {
// get the last line of the sequence through the sequence after the content that has the comment as a foot comment
// example:
// - proto: tcp // content.Content[position]
// ports: // content.Content[position+1]
// - 80
// - 443 // last line of the sequence
	// # public ALB 80 + 443 must be accessible from everywhere
line = getSeqLastLine(content.Content[position+1])
}
for i := 1; i <= commentsNumber; i++ {
linesIgnore = append(linesIgnore, line+i)
}
return
}
func processRegularLine(comment string, content *yaml.Node, position int, isFooter bool) (linesIgnore []int) {
linesIgnore = make([]int, 0)
if len(content.Content) == 0 {
return linesIgnore
}
line := content.Content[position].Line
commentsNumber := strings.Count(comment, "\n") + 1 // number of comments (coverage of nested comments)
if isFooter { // comment is a foot comment
return getFootComments(comment, content, position, commentsNumber)
}
// comment is not a foot comment
if KICSCommentRgxpYaml.MatchString(comment) {
// has kics-scan ignore at the end of the comment
linesIgnore = append(linesIgnore, line)
}
for i := 1; i <= commentsNumber; i++ {
linesIgnore = append(linesIgnore, line-i)
}
return linesIgnore
}
// processLine returns the lines to ignore for a line
func processLine(kind yaml.Kind, content *yaml.Node, position int) (linesIgnore []int) {
linesIgnore = make([]int, 0)
var nodeToIgnore *yaml.Node
if kind == yaml.ScalarNode {
nodeToIgnore = content
} else {
nodeToIgnore = content.Content[position]
}
linesIgnore = append(linesIgnore, nodeToIgnore.Line-1, nodeToIgnore.Line)
return
}
// processBlock returns the lines to ignore for a block
func processBlock(kind yaml.Kind, content []*yaml.Node, position int) (linesIgnore []int) {
linesIgnore = make([]int, 0)
var contentToIgnore []*yaml.Node
if kind == yaml.SequenceNode {
contentToIgnore = content[position].Content
} else if position == 0 {
contentToIgnore = content
} else {
contentToIgnore = content[position+1].Content
}
linesIgnore = append(linesIgnore, content[position].Line, content[position].Line-1)
linesIgnore = append(linesIgnore, Range(contentToIgnore[0].Line,
getNodeLastLine(contentToIgnore[len(contentToIgnore)-1]))...)
return
}
// getNodeLastLine returns the last line of a node
func getNodeLastLine(node *yaml.Node) (lastLine int) {
lastLine = node.Line
if len(node.Content) > 0 {
for _, content := range node.Content {
if content.Line > lastLine {
lastLine = content.Line
}
if lineContent := getNodeLastLine(content); lineContent > lastLine {
lastLine = lineContent
}
}
} else if reflect.TypeOf(node.Value).Kind() == reflect.String {
lastLine += strings.Count(node.Value, "\n")
}
return
}
// value returns the value of the comment
func (c *comment) value() (value CommentCommand) {
comment := strings.ToLower(string(*c))
if isHelm(comment) {
res := KICSGetContentCommentRgxp.FindString(comment)
if res != "" {
comment = res
}
}
// check if we are working with kics command
if KICSCommentRgxp.MatchString(comment) {
comment = KICSCommentRgxp.ReplaceAllString(comment, "")
comment = strings.Trim(comment, "\n")
commands := strings.Split(strings.Trim(comment, "\r"), " ")
value = ProcessCommands(commands)
return
}
return CommentCommand(comment)
}
func isHelm(comment string) bool {
return strings.Contains(comment, "helm")
}
package model
// RemoveDuplicates removes duplicate lines from a slice of lines.
func RemoveDuplicates(lines []int) []int {
seen := make(map[int]bool)
var result []int
for _, line := range lines {
if !seen[line] {
result = append(result, line)
seen[line] = true
}
}
return result
}
// ProcessCommands processes a slice of commands.
func ProcessCommands(commands []string) CommentCommand {
for _, command := range commands {
switch com := CommentCommand(command); com {
case IgnoreLine:
return IgnoreLine
case IgnoreBlock:
return IgnoreBlock
default:
continue
}
}
return CommentCommand(commands[0])
}
// Range returns a slice of lines between the start and end line numbers.
func Range(start, end int) (lines []int) {
lines = make([]int, end-start+1)
for i := range lines {
lines[i] = start + i
}
return
}
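// Example (illustrative): how the comment-command helpers above behave.
func exampleCommentHelpers() {
	_ = ProcessCommands([]string{"ignore-block"}) // IgnoreBlock
	_ = Range(3, 6)                               // [3 4 5 6]
	_ = RemoveDuplicates([]int{1, 1, 2, 3, 2})    // [1 2 3]
}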
package model
import (
"regexp"
"sort"
"strings"
"github.com/rs/zerolog/log"
)
// Constants to describe a file's kind
const (
KindTerraform FileKind = "TF"
KindBICEP FileKind = "BICEP"
KindJSON FileKind = "JSON"
KindYAML FileKind = "YAML"
KindYML FileKind = "YML"
KindDOCKER FileKind = "DOCKERFILE"
KindPROTO FileKind = "PROTO"
KindCOMMON FileKind = "*"
KindHELM FileKind = "HELM"
KindBUILDAH FileKind = "SH"
KindCFG FileKind = "CFG"
KindINI FileKind = "INI"
)
// Constants to describe commands given from comments
const (
IgnoreLine CommentCommand = "ignore-line"
IgnoreBlock CommentCommand = "ignore-block"
IgnoreComment CommentCommand = "ignore-comment"
)
// Constants to describe vulnerability's severity
const (
SeverityCritical = "CRITICAL"
SeverityHigh = "HIGH"
SeverityMedium = "MEDIUM"
SeverityLow = "LOW"
SeverityInfo = "INFO"
SeverityTrace = "TRACE"
)
// Constants to describe issue's type
const (
IssueTypeMissingAttribute IssueType = "MissingAttribute"
IssueTypeRedundantAttribute IssueType = "RedundantAttribute"
IssueTypeIncorrectValue IssueType = "IncorrectValue"
)
// Arrays to group all constants of one type
var (
AllSeverities = []Severity{
SeverityCritical,
SeverityHigh,
SeverityMedium,
SeverityLow,
SeverityInfo,
SeverityTrace,
}
AllIssueTypesAsString = []string{
string(IssueTypeMissingAttribute),
string(IssueTypeRedundantAttribute),
string(IssueTypeIncorrectValue),
}
)
var (
// KICSCommentRgxp is the regexp to identify if a comment is a KICS comment
KICSCommentRgxp = regexp.MustCompile(`(^|\n)((/{2})|#|;)*\s*kics-scan\s*`)
	// KICSGetContentCommentRgxp is the regexp to get the KICS comment in the Helm case
KICSGetContentCommentRgxp = regexp.MustCompile(`(^|\n)((/{2})|#|;)*\s*kics-scan([^\n]*)\n`)
	// KICSCommentRgxpYaml is the regexp to identify whether a YAML comment ends with a KICS ignore command
KICSCommentRgxpYaml = regexp.MustCompile(`((/{2})|#)*\s*kics-scan\s*(ignore-line|ignore-block)\s*\n*$`)
)
// Version - is the model for the version response
type Version struct {
Latest bool `json:"is_latest"`
LatestVersionTag string `json:"latest_version"`
}
// VulnerabilityLines is the representation of the lines found for an issue
type VulnerabilityLines struct {
Line int
VulnLines *[]CodeLine
LineWithVulnerability string
ResolvedFile string
}
// CommentCommand represents a command given from a comment
type CommentCommand string
// FileKind is the extension of a file
type FileKind string
// Severity of the vulnerability
type Severity string
// IssueType is the issue's type string representation
type IssueType string
// CodeLine is a line containing or adjacent to the vulnerability line, together with its position
type CodeLine struct {
Position int
Line string
}
// ExtractedPathObject is the struct that contains the path location of extracted source
// and a boolean to check if it is a local source
type ExtractedPathObject struct {
Path string
LocalPath bool
}
// CommentsCommands is the list of commands from a file's comments that will be parsed
type CommentsCommands map[string]string
// FileMetadata is a representation of basic information and content of a file
type FileMetadata struct {
ID string `db:"id"`
ScanID string `db:"scan_id"`
Document Document
LineInfoDocument map[string]interface{}
OriginalData string `db:"orig_data"`
Kind FileKind `db:"kind"`
FilePath string `db:"file_path"`
Content string
HelmID string
IDInfo map[int]interface{}
Commands CommentsCommands
LinesIgnore []int
ResolvedFiles map[string]ResolvedFile
LinesOriginalData *[]string
IsMinified bool
}
// QueryMetadata is a representation of general information about a query
type QueryMetadata struct {
InputData string
Query string
Content string
Metadata map[string]interface{}
Platform string
CWE string
// special field for generic queries
// represents how many queries are aggregated into a single rego file
Aggregation int
Experimental bool
}
// Vulnerability is a representation of a detected vulnerability in scanned files
// after running a query
type Vulnerability struct {
ID int `json:"id"`
ScanID string `db:"scan_id" json:"-"`
SimilarityID string `db:"similarity_id" json:"similarityID"`
OldSimilarityID string `db:"old_similarity_id" json:"oldSimilarityID"`
FileID string `db:"file_id" json:"-"`
FileName string `db:"file_name" json:"fileName"`
QueryID string `db:"query_id" json:"queryID"`
QueryName string `db:"query_name" json:"queryName"`
QueryURI string `json:"-"`
Category string `json:"category"`
Experimental bool `json:"experimental"`
Description string `json:"description"`
DescriptionID string `json:"descriptionID"`
Platform string `db:"platform" json:"platform"`
CWE string `db:"cwe" json:"cwe"`
Severity Severity `json:"severity"`
Line int `json:"line"`
VulnLines *[]CodeLine `json:"vulnLines"`
ResourceType string `db:"resource_type" json:"resourceType"`
ResourceName string `db:"resource_name" json:"resourceName"`
IssueType IssueType `db:"issue_type" json:"issueType"`
SearchKey string `db:"search_key" json:"searchKey"`
SearchLine int `db:"search_line" json:"searchLine"`
SearchValue string `db:"search_value" json:"searchValue"`
KeyExpectedValue string `db:"key_expected_value" json:"expectedValue"`
KeyActualValue string `db:"key_actual_value" json:"actualValue"`
Value *string `db:"value" json:"value"`
Output string `json:"-"`
CloudProvider string `json:"cloud_provider"`
Remediation string `db:"remediation" json:"remediation"`
RemediationType string `db:"remediation_type" json:"remediation_type"`
}
// QueryConfig is a struct that contains the fileKind and platform of the rego query
type QueryConfig struct {
FileKind []FileKind
Platform string
}
// ResolvedFiles keeps the information of all file/template resolved
type ResolvedFiles struct {
File []ResolvedHelm
Excluded []string
}
// ResolvedHelm keeps the information of a file/template resolved
type ResolvedHelm struct {
FileName string
Content []byte
OriginalData []byte
SplitID string
IDInfo map[int]interface{}
}
// Extensions represents a list of supported extensions
type Extensions map[string]struct{}
// Include returns true if an extension is included in supported extensions listed
// otherwise returns false
func (e Extensions) Include(ext string) bool {
_, b := e[ext]
return b
}
// LineObject is the struct that will hold line information for each key
type LineObject struct {
Line int `json:"_kics_line"`
Arr []map[string]*LineObject `json:"_kics_arr,omitempty"`
}
// MatchedFilesRegex returns the regex rule to identify if an extension is supported or not
func (e Extensions) MatchedFilesRegex() string {
if len(e) == 0 {
return "NO_MATCHED_FILES"
}
var parts []string
for ext := range e {
parts = append(parts, "\\"+ext)
}
sort.Strings(parts)
return "(.*)(" + strings.Join(parts, "|") + ")$"
}
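// Example (illustrative): the regex generated for a two-extension set; the
// parts are sorted so the output is deterministic.
func exampleMatchedFilesRegex() {
	e := Extensions{".json": {}, ".yaml": {}}
	_ = e.MatchedFilesRegex() // `(.*)(\.json|\.yaml)$`
	_ = e.Include(".json")    // true
}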
// FileMetadatas is a slice of FileMetadata
type FileMetadatas []FileMetadata
// ToMap creates a map of FileMetadatas, where the key is the FileMetadata ID and the value is the FileMetadata
func (m FileMetadatas) ToMap() map[string]FileMetadata {
c := make(map[string]FileMetadata, len(m))
for i := 0; i < len(m); i++ {
c[m[i].ID] = m[i]
}
return c
}
// Documents is a collection of parsed documents
type Documents struct {
	Documents []Document `json:"document"`
}
// Document is a parsed file's content represented as a generic map
type Document map[string]interface{}
// Combine merges documents from FileMetadatas, setting each document's "id" from the FileMetadata ID and "file" from its FilePath
func (m FileMetadatas) Combine(lineInfo bool) Documents {
documents := Documents{Documents: make([]Document, 0, len(m))}
for i := 0; i < len(m); i++ {
_, ignore := m[i].Commands["ignore"]
if len(m[i].Document) == 0 {
continue
}
if ignore {
log.Debug().Msgf("Ignoring file %s", m[i].FilePath)
continue
}
if lineInfo {
m[i].LineInfoDocument["id"] = m[i].ID
m[i].LineInfoDocument["file"] = m[i].FilePath
documents.Documents = append(documents.Documents, m[i].LineInfoDocument)
} else {
m[i].Document["id"] = m[i].ID
m[i].Document["file"] = m[i].FilePath
documents.Documents = append(documents.Documents, m[i].Document)
}
}
return documents
}
// AnalyzedPaths is a slice of types and excluded files obtained from the Analyzer
type AnalyzedPaths struct {
Types []string
Exc []string
ExpectedLOC int
}
// ResolvedFileSplit is a struct that contains the information of a resolved file, the path and the lines of the file
type ResolvedFileSplit struct {
Path string
Lines []string
}
// ResolvedFile is a struct that contains the information of a resolved file, the path and the content in bytes of the file
type ResolvedFile struct {
Path string
Content []byte
LinesContent *[]string
}
package model
import (
json "encoding/json"
"errors"
"path/filepath"
"strconv"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/rs/zerolog/log"
"gopkg.in/yaml.v3"
)
// UnmarshalYAML is a custom yaml parser that places line information in the payload
func (m *Document) UnmarshalYAML(value *yaml.Node) error {
dpc := unmarshal(value)
if mapDcp, ok := dpc.(map[string]interface{}); ok {
// set line information for root level objects
mapDcp["_kics_lines"] = getLines(value, 0)
// place the payload in the Document struct
tmp, _ := json.Marshal(mapDcp)
_ = json.Unmarshal(tmp, m)
return nil
}
return errors.New("failed to parse yaml content")
}
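// Example (illustrative): unmarshalling YAML through Document attaches the
// "_kics_lines" metadata alongside the original keys.
func exampleUnmarshalYAML() {
	var doc Document
	if err := yaml.Unmarshal([]byte("key: value\n"), &doc); err != nil {
		log.Error().Msgf("failed to unmarshal: %s", err)
		return
	}
	// doc now holds {"key": "value", "_kics_lines": {...line information...}}
	_ = doc
}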
// GetIgnoreLines gets the lines to ignore in the KICS results.
// The ignore lines can include lines from resolved files; since the secrets
// inspector only looks at the original data, they are recomputed for YAML files
func GetIgnoreLines(file *FileMetadata) []int {
ignoreLines := file.LinesIgnore
if utils.Contains(filepath.Ext(file.FilePath), []string{".yml", ".yaml"}) {
NewIgnore.Reset()
var node yaml.Node
if err := yaml.Unmarshal([]byte(file.OriginalData), &node); err != nil {
log.Info().Msgf("failed to unmarshal file: %s", err)
return ignoreLines
}
		if node.Kind == yaml.DocumentNode && len(node.Content) == 1 {
_ = unmarshal(node.Content[0])
ignoreLines = NewIgnore.GetLines()
}
}
return ignoreLines
}
/*
YAML Node TYPES
SequenceNode -> array
ScalarNode -> generic (except for arrays, objects and maps)
MappingNode -> map
*/
// unmarshal is the function that will parse the yaml elements and call the functions needed
// to place their line information in the payload
func unmarshal(val *yaml.Node) interface{} {
tmp := make(map[string]interface{})
ignoreCommentsYAML(val)
	// if the yaml Node is an array then we are working with ansible,
	// which needs to be placed inside "playbooks"
if val.Kind == yaml.SequenceNode {
contentArray := make([]interface{}, 0)
for _, contentEntry := range val.Content {
contentArray = append(contentArray, unmarshal(contentEntry))
}
tmp["playbooks"] = contentArray
} else if val.Kind == yaml.ScalarNode {
// resolve Scalar Node
return scalarNodeResolver(val)
} else {
// iterate two by two, since first iteration is the key and the second is the value
for i := 0; i < len(val.Content); i += 2 {
if val.Content[i].Kind == yaml.ScalarNode {
switch val.Content[i+1].Kind {
case yaml.ScalarNode:
tmp[val.Content[i].Value] = scalarNodeResolver(val.Content[i+1])
// in case value iteration is a map
case yaml.MappingNode:
				// unmarshal map value and get its line information
tt := unmarshal(val.Content[i+1]).(map[string]interface{})
tt["_kics_lines"] = getLines(val.Content[i+1], val.Content[i].Line)
tmp[val.Content[i].Value] = tt
// in case value iteration is an array
case yaml.SequenceNode:
contentArray := make([]interface{}, 0)
				// unmarshal each iteration of the array
for _, contentEntry := range val.Content[i+1].Content {
contentArray = append(contentArray, unmarshal(contentEntry))
}
tmp[val.Content[i].Value] = contentArray
case yaml.AliasNode:
if tt, ok := unmarshal(val.Content[i+1].Alias).(map[string]interface{}); ok {
tt["_kics_lines"] = getLines(val.Content[i+1], val.Content[i].Line)
utils.MergeMaps(tmp, tt)
}
if v, ok := unmarshal(val.Content[i+1].Alias).(string); ok {
tmp[val.Content[i].Value] = v
}
}
}
}
}
return tmp
}
// getLines creates the map containing the line information for the yaml Node
// def is the line to be used as "_kics__default"
func getLines(val *yaml.Node, def int) map[string]*LineObject {
lineMap := make(map[string]*LineObject)
// line information map
lineMap["_kics__default"] = &LineObject{
Line: def,
Arr: []map[string]*LineObject{},
}
// if yaml Node is an Array use func getSeqLines
if val.Kind == yaml.SequenceNode {
return getSeqLines(val, def)
}
// iterate two by two, since first iteration is the key and the second is the value
for i := 0; i < len(val.Content); i += 2 {
lineArr := make([]map[string]*LineObject, 0)
// in case the value iteration is an array call getLines for each iteration of the array
if val.Content[i+1].Kind == yaml.SequenceNode {
for _, contentEntry := range val.Content[i+1].Content {
defaultLine := val.Content[i].Line
if contentEntry.Kind == yaml.ScalarNode {
defaultLine = contentEntry.Line
} else if contentEntry.Kind == yaml.MappingNode && len(contentEntry.Content) > 0 {
defaultLine = contentEntry.Content[0].Line
}
lineArr = append(lineArr, getLines(contentEntry, defaultLine))
}
}
// line information map of each key of the yaml Node
lineMap["_kics_"+val.Content[i].Value] = &LineObject{
Line: val.Content[i].Line,
Arr: lineArr,
}
}
return lineMap
}
// getSeqLines iterates through the elements of an Array
// creating a map with each iteration lines information
func getSeqLines(val *yaml.Node, def int) map[string]*LineObject {
lineMap := make(map[string]*LineObject)
lineArr := make([]map[string]*LineObject, 0)
// get line information slice of every element in the array
for _, cont := range val.Content {
lineArr = append(lineArr, getLines(cont, cont.Line))
}
// create line information of array with its line and elements line information
lineMap["_kics__default"] = &LineObject{
Line: def,
Arr: lineArr,
}
return lineMap
}
// scalarNodeResolver transforms a ScalarNode value into its correct type
func scalarNodeResolver(val *yaml.Node) interface{} {
var transformed interface{} = val.Value
switch val.Tag {
case "!!bool":
transformed = transformBoolScalarNode(val.Value)
case "!!int":
v, err := strconv.Atoi(val.Value)
if err != nil {
log.Error().Msgf("failed to convert integer in yaml parser")
return val.Value
}
transformed = v
case "!!null":
transformed = nil
}
return transformed
}
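// Example (illustrative): tag-driven conversion of scalar values.
func exampleScalarNodeResolver() {
	intNode := &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!int", Value: "42"}
	boolNode := &yaml.Node{Kind: yaml.ScalarNode, Tag: "!!bool", Value: "true"}
	_ = scalarNodeResolver(intNode)  // 42 (int)
	_ = scalarNodeResolver(boolNode) // true (bool)
}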
// transformBoolScalarNode transforms a string value to its boolean representation
func transformBoolScalarNode(value string) bool {
switch value {
case "true", "True":
return true
default:
return false
}
}
package model
import (
"os"
"path/filepath"
"regexp"
"sort"
"strings"
"time"
"github.com/rs/zerolog/log"
)
// SeveritySummary contains the scan's result numbers: how many vulnerabilities of each severity were detected
type SeveritySummary struct {
ScanID string `json:"scan_id"`
SeverityCounters map[Severity]int `json:"severity_counters"`
TotalCounter int `json:"total_counter"`
TotalBOMResources int `json:"total_bom_resources"`
}
// VulnerableFile contains information of a vulnerable file and where the vulnerability was found
type VulnerableFile struct {
FileName string `json:"file_name"`
SimilarityID string `json:"similarity_id"`
OldSimilarityID string `json:"old_similarity_id,omitempty"`
Line int `json:"line"`
VulnLines *[]CodeLine `json:"-"`
ResourceType string `json:"resource_type,omitempty"`
ResourceName string `json:"resource_name,omitempty"`
IssueType IssueType `json:"issue_type"`
SearchKey string `json:"search_key"`
SearchLine int `json:"search_line"`
SearchValue string `json:"search_value"`
KeyExpectedValue string `json:"expected_value"`
KeyActualValue string `json:"actual_value"`
Value *string `json:"value,omitempty"`
Remediation string `json:"remediation,omitempty"`
RemediationType string `json:"remediation_type,omitempty"`
}
// QueryResult contains the ID, name, severity and other metadata of a query that tested positive, and a list of files that tested vulnerable
type QueryResult struct {
QueryName string `json:"query_name"`
QueryID string `json:"query_id"`
QueryURI string `json:"query_url"`
Severity Severity `json:"severity"`
Platform string `json:"platform"`
CWE string `json:"cwe,omitempty"`
CloudProvider string `json:"cloud_provider,omitempty"`
Category string `json:"category"`
Experimental bool `json:"experimental"`
Description string `json:"description"`
DescriptionID string `json:"description_id"`
CISDescriptionIDFormatted string `json:"cis_description_id,omitempty"`
CISDescriptionTitle string `json:"cis_description_title,omitempty"`
CISDescriptionTextFormatted string `json:"cis_description_text,omitempty"`
CISDescriptionID string `json:"cis_description_id_raw,omitempty"`
CISDescriptionText string `json:"cis_description_text_raw,omitempty"`
CISRationaleText string `json:"cis_description_rationale,omitempty"`
CISBenchmarkName string `json:"cis_benchmark_name,omitempty"`
CISBenchmarkVersion string `json:"cis_benchmark_version,omitempty"`
Files []VulnerableFile `json:"files"`
}
// QueryResultSlice is a slice of QueryResult
type QueryResultSlice []QueryResult
// Counters holds information about how many files were scanned, parsed or failed to scan, the total number of queries
// and how many queries failed to execute
type Counters struct {
ScannedFiles int `json:"files_scanned"`
ScannedFilesLines int `json:"lines_scanned"`
ParsedFiles int `json:"files_parsed"`
ParsedFilesLines int `json:"lines_parsed"`
IgnoredFilesLines int `json:"lines_ignored"`
FailedToScanFiles int `json:"files_failed_to_scan"`
TotalQueries int `json:"queries_total"`
FailedToExecuteQueries int `json:"queries_failed_to_execute"`
FailedSimilarityID int `json:"queries_failed_to_compute_similarity_id"`
}
// Times represents an object that contains the start and end time of the scan
type Times struct {
Start time.Time `json:"start"`
End time.Time `json:"end"`
}
// VersionResponse - is the model for the version response
type VersionResponse struct {
Latest bool `json:"is_latest"`
LatestVersionTag string `json:"latest_version"`
}
// Summary is a report of a single scan
type Summary struct {
Version string `json:"kics_version,omitempty"`
LatestVersion Version `json:"-"`
Counters
SeveritySummary
Times
ScannedPaths []string `json:"paths"`
Queries QueryResultSlice `json:"queries"`
Bom QueryResultSlice `json:"bill_of_materials,omitempty"`
FilePaths map[string]string `json:"-"`
}
// PathParameters - structure wraps the required fields for temporary path translation
type PathParameters struct {
ScannedPaths []string
PathExtractionMap map[string]ExtractedPathObject
}
var (
queryRegex = regexp.MustCompile(`\?([\w-]+(=[\w-]*)?(&[\w-]+(=[\w-]*)?)*)?`)
urlAuthRegex = regexp.MustCompile(`((ssh|https?)://)(\S+(:\S*)?@).*`)
)
const authGroupPosition = 3
func getRelativePath(basePath, filePath string) string {
var returnPath string
relativePath, err := filepath.Rel(basePath, filePath)
if err != nil {
returnPath = filePath
} else {
returnPath = relativePath
}
return returnPath
}
func replaceIfTemporaryPath(filePath string, pathExtractionMap map[string]ExtractedPathObject) string {
prettyPath := filePath
for key, val := range pathExtractionMap {
if strings.Contains(filePath, key) {
splittedPath := strings.Split(filePath, key)
if !val.LocalPath {
// remove authentication information from the URL
sanitizedURL := removeURLCredentials(val.Path)
// remove query parameters '?key=value&key2=value'
return filepath.FromSlash(queryRegex.ReplaceAllString(sanitizedURL, "") + splittedPath[1])
}
prettyPath = filepath.FromSlash(filepath.Base(val.Path) + splittedPath[1])
} else {
prettyPath = filePath
}
}
return prettyPath
}
func removeAllURLCredentials(pathExtractionMap map[string]ExtractedPathObject) []string {
sanitizedScannedPaths := make([]string, 0)
for _, val := range pathExtractionMap {
if !val.LocalPath {
sanitizedURL := removeURLCredentials(val.Path)
sanitizedScannedPaths = append(sanitizedScannedPaths, sanitizedURL)
} else {
sanitizedScannedPaths = append(sanitizedScannedPaths, val.Path)
}
}
return sanitizedScannedPaths
}
func removeURLCredentials(url string) string {
authGroup := ""
groups := urlAuthRegex.FindStringSubmatch(url)
// credentials are present in the URL
if len(groups) > authGroupPosition {
authGroup = groups[authGroupPosition]
}
return strings.Replace(url, authGroup, "", 1)
}
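// Example (illustrative): credentials embedded in a remote URL are stripped
// before the path is reported; the URL below is hypothetical.
func exampleRemoveURLCredentials() {
	_ = removeURLCredentials("https://user:token@github.com/org/repo.git")
	// -> "https://github.com/org/repo.git"
}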
func resolvePath(filePath string, pathExtractionMap map[string]ExtractedPathObject) string {
var returnPath string
returnPath = replaceIfTemporaryPath(filepath.FromSlash(filePath), pathExtractionMap)
pwd, err := os.Getwd()
if err != nil {
log.Error().Msgf("Unable to get current working dir %s", err)
return returnPath
}
returnPath = getRelativePath(pwd, returnPath)
return returnPath
}
// CreateSummary creates a report for a single scan, based on its scanID
func CreateSummary(counters Counters, vulnerabilities []Vulnerability,
scanID string, pathExtractionMap map[string]ExtractedPathObject, version Version) Summary {
log.Debug().Msg("model.CreateSummary()")
q := make(map[string]QueryResult, len(vulnerabilities))
severitySummary := SeveritySummary{
ScanID: scanID,
}
filePaths := make(map[string]string)
for i := range vulnerabilities {
item := vulnerabilities[i]
if _, ok := q[item.QueryID]; !ok {
q[item.QueryID] = QueryResult{
QueryName: item.QueryName,
QueryID: item.QueryID,
Severity: item.Severity,
QueryURI: item.QueryURI,
Platform: item.Platform,
CWE: item.CWE,
Experimental: item.Experimental,
CloudProvider: strings.ToUpper(item.CloudProvider),
Category: item.Category,
Description: item.Description,
DescriptionID: item.DescriptionID,
}
}
resolvedPath := resolvePath(item.FileName, pathExtractionMap)
qItem := q[item.QueryID]
qItem.Files = append(qItem.Files, VulnerableFile{
FileName: resolvedPath,
SimilarityID: item.SimilarityID,
OldSimilarityID: item.OldSimilarityID,
Line: item.Line,
VulnLines: item.VulnLines,
ResourceType: item.ResourceType,
ResourceName: item.ResourceName,
IssueType: item.IssueType,
SearchKey: item.SearchKey,
SearchValue: item.SearchValue,
SearchLine: item.SearchLine,
KeyExpectedValue: item.KeyExpectedValue,
KeyActualValue: item.KeyActualValue,
Value: item.Value,
Remediation: item.Remediation,
RemediationType: item.RemediationType,
})
filePaths[resolvedPath] = item.FileName
q[item.QueryID] = qItem
}
queries := make([]QueryResult, 0, len(q))
sevs := map[Severity]int{SeverityTrace: 0, SeverityInfo: 0, SeverityLow: 0, SeverityMedium: 0, SeverityHigh: 0, SeverityCritical: 0}
for idx := range q {
sevs[q[idx].Severity] += len(q[idx].Files)
if q[idx].Severity == SeverityTrace {
continue
}
queries = append(queries, q[idx])
severitySummary.TotalCounter += len(q[idx].Files)
}
severityOrder := map[Severity]int{
SeverityTrace: 5,
SeverityInfo: 4,
SeverityLow: 3,
SeverityMedium: 2,
SeverityHigh: 1,
SeverityCritical: 0,
}
sort.Slice(queries, func(i, j int) bool {
if severityOrder[queries[i].Severity] == severityOrder[queries[j].Severity] {
return queries[i].QueryName < queries[j].QueryName
}
return severityOrder[queries[i].Severity] < severityOrder[queries[j].Severity]
})
materials := make([]QueryResult, 0, len(q))
for idx := range q {
if q[idx].Severity == SeverityTrace {
materials = append(materials, q[idx])
severitySummary.TotalBOMResources += len(q[idx].Files)
}
}
severitySummary.SeverityCounters = sevs
return Summary{
Bom: materials,
Counters: counters,
Queries: queries,
SeveritySummary: severitySummary,
ScannedPaths: removeAllURLCredentials(pathExtractionMap),
LatestVersion: version,
FilePaths: filePaths,
}
}
package comments
import (
"regexp"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
)
func getKicsIgnore(comment string) string {
commentLower := model.KICSCommentRgxp.ReplaceAllString(strings.ToLower(comment), "")
commentLower = strings.Trim(commentLower, "\r")
commentLower = strings.Trim(commentLower, "\n")
return commentLower
}
func getIgnoreLinesFromBlock(lines []string, ignoreBlockLine int) int {
i := ignoreBlockLine + 1
if i >= len(lines) {
return ignoreBlockLine
}
// Check if the next line is a group: [group_name]
if match, _ := regexp.MatchString(`^\s*\[`, lines[i]); !match {
return ignoreBlockLine
}
	// Find the end of the block (the next group or the end of the file)
i += 1
nextGroup := regexp.MustCompile(`^\s*\[`)
for ; i < len(lines); i++ {
if nextGroup.MatchString(lines[i]) {
return i - 1
}
}
return i - 1
}
func GetIgnoreLines(lines []string) []int {
ignoreLines := make([]int, 0)
comment := regexp.MustCompile(`^[#;]`)
for i, line := range lines {
if model.KICSCommentRgxp.MatchString(line) {
kicsIgnore := getKicsIgnore(line)
switch model.CommentCommand(kicsIgnore) {
case model.IgnoreLine:
if i+1 < len(lines) {
ignoreLines = append(ignoreLines, i, i+1)
} else {
ignoreLines = append(ignoreLines, i)
}
case model.IgnoreBlock:
until := getIgnoreLinesFromBlock(lines, i)
if until > i {
ignoreLines = append(ignoreLines, model.Range(i, until)...)
} else {
ignoreLines = append(ignoreLines, i)
}
}
} else if comment.MatchString(line) {
ignoreLines = append(ignoreLines, i+1)
}
}
return ignoreLines
}
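// Example (illustrative): a "kics-scan ignore-line" comment marks both the
// comment line and the line that follows it (indices are zero-based here).
func exampleGetIgnoreLines() {
	lines := []string{
		"# kics-scan ignore-line",
		"[defaults]",
		"host_key_checking = False",
	}
	_ = GetIgnoreLines(lines) // [0 1]
}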
package ansibleconfig
import (
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser/ansible/ini/comments"
"github.com/bigkevmcd/go-configparser"
)
// Parser defines a parser type
type Parser struct {
}
// Resolve returns the file content as-is; this parser does not resolve references
func (p *Parser) Resolve(fileContent []byte, _ string, _ bool, _ int) ([]byte, error) {
return fileContent, nil
}
// Parse parses a .cfg/.conf file and returns it as a Document
func (p *Parser) Parse(filePath string, fileContent []byte) ([]model.Document, []int, error) {
model.NewIgnore.Reset()
reader := strings.NewReader(string(fileContent))
	delimiters := configparser.Delimiters("=")
	inline := configparser.InlineCommentPrefixes([]string{";"})
	config, err := configparser.ParseReaderWithOptions(reader, delimiters, inline)
if err != nil {
return nil, nil, err
}
doc := make(map[string]interface{})
doc["groups"] = refactorConfig(config)
ignoreLines := comments.GetIgnoreLines(strings.Split(string(fileContent), "\n"))
return []model.Document{doc}, ignoreLines, nil
}
// refactorConfig removes all extra information and tries to convert values to their native types (bool, float, list)
func refactorConfig(config *configparser.ConfigParser) (doc *model.Document) {
doc = emptyDocument()
for _, section := range config.Sections() {
dict, err := config.Items(section)
if err != nil {
continue
}
dictRefact := make(map[string]interface{})
for key, value := range dict {
if boolValue, err := strconv.ParseBool(value); err == nil {
dictRefact[key] = boolValue
} else if floatValue, err := strconv.ParseFloat(value, 64); err == nil {
dictRefact[key] = floatValue
} else if strings.Contains(value, ",") {
elements := strings.Split(value, ",")
for i := 0; i < len(elements); i++ {
elements[i] = strings.TrimSpace(elements[i])
}
dictRefact[key] = elements
} else if value == "[]" {
dictRefact[key] = []string{}
} else {
dictRefact[key] = value
}
}
(*doc)[section] = dictRefact
}
return doc
}
// SupportedExtensions returns the extensions supported by this parser: .cfg and .conf
func (p *Parser) SupportedExtensions() []string {
return []string{".cfg", ".conf"}
}
// SupportedTypes returns types supported by this parser, which is ansible
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{
"ansible": true,
}
}
// GetKind returns CFG constant kind
func (p *Parser) GetKind() model.FileKind {
return model.KindCFG
}
// GetCommentToken returns the comment token of CFG/CONF - #
func (p *Parser) GetCommentToken() string {
return "#"
}
// GetResolvedFiles returns resolved files
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return make(map[string]model.ResolvedFile)
}
// StringifyContent converts original content into string formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
func emptyDocument() *model.Document {
return &model.Document{}
}
package hosts
import (
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser/ansible/ini/comments"
"github.com/relex/aini"
)
// Parser defines a parser type
type Parser struct {
}
// Resolve returns the file content as-is; this parser does not resolve references
func (p *Parser) Resolve(fileContent []byte, _ string, _ bool, _ int) ([]byte, error) {
return fileContent, nil
}
// Parse parses an .ini inventory file and returns it as a Document
func (p *Parser) Parse(_ string, fileContent []byte) ([]model.Document, []int, error) {
model.NewIgnore.Reset()
inventoryReader := strings.NewReader(string(fileContent))
inventory, err := aini.Parse(inventoryReader)
if err != nil {
return nil, nil, err
}
doc := model.Document{}
childrenMap, _ := refactorInv(inventory.Groups, 1)
allMap := emptyDocument()
(*allMap)["children"] = childrenMap
doc["all"] = allMap
ignoreLines := comments.GetIgnoreLines(strings.Split(string(fileContent), "\n"))
return []model.Document{doc}, ignoreLines, nil
}
// refactorInv removes all extra information, keeping only the group/host hierarchy and variables
func refactorInv(groups map[string]*aini.Group, parentSize int) (doc *model.Document, children map[string]bool) {
doc = emptyDocument()
children = make(map[string]bool)
for _, group := range groups {
if parentSize != len(group.Parents) {
continue
}
groupMap := emptyDocument()
ans, childGroup := refactorInv(group.Children, parentSize+1)
if len(*ans) > 0 {
(*groupMap)["children"] = ans
}
ans = refactorHosts(group.Hosts, childGroup)
if len(*ans) > 0 {
(*groupMap)["hosts"] = ans
}
children[group.Name] = true
for child := range childGroup {
children[child] = true
}
(*doc)[group.Name] = groupMap
}
return doc, children
}
// refactorHosts only adds hosts that aren't already defined in children
func refactorHosts(hosts map[string]*aini.Host, children map[string]bool) *model.Document {
hostMap := emptyDocument()
for _, host := range hosts {
if !children[host.Name] {
(*hostMap)[host.Name] = refactorVars(host.Vars)
children[host.Name] = true
}
}
return hostMap
}
// refactorVars tries to convert each value to a float or bool and adds all vars
func refactorVars(vars map[string]string) *model.Document {
	varMap := emptyDocument()
	for key, value := range vars {
		if valueFloat, err := strconv.ParseFloat(value, 64); err == nil {
			(*varMap)[key] = valueFloat
		} else if valueBool, err := strconv.ParseBool(value); err == nil {
			(*varMap)[key] = valueBool
		} else {
			(*varMap)[key] = value
		}
	}
	return varMap
}
// SupportedExtensions returns extensions supported by this parser, which is only the .ini extension
func (p *Parser) SupportedExtensions() []string {
return []string{".ini"}
}
// SupportedTypes returns the types supported by this parser (only ansible)
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{
"ansible": true,
}
}
// GetKind returns INI constant kind
func (p *Parser) GetKind() model.FileKind {
return model.KindINI
}
// GetCommentToken returns the comment token of INI - #
func (p *Parser) GetCommentToken() string {
return "#"
}
// GetResolvedFiles returns resolved files
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return make(map[string]model.ResolvedFile)
}
// StringifyContent converts the original content into a string-formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
func emptyDocument() *model.Document {
return &model.Document{}
}
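// hostsExample is a hedged usage sketch, not part of the original source: it
// shows the nesting Parse builds, with every group placed under all.children
// and host vars coerced by refactorVars. The inventory content is illustrative.
func hostsExample() (model.Document, error) {
	content := []byte("[web]\nweb1 ansible_port=8080\n\n[db]\ndb1 backup=true\n")
	p := &Parser{}
	docs, _, err := p.Parse("hosts.ini", content)
	if err != nil {
		return nil, err
	}
	// docs[0]["all"] is roughly (aini may add implicit groups such as "ungrouped"):
	// {"children": {"web": {"hosts": {"web1": {"ansible_port": 8080.0}}},
	//               "db":  {"hosts": {"db1":  {"backup": true}}}, ...}}
	return docs[0], nil
}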
// Code generated from bicep.g4 by ANTLR 4.13.1. DO NOT EDIT.
package parser // bicep
import "github.com/antlr4-go/antlr/v4"
type BasebicepVisitor struct {
*antlr.BaseParseTreeVisitor
}
func (v *BasebicepVisitor) VisitProgram(ctx *ProgramContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitStatement(ctx *StatementContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitTargetScopeDecl(ctx *TargetScopeDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitImportDecl(ctx *ImportDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitMetadataDecl(ctx *MetadataDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitParameterDecl(ctx *ParameterDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitParameterDefaultValue(ctx *ParameterDefaultValueContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitTypeDecl(ctx *TypeDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitVariableDecl(ctx *VariableDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitResourceDecl(ctx *ResourceDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitModuleDecl(ctx *ModuleDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitOutputDecl(ctx *OutputDeclContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitIfCondition(ctx *IfConditionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitForExpression(ctx *ForExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitForVariableBlock(ctx *ForVariableBlockContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitForBody(ctx *ForBodyContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitInterpString(ctx *InterpStringContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitExpression(ctx *ExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitLambdaExpression(ctx *LambdaExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitLogicCharacter(ctx *LogicCharacterContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitPrimaryExpression(ctx *PrimaryExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitParenthesizedExpression(ctx *ParenthesizedExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitTypeExpression(ctx *TypeExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitLiteralValue(ctx *LiteralValueContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitObject(ctx *ObjectContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitObjectProperty(ctx *ObjectPropertyContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitArray(ctx *ArrayContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitArrayItem(ctx *ArrayItemContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitDecorator(ctx *DecoratorContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitDecoratorExpression(ctx *DecoratorExpressionContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitFunctionCall(ctx *FunctionCallContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitArgumentList(ctx *ArgumentListContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BasebicepVisitor) VisitIdentifier(ctx *IdentifierContext) interface{} {
return v.VisitChildren(ctx)
}
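// countParams is a hedged sketch, not generated code: in the Go ANTLR runtime
// the embedded antlr.BaseParseTreeVisitor's VisitChildren does not recurse by
// itself, so a simple way to scan a parse tree is to walk antlr.Tree children
// directly. ParameterDeclContext is the generated context for 'param' statements.
func countParams(t antlr.Tree) int {
	count := 0
	if _, ok := t.(*ParameterDeclContext); ok {
		count++
	}
	for _, child := range t.GetChildren() {
		count += countParams(child)
	}
	return count
}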
// Code generated from bicep.g4 by ANTLR 4.13.1. DO NOT EDIT.
package parser
import (
"fmt"
"github.com/antlr4-go/antlr/v4"
"sync"
"unicode"
)
// Suppress unused import error
var _ = fmt.Printf
var _ = sync.Once{}
var _ = unicode.IsLetter
type bicepLexer struct {
*antlr.BaseLexer
channelNames []string
modeNames []string
// TODO: EOF string
}
var BicepLexerLexerStaticData struct {
once sync.Once
serializedATN []int32
ChannelNames []string
ModeNames []string
LiteralNames []string
SymbolicNames []string
RuleNames []string
PredictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
func biceplexerLexerInit() {
staticData := &BicepLexerLexerStaticData
staticData.ChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
staticData.ModeNames = []string{
"DEFAULT_MODE",
}
staticData.LiteralNames = []string{
"", "", "'@'", "','", "'['", "']'", "'('", "')'", "'.'", "'|'", "",
"'='", "'{'", "'}'", "'param'", "'var'", "'true'", "'false'", "'null'",
"'array'", "'object'", "'resource'", "'output'", "'targetScope'", "'import'",
"'with'", "'as'", "'metadata'", "'existing'", "'type'", "'module'",
"", "", "", "", "'string'", "'int'", "'bool'", "'if'", "'for'", "'in'",
"'?'", "'>'", "'>='", "'<'", "'<='", "'=='", "'!='", "'=>'",
}
staticData.SymbolicNames = []string{
"", "MULTILINE_STRING", "AT", "COMMA", "OBRACK", "CBRACK", "OPAR", "CPAR",
"DOT", "PIPE", "COL", "ASSIGN", "OBRACE", "CBRACE", "PARAM", "VAR",
"TRUE", "FALSE", "NULL", "ARRAY", "OBJECT", "RESOURCE", "OUTPUT", "TARGET_SCOPE",
"IMPORT", "WITH", "AS", "METADATA", "EXISTING", "TYPE", "MODULE", "STRING_LEFT_PIECE",
"STRING_MIDDLE_PIECE", "STRING_RIGHT_PIECE", "STRING_COMPLETE", "STRING",
"INT", "BOOL", "IF", "FOR", "IN", "QMARK", "GT", "GTE", "LT", "LTE",
"EQ", "NEQ", "ARROW", "IDENTIFIER", "NUMBER", "NL", "SINGLE_LINE_COMMENT",
"MULTI_LINE_COMMENT", "SPACES", "UNKNOWN",
}
staticData.RuleNames = []string{
"MULTILINE_STRING", "AT", "COMMA", "OBRACK", "CBRACK", "OPAR", "CPAR",
"DOT", "PIPE", "COL", "ASSIGN", "OBRACE", "CBRACE", "PARAM", "VAR",
"TRUE", "FALSE", "NULL", "ARRAY", "OBJECT", "RESOURCE", "OUTPUT", "TARGET_SCOPE",
"IMPORT", "WITH", "AS", "METADATA", "EXISTING", "TYPE", "MODULE", "STRING_LEFT_PIECE",
"STRING_MIDDLE_PIECE", "STRING_RIGHT_PIECE", "STRING_COMPLETE", "STRING",
"INT", "BOOL", "IF", "FOR", "IN", "QMARK", "GT", "GTE", "LT", "LTE",
"EQ", "NEQ", "ARROW", "IDENTIFIER", "NUMBER", "NL", "SINGLE_LINE_COMMENT",
"MULTI_LINE_COMMENT", "SPACES", "UNKNOWN", "STRINGCHAR", "ESCAPE", "HEX",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 55, 434, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7,
41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46,
2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2,
52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57,
7, 57, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 5, 0, 123, 8, 0, 10, 0, 12, 0, 126,
9, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4,
1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9,
3, 9, 151, 8, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13,
1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1,
15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17,
1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1,
19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20,
1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1,
21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22,
1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1,
24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26,
1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1,
27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29,
1, 29, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 5, 30, 273, 8, 30, 10,
30, 12, 30, 276, 9, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 5, 31, 283,
8, 31, 10, 31, 12, 31, 286, 9, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 5,
32, 293, 8, 32, 10, 32, 12, 32, 296, 9, 32, 1, 32, 1, 32, 1, 33, 1, 33,
5, 33, 302, 8, 33, 10, 33, 12, 33, 305, 9, 33, 1, 33, 1, 33, 1, 34, 1,
34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36,
1, 36, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1,
38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42,
1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1,
46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 5, 48, 358, 8, 48, 10, 48, 12, 48,
361, 9, 48, 1, 49, 4, 49, 364, 8, 49, 11, 49, 12, 49, 365, 1, 49, 1, 49,
4, 49, 370, 8, 49, 11, 49, 12, 49, 371, 3, 49, 374, 8, 49, 1, 50, 4, 50,
377, 8, 50, 11, 50, 12, 50, 378, 1, 51, 1, 51, 1, 51, 1, 51, 5, 51, 385,
8, 51, 10, 51, 12, 51, 388, 9, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1,
52, 5, 52, 396, 8, 52, 10, 52, 12, 52, 399, 9, 52, 1, 52, 1, 52, 1, 52,
1, 52, 1, 52, 1, 53, 4, 53, 407, 8, 53, 11, 53, 12, 53, 408, 1, 53, 1,
53, 1, 54, 1, 54, 1, 55, 1, 55, 3, 55, 417, 8, 55, 1, 56, 1, 56, 1, 56,
1, 56, 1, 56, 1, 56, 4, 56, 425, 8, 56, 11, 56, 12, 56, 426, 1, 56, 1,
56, 3, 56, 431, 8, 56, 1, 57, 1, 57, 2, 124, 397, 0, 58, 1, 1, 3, 2, 5,
3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25,
13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43,
22, 45, 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 29, 59, 30, 61,
31, 63, 32, 65, 33, 67, 34, 69, 35, 71, 36, 73, 37, 75, 38, 77, 39, 79,
40, 81, 41, 83, 42, 85, 43, 87, 44, 89, 45, 91, 46, 93, 47, 95, 48, 97,
49, 99, 50, 101, 51, 103, 52, 105, 53, 107, 54, 109, 55, 111, 0, 113, 0,
115, 0, 1, 0, 8, 3, 0, 65, 90, 95, 95, 97, 122, 4, 0, 48, 57, 65, 90, 95,
95, 97, 122, 1, 0, 48, 57, 2, 0, 10, 10, 13, 13, 2, 0, 9, 9, 32, 32, 5,
0, 9, 10, 13, 13, 36, 36, 39, 39, 92, 92, 6, 0, 36, 36, 39, 39, 92, 92,
110, 110, 114, 114, 116, 116, 3, 0, 48, 57, 65, 70, 97, 102, 447, 0, 1,
1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9,
1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0,
17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0,
0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0,
0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0,
0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1,
0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55,
1, 0, 0, 0, 0, 57, 1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0,
63, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0,
0, 71, 1, 0, 0, 0, 0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0,
0, 0, 79, 1, 0, 0, 0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0,
0, 0, 0, 87, 1, 0, 0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1,
0, 0, 0, 0, 95, 1, 0, 0, 0, 0, 97, 1, 0, 0, 0, 0, 99, 1, 0, 0, 0, 0, 101,
1, 0, 0, 0, 0, 103, 1, 0, 0, 0, 0, 105, 1, 0, 0, 0, 0, 107, 1, 0, 0, 0,
0, 109, 1, 0, 0, 0, 1, 117, 1, 0, 0, 0, 3, 131, 1, 0, 0, 0, 5, 133, 1,
0, 0, 0, 7, 135, 1, 0, 0, 0, 9, 137, 1, 0, 0, 0, 11, 139, 1, 0, 0, 0, 13,
141, 1, 0, 0, 0, 15, 143, 1, 0, 0, 0, 17, 145, 1, 0, 0, 0, 19, 150, 1,
0, 0, 0, 21, 152, 1, 0, 0, 0, 23, 154, 1, 0, 0, 0, 25, 156, 1, 0, 0, 0,
27, 158, 1, 0, 0, 0, 29, 164, 1, 0, 0, 0, 31, 168, 1, 0, 0, 0, 33, 173,
1, 0, 0, 0, 35, 179, 1, 0, 0, 0, 37, 184, 1, 0, 0, 0, 39, 190, 1, 0, 0,
0, 41, 197, 1, 0, 0, 0, 43, 206, 1, 0, 0, 0, 45, 213, 1, 0, 0, 0, 47, 225,
1, 0, 0, 0, 49, 232, 1, 0, 0, 0, 51, 237, 1, 0, 0, 0, 53, 240, 1, 0, 0,
0, 55, 249, 1, 0, 0, 0, 57, 258, 1, 0, 0, 0, 59, 263, 1, 0, 0, 0, 61, 270,
1, 0, 0, 0, 63, 280, 1, 0, 0, 0, 65, 290, 1, 0, 0, 0, 67, 299, 1, 0, 0,
0, 69, 308, 1, 0, 0, 0, 71, 315, 1, 0, 0, 0, 73, 319, 1, 0, 0, 0, 75, 324,
1, 0, 0, 0, 77, 327, 1, 0, 0, 0, 79, 331, 1, 0, 0, 0, 81, 334, 1, 0, 0,
0, 83, 336, 1, 0, 0, 0, 85, 338, 1, 0, 0, 0, 87, 341, 1, 0, 0, 0, 89, 343,
1, 0, 0, 0, 91, 346, 1, 0, 0, 0, 93, 349, 1, 0, 0, 0, 95, 352, 1, 0, 0,
0, 97, 355, 1, 0, 0, 0, 99, 363, 1, 0, 0, 0, 101, 376, 1, 0, 0, 0, 103,
380, 1, 0, 0, 0, 105, 391, 1, 0, 0, 0, 107, 406, 1, 0, 0, 0, 109, 412,
1, 0, 0, 0, 111, 416, 1, 0, 0, 0, 113, 418, 1, 0, 0, 0, 115, 432, 1, 0,
0, 0, 117, 118, 5, 39, 0, 0, 118, 119, 5, 39, 0, 0, 119, 120, 5, 39, 0,
0, 120, 124, 1, 0, 0, 0, 121, 123, 9, 0, 0, 0, 122, 121, 1, 0, 0, 0, 123,
126, 1, 0, 0, 0, 124, 125, 1, 0, 0, 0, 124, 122, 1, 0, 0, 0, 125, 127,
1, 0, 0, 0, 126, 124, 1, 0, 0, 0, 127, 128, 5, 39, 0, 0, 128, 129, 5, 39,
0, 0, 129, 130, 5, 39, 0, 0, 130, 2, 1, 0, 0, 0, 131, 132, 5, 64, 0, 0,
132, 4, 1, 0, 0, 0, 133, 134, 5, 44, 0, 0, 134, 6, 1, 0, 0, 0, 135, 136,
5, 91, 0, 0, 136, 8, 1, 0, 0, 0, 137, 138, 5, 93, 0, 0, 138, 10, 1, 0,
0, 0, 139, 140, 5, 40, 0, 0, 140, 12, 1, 0, 0, 0, 141, 142, 5, 41, 0, 0,
142, 14, 1, 0, 0, 0, 143, 144, 5, 46, 0, 0, 144, 16, 1, 0, 0, 0, 145, 146,
5, 124, 0, 0, 146, 18, 1, 0, 0, 0, 147, 151, 5, 58, 0, 0, 148, 149, 5,
58, 0, 0, 149, 151, 5, 58, 0, 0, 150, 147, 1, 0, 0, 0, 150, 148, 1, 0,
0, 0, 151, 20, 1, 0, 0, 0, 152, 153, 5, 61, 0, 0, 153, 22, 1, 0, 0, 0,
154, 155, 5, 123, 0, 0, 155, 24, 1, 0, 0, 0, 156, 157, 5, 125, 0, 0, 157,
26, 1, 0, 0, 0, 158, 159, 5, 112, 0, 0, 159, 160, 5, 97, 0, 0, 160, 161,
5, 114, 0, 0, 161, 162, 5, 97, 0, 0, 162, 163, 5, 109, 0, 0, 163, 28, 1,
0, 0, 0, 164, 165, 5, 118, 0, 0, 165, 166, 5, 97, 0, 0, 166, 167, 5, 114,
0, 0, 167, 30, 1, 0, 0, 0, 168, 169, 5, 116, 0, 0, 169, 170, 5, 114, 0,
0, 170, 171, 5, 117, 0, 0, 171, 172, 5, 101, 0, 0, 172, 32, 1, 0, 0, 0,
173, 174, 5, 102, 0, 0, 174, 175, 5, 97, 0, 0, 175, 176, 5, 108, 0, 0,
176, 177, 5, 115, 0, 0, 177, 178, 5, 101, 0, 0, 178, 34, 1, 0, 0, 0, 179,
180, 5, 110, 0, 0, 180, 181, 5, 117, 0, 0, 181, 182, 5, 108, 0, 0, 182,
183, 5, 108, 0, 0, 183, 36, 1, 0, 0, 0, 184, 185, 5, 97, 0, 0, 185, 186,
5, 114, 0, 0, 186, 187, 5, 114, 0, 0, 187, 188, 5, 97, 0, 0, 188, 189,
5, 121, 0, 0, 189, 38, 1, 0, 0, 0, 190, 191, 5, 111, 0, 0, 191, 192, 5,
98, 0, 0, 192, 193, 5, 106, 0, 0, 193, 194, 5, 101, 0, 0, 194, 195, 5,
99, 0, 0, 195, 196, 5, 116, 0, 0, 196, 40, 1, 0, 0, 0, 197, 198, 5, 114,
0, 0, 198, 199, 5, 101, 0, 0, 199, 200, 5, 115, 0, 0, 200, 201, 5, 111,
0, 0, 201, 202, 5, 117, 0, 0, 202, 203, 5, 114, 0, 0, 203, 204, 5, 99,
0, 0, 204, 205, 5, 101, 0, 0, 205, 42, 1, 0, 0, 0, 206, 207, 5, 111, 0,
0, 207, 208, 5, 117, 0, 0, 208, 209, 5, 116, 0, 0, 209, 210, 5, 112, 0,
0, 210, 211, 5, 117, 0, 0, 211, 212, 5, 116, 0, 0, 212, 44, 1, 0, 0, 0,
213, 214, 5, 116, 0, 0, 214, 215, 5, 97, 0, 0, 215, 216, 5, 114, 0, 0,
216, 217, 5, 103, 0, 0, 217, 218, 5, 101, 0, 0, 218, 219, 5, 116, 0, 0,
219, 220, 5, 83, 0, 0, 220, 221, 5, 99, 0, 0, 221, 222, 5, 111, 0, 0, 222,
223, 5, 112, 0, 0, 223, 224, 5, 101, 0, 0, 224, 46, 1, 0, 0, 0, 225, 226,
5, 105, 0, 0, 226, 227, 5, 109, 0, 0, 227, 228, 5, 112, 0, 0, 228, 229,
5, 111, 0, 0, 229, 230, 5, 114, 0, 0, 230, 231, 5, 116, 0, 0, 231, 48,
1, 0, 0, 0, 232, 233, 5, 119, 0, 0, 233, 234, 5, 105, 0, 0, 234, 235, 5,
116, 0, 0, 235, 236, 5, 104, 0, 0, 236, 50, 1, 0, 0, 0, 237, 238, 5, 97,
0, 0, 238, 239, 5, 115, 0, 0, 239, 52, 1, 0, 0, 0, 240, 241, 5, 109, 0,
0, 241, 242, 5, 101, 0, 0, 242, 243, 5, 116, 0, 0, 243, 244, 5, 97, 0,
0, 244, 245, 5, 100, 0, 0, 245, 246, 5, 97, 0, 0, 246, 247, 5, 116, 0,
0, 247, 248, 5, 97, 0, 0, 248, 54, 1, 0, 0, 0, 249, 250, 5, 101, 0, 0,
250, 251, 5, 120, 0, 0, 251, 252, 5, 105, 0, 0, 252, 253, 5, 115, 0, 0,
253, 254, 5, 116, 0, 0, 254, 255, 5, 105, 0, 0, 255, 256, 5, 110, 0, 0,
256, 257, 5, 103, 0, 0, 257, 56, 1, 0, 0, 0, 258, 259, 5, 116, 0, 0, 259,
260, 5, 121, 0, 0, 260, 261, 5, 112, 0, 0, 261, 262, 5, 101, 0, 0, 262,
58, 1, 0, 0, 0, 263, 264, 5, 109, 0, 0, 264, 265, 5, 111, 0, 0, 265, 266,
5, 100, 0, 0, 266, 267, 5, 117, 0, 0, 267, 268, 5, 108, 0, 0, 268, 269,
5, 101, 0, 0, 269, 60, 1, 0, 0, 0, 270, 274, 5, 39, 0, 0, 271, 273, 3,
111, 55, 0, 272, 271, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0,
0, 0, 274, 275, 1, 0, 0, 0, 275, 277, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0,
277, 278, 5, 36, 0, 0, 278, 279, 5, 123, 0, 0, 279, 62, 1, 0, 0, 0, 280,
284, 5, 125, 0, 0, 281, 283, 3, 111, 55, 0, 282, 281, 1, 0, 0, 0, 283,
286, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 287,
1, 0, 0, 0, 286, 284, 1, 0, 0, 0, 287, 288, 5, 36, 0, 0, 288, 289, 5, 123,
0, 0, 289, 64, 1, 0, 0, 0, 290, 294, 5, 125, 0, 0, 291, 293, 3, 111, 55,
0, 292, 291, 1, 0, 0, 0, 293, 296, 1, 0, 0, 0, 294, 292, 1, 0, 0, 0, 294,
295, 1, 0, 0, 0, 295, 297, 1, 0, 0, 0, 296, 294, 1, 0, 0, 0, 297, 298,
5, 39, 0, 0, 298, 66, 1, 0, 0, 0, 299, 303, 5, 39, 0, 0, 300, 302, 3, 111,
55, 0, 301, 300, 1, 0, 0, 0, 302, 305, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0,
303, 304, 1, 0, 0, 0, 304, 306, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 306,
307, 5, 39, 0, 0, 307, 68, 1, 0, 0, 0, 308, 309, 5, 115, 0, 0, 309, 310,
5, 116, 0, 0, 310, 311, 5, 114, 0, 0, 311, 312, 5, 105, 0, 0, 312, 313,
5, 110, 0, 0, 313, 314, 5, 103, 0, 0, 314, 70, 1, 0, 0, 0, 315, 316, 5,
105, 0, 0, 316, 317, 5, 110, 0, 0, 317, 318, 5, 116, 0, 0, 318, 72, 1,
0, 0, 0, 319, 320, 5, 98, 0, 0, 320, 321, 5, 111, 0, 0, 321, 322, 5, 111,
0, 0, 322, 323, 5, 108, 0, 0, 323, 74, 1, 0, 0, 0, 324, 325, 5, 105, 0,
0, 325, 326, 5, 102, 0, 0, 326, 76, 1, 0, 0, 0, 327, 328, 5, 102, 0, 0,
328, 329, 5, 111, 0, 0, 329, 330, 5, 114, 0, 0, 330, 78, 1, 0, 0, 0, 331,
332, 5, 105, 0, 0, 332, 333, 5, 110, 0, 0, 333, 80, 1, 0, 0, 0, 334, 335,
5, 63, 0, 0, 335, 82, 1, 0, 0, 0, 336, 337, 5, 62, 0, 0, 337, 84, 1, 0,
0, 0, 338, 339, 5, 62, 0, 0, 339, 340, 5, 61, 0, 0, 340, 86, 1, 0, 0, 0,
341, 342, 5, 60, 0, 0, 342, 88, 1, 0, 0, 0, 343, 344, 5, 60, 0, 0, 344,
345, 5, 61, 0, 0, 345, 90, 1, 0, 0, 0, 346, 347, 5, 61, 0, 0, 347, 348,
5, 61, 0, 0, 348, 92, 1, 0, 0, 0, 349, 350, 5, 33, 0, 0, 350, 351, 5, 61,
0, 0, 351, 94, 1, 0, 0, 0, 352, 353, 5, 61, 0, 0, 353, 354, 5, 62, 0, 0,
354, 96, 1, 0, 0, 0, 355, 359, 7, 0, 0, 0, 356, 358, 7, 1, 0, 0, 357, 356,
1, 0, 0, 0, 358, 361, 1, 0, 0, 0, 359, 357, 1, 0, 0, 0, 359, 360, 1, 0,
0, 0, 360, 98, 1, 0, 0, 0, 361, 359, 1, 0, 0, 0, 362, 364, 7, 2, 0, 0,
363, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 363, 1, 0, 0, 0, 365,
366, 1, 0, 0, 0, 366, 373, 1, 0, 0, 0, 367, 369, 5, 46, 0, 0, 368, 370,
7, 2, 0, 0, 369, 368, 1, 0, 0, 0, 370, 371, 1, 0, 0, 0, 371, 369, 1, 0,
0, 0, 371, 372, 1, 0, 0, 0, 372, 374, 1, 0, 0, 0, 373, 367, 1, 0, 0, 0,
373, 374, 1, 0, 0, 0, 374, 100, 1, 0, 0, 0, 375, 377, 7, 3, 0, 0, 376,
375, 1, 0, 0, 0, 377, 378, 1, 0, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379,
1, 0, 0, 0, 379, 102, 1, 0, 0, 0, 380, 381, 5, 47, 0, 0, 381, 382, 5, 47,
0, 0, 382, 386, 1, 0, 0, 0, 383, 385, 8, 3, 0, 0, 384, 383, 1, 0, 0, 0,
385, 388, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387,
389, 1, 0, 0, 0, 388, 386, 1, 0, 0, 0, 389, 390, 6, 51, 0, 0, 390, 104,
1, 0, 0, 0, 391, 392, 5, 47, 0, 0, 392, 393, 5, 42, 0, 0, 393, 397, 1,
0, 0, 0, 394, 396, 9, 0, 0, 0, 395, 394, 1, 0, 0, 0, 396, 399, 1, 0, 0,
0, 397, 398, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 400, 1, 0, 0, 0, 399,
397, 1, 0, 0, 0, 400, 401, 5, 42, 0, 0, 401, 402, 5, 47, 0, 0, 402, 403,
1, 0, 0, 0, 403, 404, 6, 52, 0, 0, 404, 106, 1, 0, 0, 0, 405, 407, 7, 4,
0, 0, 406, 405, 1, 0, 0, 0, 407, 408, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0,
408, 409, 1, 0, 0, 0, 409, 410, 1, 0, 0, 0, 410, 411, 6, 53, 0, 0, 411,
108, 1, 0, 0, 0, 412, 413, 9, 0, 0, 0, 413, 110, 1, 0, 0, 0, 414, 417,
8, 5, 0, 0, 415, 417, 3, 113, 56, 0, 416, 414, 1, 0, 0, 0, 416, 415, 1,
0, 0, 0, 417, 112, 1, 0, 0, 0, 418, 430, 5, 92, 0, 0, 419, 431, 7, 6, 0,
0, 420, 421, 5, 117, 0, 0, 421, 422, 5, 123, 0, 0, 422, 424, 1, 0, 0, 0,
423, 425, 3, 115, 57, 0, 424, 423, 1, 0, 0, 0, 425, 426, 1, 0, 0, 0, 426,
424, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 429,
5, 125, 0, 0, 429, 431, 1, 0, 0, 0, 430, 419, 1, 0, 0, 0, 430, 420, 1,
0, 0, 0, 431, 114, 1, 0, 0, 0, 432, 433, 7, 7, 0, 0, 433, 116, 1, 0, 0,
0, 18, 0, 124, 150, 274, 284, 294, 303, 359, 365, 371, 373, 378, 386, 397,
408, 416, 426, 430, 1, 6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// bicepLexerInit initializes any static state used to implement bicepLexer. By default the
// static state used to implement the lexer is lazily initialized during the first call to
// NewbicepLexer(). You can call this function if you wish to initialize the static state ahead
// of time.
func BicepLexerInit() {
staticData := &BicepLexerLexerStaticData
staticData.once.Do(biceplexerLexerInit)
}
// NewbicepLexer produces a new lexer instance for the optional input antlr.CharStream.
func NewbicepLexer(input antlr.CharStream) *bicepLexer {
BicepLexerInit()
l := new(bicepLexer)
l.BaseLexer = antlr.NewBaseLexer(input)
staticData := &BicepLexerLexerStaticData
l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
l.channelNames = staticData.ChannelNames
l.modeNames = staticData.ModeNames
l.RuleNames = staticData.RuleNames
l.LiteralNames = staticData.LiteralNames
l.SymbolicNames = staticData.SymbolicNames
l.GrammarFileName = "bicep.g4"
// TODO: l.EOF = antlr.TokenEOF
return l
}
// bicepLexer tokens.
const (
bicepLexerMULTILINE_STRING = 1
bicepLexerAT = 2
bicepLexerCOMMA = 3
bicepLexerOBRACK = 4
bicepLexerCBRACK = 5
bicepLexerOPAR = 6
bicepLexerCPAR = 7
bicepLexerDOT = 8
bicepLexerPIPE = 9
bicepLexerCOL = 10
bicepLexerASSIGN = 11
bicepLexerOBRACE = 12
bicepLexerCBRACE = 13
bicepLexerPARAM = 14
bicepLexerVAR = 15
bicepLexerTRUE = 16
bicepLexerFALSE = 17
bicepLexerNULL = 18
bicepLexerARRAY = 19
bicepLexerOBJECT = 20
bicepLexerRESOURCE = 21
bicepLexerOUTPUT = 22
bicepLexerTARGET_SCOPE = 23
bicepLexerIMPORT = 24
bicepLexerWITH = 25
bicepLexerAS = 26
bicepLexerMETADATA = 27
bicepLexerEXISTING = 28
bicepLexerTYPE = 29
bicepLexerMODULE = 30
bicepLexerSTRING_LEFT_PIECE = 31
bicepLexerSTRING_MIDDLE_PIECE = 32
bicepLexerSTRING_RIGHT_PIECE = 33
bicepLexerSTRING_COMPLETE = 34
bicepLexerSTRING = 35
bicepLexerINT = 36
bicepLexerBOOL = 37
bicepLexerIF = 38
bicepLexerFOR = 39
bicepLexerIN = 40
bicepLexerQMARK = 41
bicepLexerGT = 42
bicepLexerGTE = 43
bicepLexerLT = 44
bicepLexerLTE = 45
bicepLexerEQ = 46
bicepLexerNEQ = 47
bicepLexerARROW = 48
bicepLexerIDENTIFIER = 49
bicepLexerNUMBER = 50
bicepLexerNL = 51
bicepLexerSINGLE_LINE_COMMENT = 52
bicepLexerMULTI_LINE_COMMENT = 53
bicepLexerSPACES = 54
bicepLexerUNKNOWN = 55
)
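// dumpBicepTokens is a hedged sketch, not generated code, and would normally
// live outside this generated file: it tokenizes a snippet with the generated
// lexer and maps each token type back to its symbolic name via the table
// installed by NewbicepLexer.
func dumpBicepTokens(src string) {
	lex := NewbicepLexer(antlr.NewInputStream(src))
	for tok := lex.NextToken(); tok.GetTokenType() != antlr.TokenEOF; tok = lex.NextToken() {
		fmt.Printf("%-24s %q\n", lex.SymbolicNames[tok.GetTokenType()], tok.GetText())
	}
}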
// Code generated from bicep.g4 by ANTLR 4.13.1. DO NOT EDIT.
package parser // bicep
import (
"fmt"
"strconv"
"sync"
"github.com/antlr4-go/antlr/v4"
)
// Suppress unused import errors
var _ = fmt.Printf
var _ = strconv.Itoa
var _ = sync.Once{}
type bicepParser struct {
*antlr.BaseParser
}
var BicepParserStaticData struct {
once sync.Once
serializedATN []int32
LiteralNames []string
SymbolicNames []string
RuleNames []string
PredictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
func bicepParserInit() {
staticData := &BicepParserStaticData
staticData.LiteralNames = []string{
"", "", "'@'", "','", "'['", "']'", "'('", "')'", "'.'", "'|'", "",
"'='", "'{'", "'}'", "'param'", "'var'", "'true'", "'false'", "'null'",
"'array'", "'object'", "'resource'", "'output'", "'targetScope'", "'import'",
"'with'", "'as'", "'metadata'", "'existing'", "'type'", "'module'",
"", "", "", "", "'string'", "'int'", "'bool'", "'if'", "'for'", "'in'",
"'?'", "'>'", "'>='", "'<'", "'<='", "'=='", "'!='", "'=>'",
}
staticData.SymbolicNames = []string{
"", "MULTILINE_STRING", "AT", "COMMA", "OBRACK", "CBRACK", "OPAR", "CPAR",
"DOT", "PIPE", "COL", "ASSIGN", "OBRACE", "CBRACE", "PARAM", "VAR",
"TRUE", "FALSE", "NULL", "ARRAY", "OBJECT", "RESOURCE", "OUTPUT", "TARGET_SCOPE",
"IMPORT", "WITH", "AS", "METADATA", "EXISTING", "TYPE", "MODULE", "STRING_LEFT_PIECE",
"STRING_MIDDLE_PIECE", "STRING_RIGHT_PIECE", "STRING_COMPLETE", "STRING",
"INT", "BOOL", "IF", "FOR", "IN", "QMARK", "GT", "GTE", "LT", "LTE",
"EQ", "NEQ", "ARROW", "IDENTIFIER", "NUMBER", "NL", "SINGLE_LINE_COMMENT",
"MULTI_LINE_COMMENT", "SPACES", "UNKNOWN",
}
staticData.RuleNames = []string{
"program", "statement", "targetScopeDecl", "importDecl", "metadataDecl",
"parameterDecl", "parameterDefaultValue", "typeDecl", "variableDecl",
"resourceDecl", "moduleDecl", "outputDecl", "ifCondition", "forExpression",
"forVariableBlock", "forBody", "interpString", "expression", "lambdaExpression",
"logicCharacter", "primaryExpression", "parenthesizedExpression", "typeExpression",
"literalValue", "object", "objectProperty", "array", "arrayItem", "decorator",
"decoratorExpression", "functionCall", "argumentList", "identifier",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 55, 436, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15,
2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2,
21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26,
7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7,
31, 2, 32, 7, 32, 1, 0, 5, 0, 68, 8, 0, 10, 0, 12, 0, 71, 9, 0, 1, 0, 1,
0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 85,
8, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 5, 3, 93, 8, 3, 10, 3, 12, 3,
96, 9, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 5, 3, 104, 8, 3, 10, 3, 12,
3, 107, 9, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 5,
5, 118, 8, 5, 10, 5, 12, 5, 121, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 127,
8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 132, 8, 5, 3, 5, 134, 8, 5, 1, 5, 1, 5, 1,
6, 1, 6, 1, 6, 1, 7, 5, 7, 142, 8, 7, 10, 7, 12, 7, 145, 9, 7, 1, 7, 1,
7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 5, 8, 154, 8, 8, 10, 8, 12, 8, 157, 9,
8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 5, 9, 166, 8, 9, 10, 9, 12,
9, 169, 9, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 175, 8, 9, 1, 9, 1, 9, 1, 9,
1, 9, 3, 9, 181, 8, 9, 1, 9, 1, 9, 1, 10, 5, 10, 186, 8, 10, 10, 10, 12,
10, 189, 9, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10,
198, 8, 10, 1, 10, 1, 10, 1, 11, 5, 11, 203, 8, 11, 10, 11, 12, 11, 206,
9, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 3, 11, 213, 8, 11, 1, 11, 1,
11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 5, 13, 225,
8, 13, 10, 13, 12, 13, 228, 9, 13, 1, 13, 1, 13, 1, 13, 3, 13, 233, 8,
13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 240, 8, 13, 10, 13, 12, 13,
243, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1,
15, 1, 15, 3, 15, 255, 8, 15, 1, 16, 1, 16, 1, 16, 1, 16, 5, 16, 261, 8,
16, 10, 16, 12, 16, 264, 9, 16, 1, 16, 1, 16, 1, 16, 1, 16, 3, 16, 270,
8, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1,
17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17,
1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 299, 8, 17, 10,
17, 12, 17, 302, 9, 17, 1, 18, 1, 18, 3, 18, 306, 8, 18, 1, 18, 1, 18,
3, 18, 310, 8, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1,
20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 3, 20, 326, 8, 20, 1, 21,
1, 21, 3, 21, 330, 8, 21, 1, 21, 1, 21, 3, 21, 334, 8, 21, 1, 21, 1, 21,
1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 3, 23, 345, 8, 23, 1,
24, 1, 24, 4, 24, 349, 8, 24, 11, 24, 12, 24, 350, 1, 24, 1, 24, 4, 24,
355, 8, 24, 11, 24, 12, 24, 356, 5, 24, 359, 8, 24, 10, 24, 12, 24, 362,
9, 24, 3, 24, 364, 8, 24, 1, 24, 1, 24, 1, 25, 1, 25, 3, 25, 370, 8, 25,
1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 5, 26, 377, 8, 26, 10, 26, 12, 26, 380,
9, 26, 1, 26, 5, 26, 383, 8, 26, 10, 26, 12, 26, 386, 9, 26, 1, 26, 1,
26, 1, 27, 1, 27, 4, 27, 392, 8, 27, 11, 27, 12, 27, 393, 1, 27, 3, 27,
397, 8, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1,
29, 3, 29, 408, 8, 29, 1, 30, 1, 30, 1, 30, 3, 30, 413, 8, 30, 1, 30, 3,
30, 416, 8, 30, 1, 30, 3, 30, 419, 8, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1,
31, 3, 31, 426, 8, 31, 1, 31, 5, 31, 429, 8, 31, 10, 31, 12, 31, 432, 9,
31, 1, 32, 1, 32, 1, 32, 0, 1, 34, 33, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18,
20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54,
56, 58, 60, 62, 64, 0, 2, 1, 0, 42, 47, 3, 0, 14, 30, 35, 40, 49, 49, 474,
0, 69, 1, 0, 0, 0, 2, 84, 1, 0, 0, 0, 4, 86, 1, 0, 0, 0, 6, 94, 1, 0, 0,
0, 8, 110, 1, 0, 0, 0, 10, 119, 1, 0, 0, 0, 12, 137, 1, 0, 0, 0, 14, 143,
1, 0, 0, 0, 16, 155, 1, 0, 0, 0, 18, 167, 1, 0, 0, 0, 20, 187, 1, 0, 0,
0, 22, 204, 1, 0, 0, 0, 24, 218, 1, 0, 0, 0, 26, 222, 1, 0, 0, 0, 28, 246,
1, 0, 0, 0, 30, 254, 1, 0, 0, 0, 32, 269, 1, 0, 0, 0, 34, 271, 1, 0, 0,
0, 36, 309, 1, 0, 0, 0, 38, 314, 1, 0, 0, 0, 40, 325, 1, 0, 0, 0, 42, 327,
1, 0, 0, 0, 44, 337, 1, 0, 0, 0, 46, 344, 1, 0, 0, 0, 48, 346, 1, 0, 0,
0, 50, 369, 1, 0, 0, 0, 52, 374, 1, 0, 0, 0, 54, 389, 1, 0, 0, 0, 56, 398,
1, 0, 0, 0, 58, 407, 1, 0, 0, 0, 60, 409, 1, 0, 0, 0, 62, 422, 1, 0, 0,
0, 64, 433, 1, 0, 0, 0, 66, 68, 3, 2, 1, 0, 67, 66, 1, 0, 0, 0, 68, 71,
1, 0, 0, 0, 69, 67, 1, 0, 0, 0, 69, 70, 1, 0, 0, 0, 70, 72, 1, 0, 0, 0,
71, 69, 1, 0, 0, 0, 72, 73, 5, 0, 0, 1, 73, 1, 1, 0, 0, 0, 74, 85, 3, 4,
2, 0, 75, 85, 3, 6, 3, 0, 76, 85, 3, 8, 4, 0, 77, 85, 3, 10, 5, 0, 78,
85, 3, 14, 7, 0, 79, 85, 3, 16, 8, 0, 80, 85, 3, 18, 9, 0, 81, 85, 3, 20,
10, 0, 82, 85, 3, 22, 11, 0, 83, 85, 5, 51, 0, 0, 84, 74, 1, 0, 0, 0, 84,
75, 1, 0, 0, 0, 84, 76, 1, 0, 0, 0, 84, 77, 1, 0, 0, 0, 84, 78, 1, 0, 0,
0, 84, 79, 1, 0, 0, 0, 84, 80, 1, 0, 0, 0, 84, 81, 1, 0, 0, 0, 84, 82,
1, 0, 0, 0, 84, 83, 1, 0, 0, 0, 85, 3, 1, 0, 0, 0, 86, 87, 5, 23, 0, 0,
87, 88, 5, 11, 0, 0, 88, 89, 3, 34, 17, 0, 89, 90, 5, 51, 0, 0, 90, 5,
1, 0, 0, 0, 91, 93, 3, 56, 28, 0, 92, 91, 1, 0, 0, 0, 93, 96, 1, 0, 0,
0, 94, 92, 1, 0, 0, 0, 94, 95, 1, 0, 0, 0, 95, 97, 1, 0, 0, 0, 96, 94,
1, 0, 0, 0, 97, 98, 5, 24, 0, 0, 98, 105, 3, 32, 16, 0, 99, 100, 5, 25,
0, 0, 100, 104, 3, 48, 24, 0, 101, 102, 5, 26, 0, 0, 102, 104, 3, 64, 32,
0, 103, 99, 1, 0, 0, 0, 103, 101, 1, 0, 0, 0, 104, 107, 1, 0, 0, 0, 105,
103, 1, 0, 0, 0, 105, 106, 1, 0, 0, 0, 106, 108, 1, 0, 0, 0, 107, 105,
1, 0, 0, 0, 108, 109, 5, 51, 0, 0, 109, 7, 1, 0, 0, 0, 110, 111, 5, 27,
0, 0, 111, 112, 3, 64, 32, 0, 112, 113, 5, 11, 0, 0, 113, 114, 3, 34, 17,
0, 114, 115, 5, 51, 0, 0, 115, 9, 1, 0, 0, 0, 116, 118, 3, 56, 28, 0, 117,
116, 1, 0, 0, 0, 118, 121, 1, 0, 0, 0, 119, 117, 1, 0, 0, 0, 119, 120,
1, 0, 0, 0, 120, 122, 1, 0, 0, 0, 121, 119, 1, 0, 0, 0, 122, 123, 5, 14,
0, 0, 123, 133, 3, 64, 32, 0, 124, 126, 3, 44, 22, 0, 125, 127, 3, 12,
6, 0, 126, 125, 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 134, 1, 0, 0, 0,
128, 129, 5, 21, 0, 0, 129, 131, 3, 32, 16, 0, 130, 132, 3, 12, 6, 0, 131,
130, 1, 0, 0, 0, 131, 132, 1, 0, 0, 0, 132, 134, 1, 0, 0, 0, 133, 124,
1, 0, 0, 0, 133, 128, 1, 0, 0, 0, 134, 135, 1, 0, 0, 0, 135, 136, 5, 51,
0, 0, 136, 11, 1, 0, 0, 0, 137, 138, 5, 11, 0, 0, 138, 139, 3, 34, 17,
0, 139, 13, 1, 0, 0, 0, 140, 142, 3, 56, 28, 0, 141, 140, 1, 0, 0, 0, 142,
145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 146,
1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 147, 5, 29, 0, 0, 147, 148, 3, 64,
32, 0, 148, 149, 5, 11, 0, 0, 149, 150, 3, 44, 22, 0, 150, 151, 5, 51,
0, 0, 151, 15, 1, 0, 0, 0, 152, 154, 3, 56, 28, 0, 153, 152, 1, 0, 0, 0,
154, 157, 1, 0, 0, 0, 155, 153, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156,
158, 1, 0, 0, 0, 157, 155, 1, 0, 0, 0, 158, 159, 5, 15, 0, 0, 159, 160,
3, 64, 32, 0, 160, 161, 5, 11, 0, 0, 161, 162, 3, 34, 17, 0, 162, 163,
5, 51, 0, 0, 163, 17, 1, 0, 0, 0, 164, 166, 3, 56, 28, 0, 165, 164, 1,
0, 0, 0, 166, 169, 1, 0, 0, 0, 167, 165, 1, 0, 0, 0, 167, 168, 1, 0, 0,
0, 168, 170, 1, 0, 0, 0, 169, 167, 1, 0, 0, 0, 170, 171, 5, 21, 0, 0, 171,
172, 3, 64, 32, 0, 172, 174, 3, 32, 16, 0, 173, 175, 5, 28, 0, 0, 174,
173, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 180,
5, 11, 0, 0, 177, 181, 3, 24, 12, 0, 178, 181, 3, 48, 24, 0, 179, 181,
3, 26, 13, 0, 180, 177, 1, 0, 0, 0, 180, 178, 1, 0, 0, 0, 180, 179, 1,
0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 5, 51, 0, 0, 183, 19, 1, 0, 0,
0, 184, 186, 3, 56, 28, 0, 185, 184, 1, 0, 0, 0, 186, 189, 1, 0, 0, 0,
187, 185, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 190, 1, 0, 0, 0, 189,
187, 1, 0, 0, 0, 190, 191, 5, 30, 0, 0, 191, 192, 3, 64, 32, 0, 192, 193,
3, 32, 16, 0, 193, 197, 5, 11, 0, 0, 194, 198, 3, 24, 12, 0, 195, 198,
3, 48, 24, 0, 196, 198, 3, 26, 13, 0, 197, 194, 1, 0, 0, 0, 197, 195, 1,
0, 0, 0, 197, 196, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 51, 0,
0, 200, 21, 1, 0, 0, 0, 201, 203, 3, 56, 28, 0, 202, 201, 1, 0, 0, 0, 203,
206, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 204, 205, 1, 0, 0, 0, 205, 207,
1, 0, 0, 0, 206, 204, 1, 0, 0, 0, 207, 208, 5, 22, 0, 0, 208, 212, 3, 64,
32, 0, 209, 213, 3, 64, 32, 0, 210, 211, 5, 21, 0, 0, 211, 213, 3, 32,
16, 0, 212, 209, 1, 0, 0, 0, 212, 210, 1, 0, 0, 0, 213, 214, 1, 0, 0, 0,
214, 215, 5, 11, 0, 0, 215, 216, 3, 34, 17, 0, 216, 217, 5, 51, 0, 0, 217,
23, 1, 0, 0, 0, 218, 219, 5, 38, 0, 0, 219, 220, 3, 42, 21, 0, 220, 221,
3, 48, 24, 0, 221, 25, 1, 0, 0, 0, 222, 226, 5, 4, 0, 0, 223, 225, 5, 51,
0, 0, 224, 223, 1, 0, 0, 0, 225, 228, 1, 0, 0, 0, 226, 224, 1, 0, 0, 0,
226, 227, 1, 0, 0, 0, 227, 229, 1, 0, 0, 0, 228, 226, 1, 0, 0, 0, 229,
232, 5, 39, 0, 0, 230, 233, 3, 64, 32, 0, 231, 233, 3, 28, 14, 0, 232,
230, 1, 0, 0, 0, 232, 231, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235,
5, 40, 0, 0, 235, 236, 3, 34, 17, 0, 236, 237, 5, 10, 0, 0, 237, 241, 3,
30, 15, 0, 238, 240, 5, 51, 0, 0, 239, 238, 1, 0, 0, 0, 240, 243, 1, 0,
0, 0, 241, 239, 1, 0, 0, 0, 241, 242, 1, 0, 0, 0, 242, 244, 1, 0, 0, 0,
243, 241, 1, 0, 0, 0, 244, 245, 5, 5, 0, 0, 245, 27, 1, 0, 0, 0, 246, 247,
5, 6, 0, 0, 247, 248, 3, 64, 32, 0, 248, 249, 5, 3, 0, 0, 249, 250, 3,
64, 32, 0, 250, 251, 5, 7, 0, 0, 251, 29, 1, 0, 0, 0, 252, 255, 3, 34,
17, 0, 253, 255, 3, 24, 12, 0, 254, 252, 1, 0, 0, 0, 254, 253, 1, 0, 0,
0, 255, 31, 1, 0, 0, 0, 256, 262, 5, 31, 0, 0, 257, 258, 3, 34, 17, 0,
258, 259, 5, 32, 0, 0, 259, 261, 1, 0, 0, 0, 260, 257, 1, 0, 0, 0, 261,
264, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 262, 263, 1, 0, 0, 0, 263, 265,
1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 265, 266, 3, 34, 17, 0, 266, 267, 5,
33, 0, 0, 267, 270, 1, 0, 0, 0, 268, 270, 5, 34, 0, 0, 269, 256, 1, 0,
0, 0, 269, 268, 1, 0, 0, 0, 270, 33, 1, 0, 0, 0, 271, 272, 6, 17, -1, 0,
272, 273, 3, 40, 20, 0, 273, 300, 1, 0, 0, 0, 274, 275, 10, 6, 0, 0, 275,
276, 5, 41, 0, 0, 276, 277, 3, 34, 17, 0, 277, 278, 5, 10, 0, 0, 278, 279,
3, 34, 17, 7, 279, 299, 1, 0, 0, 0, 280, 281, 10, 2, 0, 0, 281, 282, 3,
38, 19, 0, 282, 283, 3, 34, 17, 3, 283, 299, 1, 0, 0, 0, 284, 285, 10,
7, 0, 0, 285, 286, 5, 4, 0, 0, 286, 287, 3, 34, 17, 0, 287, 288, 5, 5,
0, 0, 288, 299, 1, 0, 0, 0, 289, 290, 10, 5, 0, 0, 290, 291, 5, 8, 0, 0,
291, 299, 3, 64, 32, 0, 292, 293, 10, 4, 0, 0, 293, 294, 5, 8, 0, 0, 294,
299, 3, 60, 30, 0, 295, 296, 10, 3, 0, 0, 296, 297, 5, 10, 0, 0, 297, 299,
3, 64, 32, 0, 298, 274, 1, 0, 0, 0, 298, 280, 1, 0, 0, 0, 298, 284, 1,
0, 0, 0, 298, 289, 1, 0, 0, 0, 298, 292, 1, 0, 0, 0, 298, 295, 1, 0, 0,
0, 299, 302, 1, 0, 0, 0, 300, 298, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301,
35, 1, 0, 0, 0, 302, 300, 1, 0, 0, 0, 303, 305, 5, 6, 0, 0, 304, 306, 3,
62, 31, 0, 305, 304, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 307, 1, 0,
0, 0, 307, 310, 5, 7, 0, 0, 308, 310, 3, 64, 32, 0, 309, 303, 1, 0, 0,
0, 309, 308, 1, 0, 0, 0, 310, 311, 1, 0, 0, 0, 311, 312, 5, 48, 0, 0, 312,
313, 3, 34, 17, 0, 313, 37, 1, 0, 0, 0, 314, 315, 7, 0, 0, 0, 315, 39,
1, 0, 0, 0, 316, 326, 3, 46, 23, 0, 317, 326, 3, 60, 30, 0, 318, 326, 3,
32, 16, 0, 319, 326, 5, 1, 0, 0, 320, 326, 3, 52, 26, 0, 321, 326, 3, 48,
24, 0, 322, 326, 3, 26, 13, 0, 323, 326, 3, 42, 21, 0, 324, 326, 3, 36,
18, 0, 325, 316, 1, 0, 0, 0, 325, 317, 1, 0, 0, 0, 325, 318, 1, 0, 0, 0,
325, 319, 1, 0, 0, 0, 325, 320, 1, 0, 0, 0, 325, 321, 1, 0, 0, 0, 325,
322, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 325, 324, 1, 0, 0, 0, 326, 41, 1,
0, 0, 0, 327, 329, 5, 6, 0, 0, 328, 330, 5, 51, 0, 0, 329, 328, 1, 0, 0,
0, 329, 330, 1, 0, 0, 0, 330, 331, 1, 0, 0, 0, 331, 333, 3, 34, 17, 0,
332, 334, 5, 51, 0, 0, 333, 332, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334,
335, 1, 0, 0, 0, 335, 336, 5, 7, 0, 0, 336, 43, 1, 0, 0, 0, 337, 338, 3,
64, 32, 0, 338, 45, 1, 0, 0, 0, 339, 345, 5, 50, 0, 0, 340, 345, 5, 16,
0, 0, 341, 345, 5, 17, 0, 0, 342, 345, 5, 18, 0, 0, 343, 345, 3, 64, 32,
0, 344, 339, 1, 0, 0, 0, 344, 340, 1, 0, 0, 0, 344, 341, 1, 0, 0, 0, 344,
342, 1, 0, 0, 0, 344, 343, 1, 0, 0, 0, 345, 47, 1, 0, 0, 0, 346, 363, 5,
12, 0, 0, 347, 349, 5, 51, 0, 0, 348, 347, 1, 0, 0, 0, 349, 350, 1, 0,
0, 0, 350, 348, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 351, 360, 1, 0, 0, 0,
352, 354, 3, 50, 25, 0, 353, 355, 5, 51, 0, 0, 354, 353, 1, 0, 0, 0, 355,
356, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 359,
1, 0, 0, 0, 358, 352, 1, 0, 0, 0, 359, 362, 1, 0, 0, 0, 360, 358, 1, 0,
0, 0, 360, 361, 1, 0, 0, 0, 361, 364, 1, 0, 0, 0, 362, 360, 1, 0, 0, 0,
363, 348, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365,
366, 5, 13, 0, 0, 366, 49, 1, 0, 0, 0, 367, 370, 3, 64, 32, 0, 368, 370,
3, 32, 16, 0, 369, 367, 1, 0, 0, 0, 369, 368, 1, 0, 0, 0, 370, 371, 1,
0, 0, 0, 371, 372, 5, 10, 0, 0, 372, 373, 3, 34, 17, 0, 373, 51, 1, 0,
0, 0, 374, 378, 5, 4, 0, 0, 375, 377, 5, 51, 0, 0, 376, 375, 1, 0, 0, 0,
377, 380, 1, 0, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379,
384, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 381, 383, 3, 54, 27, 0, 382, 381,
1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 384, 385, 1, 0,
0, 0, 385, 387, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 387, 388, 5, 5, 0, 0,
388, 53, 1, 0, 0, 0, 389, 396, 3, 34, 17, 0, 390, 392, 5, 51, 0, 0, 391,
390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 391, 1, 0, 0, 0, 393, 394,
1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 397, 5, 3, 0, 0, 396, 391, 1, 0,
0, 0, 396, 395, 1, 0, 0, 0, 396, 397, 1, 0, 0, 0, 397, 55, 1, 0, 0, 0,
398, 399, 5, 2, 0, 0, 399, 400, 3, 58, 29, 0, 400, 401, 5, 51, 0, 0, 401,
57, 1, 0, 0, 0, 402, 408, 3, 60, 30, 0, 403, 404, 3, 34, 17, 0, 404, 405,
5, 8, 0, 0, 405, 406, 3, 60, 30, 0, 406, 408, 1, 0, 0, 0, 407, 402, 1,
0, 0, 0, 407, 403, 1, 0, 0, 0, 408, 59, 1, 0, 0, 0, 409, 410, 3, 64, 32,
0, 410, 415, 5, 6, 0, 0, 411, 413, 5, 51, 0, 0, 412, 411, 1, 0, 0, 0, 412,
413, 1, 0, 0, 0, 413, 414, 1, 0, 0, 0, 414, 416, 3, 62, 31, 0, 415, 412,
1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 418, 1, 0, 0, 0, 417, 419, 5, 51,
0, 0, 418, 417, 1, 0, 0, 0, 418, 419, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0,
420, 421, 5, 7, 0, 0, 421, 61, 1, 0, 0, 0, 422, 430, 3, 34, 17, 0, 423,
425, 5, 3, 0, 0, 424, 426, 5, 51, 0, 0, 425, 424, 1, 0, 0, 0, 425, 426,
1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 429, 3, 34, 17, 0, 428, 423, 1,
0, 0, 0, 429, 432, 1, 0, 0, 0, 430, 428, 1, 0, 0, 0, 430, 431, 1, 0, 0,
0, 431, 63, 1, 0, 0, 0, 432, 430, 1, 0, 0, 0, 433, 434, 7, 1, 0, 0, 434,
65, 1, 0, 0, 0, 47, 69, 84, 94, 103, 105, 119, 126, 131, 133, 143, 155,
167, 174, 180, 187, 197, 204, 212, 226, 232, 241, 254, 262, 269, 298, 300,
305, 309, 325, 329, 333, 344, 350, 356, 360, 363, 369, 378, 384, 393, 396,
407, 412, 415, 418, 425, 430,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// bicepParserInit initializes any static state used to implement bicepParser. By default the
// static state used to implement the parser is lazily initialized during the first call to
// NewbicepParser(). You can call this function if you wish to initialize the static state ahead
// of time.
func BicepParserInit() {
staticData := &BicepParserStaticData
staticData.once.Do(bicepParserInit)
}
// NewbicepParser produces a new parser instance for the optional input antlr.TokenStream.
func NewbicepParser(input antlr.TokenStream) *bicepParser {
BicepParserInit()
this := new(bicepParser)
this.BaseParser = antlr.NewBaseParser(input)
staticData := &BicepParserStaticData
this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
this.RuleNames = staticData.RuleNames
this.LiteralNames = staticData.LiteralNames
this.SymbolicNames = staticData.SymbolicNames
this.GrammarFileName = "bicep.g4"
return this
}
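// parseBicepProgram is a hedged sketch, not generated code, and would normally
// live outside this generated file: it wires the generated lexer and parser
// together and returns the parse tree for a source string. Error listeners are
// left at their defaults.
func parseBicepProgram(src string) IProgramContext {
	lexer := NewbicepLexer(antlr.NewInputStream(src))
	tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
	return NewbicepParser(tokens).Program()
}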
// bicepParser tokens.
const (
bicepParserEOF = antlr.TokenEOF
bicepParserMULTILINE_STRING = 1
bicepParserAT = 2
bicepParserCOMMA = 3
bicepParserOBRACK = 4
bicepParserCBRACK = 5
bicepParserOPAR = 6
bicepParserCPAR = 7
bicepParserDOT = 8
bicepParserPIPE = 9
bicepParserCOL = 10
bicepParserASSIGN = 11
bicepParserOBRACE = 12
bicepParserCBRACE = 13
bicepParserPARAM = 14
bicepParserVAR = 15
bicepParserTRUE = 16
bicepParserFALSE = 17
bicepParserNULL = 18
bicepParserARRAY = 19
bicepParserOBJECT = 20
bicepParserRESOURCE = 21
bicepParserOUTPUT = 22
bicepParserTARGET_SCOPE = 23
bicepParserIMPORT = 24
bicepParserWITH = 25
bicepParserAS = 26
bicepParserMETADATA = 27
bicepParserEXISTING = 28
bicepParserTYPE = 29
bicepParserMODULE = 30
bicepParserSTRING_LEFT_PIECE = 31
bicepParserSTRING_MIDDLE_PIECE = 32
bicepParserSTRING_RIGHT_PIECE = 33
bicepParserSTRING_COMPLETE = 34
bicepParserSTRING = 35
bicepParserINT = 36
bicepParserBOOL = 37
bicepParserIF = 38
bicepParserFOR = 39
bicepParserIN = 40
bicepParserQMARK = 41
bicepParserGT = 42
bicepParserGTE = 43
bicepParserLT = 44
bicepParserLTE = 45
bicepParserEQ = 46
bicepParserNEQ = 47
bicepParserARROW = 48
bicepParserIDENTIFIER = 49
bicepParserNUMBER = 50
bicepParserNL = 51
bicepParserSINGLE_LINE_COMMENT = 52
bicepParserMULTI_LINE_COMMENT = 53
bicepParserSPACES = 54
bicepParserUNKNOWN = 55
)
// bicepParser rules.
const (
bicepParserRULE_program = 0
bicepParserRULE_statement = 1
bicepParserRULE_targetScopeDecl = 2
bicepParserRULE_importDecl = 3
bicepParserRULE_metadataDecl = 4
bicepParserRULE_parameterDecl = 5
bicepParserRULE_parameterDefaultValue = 6
bicepParserRULE_typeDecl = 7
bicepParserRULE_variableDecl = 8
bicepParserRULE_resourceDecl = 9
bicepParserRULE_moduleDecl = 10
bicepParserRULE_outputDecl = 11
bicepParserRULE_ifCondition = 12
bicepParserRULE_forExpression = 13
bicepParserRULE_forVariableBlock = 14
bicepParserRULE_forBody = 15
bicepParserRULE_interpString = 16
bicepParserRULE_expression = 17
bicepParserRULE_lambdaExpression = 18
bicepParserRULE_logicCharacter = 19
bicepParserRULE_primaryExpression = 20
bicepParserRULE_parenthesizedExpression = 21
bicepParserRULE_typeExpression = 22
bicepParserRULE_literalValue = 23
bicepParserRULE_object = 24
bicepParserRULE_objectProperty = 25
bicepParserRULE_array = 26
bicepParserRULE_arrayItem = 27
bicepParserRULE_decorator = 28
bicepParserRULE_decoratorExpression = 29
bicepParserRULE_functionCall = 30
bicepParserRULE_argumentList = 31
bicepParserRULE_identifier = 32
)
// IProgramContext is an interface to support dynamic dispatch.
type IProgramContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
EOF() antlr.TerminalNode
AllStatement() []IStatementContext
Statement(i int) IStatementContext
// IsProgramContext differentiates from other interfaces.
IsProgramContext()
}
type ProgramContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyProgramContext() *ProgramContext {
var p = new(ProgramContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_program
return p
}
func InitEmptyProgramContext(p *ProgramContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_program
}
func (*ProgramContext) IsProgramContext() {}
func NewProgramContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ProgramContext {
var p = new(ProgramContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_program
return p
}
func (s *ProgramContext) GetParser() antlr.Parser { return s.parser }
func (s *ProgramContext) EOF() antlr.TerminalNode {
return s.GetToken(bicepParserEOF, 0)
}
func (s *ProgramContext) AllStatement() []IStatementContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IStatementContext); ok {
len++
}
}
tst := make([]IStatementContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IStatementContext); ok {
tst[i] = t.(IStatementContext)
i++
}
}
return tst
}
func (s *ProgramContext) Statement(i int) IStatementContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IStatementContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IStatementContext)
}
func (s *ProgramContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ProgramContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ProgramContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitProgram(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) Program() (localctx IProgramContext) {
localctx = NewProgramContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 0, bicepParserRULE_program)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(69)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&2251801590022148) != 0 {
{
p.SetState(66)
p.Statement()
}
p.SetState(71)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(72)
p.Match(bicepParserEOF)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IStatementContext is an interface to support dynamic dispatch.
type IStatementContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
TargetScopeDecl() ITargetScopeDeclContext
ImportDecl() IImportDeclContext
MetadataDecl() IMetadataDeclContext
ParameterDecl() IParameterDeclContext
TypeDecl() ITypeDeclContext
VariableDecl() IVariableDeclContext
ResourceDecl() IResourceDeclContext
ModuleDecl() IModuleDeclContext
OutputDecl() IOutputDeclContext
NL() antlr.TerminalNode
// IsStatementContext differentiates from other interfaces.
IsStatementContext()
}
type StatementContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyStatementContext() *StatementContext {
var p = new(StatementContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_statement
return p
}
func InitEmptyStatementContext(p *StatementContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_statement
}
func (*StatementContext) IsStatementContext() {}
func NewStatementContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *StatementContext {
var p = new(StatementContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_statement
return p
}
func (s *StatementContext) GetParser() antlr.Parser { return s.parser }
func (s *StatementContext) TargetScopeDecl() ITargetScopeDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ITargetScopeDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ITargetScopeDeclContext)
}
func (s *StatementContext) ImportDecl() IImportDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IImportDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IImportDeclContext)
}
func (s *StatementContext) MetadataDecl() IMetadataDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IMetadataDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IMetadataDeclContext)
}
func (s *StatementContext) ParameterDecl() IParameterDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IParameterDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IParameterDeclContext)
}
func (s *StatementContext) TypeDecl() ITypeDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ITypeDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ITypeDeclContext)
}
func (s *StatementContext) VariableDecl() IVariableDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IVariableDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IVariableDeclContext)
}
func (s *StatementContext) ResourceDecl() IResourceDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IResourceDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IResourceDeclContext)
}
func (s *StatementContext) ModuleDecl() IModuleDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IModuleDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IModuleDeclContext)
}
func (s *StatementContext) OutputDecl() IOutputDeclContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IOutputDeclContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IOutputDeclContext)
}
func (s *StatementContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *StatementContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *StatementContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *StatementContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitStatement(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) Statement() (localctx IStatementContext) {
localctx = NewStatementContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 2, bicepParserRULE_statement)
p.SetState(84)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 1, p.GetParserRuleContext()) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(74)
p.TargetScopeDecl()
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(75)
p.ImportDecl()
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(76)
p.MetadataDecl()
}
case 4:
p.EnterOuterAlt(localctx, 4)
{
p.SetState(77)
p.ParameterDecl()
}
case 5:
p.EnterOuterAlt(localctx, 5)
{
p.SetState(78)
p.TypeDecl()
}
case 6:
p.EnterOuterAlt(localctx, 6)
{
p.SetState(79)
p.VariableDecl()
}
case 7:
p.EnterOuterAlt(localctx, 7)
{
p.SetState(80)
p.ResourceDecl()
}
case 8:
p.EnterOuterAlt(localctx, 8)
{
p.SetState(81)
p.ModuleDecl()
}
case 9:
p.EnterOuterAlt(localctx, 9)
{
p.SetState(82)
p.OutputDecl()
}
case 10:
p.EnterOuterAlt(localctx, 10)
{
p.SetState(83)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ITargetScopeDeclContext is an interface to support dynamic dispatch.
type ITargetScopeDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
TARGET_SCOPE() antlr.TerminalNode
ASSIGN() antlr.TerminalNode
Expression() IExpressionContext
NL() antlr.TerminalNode
// IsTargetScopeDeclContext differentiates from other interfaces.
IsTargetScopeDeclContext()
}
type TargetScopeDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyTargetScopeDeclContext() *TargetScopeDeclContext {
var p = new(TargetScopeDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_targetScopeDecl
return p
}
func InitEmptyTargetScopeDeclContext(p *TargetScopeDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_targetScopeDecl
}
func (*TargetScopeDeclContext) IsTargetScopeDeclContext() {}
func NewTargetScopeDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *TargetScopeDeclContext {
var p = new(TargetScopeDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_targetScopeDecl
return p
}
func (s *TargetScopeDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *TargetScopeDeclContext) TARGET_SCOPE() antlr.TerminalNode {
return s.GetToken(bicepParserTARGET_SCOPE, 0)
}
func (s *TargetScopeDeclContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *TargetScopeDeclContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *TargetScopeDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *TargetScopeDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *TargetScopeDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *TargetScopeDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitTargetScopeDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) TargetScopeDecl() (localctx ITargetScopeDeclContext) {
localctx = NewTargetScopeDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 4, bicepParserRULE_targetScopeDecl)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(86)
p.Match(bicepParserTARGET_SCOPE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(87)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(88)
p.expression(0)
}
{
p.SetState(89)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
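// Editor's note on the errorExit block repeated in every rule method above:
// under ANTLR's default error strategy the RecognitionException is attached
// to the offending context (SetException), reported, and recovered from, so
// the parser resynchronizes and continues instead of aborting on the first
// malformed declaration. A caller that prefers fail-fast behaviour can swap
// the strategy before parsing, e.g.
// parser.SetErrorHandler(antlr.NewBailErrorStrategy()).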
// IImportDeclContext is an interface to support dynamic dispatch.
type IImportDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetSpecification returns the specification rule context.
GetSpecification() IInterpStringContext
// GetAlias returns the alias rule context.
GetAlias() IIdentifierContext
// SetSpecification sets the specification rule context.
SetSpecification(IInterpStringContext)
// SetAlias sets the alias rule context.
SetAlias(IIdentifierContext)
// Getter signatures
IMPORT() antlr.TerminalNode
NL() antlr.TerminalNode
InterpString() IInterpStringContext
AllDecorator() []IDecoratorContext
Decorator(i int) IDecoratorContext
AllWITH() []antlr.TerminalNode
WITH(i int) antlr.TerminalNode
AllObject() []IObjectContext
Object(i int) IObjectContext
AllAS() []antlr.TerminalNode
AS(i int) antlr.TerminalNode
AllIdentifier() []IIdentifierContext
Identifier(i int) IIdentifierContext
// IsImportDeclContext differentiates from other interfaces.
IsImportDeclContext()
}
type ImportDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
specification IInterpStringContext
alias IIdentifierContext
}
func NewEmptyImportDeclContext() *ImportDeclContext {
var p = new(ImportDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_importDecl
return p
}
func InitEmptyImportDeclContext(p *ImportDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_importDecl
}
func (*ImportDeclContext) IsImportDeclContext() {}
func NewImportDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ImportDeclContext {
var p = new(ImportDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_importDecl
return p
}
func (s *ImportDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *ImportDeclContext) GetSpecification() IInterpStringContext { return s.specification }
func (s *ImportDeclContext) GetAlias() IIdentifierContext { return s.alias }
func (s *ImportDeclContext) SetSpecification(v IInterpStringContext) { s.specification = v }
func (s *ImportDeclContext) SetAlias(v IIdentifierContext) { s.alias = v }
func (s *ImportDeclContext) IMPORT() antlr.TerminalNode {
return s.GetToken(bicepParserIMPORT, 0)
}
func (s *ImportDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *ImportDeclContext) InterpString() IInterpStringContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IInterpStringContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IInterpStringContext)
}
func (s *ImportDeclContext) AllDecorator() []IDecoratorContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IDecoratorContext); ok {
len++
}
}
tst := make([]IDecoratorContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IDecoratorContext); ok {
tst[i] = t.(IDecoratorContext)
i++
}
}
return tst
}
func (s *ImportDeclContext) Decorator(i int) IDecoratorContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IDecoratorContext)
}
func (s *ImportDeclContext) AllWITH() []antlr.TerminalNode {
return s.GetTokens(bicepParserWITH)
}
func (s *ImportDeclContext) WITH(i int) antlr.TerminalNode {
return s.GetToken(bicepParserWITH, i)
}
func (s *ImportDeclContext) AllObject() []IObjectContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IObjectContext); ok {
len++
}
}
tst := make([]IObjectContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IObjectContext); ok {
tst[i] = t.(IObjectContext)
i++
}
}
return tst
}
func (s *ImportDeclContext) Object(i int) IObjectContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObjectContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IObjectContext)
}
func (s *ImportDeclContext) AllAS() []antlr.TerminalNode {
return s.GetTokens(bicepParserAS)
}
func (s *ImportDeclContext) AS(i int) antlr.TerminalNode {
return s.GetToken(bicepParserAS, i)
}
func (s *ImportDeclContext) AllIdentifier() []IIdentifierContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IIdentifierContext); ok {
len++
}
}
tst := make([]IIdentifierContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IIdentifierContext); ok {
tst[i] = t.(IIdentifierContext)
i++
}
}
return tst
}
func (s *ImportDeclContext) Identifier(i int) IIdentifierContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ImportDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ImportDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ImportDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitImportDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ImportDecl() (localctx IImportDeclContext) {
localctx = NewImportDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 6, bicepParserRULE_importDecl)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(94)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserAT {
{
p.SetState(91)
p.Decorator()
}
p.SetState(96)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(97)
p.Match(bicepParserIMPORT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(98)
var _x = p.InterpString()
localctx.(*ImportDeclContext).specification = _x
}
p.SetState(105)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserWITH || _la == bicepParserAS {
p.SetState(103)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserWITH:
{
p.SetState(99)
p.Match(bicepParserWITH)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(100)
p.Object()
}
case bicepParserAS:
{
p.SetState(101)
p.Match(bicepParserAS)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(102)
var _x = p.Identifier()
localctx.(*ImportDeclContext).alias = _x
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
p.SetState(107)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(108)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
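// Example (editor's sketch): the specification and alias labels populated in
// ImportDecl above surface through typed Get accessors, so a consumer can
// read an import alias without re-walking the children.
func importAliasExample(decl IImportDeclContext) string {
	if alias := decl.GetAlias(); alias != nil { // set only when an AS clause matched
		return alias.GetText()
	}
	return ""
}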
// IMetadataDeclContext is an interface to support dynamic dispatch.
type IMetadataDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule context.
GetName() IIdentifierContext
// SetName sets the name rule context.
SetName(IIdentifierContext)
// Getter signatures
METADATA() antlr.TerminalNode
ASSIGN() antlr.TerminalNode
Expression() IExpressionContext
NL() antlr.TerminalNode
Identifier() IIdentifierContext
// IsMetadataDeclContext differentiates from other interfaces.
IsMetadataDeclContext()
}
type MetadataDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
}
func NewEmptyMetadataDeclContext() *MetadataDeclContext {
var p = new(MetadataDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_metadataDecl
return p
}
func InitEmptyMetadataDeclContext(p *MetadataDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_metadataDecl
}
func (*MetadataDeclContext) IsMetadataDeclContext() {}
func NewMetadataDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MetadataDeclContext {
var p = new(MetadataDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_metadataDecl
return p
}
func (s *MetadataDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *MetadataDeclContext) GetName() IIdentifierContext { return s.name }
func (s *MetadataDeclContext) SetName(v IIdentifierContext) { s.name = v }
func (s *MetadataDeclContext) METADATA() antlr.TerminalNode {
return s.GetToken(bicepParserMETADATA, 0)
}
func (s *MetadataDeclContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *MetadataDeclContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *MetadataDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *MetadataDeclContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *MetadataDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *MetadataDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *MetadataDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitMetadataDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) MetadataDecl() (localctx IMetadataDeclContext) {
localctx = NewMetadataDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 8, bicepParserRULE_metadataDecl)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(110)
p.Match(bicepParserMETADATA)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(111)
var _x = p.Identifier()
localctx.(*MetadataDeclContext).name = _x
}
{
p.SetState(112)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(113)
p.expression(0)
}
{
p.SetState(114)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
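// Editor's note: the calls above are the mechanical translation of the rule
//
//	metadataDecl: METADATA name=identifier ASSIGN expression NL;
//
// (reconstructed from the generated Match/rule invocations for orientation;
// the authoritative definition lives in the source .g4 grammar).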
// IParameterDeclContext is an interface to support dynamic dispatch.
type IParameterDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule context.
GetName() IIdentifierContext
// GetType_ returns the type_ rule context.
GetType_() IInterpStringContext
// SetName sets the name rule context.
SetName(IIdentifierContext)
// SetType_ sets the type_ rule context.
SetType_(IInterpStringContext)
// Getter signatures
PARAM() antlr.TerminalNode
NL() antlr.TerminalNode
Identifier() IIdentifierContext
TypeExpression() ITypeExpressionContext
RESOURCE() antlr.TerminalNode
AllDecorator() []IDecoratorContext
Decorator(i int) IDecoratorContext
InterpString() IInterpStringContext
ParameterDefaultValue() IParameterDefaultValueContext
// IsParameterDeclContext differentiates from other interfaces.
IsParameterDeclContext()
}
type ParameterDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
type_ IInterpStringContext
}
func NewEmptyParameterDeclContext() *ParameterDeclContext {
var p = new(ParameterDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_parameterDecl
return p
}
func InitEmptyParameterDeclContext(p *ParameterDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_parameterDecl
}
func (*ParameterDeclContext) IsParameterDeclContext() {}
func NewParameterDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ParameterDeclContext {
var p = new(ParameterDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_parameterDecl
return p
}
func (s *ParameterDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *ParameterDeclContext) GetName() IIdentifierContext { return s.name }
func (s *ParameterDeclContext) GetType_() IInterpStringContext { return s.type_ }
func (s *ParameterDeclContext) SetName(v IIdentifierContext) { s.name = v }
func (s *ParameterDeclContext) SetType_(v IInterpStringContext) { s.type_ = v }
func (s *ParameterDeclContext) PARAM() antlr.TerminalNode {
return s.GetToken(bicepParserPARAM, 0)
}
func (s *ParameterDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *ParameterDeclContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ParameterDeclContext) TypeExpression() ITypeExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ITypeExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ITypeExpressionContext)
}
func (s *ParameterDeclContext) RESOURCE() antlr.TerminalNode {
return s.GetToken(bicepParserRESOURCE, 0)
}
func (s *ParameterDeclContext) AllDecorator() []IDecoratorContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IDecoratorContext); ok {
len++
}
}
tst := make([]IDecoratorContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IDecoratorContext); ok {
tst[i] = t.(IDecoratorContext)
i++
}
}
return tst
}
func (s *ParameterDeclContext) Decorator(i int) IDecoratorContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IDecoratorContext)
}
func (s *ParameterDeclContext) InterpString() IInterpStringContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IInterpStringContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IInterpStringContext)
}
func (s *ParameterDeclContext) ParameterDefaultValue() IParameterDefaultValueContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IParameterDefaultValueContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IParameterDefaultValueContext)
}
func (s *ParameterDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ParameterDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ParameterDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitParameterDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ParameterDecl() (localctx IParameterDeclContext) {
localctx = NewParameterDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 10, bicepParserRULE_parameterDecl)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(119)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserAT {
{
p.SetState(116)
p.Decorator()
}
p.SetState(121)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(122)
p.Match(bicepParserPARAM)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(123)
var _x = p.Identifier()
localctx.(*ParameterDeclContext).name = _x
}
p.SetState(133)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 8, p.GetParserRuleContext()) {
case 1:
{
p.SetState(124)
p.TypeExpression()
}
p.SetState(126)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserASSIGN {
{
p.SetState(125)
p.ParameterDefaultValue()
}
}
case 2:
{
p.SetState(128)
p.Match(bicepParserRESOURCE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(129)
var _x = p.InterpString()
localctx.(*ParameterDeclContext).type_ = _x
}
p.SetState(131)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserASSIGN {
{
p.SetState(130)
p.ParameterDefaultValue()
}
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
{
p.SetState(135)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
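// Example (editor's sketch): both alternatives chosen by AdaptivePredict in
// ParameterDecl above route an optional default through the same child, so a
// nil check on ParameterDefaultValue covers them uniformly.
func parameterDefaultText(param IParameterDeclContext) string {
	if dv := param.ParameterDefaultValue(); dv != nil {
		if expr := dv.Expression(); expr != nil { // nil only after a recognition error
			return expr.GetText()
		}
	}
	return ""
}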
// IParameterDefaultValueContext is an interface to support dynamic dispatch.
type IParameterDefaultValueContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
ASSIGN() antlr.TerminalNode
Expression() IExpressionContext
// IsParameterDefaultValueContext differentiates from other interfaces.
IsParameterDefaultValueContext()
}
type ParameterDefaultValueContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyParameterDefaultValueContext() *ParameterDefaultValueContext {
var p = new(ParameterDefaultValueContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_parameterDefaultValue
return p
}
func InitEmptyParameterDefaultValueContext(p *ParameterDefaultValueContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_parameterDefaultValue
}
func (*ParameterDefaultValueContext) IsParameterDefaultValueContext() {}
func NewParameterDefaultValueContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ParameterDefaultValueContext {
var p = new(ParameterDefaultValueContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_parameterDefaultValue
return p
}
func (s *ParameterDefaultValueContext) GetParser() antlr.Parser { return s.parser }
func (s *ParameterDefaultValueContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *ParameterDefaultValueContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ParameterDefaultValueContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ParameterDefaultValueContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ParameterDefaultValueContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitParameterDefaultValue(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ParameterDefaultValue() (localctx IParameterDefaultValueContext) {
localctx = NewParameterDefaultValueContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 12, bicepParserRULE_parameterDefaultValue)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(137)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(138)
p.expression(0)
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ITypeDeclContext is an interface to support dynamic dispatch.
type ITypeDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule context.
GetName() IIdentifierContext
// SetName sets the name rule context.
SetName(IIdentifierContext)
// Getter signatures
TYPE() antlr.TerminalNode
ASSIGN() antlr.TerminalNode
TypeExpression() ITypeExpressionContext
NL() antlr.TerminalNode
Identifier() IIdentifierContext
AllDecorator() []IDecoratorContext
Decorator(i int) IDecoratorContext
// IsTypeDeclContext differentiates from other interfaces.
IsTypeDeclContext()
}
type TypeDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
}
func NewEmptyTypeDeclContext() *TypeDeclContext {
var p = new(TypeDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_typeDecl
return p
}
func InitEmptyTypeDeclContext(p *TypeDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_typeDecl
}
func (*TypeDeclContext) IsTypeDeclContext() {}
func NewTypeDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *TypeDeclContext {
var p = new(TypeDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_typeDecl
return p
}
func (s *TypeDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *TypeDeclContext) GetName() IIdentifierContext { return s.name }
func (s *TypeDeclContext) SetName(v IIdentifierContext) { s.name = v }
func (s *TypeDeclContext) TYPE() antlr.TerminalNode {
return s.GetToken(bicepParserTYPE, 0)
}
func (s *TypeDeclContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *TypeDeclContext) TypeExpression() ITypeExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ITypeExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ITypeExpressionContext)
}
func (s *TypeDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *TypeDeclContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *TypeDeclContext) AllDecorator() []IDecoratorContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IDecoratorContext); ok {
len++
}
}
tst := make([]IDecoratorContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IDecoratorContext); ok {
tst[i] = t.(IDecoratorContext)
i++
}
}
return tst
}
func (s *TypeDeclContext) Decorator(i int) IDecoratorContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IDecoratorContext)
}
func (s *TypeDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *TypeDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *TypeDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitTypeDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) TypeDecl() (localctx ITypeDeclContext) {
localctx = NewTypeDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 14, bicepParserRULE_typeDecl)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(143)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserAT {
{
p.SetState(140)
p.Decorator()
}
p.SetState(145)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(146)
p.Match(bicepParserTYPE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(147)
var _x = p.Identifier()
localctx.(*TypeDeclContext).name = _x
}
{
p.SetState(148)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(149)
p.TypeExpression()
}
{
p.SetState(150)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
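// Example (editor's sketch): the leading for-loop over bicepParserAT in
// TypeDecl above implements the decorator* prefix of the rule; the collected
// children come back through the AllDecorator accessor.
func typeDecoratorTexts(decl ITypeDeclContext) []string {
	decorators := decl.AllDecorator()
	texts := make([]string, 0, len(decorators))
	for _, d := range decorators {
		texts = append(texts, d.GetText()) // e.g. "@description('...')"
	}
	return texts
}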
// IVariableDeclContext is an interface to support dynamic dispatch.
type IVariableDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule context.
GetName() IIdentifierContext
// SetName sets the name rule context.
SetName(IIdentifierContext)
// Getter signatures
VAR() antlr.TerminalNode
ASSIGN() antlr.TerminalNode
Expression() IExpressionContext
NL() antlr.TerminalNode
Identifier() IIdentifierContext
AllDecorator() []IDecoratorContext
Decorator(i int) IDecoratorContext
// IsVariableDeclContext differentiates from other interfaces.
IsVariableDeclContext()
}
type VariableDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
}
func NewEmptyVariableDeclContext() *VariableDeclContext {
var p = new(VariableDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_variableDecl
return p
}
func InitEmptyVariableDeclContext(p *VariableDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_variableDecl
}
func (*VariableDeclContext) IsVariableDeclContext() {}
func NewVariableDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *VariableDeclContext {
var p = new(VariableDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_variableDecl
return p
}
func (s *VariableDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *VariableDeclContext) GetName() IIdentifierContext { return s.name }
func (s *VariableDeclContext) SetName(v IIdentifierContext) { s.name = v }
func (s *VariableDeclContext) VAR() antlr.TerminalNode {
return s.GetToken(bicepParserVAR, 0)
}
func (s *VariableDeclContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *VariableDeclContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *VariableDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *VariableDeclContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *VariableDeclContext) AllDecorator() []IDecoratorContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IDecoratorContext); ok {
len++
}
}
tst := make([]IDecoratorContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IDecoratorContext); ok {
tst[i] = t.(IDecoratorContext)
i++
}
}
return tst
}
func (s *VariableDeclContext) Decorator(i int) IDecoratorContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IDecoratorContext)
}
func (s *VariableDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *VariableDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *VariableDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitVariableDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) VariableDecl() (localctx IVariableDeclContext) {
localctx = NewVariableDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 16, bicepParserRULE_variableDecl)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(155)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserAT {
{
p.SetState(152)
p.Decorator()
}
p.SetState(157)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(158)
p.Match(bicepParserVAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(159)
var _x = p.Identifier()
localctx.(*VariableDeclContext).name = _x
}
{
p.SetState(160)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(161)
p.expression(0)
}
{
p.SetState(162)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
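// Example (editor's sketch): Accept on VariableDeclContext dispatches to
// VisitVariableDecl when the visitor satisfies the generated bicepVisitor
// interface. varNameCollector is hypothetical and assumes the BasebicepVisitor
// default implementation that ANTLR emits in a companion file.
type varNameCollector struct {
	BasebicepVisitor
	names []string
}

func (v *varNameCollector) VisitVariableDecl(ctx *VariableDeclContext) interface{} {
	if name := ctx.GetName(); name != nil {
		v.names = append(v.names, name.GetText())
	}
	return nil
}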
// IResourceDeclContext is an interface to support dynamic dispatch.
type IResourceDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule context.
GetName() IIdentifierContext
// GetType_ returns the type_ rule context.
GetType_() IInterpStringContext
// SetName sets the name rule context.
SetName(IIdentifierContext)
// SetType_ sets the type_ rule context.
SetType_(IInterpStringContext)
// Getter signatures
RESOURCE() antlr.TerminalNode
ASSIGN() antlr.TerminalNode
NL() antlr.TerminalNode
Identifier() IIdentifierContext
InterpString() IInterpStringContext
IfCondition() IIfConditionContext
Object() IObjectContext
ForExpression() IForExpressionContext
AllDecorator() []IDecoratorContext
Decorator(i int) IDecoratorContext
EXISTING() antlr.TerminalNode
// IsResourceDeclContext differentiates from other interfaces.
IsResourceDeclContext()
}
type ResourceDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
type_ IInterpStringContext
}
func NewEmptyResourceDeclContext() *ResourceDeclContext {
var p = new(ResourceDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_resourceDecl
return p
}
func InitEmptyResourceDeclContext(p *ResourceDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_resourceDecl
}
func (*ResourceDeclContext) IsResourceDeclContext() {}
func NewResourceDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ResourceDeclContext {
var p = new(ResourceDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_resourceDecl
return p
}
func (s *ResourceDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *ResourceDeclContext) GetName() IIdentifierContext { return s.name }
func (s *ResourceDeclContext) GetType_() IInterpStringContext { return s.type_ }
func (s *ResourceDeclContext) SetName(v IIdentifierContext) { s.name = v }
func (s *ResourceDeclContext) SetType_(v IInterpStringContext) { s.type_ = v }
func (s *ResourceDeclContext) RESOURCE() antlr.TerminalNode {
return s.GetToken(bicepParserRESOURCE, 0)
}
func (s *ResourceDeclContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *ResourceDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *ResourceDeclContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ResourceDeclContext) InterpString() IInterpStringContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IInterpStringContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IInterpStringContext)
}
func (s *ResourceDeclContext) IfCondition() IIfConditionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIfConditionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIfConditionContext)
}
func (s *ResourceDeclContext) Object() IObjectContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObjectContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IObjectContext)
}
func (s *ResourceDeclContext) ForExpression() IForExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IForExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IForExpressionContext)
}
func (s *ResourceDeclContext) AllDecorator() []IDecoratorContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IDecoratorContext); ok {
len++
}
}
tst := make([]IDecoratorContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IDecoratorContext); ok {
tst[i] = t.(IDecoratorContext)
i++
}
}
return tst
}
func (s *ResourceDeclContext) Decorator(i int) IDecoratorContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IDecoratorContext)
}
func (s *ResourceDeclContext) EXISTING() antlr.TerminalNode {
return s.GetToken(bicepParserEXISTING, 0)
}
func (s *ResourceDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ResourceDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ResourceDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitResourceDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ResourceDecl() (localctx IResourceDeclContext) {
localctx = NewResourceDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 18, bicepParserRULE_resourceDecl)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(167)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserAT {
{
p.SetState(164)
p.Decorator()
}
p.SetState(169)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(170)
p.Match(bicepParserRESOURCE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(171)
var _x = p.Identifier()
localctx.(*ResourceDeclContext).name = _x
}
{
p.SetState(172)
var _x = p.InterpString()
localctx.(*ResourceDeclContext).type_ = _x
}
p.SetState(174)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserEXISTING {
{
p.SetState(173)
p.Match(bicepParserEXISTING)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
}
{
p.SetState(176)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(180)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserIF:
{
p.SetState(177)
p.IfCondition()
}
case bicepParserOBRACE:
{
p.SetState(178)
p.Object()
}
case bicepParserOBRACK:
{
p.SetState(179)
p.ForExpression()
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
{
p.SetState(182)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
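// Example (editor's sketch): the LA(1) switch above admits exactly three body
// shapes after '='; a consumer can mirror that decision with nil checks on
// the generated getters.
func resourceBodyKind(decl IResourceDeclContext) string {
	switch {
	case decl.IfCondition() != nil:
		return "conditional" // resource ... = if (...) {...}
	case decl.ForExpression() != nil:
		return "loop" // resource ... = [for ... in ...: ...]
	case decl.Object() != nil:
		return "object" // resource ... = {...}
	default:
		return "unknown" // only reachable after a recognition error
	}
}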
// IModuleDeclContext is an interface to support dynamic dispatch.
type IModuleDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule context.
GetName() IIdentifierContext
// GetType_ returns the type_ rule context.
GetType_() IInterpStringContext
// SetName sets the name rule context.
SetName(IIdentifierContext)
// SetType_ sets the type_ rule context.
SetType_(IInterpStringContext)
// Getter signatures
MODULE() antlr.TerminalNode
ASSIGN() antlr.TerminalNode
NL() antlr.TerminalNode
Identifier() IIdentifierContext
InterpString() IInterpStringContext
IfCondition() IIfConditionContext
Object() IObjectContext
ForExpression() IForExpressionContext
AllDecorator() []IDecoratorContext
Decorator(i int) IDecoratorContext
// IsModuleDeclContext differentiates from other interfaces.
IsModuleDeclContext()
}
type ModuleDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
type_ IInterpStringContext
}
func NewEmptyModuleDeclContext() *ModuleDeclContext {
var p = new(ModuleDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_moduleDecl
return p
}
func InitEmptyModuleDeclContext(p *ModuleDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_moduleDecl
}
func (*ModuleDeclContext) IsModuleDeclContext() {}
func NewModuleDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ModuleDeclContext {
var p = new(ModuleDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_moduleDecl
return p
}
func (s *ModuleDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *ModuleDeclContext) GetName() IIdentifierContext { return s.name }
func (s *ModuleDeclContext) GetType_() IInterpStringContext { return s.type_ }
func (s *ModuleDeclContext) SetName(v IIdentifierContext) { s.name = v }
func (s *ModuleDeclContext) SetType_(v IInterpStringContext) { s.type_ = v }
func (s *ModuleDeclContext) MODULE() antlr.TerminalNode {
return s.GetToken(bicepParserMODULE, 0)
}
func (s *ModuleDeclContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *ModuleDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *ModuleDeclContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ModuleDeclContext) InterpString() IInterpStringContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IInterpStringContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IInterpStringContext)
}
func (s *ModuleDeclContext) IfCondition() IIfConditionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIfConditionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIfConditionContext)
}
func (s *ModuleDeclContext) Object() IObjectContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObjectContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IObjectContext)
}
func (s *ModuleDeclContext) ForExpression() IForExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IForExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IForExpressionContext)
}
func (s *ModuleDeclContext) AllDecorator() []IDecoratorContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IDecoratorContext); ok {
len++
}
}
tst := make([]IDecoratorContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IDecoratorContext); ok {
tst[i] = t.(IDecoratorContext)
i++
}
}
return tst
}
func (s *ModuleDeclContext) Decorator(i int) IDecoratorContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IDecoratorContext)
}
func (s *ModuleDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ModuleDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ModuleDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitModuleDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ModuleDecl() (localctx IModuleDeclContext) {
localctx = NewModuleDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 20, bicepParserRULE_moduleDecl)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(187)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserAT {
{
p.SetState(184)
p.Decorator()
}
p.SetState(189)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(190)
p.Match(bicepParserMODULE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(191)
var _x = p.Identifier()
localctx.(*ModuleDeclContext).name = _x
}
{
p.SetState(192)
var _x = p.InterpString()
localctx.(*ModuleDeclContext).type_ = _x
}
{
p.SetState(193)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(197)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserIF:
{
p.SetState(194)
p.IfCondition()
}
case bicepParserOBRACE:
{
p.SetState(195)
p.Object()
}
case bicepParserOBRACK:
{
p.SetState(196)
p.ForExpression()
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
{
p.SetState(199)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
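// Editor's note: ModuleDecl is structurally identical to ResourceDecl above
// except that it lacks the optional EXISTING token, so the same
// if/object/for nil-probing shown in resourceBodyKind applies to module
// bodies unchanged.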
// IOutputDeclContext is an interface to support dynamic dispatch.
type IOutputDeclContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule context.
GetName() IIdentifierContext
// GetType1 returns the type1 rule context.
GetType1() IIdentifierContext
// GetType2 returns the type2 rule context.
GetType2() IInterpStringContext
// SetName sets the name rule context.
SetName(IIdentifierContext)
// SetType1 sets the type1 rule context.
SetType1(IIdentifierContext)
// SetType2 sets the type2 rule context.
SetType2(IInterpStringContext)
// Getter signatures
OUTPUT() antlr.TerminalNode
ASSIGN() antlr.TerminalNode
Expression() IExpressionContext
NL() antlr.TerminalNode
AllIdentifier() []IIdentifierContext
Identifier(i int) IIdentifierContext
RESOURCE() antlr.TerminalNode
AllDecorator() []IDecoratorContext
Decorator(i int) IDecoratorContext
InterpString() IInterpStringContext
// IsOutputDeclContext differentiates from other interfaces.
IsOutputDeclContext()
}
type OutputDeclContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
type1 IIdentifierContext
type2 IInterpStringContext
}
func NewEmptyOutputDeclContext() *OutputDeclContext {
var p = new(OutputDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_outputDecl
return p
}
func InitEmptyOutputDeclContext(p *OutputDeclContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_outputDecl
}
func (*OutputDeclContext) IsOutputDeclContext() {}
func NewOutputDeclContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *OutputDeclContext {
var p = new(OutputDeclContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_outputDecl
return p
}
func (s *OutputDeclContext) GetParser() antlr.Parser { return s.parser }
func (s *OutputDeclContext) GetName() IIdentifierContext { return s.name }
func (s *OutputDeclContext) GetType1() IIdentifierContext { return s.type1 }
func (s *OutputDeclContext) GetType2() IInterpStringContext { return s.type2 }
func (s *OutputDeclContext) SetName(v IIdentifierContext) { s.name = v }
func (s *OutputDeclContext) SetType1(v IIdentifierContext) { s.type1 = v }
func (s *OutputDeclContext) SetType2(v IInterpStringContext) { s.type2 = v }
func (s *OutputDeclContext) OUTPUT() antlr.TerminalNode {
return s.GetToken(bicepParserOUTPUT, 0)
}
func (s *OutputDeclContext) ASSIGN() antlr.TerminalNode {
return s.GetToken(bicepParserASSIGN, 0)
}
func (s *OutputDeclContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *OutputDeclContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *OutputDeclContext) AllIdentifier() []IIdentifierContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IIdentifierContext); ok {
len++
}
}
tst := make([]IIdentifierContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IIdentifierContext); ok {
tst[i] = t.(IIdentifierContext)
i++
}
}
return tst
}
func (s *OutputDeclContext) Identifier(i int) IIdentifierContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *OutputDeclContext) RESOURCE() antlr.TerminalNode {
return s.GetToken(bicepParserRESOURCE, 0)
}
func (s *OutputDeclContext) AllDecorator() []IDecoratorContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IDecoratorContext); ok {
len++
}
}
tst := make([]IDecoratorContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IDecoratorContext); ok {
tst[i] = t.(IDecoratorContext)
i++
}
}
return tst
}
func (s *OutputDeclContext) Decorator(i int) IDecoratorContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IDecoratorContext)
}
func (s *OutputDeclContext) InterpString() IInterpStringContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IInterpStringContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IInterpStringContext)
}
func (s *OutputDeclContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *OutputDeclContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *OutputDeclContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitOutputDecl(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) OutputDecl() (localctx IOutputDeclContext) {
localctx = NewOutputDeclContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 22, bicepParserRULE_outputDecl)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(204)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserAT {
{
p.SetState(201)
p.Decorator()
}
p.SetState(206)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(207)
p.Match(bicepParserOUTPUT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(208)
var _x = p.Identifier()
localctx.(*OutputDeclContext).name = _x
}
p.SetState(212)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 17, p.GetParserRuleContext()) {
case 1:
{
p.SetState(209)
var _x = p.Identifier()
localctx.(*OutputDeclContext).type1 = _x
}
case 2:
{
p.SetState(210)
p.Match(bicepParserRESOURCE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(211)
var _x = p.InterpString()
localctx.(*OutputDeclContext).type2 = _x
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
{
p.SetState(214)
p.Match(bicepParserASSIGN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(215)
p.expression(0)
}
{
p.SetState(216)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
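// Example (editor's sketch): an output's declared type lands in one of two
// labeled slots depending on the alternative chosen above, so both must be
// probed.
func outputTypeText(decl IOutputDeclContext) string {
	if t := decl.GetType1(); t != nil { // plain identifier form, e.g. "string"
		return t.GetText()
	}
	if t := decl.GetType2(); t != nil { // RESOURCE interp-string form
		return t.GetText()
	}
	return ""
}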
// IIfConditionContext is an interface to support dynamic dispatch.
type IIfConditionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
IF() antlr.TerminalNode
ParenthesizedExpression() IParenthesizedExpressionContext
Object() IObjectContext
// IsIfConditionContext differentiates from other interfaces.
IsIfConditionContext()
}
type IfConditionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyIfConditionContext() *IfConditionContext {
var p = new(IfConditionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_ifCondition
return p
}
func InitEmptyIfConditionContext(p *IfConditionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_ifCondition
}
func (*IfConditionContext) IsIfConditionContext() {}
func NewIfConditionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *IfConditionContext {
var p = new(IfConditionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_ifCondition
return p
}
func (s *IfConditionContext) GetParser() antlr.Parser { return s.parser }
func (s *IfConditionContext) IF() antlr.TerminalNode {
return s.GetToken(bicepParserIF, 0)
}
func (s *IfConditionContext) ParenthesizedExpression() IParenthesizedExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IParenthesizedExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IParenthesizedExpressionContext)
}
func (s *IfConditionContext) Object() IObjectContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObjectContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IObjectContext)
}
func (s *IfConditionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *IfConditionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *IfConditionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitIfCondition(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) IfCondition() (localctx IIfConditionContext) {
localctx = NewIfConditionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 24, bicepParserRULE_ifCondition)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(218)
p.Match(bicepParserIF)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(219)
p.ParenthesizedExpression()
}
{
p.SetState(220)
p.Object()
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
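// Editor's note: IfCondition has a single alternative, equivalent to the rule
//
//	ifCondition: IF parenthesizedExpression object;
//
// so its getters only return nil when error recovery produced a partial tree.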
// IForExpressionContext is an interface to support dynamic dispatch.
type IForExpressionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetItem returns the item rule context.
GetItem() IIdentifierContext
// SetItem sets the item rule context.
SetItem(IIdentifierContext)
// Getter signatures
OBRACK() antlr.TerminalNode
FOR() antlr.TerminalNode
IN() antlr.TerminalNode
Expression() IExpressionContext
COL() antlr.TerminalNode
ForBody() IForBodyContext
CBRACK() antlr.TerminalNode
ForVariableBlock() IForVariableBlockContext
AllNL() []antlr.TerminalNode
NL(i int) antlr.TerminalNode
Identifier() IIdentifierContext
// IsForExpressionContext differentiates from other interfaces.
IsForExpressionContext()
}
type ForExpressionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
item IIdentifierContext
}
func NewEmptyForExpressionContext() *ForExpressionContext {
var p = new(ForExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_forExpression
return p
}
func InitEmptyForExpressionContext(p *ForExpressionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_forExpression
}
func (*ForExpressionContext) IsForExpressionContext() {}
func NewForExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ForExpressionContext {
var p = new(ForExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_forExpression
return p
}
func (s *ForExpressionContext) GetParser() antlr.Parser { return s.parser }
func (s *ForExpressionContext) GetItem() IIdentifierContext { return s.item }
func (s *ForExpressionContext) SetItem(v IIdentifierContext) { s.item = v }
func (s *ForExpressionContext) OBRACK() antlr.TerminalNode {
return s.GetToken(bicepParserOBRACK, 0)
}
func (s *ForExpressionContext) FOR() antlr.TerminalNode {
return s.GetToken(bicepParserFOR, 0)
}
func (s *ForExpressionContext) IN() antlr.TerminalNode {
return s.GetToken(bicepParserIN, 0)
}
func (s *ForExpressionContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ForExpressionContext) COL() antlr.TerminalNode {
return s.GetToken(bicepParserCOL, 0)
}
func (s *ForExpressionContext) ForBody() IForBodyContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IForBodyContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IForBodyContext)
}
func (s *ForExpressionContext) CBRACK() antlr.TerminalNode {
return s.GetToken(bicepParserCBRACK, 0)
}
func (s *ForExpressionContext) ForVariableBlock() IForVariableBlockContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IForVariableBlockContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IForVariableBlockContext)
}
func (s *ForExpressionContext) AllNL() []antlr.TerminalNode {
return s.GetTokens(bicepParserNL)
}
func (s *ForExpressionContext) NL(i int) antlr.TerminalNode {
return s.GetToken(bicepParserNL, i)
}
func (s *ForExpressionContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ForExpressionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ForExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ForExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitForExpression(s)
default:
return t.VisitChildren(s)
}
}
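// ForExpression parses the forExpression rule:
// '[' NL* 'for' (identifier | forVariableBlock) 'in' expression ':' forBody NL* ']',
// matching Bicep loops such as `[for item in list: item.name]` or
// `[for (item, i) in list: item]` (examples illustrative).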
func (p *bicepParser) ForExpression() (localctx IForExpressionContext) {
localctx = NewForExpressionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 26, bicepParserRULE_forExpression)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(222)
p.Match(bicepParserOBRACK)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(226)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserNL {
{
p.SetState(223)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(228)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(229)
p.Match(bicepParserFOR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(232)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserPARAM, bicepParserVAR, bicepParserTRUE, bicepParserFALSE, bicepParserNULL, bicepParserARRAY, bicepParserOBJECT, bicepParserRESOURCE, bicepParserOUTPUT, bicepParserTARGET_SCOPE, bicepParserIMPORT, bicepParserWITH, bicepParserAS, bicepParserMETADATA, bicepParserEXISTING, bicepParserTYPE, bicepParserMODULE, bicepParserSTRING, bicepParserINT, bicepParserBOOL, bicepParserIF, bicepParserFOR, bicepParserIN, bicepParserIDENTIFIER:
{
p.SetState(230)
var _x = p.Identifier()
localctx.(*ForExpressionContext).item = _x
}
case bicepParserOPAR:
{
p.SetState(231)
p.ForVariableBlock()
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
{
p.SetState(234)
p.Match(bicepParserIN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(235)
p.expression(0)
}
{
p.SetState(236)
p.Match(bicepParserCOL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(237)
p.ForBody()
}
p.SetState(241)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserNL {
{
p.SetState(238)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(243)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(244)
p.Match(bicepParserCBRACK)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IForVariableBlockContext is an interface to support dynamic dispatch.
type IForVariableBlockContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetItem returns the item rule context.
GetItem() IIdentifierContext
// GetIndex returns the index rule context.
GetIndex() IIdentifierContext
// SetItem sets the item rule context.
SetItem(IIdentifierContext)
// SetIndex sets the index rule context.
SetIndex(IIdentifierContext)
// Getter signatures
OPAR() antlr.TerminalNode
COMMA() antlr.TerminalNode
CPAR() antlr.TerminalNode
AllIdentifier() []IIdentifierContext
Identifier(i int) IIdentifierContext
// IsForVariableBlockContext differentiates from other interfaces.
IsForVariableBlockContext()
}
type ForVariableBlockContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
item IIdentifierContext
index IIdentifierContext
}
func NewEmptyForVariableBlockContext() *ForVariableBlockContext {
var p = new(ForVariableBlockContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_forVariableBlock
return p
}
func InitEmptyForVariableBlockContext(p *ForVariableBlockContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_forVariableBlock
}
func (*ForVariableBlockContext) IsForVariableBlockContext() {}
func NewForVariableBlockContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ForVariableBlockContext {
var p = new(ForVariableBlockContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_forVariableBlock
return p
}
func (s *ForVariableBlockContext) GetParser() antlr.Parser { return s.parser }
func (s *ForVariableBlockContext) GetItem() IIdentifierContext { return s.item }
func (s *ForVariableBlockContext) GetIndex() IIdentifierContext { return s.index }
func (s *ForVariableBlockContext) SetItem(v IIdentifierContext) { s.item = v }
func (s *ForVariableBlockContext) SetIndex(v IIdentifierContext) { s.index = v }
func (s *ForVariableBlockContext) OPAR() antlr.TerminalNode {
return s.GetToken(bicepParserOPAR, 0)
}
func (s *ForVariableBlockContext) COMMA() antlr.TerminalNode {
return s.GetToken(bicepParserCOMMA, 0)
}
func (s *ForVariableBlockContext) CPAR() antlr.TerminalNode {
return s.GetToken(bicepParserCPAR, 0)
}
func (s *ForVariableBlockContext) AllIdentifier() []IIdentifierContext {
children := s.GetChildren()
n := 0 // count matching children first so the slice is allocated once
for _, ctx := range children {
if _, ok := ctx.(IIdentifierContext); ok {
n++
}
}
tst := make([]IIdentifierContext, n)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IIdentifierContext); ok {
tst[i] = t
i++
}
}
return tst
}
func (s *ForVariableBlockContext) Identifier(i int) IIdentifierContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ForVariableBlockContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ForVariableBlockContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ForVariableBlockContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitForVariableBlock(s)
default:
return t.VisitChildren(s)
}
}
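// ForVariableBlock parses the forVariableBlock rule: '(' identifier ',' identifier ')',
// binding the loop item and index, e.g. the `(item, i)` part of a for-expression.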
func (p *bicepParser) ForVariableBlock() (localctx IForVariableBlockContext) {
localctx = NewForVariableBlockContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 28, bicepParserRULE_forVariableBlock)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(246)
p.Match(bicepParserOPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(247)
var _x = p.Identifier()
localctx.(*ForVariableBlockContext).item = _x
}
{
p.SetState(248)
p.Match(bicepParserCOMMA)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(249)
var _x = p.Identifier()
localctx.(*ForVariableBlockContext).index = _x
}
{
p.SetState(250)
p.Match(bicepParserCPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IForBodyContext is an interface to support dynamic dispatch.
type IForBodyContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetBody returns the body rule context.
GetBody() IExpressionContext
// SetBody sets the body rule context.
SetBody(IExpressionContext)
// Getter signatures
Expression() IExpressionContext
IfCondition() IIfConditionContext
// IsForBodyContext differentiates from other interfaces.
IsForBodyContext()
}
type ForBodyContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
body IExpressionContext
}
func NewEmptyForBodyContext() *ForBodyContext {
var p = new(ForBodyContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_forBody
return p
}
func InitEmptyForBodyContext(p *ForBodyContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_forBody
}
func (*ForBodyContext) IsForBodyContext() {}
func NewForBodyContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ForBodyContext {
var p = new(ForBodyContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_forBody
return p
}
func (s *ForBodyContext) GetParser() antlr.Parser { return s.parser }
func (s *ForBodyContext) GetBody() IExpressionContext { return s.body }
func (s *ForBodyContext) SetBody(v IExpressionContext) { s.body = v }
func (s *ForBodyContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ForBodyContext) IfCondition() IIfConditionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIfConditionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIfConditionContext)
}
func (s *ForBodyContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ForBodyContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ForBodyContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitForBody(s)
default:
return t.VisitChildren(s)
}
}
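// ForBody parses the forBody rule: either a plain expression or an ifCondition,
// so both `[for x in xs: x.name]` and `[for x in xs: if (x.enabled) { ... }]`
// are accepted (examples illustrative).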
func (p *bicepParser) ForBody() (localctx IForBodyContext) {
localctx = NewForBodyContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 30, bicepParserRULE_forBody)
p.SetState(254)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 21, p.GetParserRuleContext()) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(252)
var _x = p.expression(0)
localctx.(*ForBodyContext).body = _x
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(253)
p.IfCondition()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IInterpStringContext is an interface to support dynamic dispatch.
type IInterpStringContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
STRING_LEFT_PIECE() antlr.TerminalNode
AllExpression() []IExpressionContext
Expression(i int) IExpressionContext
STRING_RIGHT_PIECE() antlr.TerminalNode
AllSTRING_MIDDLE_PIECE() []antlr.TerminalNode
STRING_MIDDLE_PIECE(i int) antlr.TerminalNode
STRING_COMPLETE() antlr.TerminalNode
// IsInterpStringContext differentiates from other interfaces.
IsInterpStringContext()
}
type InterpStringContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyInterpStringContext() *InterpStringContext {
var p = new(InterpStringContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_interpString
return p
}
func InitEmptyInterpStringContext(p *InterpStringContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_interpString
}
func (*InterpStringContext) IsInterpStringContext() {}
func NewInterpStringContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *InterpStringContext {
var p = new(InterpStringContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_interpString
return p
}
func (s *InterpStringContext) GetParser() antlr.Parser { return s.parser }
func (s *InterpStringContext) STRING_LEFT_PIECE() antlr.TerminalNode {
return s.GetToken(bicepParserSTRING_LEFT_PIECE, 0)
}
func (s *InterpStringContext) AllExpression() []IExpressionContext {
children := s.GetChildren()
n := 0 // count matching children first so the slice is allocated once
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
n++
}
}
tst := make([]IExpressionContext, n)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t
i++
}
}
return tst
}
func (s *InterpStringContext) Expression(i int) IExpressionContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *InterpStringContext) STRING_RIGHT_PIECE() antlr.TerminalNode {
return s.GetToken(bicepParserSTRING_RIGHT_PIECE, 0)
}
func (s *InterpStringContext) AllSTRING_MIDDLE_PIECE() []antlr.TerminalNode {
return s.GetTokens(bicepParserSTRING_MIDDLE_PIECE)
}
func (s *InterpStringContext) STRING_MIDDLE_PIECE(i int) antlr.TerminalNode {
return s.GetToken(bicepParserSTRING_MIDDLE_PIECE, i)
}
func (s *InterpStringContext) STRING_COMPLETE() antlr.TerminalNode {
return s.GetToken(bicepParserSTRING_COMPLETE, 0)
}
func (s *InterpStringContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *InterpStringContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *InterpStringContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitInterpString(s)
default:
return t.VisitChildren(s)
}
}
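// InterpString parses the interpString rule: either an interpolated string,
// STRING_LEFT_PIECE (expression STRING_MIDDLE_PIECE)* expression STRING_RIGHT_PIECE,
// e.g. `'prefix${name}-${index}suffix'`, or a single STRING_COMPLETE token with
// no interpolation.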
func (p *bicepParser) InterpString() (localctx IInterpStringContext) {
localctx = NewInterpStringContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 32, bicepParserRULE_interpString)
var _alt int
p.SetState(269)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserSTRING_LEFT_PIECE:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(256)
p.Match(bicepParserSTRING_LEFT_PIECE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(262)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 22, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
if _alt == 1 {
{
p.SetState(257)
p.expression(0)
}
{
p.SetState(258)
p.Match(bicepParserSTRING_MIDDLE_PIECE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
}
p.SetState(264)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 22, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
}
{
p.SetState(265)
p.expression(0)
}
{
p.SetState(266)
p.Match(bicepParserSTRING_RIGHT_PIECE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case bicepParserSTRING_COMPLETE:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(268)
p.Match(bicepParserSTRING_COMPLETE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IExpressionContext is an interface to support dynamic dispatch.
type IExpressionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetProperty returns the property rule context.
GetProperty() IIdentifierContext
// GetName returns the name rule context.
GetName() IIdentifierContext
// SetProperty sets the property rule context.
SetProperty(IIdentifierContext)
// SetName sets the name rule context.
SetName(IIdentifierContext)
// Getter signatures
PrimaryExpression() IPrimaryExpressionContext
AllExpression() []IExpressionContext
Expression(i int) IExpressionContext
QMARK() antlr.TerminalNode
COL() antlr.TerminalNode
LogicCharacter() ILogicCharacterContext
OBRACK() antlr.TerminalNode
CBRACK() antlr.TerminalNode
DOT() antlr.TerminalNode
Identifier() IIdentifierContext
FunctionCall() IFunctionCallContext
// IsExpressionContext differentiates from other interfaces.
IsExpressionContext()
}
type ExpressionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
property IIdentifierContext
name IIdentifierContext
}
func NewEmptyExpressionContext() *ExpressionContext {
var p = new(ExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_expression
return p
}
func InitEmptyExpressionContext(p *ExpressionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_expression
}
func (*ExpressionContext) IsExpressionContext() {}
func NewExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExpressionContext {
var p = new(ExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_expression
return p
}
func (s *ExpressionContext) GetParser() antlr.Parser { return s.parser }
func (s *ExpressionContext) GetProperty() IIdentifierContext { return s.property }
func (s *ExpressionContext) GetName() IIdentifierContext { return s.name }
func (s *ExpressionContext) SetProperty(v IIdentifierContext) { s.property = v }
func (s *ExpressionContext) SetName(v IIdentifierContext) { s.name = v }
func (s *ExpressionContext) PrimaryExpression() IPrimaryExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IPrimaryExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IPrimaryExpressionContext)
}
func (s *ExpressionContext) AllExpression() []IExpressionContext {
children := s.GetChildren()
n := 0 // count matching children first so the slice is allocated once
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
n++
}
}
tst := make([]IExpressionContext, n)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t
i++
}
}
return tst
}
func (s *ExpressionContext) Expression(i int) IExpressionContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ExpressionContext) QMARK() antlr.TerminalNode {
return s.GetToken(bicepParserQMARK, 0)
}
func (s *ExpressionContext) COL() antlr.TerminalNode {
return s.GetToken(bicepParserCOL, 0)
}
func (s *ExpressionContext) LogicCharacter() ILogicCharacterContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ILogicCharacterContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ILogicCharacterContext)
}
func (s *ExpressionContext) OBRACK() antlr.TerminalNode {
return s.GetToken(bicepParserOBRACK, 0)
}
func (s *ExpressionContext) CBRACK() antlr.TerminalNode {
return s.GetToken(bicepParserCBRACK, 0)
}
func (s *ExpressionContext) DOT() antlr.TerminalNode {
return s.GetToken(bicepParserDOT, 0)
}
func (s *ExpressionContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ExpressionContext) FunctionCall() IFunctionCallContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFunctionCallContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IFunctionCallContext)
}
func (s *ExpressionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitExpression(s)
default:
return t.VisitChildren(s)
}
}
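// Expression is the public entry point for the left-recursive expression rule;
// expression(_p) implements it with ANTLR's precedence climbing. After a primary
// expression it accepts, in decreasing precedence: indexing `e[e]`, the ternary
// `c ? a : b`, property access `e.identifier`, method-style calls via `.functionCall`,
// a ':'-qualified name, and comparisons via logicCharacter.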
func (p *bicepParser) Expression() (localctx IExpressionContext) {
return p.expression(0)
}
func (p *bicepParser) expression(_p int) (localctx IExpressionContext) {
var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
_parentState := p.GetState()
localctx = NewExpressionContext(p, p.GetParserRuleContext(), _parentState)
var _prevctx IExpressionContext = localctx
var _ antlr.ParserRuleContext = _prevctx // blank assignment keeps _prevctx referenced, preventing an unused-variable error
_startState := 34
p.EnterRecursionRule(localctx, 34, bicepParserRULE_expression, _p)
var _alt int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(272)
p.PrimaryExpression()
}
p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1))
p.SetState(300)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 25, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
if _alt == 1 {
if p.GetParseListeners() != nil {
p.TriggerExitRuleEvent()
}
_prevctx = localctx
p.SetState(298)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 24, p.GetParserRuleContext()) {
case 1:
localctx = NewExpressionContext(p, _parentctx, _parentState)
p.PushNewRecursionContext(localctx, _startState, bicepParserRULE_expression)
p.SetState(274)
if !(p.Precpred(p.GetParserRuleContext(), 6)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 6)", ""))
goto errorExit
}
{
p.SetState(275)
p.Match(bicepParserQMARK)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(276)
p.expression(0)
}
{
p.SetState(277)
p.Match(bicepParserCOL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(278)
p.expression(7)
}
case 2:
localctx = NewExpressionContext(p, _parentctx, _parentState)
p.PushNewRecursionContext(localctx, _startState, bicepParserRULE_expression)
p.SetState(280)
if !(p.Precpred(p.GetParserRuleContext(), 2)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", ""))
goto errorExit
}
{
p.SetState(281)
p.LogicCharacter()
}
{
p.SetState(282)
p.expression(3)
}
case 3:
localctx = NewExpressionContext(p, _parentctx, _parentState)
p.PushNewRecursionContext(localctx, _startState, bicepParserRULE_expression)
p.SetState(284)
if !(p.Precpred(p.GetParserRuleContext(), 7)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 7)", ""))
goto errorExit
}
{
p.SetState(285)
p.Match(bicepParserOBRACK)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(286)
p.expression(0)
}
{
p.SetState(287)
p.Match(bicepParserCBRACK)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 4:
localctx = NewExpressionContext(p, _parentctx, _parentState)
p.PushNewRecursionContext(localctx, _startState, bicepParserRULE_expression)
p.SetState(289)
if !(p.Precpred(p.GetParserRuleContext(), 5)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 5)", ""))
goto errorExit
}
{
p.SetState(290)
p.Match(bicepParserDOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(291)
var _x = p.Identifier()
localctx.(*ExpressionContext).property = _x
}
case 5:
localctx = NewExpressionContext(p, _parentctx, _parentState)
p.PushNewRecursionContext(localctx, _startState, bicepParserRULE_expression)
p.SetState(292)
if !(p.Precpred(p.GetParserRuleContext(), 4)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 4)", ""))
goto errorExit
}
{
p.SetState(293)
p.Match(bicepParserDOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(294)
p.FunctionCall()
}
case 6:
localctx = NewExpressionContext(p, _parentctx, _parentState)
p.PushNewRecursionContext(localctx, _startState, bicepParserRULE_expression)
p.SetState(295)
if !(p.Precpred(p.GetParserRuleContext(), 3)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", ""))
goto errorExit
}
{
p.SetState(296)
p.Match(bicepParserCOL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(297)
var _x = p.Identifier()
localctx.(*ExpressionContext).name = _x
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
}
p.SetState(302)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 25, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.UnrollRecursionContexts(_parentctx)
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ILambdaExpressionContext is an interface to support dynamic dispatch.
type ILambdaExpressionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
ARROW() antlr.TerminalNode
Expression() IExpressionContext
OPAR() antlr.TerminalNode
CPAR() antlr.TerminalNode
Identifier() IIdentifierContext
ArgumentList() IArgumentListContext
// IsLambdaExpressionContext differentiates from other interfaces.
IsLambdaExpressionContext()
}
type LambdaExpressionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyLambdaExpressionContext() *LambdaExpressionContext {
var p = new(LambdaExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_lambdaExpression
return p
}
func InitEmptyLambdaExpressionContext(p *LambdaExpressionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_lambdaExpression
}
func (*LambdaExpressionContext) IsLambdaExpressionContext() {}
func NewLambdaExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *LambdaExpressionContext {
var p = new(LambdaExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_lambdaExpression
return p
}
func (s *LambdaExpressionContext) GetParser() antlr.Parser { return s.parser }
func (s *LambdaExpressionContext) ARROW() antlr.TerminalNode {
return s.GetToken(bicepParserARROW, 0)
}
func (s *LambdaExpressionContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *LambdaExpressionContext) OPAR() antlr.TerminalNode {
return s.GetToken(bicepParserOPAR, 0)
}
func (s *LambdaExpressionContext) CPAR() antlr.TerminalNode {
return s.GetToken(bicepParserCPAR, 0)
}
func (s *LambdaExpressionContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *LambdaExpressionContext) ArgumentList() IArgumentListContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IArgumentListContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IArgumentListContext)
}
func (s *LambdaExpressionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *LambdaExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *LambdaExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitLambdaExpression(s)
default:
return t.VisitChildren(s)
}
}
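// LambdaExpression parses the lambdaExpression rule: either a parenthesized
// parameter list `( argumentList? )` or a single identifier, followed by '=>'
// and a body expression, e.g. `(a, b) => a` or `x => x.name`.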
func (p *bicepParser) LambdaExpression() (localctx ILambdaExpressionContext) {
localctx = NewLambdaExpressionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 36, bicepParserRULE_lambdaExpression)
var _la int
p.EnterOuterAlt(localctx, 1)
p.SetState(309)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserOPAR:
{
p.SetState(303)
p.Match(bicepParserOPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(305)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&1691035998605394) != 0 {
{
p.SetState(304)
p.ArgumentList()
}
}
{
p.SetState(307)
p.Match(bicepParserCPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case bicepParserPARAM, bicepParserVAR, bicepParserTRUE, bicepParserFALSE, bicepParserNULL, bicepParserARRAY, bicepParserOBJECT, bicepParserRESOURCE, bicepParserOUTPUT, bicepParserTARGET_SCOPE, bicepParserIMPORT, bicepParserWITH, bicepParserAS, bicepParserMETADATA, bicepParserEXISTING, bicepParserTYPE, bicepParserMODULE, bicepParserSTRING, bicepParserINT, bicepParserBOOL, bicepParserIF, bicepParserFOR, bicepParserIN, bicepParserIDENTIFIER:
{
p.SetState(308)
p.Identifier()
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
{
p.SetState(311)
p.Match(bicepParserARROW)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(312)
p.expression(0)
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ILogicCharacterContext is an interface to support dynamic dispatch.
type ILogicCharacterContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
GT() antlr.TerminalNode
GTE() antlr.TerminalNode
LT() antlr.TerminalNode
LTE() antlr.TerminalNode
EQ() antlr.TerminalNode
NEQ() antlr.TerminalNode
// IsLogicCharacterContext differentiates from other interfaces.
IsLogicCharacterContext()
}
type LogicCharacterContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyLogicCharacterContext() *LogicCharacterContext {
var p = new(LogicCharacterContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_logicCharacter
return p
}
func InitEmptyLogicCharacterContext(p *LogicCharacterContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_logicCharacter
}
func (*LogicCharacterContext) IsLogicCharacterContext() {}
func NewLogicCharacterContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *LogicCharacterContext {
var p = new(LogicCharacterContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_logicCharacter
return p
}
func (s *LogicCharacterContext) GetParser() antlr.Parser { return s.parser }
func (s *LogicCharacterContext) GT() antlr.TerminalNode {
return s.GetToken(bicepParserGT, 0)
}
func (s *LogicCharacterContext) GTE() antlr.TerminalNode {
return s.GetToken(bicepParserGTE, 0)
}
func (s *LogicCharacterContext) LT() antlr.TerminalNode {
return s.GetToken(bicepParserLT, 0)
}
func (s *LogicCharacterContext) LTE() antlr.TerminalNode {
return s.GetToken(bicepParserLTE, 0)
}
func (s *LogicCharacterContext) EQ() antlr.TerminalNode {
return s.GetToken(bicepParserEQ, 0)
}
func (s *LogicCharacterContext) NEQ() antlr.TerminalNode {
return s.GetToken(bicepParserNEQ, 0)
}
func (s *LogicCharacterContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *LogicCharacterContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *LogicCharacterContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitLogicCharacter(s)
default:
return t.VisitChildren(s)
}
}
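// LogicCharacter parses the logicCharacter rule: exactly one comparison operator
// token out of GT, GTE, LT, LTE, EQ, or NEQ, validated via the token-set bitmask below.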
func (p *bicepParser) LogicCharacter() (localctx ILogicCharacterContext) {
localctx = NewLogicCharacterContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 38, bicepParserRULE_logicCharacter)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(314)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&277076930199552) != 0) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)
p.Consume()
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IPrimaryExpressionContext is an interface to support dynamic dispatch.
type IPrimaryExpressionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
LiteralValue() ILiteralValueContext
FunctionCall() IFunctionCallContext
InterpString() IInterpStringContext
MULTILINE_STRING() antlr.TerminalNode
Array() IArrayContext
Object() IObjectContext
ForExpression() IForExpressionContext
ParenthesizedExpression() IParenthesizedExpressionContext
LambdaExpression() ILambdaExpressionContext
// IsPrimaryExpressionContext differentiates from other interfaces.
IsPrimaryExpressionContext()
}
type PrimaryExpressionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyPrimaryExpressionContext() *PrimaryExpressionContext {
var p = new(PrimaryExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_primaryExpression
return p
}
func InitEmptyPrimaryExpressionContext(p *PrimaryExpressionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_primaryExpression
}
func (*PrimaryExpressionContext) IsPrimaryExpressionContext() {}
func NewPrimaryExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *PrimaryExpressionContext {
var p = new(PrimaryExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_primaryExpression
return p
}
func (s *PrimaryExpressionContext) GetParser() antlr.Parser { return s.parser }
func (s *PrimaryExpressionContext) LiteralValue() ILiteralValueContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ILiteralValueContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ILiteralValueContext)
}
func (s *PrimaryExpressionContext) FunctionCall() IFunctionCallContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFunctionCallContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IFunctionCallContext)
}
func (s *PrimaryExpressionContext) InterpString() IInterpStringContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IInterpStringContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IInterpStringContext)
}
func (s *PrimaryExpressionContext) MULTILINE_STRING() antlr.TerminalNode {
return s.GetToken(bicepParserMULTILINE_STRING, 0)
}
func (s *PrimaryExpressionContext) Array() IArrayContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IArrayContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IArrayContext)
}
func (s *PrimaryExpressionContext) Object() IObjectContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObjectContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IObjectContext)
}
func (s *PrimaryExpressionContext) ForExpression() IForExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IForExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IForExpressionContext)
}
func (s *PrimaryExpressionContext) ParenthesizedExpression() IParenthesizedExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IParenthesizedExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IParenthesizedExpressionContext)
}
func (s *PrimaryExpressionContext) LambdaExpression() ILambdaExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ILambdaExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ILambdaExpressionContext)
}
func (s *PrimaryExpressionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *PrimaryExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *PrimaryExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitPrimaryExpression(s)
default:
return t.VisitChildren(s)
}
}
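// PrimaryExpression parses the primaryExpression rule by adaptive prediction over
// nine alternatives: literal value, function call, interpolated string, multiline
// string, array, object, for-expression, parenthesized expression, or lambda.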
func (p *bicepParser) PrimaryExpression() (localctx IPrimaryExpressionContext) {
localctx = NewPrimaryExpressionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 40, bicepParserRULE_primaryExpression)
p.SetState(325)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 28, p.GetParserRuleContext()) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(316)
p.LiteralValue()
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(317)
p.FunctionCall()
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(318)
p.InterpString()
}
case 4:
p.EnterOuterAlt(localctx, 4)
{
p.SetState(319)
p.Match(bicepParserMULTILINE_STRING)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 5:
p.EnterOuterAlt(localctx, 5)
{
p.SetState(320)
p.Array()
}
case 6:
p.EnterOuterAlt(localctx, 6)
{
p.SetState(321)
p.Object()
}
case 7:
p.EnterOuterAlt(localctx, 7)
{
p.SetState(322)
p.ForExpression()
}
case 8:
p.EnterOuterAlt(localctx, 8)
{
p.SetState(323)
p.ParenthesizedExpression()
}
case 9:
p.EnterOuterAlt(localctx, 9)
{
p.SetState(324)
p.LambdaExpression()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IParenthesizedExpressionContext is an interface to support dynamic dispatch.
type IParenthesizedExpressionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
OPAR() antlr.TerminalNode
Expression() IExpressionContext
CPAR() antlr.TerminalNode
AllNL() []antlr.TerminalNode
NL(i int) antlr.TerminalNode
// IsParenthesizedExpressionContext differentiates from other interfaces.
IsParenthesizedExpressionContext()
}
type ParenthesizedExpressionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyParenthesizedExpressionContext() *ParenthesizedExpressionContext {
var p = new(ParenthesizedExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_parenthesizedExpression
return p
}
func InitEmptyParenthesizedExpressionContext(p *ParenthesizedExpressionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_parenthesizedExpression
}
func (*ParenthesizedExpressionContext) IsParenthesizedExpressionContext() {}
func NewParenthesizedExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ParenthesizedExpressionContext {
var p = new(ParenthesizedExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_parenthesizedExpression
return p
}
func (s *ParenthesizedExpressionContext) GetParser() antlr.Parser { return s.parser }
func (s *ParenthesizedExpressionContext) OPAR() antlr.TerminalNode {
return s.GetToken(bicepParserOPAR, 0)
}
func (s *ParenthesizedExpressionContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ParenthesizedExpressionContext) CPAR() antlr.TerminalNode {
return s.GetToken(bicepParserCPAR, 0)
}
func (s *ParenthesizedExpressionContext) AllNL() []antlr.TerminalNode {
return s.GetTokens(bicepParserNL)
}
func (s *ParenthesizedExpressionContext) NL(i int) antlr.TerminalNode {
return s.GetToken(bicepParserNL, i)
}
func (s *ParenthesizedExpressionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ParenthesizedExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ParenthesizedExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitParenthesizedExpression(s)
default:
return t.VisitChildren(s)
}
}
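// ParenthesizedExpression parses the parenthesizedExpression rule:
// '(' NL? expression NL? ')', allowing one optional newline on each side
// of the inner expression.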
func (p *bicepParser) ParenthesizedExpression() (localctx IParenthesizedExpressionContext) {
localctx = NewParenthesizedExpressionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 42, bicepParserRULE_parenthesizedExpression)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(327)
p.Match(bicepParserOPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(329)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserNL {
{
p.SetState(328)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
}
{
p.SetState(331)
p.expression(0)
}
p.SetState(333)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserNL {
{
p.SetState(332)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
}
{
p.SetState(335)
p.Match(bicepParserCPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ITypeExpressionContext is an interface to support dynamic dispatch.
type ITypeExpressionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetType_ returns the type_ rule context.
GetType_() IIdentifierContext
// SetType_ sets the type_ rule context.
SetType_(IIdentifierContext)
// Getter signatures
Identifier() IIdentifierContext
// IsTypeExpressionContext differentiates from other interfaces.
IsTypeExpressionContext()
}
type TypeExpressionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
type_ IIdentifierContext
}
func NewEmptyTypeExpressionContext() *TypeExpressionContext {
var p = new(TypeExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_typeExpression
return p
}
func InitEmptyTypeExpressionContext(p *TypeExpressionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_typeExpression
}
func (*TypeExpressionContext) IsTypeExpressionContext() {}
func NewTypeExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *TypeExpressionContext {
var p = new(TypeExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_typeExpression
return p
}
func (s *TypeExpressionContext) GetParser() antlr.Parser { return s.parser }
func (s *TypeExpressionContext) GetType_() IIdentifierContext { return s.type_ }
func (s *TypeExpressionContext) SetType_(v IIdentifierContext) { s.type_ = v }
func (s *TypeExpressionContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *TypeExpressionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *TypeExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *TypeExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitTypeExpression(s)
default:
return t.VisitChildren(s)
}
}
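// TypeExpression parses the typeExpression rule: a single identifier captured
// in the type_ label, e.g. the `string` in a `param name string` declaration
// (example illustrative).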
func (p *bicepParser) TypeExpression() (localctx ITypeExpressionContext) {
localctx = NewTypeExpressionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 44, bicepParserRULE_typeExpression)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(337)
var _x = p.Identifier()
localctx.(*TypeExpressionContext).type_ = _x
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ILiteralValueContext is an interface to support dynamic dispatch.
type ILiteralValueContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
NUMBER() antlr.TerminalNode
TRUE() antlr.TerminalNode
FALSE() antlr.TerminalNode
NULL() antlr.TerminalNode
Identifier() IIdentifierContext
// IsLiteralValueContext differentiates from other interfaces.
IsLiteralValueContext()
}
type LiteralValueContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyLiteralValueContext() *LiteralValueContext {
var p = new(LiteralValueContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_literalValue
return p
}
func InitEmptyLiteralValueContext(p *LiteralValueContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_literalValue
}
func (*LiteralValueContext) IsLiteralValueContext() {}
func NewLiteralValueContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *LiteralValueContext {
var p = new(LiteralValueContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_literalValue
return p
}
func (s *LiteralValueContext) GetParser() antlr.Parser { return s.parser }
func (s *LiteralValueContext) NUMBER() antlr.TerminalNode {
return s.GetToken(bicepParserNUMBER, 0)
}
func (s *LiteralValueContext) TRUE() antlr.TerminalNode {
return s.GetToken(bicepParserTRUE, 0)
}
func (s *LiteralValueContext) FALSE() antlr.TerminalNode {
return s.GetToken(bicepParserFALSE, 0)
}
func (s *LiteralValueContext) NULL() antlr.TerminalNode {
return s.GetToken(bicepParserNULL, 0)
}
func (s *LiteralValueContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *LiteralValueContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *LiteralValueContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *LiteralValueContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitLiteralValue(s)
default:
return t.VisitChildren(s)
}
}
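// LiteralValue parses the literalValue rule: a NUMBER, TRUE, FALSE, or NULL
// token, or a bare identifier.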
func (p *bicepParser) LiteralValue() (localctx ILiteralValueContext) {
localctx = NewLiteralValueContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 46, bicepParserRULE_literalValue)
p.SetState(344)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 31, p.GetParserRuleContext()) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(339)
p.Match(bicepParserNUMBER)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(340)
p.Match(bicepParserTRUE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(341)
p.Match(bicepParserFALSE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 4:
p.EnterOuterAlt(localctx, 4)
{
p.SetState(342)
p.Match(bicepParserNULL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 5:
p.EnterOuterAlt(localctx, 5)
{
p.SetState(343)
p.Identifier()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IObjectContext is an interface to support dynamic dispatch.
type IObjectContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
OBRACE() antlr.TerminalNode
CBRACE() antlr.TerminalNode
AllNL() []antlr.TerminalNode
NL(i int) antlr.TerminalNode
AllObjectProperty() []IObjectPropertyContext
ObjectProperty(i int) IObjectPropertyContext
// IsObjectContext differentiates from other interfaces.
IsObjectContext()
}
type ObjectContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyObjectContext() *ObjectContext {
var p = new(ObjectContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_object
return p
}
func InitEmptyObjectContext(p *ObjectContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_object
}
func (*ObjectContext) IsObjectContext() {}
func NewObjectContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ObjectContext {
var p = new(ObjectContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_object
return p
}
func (s *ObjectContext) GetParser() antlr.Parser { return s.parser }
func (s *ObjectContext) OBRACE() antlr.TerminalNode {
return s.GetToken(bicepParserOBRACE, 0)
}
func (s *ObjectContext) CBRACE() antlr.TerminalNode {
return s.GetToken(bicepParserCBRACE, 0)
}
func (s *ObjectContext) AllNL() []antlr.TerminalNode {
return s.GetTokens(bicepParserNL)
}
func (s *ObjectContext) NL(i int) antlr.TerminalNode {
return s.GetToken(bicepParserNL, i)
}
func (s *ObjectContext) AllObjectProperty() []IObjectPropertyContext {
children := s.GetChildren()
n := 0 // count matching children first so the slice is allocated once
for _, ctx := range children {
if _, ok := ctx.(IObjectPropertyContext); ok {
n++
}
}
tst := make([]IObjectPropertyContext, n)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IObjectPropertyContext); ok {
tst[i] = t
i++
}
}
return tst
}
func (s *ObjectContext) ObjectProperty(i int) IObjectPropertyContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObjectPropertyContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IObjectPropertyContext)
}
func (s *ObjectContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ObjectContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ObjectContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitObject(s)
default:
return t.VisitChildren(s)
}
}
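// Object parses the object rule: '{' (NL+ (objectProperty NL+)*)? '}', i.e. an
// empty `{}` or a brace block whose properties are separated by newlines rather
// than commas.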
func (p *bicepParser) Object() (localctx IObjectContext) {
localctx = NewObjectContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 48, bicepParserRULE_object)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(346)
p.Match(bicepParserOBRACE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(363)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserNL {
p.SetState(348)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for ok := true; ok; ok = _la == bicepParserNL {
{
p.SetState(347)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(350)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
p.SetState(360)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&565136091758592) != 0 {
{
p.SetState(352)
p.ObjectProperty()
}
p.SetState(354)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for ok := true; ok; ok = _la == bicepParserNL {
{
p.SetState(353)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(356)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
p.SetState(362)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
}
{
p.SetState(365)
p.Match(bicepParserCBRACE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
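// NOTE: reconstructed from the generated control flow above (the .g4 grammar is
// not part of this file), the object rule is approximately:
//   object : OBRACE (NL+ (objectProperty NL+)*)? CBRACE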
// IObjectPropertyContext is an interface to support dynamic dispatch.
type IObjectPropertyContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// GetName returns the name rule contexts.
GetName() IIdentifierContext
// SetName sets the name rule contexts.
SetName(IIdentifierContext)
// Getter signatures
COL() antlr.TerminalNode
Expression() IExpressionContext
InterpString() IInterpStringContext
Identifier() IIdentifierContext
// IsObjectPropertyContext differentiates from other interfaces.
IsObjectPropertyContext()
}
type ObjectPropertyContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
name IIdentifierContext
}
func NewEmptyObjectPropertyContext() *ObjectPropertyContext {
var p = new(ObjectPropertyContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_objectProperty
return p
}
func InitEmptyObjectPropertyContext(p *ObjectPropertyContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_objectProperty
}
func (*ObjectPropertyContext) IsObjectPropertyContext() {}
func NewObjectPropertyContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ObjectPropertyContext {
var p = new(ObjectPropertyContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_objectProperty
return p
}
func (s *ObjectPropertyContext) GetParser() antlr.Parser { return s.parser }
func (s *ObjectPropertyContext) GetName() IIdentifierContext { return s.name }
func (s *ObjectPropertyContext) SetName(v IIdentifierContext) { s.name = v }
func (s *ObjectPropertyContext) COL() antlr.TerminalNode {
return s.GetToken(bicepParserCOL, 0)
}
func (s *ObjectPropertyContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ObjectPropertyContext) InterpString() IInterpStringContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IInterpStringContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IInterpStringContext)
}
func (s *ObjectPropertyContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *ObjectPropertyContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ObjectPropertyContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ObjectPropertyContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitObjectProperty(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ObjectProperty() (localctx IObjectPropertyContext) {
localctx = NewObjectPropertyContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 50, bicepParserRULE_objectProperty)
p.EnterOuterAlt(localctx, 1)
p.SetState(369)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserPARAM, bicepParserVAR, bicepParserTRUE, bicepParserFALSE, bicepParserNULL, bicepParserARRAY, bicepParserOBJECT, bicepParserRESOURCE, bicepParserOUTPUT, bicepParserTARGET_SCOPE, bicepParserIMPORT, bicepParserWITH, bicepParserAS, bicepParserMETADATA, bicepParserEXISTING, bicepParserTYPE, bicepParserMODULE, bicepParserSTRING, bicepParserINT, bicepParserBOOL, bicepParserIF, bicepParserFOR, bicepParserIN, bicepParserIDENTIFIER:
{
p.SetState(367)
var _x = p.Identifier()
localctx.(*ObjectPropertyContext).name = _x
}
case bicepParserSTRING_LEFT_PIECE, bicepParserSTRING_COMPLETE:
{
p.SetState(368)
p.InterpString()
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
{
p.SetState(371)
p.Match(bicepParserCOL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(372)
p.expression(0)
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
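// NOTE: reconstructed from the generated control flow above, the objectProperty
// rule is approximately:
//   objectProperty : (name=identifier | interpString) COL expression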
// IArrayContext is an interface to support dynamic dispatch.
type IArrayContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
OBRACK() antlr.TerminalNode
CBRACK() antlr.TerminalNode
AllNL() []antlr.TerminalNode
NL(i int) antlr.TerminalNode
AllArrayItem() []IArrayItemContext
ArrayItem(i int) IArrayItemContext
// IsArrayContext differentiates from other interfaces.
IsArrayContext()
}
type ArrayContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyArrayContext() *ArrayContext {
var p = new(ArrayContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_array
return p
}
func InitEmptyArrayContext(p *ArrayContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_array
}
func (*ArrayContext) IsArrayContext() {}
func NewArrayContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ArrayContext {
var p = new(ArrayContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_array
return p
}
func (s *ArrayContext) GetParser() antlr.Parser { return s.parser }
func (s *ArrayContext) OBRACK() antlr.TerminalNode {
return s.GetToken(bicepParserOBRACK, 0)
}
func (s *ArrayContext) CBRACK() antlr.TerminalNode {
return s.GetToken(bicepParserCBRACK, 0)
}
func (s *ArrayContext) AllNL() []antlr.TerminalNode {
return s.GetTokens(bicepParserNL)
}
func (s *ArrayContext) NL(i int) antlr.TerminalNode {
return s.GetToken(bicepParserNL, i)
}
func (s *ArrayContext) AllArrayItem() []IArrayItemContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IArrayItemContext); ok {
len++
}
}
tst := make([]IArrayItemContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IArrayItemContext); ok {
tst[i] = t.(IArrayItemContext)
i++
}
}
return tst
}
func (s *ArrayContext) ArrayItem(i int) IArrayItemContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IArrayItemContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IArrayItemContext)
}
func (s *ArrayContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ArrayContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ArrayContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitArray(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) Array() (localctx IArrayContext) {
localctx = NewArrayContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 52, bicepParserRULE_array)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(374)
p.Match(bicepParserOBRACK)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(378)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserNL {
{
p.SetState(375)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(380)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
p.SetState(384)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&1691035998605394) != 0 {
{
p.SetState(381)
p.ArrayItem()
}
p.SetState(386)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
{
p.SetState(387)
p.Match(bicepParserCBRACK)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
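// NOTE: reconstructed from the generated control flow above, the array rule is
// approximately:
//   array : OBRACK NL* arrayItem* CBRACK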
// IArrayItemContext is an interface to support dynamic dispatch.
type IArrayItemContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
Expression() IExpressionContext
COMMA() antlr.TerminalNode
AllNL() []antlr.TerminalNode
NL(i int) antlr.TerminalNode
// IsArrayItemContext differentiates from other interfaces.
IsArrayItemContext()
}
type ArrayItemContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyArrayItemContext() *ArrayItemContext {
var p = new(ArrayItemContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_arrayItem
return p
}
func InitEmptyArrayItemContext(p *ArrayItemContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_arrayItem
}
func (*ArrayItemContext) IsArrayItemContext() {}
func NewArrayItemContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ArrayItemContext {
var p = new(ArrayItemContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_arrayItem
return p
}
func (s *ArrayItemContext) GetParser() antlr.Parser { return s.parser }
func (s *ArrayItemContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ArrayItemContext) COMMA() antlr.TerminalNode {
return s.GetToken(bicepParserCOMMA, 0)
}
func (s *ArrayItemContext) AllNL() []antlr.TerminalNode {
return s.GetTokens(bicepParserNL)
}
func (s *ArrayItemContext) NL(i int) antlr.TerminalNode {
return s.GetToken(bicepParserNL, i)
}
func (s *ArrayItemContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ArrayItemContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ArrayItemContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitArrayItem(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ArrayItem() (localctx IArrayItemContext) {
localctx = NewArrayItemContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 54, bicepParserRULE_arrayItem)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(389)
p.expression(0)
}
p.SetState(396)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case bicepParserNL:
p.SetState(391)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for ok := true; ok; ok = _la == bicepParserNL {
{
p.SetState(390)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(393)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
case bicepParserCOMMA:
{
p.SetState(395)
p.Match(bicepParserCOMMA)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case bicepParserMULTILINE_STRING, bicepParserOBRACK, bicepParserCBRACK, bicepParserOPAR, bicepParserOBRACE, bicepParserPARAM, bicepParserVAR, bicepParserTRUE, bicepParserFALSE, bicepParserNULL, bicepParserARRAY, bicepParserOBJECT, bicepParserRESOURCE, bicepParserOUTPUT, bicepParserTARGET_SCOPE, bicepParserIMPORT, bicepParserWITH, bicepParserAS, bicepParserMETADATA, bicepParserEXISTING, bicepParserTYPE, bicepParserMODULE, bicepParserSTRING_LEFT_PIECE, bicepParserSTRING_COMPLETE, bicepParserSTRING, bicepParserINT, bicepParserBOOL, bicepParserIF, bicepParserFOR, bicepParserIN, bicepParserIDENTIFIER, bicepParserNUMBER:
default:
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
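// NOTE: reconstructed from the generated control flow above, the arrayItem rule
// is approximately:
//   arrayItem : expression (NL+ | COMMA)?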
// IDecoratorContext is an interface to support dynamic dispatch.
type IDecoratorContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
AT() antlr.TerminalNode
DecoratorExpression() IDecoratorExpressionContext
NL() antlr.TerminalNode
// IsDecoratorContext differentiates from other interfaces.
IsDecoratorContext()
}
type DecoratorContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyDecoratorContext() *DecoratorContext {
var p = new(DecoratorContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_decorator
return p
}
func InitEmptyDecoratorContext(p *DecoratorContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_decorator
}
func (*DecoratorContext) IsDecoratorContext() {}
func NewDecoratorContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *DecoratorContext {
var p = new(DecoratorContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_decorator
return p
}
func (s *DecoratorContext) GetParser() antlr.Parser { return s.parser }
func (s *DecoratorContext) AT() antlr.TerminalNode {
return s.GetToken(bicepParserAT, 0)
}
func (s *DecoratorContext) DecoratorExpression() IDecoratorExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDecoratorExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IDecoratorExpressionContext)
}
func (s *DecoratorContext) NL() antlr.TerminalNode {
return s.GetToken(bicepParserNL, 0)
}
func (s *DecoratorContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *DecoratorContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *DecoratorContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitDecorator(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) Decorator() (localctx IDecoratorContext) {
localctx = NewDecoratorContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 56, bicepParserRULE_decorator)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(398)
p.Match(bicepParserAT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(399)
p.DecoratorExpression()
}
{
p.SetState(400)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IDecoratorExpressionContext is an interface to support dynamic dispatch.
type IDecoratorExpressionContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
FunctionCall() IFunctionCallContext
Expression() IExpressionContext
DOT() antlr.TerminalNode
// IsDecoratorExpressionContext differentiates from other interfaces.
IsDecoratorExpressionContext()
}
type DecoratorExpressionContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyDecoratorExpressionContext() *DecoratorExpressionContext {
var p = new(DecoratorExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_decoratorExpression
return p
}
func InitEmptyDecoratorExpressionContext(p *DecoratorExpressionContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_decoratorExpression
}
func (*DecoratorExpressionContext) IsDecoratorExpressionContext() {}
func NewDecoratorExpressionContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *DecoratorExpressionContext {
var p = new(DecoratorExpressionContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_decoratorExpression
return p
}
func (s *DecoratorExpressionContext) GetParser() antlr.Parser { return s.parser }
func (s *DecoratorExpressionContext) FunctionCall() IFunctionCallContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFunctionCallContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IFunctionCallContext)
}
func (s *DecoratorExpressionContext) Expression() IExpressionContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *DecoratorExpressionContext) DOT() antlr.TerminalNode {
return s.GetToken(bicepParserDOT, 0)
}
func (s *DecoratorExpressionContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *DecoratorExpressionContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *DecoratorExpressionContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitDecoratorExpression(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) DecoratorExpression() (localctx IDecoratorExpressionContext) {
localctx = NewDecoratorExpressionContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 58, bicepParserRULE_decoratorExpression)
p.SetState(407)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 41, p.GetParserRuleContext()) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(402)
p.FunctionCall()
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(403)
p.expression(0)
}
{
p.SetState(404)
p.Match(bicepParserDOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(405)
p.FunctionCall()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
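// NOTE: reconstructed from the generated control flow above:
//   decorator : AT decoratorExpression NL
//   decoratorExpression : functionCall | expression DOT functionCall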
// IFunctionCallContext is an interface to support dynamic dispatch.
type IFunctionCallContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
Identifier() IIdentifierContext
OPAR() antlr.TerminalNode
CPAR() antlr.TerminalNode
ArgumentList() IArgumentListContext
AllNL() []antlr.TerminalNode
NL(i int) antlr.TerminalNode
// IsFunctionCallContext differentiates from other interfaces.
IsFunctionCallContext()
}
type FunctionCallContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyFunctionCallContext() *FunctionCallContext {
var p = new(FunctionCallContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_functionCall
return p
}
func InitEmptyFunctionCallContext(p *FunctionCallContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_functionCall
}
func (*FunctionCallContext) IsFunctionCallContext() {}
func NewFunctionCallContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FunctionCallContext {
var p = new(FunctionCallContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_functionCall
return p
}
func (s *FunctionCallContext) GetParser() antlr.Parser { return s.parser }
func (s *FunctionCallContext) Identifier() IIdentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IIdentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IIdentifierContext)
}
func (s *FunctionCallContext) OPAR() antlr.TerminalNode {
return s.GetToken(bicepParserOPAR, 0)
}
func (s *FunctionCallContext) CPAR() antlr.TerminalNode {
return s.GetToken(bicepParserCPAR, 0)
}
func (s *FunctionCallContext) ArgumentList() IArgumentListContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IArgumentListContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IArgumentListContext)
}
func (s *FunctionCallContext) AllNL() []antlr.TerminalNode {
return s.GetTokens(bicepParserNL)
}
func (s *FunctionCallContext) NL(i int) antlr.TerminalNode {
return s.GetToken(bicepParserNL, i)
}
func (s *FunctionCallContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *FunctionCallContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *FunctionCallContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitFunctionCall(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) FunctionCall() (localctx IFunctionCallContext) {
localctx = NewFunctionCallContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 60, bicepParserRULE_functionCall)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(409)
p.Identifier()
}
{
p.SetState(410)
p.Match(bicepParserOPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(415)
p.GetErrorHandler().Sync(p)
if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 43, p.GetParserRuleContext()) == 1 {
p.SetState(412)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserNL {
{
p.SetState(411)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
}
{
p.SetState(414)
p.ArgumentList()
}
} else if p.HasError() {
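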
goto errorExit
}
p.SetState(418)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserNL {
{
p.SetState(417)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
}
{
p.SetState(420)
p.Match(bicepParserCPAR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
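// NOTE: reconstructed from the generated control flow above, the functionCall
// rule is approximately:
//   functionCall : identifier OPAR (NL? argumentList)? NL? CPAR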
// IArgumentListContext is an interface to support dynamic dispatch.
type IArgumentListContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
AllExpression() []IExpressionContext
Expression(i int) IExpressionContext
AllCOMMA() []antlr.TerminalNode
COMMA(i int) antlr.TerminalNode
AllNL() []antlr.TerminalNode
NL(i int) antlr.TerminalNode
// IsArgumentListContext differentiates from other interfaces.
IsArgumentListContext()
}
type ArgumentListContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyArgumentListContext() *ArgumentListContext {
var p = new(ArgumentListContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_argumentList
return p
}
func InitEmptyArgumentListContext(p *ArgumentListContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_argumentList
}
func (*ArgumentListContext) IsArgumentListContext() {}
func NewArgumentListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ArgumentListContext {
var p = new(ArgumentListContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_argumentList
return p
}
func (s *ArgumentListContext) GetParser() antlr.Parser { return s.parser }
func (s *ArgumentListContext) AllExpression() []IExpressionContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
return tst
}
func (s *ArgumentListContext) Expression(i int) IExpressionContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IExpressionContext)
}
func (s *ArgumentListContext) AllCOMMA() []antlr.TerminalNode {
return s.GetTokens(bicepParserCOMMA)
}
func (s *ArgumentListContext) COMMA(i int) antlr.TerminalNode {
return s.GetToken(bicepParserCOMMA, i)
}
func (s *ArgumentListContext) AllNL() []antlr.TerminalNode {
return s.GetTokens(bicepParserNL)
}
func (s *ArgumentListContext) NL(i int) antlr.TerminalNode {
return s.GetToken(bicepParserNL, i)
}
func (s *ArgumentListContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ArgumentListContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ArgumentListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitArgumentList(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) ArgumentList() (localctx IArgumentListContext) {
localctx = NewArgumentListContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 62, bicepParserRULE_argumentList)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(422)
p.expression(0)
}
p.SetState(430)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
for _la == bicepParserCOMMA {
{
p.SetState(423)
p.Match(bicepParserCOMMA)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(425)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
if _la == bicepParserNL {
{
p.SetState(424)
p.Match(bicepParserNL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
}
{
p.SetState(427)
p.expression(0)
}
p.SetState(432)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_la = p.GetTokenStream().LA(1)
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
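// NOTE: reconstructed from the generated control flow above, the argumentList
// rule is approximately:
//   argumentList : expression (COMMA NL? expression)*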
// IIdentifierContext is an interface to support dynamic dispatch.
type IIdentifierContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
IDENTIFIER() antlr.TerminalNode
IMPORT() antlr.TerminalNode
WITH() antlr.TerminalNode
AS() antlr.TerminalNode
METADATA() antlr.TerminalNode
PARAM() antlr.TerminalNode
RESOURCE() antlr.TerminalNode
MODULE() antlr.TerminalNode
OUTPUT() antlr.TerminalNode
EXISTING() antlr.TerminalNode
TYPE() antlr.TerminalNode
VAR() antlr.TerminalNode
IF() antlr.TerminalNode
FOR() antlr.TerminalNode
IN() antlr.TerminalNode
TRUE() antlr.TerminalNode
FALSE() antlr.TerminalNode
NULL() antlr.TerminalNode
TARGET_SCOPE() antlr.TerminalNode
STRING() antlr.TerminalNode
INT() antlr.TerminalNode
BOOL() antlr.TerminalNode
ARRAY() antlr.TerminalNode
OBJECT() antlr.TerminalNode
// IsIdentifierContext differentiates from other interfaces.
IsIdentifierContext()
}
type IdentifierContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyIdentifierContext() *IdentifierContext {
var p = new(IdentifierContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_identifier
return p
}
func InitEmptyIdentifierContext(p *IdentifierContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = bicepParserRULE_identifier
}
func (*IdentifierContext) IsIdentifierContext() {}
func NewIdentifierContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *IdentifierContext {
var p = new(IdentifierContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = bicepParserRULE_identifier
return p
}
func (s *IdentifierContext) GetParser() antlr.Parser { return s.parser }
func (s *IdentifierContext) IDENTIFIER() antlr.TerminalNode {
return s.GetToken(bicepParserIDENTIFIER, 0)
}
func (s *IdentifierContext) IMPORT() antlr.TerminalNode {
return s.GetToken(bicepParserIMPORT, 0)
}
func (s *IdentifierContext) WITH() antlr.TerminalNode {
return s.GetToken(bicepParserWITH, 0)
}
func (s *IdentifierContext) AS() antlr.TerminalNode {
return s.GetToken(bicepParserAS, 0)
}
func (s *IdentifierContext) METADATA() antlr.TerminalNode {
return s.GetToken(bicepParserMETADATA, 0)
}
func (s *IdentifierContext) PARAM() antlr.TerminalNode {
return s.GetToken(bicepParserPARAM, 0)
}
func (s *IdentifierContext) RESOURCE() antlr.TerminalNode {
return s.GetToken(bicepParserRESOURCE, 0)
}
func (s *IdentifierContext) MODULE() antlr.TerminalNode {
return s.GetToken(bicepParserMODULE, 0)
}
func (s *IdentifierContext) OUTPUT() antlr.TerminalNode {
return s.GetToken(bicepParserOUTPUT, 0)
}
func (s *IdentifierContext) EXISTING() antlr.TerminalNode {
return s.GetToken(bicepParserEXISTING, 0)
}
func (s *IdentifierContext) TYPE() antlr.TerminalNode {
return s.GetToken(bicepParserTYPE, 0)
}
func (s *IdentifierContext) VAR() antlr.TerminalNode {
return s.GetToken(bicepParserVAR, 0)
}
func (s *IdentifierContext) IF() antlr.TerminalNode {
return s.GetToken(bicepParserIF, 0)
}
func (s *IdentifierContext) FOR() antlr.TerminalNode {
return s.GetToken(bicepParserFOR, 0)
}
func (s *IdentifierContext) IN() antlr.TerminalNode {
return s.GetToken(bicepParserIN, 0)
}
func (s *IdentifierContext) TRUE() antlr.TerminalNode {
return s.GetToken(bicepParserTRUE, 0)
}
func (s *IdentifierContext) FALSE() antlr.TerminalNode {
return s.GetToken(bicepParserFALSE, 0)
}
func (s *IdentifierContext) NULL() antlr.TerminalNode {
return s.GetToken(bicepParserNULL, 0)
}
func (s *IdentifierContext) TARGET_SCOPE() antlr.TerminalNode {
return s.GetToken(bicepParserTARGET_SCOPE, 0)
}
func (s *IdentifierContext) STRING() antlr.TerminalNode {
return s.GetToken(bicepParserSTRING, 0)
}
func (s *IdentifierContext) INT() antlr.TerminalNode {
return s.GetToken(bicepParserINT, 0)
}
func (s *IdentifierContext) BOOL() antlr.TerminalNode {
return s.GetToken(bicepParserBOOL, 0)
}
func (s *IdentifierContext) ARRAY() antlr.TerminalNode {
return s.GetToken(bicepParserARRAY, 0)
}
func (s *IdentifierContext) OBJECT() antlr.TerminalNode {
return s.GetToken(bicepParserOBJECT, 0)
}
func (s *IdentifierContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *IdentifierContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *IdentifierContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case bicepVisitor:
return t.VisitIdentifier(s)
default:
return t.VisitChildren(s)
}
}
func (p *bicepParser) Identifier() (localctx IIdentifierContext) {
localctx = NewIdentifierContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 64, bicepParserRULE_identifier)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(433)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&565116764405760) != 0) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)
p.Consume()
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
func (p *bicepParser) Sempred(localctx antlr.RuleContext, ruleIndex, predIndex int) bool {
switch ruleIndex {
case 17:
var t *ExpressionContext = nil
if localctx != nil {
t = localctx.(*ExpressionContext)
}
return p.Expression_Sempred(t, predIndex)
default:
panic("No predicate with index: " + fmt.Sprint(ruleIndex))
}
}
func (p *bicepParser) Expression_Sempred(localctx antlr.RuleContext, predIndex int) bool {
switch predIndex {
case 0:
return p.Precpred(p.GetParserRuleContext(), 6)
case 1:
return p.Precpred(p.GetParserRuleContext(), 2)
case 2:
return p.Precpred(p.GetParserRuleContext(), 7)
case 3:
return p.Precpred(p.GetParserRuleContext(), 5)
case 4:
return p.Precpred(p.GetParserRuleContext(), 4)
case 5:
return p.Precpred(p.GetParserRuleContext(), 3)
default:
panic("No predicate with index: " + fmt.Sprint(predIndex))
}
}
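// NOTE: the Precpred calls above gate the left-recursive alternatives of the
// expression rule; each alternative is taken only while the invoking context's
// precedence permits the stated level (7, 6, 5, 4, 3 or 2).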
package bicep
import (
"encoding/json"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser/bicep/antlr/parser"
"github.com/antlr4-go/antlr/v4"
)
type Parser struct {
}
const kicsPrefix = "_kics_"
const kicsLine = kicsPrefix + "line"
const kicsLines = kicsPrefix + "lines"
const kicsArray = kicsPrefix + "arr"
const CloseParenthesis = "')" // closes a single-quoted argument inside an ARM expression
type BicepVisitor struct {
parser.BasebicepVisitor
paramList map[string]interface{}
varList map[string]interface{}
resourceList []interface{}
}
type JSONBicep struct {
Parameters map[string]interface{} `json:"parameters"`
Variables map[string]interface{} `json:"variables"`
Resources []interface{} `json:"resources"`
}
type KicsObjectProperty struct {
objectProperty map[string]interface{}
line int
}
func NewBicepVisitor() *BicepVisitor {
paramList := map[string]interface{}{}
varList := map[string]interface{}{}
resourceList := []interface{}{}
return &BicepVisitor{paramList: paramList, varList: varList, resourceList: resourceList}
}
func convertVisitorToJSONBicep(visitor *BicepVisitor) *JSONBicep {
return &JSONBicep{
Parameters: visitor.paramList,
Variables: visitor.varList,
Resources: visitor.resourceList,
}
}
type Resource struct {
Name string
FullType string
Parent string
Children []*Resource
ResourceData interface{}
}
// filterParentStructs keeps only the top-level resources (those with no parent) and reformats each one into its final map form
func filterParentStructs(resources []*Resource) []interface{} {
filteredResources := []interface{}{}
for _, resource := range resources {
if resource.Parent == "" {
formattedNode := reformatTestTree(resource)
filteredResources = append(filteredResources, formattedNode)
}
}
return filteredResources
}
func setChildType(child map[string]interface{}, parentType string) {
childType, hasType := child["type"]
if !hasType {
return
}
childTypeString, ok := childType.(string)
if !ok {
return
}
if parentType != "" {
childTypeString = strings.Replace(childTypeString, parentType+"/", "", 1)
child["type"] = childTypeString
}
}
// reformatTestTree converts a Resource tree back into a JSONBicep-style map, nesting each child under its parent's "resources" key
func reformatTestTree(resource *Resource) map[string]interface{} {
reformattedResource := map[string]interface{}{}
children := []interface{}{}
for _, child := range resource.Children {
formattedChild := reformatTestTree(child)
setChildType(formattedChild, resource.FullType)
children = append(children, formattedChild)
}
if len(children) > 0 {
reformattedResource["resources"] = children
}
resData, ok := resource.ResourceData.(map[string]interface{})
if !ok {
return reformattedResource
}
for k, v := range resData {
reformattedResource[k] = v
}
return reformattedResource
}
// Adds resource to its parent's children array
func addChildrenToParents(resources []*Resource) {
resourceMap := map[string]*Resource{}
// The first pass fills resourceMap so the second pass can look each parent up by name
for _, resource := range resources {
resourceMap[resource.Name] = resource
}
for _, resource := range resources {
if resource.Parent != "" {
parent, ok := resourceMap[resource.Parent]
if !ok {
continue
}
parent.Children = append(parent.Children, resource)
}
}
}
// convertOriginalResourcesToStruct converts the raw JSONBicep resource maps into a Resource struct array
func convertOriginalResourcesToStruct(resources []interface{}) []*Resource {
newResources := []*Resource{}
for _, res := range resources {
actualRes, ok := res.(map[string]interface{})
if !ok {
return newResources
}
resName, hasName := actualRes["identifier"]
resType, hasType := actualRes["type"]
if !hasName || !hasType {
return newResources
}
resNameString, ok := resName.(string)
if !ok {
return newResources
}
resTypeString, ok := resType.(string)
if !ok {
return newResources
}
newRes := Resource{
Name: resNameString,
FullType: resTypeString,
ResourceData: res,
}
if resParent, hasParent := actualRes["parent"]; hasParent {
var ok bool
newRes.Parent, ok = resParent.(string)
if !ok {
return newResources
}
}
newResources = append(newResources, &newRes)
}
return newResources
}
func makeResourcesNestedStructure(jBicep *JSONBicep) []interface{} {
originalResources := jBicep.Resources
resources := convertOriginalResourcesToStruct(originalResources)
addChildrenToParents(resources)
filteredResources := filterParentStructs(resources)
return filteredResources
}
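// Example (hypothetical data): given the flat resources
//   {identifier: "vnet", type: "Microsoft.Network/virtualNetworks", ...}
//   {identifier: "subnet", type: "Microsoft.Network/virtualNetworks/subnets", parent: "vnet", ...}
// makeResourcesNestedStructure nests "subnet" under vnet's "resources" array,
// and setChildType trims the parent prefix so the child type becomes "subnets".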
// Parse - parses a bicep file into a JSON document (model.Document) using BicepVisitor
func (p *Parser) Parse(file string, _ []byte) ([]model.Document, []int, error) {
bicepVisitor := NewBicepVisitor()
stream, err := antlr.NewFileStream(file)
if err != nil {
return nil, nil, err
}
lexer := parser.NewbicepLexer(stream)
tokenStream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
bicepParser := parser.NewbicepParser(tokenStream)
bicepParser.RemoveErrorListeners()
bicepParser.AddErrorListener(antlr.NewDiagnosticErrorListener(true))
program := bicepParser.Program()
if program != nil {
program.Accept(bicepVisitor)
}
var doc model.Document
jBicep := convertVisitorToJSONBicep(bicepVisitor)
nestedResources := makeResourcesNestedStructure(jBicep)
jBicep.Resources = nestedResources
bicepBytes, err := json.Marshal(jBicep)
if err != nil {
return nil, nil, err
}
err = json.Unmarshal(bicepBytes, &doc)
if err != nil {
return nil, nil, err
}
return []model.Document{doc}, nil, nil
}
func (s *BicepVisitor) VisitProgram(ctx *parser.ProgramContext) interface{} {
for _, val := range ctx.AllStatement() {
val.Accept(s)
}
return nil
}
func (s *BicepVisitor) VisitStatement(ctx *parser.StatementContext) interface{} {
if ctx.ParameterDecl() != nil {
return ctx.ParameterDecl().Accept(s)
}
if ctx.VariableDecl() != nil {
return ctx.VariableDecl().Accept(s)
}
if ctx.ResourceDecl() != nil {
return ctx.ResourceDecl().Accept(s)
}
return nil
}
func parseDecorators(decorators []parser.IDecoratorContext, s *BicepVisitor) map[string]interface{} {
decoratorsMap := map[string]interface{}{}
for _, val := range decorators {
if val == nil {
continue
}
decorator, ok := val.Accept(s).(map[string][]interface{})
if !ok {
return map[string]interface{}{}
}
for name, values := range decorator {
if name == "description" {
if len(values) > 0 {
metadata := map[string]interface{}{}
metadata["description"] = values[0]
decoratorsMap["metadata"] = metadata
}
} else if name == "maxLength" || name == "minLength" || name == "minValue" || name == "maxValue" {
if len(values) > 0 {
decoratorsMap[name] = values[0]
}
} else {
decoratorsMap[name] = values
}
}
}
return decoratorsMap
}
func (s *BicepVisitor) VisitParameterDecl(ctx *parser.ParameterDeclContext) interface{} {
param := map[string]interface{}{}
identifier := checkAcceptAntlrString(ctx.Identifier(), s)
if ctx.ParameterDefaultValue() != nil {
paramVal := ctx.ParameterDefaultValue().Accept(s)
switch paramVal := paramVal.(type) {
case map[string][]interface{}:
stringifiedFunction := parseFunctionCall(paramVal)
param["defaultValue"] = "[" + stringifiedFunction + "]"
case interface{}:
if isDotFunction(paramVal) {
paramVal = "[" + paramVal.(string) + "]"
}
param["defaultValue"] = paramVal
default:
param["defaultValue"] = nil
}
}
if ctx.TypeExpression() != nil {
typeExpression := ctx.TypeExpression().Accept(s)
param["type"] = typeExpression
}
decoratorsMap := parseDecorators(ctx.AllDecorator(), s)
for name, values := range decoratorsMap {
if name == "secure" {
if param["type"] == "string" {
param["type"] = "secureString"
} else if param["type"] == "object" {
param["type"] = "secureObject"
}
} else {
if name == "allowed" {
param["allowedValues"] = values
} else {
param[name] = values
}
}
}
line := map[string]int{kicsLine: ctx.GetStop().GetLine()}
lines := map[string]map[string]int{
kicsPrefix + "defaultValue": line,
kicsPrefix + "type": line,
}
param[kicsLines] = lines
s.paramList[identifier] = param
return nil
}
func (s *BicepVisitor) VisitVariableDecl(ctx *parser.VariableDeclContext) interface{} {
var variable = map[string]interface{}{}
identifier := checkAcceptAntlrString(ctx.Identifier(), s)
decoratorsMap := parseDecorators(ctx.AllDecorator(), s)
for name, values := range decoratorsMap {
variable[name] = values
}
expression := checkAcceptExpression(ctx.Expression(), s)
variable["value"] = expression
s.varList[identifier] = variable
return nil
}
func (s *BicepVisitor) VisitResourceDecl(ctx *parser.ResourceDeclContext) interface{} {
resource := map[string]interface{}{}
resourceType := ""
apiVersion := ""
interpString := checkAcceptAntlrString(ctx.InterpString(), s)
identifier := checkAcceptAntlrString(ctx.Identifier(), s)
fullType := strings.Split(interpString, "@")
if len(fullType) > 0 {
resourceType = fullType[0]
}
if len(fullType) > 1 {
apiVersion = fullType[1]
}
resource["identifier"] = identifier
resource["type"] = resourceType
resource["apiVersion"] = apiVersion
decoratorsMap := parseDecorators(ctx.AllDecorator(), s)
for name, values := range decoratorsMap {
resource[name] = values
}
if ctx.Object() != nil {
object, ok := ctx.Object().Accept(s).(map[string]interface{})
if ok {
for key, val := range object {
resource[key] = val
}
}
}
lines := map[string]interface{}{}
if resKicsLines, hasLines := resource[kicsLines]; hasLines {
var ok bool
lines, ok = resKicsLines.(map[string]interface{})
if !ok {
lines = map[string]interface{}{}
}
}
line := map[string]int{kicsLine: ctx.GetStart().GetLine()}
lines[kicsPrefix+"apiVersion"] = line
lines[kicsPrefix+"type"] = line
s.resourceList = append(s.resourceList, resource)
return nil
}
func checkAcceptAntlrString(ctx antlr.ParserRuleContext, s *BicepVisitor) string {
if ctx != nil {
if result, ok := ctx.Accept(s).(string); ok {
return result
}
}
return ""
}
func checkAcceptExpression(ctx antlr.ParserRuleContext, s *BicepVisitor) interface{} {
if ctx != nil {
return ctx.Accept(s)
}
return ""
}
func (s *BicepVisitor) VisitParameterDefaultValue(ctx *parser.ParameterDefaultValueContext) interface{} {
param := checkAcceptExpression(ctx.Expression(), s)
return param
}
/*
parseFunctionCall converts function call data (a map from function name to its slice of arguments) into a string.
Example: "FunctionName": ["arg1", 2, "arg3", map[Function2: [arg4, arg5]]] becomes
"FunctionName(arg1, 2, arg3, Function2(arg4, arg5))"
*/
func parseFunctionCall(functionData map[string][]interface{}) string {
stringifiedFunctionCall := ""
for functionName, argumentList := range functionData {
stringifiedFunctionCall += functionName + "("
for index, argument := range argumentList {
switch argument := argument.(type) {
case string:
stringifiedFunctionCall += argument
case int:
convertedArgument := strconv.Itoa(argument)
stringifiedFunctionCall += convertedArgument
case map[string][]interface{}:
stringifiedFunctionCall += parseFunctionCall(argument)
}
if index < len(argumentList)-1 {
stringifiedFunctionCall += ", "
}
}
stringifiedFunctionCall += ")"
}
return stringifiedFunctionCall
}
// convertToParamVar checks whether an identifier is a declared parameter or variable and, if so, wraps it in the matching ARM template function call
func convertToParamVar(str string, s *BicepVisitor) string {
for variable := range s.varList {
if variable == str {
return "variables('" + str + CloseParenthesis
}
}
for parameter := range s.paramList {
if parameter == str {
return "parameters('" + str + CloseParenthesis
}
}
return str
}
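// Example: with a parameter named "location" declared in the file,
// convertToParamVar("location", s) returns "parameters('location')"; an
// identifier that is neither a parameter nor a variable is returned unchanged.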
func (s *BicepVisitor) VisitExpression(ctx *parser.ExpressionContext) interface{} {
if ctx.GetChildCount() > 1 {
if ctx.DOT() != nil {
var expressionString string
var exp interface{} = ""
if ctx.Expression(0) != nil {
exp = ctx.Expression(0).Accept(s)
}
switch exp := exp.(type) {
case map[string][]interface{}:
expressionString = parseFunctionCall(exp)
case string:
expressionString = exp
default:
expressionString = ""
}
if ctx.Identifier() != nil {
identifier := checkAcceptAntlrString(ctx.Identifier(), s)
identifier = convertToParamVar(identifier, s)
return expressionString + "." + identifier
}
if ctx.FunctionCall() != nil {
fc := ctx.FunctionCall().Accept(s)
fcData, ok := fc.(map[string][]interface{})
if !ok {
return ""
}
functionCallString := parseFunctionCall(fcData)
return expressionString + "." + functionCallString
}
}
}
if ctx.PrimaryExpression() != nil {
return ctx.PrimaryExpression().Accept(s)
}
return nil
}
func (s *BicepVisitor) VisitPrimaryExpression(ctx *parser.PrimaryExpressionContext) interface{} {
if ctx.LiteralValue() != nil {
return ctx.LiteralValue().Accept(s)
}
if ctx.FunctionCall() != nil {
return ctx.FunctionCall().Accept(s)
}
if ctx.InterpString() != nil {
return ctx.InterpString().Accept(s)
}
if ctx.MULTILINE_STRING() != nil {
finalString := strings.ReplaceAll(ctx.MULTILINE_STRING().GetText(), "'''", "")
finalString = strings.ReplaceAll(finalString, "\r", "")
finalString = strings.ReplaceAll(finalString, "\n", "")
return finalString
}
if ctx.Array() != nil {
return ctx.Array().Accept(s)
}
if ctx.Object() != nil {
return ctx.Object().Accept(s)
}
if ctx.ParenthesizedExpression() != nil {
return ctx.ParenthesizedExpression().Accept(s)
}
return nil
}
func (s *BicepVisitor) VisitLiteralValue(ctx *parser.LiteralValueContext) interface{} {
if ctx.NUMBER() != nil {
number, _ := strconv.ParseFloat(ctx.NUMBER().GetText(), 32)
return number
}
if ctx.TRUE() != nil {
return true
}
if ctx.FALSE() != nil {
return false
}
if ctx.Identifier() != nil {
identifier, ok := ctx.Identifier().Accept(s).(string)
if ok {
identifier = convertToParamVar(identifier, s)
return identifier
}
}
return nil
}
func acceptExpressionAtIndex(idx int, ctx *parser.InterpStringContext, s *BicepVisitor) interface{} {
if ctx.Expression(idx) != nil {
return ctx.Expression(idx).Accept(s)
}
return ""
}
func buildComplexInterp(interpStringValues []interface{}) string {
str := ""
for _, v := range interpStringValues {
switch v := v.(type) {
case string:
str += v
case map[string][]interface{}:
for identifier, argumentList := range v {
resStr := "[" + identifier + "("
for idx, arg := range argumentList {
stringArg, ok := arg.(string)
if !ok {
return ""
}
resStr += stringArg
if idx < len(argumentList)-1 {
resStr += ", "
}
}
resStr += ")]"
str += resStr
}
}
}
return str
}
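// Example: the values ["name-", {"uniqueString": ["rg"]}, "-suffix"] are joined
// into "name-[uniqueString(rg)]-suffix"; any non-string argument aborts the
// build and yields "".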
func parseComplexInterp(ctx *parser.InterpStringContext, s *BicepVisitor) string {
interpString := []interface{}{}
if ctx.STRING_LEFT_PIECE() == nil || ctx.STRING_RIGHT_PIECE() == nil {
return ""
}
leftPiece := ctx.STRING_LEFT_PIECE().GetText()
rightPiece := ctx.STRING_RIGHT_PIECE().GetText()
middlePieces := ctx.AllSTRING_MIDDLE_PIECE()
interpString = append(interpString, leftPiece)
if len(middlePieces) > 0 {
for idx, val := range middlePieces {
expression := acceptExpressionAtIndex(idx, ctx, s)
interpString = append(interpString, expression, val.GetText())
}
}
lastExpression := acceptExpressionAtIndex(len(middlePieces), ctx, s)
interpString = append(interpString,
lastExpression,
rightPiece)
resultString := buildComplexInterp(interpString)
return resultString
}
func (s *BicepVisitor) VisitInterpString(ctx *parser.InterpStringContext) interface{} {
if ctx.GetChildCount() > 1 {
complexInterpString := parseComplexInterp(ctx, s)
return complexInterpString
}
if ctx.STRING_COMPLETE() != nil {
unformattedString := ctx.STRING_COMPLETE().GetText()
finalString := strings.ReplaceAll(unformattedString, "'", "")
return finalString
}
return ""
}
func (s *BicepVisitor) VisitArray(ctx *parser.ArrayContext) interface{} {
array := []interface{}{}
for _, val := range ctx.AllArrayItem() {
expression := val.Accept(s)
if isParameter(expression) || isDotFunction(expression) {
expression = "[" + expression.(string) + "]"
}
array = append(array, expression)
}
return array
}
func (s *BicepVisitor) VisitArrayItem(ctx *parser.ArrayItemContext) interface{} {
return checkAcceptExpression(ctx.Expression(), s)
}
func isParameter(expression interface{}) bool {
exp, ok := expression.(string)
if !ok {
return false
}
return strings.Contains(exp, "parameters(") || strings.Contains(exp, "variables(")
}
func isDotFunction(expression interface{}) bool {
exp, ok := expression.(string)
if !ok {
return false
}
return strings.Contains(exp, ").")
}
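// Examples: isParameter("parameters('sku')") and
// isDotFunction("resourceGroup().location") both report true, signalling that
// the value must be wrapped in "[...]" to become a valid ARM expression.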
func (s *BicepVisitor) VisitObject(ctx *parser.ObjectContext) interface{} {
object := map[string]interface{}{}
propertiesLines := map[string]interface{}{}
for _, val := range ctx.AllObjectProperty() {
objectProperty, ok := val.Accept(s).(KicsObjectProperty)
if !ok {
return object
}
for key, val := range objectProperty.objectProperty {
object[key] = val
line := map[string]interface{}{kicsLine: objectProperty.line}
arr, isArray := val.([]interface{})
if isArray {
for range arr {
arrLine := map[string]int{kicsLine: objectProperty.line}
kicsDefault := map[string]interface{}{kicsPrefix + "_default": arrLine}
kicsArr := []interface{}{kicsDefault}
line[kicsArray] = kicsArr
}
}
propertiesLines[kicsPrefix+key] = line
}
}
defaultLine := map[string]int{kicsLine: ctx.GetStart().GetLine()}
propertiesLines[kicsPrefix+"_default"] = defaultLine
object[kicsLines] = propertiesLines
return object
}
func (s *BicepVisitor) VisitObjectProperty(ctx *parser.ObjectPropertyContext) interface{} {
objectProperty := map[string]interface{}{}
if ctx.Expression() != nil {
objectValue := ctx.Expression().Accept(s)
if isParameter(objectValue) || isDotFunction(objectValue) {
objectValue = "[" + objectValue.(string) + "]"
}
if ctx.Identifier() != nil {
identifier, ok := ctx.Identifier().Accept(s).(string)
if ok {
objectProperty[identifier] = objectValue
}
}
if ctx.InterpString() != nil {
interpString, ok := ctx.InterpString().Accept(s).(string)
if ok {
objectProperty[interpString] = objectValue
}
}
}
return KicsObjectProperty{objectProperty: objectProperty, line: ctx.GetStart().GetLine()}
}
func (s *BicepVisitor) VisitIdentifier(ctx *parser.IdentifierContext) interface{} {
contexts := []antlr.TerminalNode{
ctx.IDENTIFIER(),
ctx.IMPORT(),
ctx.WITH(),
ctx.AS(),
ctx.METADATA(),
ctx.PARAM(),
ctx.RESOURCE(),
ctx.OUTPUT(),
ctx.EXISTING(),
ctx.VAR(),
ctx.IF(),
ctx.FOR(),
ctx.IN(),
ctx.TRUE(),
ctx.FALSE(),
ctx.NULL(),
ctx.TARGET_SCOPE(),
ctx.STRING(),
ctx.INT(),
ctx.BOOL(),
ctx.ARRAY(),
ctx.OBJECT(),
ctx.TYPE(),
ctx.MODULE(),
}
for _, context := range contexts {
if context != nil {
return context.GetText()
}
}
return ""
}
func (s *BicepVisitor) VisitParenthesizedExpression(ctx *parser.ParenthesizedExpressionContext) interface{} {
return checkAcceptExpression(ctx.Expression(), s)
}
func (s *BicepVisitor) VisitDecorator(ctx *parser.DecoratorContext) interface{} {
if ctx.DecoratorExpression() == nil {
return map[string][]interface{}{}
}
decorator := ctx.DecoratorExpression().Accept(s)
return decorator
}
func (s *BicepVisitor) VisitDecoratorExpression(ctx *parser.DecoratorExpressionContext) interface{} {
if ctx.FunctionCall() == nil {
return map[string][]interface{}{}
}
return ctx.FunctionCall().Accept(s)
}
func (s *BicepVisitor) VisitFunctionCall(ctx *parser.FunctionCallContext) interface{} {
var argumentList []interface{}
identifier := checkAcceptAntlrString(ctx.Identifier(), s)
if ctx.ArgumentList() != nil {
var ok bool
argumentList, ok = ctx.ArgumentList().Accept(s).([]interface{})
if !ok {
return map[string]interface{}{}
}
}
functionCall := map[string][]interface{}{
identifier: argumentList,
}
return functionCall
}
func (s *BicepVisitor) VisitArgumentList(ctx *parser.ArgumentListContext) interface{} {
var argumentList []interface{}
for _, val := range ctx.AllExpression() {
argument := val.Accept(s)
argumentList = append(argumentList, argument)
}
return argumentList
}
func (s *BicepVisitor) VisitTypeExpression(ctx *parser.TypeExpressionContext) interface{} {
return checkAcceptAntlrString(ctx.Identifier(), s)
}
// GetKind returns the kind of the parser
func (p *Parser) GetKind() model.FileKind {
return model.KindBICEP
}
// SupportedExtensions returns Bicep extensions
func (p *Parser) SupportedExtensions() []string {
return []string{".bicep"}
}
// SupportedTypes returns the platform types supported by this parser: bicep and azureresourcemanager
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{"bicep": true, "azureresourcemanager": true}
}
// GetCommentToken returns the comment token of Bicep files - //
func (p *Parser) GetCommentToken() string {
return "//"
}
// StringifyContent converts the original file content into its string representation
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
// Resolve returns the bicep file content unchanged
func (p *Parser) Resolve(fileContent []byte, _ string, _ bool, _ int) ([]byte, error) {
return fileContent, nil
}
// GetResolvedFiles returns the map of resolved files (always empty for bicep)
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return make(map[string]model.ResolvedFile)
}
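// Usage sketch (hypothetical file name, assuming it exists on disk):
//   p := &Parser{}
//   docs, _, err := p.Parse("main.bicep", nil)
//   // docs[0] is a JSON document with "parameters", "variables" and "resources".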
package buildah
import (
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"mvdan.cc/sh/v3/syntax"
)
func getKicsIgnore(comment string) string {
commentLower := model.KICSCommentRgxp.ReplaceAllString(strings.ToLower(comment), "")
commentLower = strings.Trim(commentLower, "\r\n") // single cutset so a trailing "\r\n" pair is fully trimmed
return commentLower
}
func (i *Info) getIgnoreLines(comment *syntax.Comment) {
// get normal comments
i.IgnoreLines = append(i.IgnoreLines, int(comment.Hash.Line())) //nolint:gosec
if model.KICSCommentRgxp.MatchString(comment.Text) {
kicsIgnore := getKicsIgnore(comment.Text)
switch model.CommentCommand(kicsIgnore) {
case model.IgnoreLine:
// get kics-scan ignore-line
i.IgnoreLines = append(i.IgnoreLines, int(comment.Hash.Line())+1) //nolint:gosec
case model.IgnoreBlock:
// get kics-scan ignore-block for ignoreFromBlock
i.IgnoreBlockLines = append(i.IgnoreBlockLines, int(comment.Pos().Line())) //nolint:gosec
}
}
}
func (i *Info) getIgnoreBlockLines(comments []syntax.Comment, start, end int) {
for c := range comments {
comment := comments[c]
// get kics-scan ignore-block related to command
if model.KICSCommentRgxp.MatchString(comment.Text) {
kicsIgnore := getKicsIgnore(comment.Text)
if model.CommentCommand(kicsIgnore) == model.IgnoreBlock {
if int(comment.Hash.Line()) == start-1 { //nolint:gosec
i.IgnoreLines = append(i.IgnoreLines, model.Range(start, end)...)
i.IgnoreBlockLines = append(i.IgnoreBlockLines, model.Range(start, end)...)
}
}
}
}
}
func (i *Info) ignoreFromBlock() {
for j := range i.IgnoreBlockLines {
for z := range i.FromValues {
i.getIgnoreLinesFromBlock(i.IgnoreBlockLines[j], i.FromValues[z])
}
}
}
func (i *Info) getIgnoreLinesFromBlock(ignoreBlockLine int, fromValue FromValue) {
start := fromValue.Line
value := fromValue.Value
if start == ignoreBlockLine+1 {
targetFrom := i.From[value]
end := targetFrom[len(targetFrom)-1].EndLine
i.IgnoreLines = append(i.IgnoreLines, model.Range(start, end)...)
}
}
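// Illustrative sketch of ignoreFromBlock: with a "kics-scan ignore-block"
// comment on the line right before a "buildah from" command, every line of
// that FROM stage should end up in IgnoreLines. The line numbers and image
// name below are hypothetical.
func exampleIgnoreFromBlock() {
i := &Info{
IgnoreBlockLines: []int{1}, // the ignore-block comment sits on line 1
FromValues:       []FromValue{{Value: "alpine:3.19", Line: 2}},
From: map[string][]Command{
"alpine:3.19": {
{Cmd: "buildah from", StartLine: 2, EndLine: 2},
{Cmd: "buildah run", StartLine: 3, EndLine: 4},
},
},
}
i.ignoreFromBlock()
_ = i.IgnoreLines // expected to cover lines 2 through 4
}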
package buildah
import (
"bytes"
"sort"
"strings"
"github.com/rs/zerolog/log"
"encoding/json"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/pkg/errors"
"mvdan.cc/sh/v3/syntax"
)
// Parser is a Buildah parser
type Parser struct {
}
// Resource separates the list of commands by FROM image
type Resource struct {
CommandList map[string][]Command `json:"command"`
}
// Command is the struct for each Buildah command
type Command struct {
Cmd string
Original string
Value string
StartLine int `json:"_kics_line"`
EndLine int
}
// FromValue is the struct for each "buildah from" value
type FromValue struct {
Value string
Line int
}
// Info holds the information relevant to the Buildah parser
type Info struct {
IgnoreLines []int
From map[string][]Command
FromValues []FromValue
IgnoreBlockLines []int
}
const (
buildah = "buildah"
)
// Resolve - replaces or modifies in-memory content before parsing
func (p *Parser) Resolve(fileContent []byte, _ string, _ bool, _ int) ([]byte, error) {
return fileContent, nil
}
// Parse - parses a Buildah file to JSON
func (p *Parser) Parse(_ string, fileContent []byte) ([]model.Document, []int, error) {
var info Info
info.From = map[string][]Command{}
reader := bytes.NewReader(fileContent)
f, err := syntax.NewParser(syntax.KeepComments(true)).Parse(reader, "")
if err != nil {
return nil, []int{}, err
}
syntax.Walk(f, func(node syntax.Node) bool {
switch x := node.(type) {
case *syntax.Stmt:
info.getStmt(x)
case *syntax.Comment:
info.getIgnoreLines(x)
}
return true
})
// get kics-scan ignore-block related to from
info.ignoreFromBlock()
var documents []model.Document
var resource Resource
resource.CommandList = info.From
doc := &model.Document{}
j, err := json.Marshal(resource)
if err != nil {
return nil, []int{}, errors.Wrap(err, "failed to Marshal Buildah")
}
err = json.Unmarshal(j, &doc)
if err != nil {
return nil, []int{}, errors.Wrap(err, "failed to Unmarshal Buildah")
}
documents = append(documents, *doc)
sort.Ints(info.IgnoreLines)
return documents, info.IgnoreLines, nil
}
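// Illustrative usage sketch of Parse: a minimal buildah script whose commands
// are grouped by their FROM stage. The script content is hypothetical and
// error handling is shortened.
func exampleParseBuildah() {
script := []byte("buildah from alpine:3.19\nbuildah run alpine:3.19 apk add curl\n")
p := &Parser{}
docs, ignored, err := p.Parse("script.sh", script)
if err != nil {
log.Debug().Msgf("parse failed: %s", err)
return
}
_ = docs    // one document shaped as {"command": {"alpine:3.19": [...]}}
_ = ignored // lines marked by kics-scan comments, if any
}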
func (i *Info) getStmt(stmt *syntax.Stmt) {
if cmd, ok := stmt.Cmd.(*syntax.CallExpr); ok {
args := cmd.Args
// get kics-scan ignore-block related to command + get command
stCommand := i.getStmtInfo(stmt, args)
if stCommand.Cmd == "buildah from" {
fromValue := FromValue{
Value: stCommand.Value,
Line: stCommand.StartLine,
}
i.FromValues = append(i.FromValues, fromValue)
}
if stCommand.Cmd != "" && len(i.FromValues) != 0 {
v := i.FromValues[len(i.FromValues)-1].Value
i.From[v] = append(i.From[v], stCommand)
}
}
}
func (i *Info) getStmtInfo(stmt *syntax.Stmt, args []*syntax.Word) Command {
var command Command
minimumArgs := 2
if len(args) > minimumArgs {
if getWordValue(args[0]) == buildah {
cmd := "buildah " + strings.TrimSpace(getWordValue(args[1]))
fullCmd := strings.TrimSpace(getFullCommand(args))
value := strings.TrimPrefix(fullCmd, cmd)
start := int(args[0].Pos().Line()) //nolint:gosec
end := int(args[len(args)-1].End().Line()) //nolint:gosec
command = Command{
Cmd: cmd,
Original: fullCmd,
StartLine: start,
EndLine: end,
Value: strings.TrimSpace(value),
}
// get kics-scan ignore-block comments
i.getIgnoreBlockLines(stmt.Comments, start, end)
return command
}
}
return command
}
func getWordValue(wd *syntax.Word) string {
printer := syntax.NewPrinter()
var buf bytes.Buffer
err := printer.Print(&buf, wd)
if err != nil {
log.Debug().Msgf("failed to get word value: %s", err)
}
value := buf.String()
buf.Reset()
return value
}
func getFullCommand(args []*syntax.Word) string {
var buf bytes.Buffer
printer := syntax.NewPrinter()
call := &syntax.CallExpr{Args: args}
err := printer.Print(&buf, call)
if err != nil {
log.Debug().Msgf("failed to get full command: %s", err)
}
command := buf.String()
buf.Reset()
// strip newlines, tabs and line continuations in a single pass
command = strings.NewReplacer("\n", "", "\r", "", "\t", "", "\\", "").Replace(command)
return command
}
// GetKind returns the kind of the parser
func (p *Parser) GetKind() model.FileKind {
return model.KindBUILDAH
}
// SupportedExtensions returns Buildah extensions
func (p *Parser) SupportedExtensions() []string {
return []string{".sh"}
}
// SupportedTypes returns types supported by this parser, which are Buildah
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{"buildah": true}
}
// GetCommentToken returns the comment token of Buildah - #
func (p *Parser) GetCommentToken() string {
return "#"
}
// StringifyContent converts original content into string formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
// GetResolvedFiles returns the resolved files
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return make(map[string]model.ResolvedFile)
}
package docker
import (
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/moby/buildkit/frontend/dockerfile/parser"
)
// ignore is a structure that contains information about the lines that are being ignored.
type ignore struct {
from map[string]bool
lines []int
}
// newIgnore returns a new ignore struct.
func newIgnore() *ignore {
return &ignore{
from: make(map[string]bool),
lines: make([]int, 0),
}
}
// setIgnore adds a new entry to the ignore struct for the 'FROM' block to be ignored
func (i *ignore) setIgnore(from string) {
i.from[from] = true
}
// ignoreBlock adds block lines to be ignored to the ignore struct.
func (i *ignore) ignoreBlock(node *parser.Node, from string) {
if _, ok := i.from[from]; ok {
i.lines = append(i.lines, model.Range(node.StartLine, node.EndLine)...)
}
}
// getIgnoreLines returns the lines that are being ignored.
func (i *ignore) getIgnoreLines() []int {
return model.RemoveDuplicates(i.lines)
}
// getIgnoreComments returns lines to be ignored for each node of the dockerfile
func (i *ignore) getIgnoreComments(node *parser.Node) (ignore bool) {
if len(node.PrevComment) == 0 {
return false
}
for idx, comment := range node.PrevComment {
switch processComment(comment) {
case model.IgnoreLine:
i.lines = append(i.lines, model.Range(node.StartLine-(idx+1), node.EndLine)...)
case model.IgnoreBlock:
i.lines = append(i.lines, node.StartLine-(idx+1))
ignore = true
default:
i.lines = append(i.lines, node.StartLine-(idx+1))
}
}
return
}
// processComment returns the type of comment given.
func processComment(comment string) (value model.CommentCommand) {
commentLower := strings.ToLower(comment)
if model.KICSCommentRgxp.MatchString(commentLower) {
commentLower = model.KICSCommentRgxp.ReplaceAllString(commentLower, "")
commands := strings.Split(strings.Trim(commentLower, "\n"), " ")
value = model.ProcessCommands(commands)
return
}
return model.CommentCommand(comment)
}
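// Illustrative sketch: getIgnoreComments inspects the comments right above a
// Dockerfile instruction. Assuming model.KICSCommentRgxp matches the
// "kics-scan" marker, an ignore-line command marks the comment line and the
// instruction's own lines as ignored. Line numbers are hypothetical.
func exampleGetIgnoreComments() {
i := newIgnore()
node := &parser.Node{
PrevComment: []string{"kics-scan ignore-line"},
StartLine:   5,
EndLine:     5,
}
block := i.getIgnoreComments(node) // false: ignore-line is not a block command
_ = block
_ = i.getIgnoreLines() // expected to contain lines 4 and 5
}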
package docker
import (
"bytes"
"encoding/json"
"fmt"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/pkg/errors"
)
// Parser is a Dockerfile parser
type Parser struct {
}
// Resource separates the list of commands by FROM stage; Arguments holds the commands that appear before any FROM
type Resource struct {
CommandList map[string][]Command `json:"command"`
Arguments []Command `json:"args"`
}
// Command is the struct for each dockerfile command
type Command struct {
Cmd string
SubCmd string
Flags []string
Value []string
Original string
StartLine int `json:"_kics_line"`
EndLine int
JSON bool
}
// Resolve - replaces or modifies in-memory content before parsing
func (p *Parser) Resolve(fileContent []byte, _ string, _ bool, _ int) ([]byte, error) {
return fileContent, nil
}
// Parse - parses a Dockerfile to JSON
func (p *Parser) Parse(_ string, fileContent []byte) ([]model.Document, []int, error) {
var documents []model.Document
reader := bytes.NewReader(fileContent)
parsed, err := parser.Parse(reader)
if err != nil {
return nil, []int{}, errors.Wrap(err, "failed to parse Dockerfile")
}
fromValue := ""
from := make(map[string][]Command)
arguments := make([]Command, 0)
ignoreStruct := newIgnore()
args := make(map[string]string)
envs := make(map[string]string)
for _, child := range parsed.AST.Children {
child.Value = strings.ToLower(child.Value)
if child.Value == "from" {
fromValue = strings.TrimPrefix(child.Original, "FROM ")
}
if ignoreStruct.getIgnoreComments(child) {
ignoreStruct.setIgnore(fromValue)
}
ignoreStruct.ignoreBlock(child, fromValue)
cmd := Command{
Cmd: child.Value,
Original: child.Original,
Flags: child.Flags,
StartLine: child.StartLine,
EndLine: child.EndLine,
}
if child.Next != nil && len(child.Next.Children) > 0 {
cmd.SubCmd = strings.ToLower(child.Next.Children[0].Value)
child = child.Next.Children[0]
}
cmd.JSON = child.Attributes["json"]
for n := child.Next; n != nil; n = n.Next {
cmd.Value = append(cmd.Value, n.Value)
}
if child.Value != "arg" {
cmd.Value = resolveArgsAndEnvs(cmd.Value, args)
} else {
args = saveArgs(args, cmd.Value[0])
}
if child.Value != "env" {
cmd.Value = resolveArgsAndEnvs(cmd.Value, envs)
} else {
envs = saveEnvs(envs, cmd.Value)
}
if fromValue == "" {
arguments = append(arguments, cmd)
} else {
from[fromValue] = append(from[fromValue], cmd)
}
}
doc := &model.Document{}
var resource Resource
resource.CommandList = from
resource.Arguments = arguments
j, err := json.Marshal(resource)
if err != nil {
return nil, []int{}, errors.Wrap(err, "failed to Marshal Dockerfile")
}
if err := json.Unmarshal(j, &doc); err != nil {
return nil, []int{}, errors.Wrap(err, "failed to Unmarshal Dockerfile")
}
documents = append(documents, *doc)
ignoreLines := ignoreStruct.getIgnoreLines()
return documents, ignoreLines, nil
}
// GetKind returns the kind of the parser
func (p *Parser) GetKind() model.FileKind {
return model.KindDOCKER
}
// SupportedExtensions returns Dockerfile extensions
func (p *Parser) SupportedExtensions() []string {
return []string{"Dockerfile", ".dockerfile", ".ubi8", ".debian", "possibleDockerfile"}
}
// SupportedTypes returns types supported by this parser, which are dockerfile
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{"dockerfile": true}
}
// GetCommentToken returns the comment token of Docker - #
func (p *Parser) GetCommentToken() string {
return "#"
}
// StringifyContent converts original content into string formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
// GetResolvedFiles returns the list of files that are resolved
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return make(map[string]model.ResolvedFile)
}
func resolveArgsAndEnvs(values []string, args map[string]string) []string {
for i := range values {
for arg := range args {
ref1 := fmt.Sprintf("${%s}", arg)
values[i] = strings.Replace(values[i], ref1, args[arg], 1)
ref2 := fmt.Sprintf("$%s", arg)
values[i] = strings.Replace(values[i], ref2, args[arg], 1)
}
}
return values
}
func saveArgs(args map[string]string, argValue string) map[string]string {
value := strings.Split(argValue, "=")
if len(value) == 2 {
args[value[0]] = value[1]
}
if len(value) > 2 {
// handle values that themselves contain '=' (e.g. ARG VAR=foo=bar)
args[value[0]] = strings.Join(value[1:], "=")
}
return args
}
func saveEnvs(envs map[string]string, envValues []string) map[string]string {
if len(envValues) == 2 {
envs[envValues[0]] = envValues[1]
}
return envs
}
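// Illustrative sketch of the ARG handling above: saveArgs records the default
// from "VERSION=1.21", and resolveArgsAndEnvs substitutes ${VERSION} (or
// $VERSION) in later command values. The values are hypothetical.
func exampleResolveArgs() {
args := saveArgs(map[string]string{}, "VERSION=1.21")
values := resolveArgsAndEnvs([]string{"golang:${VERSION}"}, args)
_ = values // expected: ["golang:1.21"]
}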
package converter
import (
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/emicklei/proto"
)
// JSONProto is a JSON representation of a proto file
type JSONProto struct {
Syntax string `json:"syntax"`
PackageName string `json:"package"`
Messages map[string]interface{} `json:"messages"`
Enum map[string]interface{} `json:"enum"`
Services map[string]interface{} `json:"services"`
Imports map[string]interface{} `json:"imports"`
Options []Option `json:"options"`
Lines map[string]model.LineObject `json:"_kics_lines"`
linesToIgnore []int `json:"-"`
linesNotToIgnore []int `json:"-"`
}
// Service is a JSON representation of a proto service
type Service struct {
RPC map[string]RPC `json:"rpc,omitempty"`
Options map[string]Option `json:"options,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// Message is a JSON representation of a proto message
type Message struct {
Field map[string]*Field `json:"field,omitempty"`
Reserved []*Reserved `json:"reserved,omitempty"`
OneOf map[string]OneOf `json:"oneof,omitempty"`
Enum map[string]Enum `json:"enum,omitempty"`
Map map[string]*Map `json:"map,omitempty"`
InnerMessage map[string]Message `json:"inner_message,omitempty"`
Options map[string]Option `json:"options,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// Map is a JSON representation of a proto map
type Map struct {
*Field `json:"field,omitempty"`
KeyType string `json:"key_type,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// OneOf is a JSON representation of a proto oneof
type OneOf struct {
Field map[string]*Field `json:"fields,omitempty"`
Options map[string]Option `json:"options,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// Enum is a JSON representation of a proto enum
type Enum struct {
Reserved []*Reserved `json:"reserved,omitempty"`
EnumField map[string]EnumValue `json:"field,omitempty"`
Options map[string]Option `json:"options,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// EnumValue is a JSON representation of a proto enum value
type EnumValue struct {
Value int `json:"value,omitempty"`
Options Option `json:"options,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// Import is a JSON representation of a proto import
type Import struct {
Kind string `json:"kind,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// Reserved is a JSON representation of a proto reserved
type Reserved struct {
Ranges []proto.Range `json:"ranges,omitempty"`
FieldNames []string `json:"fieldNames,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// Field is a JSON representation of a proto field
type Field struct {
Type string `json:"type,omitempty"`
Sequence int `json:"sequence,omitempty"`
Repeated bool `json:"repeated,omitempty"`
Required bool `json:"required,omitempty"`
Optional bool `json:"optional,omitempty"`
Options []Option `json:"options,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// RPC is a JSON representation of a proto service RPC
type RPC struct {
RequestType string `json:"requestType,omitempty"`
StreamsRequest bool `json:"streamsRequest,omitempty"`
ReturnsType string `json:"returnsType,omitempty"`
StreamsReturns bool `json:"streamsReturns,omitempty"`
Options []Option `json:"options,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// Option is a JSON representation of a proto option
type Option struct {
Name string `json:"name,omitempty"`
Constant OptionLiteral `json:"constant,omitempty"`
IsEmbedded bool `json:"isEmbedded,omitempty"`
AggregatedConstants []*OptionLiteral `json:"aggregatedConstants,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// OptionLiteral is a JSON representation of a proto option literal
type OptionLiteral struct {
Name string `json:"name,omitempty"`
Source string `json:"source,omitempty"`
IsString bool `json:"isString,omitempty"`
QuoteRune rune `json:"quoteRune,omitempty"`
Array []OptionLiteral `json:"array,omitempty"`
Map map[string]OptionLiteral `json:"map,omitempty"`
OrderedMap []OptionLiteral `json:"orderedMap,omitempty"`
Lines map[string]model.LineObject `json:"_kics_lines,omitempty"`
}
// newJSONProto creates a new JSONProto struct with default values for all fields
func newJSONProto() *JSONProto {
return &JSONProto{
Messages: make(map[string]interface{}),
Services: make(map[string]interface{}),
Imports: make(map[string]interface{}),
Options: make([]Option, 0),
Enum: make(map[string]interface{}),
Syntax: "",
PackageName: "",
Lines: make(map[string]model.LineObject),
linesToIgnore: make([]int, 0),
}
}
const kicsLinesKey = "_kics_"
// Convert converts a proto file to a JSONProto struct
func Convert(nodes *proto.Proto) (file *JSONProto, linesIgnore []int) {
jproto := newJSONProto()
// handle panic during conversion process
defer func() {
if r := recover(); r != nil {
errMessage := "Recovered from panic during conversion of JSONProto " + jproto.PackageName
utils.HandlePanic(r, errMessage)
}
}()
messageLines := make(map[string]model.LineObject)
enumLines := make(map[string]model.LineObject)
serviceLines := make(map[string]model.LineObject)
importLines := make(map[string]model.LineObject)
defaultArr := make([]map[string]*model.LineObject, 0)
for _, elem := range nodes.Elements {
switch element := elem.(type) {
case *proto.Message:
jproto.processCommentProto(element.Comment, element.Position.Line, element)
jproto.Messages[element.Name] = jproto.convertMessage(element)
messageLines[kicsLinesKey+element.Name] = model.LineObject{
Line: element.Position.Line,
Arr: make([]map[string]*model.LineObject, 0),
}
case *proto.Service:
jproto.processCommentProto(element.Comment, element.Position.Line, element)
jproto.convertService(element)
serviceLines[kicsLinesKey+element.Name] = model.LineObject{
Line: element.Position.Line,
Arr: make([]map[string]*model.LineObject, 0),
}
case *proto.Package:
jproto.processCommentProto(element.Comment, element.Position.Line, element)
jproto.PackageName = element.Name
jproto.Lines["_kics_package"] = model.LineObject{
Line: element.Position.Line,
}
case *proto.Import:
jproto.processCommentProto(element.Comment, element.Position.Line, element)
jproto.Imports[element.Filename] = Import{
Kind: element.Kind,
}
importLines[kicsLinesKey+element.Filename] = model.LineObject{
Line: element.Position.Line,
Arr: make([]map[string]*model.LineObject, 0),
}
case *proto.Option:
jproto.processCommentProto(element.Comment, element.Position.Line, element)
jproto.Options = append(jproto.Options, jproto.convertSingleOption(element))
defaultArr = append(defaultArr, map[string]*model.LineObject{
element.Name: {
Line: element.Position.Line,
},
})
case *proto.Enum:
jproto.processCommentProto(element.Comment, element.Position.Line, element)
jproto.Enum[element.Name] = jproto.convertEnum(element)
enumLines[kicsLinesKey+element.Name] = model.LineObject{
Line: element.Position.Line,
Arr: make([]map[string]*model.LineObject, 0),
}
case *proto.Syntax:
jproto.processCommentProto(element.Comment, element.Position.Line, element)
jproto.Syntax = element.Value
jproto.Lines["_kics_syntax"] = model.LineObject{
Line: element.Position.Line,
}
}
}
// set line information
jproto.Messages["_kics_lines"] = messageLines
jproto.Enum["_kics_lines"] = enumLines
jproto.Services["_kics_lines"] = serviceLines
jproto.Imports["_kics_lines"] = importLines
jproto.Lines["kics__default"] = model.LineObject{
Line: 0,
Arr: defaultArr,
}
return jproto, model.RemoveDuplicates(jproto.linesToIgnore)
}
// convertMessage converts a proto message to a JSON message
func (j *JSONProto) convertMessage(n *proto.Message) Message {
message := Message{
Field: make(map[string]*Field),
Reserved: make([]*Reserved, 0),
OneOf: make(map[string]OneOf),
Enum: make(map[string]Enum),
Map: make(map[string]*Map),
InnerMessage: make(map[string]Message),
Options: make(map[string]Option),
Lines: make(map[string]model.LineObject),
}
defaultArr := make([]map[string]*model.LineObject, 0)
for _, field := range n.Elements {
switch field := field.(type) {
case *proto.NormalField:
j.processCommentProto(field.Comment, field.Position.Line, field)
message.Lines[kicsLinesKey+field.Name] = model.LineObject{
Line: field.Position.Line,
}
message.Field[field.Name] = &Field{
Type: field.Type,
Sequence: field.Sequence,
Repeated: field.Repeated,
Required: field.Required,
Options: j.convertOption(field.Options),
Lines: map[string]model.LineObject{
"_kics__default": {Line: field.Position.Line},
},
}
case *proto.Reserved:
j.processCommentProto(field.Comment, field.Position.Line, field)
message.Reserved = append(message.Reserved, j.convertReserved(field))
defaultArr = append(defaultArr, map[string]*model.LineObject{
"Reserved": {
Line: field.Position.Line,
},
})
case *proto.Oneof:
j.processCommentProto(field.Comment, field.Position.Line, field)
message.OneOf[field.Name] = j.convertOneOf(field)
message.Lines[kicsLinesKey+field.Name] = model.LineObject{
Line: field.Position.Line,
}
case *proto.Enum:
j.processCommentProto(field.Comment, field.Position.Line, field)
message.Enum[field.Name] = j.convertEnum(field)
message.Lines[kicsLinesKey+field.Name] = model.LineObject{
Line: field.Position.Line,
}
case *proto.MapField:
j.processCommentProto(field.Comment, field.Position.Line, field)
message.Map[field.Name] = &Map{
Field: &Field{
Type: field.Type,
Sequence: field.Sequence,
Lines: map[string]model.LineObject{
"_kics__default": {Line: field.Position.Line},
},
},
KeyType: field.KeyType,
}
message.Lines[kicsLinesKey+field.Name] = model.LineObject{
Line: field.Position.Line,
}
case *proto.Message:
j.processCommentProto(field.Comment, field.Position.Line, field)
message.InnerMessage[field.Name] = j.convertMessage(field)
message.Lines[kicsLinesKey+field.Name] = model.LineObject{
Line: field.Position.Line,
}
case *proto.Option:
j.processCommentProto(field.Comment, field.Position.Line, field)
message.Options[field.Name] = j.convertSingleOption(field)
message.Lines[kicsLinesKey+field.Name] = model.LineObject{
Line: field.Position.Line,
}
}
}
message.Lines["_kics__default"] = model.LineObject{
Line: n.Position.Line,
Arr: defaultArr,
}
return message
}
// convertEnum converts a proto enum to a JSON enum
func (j *JSONProto) convertEnum(n *proto.Enum) Enum {
enum := Enum{
Reserved: make([]*Reserved, 0),
EnumField: make(map[string]EnumValue),
Options: make(map[string]Option),
Lines: make(map[string]model.LineObject),
}
defaultArr := make([]map[string]*model.LineObject, 0)
for _, elem := range n.Elements {
switch elem := elem.(type) {
case *proto.EnumField:
j.processCommentProto(elem.Comment, elem.Position.Line, elem)
enum.EnumField[elem.Name] = EnumValue{
Value: elem.Integer,
Options: j.convertSingleOption(elem.ValueOption),
Lines: map[string]model.LineObject{
"_kics__default": {Line: elem.Position.Line},
},
}
enum.Lines[kicsLinesKey+elem.Name] = model.LineObject{
Line: elem.Position.Line,
}
case *proto.Reserved:
j.processCommentProto(elem.Comment, elem.Position.Line, elem)
enum.Reserved = append(enum.Reserved, j.convertReserved(elem))
defaultArr = append(defaultArr, map[string]*model.LineObject{
"Reserved": {
Line: elem.Position.Line,
},
})
case *proto.Option:
j.processCommentProto(elem.Comment, elem.Position.Line, elem)
enum.Options[elem.Name] = j.convertSingleOption(elem)
enum.Lines[kicsLinesKey+elem.Name] = model.LineObject{
Line: elem.Position.Line,
}
}
}
enum.Lines["_kics__default"] = model.LineObject{
Line: n.Position.Line,
Arr: defaultArr,
}
return enum
}
// convertOneOf converts a proto oneof to a JSON oneof
func (j *JSONProto) convertOneOf(n *proto.Oneof) OneOf {
oneof := OneOf{
Field: make(map[string]*Field),
Options: make(map[string]Option),
Lines: make(map[string]model.LineObject),
}
oneof.Lines["_kics__default"] = model.LineObject{
Line: n.Position.Line,
Arr: make([]map[string]*model.LineObject, 0),
}
for _, elem := range n.Elements {
switch elem := elem.(type) {
case *proto.OneOfField:
j.processCommentProto(elem.Comment, elem.Position.Line, elem)
oneof.Field[elem.Name] = &Field{
Type: elem.Type,
Sequence: elem.Sequence,
Options: j.convertOption(elem.Options),
Lines: map[string]model.LineObject{
"_kics__default": {Line: elem.Position.Line},
},
}
oneof.Lines[kicsLinesKey+elem.Name] = model.LineObject{
Line: elem.Position.Line,
}
case *proto.Option:
j.processCommentProto(elem.Comment, elem.Position.Line, elem)
oneof.Options[elem.Name] = j.convertSingleOption(elem)
oneof.Lines[kicsLinesKey+elem.Name] = model.LineObject{
Line: elem.Position.Line,
}
}
}
return oneof
}
// convertReserved converts a proto reserved to a JSON reserved
func (j *JSONProto) convertReserved(n *proto.Reserved) *Reserved {
return &Reserved{
Ranges: n.Ranges,
FieldNames: n.FieldNames,
Lines: map[string]model.LineObject{
"_kics__default": {Line: n.Position.Line},
},
}
}
// convertService converts a proto service to a JSON service
func (j *JSONProto) convertService(n *proto.Service) {
service := Service{
RPC: make(map[string]RPC),
Options: make(map[string]Option),
Lines: make(map[string]model.LineObject),
}
service.Lines["_kics__default"] = model.LineObject{
Line: n.Position.Line,
Arr: make([]map[string]*model.LineObject, 0),
}
for _, rpc := range n.Elements {
switch rpc := rpc.(type) {
case *proto.RPC:
j.processCommentProto(rpc.Comment, rpc.Position.Line, rpc)
service.RPC[rpc.Name] = j.convertRPC(rpc)
service.Lines[kicsLinesKey+rpc.Name] = model.LineObject{
Line: rpc.Position.Line,
}
case *proto.Option:
j.processCommentProto(rpc.Comment, rpc.Position.Line, rpc)
service.Options[rpc.Name] = j.convertSingleOption(rpc)
service.Lines[kicsLinesKey+rpc.Name] = model.LineObject{
Line: rpc.Position.Line,
}
}
}
j.Services[n.Name] = service
}
// convertOption converts a proto option to a JSON option
func (j *JSONProto) convertOption(n []*proto.Option) []Option {
if n == nil {
return []Option{}
}
options := make([]Option, 0)
for _, option := range n {
j.processCommentProto(option.Comment, option.Position.Line, option)
options = append(options, Option{
Name: option.Name,
Constant: j.convertOptionLiteral(&option.Constant),
IsEmbedded: option.IsEmbedded,
Lines: map[string]model.LineObject{
"_kics__default": {Line: option.Position.Line},
},
})
}
return options
}
// convertRPC converts a proto rpc to a JSON rpc
func (j *JSONProto) convertRPC(n *proto.RPC) RPC {
return RPC{
RequestType: n.RequestType,
StreamsRequest: n.StreamsRequest,
ReturnsType: n.ReturnsType,
StreamsReturns: n.StreamsReturns,
Options: j.convertOption(n.Options),
Lines: map[string]model.LineObject{
"_kics__default": {Line: n.Position.Line},
},
}
}
// convertOptionLiteral converts a proto option literal to a JSON option literal
func (j *JSONProto) convertOptionLiteral(n *proto.Literal) OptionLiteral {
return OptionLiteral{
IsString: n.IsString,
Source: n.Source,
Name: "",
QuoteRune: n.QuoteRune,
Array: j.getArrayLiteral(n.Array),
Map: j.getMapLiteral(n.Map),
OrderedMap: j.getLiteralMap(n.OrderedMap),
Lines: map[string]model.LineObject{
"_kics__default": {Line: n.Position.Line},
},
}
}
// convertOptionNamedLiteral converts a proto option named literal to a JSON option named literal
func (j *JSONProto) convertOptionNamedLiteral(n *proto.NamedLiteral) OptionLiteral {
return OptionLiteral{
IsString: n.IsString,
Source: n.Source,
Name: n.Name,
QuoteRune: n.QuoteRune,
Array: j.getArrayLiteral(n.Array),
Map: j.getMapLiteral(n.Map),
OrderedMap: j.getLiteralMap(n.OrderedMap),
Lines: map[string]model.LineObject{
"_kics__default": {Line: n.Position.Line},
},
}
}
// convertSingleOption converts a proto option to a JSON option
func (j *JSONProto) convertSingleOption(n *proto.Option) Option {
if n == nil {
return Option{}
}
return Option{
Name: n.Name,
Constant: j.convertOptionLiteral(&n.Constant),
IsEmbedded: n.IsEmbedded,
Lines: map[string]model.LineObject{
"_kics__default": {Line: n.Position.Line},
},
}
}
// getArrayLiteral converts a proto array literal to a JSON array literal
func (j *JSONProto) getArrayLiteral(n []*proto.Literal) []OptionLiteral {
array := make([]OptionLiteral, 0)
for _, elem := range n {
array = append(array, j.convertOptionLiteral(elem))
}
return array
}
// getMapLiteral converts a proto map literal to a JSON map literal
func (j *JSONProto) getMapLiteral(n map[string]*proto.Literal) map[string]OptionLiteral {
returnMap := make(map[string]OptionLiteral)
for key, value := range n {
returnMap[key] = j.convertOptionLiteral(value)
}
return returnMap
}
// getLiteralMap converts a proto literal map to a JSON literal map
func (j *JSONProto) getLiteralMap(n proto.LiteralMap) []OptionLiteral {
array := make([]OptionLiteral, 0)
for _, elem := range n {
array = append(array, j.convertOptionNamedLiteral(elem))
}
return array
}
// processCommentProto gathers lines to ignore based on comment commands
func (j *JSONProto) processCommentProto(comment *proto.Comment, lineStart int, element interface{}) {
// no comment above the element: record its line so it is not treated as a comment line
if comment == nil {
j.linesNotToIgnore = append(j.linesNotToIgnore, lineStart)
return
}
rangeToIgnore := model.Range(comment.Position.Line, comment.Position.Line+(len(comment.Lines)-1))
// ignore lines that are comments
linesToIgnore := j.ignoreComment(rangeToIgnore)
j.linesToIgnore = append(j.linesToIgnore, linesToIgnore...)
var value model.CommentCommand
for _, line := range comment.Lines {
comment := strings.ToLower(line)
if model.KICSCommentRgxp.MatchString(comment) {
comment = model.KICSCommentRgxp.ReplaceAllString(comment, "")
comment = strings.Trim(comment, "\n")
commands := strings.Split(strings.Trim(comment, "\r"), " ")
value = model.ProcessCommands(commands)
}
}
lineEnd := getLastElementLine(element, lineStart)
switch value {
case model.IgnoreLine:
j.linesToIgnore = append(j.linesToIgnore, lineStart)
case model.IgnoreBlock:
j.linesToIgnore = append(j.linesToIgnore, model.Range(lineStart, lineEnd)...)
}
}
// ignoreComment returns a slice of lines to ignore with inline comments removed
func (j *JSONProto) ignoreComment(values []int) []int {
linesToIgnore := make([]int, 0)
for _, value := range values {
if isInSlice(value, j.linesNotToIgnore) {
continue
}
linesToIgnore = append(linesToIgnore, value)
}
return linesToIgnore
}
// isInSlice checks if a value is in a slice
func isInSlice(value int, slice []int) bool {
for _, v := range slice {
if v == value {
return true
}
}
return false
}
// getLastElementLine returns the last line of an element block
func getLastElementLine(v interface{}, parentLine int) int {
position := 0
switch t := v.(type) {
case *proto.Message:
if len(t.Elements) > 0 {
position = getLastElementLine(t.Elements[len(t.Elements)-1], t.Position.Line)
}
case *proto.Service:
if len(t.Elements) > 0 {
position = getLastElementLine(t.Elements[len(t.Elements)-1], t.Position.Line)
}
case *proto.EnumField:
if len(t.Elements) > 0 {
position = getLastElementLine(t.Elements[len(t.Elements)-1], t.Position.Line)
}
case *proto.Enum:
if len(t.Elements) > 0 {
position = getLastElementLine(t.Elements[len(t.Elements)-1], t.Position.Line)
}
case *proto.Oneof:
if len(t.Elements) > 0 {
position = getLastElementLine(t.Elements[len(t.Elements)-1], t.Position.Line)
}
case *proto.RPC:
if len(t.Elements) > 0 {
position = getLastElementLine(t.Elements[len(t.Elements)-1], t.Position.Line)
}
case *proto.Group:
if len(t.Elements) > 0 {
position = getLastElementLine(t.Elements[len(t.Elements)-1], t.Position.Line)
}
default:
position = setElementLine(v, parentLine)
}
return position
}
// setElementLine sets the last line of an element block
func setElementLine(v interface{}, parentLine int) int {
position := 0
switch t := v.(type) {
case *proto.Syntax:
position = t.Position.Line
case *proto.Package:
position = t.Position.Line
case *proto.Import:
position = t.Position.Line
case *proto.NormalField:
position = t.Position.Line
case *proto.Comment:
position = t.Position.Line
case *proto.OneOfField:
position = t.Position.Line
case *proto.Reserved:
position = t.Position.Line
case *proto.MapField:
position = t.Position.Line
case *proto.Extensions:
position = t.Position.Line
default:
position = parentLine
}
return position
}
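// Illustrative usage sketch of Convert: a minimal .proto definition turned
// into the JSONProto structure. The proto snippet is hypothetical and error
// handling is shortened.
func exampleConvert() {
src := `syntax = "proto3";
package demo;
message Ping { string id = 1; }`
nodes, err := proto.NewParser(strings.NewReader(src)).Parse()
if err != nil {
return
}
jproto, ignored := Convert(nodes)
_ = jproto  // expected: jproto.Syntax == "proto3", jproto.PackageName == "demo"
_ = ignored // lines occupied by kics-scan comment commands, if any
}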
package grpc
import (
"bytes"
"encoding/json"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser/grpc/converter"
"github.com/emicklei/proto"
)
// Parser - parser for Proto files
type Parser struct {
}
// Parse - parses a proto file to JSON
func (p *Parser) Parse(_ string, fileContent []byte) ([]model.Document, []int, error) {
reader := bytes.NewReader(fileContent)
parserProto := proto.NewParser(reader)
nodes, err := parserProto.Parse()
if err != nil {
return nil, nil, err
}
var doc model.Document
jproto, linesIgnore := converter.Convert(nodes)
protoBytes, err := json.Marshal(jproto)
if err != nil {
return nil, nil, err
}
err = json.Unmarshal(protoBytes, &doc)
if err != nil {
return nil, nil, err
}
return []model.Document{doc}, linesIgnore, nil
}
// GetKind returns the kind of the parser
func (p *Parser) GetKind() model.FileKind {
return model.KindPROTO
}
// SupportedExtensions returns proto extensions
func (p *Parser) SupportedExtensions() []string {
return []string{".proto"}
}
// SupportedTypes returns types supported by this parser, which are grpc
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{"grpc": true}
}
// GetCommentToken returns the comment token of proto files - //
func (p *Parser) GetCommentToken() string {
return "//"
}
// StringifyContent converts original content into string formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
// Resolve resolves proto file variables
func (p *Parser) Resolve(fileContent []byte, _ string, _ bool, _ int) ([]byte, error) {
return fileContent, nil
}
// GetResolvedFiles returns the list of files that are resolved
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return make(map[string]model.ResolvedFile)
}
package json
import (
"bytes"
"encoding/json"
"fmt"
"sort"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
)
type jsonLine struct {
LineInfo map[string]model.Document
}
// jsonLineStruct keeps the information needed to build the line information map
// tmpParent is the parent key of the value we are currently on
// pathArr is an array containing the path elements of the value we are currently on
// noremoveidx keeps track of which elements should not be removed from pathArr on closing delimiters
// lastWasRune records whether the last token was a delimiter
// parent is the string path of the element we are currently on
type jsonLineStruct struct {
tmpParent string
pathArr []string
lastWasRune bool
noremoveidx map[int]string
parent string
}
type fifo struct {
name string // for debugging purposes
Value []int
}
// initializeJSONLine creates a map containing line information for every key present in the JSON
func initializeJSONLine(doc []byte) *jsonLine {
newMap := make(map[string]model.Document)
dec := json.NewDecoder(bytes.NewReader(doc))
jstruct := jsonLineStruct{
tmpParent: "",
pathArr: make([]string, 0),
lastWasRune: false,
noremoveidx: make(map[int]string),
parent: "",
}
line := 1
prevInputOffset := 0
// for each token inside JSON
for {
tok, err := dec.Token()
if err != nil {
break
}
if v, ok := tok.(json.Delim); ok {
// token is a delimiter
jstruct.delimSetup(v)
} else {
jstruct.lastWasRune = false
}
tokStringRepresentation := ""
// if the token is a scalar value, update the temporary parent key
switch t := tok.(type) {
case string:
jstruct.tmpParent = t
tokStringRepresentation = t
case float64:
tokStringRepresentation = fmt.Sprint(int(t))
jstruct.tmpParent = tokStringRepresentation
case bool:
tokStringRepresentation = fmt.Sprint(t)
jstruct.tmpParent = tokStringRepresentation
case nil:
tokStringRepresentation = fmt.Sprint(t)
jstruct.tmpParent = tokStringRepresentation
default:
continue
}
// get the correct line based on byte offset
currentInputOffset := int(dec.InputOffset())
for i := prevInputOffset; i < currentInputOffset; i++ {
if doc[i] == byte('\n') {
line++
}
}
prevInputOffset = currentInputOffset
// insert into line information map
if _, ok := newMap[tokStringRepresentation]; !ok {
// key info is not in map yet
newLineSlice := &fifo{name: tokStringRepresentation}
parentMap := make(map[string]interface{})
newLineSlice.add(line)
parentMap[jstruct.parent] = newLineSlice
newMap[tokStringRepresentation] = parentMap
} else if v, ok := newMap[tokStringRepresentation][jstruct.parent]; ok {
// key info is in map with the same path so append is made
newLineSlice := &fifo{name: tokStringRepresentation}
newLineSlice.add(v.(*fifo).Value...)
newLineSlice.add(line)
newMap[tokStringRepresentation][jstruct.parent] = newLineSlice
} else {
// key info is in map with different path
newLineSlice := &fifo{name: tokStringRepresentation}
newLineSlice.add(line)
newMap[tokStringRepresentation][jstruct.parent] = newLineSlice
}
}
return &jsonLine{
LineInfo: newMap,
}
}
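// Illustrative sketch: for the hypothetical document below, initializeJSONLine
// records, per token and per parent path, the lines on which the token occurs.
func exampleInitializeJSONLine() {
doc := []byte("{\n  \"name\": \"demo\",\n  \"tags\": [\"a\"]\n}")
jl := initializeJSONLine(doc)
// "name" is expected under the root path "" with line 2 at the head:
// jl.LineInfo["name"][""].(*fifo).head() == 2
_ = jl
}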
// delimSetup updates the jsonLineStruct when a JSON delimiter (e.g. '{', '[', '}', ']') is found
func (j *jsonLineStruct) delimSetup(v json.Delim) {
lenPathArr := len(j.pathArr) - 1
switch rune(v) {
case '{', '[':
// a non-delimiter token means tmpParent is a fresh key: push it onto the path
if !j.lastWasRune {
j.pathArr = append(j.pathArr, j.tmpParent)
} else {
// consecutive delimiters: sync tmpParent with the last path element and
// record it in noremoveidx so the next closing delimiter keeps that element
j.tmpParent = j.pathArr[lenPathArr]
j.noremoveidx[lenPathArr] = j.tmpParent
}
// update parent path string
j.parent = strings.Join(j.pathArr, ".")
case '}', ']':
j.closeBrackets(lenPathArr)
}
j.lastWasRune = true
}
// closeBrackets updates the parent path when a closing delimiter is found,
// popping elements from pathArr according to the noremoveidx bookkeeping
func (j *jsonLineStruct) closeBrackets(lenPathArr int) {
// check if there are elements in the pathArr
if lenPathArr > 0 {
// check if there is a noremoveidx entry for this depth
if v, ok := j.noremoveidx[lenPathArr]; ok {
// if the last elements in pathArr and noremoveidx differ,
// then the last element on pathArr was already closed and can
// be removed
if j.pathArr[lenPathArr] != v {
j.pathArr = j.pathArr[:lenPathArr]
} else {
// the last element was not closed but should be closed
// on the next closing delim
// remove from noremoveidx
delete(j.noremoveidx, lenPathArr)
}
} else {
// this last element in the pathArr was closed
// it can now be removed from the pathArr
j.pathArr = j.pathArr[:lenPathArr]
}
}
// update parent string path
j.parent = strings.Join(j.pathArr, ".")
}
// setLineInfo will set the line information of keys in json based on the line Information map
func (j *jsonLine) setLineInfo(doc map[string]interface{}) map[string]interface{} {
// set the line info for keys in root level
doc["_kics_lines"] = j.setLine(doc, 0, "", false)
return doc
}
// setLine returns the line information for the key containing values
// def is the line of the key
// father is the path to the key
// pop indicates whether consumed line numbers are removed from the fifo (used for array elements)
func (j *jsonLine) setLine(val map[string]interface{}, def int, father string, pop bool) map[string]*model.LineObject {
lineMap := make(map[string]*model.LineObject)
// set the line information of val
lineMap["_kics__default"] = &model.LineObject{
Line: def,
Arr: []map[string]*model.LineObject{},
}
// iterate through the values of the object
for key, v := range val {
// if the key with father path was not found ignore
if _, ok2 := j.LineInfo[key][father]; !ok2 {
continue
}
line := j.LineInfo[key][father]
if len(line.(*fifo).Value) == 0 {
continue
}
lineArr := make([]map[string]*model.LineObject, 0)
lineNr := line.(*fifo).head()
if pop {
lineNr = line.(*fifo).pop()
}
switch v := v.(type) {
// value is an array and must call func setSeqLines to set element lines
case []interface{}:
lineArr = j.setSeqLines(v, lineNr, father, key, lineArr)
// value is an object and must setLines for each element of the object
case map[string]interface{}:
v["_kics_lines"] = j.setLine(v, lineNr, fmt.Sprintf("%s.%s", father, key), pop)
default:
// value has no children
lineMap[fmt.Sprintf("_kics_%s", key)] = &model.LineObject{
Line: lineNr,
Arr: lineArr,
}
continue
}
// set line information of value with its default line and,
// if present, the array elements' line information
lineMap[fmt.Sprintf("_kics_%s", key)] = &model.LineObject{
Line: lineNr,
Arr: lineArr,
}
}
return lineMap
}
// setSeqLines sets the elements lines information for value of type array
func (j *jsonLine) setSeqLines(v []interface{}, def int, father, key string,
lineArr []map[string]*model.LineObject) []map[string]*model.LineObject {
// update father path with key
fatherKey := father + "." + key
// iterate over each element of the array
for _, contentEntry := range v {
defaultLineArr := j.getMapDefaultLine(v, fatherKey)
if defaultLineArr == -1 {
defaultLineArr = def
}
switch con := contentEntry.(type) {
// case element is a map/object call func setLine
case map[string]interface{}:
lineArr = append(lineArr, j.setLine(con, defaultLineArr, fatherKey, true))
// case element is a scalar
default:
stringedCon := fmt.Sprint(con)
// check if element is present in line info map
if lineStr, ok2 := j.LineInfo[stringedCon][fmt.Sprintf("%s.%s", father, key)]; ok2 {
if len(lineStr.(*fifo).Value) == 0 {
continue
}
lineArr = append(lineArr, map[string]*model.LineObject{
"_kics__default": {
Line: lineStr.(*fifo).pop(),
},
})
}
}
}
return lineArr
}
// getMapDefaultLine gathers the line numbers of every key in the array's map elements and returns the smallest one, or -1 when none is found
func (j *jsonLine) getMapDefaultLine(v []interface{}, father string) int {
returnNumber := -1
for _, contentEntry := range v {
linesNumbers := make([]int, 0)
if c, ok := contentEntry.(map[string]interface{}); ok {
for key := range c {
if _, ok2 := j.LineInfo[key][father]; !ok2 {
continue
}
line := j.LineInfo[key][father]
if len(line.(*fifo).Value) == 0 {
continue
}
linesNumbers = append(linesNumbers, line.(*fifo).head())
}
if len(linesNumbers) > 0 {
sort.Ints(linesNumbers)
returnNumber = linesNumbers[0]
}
}
}
return returnNumber
}
// fifo helpers to assist with JSON line tracking
func (f *fifo) add(elements ...int) {
f.Value = append(f.Value, elements...)
}
func (f *fifo) pop() int {
firstElement := f.Value[0]
f.Value = f.Value[1:]
return firstElement
}
func (f *fifo) head() int {
return f.Value[0]
}
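// Illustrative sketch tying the helpers above together: decorate a parsed
// document with _kics_lines entries. The document content is hypothetical.
func exampleSetLineInfo() {
raw := []byte("{\n  \"name\": \"demo\"\n}")
var parsed model.Document
if err := json.Unmarshal(raw, &parsed); err != nil {
return
}
withLines := initializeJSONLine(raw).setLineInfo(parsed)
_ = withLines["_kics_lines"] // per-key line objects for the root level
}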
package json
import (
"bytes"
"encoding/json"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/resolver/file"
)
// Parser defines a parser type
type Parser struct {
shouldIdent bool
resolvedFiles map[string]model.ResolvedFile
}
// Resolve - replaces or modifies in-memory content before parsing
func (p *Parser) Resolve(fileContent []byte, filename string, resolveReferences bool, maxResolverDepth int) ([]byte, error) {
// Resolve files passed as arguments with file resolver (e.g. file://)
res := file.NewResolver(json.Unmarshal, json.Marshal, p.SupportedExtensions())
resolvedFilesCache := make(map[string]file.ResolvedFile)
resolved := res.Resolve(fileContent, filename, 0, maxResolverDepth, resolvedFilesCache, resolveReferences)
p.resolvedFiles = res.ResolvedFiles
if len(res.ResolvedFiles) == 0 {
return fileContent, nil
}
return resolved, nil
}
// Parse parses json file and returns it as a Document
func (p *Parser) Parse(_ string, fileContent []byte) ([]model.Document, []int, error) {
r := model.Document{}
err := json.Unmarshal(fileContent, &r)
if err != nil {
var r []model.Document
err = json.Unmarshal(fileContent, &r)
return r, []int{}, err
}
jLine := initializeJSONLine(fileContent)
kicsJSON := jLine.setLineInfo(r)
// Try to parse JSON as Terraform plan
kicsPlan, err := parseTFPlan(kicsJSON)
if err != nil {
// JSON is not a tf plan
return []model.Document{kicsJSON}, []int{}, nil
}
p.shouldIdent = true
return []model.Document{kicsPlan}, []int{}, nil
}
// SupportedExtensions returns extensions supported by this parser, which is the json extension
func (p *Parser) SupportedExtensions() []string {
return []string{".json"}
}
// GetKind returns JSON constant kind
func (p *Parser) GetKind() model.FileKind {
return model.KindJSON
}
// SupportedTypes returns types supported by this parser: ansible, cloudformation, openapi, azureresourcemanager, terraform and kubernetes
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{
"ansible": true,
"cloudformation": true,
"openapi": true,
"azureresourcemanager": true,
"terraform": true,
"kubernetes": true,
}
}
// GetCommentToken returns an empty string, since JSON does not have a comment token
func (p *Parser) GetCommentToken() string {
return ""
}
// StringifyContent converts original content into string formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
if p.shouldIdent {
var out bytes.Buffer
err := json.Indent(&out, content, "", " ")
if err != nil {
return "", err
}
return out.String(), nil
}
return string(content), nil
}
// GetResolvedFiles returns resolved files
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return p.resolvedFiles
}
package json
import (
"encoding/json"
"github.com/Checkmarx/kics/v2/pkg/model"
hcl_plan "github.com/hashicorp/terraform-json"
)
// KicsPlan is an auxiliary structure for parsing tfplans as a KICS Document
type KicsPlan struct {
Resource map[string]KicsPlanResource `json:"resource"`
}
// KicsPlanResource is an auxiliary structure for parsing tfplans as a KICS Document
type KicsPlanResource map[string]KicsPlanNamedResource
// KicsPlanNamedResource is an auxiliary structure for parsing tfplans as a KICS Document
type KicsPlanNamedResource map[string]interface{}
// parseTFPlan unmarshals Document as a plan so it can be rebuilt with only
// the required information
func parseTFPlan(doc model.Document) (model.Document, error) {
var plan *hcl_plan.Plan
b, err := json.Marshal(doc)
if err != nil {
return model.Document{}, err
}
// Unmarshal our Document as a plan so we are able to retrieve
// planned_values more easily
err = json.Unmarshal(b, &plan)
if err != nil {
// Consider as regular JSON and not tfplan
return model.Document{}, err
}
parsedPlan := readPlan(plan)
return parsedPlan, nil
}
// readPlan gets the information needed and parses it in a way KICS understands
func readPlan(plan *hcl_plan.Plan) model.Document {
kp := KicsPlan{
Resource: make(map[string]KicsPlanResource),
}
kp.readModule(plan.PlannedValues.RootModule)
doc := model.Document{}
tmpDocBytes, err := json.Marshal(kp)
if err != nil {
return model.Document{}
}
err = json.Unmarshal(tmpDocBytes, &doc)
if err != nil {
return model.Document{}
}
return doc
}
// readModule will iterate over all planned_values getting the information required
func (kp *KicsPlan) readModule(module *hcl_plan.StateModule) {
// initialize all the types interfaces
for _, resource := range module.Resources {
convNamedRes := make(map[string]KicsPlanNamedResource)
kp.Resource[resource.Type] = convNamedRes
}
// fill in all the types interfaces
for _, resource := range module.Resources {
kp.Resource[resource.Type][resource.Name] = resource.AttributeValues
}
for _, childModule := range module.ChildModules {
kp.readModule(childModule)
}
}
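// Illustrative sketch of the reshaping above: planned_values from a terraform
// plan become {"resource": {<type>: {<name>: <attribute values>}}}. The plan
// content is hypothetical and error handling is shortened.
func exampleParseTFPlan() {
raw := []byte(`{"format_version":"1.1","planned_values":{"root_module":{` +
`"resources":[{"address":"aws_s3_bucket.b","mode":"managed",` +
`"type":"aws_s3_bucket","name":"b","values":{"acl":"private"}}]}}}`)
var doc model.Document
if err := json.Unmarshal(raw, &doc); err != nil {
return
}
plan, err := parseTFPlan(doc)
if err != nil {
return
}
_ = plan // expected: {"resource":{"aws_s3_bucket":{"b":{"acl":"private"}}}}
}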
package parser
import "github.com/antlr4-go/antlr/v4"
// CustomSyntaxError stores the position and message of a syntax error reported by ANTLR
type CustomSyntaxError struct {
line, column int
msg string
}
// CustomErrorListener collects syntax errors instead of printing them to stderr
type CustomErrorListener struct {
*antlr.DefaultErrorListener
Errors []*CustomSyntaxError
}
// NewCustomErrorListener creates a CustomErrorListener with an empty error list
func NewCustomErrorListener() *CustomErrorListener {
return &CustomErrorListener{
DefaultErrorListener: antlr.NewDefaultErrorListener(),
Errors: make([]*CustomSyntaxError, 0),
}
}
// HasErrors reports whether any syntax error was collected
func (c *CustomErrorListener) HasErrors() bool {
return len(c.Errors) > 0
}
// SyntaxError implements the antlr.ErrorListener interface, recording each reported error
func (c *CustomErrorListener) SyntaxError(recognizer antlr.Recognizer,
offendingSymbol interface{}, line, column int, msg string, e antlr.RecognitionException) {
c.Errors = append(c.Errors, &CustomSyntaxError{
line: line,
column: column,
msg: msg,
})
}
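// Illustrative sketch: collecting syntax errors instead of printing them by
// swapping out the default ANTLR listeners. The filter expression below is
// hypothetical.
func exampleCustomErrorListener() {
listener := NewCustomErrorListener()
lexer := NewJSONFilterLexer(antlr.NewInputStream(`$.{ name = "demo" }`))
lexer.RemoveErrorListeners()
lexer.AddErrorListener(listener)
_ = listener.HasErrors() // false until the token stream is consumed and an error is reported
}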
// Code generated from JSONFilter.g4 by ANTLR 4.13.1. DO NOT EDIT.
package parser // JSONFilter
import "github.com/antlr4-go/antlr/v4"
type BaseJSONFilterVisitor struct {
*antlr.BaseParseTreeVisitor
}
func (v *BaseJSONFilterVisitor) VisitAwsjsonfilter(ctx *AwsjsonfilterContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitDotnotation(ctx *DotnotationContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitFilter_expr_parenthesized(ctx *Filter_expr_parenthesizedContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitFilter_expr_and(ctx *Filter_expr_andContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitFilter_expr_exp(ctx *Filter_expr_expContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitFilter_expr_or(ctx *Filter_expr_orContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitExp(ctx *ExpContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitSelector(ctx *SelectorContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitQualifiedidentifier(ctx *QualifiedidentifierContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitMember(ctx *MemberContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitOperator(ctx *OperatorContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *BaseJSONFilterVisitor) VisitLiteral(ctx *LiteralContext) interface{} {
return v.VisitChildren(ctx)
}
// Code generated from JSONFilter.g4 by ANTLR 4.13.1. DO NOT EDIT.
package parser
import (
"fmt"
"github.com/antlr4-go/antlr/v4"
"sync"
"unicode"
)
// Suppress unused import error
var _ = fmt.Printf
var _ = sync.Once{}
var _ = unicode.IsLetter
type JSONFilterLexer struct {
*antlr.BaseLexer
channelNames []string
modeNames []string
// TODO: EOF string
}
var JSONFilterLexerLexerStaticData struct {
once sync.Once
serializedATN []int32
ChannelNames []string
ModeNames []string
LiteralNames []string
SymbolicNames []string
RuleNames []string
PredictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
func jsonfilterlexerLexerInit() {
staticData := &JSONFilterLexerLexerStaticData
staticData.ChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN",
}
staticData.ModeNames = []string{
"DEFAULT_MODE",
}
staticData.LiteralNames = []string{
"", "'$.'", "'*'", "'{'", "'}'", "'('", "')'", "'['", "']'", "'.'",
"'&&'", "'||'", "'='", "'!='", "'>'", "'<'", "'>='", "'<='", "'IS'",
"'NOT'", "'NULL'", "'EXISTS'", "'TRUE'", "'FALSE'",
}
staticData.SymbolicNames = []string{
"", "SEL_START", "STAR", "LCURLY", "RCURLY", "LPAREN", "RPAREN", "LBRACKET",
"RBRACKET", "DOT", "AND", "OR", "EQUALS", "NOT_EQUALS", "GT", "LT",
"GE", "LE", "IS", "NOT", "NULL", "EXISTS", "TRUE", "FALSE", "INDENTIFIER",
"STRING", "NUMBER", "WS",
}
staticData.RuleNames = []string{
"SEL_START", "STAR", "LCURLY", "RCURLY", "LPAREN", "RPAREN", "LBRACKET",
"RBRACKET", "DOT", "AND", "OR", "EQUALS", "NOT_EQUALS", "GT", "LT",
"GE", "LE", "IS", "NOT", "NULL", "EXISTS", "TRUE", "FALSE", "INDENTIFIER",
"STRING", "ESC", "UNICODE", "HEX", "SAFECODEPOINT", "NUMBER", "INT",
"EXP", "WS",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 27, 207, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3,
1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9,
1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1,
13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17,
1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1,
19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21,
1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 5,
23, 140, 8, 23, 10, 23, 12, 23, 143, 9, 23, 1, 24, 1, 24, 1, 24, 5, 24,
148, 8, 24, 10, 24, 12, 24, 151, 9, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1,
25, 3, 25, 158, 8, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27,
1, 27, 1, 28, 1, 28, 1, 29, 3, 29, 171, 8, 29, 1, 29, 1, 29, 1, 29, 4,
29, 176, 8, 29, 11, 29, 12, 29, 177, 3, 29, 180, 8, 29, 1, 29, 3, 29, 183,
8, 29, 1, 30, 1, 30, 1, 30, 5, 30, 188, 8, 30, 10, 30, 12, 30, 191, 9,
30, 3, 30, 193, 8, 30, 1, 31, 1, 31, 3, 31, 197, 8, 31, 1, 31, 1, 31, 1,
32, 4, 32, 202, 8, 32, 11, 32, 12, 32, 203, 1, 32, 1, 32, 0, 0, 33, 1,
1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11,
23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20,
41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 0, 53, 0, 55, 0, 57, 0, 59,
26, 61, 0, 63, 0, 65, 27, 1, 0, 10, 2, 0, 65, 90, 97, 122, 3, 0, 48, 57,
65, 90, 97, 122, 8, 0, 34, 34, 47, 47, 92, 92, 98, 98, 102, 102, 110, 110,
114, 114, 116, 116, 3, 0, 48, 57, 65, 70, 97, 102, 3, 0, 0, 31, 34, 34,
92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 48, 57, 1, 0, 49, 57, 2, 0, 69, 69,
101, 101, 3, 0, 9, 10, 13, 13, 32, 32, 212, 0, 1, 1, 0, 0, 0, 0, 3, 1,
0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1,
0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19,
1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0,
27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0,
0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0,
0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0,
0, 0, 0, 59, 1, 0, 0, 0, 0, 65, 1, 0, 0, 0, 1, 67, 1, 0, 0, 0, 3, 70, 1,
0, 0, 0, 5, 72, 1, 0, 0, 0, 7, 74, 1, 0, 0, 0, 9, 76, 1, 0, 0, 0, 11, 78,
1, 0, 0, 0, 13, 80, 1, 0, 0, 0, 15, 82, 1, 0, 0, 0, 17, 84, 1, 0, 0, 0,
19, 86, 1, 0, 0, 0, 21, 89, 1, 0, 0, 0, 23, 92, 1, 0, 0, 0, 25, 94, 1,
0, 0, 0, 27, 97, 1, 0, 0, 0, 29, 99, 1, 0, 0, 0, 31, 101, 1, 0, 0, 0, 33,
104, 1, 0, 0, 0, 35, 107, 1, 0, 0, 0, 37, 110, 1, 0, 0, 0, 39, 114, 1,
0, 0, 0, 41, 119, 1, 0, 0, 0, 43, 126, 1, 0, 0, 0, 45, 131, 1, 0, 0, 0,
47, 137, 1, 0, 0, 0, 49, 144, 1, 0, 0, 0, 51, 154, 1, 0, 0, 0, 53, 159,
1, 0, 0, 0, 55, 165, 1, 0, 0, 0, 57, 167, 1, 0, 0, 0, 59, 170, 1, 0, 0,
0, 61, 192, 1, 0, 0, 0, 63, 194, 1, 0, 0, 0, 65, 201, 1, 0, 0, 0, 67, 68,
5, 36, 0, 0, 68, 69, 5, 46, 0, 0, 69, 2, 1, 0, 0, 0, 70, 71, 5, 42, 0,
0, 71, 4, 1, 0, 0, 0, 72, 73, 5, 123, 0, 0, 73, 6, 1, 0, 0, 0, 74, 75,
5, 125, 0, 0, 75, 8, 1, 0, 0, 0, 76, 77, 5, 40, 0, 0, 77, 10, 1, 0, 0,
0, 78, 79, 5, 41, 0, 0, 79, 12, 1, 0, 0, 0, 80, 81, 5, 91, 0, 0, 81, 14,
1, 0, 0, 0, 82, 83, 5, 93, 0, 0, 83, 16, 1, 0, 0, 0, 84, 85, 5, 46, 0,
0, 85, 18, 1, 0, 0, 0, 86, 87, 5, 38, 0, 0, 87, 88, 5, 38, 0, 0, 88, 20,
1, 0, 0, 0, 89, 90, 5, 124, 0, 0, 90, 91, 5, 124, 0, 0, 91, 22, 1, 0, 0,
0, 92, 93, 5, 61, 0, 0, 93, 24, 1, 0, 0, 0, 94, 95, 5, 33, 0, 0, 95, 96,
5, 61, 0, 0, 96, 26, 1, 0, 0, 0, 97, 98, 5, 62, 0, 0, 98, 28, 1, 0, 0,
0, 99, 100, 5, 60, 0, 0, 100, 30, 1, 0, 0, 0, 101, 102, 5, 62, 0, 0, 102,
103, 5, 61, 0, 0, 103, 32, 1, 0, 0, 0, 104, 105, 5, 60, 0, 0, 105, 106,
5, 61, 0, 0, 106, 34, 1, 0, 0, 0, 107, 108, 5, 73, 0, 0, 108, 109, 5, 83,
0, 0, 109, 36, 1, 0, 0, 0, 110, 111, 5, 78, 0, 0, 111, 112, 5, 79, 0, 0,
112, 113, 5, 84, 0, 0, 113, 38, 1, 0, 0, 0, 114, 115, 5, 78, 0, 0, 115,
116, 5, 85, 0, 0, 116, 117, 5, 76, 0, 0, 117, 118, 5, 76, 0, 0, 118, 40,
1, 0, 0, 0, 119, 120, 5, 69, 0, 0, 120, 121, 5, 88, 0, 0, 121, 122, 5,
73, 0, 0, 122, 123, 5, 83, 0, 0, 123, 124, 5, 84, 0, 0, 124, 125, 5, 83,
0, 0, 125, 42, 1, 0, 0, 0, 126, 127, 5, 84, 0, 0, 127, 128, 5, 82, 0, 0,
128, 129, 5, 85, 0, 0, 129, 130, 5, 69, 0, 0, 130, 44, 1, 0, 0, 0, 131,
132, 5, 70, 0, 0, 132, 133, 5, 65, 0, 0, 133, 134, 5, 76, 0, 0, 134, 135,
5, 83, 0, 0, 135, 136, 5, 69, 0, 0, 136, 46, 1, 0, 0, 0, 137, 141, 7, 0,
0, 0, 138, 140, 7, 1, 0, 0, 139, 138, 1, 0, 0, 0, 140, 143, 1, 0, 0, 0,
141, 139, 1, 0, 0, 0, 141, 142, 1, 0, 0, 0, 142, 48, 1, 0, 0, 0, 143, 141,
1, 0, 0, 0, 144, 149, 5, 34, 0, 0, 145, 148, 3, 51, 25, 0, 146, 148, 3,
57, 28, 0, 147, 145, 1, 0, 0, 0, 147, 146, 1, 0, 0, 0, 148, 151, 1, 0,
0, 0, 149, 147, 1, 0, 0, 0, 149, 150, 1, 0, 0, 0, 150, 152, 1, 0, 0, 0,
151, 149, 1, 0, 0, 0, 152, 153, 5, 34, 0, 0, 153, 50, 1, 0, 0, 0, 154,
157, 5, 92, 0, 0, 155, 158, 7, 2, 0, 0, 156, 158, 3, 53, 26, 0, 157, 155,
1, 0, 0, 0, 157, 156, 1, 0, 0, 0, 158, 52, 1, 0, 0, 0, 159, 160, 5, 117,
0, 0, 160, 161, 3, 55, 27, 0, 161, 162, 3, 55, 27, 0, 162, 163, 3, 55,
27, 0, 163, 164, 3, 55, 27, 0, 164, 54, 1, 0, 0, 0, 165, 166, 7, 3, 0,
0, 166, 56, 1, 0, 0, 0, 167, 168, 8, 4, 0, 0, 168, 58, 1, 0, 0, 0, 169,
171, 7, 5, 0, 0, 170, 169, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 172,
1, 0, 0, 0, 172, 179, 3, 61, 30, 0, 173, 175, 5, 46, 0, 0, 174, 176, 7,
6, 0, 0, 175, 174, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 175, 1, 0, 0,
0, 177, 178, 1, 0, 0, 0, 178, 180, 1, 0, 0, 0, 179, 173, 1, 0, 0, 0, 179,
180, 1, 0, 0, 0, 180, 182, 1, 0, 0, 0, 181, 183, 3, 63, 31, 0, 182, 181,
1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 60, 1, 0, 0, 0, 184, 193, 5, 48,
0, 0, 185, 189, 7, 7, 0, 0, 186, 188, 7, 6, 0, 0, 187, 186, 1, 0, 0, 0,
188, 191, 1, 0, 0, 0, 189, 187, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190,
193, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 192, 184, 1, 0, 0, 0, 192, 185,
1, 0, 0, 0, 193, 62, 1, 0, 0, 0, 194, 196, 7, 8, 0, 0, 195, 197, 7, 5,
0, 0, 196, 195, 1, 0, 0, 0, 196, 197, 1, 0, 0, 0, 197, 198, 1, 0, 0, 0,
198, 199, 3, 61, 30, 0, 199, 64, 1, 0, 0, 0, 200, 202, 7, 9, 0, 0, 201,
200, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 201, 1, 0, 0, 0, 203, 204,
1, 0, 0, 0, 204, 205, 1, 0, 0, 0, 205, 206, 6, 32, 0, 0, 206, 66, 1, 0,
0, 0, 13, 0, 141, 147, 149, 157, 170, 177, 179, 182, 189, 192, 196, 203,
1, 6, 0, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// JSONFilterLexerInit initializes any static state used to implement JSONFilterLexer. By default the
// static state used to implement the lexer is lazily initialized during the first call to
// NewJSONFilterLexer(). You can call this function if you wish to initialize the static state ahead
// of time.
func JSONFilterLexerInit() {
staticData := &JSONFilterLexerLexerStaticData
staticData.once.Do(jsonfilterlexerLexerInit)
}
// NewJSONFilterLexer produces a new lexer instance for the optional input antlr.CharStream.
func NewJSONFilterLexer(input antlr.CharStream) *JSONFilterLexer {
JSONFilterLexerInit()
l := new(JSONFilterLexer)
l.BaseLexer = antlr.NewBaseLexer(input)
staticData := &JSONFilterLexerLexerStaticData
l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
l.channelNames = staticData.ChannelNames
l.modeNames = staticData.ModeNames
l.RuleNames = staticData.RuleNames
l.LiteralNames = staticData.LiteralNames
l.SymbolicNames = staticData.SymbolicNames
l.GrammarFileName = "JSONFilter.g4"
// TODO: l.EOF = antlr.TokenEOF
return l
}
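// A minimal usage sketch (the filter string is illustrative; the loop uses only
// the antlr4-go runtime API already imported by this file):
//
//	is := antlr.NewInputStream(`{ $.errorCode = "AccessDenied" }`)
//	lexer := NewJSONFilterLexer(is)
//	for tok := lexer.NextToken(); tok.GetTokenType() != antlr.TokenEOF; tok = lexer.NextToken() {
//		fmt.Printf("%d %q\n", tok.GetTokenType(), tok.GetText())
//	}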
// JSONFilterLexer tokens.
const (
JSONFilterLexerSEL_START = 1
JSONFilterLexerSTAR = 2
JSONFilterLexerLCURLY = 3
JSONFilterLexerRCURLY = 4
JSONFilterLexerLPAREN = 5
JSONFilterLexerRPAREN = 6
JSONFilterLexerLBRACKET = 7
JSONFilterLexerRBRACKET = 8
JSONFilterLexerDOT = 9
JSONFilterLexerAND = 10
JSONFilterLexerOR = 11
JSONFilterLexerEQUALS = 12
JSONFilterLexerNOT_EQUALS = 13
JSONFilterLexerGT = 14
JSONFilterLexerLT = 15
JSONFilterLexerGE = 16
JSONFilterLexerLE = 17
JSONFilterLexerIS = 18
JSONFilterLexerNOT = 19
JSONFilterLexerNULL = 20
JSONFilterLexerEXISTS = 21
JSONFilterLexerTRUE = 22
JSONFilterLexerFALSE = 23
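// Note: "INDENTIFIER" (sic) mirrors the token name as defined in JSONFilter.g4;
// it is kept as-is because the identifier is generated from the grammar.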
JSONFilterLexerINDENTIFIER = 24
JSONFilterLexerSTRING = 25
JSONFilterLexerNUMBER = 26
JSONFilterLexerWS = 27
)
// Code generated from JSONFilter.g4 by ANTLR 4.13.1. DO NOT EDIT.
package parser // JSONFilter
import (
"fmt"
"strconv"
"sync"
"github.com/antlr4-go/antlr/v4"
)
// Suppress unused import errors
var _ = fmt.Printf
var _ = strconv.Itoa
var _ = sync.Once{}
type JSONFilterParser struct {
*antlr.BaseParser
}
var JSONFilterParserStaticData struct {
once sync.Once
serializedATN []int32
LiteralNames []string
SymbolicNames []string
RuleNames []string
PredictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
func jsonfilterParserInit() {
staticData := &JSONFilterParserStaticData
staticData.LiteralNames = []string{
"", "'$.'", "'*'", "'{'", "'}'", "'('", "')'", "'['", "']'", "'.'",
"'&&'", "'||'", "'='", "'!='", "'>'", "'<'", "'>='", "'<='", "'IS'",
"'NOT'", "'NULL'", "'EXISTS'", "'TRUE'", "'FALSE'",
}
staticData.SymbolicNames = []string{
"", "SEL_START", "STAR", "LCURLY", "RCURLY", "LPAREN", "RPAREN", "LBRACKET",
"RBRACKET", "DOT", "AND", "OR", "EQUALS", "NOT_EQUALS", "GT", "LT",
"GE", "LE", "IS", "NOT", "NULL", "EXISTS", "TRUE", "FALSE", "INDENTIFIER",
"STRING", "NUMBER", "WS",
}
staticData.RuleNames = []string{
"awsjsonfilter", "dotnotation", "filter_expr", "exp", "selector", "qualifiedidentifier",
"member", "operator", "literal",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 27, 90, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 1, 0, 1, 1, 1,
1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 31, 8, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 5, 2, 39, 8, 2, 10, 2, 12, 2, 42, 9, 2, 1,
3, 1, 3, 1, 3, 1, 3, 3, 3, 48, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5,
5, 5, 56, 8, 5, 10, 5, 12, 5, 59, 9, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 4,
6, 66, 8, 6, 11, 6, 12, 6, 67, 3, 6, 70, 8, 6, 1, 7, 1, 7, 1, 8, 1, 8,
1, 8, 4, 8, 77, 8, 8, 11, 8, 12, 8, 78, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1,
8, 1, 8, 3, 8, 88, 8, 8, 1, 8, 0, 1, 4, 9, 0, 2, 4, 6, 8, 10, 12, 14, 16,
0, 2, 1, 0, 12, 19, 2, 0, 2, 2, 26, 26, 95, 0, 18, 1, 0, 0, 0, 2, 20, 1,
0, 0, 0, 4, 30, 1, 0, 0, 0, 6, 43, 1, 0, 0, 0, 8, 49, 1, 0, 0, 0, 10, 52,
1, 0, 0, 0, 12, 69, 1, 0, 0, 0, 14, 71, 1, 0, 0, 0, 16, 87, 1, 0, 0, 0,
18, 19, 3, 2, 1, 0, 19, 1, 1, 0, 0, 0, 20, 21, 5, 3, 0, 0, 21, 22, 3, 4,
2, 0, 22, 23, 5, 4, 0, 0, 23, 3, 1, 0, 0, 0, 24, 25, 6, 2, -1, 0, 25, 26,
5, 5, 0, 0, 26, 27, 3, 4, 2, 0, 27, 28, 5, 6, 0, 0, 28, 31, 1, 0, 0, 0,
29, 31, 3, 6, 3, 0, 30, 24, 1, 0, 0, 0, 30, 29, 1, 0, 0, 0, 31, 40, 1,
0, 0, 0, 32, 33, 10, 3, 0, 0, 33, 34, 5, 10, 0, 0, 34, 39, 3, 4, 2, 4,
35, 36, 10, 2, 0, 0, 36, 37, 5, 11, 0, 0, 37, 39, 3, 4, 2, 3, 38, 32, 1,
0, 0, 0, 38, 35, 1, 0, 0, 0, 39, 42, 1, 0, 0, 0, 40, 38, 1, 0, 0, 0, 40,
41, 1, 0, 0, 0, 41, 5, 1, 0, 0, 0, 42, 40, 1, 0, 0, 0, 43, 44, 3, 8, 4,
0, 44, 47, 3, 14, 7, 0, 45, 48, 3, 16, 8, 0, 46, 48, 3, 10, 5, 0, 47, 45,
1, 0, 0, 0, 47, 46, 1, 0, 0, 0, 48, 7, 1, 0, 0, 0, 49, 50, 5, 1, 0, 0,
50, 51, 3, 10, 5, 0, 51, 9, 1, 0, 0, 0, 52, 57, 3, 12, 6, 0, 53, 54, 5,
9, 0, 0, 54, 56, 3, 12, 6, 0, 55, 53, 1, 0, 0, 0, 56, 59, 1, 0, 0, 0, 57,
55, 1, 0, 0, 0, 57, 58, 1, 0, 0, 0, 58, 11, 1, 0, 0, 0, 59, 57, 1, 0, 0,
0, 60, 70, 5, 24, 0, 0, 61, 65, 5, 24, 0, 0, 62, 63, 5, 7, 0, 0, 63, 64,
5, 26, 0, 0, 64, 66, 5, 8, 0, 0, 65, 62, 1, 0, 0, 0, 66, 67, 1, 0, 0, 0,
67, 65, 1, 0, 0, 0, 67, 68, 1, 0, 0, 0, 68, 70, 1, 0, 0, 0, 69, 60, 1,
0, 0, 0, 69, 61, 1, 0, 0, 0, 70, 13, 1, 0, 0, 0, 71, 72, 7, 0, 0, 0, 72,
15, 1, 0, 0, 0, 73, 76, 5, 26, 0, 0, 74, 75, 5, 9, 0, 0, 75, 77, 7, 1,
0, 0, 76, 74, 1, 0, 0, 0, 77, 78, 1, 0, 0, 0, 78, 76, 1, 0, 0, 0, 78, 79,
1, 0, 0, 0, 79, 88, 1, 0, 0, 0, 80, 88, 5, 25, 0, 0, 81, 88, 5, 26, 0,
0, 82, 88, 5, 20, 0, 0, 83, 88, 5, 21, 0, 0, 84, 88, 5, 22, 0, 0, 85, 88,
5, 23, 0, 0, 86, 88, 5, 24, 0, 0, 87, 73, 1, 0, 0, 0, 87, 80, 1, 0, 0,
0, 87, 81, 1, 0, 0, 0, 87, 82, 1, 0, 0, 0, 87, 83, 1, 0, 0, 0, 87, 84,
1, 0, 0, 0, 87, 85, 1, 0, 0, 0, 87, 86, 1, 0, 0, 0, 88, 17, 1, 0, 0, 0,
9, 30, 38, 40, 47, 57, 67, 69, 78, 87,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// JSONFilterParserInit initializes any static state used to implement JSONFilterParser. By default the
// static state used to implement the parser is lazily initialized during the first call to
// NewJSONFilterParser(). You can call this function if you wish to initialize the static state ahead
// of time.
func JSONFilterParserInit() {
staticData := &JSONFilterParserStaticData
staticData.once.Do(jsonfilterParserInit)
}
// NewJSONFilterParser produces a new parser instance for the optional input antlr.TokenStream.
func NewJSONFilterParser(input antlr.TokenStream) *JSONFilterParser {
JSONFilterParserInit()
this := new(JSONFilterParser)
this.BaseParser = antlr.NewBaseParser(input)
staticData := &JSONFilterParserStaticData
this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache)
this.RuleNames = staticData.RuleNames
this.LiteralNames = staticData.LiteralNames
this.SymbolicNames = staticData.SymbolicNames
this.GrammarFileName = "JSONFilter.g4"
return this
}
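// A minimal wiring sketch for the lexer and parser (the filter string is illustrative):
//
//	input := antlr.NewInputStream(`{ $.eventName = "ConsoleLogin" }`)
//	tokens := antlr.NewCommonTokenStream(NewJSONFilterLexer(input), antlr.TokenDefaultChannel)
//	p := NewJSONFilterParser(tokens)
//	tree := p.Awsjsonfilter() // root rule; see the rule methods below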
// JSONFilterParser tokens.
const (
JSONFilterParserEOF = antlr.TokenEOF
JSONFilterParserSEL_START = 1
JSONFilterParserSTAR = 2
JSONFilterParserLCURLY = 3
JSONFilterParserRCURLY = 4
JSONFilterParserLPAREN = 5
JSONFilterParserRPAREN = 6
JSONFilterParserLBRACKET = 7
JSONFilterParserRBRACKET = 8
JSONFilterParserDOT = 9
JSONFilterParserAND = 10
JSONFilterParserOR = 11
JSONFilterParserEQUALS = 12
JSONFilterParserNOT_EQUALS = 13
JSONFilterParserGT = 14
JSONFilterParserLT = 15
JSONFilterParserGE = 16
JSONFilterParserLE = 17
JSONFilterParserIS = 18
JSONFilterParserNOT = 19
JSONFilterParserNULL = 20
JSONFilterParserEXISTS = 21
JSONFilterParserTRUE = 22
JSONFilterParserFALSE = 23
JSONFilterParserINDENTIFIER = 24
JSONFilterParserSTRING = 25
JSONFilterParserNUMBER = 26
JSONFilterParserWS = 27
)
// JSONFilterParser rules.
const (
JSONFilterParserRULE_awsjsonfilter = 0
JSONFilterParserRULE_dotnotation = 1
JSONFilterParserRULE_filter_expr = 2
JSONFilterParserRULE_exp = 3
JSONFilterParserRULE_selector = 4
JSONFilterParserRULE_qualifiedidentifier = 5
JSONFilterParserRULE_member = 6
JSONFilterParserRULE_operator = 7
JSONFilterParserRULE_literal = 8
)
// IAwsjsonfilterContext is an interface to support dynamic dispatch.
type IAwsjsonfilterContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
Dotnotation() IDotnotationContext
// IsAwsjsonfilterContext differentiates from other interfaces.
IsAwsjsonfilterContext()
}
type AwsjsonfilterContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyAwsjsonfilterContext() *AwsjsonfilterContext {
var p = new(AwsjsonfilterContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_awsjsonfilter
return p
}
func InitEmptyAwsjsonfilterContext(p *AwsjsonfilterContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_awsjsonfilter
}
func (*AwsjsonfilterContext) IsAwsjsonfilterContext() {}
func NewAwsjsonfilterContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *AwsjsonfilterContext {
var p = new(AwsjsonfilterContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_awsjsonfilter
return p
}
func (s *AwsjsonfilterContext) GetParser() antlr.Parser { return s.parser }
func (s *AwsjsonfilterContext) Dotnotation() IDotnotationContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IDotnotationContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IDotnotationContext)
}
func (s *AwsjsonfilterContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *AwsjsonfilterContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *AwsjsonfilterContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitAwsjsonfilter(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Awsjsonfilter() (localctx IAwsjsonfilterContext) {
localctx = NewAwsjsonfilterContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 0, JSONFilterParserRULE_awsjsonfilter)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(18)
p.Dotnotation()
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IDotnotationContext is an interface to support dynamic dispatch.
type IDotnotationContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
LCURLY() antlr.TerminalNode
Filter_expr() IFilter_exprContext
RCURLY() antlr.TerminalNode
// IsDotnotationContext differentiates from other interfaces.
IsDotnotationContext()
}
type DotnotationContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyDotnotationContext() *DotnotationContext {
var p = new(DotnotationContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_dotnotation
return p
}
func InitEmptyDotnotationContext(p *DotnotationContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_dotnotation
}
func (*DotnotationContext) IsDotnotationContext() {}
func NewDotnotationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *DotnotationContext {
var p = new(DotnotationContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_dotnotation
return p
}
func (s *DotnotationContext) GetParser() antlr.Parser { return s.parser }
func (s *DotnotationContext) LCURLY() antlr.TerminalNode {
return s.GetToken(JSONFilterParserLCURLY, 0)
}
func (s *DotnotationContext) Filter_expr() IFilter_exprContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFilter_exprContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IFilter_exprContext)
}
func (s *DotnotationContext) RCURLY() antlr.TerminalNode {
return s.GetToken(JSONFilterParserRCURLY, 0)
}
func (s *DotnotationContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *DotnotationContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *DotnotationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitDotnotation(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Dotnotation() (localctx IDotnotationContext) {
localctx = NewDotnotationContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 2, JSONFilterParserRULE_dotnotation)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(20)
p.Match(JSONFilterParserLCURLY)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(21)
p.filter_expr(0)
}
{
p.SetState(22)
p.Match(JSONFilterParserRCURLY)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IFilter_exprContext is an interface to support dynamic dispatch.
type IFilter_exprContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// IsFilter_exprContext differentiates from other interfaces.
IsFilter_exprContext()
}
type Filter_exprContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyFilter_exprContext() *Filter_exprContext {
var p = new(Filter_exprContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_filter_expr
return p
}
func InitEmptyFilter_exprContext(p *Filter_exprContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_filter_expr
}
func (*Filter_exprContext) IsFilter_exprContext() {}
func NewFilter_exprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *Filter_exprContext {
var p = new(Filter_exprContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_filter_expr
return p
}
func (s *Filter_exprContext) GetParser() antlr.Parser { return s.parser }
func (s *Filter_exprContext) CopyAll(ctx *Filter_exprContext) {
s.CopyFrom(&ctx.BaseParserRuleContext)
}
func (s *Filter_exprContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *Filter_exprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
type Filter_expr_parenthesizedContext struct {
Filter_exprContext
}
func NewFilter_expr_parenthesizedContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Filter_expr_parenthesizedContext {
var p = new(Filter_expr_parenthesizedContext)
InitEmptyFilter_exprContext(&p.Filter_exprContext)
p.parser = parser
p.CopyAll(ctx.(*Filter_exprContext))
return p
}
func (s *Filter_expr_parenthesizedContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *Filter_expr_parenthesizedContext) LPAREN() antlr.TerminalNode {
return s.GetToken(JSONFilterParserLPAREN, 0)
}
func (s *Filter_expr_parenthesizedContext) Filter_expr() IFilter_exprContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFilter_exprContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IFilter_exprContext)
}
func (s *Filter_expr_parenthesizedContext) RPAREN() antlr.TerminalNode {
return s.GetToken(JSONFilterParserRPAREN, 0)
}
func (s *Filter_expr_parenthesizedContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitFilter_expr_parenthesized(s)
default:
return t.VisitChildren(s)
}
}
type Filter_expr_andContext struct {
Filter_exprContext
lhs IFilter_exprContext
rhs IFilter_exprContext
}
func NewFilter_expr_andContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Filter_expr_andContext {
var p = new(Filter_expr_andContext)
InitEmptyFilter_exprContext(&p.Filter_exprContext)
p.parser = parser
p.CopyAll(ctx.(*Filter_exprContext))
return p
}
func (s *Filter_expr_andContext) GetLhs() IFilter_exprContext { return s.lhs }
func (s *Filter_expr_andContext) GetRhs() IFilter_exprContext { return s.rhs }
func (s *Filter_expr_andContext) SetLhs(v IFilter_exprContext) { s.lhs = v }
func (s *Filter_expr_andContext) SetRhs(v IFilter_exprContext) { s.rhs = v }
func (s *Filter_expr_andContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *Filter_expr_andContext) AND() antlr.TerminalNode {
return s.GetToken(JSONFilterParserAND, 0)
}
func (s *Filter_expr_andContext) AllFilter_expr() []IFilter_exprContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IFilter_exprContext); ok {
len++
}
}
tst := make([]IFilter_exprContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IFilter_exprContext); ok {
tst[i] = t.(IFilter_exprContext)
i++
}
}
return tst
}
func (s *Filter_expr_andContext) Filter_expr(i int) IFilter_exprContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFilter_exprContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IFilter_exprContext)
}
func (s *Filter_expr_andContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitFilter_expr_and(s)
default:
return t.VisitChildren(s)
}
}
type Filter_expr_expContext struct {
Filter_exprContext
}
func NewFilter_expr_expContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Filter_expr_expContext {
var p = new(Filter_expr_expContext)
InitEmptyFilter_exprContext(&p.Filter_exprContext)
p.parser = parser
p.CopyAll(ctx.(*Filter_exprContext))
return p
}
func (s *Filter_expr_expContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *Filter_expr_expContext) Exp() IExpContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IExpContext)
}
func (s *Filter_expr_expContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitFilter_expr_exp(s)
default:
return t.VisitChildren(s)
}
}
type Filter_expr_orContext struct {
Filter_exprContext
lhs IFilter_exprContext
rhs IFilter_exprContext
}
func NewFilter_expr_orContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *Filter_expr_orContext {
var p = new(Filter_expr_orContext)
InitEmptyFilter_exprContext(&p.Filter_exprContext)
p.parser = parser
p.CopyAll(ctx.(*Filter_exprContext))
return p
}
func (s *Filter_expr_orContext) GetLhs() IFilter_exprContext { return s.lhs }
func (s *Filter_expr_orContext) GetRhs() IFilter_exprContext { return s.rhs }
func (s *Filter_expr_orContext) SetLhs(v IFilter_exprContext) { s.lhs = v }
func (s *Filter_expr_orContext) SetRhs(v IFilter_exprContext) { s.rhs = v }
func (s *Filter_expr_orContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *Filter_expr_orContext) OR() antlr.TerminalNode {
return s.GetToken(JSONFilterParserOR, 0)
}
func (s *Filter_expr_orContext) AllFilter_expr() []IFilter_exprContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IFilter_exprContext); ok {
len++
}
}
tst := make([]IFilter_exprContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IFilter_exprContext); ok {
tst[i] = t.(IFilter_exprContext)
i++
}
}
return tst
}
func (s *Filter_expr_orContext) Filter_expr(i int) IFilter_exprContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFilter_exprContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IFilter_exprContext)
}
func (s *Filter_expr_orContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitFilter_expr_or(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Filter_expr() (localctx IFilter_exprContext) {
return p.filter_expr(0)
}
func (p *JSONFilterParser) filter_expr(_p int) (localctx IFilter_exprContext) {
var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
_parentState := p.GetState()
localctx = NewFilter_exprContext(p, p.GetParserRuleContext(), _parentState)
var _prevctx IFilter_exprContext = localctx
var _ antlr.ParserRuleContext = _prevctx // TODO: To prevent unused variable warning.
_startState := 4
p.EnterRecursionRule(localctx, 4, JSONFilterParserRULE_filter_expr, _p)
var _alt int
p.EnterOuterAlt(localctx, 1)
p.SetState(30)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetTokenStream().LA(1) {
case JSONFilterParserLPAREN:
localctx = NewFilter_expr_parenthesizedContext(p, localctx)
p.SetParserRuleContext(localctx)
_prevctx = localctx
{
p.SetState(25)
p.Match(JSONFilterParserLPAREN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(26)
p.filter_expr(0)
}
{
p.SetState(27)
p.Match(JSONFilterParserRPAREN)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case JSONFilterParserSEL_START:
localctx = NewFilter_expr_expContext(p, localctx)
p.SetParserRuleContext(localctx)
_prevctx = localctx
{
p.SetState(29)
p.Exp()
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1))
p.SetState(40)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 2, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
if _alt == 1 {
if p.GetParseListeners() != nil {
p.TriggerExitRuleEvent()
}
_prevctx = localctx
p.SetState(38)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 1, p.GetParserRuleContext()) {
case 1:
localctx = NewFilter_expr_andContext(p, NewFilter_exprContext(p, _parentctx, _parentState))
localctx.(*Filter_expr_andContext).lhs = _prevctx
p.PushNewRecursionContext(localctx, _startState, JSONFilterParserRULE_filter_expr)
p.SetState(32)
if !(p.Precpred(p.GetParserRuleContext(), 3)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", ""))
goto errorExit
}
{
p.SetState(33)
p.Match(JSONFilterParserAND)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(34)
var _x = p.filter_expr(4)
localctx.(*Filter_expr_andContext).rhs = _x
}
case 2:
localctx = NewFilter_expr_orContext(p, NewFilter_exprContext(p, _parentctx, _parentState))
localctx.(*Filter_expr_orContext).lhs = _prevctx
p.PushNewRecursionContext(localctx, _startState, JSONFilterParserRULE_filter_expr)
p.SetState(35)
if !(p.Precpred(p.GetParserRuleContext(), 2)) {
p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", ""))
goto errorExit
}
{
p.SetState(36)
p.Match(JSONFilterParserOR)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(37)
var _x = p.filter_expr(3)
localctx.(*Filter_expr_orContext).rhs = _x
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
}
p.SetState(42)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 2, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.UnrollRecursionContexts(_parentctx)
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IExpContext is an interface to support dynamic dispatch.
type IExpContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
Selector() ISelectorContext
Operator() IOperatorContext
Literal() ILiteralContext
Qualifiedidentifier() IQualifiedidentifierContext
// IsExpContext differentiates from other interfaces.
IsExpContext()
}
type ExpContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyExpContext() *ExpContext {
var p = new(ExpContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_exp
return p
}
func InitEmptyExpContext(p *ExpContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_exp
}
func (*ExpContext) IsExpContext() {}
func NewExpContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExpContext {
var p = new(ExpContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_exp
return p
}
func (s *ExpContext) GetParser() antlr.Parser { return s.parser }
func (s *ExpContext) Selector() ISelectorContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ISelectorContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ISelectorContext)
}
func (s *ExpContext) Operator() IOperatorContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IOperatorContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IOperatorContext)
}
func (s *ExpContext) Literal() ILiteralContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ILiteralContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(ILiteralContext)
}
func (s *ExpContext) Qualifiedidentifier() IQualifiedidentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IQualifiedidentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IQualifiedidentifierContext)
}
func (s *ExpContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *ExpContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *ExpContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitExp(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Exp() (localctx IExpContext) {
localctx = NewExpContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 6, JSONFilterParserRULE_exp)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(43)
p.Selector()
}
{
p.SetState(44)
p.Operator()
}
p.SetState(47)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 3, p.GetParserRuleContext()) {
case 1:
{
p.SetState(45)
p.Literal()
}
case 2:
{
p.SetState(46)
p.Qualifiedidentifier()
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ISelectorContext is an interface to support dynamic dispatch.
type ISelectorContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
SEL_START() antlr.TerminalNode
Qualifiedidentifier() IQualifiedidentifierContext
// IsSelectorContext differentiates from other interfaces.
IsSelectorContext()
}
type SelectorContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptySelectorContext() *SelectorContext {
var p = new(SelectorContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_selector
return p
}
func InitEmptySelectorContext(p *SelectorContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_selector
}
func (*SelectorContext) IsSelectorContext() {}
func NewSelectorContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *SelectorContext {
var p = new(SelectorContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_selector
return p
}
func (s *SelectorContext) GetParser() antlr.Parser { return s.parser }
func (s *SelectorContext) SEL_START() antlr.TerminalNode {
return s.GetToken(JSONFilterParserSEL_START, 0)
}
func (s *SelectorContext) Qualifiedidentifier() IQualifiedidentifierContext {
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IQualifiedidentifierContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
}
return t.(IQualifiedidentifierContext)
}
func (s *SelectorContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *SelectorContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *SelectorContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitSelector(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Selector() (localctx ISelectorContext) {
localctx = NewSelectorContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 8, JSONFilterParserRULE_selector)
p.EnterOuterAlt(localctx, 1)
{
p.SetState(49)
p.Match(JSONFilterParserSEL_START)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(50)
p.Qualifiedidentifier()
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IQualifiedidentifierContext is an interface to support dynamic dispatch.
type IQualifiedidentifierContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
AllMember() []IMemberContext
Member(i int) IMemberContext
AllDOT() []antlr.TerminalNode
DOT(i int) antlr.TerminalNode
// IsQualifiedidentifierContext differentiates from other interfaces.
IsQualifiedidentifierContext()
}
type QualifiedidentifierContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyQualifiedidentifierContext() *QualifiedidentifierContext {
var p = new(QualifiedidentifierContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_qualifiedidentifier
return p
}
func InitEmptyQualifiedidentifierContext(p *QualifiedidentifierContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_qualifiedidentifier
}
func (*QualifiedidentifierContext) IsQualifiedidentifierContext() {}
func NewQualifiedidentifierContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *QualifiedidentifierContext {
var p = new(QualifiedidentifierContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_qualifiedidentifier
return p
}
func (s *QualifiedidentifierContext) GetParser() antlr.Parser { return s.parser }
func (s *QualifiedidentifierContext) AllMember() []IMemberContext {
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IMemberContext); ok {
len++
}
}
tst := make([]IMemberContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IMemberContext); ok {
tst[i] = t.(IMemberContext)
i++
}
}
return tst
}
func (s *QualifiedidentifierContext) Member(i int) IMemberContext {
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IMemberContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
}
return t.(IMemberContext)
}
func (s *QualifiedidentifierContext) AllDOT() []antlr.TerminalNode {
return s.GetTokens(JSONFilterParserDOT)
}
func (s *QualifiedidentifierContext) DOT(i int) antlr.TerminalNode {
return s.GetToken(JSONFilterParserDOT, i)
}
func (s *QualifiedidentifierContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *QualifiedidentifierContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *QualifiedidentifierContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitQualifiedidentifier(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Qualifiedidentifier() (localctx IQualifiedidentifierContext) {
localctx = NewQualifiedidentifierContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 10, JSONFilterParserRULE_qualifiedidentifier)
var _alt int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(52)
p.Member()
}
p.SetState(57)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 4, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
for _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
if _alt == 1 {
{
p.SetState(53)
p.Match(JSONFilterParserDOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(54)
p.Member()
}
}
p.SetState(59)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 4, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IMemberContext is an interface to support dynamic dispatch.
type IMemberContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
INDENTIFIER() antlr.TerminalNode
AllLBRACKET() []antlr.TerminalNode
LBRACKET(i int) antlr.TerminalNode
AllNUMBER() []antlr.TerminalNode
NUMBER(i int) antlr.TerminalNode
AllRBRACKET() []antlr.TerminalNode
RBRACKET(i int) antlr.TerminalNode
// IsMemberContext differentiates from other interfaces.
IsMemberContext()
}
type MemberContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyMemberContext() *MemberContext {
var p = new(MemberContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_member
return p
}
func InitEmptyMemberContext(p *MemberContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_member
}
func (*MemberContext) IsMemberContext() {}
func NewMemberContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MemberContext {
var p = new(MemberContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_member
return p
}
func (s *MemberContext) GetParser() antlr.Parser { return s.parser }
func (s *MemberContext) INDENTIFIER() antlr.TerminalNode {
return s.GetToken(JSONFilterParserINDENTIFIER, 0)
}
func (s *MemberContext) AllLBRACKET() []antlr.TerminalNode {
return s.GetTokens(JSONFilterParserLBRACKET)
}
func (s *MemberContext) LBRACKET(i int) antlr.TerminalNode {
return s.GetToken(JSONFilterParserLBRACKET, i)
}
func (s *MemberContext) AllNUMBER() []antlr.TerminalNode {
return s.GetTokens(JSONFilterParserNUMBER)
}
func (s *MemberContext) NUMBER(i int) antlr.TerminalNode {
return s.GetToken(JSONFilterParserNUMBER, i)
}
func (s *MemberContext) AllRBRACKET() []antlr.TerminalNode {
return s.GetTokens(JSONFilterParserRBRACKET)
}
func (s *MemberContext) RBRACKET(i int) antlr.TerminalNode {
return s.GetToken(JSONFilterParserRBRACKET, i)
}
func (s *MemberContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *MemberContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *MemberContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitMember(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Member() (localctx IMemberContext) {
localctx = NewMemberContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 12, JSONFilterParserRULE_member)
var _alt int
p.SetState(69)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 6, p.GetParserRuleContext()) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(60)
p.Match(JSONFilterParserINDENTIFIER)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(61)
p.Match(JSONFilterParserINDENTIFIER)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(65)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = 1
for ok := true; ok; ok = _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
switch _alt {
case 1:
{
p.SetState(62)
p.Match(JSONFilterParserLBRACKET)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(63)
p.Match(JSONFilterParserNUMBER)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(64)
p.Match(JSONFilterParserRBRACKET)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
p.SetState(67)
p.GetErrorHandler().Sync(p)
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 5, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// IOperatorContext is an interface to support dynamic dispatch.
type IOperatorContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
EQUALS() antlr.TerminalNode
NOT_EQUALS() antlr.TerminalNode
IS() antlr.TerminalNode
NOT() antlr.TerminalNode
GT() antlr.TerminalNode
LT() antlr.TerminalNode
GE() antlr.TerminalNode
LE() antlr.TerminalNode
// IsOperatorContext differentiates from other interfaces.
IsOperatorContext()
}
type OperatorContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyOperatorContext() *OperatorContext {
var p = new(OperatorContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_operator
return p
}
func InitEmptyOperatorContext(p *OperatorContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_operator
}
func (*OperatorContext) IsOperatorContext() {}
func NewOperatorContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *OperatorContext {
var p = new(OperatorContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_operator
return p
}
func (s *OperatorContext) GetParser() antlr.Parser { return s.parser }
func (s *OperatorContext) EQUALS() antlr.TerminalNode {
return s.GetToken(JSONFilterParserEQUALS, 0)
}
func (s *OperatorContext) NOT_EQUALS() antlr.TerminalNode {
return s.GetToken(JSONFilterParserNOT_EQUALS, 0)
}
func (s *OperatorContext) IS() antlr.TerminalNode {
return s.GetToken(JSONFilterParserIS, 0)
}
func (s *OperatorContext) NOT() antlr.TerminalNode {
return s.GetToken(JSONFilterParserNOT, 0)
}
func (s *OperatorContext) GT() antlr.TerminalNode {
return s.GetToken(JSONFilterParserGT, 0)
}
func (s *OperatorContext) LT() antlr.TerminalNode {
return s.GetToken(JSONFilterParserLT, 0)
}
func (s *OperatorContext) GE() antlr.TerminalNode {
return s.GetToken(JSONFilterParserGE, 0)
}
func (s *OperatorContext) LE() antlr.TerminalNode {
return s.GetToken(JSONFilterParserLE, 0)
}
func (s *OperatorContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *OperatorContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *OperatorContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitOperator(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Operator() (localctx IOperatorContext) {
localctx = NewOperatorContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 14, JSONFilterParserRULE_operator)
var _la int
p.EnterOuterAlt(localctx, 1)
{
p.SetState(71)
_la = p.GetTokenStream().LA(1)
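// 1044480 sets bits 12 through 19, i.e. tokens EQUALS through NOT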
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&1044480) != 0) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)
p.Consume()
}
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
// ILiteralContext is an interface to support dynamic dispatch.
type ILiteralContext interface {
antlr.ParserRuleContext
// GetParser returns the parser.
GetParser() antlr.Parser
// Getter signatures
AllNUMBER() []antlr.TerminalNode
NUMBER(i int) antlr.TerminalNode
AllDOT() []antlr.TerminalNode
DOT(i int) antlr.TerminalNode
AllSTAR() []antlr.TerminalNode
STAR(i int) antlr.TerminalNode
STRING() antlr.TerminalNode
NULL() antlr.TerminalNode
EXISTS() antlr.TerminalNode
TRUE() antlr.TerminalNode
FALSE() antlr.TerminalNode
INDENTIFIER() antlr.TerminalNode
// IsLiteralContext differentiates from other interfaces.
IsLiteralContext()
}
type LiteralContext struct {
antlr.BaseParserRuleContext
parser antlr.Parser
}
func NewEmptyLiteralContext() *LiteralContext {
var p = new(LiteralContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_literal
return p
}
func InitEmptyLiteralContext(p *LiteralContext) {
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1)
p.RuleIndex = JSONFilterParserRULE_literal
}
func (*LiteralContext) IsLiteralContext() {}
func NewLiteralContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *LiteralContext {
var p = new(LiteralContext)
antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState)
p.parser = parser
p.RuleIndex = JSONFilterParserRULE_literal
return p
}
func (s *LiteralContext) GetParser() antlr.Parser { return s.parser }
func (s *LiteralContext) AllNUMBER() []antlr.TerminalNode {
return s.GetTokens(JSONFilterParserNUMBER)
}
func (s *LiteralContext) NUMBER(i int) antlr.TerminalNode {
return s.GetToken(JSONFilterParserNUMBER, i)
}
func (s *LiteralContext) AllDOT() []antlr.TerminalNode {
return s.GetTokens(JSONFilterParserDOT)
}
func (s *LiteralContext) DOT(i int) antlr.TerminalNode {
return s.GetToken(JSONFilterParserDOT, i)
}
func (s *LiteralContext) AllSTAR() []antlr.TerminalNode {
return s.GetTokens(JSONFilterParserSTAR)
}
func (s *LiteralContext) STAR(i int) antlr.TerminalNode {
return s.GetToken(JSONFilterParserSTAR, i)
}
func (s *LiteralContext) STRING() antlr.TerminalNode {
return s.GetToken(JSONFilterParserSTRING, 0)
}
func (s *LiteralContext) NULL() antlr.TerminalNode {
return s.GetToken(JSONFilterParserNULL, 0)
}
func (s *LiteralContext) EXISTS() antlr.TerminalNode {
return s.GetToken(JSONFilterParserEXISTS, 0)
}
func (s *LiteralContext) TRUE() antlr.TerminalNode {
return s.GetToken(JSONFilterParserTRUE, 0)
}
func (s *LiteralContext) FALSE() antlr.TerminalNode {
return s.GetToken(JSONFilterParserFALSE, 0)
}
func (s *LiteralContext) INDENTIFIER() antlr.TerminalNode {
return s.GetToken(JSONFilterParserINDENTIFIER, 0)
}
func (s *LiteralContext) GetRuleContext() antlr.RuleContext {
return s
}
func (s *LiteralContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) string {
return antlr.TreesStringTree(s, ruleNames, recog)
}
func (s *LiteralContext) Accept(visitor antlr.ParseTreeVisitor) interface{} {
switch t := visitor.(type) {
case JSONFilterVisitor:
return t.VisitLiteral(s)
default:
return t.VisitChildren(s)
}
}
func (p *JSONFilterParser) Literal() (localctx ILiteralContext) {
localctx = NewLiteralContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 16, JSONFilterParserRULE_literal)
var _la int
var _alt int
p.SetState(87)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 8, p.GetParserRuleContext()) {
case 1:
p.EnterOuterAlt(localctx, 1)
{
p.SetState(73)
p.Match(JSONFilterParserNUMBER)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
p.SetState(76)
p.GetErrorHandler().Sync(p)
if p.HasError() {
goto errorExit
}
_alt = 1
for ok := true; ok; ok = _alt != 2 && _alt != antlr.ATNInvalidAltNumber {
switch _alt {
case 1:
{
p.SetState(74)
p.Match(JSONFilterParserDOT)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
{
p.SetState(75)
_la = p.GetTokenStream().LA(1)
if !(_la == JSONFilterParserSTAR || _la == JSONFilterParserNUMBER) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)
p.Consume()
}
}
default:
p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil))
goto errorExit
}
p.SetState(78)
p.GetErrorHandler().Sync(p)
_alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 7, p.GetParserRuleContext())
if p.HasError() {
goto errorExit
}
}
case 2:
p.EnterOuterAlt(localctx, 2)
{
p.SetState(80)
p.Match(JSONFilterParserSTRING)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 3:
p.EnterOuterAlt(localctx, 3)
{
p.SetState(81)
p.Match(JSONFilterParserNUMBER)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 4:
p.EnterOuterAlt(localctx, 4)
{
p.SetState(82)
p.Match(JSONFilterParserNULL)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 5:
p.EnterOuterAlt(localctx, 5)
{
p.SetState(83)
p.Match(JSONFilterParserEXISTS)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 6:
p.EnterOuterAlt(localctx, 6)
{
p.SetState(84)
p.Match(JSONFilterParserTRUE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 7:
p.EnterOuterAlt(localctx, 7)
{
p.SetState(85)
p.Match(JSONFilterParserFALSE)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case 8:
p.EnterOuterAlt(localctx, 8)
{
p.SetState(86)
p.Match(JSONFilterParserINDENTIFIER)
if p.HasError() {
// Recognition error - abort rule
goto errorExit
}
}
case antlr.ATNInvalidAltNumber:
goto errorExit
}
errorExit:
if p.HasError() {
v := p.GetError()
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
p.SetError(nil)
}
p.ExitRule()
return localctx
goto errorExit // Trick to prevent compiler error if the label is not used
}
func (p *JSONFilterParser) Sempred(localctx antlr.RuleContext, ruleIndex, predIndex int) bool {
switch ruleIndex {
case 2:
var t *Filter_exprContext = nil
if localctx != nil {
t = localctx.(*Filter_exprContext)
}
return p.Filter_expr_Sempred(t, predIndex)
default:
panic("No predicate with index: " + fmt.Sprint(ruleIndex))
}
}
func (p *JSONFilterParser) Filter_expr_Sempred(localctx antlr.RuleContext, predIndex int) bool {
switch predIndex {
case 0:
return p.Precpred(p.GetParserRuleContext(), 3)
case 1:
return p.Precpred(p.GetParserRuleContext(), 2)
default:
panic("No predicate with index: " + fmt.Sprint(predIndex))
}
}
package parser // JSONFilter
import (
"github.com/antlr4-go/antlr/v4"
)
const (
AND = "&&"
OR = "||"
)
type AWSJSONFilter struct {
FilterExpression interface{} `json:"_kics_filter_expr"`
}
type FilterExp struct {
Op interface{} `json:"_op"`
Left interface{} `json:"_left"`
Right interface{} `json:"_right"`
}
type FilterSelector struct {
Selector interface{} `json:"_selector"`
Op interface{} `json:"_op"`
Value interface{} `json:"_value"`
}
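// Marshaling sketch (assuming the tree visitor below): the filter
// `{ $.a = 1 && $.b = 2 }` produces, roughly,
//
//	{"_kics_filter_expr":{"_op":"&&",
//	  "_left":{"_selector":"$.a","_op":"=","_value":"1"},
//	  "_right":{"_selector":"$.b","_op":"=","_value":"2"}}}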
type JSONFilterTreeVisitor struct {
*antlr.BaseParseTreeVisitor
}
func NewJSONFilterPrinterVisitor() *JSONFilterTreeVisitor {
return &JSONFilterTreeVisitor{
&antlr.BaseParseTreeVisitor{},
}
}
func (v *JSONFilterTreeVisitor) VisitAll(tree antlr.ParseTree) AWSJSONFilter {
return AWSJSONFilter{
FilterExpression: v.Visit(tree),
}
}
func (v *JSONFilterTreeVisitor) Visit(tree antlr.ParseTree) interface{} {
return tree.Accept(v)
}
func (v *JSONFilterTreeVisitor) VisitChildren(node antlr.RuleNode) interface{} {
children := node.GetChildren()
for _, child := range children {
child.(antlr.ParseTree).Accept(v)
}
return nil
}
func (v *JSONFilterTreeVisitor) VisitAwsjsonfilter(ctx *AwsjsonfilterContext) interface{} {
return v.Visit(ctx.Dotnotation())
}
func (v *JSONFilterTreeVisitor) VisitDotnotation(ctx *DotnotationContext) interface{} {
return v.Visit(ctx.Filter_expr())
}
func (v *JSONFilterTreeVisitor) VisitFilter_expr_parenthesized(ctx *Filter_expr_parenthesizedContext) interface{} {
return v.Visit(ctx.Filter_expr())
}
func (v *JSONFilterTreeVisitor) VisitFilter_expr_and(ctx *Filter_expr_andContext) interface{} {
return FilterExp{
Op: AND,
Left: v.Visit(ctx.GetLhs()),
Right: v.Visit(ctx.GetRhs()),
}
}
func (v *JSONFilterTreeVisitor) VisitFilter_expr_exp(ctx *Filter_expr_expContext) interface{} {
return v.Visit(ctx.Exp())
}
func (v *JSONFilterTreeVisitor) VisitFilter_expr_or(ctx *Filter_expr_orContext) interface{} {
return FilterExp{
Op: OR,
Left: v.Visit(ctx.GetLhs()),
Right: v.Visit(ctx.GetRhs()),
}
}
func (v *JSONFilterTreeVisitor) VisitQualifiedidentifier(ctx *QualifiedidentifierContext) interface{} {
return v.VisitChildren(ctx)
}
func (v *JSONFilterTreeVisitor) VisitExp(ctx *ExpContext) interface{} {
var value interface{}
if ctx.Literal() != nil {
value = v.Visit(ctx.Literal())
} else {
value = v.Visit(ctx.Qualifiedidentifier())
}
selector := FilterSelector{
Selector: v.Visit(ctx.Selector()),
Op: v.Visit(ctx.Operator()),
Value: value,
}
return selector
}
func (v *JSONFilterTreeVisitor) VisitSelector(ctx *SelectorContext) interface{} {
return ctx.GetText()
}
func (v *JSONFilterTreeVisitor) VisitMember(ctx *MemberContext) interface{} {
return ctx.GetText()
}
func (v *JSONFilterTreeVisitor) VisitOperator(ctx *OperatorContext) interface{} {
return ctx.GetText()
}
func (v *JSONFilterTreeVisitor) VisitLiteral(ctx *LiteralContext) interface{} {
return ctx.GetText()
}
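// End-to-end sketch tying lexer, parser, and visitor together (the filter string
// and the use of encoding/json are illustrative, not part of this package):
//
//	input := antlr.NewInputStream(`{ $.errorCode = "*UnauthorizedOperation" }`)
//	tokens := antlr.NewCommonTokenStream(NewJSONFilterLexer(input), antlr.TokenDefaultChannel)
//	filter := NewJSONFilterPrinterVisitor().VisitAll(NewJSONFilterParser(tokens).Awsjsonfilter())
//	out, _ := json.Marshal(filter)
//	fmt.Println(string(out))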
package parser
import (
"bytes"
"errors"
"os"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/rs/zerolog/log"
)
type kindParser interface {
GetKind() model.FileKind
GetCommentToken() string
SupportedExtensions() []string
SupportedTypes() map[string]bool
Parse(filePath string, fileContent []byte) ([]model.Document, []int, error)
Resolve(fileContent []byte, filename string, _ bool, _ int) ([]byte, error)
StringifyContent(content []byte) (string, error)
GetResolvedFiles() map[string]model.ResolvedFile
}
// Builder is a collection of parsers from which Parser instances will be constructed
type Builder struct {
parsers []kindParser
}
// NewBuilder returns a reference to a new, empty Builder
func NewBuilder() *Builder {
log.Debug().Msg("parser.NewBuilder()")
return &Builder{}
}
// Add appends a new parser to the Builder and returns the Builder so calls can be chained
func (b *Builder) Add(p kindParser) *Builder {
b.parsers = append(b.parsers, p)
return b
}
// Build filters the registered parsers by the requested types and returns one Parser per match,
// each associated with its supported extensions and platforms
func (b *Builder) Build(types, cloudProviders []string) ([]*Parser, error) {
parserSlice := make([]*Parser, 0, len(b.parsers))
for _, parser := range b.parsers {
supportedTypes := parser.SupportedTypes()
if contains(types, supportedTypes) {
extensions := make(model.Extensions, len(parser.SupportedExtensions()))
var platforms []string
for _, ext := range parser.SupportedExtensions() {
extensions[ext] = struct{}{}
}
for key := range supportedTypes {
platforms = append(platforms, key)
}
parserSlice = append(parserSlice, &Parser{
parsers: parser,
extensions: extensions,
Platform: platforms,
})
}
}
return parserSlice, nil
}
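// A minimal composition sketch (myJSONParser and myYAMLParser are hypothetical
// kindParser implementations; the concrete ones live elsewhere in the repository):
//
//	parsers, err := NewBuilder().
//		Add(&myJSONParser{}).
//		Add(&myYAMLParser{}).
//		Build([]string{"cloudformation"}, []string{"aws"})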
// ErrNotSupportedFile is the error returned when a file is not supported by KICS
var ErrNotSupportedFile = errors.New("unsupported file to parse")
// Parser is a struct that associates a parser to its supported extensions
type Parser struct {
parsers kindParser
extensions model.Extensions
Platform []string
}
// ParsedDocument holds the data retrieved from parsing a file
type ParsedDocument struct {
Docs []model.Document
Kind model.FileKind
Content string
IgnoreLines []int
CountLines int
ResolvedFiles map[string]model.ResolvedFile
IsMinified bool
}
// CommentsCommands extracts kics-scan commands from the comments at the beginning of the file, before the code starts
func (c *Parser) CommentsCommands(filePath string, fileContent []byte) model.CommentsCommands {
if c.isValidExtension(filePath) {
commentsCommands := make(model.CommentsCommands)
commentToken := c.parsers.GetCommentToken()
if commentToken != "" {
lines := strings.Split(string(fileContent), "\n")
for _, line := range lines {
line = strings.TrimSpace(line)
if line == "" {
continue
}
if strings.HasSuffix(filePath, ".yaml") && strings.HasPrefix(line, "---") {
continue
}
if !strings.HasPrefix(line, commentToken) {
break
}
fields := strings.Fields(strings.TrimSpace(strings.TrimPrefix(line, commentToken)))
if len(fields) > 1 && fields[0] == "kics-scan" && fields[1] != "" {
commandParameters := strings.SplitN(fields[1], "=", 2)
if len(commandParameters) > 1 {
commentsCommands[commandParameters[0]] = commandParameters[1]
} else {
commentsCommands[commandParameters[0]] = ""
}
}
}
}
return commentsCommands
}
return nil
}
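// Illustrative input sketch (hypothetical file header, not from the KICS
// source): for a Terraform file whose first lines are
//
//	# kics-scan ignore
//	# kics-scan disable=terraform
//
// CommentsCommands would return {"ignore": "", "disable": "terraform"}.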
// Parse executes a parser on the fileContent and returns the file content as a Document, the file kind and
// an error, if an error has occurred
func (c *Parser) Parse(
filePath string,
fileContent []byte,
openAPIResolveReferences, isMinified bool,
maxResolverDepth int) (ParsedDocument, error) {
fileContent = utils.DecryptAnsibleVault(fileContent, os.Getenv("ANSIBLE_VAULT_PASSWORD_FILE"))
if c.isValidExtension(filePath) {
resolved, err := c.parsers.Resolve(fileContent, filePath, openAPIResolveReferences, maxResolverDepth)
if err != nil {
return ParsedDocument{}, err
}
obj, igLines, err := c.parsers.Parse(filePath, resolved)
if err != nil {
return ParsedDocument{}, err
}
cont, err := c.parsers.StringifyContent(fileContent)
if err != nil {
log.Error().Msgf("failed to stringify original content: %s", err)
cont = string(fileContent)
}
return ParsedDocument{
Docs: obj,
Kind: c.parsers.GetKind(),
Content: cont,
IgnoreLines: igLines,
CountLines: bytes.Count(resolved, []byte{'\n'}) + 1,
ResolvedFiles: c.parsers.GetResolvedFiles(),
IsMinified: isMinified,
}, nil
}
return ParsedDocument{
Docs: nil,
Kind: "break",
Content: "",
IgnoreLines: []int{},
}, ErrNotSupportedFile
}
// SupportedExtensions returns extensions supported by KICS
func (c *Parser) SupportedExtensions() model.Extensions {
return c.extensions
}
func contains(types []string, supportedTypes map[string]bool) bool {
if len(types) == 0 || types[0] == "" {
return true
}
for _, t := range types {
if _, ok := supportedTypes[strings.ToLower(t)]; ok {
return true
}
}
return false
}
func (c *Parser) isValidExtension(filePath string) bool {
ext, _ := utils.GetExtension(filePath)
_, ok := c.extensions[ext]
return ok
}
package comment
import (
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
)
// comment is a comment token
type comment hclsyntax.Token
// position returns the position of the comment
func (c *comment) position() hcl.Pos {
return hcl.Pos{Line: c.Range.End.Line + 1, Column: c.Range.End.Column, Byte: c.Range.End.Byte}
}
// value returns the value of a comment
func (c *comment) value() (value model.CommentCommand) {
comment := strings.ToLower(string(c.Bytes))
// check if we are working with kics command
if model.KICSCommentRgxp.MatchString(comment) {
comment = model.KICSCommentRgxp.ReplaceAllString(comment, "")
comment = strings.Trim(comment, "\n")
commands := strings.Split(strings.Trim(comment, "\r"), " ")
value = model.ProcessCommands(commands)
return
}
return model.CommentCommand(comment)
}
// Ignore is a map of commands to ignore
type Ignore map[model.CommentCommand][]hcl.Pos
// build builds the Ignore map
func (i *Ignore) build(ignoreLine, ignoreBlock, ignoreComment []hcl.Pos) {
ignoreStruct := map[model.CommentCommand][]hcl.Pos{
model.IgnoreLine: ignoreLine,
model.IgnoreBlock: ignoreBlock,
model.IgnoreComment: ignoreComment,
}
*i = ignoreStruct
}
// ///////////////////////////
// LINES TO IGNORE //
// ///////////////////////////
// GetIgnoreLines returns the lines to ignore from a comment
func GetIgnoreLines(ignore Ignore, body *hclsyntax.Body) (lines []int) {
lines = make([]int, 0)
for _, position := range ignore[model.IgnoreBlock] {
lines = append(lines, checkBlock(body, position)...)
}
lines = append(lines, getLinesFromPos(ignore[model.IgnoreLine])...)
lines = append(lines, getLinesFromPos(ignore[model.IgnoreComment])...)
return
}
// getLinesFromPos will return a list of lines from a list of positions
func getLinesFromPos(positions []hcl.Pos) (lines []int) {
lines = make([]int, 0)
for _, position := range positions {
lines = append(lines, position.Line)
}
return
}
// checkBlock checks if the position is inside a block and returns the lines to ignore
func checkBlock(body *hclsyntax.Body, position hcl.Pos) (lines []int) {
lines = make([]int, 0)
blocks := body.BlocksAtPos(position)
for _, block := range blocks {
lines = append(lines, getLinesFromBlock(block, position)...)
}
return
}
// getLinesFromBlock returns the lines to ignore from a block
func getLinesFromBlock(block *hcl.Block, position hcl.Pos) (lines []int) {
lines = make([]int, 0)
if checkBlockRange(block, position) {
rangeBlock := block.Body.(*hclsyntax.Body).Range()
lines = append(lines, model.Range(rangeBlock.Start.Line, rangeBlock.End.Line)...)
} else {
// check in attributes
attribute := block.Body.(*hclsyntax.Body).AttributeAtPos(position)
lines = append(lines, getLinesFromAttr(attribute)...)
}
return
}
// getLinesFromAttr returns the lines to ignore from an attribute
func getLinesFromAttr(atr *hcl.Attribute) (lines []int) {
lines = make([]int, 0)
if atr == nil {
return
}
lines = append(lines, model.Range(atr.Range.Start.Line, atr.Range.End.Line)...)
return
}
// checkBlockRange checks if the position is inside a block
func checkBlockRange(block *hcl.Block, position hcl.Pos) bool {
return block.TypeRange.End == position
}
// ///////////////////////////
// COMMENT PARSER //
// ///////////////////////////
// ParseComments parses the comments and returns the kics commands
func ParseComments(src []byte, filename string) (Ignore, error) {
comments, diags := hclsyntax.LexConfig(src, filename, hcl.Pos{Line: 0, Column: 0})
if diags != nil && diags.HasErrors() {
return Ignore{}, diags.Errs()[0]
}
ig := processTokens(comments)
return ig, nil
}
// processTokens goes over the tokens and returns the kics commands
func processTokens(tokens hclsyntax.Tokens) (ig Ignore) {
ignoreLines := make([]hcl.Pos, 0)
ignoreBlocks := make([]hcl.Pos, 0)
ignoreComments := make([]hcl.Pos, 0)
for i := range tokens {
// skip tokens that are not comments, and guard the lookahead below
if tokens[i].Type != hclsyntax.TokenComment || i+1 >= len(tokens) {
continue
}
// case: CONFIGURATION = X # comment
if i > 0 && tokens[i-1].Range.Start.Line == tokens[i].Range.Start.Line {
continue
}
ignoreLines, ignoreBlocks, ignoreComments = processComment((*comment)(&tokens[i]),
(*comment)(&tokens[i+1]), ignoreLines, ignoreBlocks, ignoreComments)
}
ig = make(map[model.CommentCommand][]hcl.Pos)
ig.build(ignoreLines, ignoreBlocks, ignoreComments)
return ig
}
// processComment analyzes the comment to determine which type of kics command the comment is
func processComment(comment *comment, tokenToIgnore *comment,
ignoreLine, ignoreBlock, ignoreComments []hcl.Pos) (ignoreLineR, ignoreBlockR, ignoreCommentsR []hcl.Pos) {
ignoreLineR = ignoreLine
ignoreBlockR = ignoreBlock
ignoreCommentsR = ignoreComments
switch comment.value() {
case model.IgnoreLine:
// comment is of type kics ignore-line
ignoreLineR = append(ignoreLineR, tokenToIgnore.position(), hcl.Pos{Line: comment.position().Line - 1})
case model.IgnoreBlock:
// comment is of type kics ignore-block
ignoreBlockR = append(ignoreBlockR, tokenToIgnore.position(), hcl.Pos{Line: comment.position().Line - 1})
default:
// comment is not of type kics ignore
ignoreCommentsR = append(ignoreCommentsR, hcl.Pos{Line: comment.position().Line - 1})
return
}
return
}
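// exampleGetIgnoreLines is an illustrative sketch (not part of the KICS
// source) wiring ParseComments and GetIgnoreLines together on a hypothetical
// Terraform snippet annotated with kics-scan ignore-block.
func exampleGetIgnoreLines() ([]int, error) {
src := []byte("# kics-scan ignore-block\nresource \"aws_s3_bucket\" \"b\" {\nacl = \"public-read\"\n}\n")
ignore, err := ParseComments(src, "example.tf")
if err != nil {
return nil, err
}
file, diags := hclsyntax.ParseConfig(src, "example.tf", hcl.Pos{Line: 1, Column: 1})
if diags != nil && diags.HasErrors() {
return nil, diags.Errs()[0]
}
// returns the lines covered by the annotated block
return GetIgnoreLines(ignore, file.Body.(*hclsyntax.Body)), nil
}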
package converter
import (
"fmt"
"strconv"
"strings"
sentryReport "github.com/Checkmarx/kics/v2/internal/sentry"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser/terraform/functions"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/rs/zerolog/log"
"github.com/zclconf/go-cty/cty"
ctyconvert "github.com/zclconf/go-cty/cty/convert"
ctyjson "github.com/zclconf/go-cty/cty/json"
)
// VariableMap represents a set of terraform input variables
type VariableMap map[string]cty.Value
var inputVarMap = make(VariableMap)
// This file is adapted from https://github.com/tmccombs/hcl2json.
// convertBlock() was modified to combine both the blocks and labels of a given resource.
// DefaultConverted converts an hcl.File into a JSON-serializable model.Document.
// It assumes that the file body is a *hclsyntax.Body
var DefaultConverted = func(file *hcl.File, inputVariables VariableMap) (model.Document, error) {
inputVarMap = inputVariables
c := converter{bytes: file.Bytes}
body, err := c.convertBody(file.Body.(*hclsyntax.Body), 0)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "var DefaultConverted",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert body in terraform parser",
}, false)
return nil, err
}
return body, nil
}
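// exampleDefaultConverted is an illustrative sketch (not part of the KICS
// source): converting a parsed HCL file into a model.Document with an empty
// input-variable map.
func exampleDefaultConverted() (model.Document, error) {
src := []byte("resource \"aws_s3_bucket\" \"b\" {\nacl = \"private\"\n}\n")
file, diags := hclsyntax.ParseConfig(src, "example.tf", hcl.Pos{Line: 1, Column: 1})
if diags != nil && diags.HasErrors() {
return nil, diags.Errs()[0]
}
// the result nests resource -> type -> name and carries _kics_lines metadata
return DefaultConverted(file, make(VariableMap))
}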
type converter struct {
bytes []byte
}
const kicsLinesKey = "_kics_"
func (c *converter) rangeSource(r hcl.Range) string {
return string(c.bytes[r.Start.Byte:r.End.Byte])
}
func (c *converter) convertBody(body *hclsyntax.Body, defLine int) (model.Document, error) {
var err error
var v string
countValue := body.Attributes["count"]
count := -1
if countValue != nil {
value, err := countValue.Expr.Value(nil)
if err == nil {
switch value.Type() {
case cty.String:
v = value.AsString()
case cty.Number:
v = value.AsBigFloat().String()
}
intValue, err := strconv.Atoi(v)
if err == nil {
count = intValue
}
}
}
if count == 0 {
return nil, nil
}
out := make(model.Document)
kicsS := make(map[string]model.LineObject)
// set kics line for the body
kicsS["_kics__default"] = model.LineObject{
Line: defLine,
}
if body.Attributes != nil {
for key, value := range body.Attributes {
out[key], err = c.convertExpression(value.Expr)
// set kics line for the body value
kicsS[kicsLinesKey+key] = model.LineObject{
Line: value.SrcRange.Start.Line,
Arr: c.getArrLines(value.Expr),
}
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertBody",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert Expression in terraform parser",
}, false)
return nil, err
}
}
}
for _, block := range body.Blocks {
// set kics line for block
kicsS[kicsLinesKey+block.Type] = model.LineObject{
Line: block.TypeRange.Start.Line,
}
err = c.convertBlock(block, out, block.TypeRange.Start.Line)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertBody",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert block in terraform parser",
}, false)
return nil, err
}
}
out["_kics_lines"] = kicsS
return out, nil
}
// getArrLines will get line information for the array elements
func (c *converter) getArrLines(expr hclsyntax.Expression) []map[string]*model.LineObject {
arr := make([]map[string]*model.LineObject, 0)
if v, ok := expr.(*hclsyntax.TupleConsExpr); ok {
for _, ex := range v.Exprs {
arrEx := make(map[string]*model.LineObject)
// set default line of array
arrEx["_kics__default"] = &model.LineObject{
Line: ex.Range().Start.Line,
}
switch valType := ex.(type) {
case *hclsyntax.ObjectConsExpr:
arrEx["_kics__default"] = &model.LineObject{
Line: ex.Range().Start.Line + 1,
}
// set lines for array elements
for _, item := range valType.Items {
key, err := c.convertKey(item.KeyExpr)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func getArrLines",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert key in terraform parser",
}, false)
return nil
}
arrEx[kicsLinesKey+key] = &model.LineObject{
Line: item.KeyExpr.Range().Start.Line,
}
}
case *hclsyntax.TupleConsExpr:
// set lines for nested arrays by recursing into the tuple
arrEx["_kics__default"] = &model.LineObject{
Arr: c.getArrLines(valType),
}
}
arr = append(arr, arrEx)
}
}
return arr
}
func (c *converter) convertBlock(block *hclsyntax.Block, out model.Document, defLine int) error {
var key = block.Type
value, err := c.convertBody(block.Body, defLine)
if err != nil {
return err
}
if value == nil {
return nil
}
for _, label := range block.Labels {
if inner, exists := out[key]; exists {
var ok bool
out, ok = inner.(model.Document)
if !ok {
return fmt.Errorf("unable to convert Block to JSON: %v.%v", block.Type, strings.Join(block.Labels, "."))
}
} else {
obj := make(model.Document)
out[key] = obj
out = obj
}
key = label
}
if current, exists := out[key]; exists {
if list, ok := current.([]interface{}); ok {
out[key] = append(list, value)
} else {
out[key] = []interface{}{current, value}
}
} else {
out[key] = value
}
return nil
}
func (c *converter) convertExpression(expr hclsyntax.Expression) (interface{}, error) {
// assume it is hcl syntax (because, um, it is)
switch value := expr.(type) {
case *hclsyntax.LiteralValueExpr:
return ctyjson.SimpleJSONValue{Value: value.Val}, nil
case *hclsyntax.TemplateExpr:
return c.convertTemplate(value)
case *hclsyntax.TemplateWrapExpr:
return c.convertExpression(value.Wrapped)
case *hclsyntax.TupleConsExpr:
list := make([]interface{}, 0)
for _, ex := range value.Exprs {
elem, err := c.convertExpression(ex)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertExpression",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert expression in terraform parser",
}, false)
return nil, err
}
list = append(list, elem)
}
return list, nil
case *hclsyntax.ObjectConsExpr:
return c.objectConsExpr(value)
case *hclsyntax.FunctionCallExpr:
return c.evalFunction(expr)
case *hclsyntax.ConditionalExpr:
expressionEvaluated, err := expr.Value(&hcl.EvalContext{
Variables: inputVarMap,
Functions: functions.TerraformFuncs,
})
if err != nil {
return c.wrapExpr(expr)
}
return ctyjson.SimpleJSONValue{Value: expressionEvaluated}, nil
default:
// try to evaluate with variables and functions
valueConverted, _ := expr.Value(&hcl.EvalContext{
Variables: inputVarMap,
Functions: functions.TerraformFuncs,
})
if !checkDynamicKnownTypes(valueConverted) {
return ctyjson.SimpleJSONValue{Value: valueConverted}, nil
}
return c.wrapExpr(expr)
}
}
func checkValue(val cty.Value) bool {
if val.Type().HasDynamicTypes() || !val.IsKnown() {
return true
}
if !val.Type().IsPrimitiveType() && checkDynamicKnownTypes(val) {
return true
}
return false
}
func checkDynamicKnownTypes(valueConverted cty.Value) bool {
if !valueConverted.Type().HasDynamicTypes() && valueConverted.IsKnown() {
if valueConverted.Type().FriendlyName() == "tuple" {
for _, val := range valueConverted.AsValueSlice() {
if checkValue(val) {
return true
}
}
}
if valueConverted.Type().FriendlyName() == "object" {
for _, val := range valueConverted.AsValueMap() {
if checkValue(val) {
return true
}
}
}
return false
}
return true
}
func (c *converter) objectConsExpr(value *hclsyntax.ObjectConsExpr) (model.Document, error) {
m := make(model.Document)
for _, item := range value.Items {
key, err := c.convertKey(item.KeyExpr)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func objectConsExpr",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert key in terraform parser",
}, false)
return nil, err
}
m[key], err = c.convertExpression(item.ValueExpr)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func objectConsExpr",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert expression in terraform parser",
}, false)
return nil, err
}
}
return m, nil
}
func (c *converter) convertKey(keyExpr hclsyntax.Expression) (string, error) {
// a key should never have dynamic input
if k, isKeyExpr := keyExpr.(*hclsyntax.ObjectConsKeyExpr); isKeyExpr {
keyExpr = k.Wrapped
if _, isTraversal := keyExpr.(*hclsyntax.ScopeTraversalExpr); isTraversal {
return c.rangeSource(keyExpr.Range()), nil
}
}
return c.convertStringPart(keyExpr)
}
func (c *converter) convertTemplate(t *hclsyntax.TemplateExpr) (string, error) {
if t.IsStringLiteral() {
// safe because the value is just the string
v, err := t.Value(nil)
if err != nil {
return "", err
}
return v.AsString(), nil
}
builder := &strings.Builder{}
for _, part := range t.Parts {
s, err := c.convertStringPart(part)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertTemplate",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert string part in terraform parser",
}, false)
return "", err
}
builder.WriteString(s)
}
return builder.String(), nil
}
func (c *converter) convertStringPart(expr hclsyntax.Expression) (string, error) {
switch v := expr.(type) {
case *hclsyntax.LiteralValueExpr:
s, err := ctyconvert.Convert(v.Val, cty.String)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertStringPart",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to cty convert in terraform parser",
}, false)
return "", err
}
return s.AsString(), nil
case *hclsyntax.TemplateExpr:
return c.convertTemplate(v)
case *hclsyntax.TemplateWrapExpr:
return c.convertStringPart(v.Wrapped)
case *hclsyntax.ConditionalExpr:
return c.convertTemplateConditional(v)
case *hclsyntax.TemplateJoinExpr:
return c.convertTemplateFor(v.Tuple.(*hclsyntax.ForExpr))
case *hclsyntax.ParenthesesExpr:
return c.convertStringPart(v.Expression)
default:
// try to evaluate with variables
valueConverted, _ := expr.Value(&hcl.EvalContext{
Variables: inputVarMap,
})
if valueConverted.Type().FriendlyName() == "string" {
return valueConverted.AsString(), nil
}
// treating as an embedded expression
return c.wrapExpr(expr)
}
}
func (c *converter) convertTemplateConditional(expr *hclsyntax.ConditionalExpr) (string, error) {
builder := &strings.Builder{}
builder.WriteString("%{if ")
builder.WriteString(c.rangeSource(expr.Condition.Range()))
builder.WriteString("}")
trueResult, err := c.convertStringPart(expr.TrueResult)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertTemplateConditional",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert string part terraform parser",
}, false)
return "", nil
}
builder.WriteString(trueResult)
falseResult, err := c.convertStringPart(expr.FalseResult)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertTemplateConditional",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert string part terraform parser",
}, false)
return "", nil
}
if falseResult != "" {
builder.WriteString("%{else}")
builder.WriteString(falseResult)
}
builder.WriteString("%{endif}")
return builder.String(), nil
}
func (c *converter) convertTemplateFor(expr *hclsyntax.ForExpr) (string, error) {
builder := &strings.Builder{}
builder.WriteString("%{for ")
if expr.KeyVar != "" {
builder.WriteString(expr.KeyVar)
builder.WriteString(", ")
}
builder.WriteString(expr.ValVar)
builder.WriteString(" in ")
builder.WriteString(c.rangeSource(expr.CollExpr.Range()))
builder.WriteString("}")
templ, err := c.convertStringPart(expr.ValExpr)
if err != nil {
sentryReport.ReportSentry(&sentryReport.Report{
Location: "func convertTemplateFor",
Err: err,
Kind: model.KindTerraform,
Message: "Failed to convert string part terraform parser",
}, false)
return "", err
}
builder.WriteString(templ)
builder.WriteString("%{endfor}")
return builder.String(), nil
}
func (c *converter) wrapExpr(expr hclsyntax.Expression) (string, error) {
expression := c.rangeSource(expr.Range())
if strings.HasPrefix(expression, "var.") {
log.Trace().Msgf("Variable ${%s} value not found", expression)
}
return "${" + expression + "}", nil
}
func (c *converter) evalFunction(expression hclsyntax.Expression) (interface{}, error) {
expressionEvaluated, err := expression.Value(&hcl.EvalContext{
Variables: inputVarMap,
Functions: functions.TerraformFuncs,
})
if err != nil {
for _, expressionError := range err {
if expressionError.Summary == "Unknown variable" {
jsonPath := c.rangeSource(expressionError.Expression.Range())
rootKey := strings.Split(jsonPath, ".")[0]
if strings.Contains(jsonPath, ".") {
jsonCtyValue, convertErr := createEntryInputVar(strings.Split(jsonPath, ".")[1:], jsonPath)
if convertErr != nil {
return c.wrapExpr(expression)
}
inputVarMap[rootKey] = jsonCtyValue
} else {
inputVarMap[rootKey] = cty.StringVal(jsonPath)
}
}
}
expressionEvaluated, err = expression.Value(&hcl.EvalContext{
Variables: inputVarMap,
Functions: functions.TerraformFuncs,
})
if err != nil {
return c.wrapExpr(expression)
}
}
if !expressionEvaluated.HasWhollyKnownType() {
// in some cases, the expression is evaluated with no error but the type is unknown.
// this causes the json marshaling of the Document later on to fail with an error, and the entire scan fails.
// Therefore, we prefer to wrap it as a string and continue the scan.
return c.wrapExpr(expression)
}
return ctyjson.SimpleJSONValue{Value: expressionEvaluated}, nil
}
func createEntryInputVar(path []string, defaultValue string) (cty.Value, error) {
mapJSON := "{"
closeMap := "}"
for idx, key := range path {
if idx+1 < len(path) {
mapJSON += fmt.Sprintf("%q:{", key)
closeMap += "}"
} else {
mapJSON += fmt.Sprintf("%q: %q", key, defaultValue)
}
}
mapJSON += closeMap
jsonType, err := ctyjson.ImpliedType([]byte(mapJSON))
if err != nil {
return cty.NilVal, err
}
value, err := ctyjson.Unmarshal([]byte(mapJSON), jsonType)
if err != nil {
return cty.NilVal, err
}
return value, nil
}
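// Illustrative sketch (not part of the KICS source): for an unknown variable
// expression such as module.vpc.id, evalFunction calls
// createEntryInputVar([]string{"vpc", "id"}, "module.vpc.id"), which builds the
// JSON {"vpc":{"id":"module.vpc.id"}} and unmarshals it into a cty value, so on
// the retry the unknown reference resolves to its own path string.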
package terraform
import (
"bytes"
"encoding/json"
"path/filepath"
"sync"
"github.com/Checkmarx/kics/v2/pkg/builder/engine"
"github.com/Checkmarx/kics/v2/pkg/parser/terraform/functions"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hcldec"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/rs/zerolog/log"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/gocty"
ctyjson "github.com/zclconf/go-cty/cty/json"
)
type dataSourcePolicyCondition struct {
Test string `json:"test,omitempty"`
Variable string `json:"variable,omitempty"`
Values []string `json:"values,omitempty"`
}
type dataSourcePolicyPrincipal struct {
Type string `json:"type,omitempty"`
Identifiers []string `json:"identifiers,omitempty"`
}
type dataSourcePolicyStatement struct {
Actions []string `json:"actions"`
Condition dataSourcePolicyCondition `json:"condition"`
Effect string `json:"effect"`
NotActions []string `json:"not_actions"`
NotPrincipals dataSourcePolicyPrincipal `json:"not_principals"`
NotResources []string `json:"not_resources"`
Principals dataSourcePolicyPrincipal `json:"principals"`
Resources []string `json:"resources"`
Sid string `json:"sid"`
}
type dataSourcePolicy struct {
ID string `json:"id"`
Statement []dataSourcePolicyStatement `json:"statement"`
Version string `json:"version"`
}
type dataSource struct {
Value dataSourcePolicy `json:"value"`
}
type convertedPolicyCondition map[string]map[string][]string
type convertedPolicyPrincipal map[string][]string
type convertedPolicyStatement struct {
Actions []string `json:"Actions,omitempty"`
Condition convertedPolicyCondition `json:"Condition,omitempty"`
Effect string `json:"Effect,omitempty"`
NotActions []string `json:"Not_actions,omitempty"`
NotPrincipals convertedPolicyPrincipal `json:"Not_principals,omitempty"`
NotResources []string `json:"Not_resources,omitempty"`
Principals convertedPolicyPrincipal `json:"Principals,omitempty"`
Resources []string `json:"Resources,omitempty"`
Sid string `json:"Sid,omitempty"`
}
type convertedPolicy struct {
ID string `json:"Id,omitempty"`
Statement []convertedPolicyStatement `json:"Statement,omitempty"`
Version string `json:"Version,omitempty"`
}
var mutexData = &sync.Mutex{}
func getDataSourcePolicy(currentPath string) {
tfFiles, err := filepath.Glob(filepath.Join(currentPath, "*.tf"))
if err != nil {
log.Error().Msg("Error getting .tf files to parse data source")
return
}
if len(tfFiles) == 0 {
return
}
jsonMap := make(map[string]map[string]string)
for _, tfFile := range tfFiles {
parsedFile, parseErr := parseFile(tfFile, true)
if parseErr != nil {
log.Debug().Msgf("Error trying to parse file %s for data source.", tfFile)
continue
}
body, ok := parsedFile.Body.(*hclsyntax.Body)
if !ok {
continue
}
for _, block := range body.Blocks {
if block.Type == "data" && block.Labels[0] == "aws_iam_policy_document" && len(block.Labels) > 1 {
policyJSON := parseDataSourceBody(block.Body)
jsonMap[block.Labels[1]] = map[string]string{
"json": policyJSON,
}
}
}
}
policyResource := map[string]map[string]map[string]string{
"aws_iam_policy_document": jsonMap,
}
data, err := gocty.ToCtyValue(policyResource, cty.Map(cty.Map(cty.Map(cty.String))))
if err != nil {
log.Error().Msgf("Error trying to convert policy to cty value: %s", err)
return
}
mutexData.Lock()
inputVariableMap["data"] = data
mutexData.Unlock()
}
func decodeDataSourcePolicy(value cty.Value) dataSourcePolicy {
jsonified, err := ctyjson.Marshal(value, cty.DynamicPseudoType)
if err != nil {
log.Error().Msgf("Error trying to decode data source block: %s", err)
return dataSourcePolicy{}
}
var data dataSource
err = json.Unmarshal(jsonified, &data)
if err != nil {
log.Error().Msgf("Error trying to encode data source json: %s", err)
return dataSourcePolicy{}
}
return data.Value
}
func getPrincipalSpec() *hcldec.ObjectSpec {
return &hcldec.ObjectSpec{
"type": &hcldec.AttrSpec{
Name: "type",
Type: cty.String,
Required: false,
},
"identifiers": &hcldec.AttrSpec{
Name: "identifiers",
Type: cty.List(cty.String),
Required: false,
},
}
}
func getConditionalSpec() *hcldec.ObjectSpec {
return &hcldec.ObjectSpec{
"test": &hcldec.AttrSpec{
Name: "test",
Type: cty.String,
Required: false,
},
"variable": &hcldec.AttrSpec{
Name: "variable",
Type: cty.String,
Required: false,
},
"values": &hcldec.AttrSpec{
Name: "values",
Type: cty.List(cty.String),
Required: false,
},
}
}
func getStatementSpec() *hcldec.BlockListSpec {
return &hcldec.BlockListSpec{
TypeName: "statement",
Nested: &hcldec.ObjectSpec{
"sid": &hcldec.AttrSpec{
Name: "sid",
Type: cty.String,
Required: false,
},
"effect": &hcldec.AttrSpec{
Name: "effect",
Type: cty.String,
Required: false,
},
"actions": &hcldec.AttrSpec{
Name: "actions",
Type: cty.List(cty.String),
Required: false,
},
"not_actions": &hcldec.AttrSpec{
Name: "not_actions",
Type: cty.List(cty.String),
Required: false,
},
"resources": &hcldec.AttrSpec{
Name: "resources",
Type: cty.List(cty.String),
Required: false,
},
"not_resources": &hcldec.AttrSpec{
Name: "not_resources",
Type: cty.List(cty.String),
Required: false,
},
"principals": &hcldec.BlockSpec{
TypeName: "principals",
Nested: getPrincipalSpec(),
},
"not_principals": &hcldec.BlockSpec{
TypeName: "not_principals",
Nested: getPrincipalSpec(),
},
"condition": &hcldec.BlockSpec{
TypeName: "condition",
Nested: getConditionalSpec(),
},
},
}
}
func parseDataSourceBody(body *hclsyntax.Body) string {
dataSourceSpec := &hcldec.ObjectSpec{
"id": &hcldec.AttrSpec{
Name: "id",
Type: cty.String,
Required: false,
},
"version": &hcldec.AttrSpec{
Name: "version",
Type: cty.String,
Required: false,
},
"statement": getStatementSpec(),
}
resolveDataResources(body)
target, decodeErrs := hcldec.Decode(body, dataSourceSpec, &hcl.EvalContext{
Variables: inputVariableMap,
Functions: functions.TerraformFuncs,
})
// check decode errors
for _, decErr := range decodeErrs {
if decErr.Summary != "Unknown variable" {
log.Debug().Msgf("Error trying to eval data source block: %s", decErr.Summary)
return ""
}
log.Debug().Msg("Dismissed Error when decoding policy: Found unknown variable")
}
dataSourceJSON := decodeDataSourcePolicy(target)
convertedDataSource := convertedPolicy{
ID: dataSourceJSON.ID,
Version: dataSourceJSON.Version,
}
statements := make([]convertedPolicyStatement, len(dataSourceJSON.Statement))
for idx := range dataSourceJSON.Statement {
var convertedCondition convertedPolicyCondition
if dataSourceJSON.Statement[idx].Condition.Variable != "" {
convertedCondition = convertedPolicyCondition{
dataSourceJSON.Statement[idx].Condition.Test: map[string][]string{
dataSourceJSON.Statement[idx].Condition.Variable: dataSourceJSON.Statement[idx].Condition.Values,
},
}
}
var convertedPrincipal convertedPolicyPrincipal
if dataSourceJSON.Statement[idx].Principals.Type != "" {
convertedPrincipal = convertedPolicyPrincipal{
dataSourceJSON.Statement[idx].Principals.Type: dataSourceJSON.Statement[idx].Principals.Identifiers,
}
}
var convertedNotPrincipal convertedPolicyPrincipal
if dataSourceJSON.Statement[idx].NotPrincipals.Type != "" {
convertedNotPrincipal = convertedPolicyPrincipal{
dataSourceJSON.Statement[idx].NotPrincipals.Type: dataSourceJSON.Statement[idx].NotPrincipals.Identifiers,
}
}
convertedStatement := convertedPolicyStatement{
Actions: dataSourceJSON.Statement[idx].Actions,
Effect: dataSourceJSON.Statement[idx].Effect,
NotActions: dataSourceJSON.Statement[idx].NotActions,
NotResources: dataSourceJSON.Statement[idx].NotResources,
Resources: dataSourceJSON.Statement[idx].Resources,
Sid: dataSourceJSON.Statement[idx].Sid,
Condition: convertedCondition,
NotPrincipals: convertedNotPrincipal,
Principals: convertedPrincipal,
}
statements[idx] = convertedStatement
}
convertedDataSource.Statement = statements
buffer := &bytes.Buffer{}
encoder := json.NewEncoder(buffer)
encoder.SetEscapeHTML(false)
err := encoder.Encode(convertedDataSource)
if err != nil {
log.Error().Msgf("Error trying to encoding data source json: %s", err)
return ""
}
return buffer.String()
}
// resolveDataResources resolves the "resources" expressions of data source statements into LiteralValueExpr
func resolveDataResources(body *hclsyntax.Body) {
for _, block := range body.Blocks {
if resources, ok := block.Body.Attributes["resources"]; ok &&
block.Type == "statement" {
resolveTuple(resources.Expr)
}
}
}
func resolveTuple(expr hclsyntax.Expression) {
e := engine.Engine{}
if v, ok := expr.(*hclsyntax.TupleConsExpr); ok {
for i, ex := range v.Exprs {
striExpr, err := e.ExpToString(ex)
if err != nil {
log.Error().Msgf("Error trying to ExpToString: %s", err)
}
v.Exprs[i] = &hclsyntax.LiteralValueExpr{
Val: cty.StringVal(striExpr),
SrcRange: v.Exprs[i].Range(),
}
}
}
}
package functions
import (
"encoding/base64"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/function"
"github.com/zclconf/go-cty/cty/function/stdlib"
)
// Base64EncodeFunc - https://www.terraform.io/docs/language/functions/base64encode.html
var Base64EncodeFunc = function.New(&function.Spec{
Params: []function.Parameter{
{
Name: "val",
Type: cty.DynamicPseudoType,
AllowDynamicType: true,
AllowNull: true,
},
},
Type: function.StaticReturnType(cty.String),
Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
val := args[0]
if !val.IsWhollyKnown() {
// We can't serialize unknowns, so if the value is unknown or
// contains any _nested_ unknowns then our result must be
// unknown.
return cty.UnknownVal(retType), nil
}
if val.IsNull() {
return cty.StringVal("null"), nil
}
encoded := base64.StdEncoding.EncodeToString([]byte(val.AsString()))
return cty.StringVal(encoded), nil
},
})
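// exampleBase64Encode is an illustrative sketch (not part of the KICS source):
// invoking the override directly; base64encode("kics") yields "a2ljcw==".
func exampleBase64Encode() (string, error) {
v, err := Base64EncodeFunc.Call([]cty.Value{cty.StringVal("kics")})
if err != nil {
return "", err
}
return v.AsString(), nil
}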
// TerraformFuncs contains all the Terraform functions supported by KICS. If KICS
// has to override a function, create a file in this package and add/replace that function's key here
var TerraformFuncs = map[string]function.Function{
"abs": stdlib.AbsoluteFunc,
"base64encode": Base64EncodeFunc,
"ceil": stdlib.CeilFunc,
"chomp": stdlib.ChompFunc,
"coalescelist": stdlib.CoalesceListFunc,
"compact": stdlib.CompactFunc,
"concat": stdlib.ConcatFunc,
"contains": stdlib.ContainsFunc,
"csvdecode": stdlib.CSVDecodeFunc,
"distinct": stdlib.DistinctFunc,
"element": stdlib.ElementFunc,
"chunklist": stdlib.ChunklistFunc,
"flatten": stdlib.FlattenFunc,
"floor": stdlib.FloorFunc,
"format": stdlib.FormatFunc,
"formatdate": stdlib.FormatDateFunc,
"formatlist": stdlib.FormatListFunc,
"indent": stdlib.IndentFunc,
"join": stdlib.JoinFunc,
"jsondecode": stdlib.JSONDecodeFunc,
"jsonencode": stdlib.JSONEncodeFunc,
"keys": stdlib.KeysFunc,
"log": stdlib.LogFunc,
"lower": stdlib.LowerFunc,
"max": stdlib.MaxFunc,
"merge": stdlib.MergeFunc,
"min": stdlib.MinFunc,
"parseint": stdlib.ParseIntFunc,
"pow": stdlib.PowFunc,
"range": stdlib.RangeFunc,
"regex": stdlib.RegexFunc,
"regexall": stdlib.RegexAllFunc,
"reverse": stdlib.ReverseListFunc,
"setintersection": stdlib.SetIntersectionFunc,
"setproduct": stdlib.SetProductFunc,
"setsubtract": stdlib.SetSubtractFunc,
"setunion": stdlib.SetUnionFunc,
"signum": stdlib.SignumFunc,
"slice": stdlib.SliceFunc,
"sort": stdlib.SortFunc,
"split": stdlib.SplitFunc,
"strrev": stdlib.ReverseFunc,
"substr": stdlib.SubstrFunc,
"timeadd": stdlib.TimeAddFunc,
"title": stdlib.TitleFunc,
"trim": stdlib.TrimFunc,
"trimprefix": stdlib.TrimPrefixFunc,
"trimspace": stdlib.TrimSpaceFunc,
"trimsuffix": stdlib.TrimSuffixFunc,
"upper": stdlib.UpperFunc,
"values": stdlib.ValuesFunc,
"zipmap": stdlib.ZipmapFunc,
}
package terraform
import (
"os"
"path/filepath"
"regexp"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser/terraform/comment"
"github.com/Checkmarx/kics/v2/pkg/parser/terraform/converter"
"github.com/Checkmarx/kics/v2/pkg/parser/utils"
masterUtils "github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
ctyjson "github.com/zclconf/go-cty/cty/json"
)
// RetriesDefaultValue is default number of times a parser will retry to execute
const RetriesDefaultValue = 50
// Converter converts a parsed hcl.File, using the given input variables, into a model.Document
type Converter func(file *hcl.File, inputVariables converter.VariableMap) (model.Document, error)
// Parser struct that contains the function to parse file and the number of retries if something goes wrong
type Parser struct {
convertFunc Converter
numOfRetries int
terraformVarsPath string
}
// NewDefault initializes a parser with Parser default values
func NewDefault() *Parser {
return &Parser{
numOfRetries: RetriesDefaultValue,
convertFunc: converter.DefaultConverted,
}
}
// NewDefaultWithVarsPath initializes a parser with the default values using a variables path
func NewDefaultWithVarsPath(terraformVarsPath string) *Parser {
parser := NewDefault()
parser.terraformVarsPath = terraformVarsPath
return parser
}
// Resolve - replaces or modifies in-memory content before parsing
func (p *Parser) Resolve(fileContent []byte, filename string, _ bool, _ int) ([]byte, error) {
// handle panic during resolve process
defer func() {
if r := recover(); r != nil {
errMessage := "Recovered from panic during resolve of file " + filename
masterUtils.HandlePanic(r, errMessage)
}
}()
getInputVariables(filepath.Dir(filename), string(fileContent), p.terraformVarsPath)
getDataSourcePolicy(filepath.Dir(filename))
return fileContent, nil
}
func processContent(elements model.Document, content, path string) {
var certInfo map[string]interface{}
if content != "" {
certInfo = utils.AddCertificateInfo(path, content)
if certInfo != nil {
elements["certificate_body"] = certInfo
}
}
}
func processElements(elements model.Document, path string) {
for k, v3 := range elements { // resource elements
if k != "certificate_body" {
continue
}
switch value := v3.(type) {
case string:
content := utils.CheckCertificate(value)
processContent(elements, content, path)
case ctyjson.SimpleJSONValue:
content := utils.CheckCertificate(value.Value.AsString())
processContent(elements, content, path)
}
}
}
func processResourcesElements(resourcesElements model.Document, path string) error {
for _, v2 := range resourcesElements {
switch t := v2.(type) {
case []interface{}:
return errors.New("failed to process resources")
case interface{}:
if elements, ok := t.(model.Document); ok {
processElements(elements, path)
}
}
}
return nil
}
func processResources(doc model.Document, path string) error {
var resourcesElements model.Document
defer func() {
if r := recover(); r != nil {
errMessage := "Recovered from panic during process of resources in file " + path
masterUtils.HandlePanic(r, errMessage)
}
}()
for _, resources := range doc {
switch t := resources.(type) {
case []interface{}: // support the case of nameless resources - where we get a list of resources
for _, value := range t {
resourcesElements = value.(model.Document)
err := processResourcesElements(resourcesElements, path)
if err != nil {
return err
}
}
case interface{}:
resourcesElements = t.(model.Document)
err := processResourcesElements(resourcesElements, path)
if err != nil {
return err
}
}
}
return nil
}
func addExtraInfo(json []model.Document, path string) ([]model.Document, error) {
// handle panic during resource processing
defer func() {
if r := recover(); r != nil {
errMessage := "Recovered from panic during resource processing for file " + path
masterUtils.HandlePanic(r, errMessage)
}
}()
for _, documents := range json { // iterate over documents
if resources, ok := documents["resource"].(model.Document); ok {
err := processResources(resources, path)
if err != nil {
return []model.Document{}, err
}
}
}
return json, nil
}
func parseFile(filename string, shouldReplaceDataSource bool) (*hcl.File, error) {
file, err := os.ReadFile(filename)
if err != nil {
return nil, err
}
if shouldReplaceDataSource {
replaceDataIdentifiers := regexp.MustCompile(`(data\.[A-Za-z0-9._-]+)`)
file = []byte(replaceDataIdentifiers.ReplaceAllString(string(file), "\"$1\""))
}
parsedFile, _ := hclsyntax.ParseConfig(file, filename, hcl.Pos{Line: 1, Column: 1})
return parsedFile, nil
}
// Parse execute parser for the content in a file
func (p *Parser) Parse(path string, content []byte) ([]model.Document, []int, error) {
file, diagnostics := hclsyntax.ParseConfig(content, filepath.Base(path), hcl.Pos{Byte: 0, Line: 1, Column: 1})
defer func() {
if r := recover(); r != nil {
errMessage := "Recovered from panic during parsing of file " + path
masterUtils.HandlePanic(r, errMessage)
}
}()
if diagnostics != nil && diagnostics.HasErrors() && len(diagnostics.Errs()) > 0 {
err := diagnostics.Errs()[0]
return nil, []int{}, err
}
ignore, err := comment.ParseComments(content, path)
if err != nil {
log.Err(err).Msg("failed to parse comments")
}
linesToIgnore := comment.GetIgnoreLines(ignore, file.Body.(*hclsyntax.Body))
fc, parseErr := p.convertFunc(file, inputVariableMap)
json, err := addExtraInfo([]model.Document{fc}, path)
if err != nil {
return json, []int{}, errors.Wrap(err, "failed terraform parse")
}
return json, linesToIgnore, errors.Wrap(parseErr, "failed terraform parse")
}
// SupportedExtensions returns Terraform extensions
func (p *Parser) SupportedExtensions() []string {
return []string{".tf", ".tfvars"}
}
// SupportedTypes returns types supported by this parser, which are terraform
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{"terraform": true}
}
// GetKind returns Terraform kind parser
func (p *Parser) GetKind() model.FileKind {
return model.KindTerraform
}
// GetCommentToken returns the comment token of Terraform - #
func (p *Parser) GetCommentToken() string {
return "#"
}
// StringifyContent converts original content into string formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
// GetResolvedFiles returns the files that are resolved
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return make(map[string]model.ResolvedFile)
}
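// exampleParseTerraform is an illustrative sketch (not part of the KICS
// source): a minimal Parse call on in-memory content; "example.tf" is a
// hypothetical path used only to name the content.
func exampleParseTerraform() ([]model.Document, []int, error) {
content := []byte("resource \"aws_s3_bucket\" \"b\" {\nacl = \"private\"\n}\n")
return NewDefault().Parse("example.tf", content)
}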
package terraform
import (
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/Checkmarx/kics/v2/pkg/parser/terraform/converter"
"github.com/hashicorp/hcl/v2"
"github.com/hashicorp/hcl/v2/hclsyntax"
"github.com/rs/zerolog/log"
"github.com/zclconf/go-cty/cty"
)
var inputVariableMap = make(converter.VariableMap)
func mergeMaps(baseMap, newItems converter.VariableMap) {
for key, value := range newItems {
baseMap[key] = value
}
}
func setInputVariablesDefaultValues(filename string) (converter.VariableMap, error) {
parsedFile, err := parseFile(filename, false)
if err != nil || parsedFile == nil {
return nil, err
}
content, _, _ := parsedFile.Body.PartialContent(&hcl.BodySchema{
Blocks: []hcl.BlockHeaderSchema{
{
Type: "variable",
LabelNames: []string{"name"},
},
},
})
defaultValuesMap := make(converter.VariableMap)
for _, block := range content.Blocks {
if len(block.Labels) == 0 || block.Labels[0] == "" {
continue
}
attr, _ := block.Body.JustAttributes()
if len(attr) == 0 {
continue
}
if defaultValue, exists := attr["default"]; exists {
defaultVar, _ := defaultValue.Expr.Value(nil)
defaultValuesMap[block.Labels[0]] = defaultVar
}
}
return defaultValuesMap, nil
}
func checkTfvarsValid(f *hcl.File, filename string) error {
content, _, _ := f.Body.PartialContent(&hcl.BodySchema{
Blocks: []hcl.BlockHeaderSchema{
{
Type: "variable",
LabelNames: []string{"name"},
},
},
})
if len(content.Blocks) > 0 {
return fmt.Errorf("failed to get variables from %s, .tfvars file is used to assing values not to declare new variables", filename)
}
return nil
}
func getInputVariablesFromFile(filename string) (converter.VariableMap, error) {
parsedFile, err := parseFile(filename, false)
if err != nil || parsedFile == nil {
return nil, err
}
err = checkTfvarsValid(parsedFile, filename)
if err != nil {
return nil, err
}
attrs := parsedFile.Body.(*hclsyntax.Body).Attributes
variables := make(converter.VariableMap)
for name, attr := range attrs {
value, _ := attr.Expr.Value(&hcl.EvalContext{})
variables[name] = value
}
return variables, nil
}
func getInputVariables(currentPath, fileContent, terraformVarsPath string) {
variablesMap := make(converter.VariableMap)
tfFiles, err := filepath.Glob(filepath.Join(currentPath, "*.tf"))
if err != nil {
log.Error().Msg("Error getting .tf files")
}
for _, tfFile := range tfFiles {
variables, errDefaultValues := setInputVariablesDefaultValues(tfFile)
if errDefaultValues != nil {
log.Error().Msgf("Error getting default values from %s", tfFile)
log.Err(errDefaultValues)
continue
}
mergeMaps(variablesMap, variables)
}
tfVarsFiles, err := filepath.Glob(filepath.Join(currentPath, "*.auto.tfvars"))
if err != nil {
log.Error().Msg("Error getting .auto.tfvars files")
}
_, err = os.Stat(filepath.Join(currentPath, "terraform.tfvars"))
if err != nil {
log.Trace().Msgf("terraform.tfvars not found on %s", currentPath)
} else {
tfVarsFiles = append(tfVarsFiles, filepath.Join(currentPath, "terraform.tfvars"))
}
for _, tfVarsFile := range tfVarsFiles {
variables, errInputVariables := getInputVariablesFromFile(tfVarsFile)
if errInputVariables != nil {
log.Error().Msgf("Error getting values from %s", tfVarsFile)
log.Err(errInputVariables)
continue
}
mergeMaps(variablesMap, variables)
}
// If the flag is empty, look for the value in the first written line of the file
if terraformVarsPath == "" {
terraformVarsPathRegex := regexp.MustCompile(`(?m)^\s*// kics_terraform_vars: ([\w/\\.:-]+)\r?\n`)
terraformVarsPathMatch := terraformVarsPathRegex.FindStringSubmatch(fileContent)
if terraformVarsPathMatch != nil {
// The file declares a path to the variables file, so use it as the terraform vars path
terraformVarsPath = terraformVarsPathMatch[1]
// If the path contains ":", assume it is an absolute path
if !strings.Contains(terraformVarsPath, ":") {
// Otherwise, prepend the current folder path so that the comment path can be relative
terraformVarsPath = filepath.Join(currentPath, terraformVarsPath)
}
}
}
// At this point, a non-empty terraformVarsPath came either from the flag
// or from the first written line of the file
if terraformVarsPath != "" {
_, err = os.Stat(terraformVarsPath)
if err != nil {
log.Trace().Msgf("%s file not found", terraformVarsPath)
} else {
variables, errInputVariables := getInputVariablesFromFile(terraformVarsPath)
if errInputVariables != nil {
log.Error().Msgf("Error getting values from %s", terraformVarsPath)
log.Err(errInputVariables)
} else {
mergeMaps(variablesMap, variables)
}
}
}
inputVariableMap["var"] = cty.ObjectVal(variablesMap)
}
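// Illustrative input sketch (hypothetical file content, not from the KICS
// source): the first written line of a scanned .tf file may point KICS at a
// variables file, e.g.
//
//	// kics_terraform_vars: ./environments/prod.tfvars
//	resource "aws_s3_bucket" "b" {}
//
// Relative paths are joined with the scanned file's directory; paths
// containing ":" are treated as absolute.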
package utils
import (
"crypto/rsa"
"crypto/x509"
"encoding/pem"
"errors"
"os"
"path/filepath"
"regexp"
"github.com/rs/zerolog/log"
)
type certInfo struct {
date [3]int
rsaKeyBytes int
}
// CheckCertificate verifies if the attribute 'certificate_body' refers to a .pem file and returns the matched path
func CheckCertificate(content string) string {
var re = regexp.MustCompile(`[0-9a-zA-Z-/\\_.]+\.pem`)
match := re.FindString(content)
return match
}
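// exampleCheckCertificate is an illustrative sketch (not part of the KICS
// source): the regex extracts the first .pem path found in the content.
func exampleCheckCertificate() string {
// returns "certs/server.pem"
return CheckCertificate(`certificate_body = file("certs/server.pem")`)
}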
func getCertificateInfo(filePath string) (certInfo, error) {
certPEM, err := os.ReadFile(filePath)
if err != nil {
return certInfo{}, err
}
block, _ := pem.Decode(certPEM)
if block == nil {
return certInfo{}, errors.New("failed to parse the certificate PEM")
}
cert, err := x509.ParseCertificate(block.Bytes)
if err != nil {
return certInfo{}, err
}
var certDate [3]int
certDate[0] = cert.NotAfter.Year()
certDate[1] = int(cert.NotAfter.Month())
certDate[2] = cert.NotAfter.Day()
var rsaBytes int
switch t := cert.PublicKey.(type) {
case *rsa.PublicKey:
rsaBytes = t.Size()
default:
rsaBytes = -1
}
return certInfo{date: certDate, rsaKeyBytes: rsaBytes}, nil
}
// AddCertificateInfo gets and adds certificate information of a certificate file
func AddCertificateInfo(path, content string) map[string]interface{} {
var filePath string
_, err := os.Stat(content)
if err != nil { // content is not a full valid path or is an incomplete path
log.Trace().Msgf("path to the certificate content is not a valid: %s", content)
filePath = filepath.Join(filepath.Dir(path), content)
} else { // content is a full valid path
filePath = content
}
date, err := getCertificateInfo(filePath)
if err == nil {
attributes := make(map[string]interface{})
attributes["file"] = filePath
attributes["expiration_date"] = date.date
if date.rsaKeyBytes != -1 {
attributes["rsa_key_bytes"] = date.rsaKeyBytes
}
return attributes
}
log.Error().Msgf("Failed to get certificate path %s: %s", filePath, err)
return nil
}
package yaml
import (
"bytes"
"github.com/Checkmarx/kics/v2/pkg/parser/utils"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/resolver/file"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
"gopkg.in/yaml.v3"
)
// Parser defines a parser type
type Parser struct {
resolvedFiles map[string]model.ResolvedFile
}
// Resolve - replaces or modifies in-memory content before parsing
func (p *Parser) Resolve(fileContent []byte, filename string, resolveReferences bool, maxResolverDepth int) ([]byte, error) {
// Resolve files passed as arguments with file resolver (e.g. file://)
res := file.NewResolver(yaml.Unmarshal, yaml.Marshal, p.SupportedExtensions())
resolvedFilesCache := make(map[string]file.ResolvedFile)
resolved := res.Resolve(fileContent, filename, 0, maxResolverDepth, resolvedFilesCache, resolveReferences)
p.resolvedFiles = res.ResolvedFiles
if len(res.ResolvedFiles) == 0 {
return fileContent, nil
}
return resolved, nil
}
// Parse parses a yaml/yml file and returns it as a Document
func (p *Parser) Parse(filePath string, fileContent []byte) ([]model.Document, []int, error) {
model.NewIgnore.Reset()
var documents []model.Document
dec := yaml.NewDecoder(bytes.NewReader(fileContent))
doc := emptyDocument()
for dec.Decode(doc) == nil {
if len(*doc) > 0 {
documents = append(documents, *doc)
}
doc = emptyDocument()
}
if len(documents) == 0 {
return nil, []int{}, errors.Wrap(errors.New("invalid yaml"), "failed to parse yaml")
}
linesToIgnore := model.NewIgnore.GetLines()
return convertKeysToString(addExtraInfo(documents, filePath)), linesToIgnore, nil
}
// convertKeysToString goes through every document to convert map[interface{}]interface{}
// to map[string]interface{}
func convertKeysToString(docs []model.Document) []model.Document {
documents := make([]model.Document, 0, len(docs))
for _, doc := range docs {
for key, value := range doc {
doc[key] = convert(value)
}
documents = append(documents, doc)
}
return documents
}
// convert goes recursively through the keys in the given value and converts nested maps type of map[interface{}]interface{}
// to map[string]interface{}
func convert(value interface{}) interface{} {
switch t := value.(type) {
case map[interface{}]interface{}:
mapStr := map[string]interface{}{}
for key, val := range t {
if t, ok := key.(string); ok {
mapStr[t] = convert(val)
}
}
return mapStr
case []interface{}:
for key, val := range t {
t[key] = convert(val)
}
case model.Document:
for key, val := range t {
t[key] = convert(val)
}
}
return value
}
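// exampleConvert is an illustrative sketch (not part of the KICS source):
// convert normalizes yaml-decoded maps so they can later be marshaled as JSON.
func exampleConvert() interface{} {
raw := map[interface{}]interface{}{
"name": "web",
"ports": []interface{}{80, 443},
}
// yields map[string]interface{}{"name": "web", "ports": []interface{}{80, 443}}
return convert(raw)
}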
// SupportedExtensions returns extensions supported by this parser, which are yaml and yml extension
func (p *Parser) SupportedExtensions() []string {
return []string{".yaml", ".yml"}
}
// SupportedTypes returns the types supported by this parser (ansible, cloudformation, kubernetes, and the other yaml-based platforms)
func (p *Parser) SupportedTypes() map[string]bool {
return map[string]bool{
"ansible": true,
"cloudformation": true,
"kubernetes": true,
"crossplane": true,
"knative": true,
"openapi": true,
"googledeploymentmanager": true,
"dockercompose": true,
"pulumi": true,
"serverlessfw": true,
"cicd": true,
}
}
// GetKind returns YAML constant kind
func (p *Parser) GetKind() model.FileKind {
return model.KindYAML
}
func processCertContent(elements map[string]interface{}, content, filePath string) {
var certInfo map[string]interface{}
if content != "" {
certInfo = utils.AddCertificateInfo(filePath, content)
if certInfo != nil {
elements["certificate"] = certInfo
}
}
}
func processElements(elements map[string]interface{}, filePath string) {
if elements["certificate"] != nil {
processCertContent(elements, utils.CheckCertificate(elements["certificate"].(string)), filePath)
}
}
func addExtraInfo(documents []model.Document, filePath string) []model.Document {
for _, documentPlaybooks := range documents { // iterate over documents
if playbooks, ok := documentPlaybooks["playbooks"]; ok {
processPlaybooks(playbooks, filePath)
}
}
return documents
}
func processPlaybooks(playbooks interface{}, filePath string) {
sliceResources, ok := playbooks.([]interface{})
if !ok { // prevent panic if playbooks is not a slice
log.Warn().Msgf("Failed to parse playbooks: %s", filePath)
return
}
for _, resources := range sliceResources { // iterate over playbooks
processPlaybooksElements(resources, filePath)
}
}
func processPlaybooksElements(resources interface{}, filePath string) {
mapResources, ok := resources.(map[string]interface{})
if !ok {
log.Warn().Msgf("Failed to parse playbooks elements: %s", filePath)
return
}
for _, value := range mapResources {
mapValue, ok := value.(map[string]interface{})
if !ok {
continue
}
processElements(mapValue, filePath)
}
}
// GetCommentToken return the comment token of YAML - #
func (p *Parser) GetCommentToken() string {
return "#"
}
// StringifyContent converts original content into string formatted version
func (p *Parser) StringifyContent(content []byte) (string, error) {
return string(content), nil
}
func emptyDocument() *model.Document {
return &model.Document{}
}
// GetResolvedFiles returns resolved files
func (p *Parser) GetResolvedFiles() map[string]model.ResolvedFile {
return p.resolvedFiles
}
package printer
import (
"io"
"os"
"path/filepath"
"strings"
consoleHelpers "github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/gookit/color"
"github.com/pkg/errors"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
)
// NoColor - disables ANSI color codes
func NoColor(opt interface{}, changed bool) error {
noColor := opt.(bool)
if noColor {
color.Disable()
consoleLogger.NoColor = true
}
return nil
}
// Verbose - redirects log entries to stdout
func Verbose(opt interface{}, changed bool) error {
verbose := opt.(bool)
if verbose {
consoleLogger = zerolog.ConsoleWriter{Out: os.Stdout}
outConsoleLogger = os.Stdout
}
return nil
}
// Silent - disables stdout output
func Silent(opt interface{}) error {
silent := opt.(bool)
if silent {
color.SetOutput(io.Discard)
os.Stdout = nil
log.Logger = log.Output(zerolog.MultiLevelWriter(io.Discard, outFileLogger.(io.Writer)))
}
return nil
}
// CI - enables only log messages to CLI output
func CI(opt interface{}) error {
ci := opt.(bool)
if ci {
color.SetOutput(io.Discard)
log.Logger = log.Output(zerolog.MultiLevelWriter(outConsoleLogger, outFileLogger.(io.Writer)))
os.Stdout = nil
}
return nil
}
// LogFormat - configures the log format (JSON, pretty).
func LogFormat(logFormat string) error {
if logFormat == constants.LogFormatJSON {
log.Logger = log.Output(zerolog.MultiLevelWriter(outConsoleLogger, loggerFile.(io.Writer)))
outFileLogger = loggerFile
outConsoleLogger = os.Stdout
} else if logFormat == constants.LogFormatPretty {
fileLogger = consoleHelpers.CustomConsoleWriter(&zerolog.ConsoleWriter{Out: loggerFile.(io.Writer), NoColor: true})
log.Logger = log.Output(zerolog.MultiLevelWriter(consoleLogger, fileLogger))
outFileLogger = fileLogger
outConsoleLogger = zerolog.ConsoleWriter{Out: os.Stdout, NoColor: true}
} else {
return errors.New("invalid log format")
}
return nil
}
// LogPath - sets the log files location
func LogPath(opt interface{}, changed bool) error {
logPath := opt.(string)
var err error
if !changed {
if loggerFile == nil {
loggerFile = io.Discard
return nil
}
return nil
}
if logPath == "" {
logPath, err = constants.GetDefaultLogPath()
if err != nil {
return err
}
} else if filepath.Dir(logPath) != "." {
if createErr := os.MkdirAll(filepath.Dir(logPath), os.ModePerm); createErr != nil {
return createErr
}
}
loggerFile, err = os.OpenFile(logPath, os.O_CREATE|os.O_WRONLY, os.ModePerm)
if err != nil {
return err
}
return nil
}
// LogFile - enables writing to a log file
func LogFile(opt interface{}, changed bool) error {
logFile := opt.(bool)
if logFile {
logPath, err := constants.GetDefaultLogPath()
if err != nil {
return err
}
loggerFile, err = os.OpenFile(filepath.Clean(logPath), os.O_CREATE|os.O_WRONLY, os.ModePerm)
if err != nil {
return err
}
fileLogger = consoleHelpers.CustomConsoleWriter(&zerolog.ConsoleWriter{Out: loggerFile.(io.Writer), NoColor: true})
}
return nil
}
// LogLevel - sets log level
func LogLevel(opt interface{}, changed bool) error {
logLevel := opt.(string)
switch strings.ToUpper(logLevel) {
case "TRACE":
zerolog.SetGlobalLevel(zerolog.TraceLevel)
case "DEBUG":
zerolog.SetGlobalLevel(zerolog.DebugLevel)
case "INFO":
zerolog.SetGlobalLevel(zerolog.InfoLevel)
case "WARN":
zerolog.SetGlobalLevel(zerolog.WarnLevel)
case "ERROR":
zerolog.SetGlobalLevel(zerolog.ErrorLevel)
case "FATAL":
zerolog.SetGlobalLevel(zerolog.FatalLevel)
}
return nil
}
// LogSink is an in-memory writer that collects log entries
type LogSink struct {
logs []string
}
// NewLogger returns a zerolog.Logger that writes to logs, or the global logger when logs is nil
func NewLogger(logs *LogSink) zerolog.Logger {
if logs == nil {
return log.Logger
}
return zerolog.New(logs)
}
// Write implements io.Writer by storing the log entry
func (l *LogSink) Write(p []byte) (n int, err error) {
l.logs = append(l.logs, string(p))
return len(p), nil
}
// Index returns the log entry stored at position i
func (l *LogSink) Index(i int) string {
return l.logs[i]
}
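// exampleLogSink is an illustrative sketch (not part of the KICS source):
// capturing log entries in memory, e.g. for assertions in tests.
func exampleLogSink() string {
sink := &LogSink{}
logger := NewLogger(sink)
logger.Info().Msg("scan started")
// returns the first captured JSON log line
return sink.Index(0)
}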
package printer
import (
"fmt"
"io"
"sort"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/utils"
consoleFlags "github.com/Checkmarx/kics/v2/internal/console/flags"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/gookit/color"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/spf13/pflag"
)
const (
charsLimitPerLine = 255
)
var (
optionsMap = map[string]func(opt interface{}, changed bool) error{
consoleFlags.CIFlag: func(opt interface{}, changed bool) error {
return nil
},
consoleFlags.LogFileFlag: LogFile,
consoleFlags.LogLevelFlag: LogLevel,
consoleFlags.LogPathFlag: LogPath,
consoleFlags.SilentFlag: func(opt interface{}, changed bool) error {
return nil
},
consoleFlags.VerboseFlag: Verbose,
consoleFlags.LogFormatFlag: func(opt interface{}, changed bool) error {
return nil
},
consoleFlags.NoColorFlag: NoColor,
}
optionsOrderMap = map[int]string{
1: consoleFlags.CIFlag,
2: consoleFlags.LogFileFlag,
3: consoleFlags.LogLevelFlag,
4: consoleFlags.LogPathFlag,
5: consoleFlags.SilentFlag,
6: consoleFlags.VerboseFlag,
7: consoleFlags.LogFormatFlag,
8: consoleFlags.NoColorFlag,
}
consoleLogger = zerolog.ConsoleWriter{Out: io.Discard}
fileLogger = zerolog.ConsoleWriter{Out: io.Discard}
outFileLogger interface{}
outConsoleLogger = io.Discard
loggerFile interface{}
initialized bool
)
// Printer will print console output with colors
// Critical is for critical severity results
// Medium is for medium severity results
// High is for high severity results
// Low is for low severity results
// Info is for info severity results
// Success is for successful prints
// Line is the color to print the line with the vulnerability
// minimal is a bool that, if true, will print the results output in a minimal version
type Printer struct {
Critical color.RGBColor
Medium color.RGBColor
High color.RGBColor
Low color.RGBColor
Info color.RGBColor
Success color.RGBColor
Line color.RGBColor
VersionMessage color.RGBColor
ContributionMessage color.RGBColor
minimal bool
}
// WordWrap wraps text at the specified number of words per line
func WordWrap(s, indentation string, limit int) string {
if strings.TrimSpace(s) == "" {
return s
}
wordSlice := strings.Fields(s)
var result string
// clamp the limit so a limit larger than the word count cannot panic on the slice below
if limit > len(wordSlice) {
limit = len(wordSlice)
}
for len(wordSlice) >= 1 {
result = result + indentation + strings.Join(wordSlice[:limit], " ") + "\r\n"
wordSlice = wordSlice[limit:]
if len(wordSlice) < limit {
limit = len(wordSlice)
}
}
return result
}
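// exampleWordWrap is an illustrative sketch (not part of the KICS source):
// wrapping a description at three words per line with tab indentation.
func exampleWordWrap() string {
// yields "\tS3 bucket allows\r\n\tpublic read access\r\n"
return WordWrap("S3 bucket allows public read access", "\t", 3)
}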
// PrintResult prints the summary results to the output
func PrintResult(summary *model.Summary, printer *Printer, usingCustomQueries bool) error {
log.Debug().Msg("helpers.PrintResult()")
fmt.Printf("\n\n")
for index := range summary.Queries {
idx := len(summary.Queries) - index - 1
if summary.Queries[idx].Severity == model.SeverityTrace {
continue
}
fmt.Printf(
"%s, Severity: %s, Results: %d\n",
printer.PrintBySev(summary.Queries[idx].QueryName, string(summary.Queries[idx].Severity)),
printer.PrintBySev(string(summary.Queries[idx].Severity), string(summary.Queries[idx].Severity)),
len(summary.Queries[idx].Files),
)
if summary.Queries[idx].Experimental {
fmt.Println("Note: this is an experimental query")
}
if !printer.minimal {
if summary.Queries[idx].CISDescriptionID != "" {
fmt.Printf("%s %s\n", printer.Bold("Description ID:"), summary.Queries[idx].CISDescriptionIDFormatted)
fmt.Printf("%s %s\n", printer.Bold("Title:"), summary.Queries[idx].CISDescriptionTitle)
fmt.Printf("%s %s\n", printer.Bold("Description:"), summary.Queries[idx].CISDescriptionTextFormatted)
} else {
fmt.Printf("%s %s\n", printer.Bold("Description:"), summary.Queries[idx].Description)
}
fmt.Printf("%s %s\n", printer.Bold("Platform:"), summary.Queries[idx].Platform)
if summary.Queries[idx].CWE != "" {
fmt.Printf("%s %s\n", printer.Bold("CWE:"), summary.Queries[idx].CWE)
}
// check whether the query documentation URL should be printed, based on custom queries usage and query ID validity
if !usingCustomQueries && validQueryID(summary.Queries[idx].QueryID) {
queryURLId := summary.Queries[idx].QueryID
queryURLPlatform := strings.ToLower(summary.Queries[idx].Platform)
if queryURLPlatform == "common" && strings.Contains(strings.ToLower(summary.Queries[idx].QueryName), "passwords and secrets") {
queryURLId = "a88baa34-e2ad-44ea-ad6f-8cac87bc7c71"
}
fmt.Printf("%s %s\n\n",
printer.Bold("Learn more about this vulnerability:"),
fmt.Sprintf("https://docs.kics.io/latest/queries/%s-queries/%s%s",
queryURLPlatform,
normalizeURLCloudProvider(summary.Queries[idx].CloudProvider),
queryURLId))
}
}
printFiles(&summary.Queries[idx], printer)
}
fmt.Printf("\nResults Summary:\n")
printSeverityCounter(model.SeverityCritical, summary.SeveritySummary.SeverityCounters[model.SeverityCritical], printer.Critical)
printSeverityCounter(model.SeverityHigh, summary.SeveritySummary.SeverityCounters[model.SeverityHigh], printer.High)
printSeverityCounter(model.SeverityMedium, summary.SeveritySummary.SeverityCounters[model.SeverityMedium], printer.Medium)
printSeverityCounter(model.SeverityLow, summary.SeveritySummary.SeverityCounters[model.SeverityLow], printer.Low)
printSeverityCounter(model.SeverityInfo, summary.SeveritySummary.SeverityCounters[model.SeverityInfo], printer.Info)
fmt.Printf("TOTAL: %d\n\n", summary.SeveritySummary.TotalCounter)
log.Info().Msgf("Scanned Files: %d", summary.ScannedFiles)
log.Info().Msgf("Parsed Files: %d", summary.ParsedFiles)
log.Info().Msgf("Scanned Lines: %d", summary.ScannedFilesLines)
log.Info().Msgf("Parsed Lines: %d", summary.ParsedFilesLines)
log.Info().Msgf("Ignored Lines: %d", summary.IgnoredFilesLines)
log.Info().Msgf("Queries loaded: %d", summary.TotalQueries)
log.Info().Msgf("Queries failed to execute: %d", summary.FailedToExecuteQueries)
log.Info().Msg("Inspector stopped")
return nil
}
func printSeverityCounter(severity string, counter int, printColor color.RGBColor) {
fmt.Printf("%s: %d\n", printColor.Sprint(severity), counter)
}
func printFiles(query *model.QueryResult, printer *Printer) {
for fileIdx := range query.Files {
fmt.Printf("\t%s %s:%s\n", printer.PrintBySev(fmt.Sprintf("[%d]:", fileIdx+1), string(query.Severity)),
query.Files[fileIdx].FileName, printer.Success.Sprint(query.Files[fileIdx].Line))
if !printer.minimal {
fmt.Println()
for _, line := range *query.Files[fileIdx].VulnLines {
if len(line.Line) > charsLimitPerLine {
line.Line = line.Line[:charsLimitPerLine]
}
if line.Position == query.Files[fileIdx].Line {
printer.Line.Printf("\t\t%03d: %s\n", line.Position, line.Line)
} else {
fmt.Printf("\t\t%03d: %s\n", line.Position, line.Line)
}
}
fmt.Print("\n\n")
}
}
}
// SetupPrinter - configures stdout and log options with given FlagSet
func SetupPrinter(flags *pflag.FlagSet) error {
err := validateFlags()
if err != nil {
return err
}
keys := make([]int, 0, len(optionsOrderMap))
for k := range optionsOrderMap {
keys = append(keys, k)
}
sort.Ints(keys)
for _, key := range keys {
f := flags.Lookup(optionsOrderMap[key])
switch f.Value.Type() {
case "string":
value := f.Value.String()
err = optionsMap[optionsOrderMap[key]](value, f.Changed)
if err != nil {
return err
}
case "bool":
value, errBool := strconv.ParseBool(f.Value.String())
if errBool != nil {
return errBool
}
err = optionsMap[optionsOrderMap[key]](value, f.Changed)
if err != nil {
return err
}
}
}
// LogFormat needs to be the last option
logFormat := strings.ToLower(consoleFlags.GetStrFlag(consoleFlags.LogFormatFlag))
err = LogFormat(logFormat)
if err != nil {
return err
}
err = Silent(consoleFlags.GetBoolFlag(consoleFlags.SilentFlag))
if err != nil {
return err
}
err = CI(consoleFlags.GetBoolFlag(consoleFlags.CIFlag))
if err != nil {
return err
}
initialized = true
return nil
}
// IsInitialized returns true if printer is ready, false otherwise
func IsInitialized() bool {
return initialized
}
// NewPrinter initializes a new Printer
func NewPrinter(minimal bool) *Printer {
return &Printer{
Critical: color.HEX("#ff0000"),
Medium: color.HEX("#ff7213"),
High: color.HEX("#bb2124"),
Low: color.HEX("#edd57e"),
Success: color.HEX("#22bb33"),
Info: color.HEX("#5bc0de"),
Line: color.HEX("#f0ad4e"),
VersionMessage: color.HEX("#ff9913"),
ContributionMessage: color.HEX("ffe313"),
minimal: minimal,
}
}
// PrintBySev will print the output with the specific severity color given the severity of the result
func (p *Printer) PrintBySev(content, sev string) string {
switch strings.ToUpper(sev) {
case model.SeverityCritical:
return p.Critical.Sprintf("%s", content)
case model.SeverityHigh:
return p.High.Sprintf("%s", content)
case model.SeverityMedium:
return p.Medium.Sprintf("%s", content)
case model.SeverityLow:
return p.Low.Sprintf("%s", content)
case model.SeverityInfo:
return p.Info.Sprintf("%s", content)
}
return content
}
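// printBySevExample is an illustrative sketch (not part of the original source):
// PrintBySev picks the color configured for the result severity, so the same
// content renders differently per severity, and unknown severities pass through unchanged.
func printBySevExample() {
p := NewPrinter(false)
fmt.Println(p.PrintBySev("Passwords And Secrets", "HIGH")) // HIGH color
fmt.Println(p.PrintBySev("Passwords And Secrets", "LOW"))  // LOW color
fmt.Println(p.Bold("Description:"))                        // bold text
}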
// Bold returns the output in a bold format
func (p *Printer) Bold(content string) string {
return color.Bold.Sprintf("%s", content)
}
func validQueryID(queryID string) bool {
if queryID == "" {
return false
}
return utils.ValidateUUID(queryID)
}
func normalizeURLCloudProvider(cloudProvider string) string {
cloudProvider = strings.ToLower(cloudProvider)
if cloudProvider == "common" {
cloudProvider = ""
} else if cloudProvider != "" {
cloudProvider += "/"
}
return cloudProvider
}
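// docsURLExample is an illustrative sketch (not part of the original source)
// of the documentation URL assembled in PrintResult: the normalized cloud
// provider becomes a path segment (empty for "common") before the query ID.
func docsURLExample() string {
// e.g. https://docs.kics.io/latest/queries/terraform-queries/aws/<query-id>
return fmt.Sprintf("https://docs.kics.io/latest/queries/%s-queries/%s%s",
"terraform", normalizeURLCloudProvider("AWS"), "a88baa34-e2ad-44ea-ad6f-8cac87bc7c71")
}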
package printer
import (
"time"
consoleFlags "github.com/Checkmarx/kics/v2/internal/console/flags"
"github.com/rs/zerolog"
)
func validateFlags() error {
verboseFlag := consoleFlags.GetBoolFlag(consoleFlags.VerboseFlag)
silentFlag := consoleFlags.GetBoolFlag(consoleFlags.SilentFlag)
ciFlag := consoleFlags.GetBoolFlag(consoleFlags.CIFlag)
if silentFlag && verboseFlag {
return consoleFlags.FormatNewError(consoleFlags.SilentFlag, consoleFlags.VerboseFlag)
}
if verboseFlag && ciFlag {
return consoleFlags.FormatNewError(consoleFlags.VerboseFlag, consoleFlags.CIFlag)
}
if silentFlag && ciFlag {
return consoleFlags.FormatNewError(consoleFlags.SilentFlag, consoleFlags.CIFlag)
}
return nil
}
// PrintScanDuration prints the scan duration
func PrintScanDuration(logger *zerolog.Logger, elapsed time.Duration) {
if consoleFlags.GetBoolFlag(consoleFlags.CIFlag) {
logger.Info().Msgf("Scan duration: %vms\n", elapsed.Milliseconds())
} else {
logger.Info().Msgf("Scan duration: %v\n", elapsed)
}
}
package circle
import (
"fmt"
"io"
"time"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/cheggaaa/pb/v3"
)
const (
barWidth = 0
sleepTimeout = 100
)
// ProgressBar is a struct that holds the required fields for
// a Circle Progress Bar
type ProgressBar struct {
label string
pBar *pb.ProgressBar
close func() error
}
// NewProgressBar creates a new instance of a Circle Progress Bar
func NewProgressBar(label string, silent bool) ProgressBar {
newPb := pb.New64(constants.MaxInteger)
tmp := fmt.Sprintf(`{{ %q }} {{(cycle . "\\" "-" "|" "/" "-" "|" )}}`, label)
newPb.SetWidth(barWidth)
newPb.SetTemplateString(tmp)
if silent {
newPb.SetWriter(io.Discard)
}
newPb.Start()
return ProgressBar{
label: label,
pBar: newPb,
close: func() error {
newPb.Finish()
return nil
},
}
}
// Start spins the Circle Progress Bar; the loop never returns,
// so it should run in its own goroutine
func (p ProgressBar) Start() {
for {
p.pBar.Increment()
// lower cpu usage from infinite loop
time.Sleep(time.Millisecond * sleepTimeout)
}
}
// Close stops the Circle Progress Bar and
// changes the template to done
func (p ProgressBar) Close() error {
p.pBar.SetTemplateString(fmt.Sprintf("%sDone", p.label))
return p.close()
}
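// circleExample is an illustrative sketch (not part of the original source):
// Start spins forever, so it runs in its own goroutine while the caller
// works, and Close switches the template to the "Done" label.
func circleExample() error {
bar := NewProgressBar("Scanning: ", false)
go bar.Start()
// ... perform the long-running work here ...
return bar.Close()
}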
package counter
import (
"io"
"sync"
"github.com/cheggaaa/pb/v3"
"github.com/rs/zerolog/log"
)
// ProgressBar is a struct that holds the required fields for
// a Counter Progress Bar
type ProgressBar struct {
label string
total int64
currentProgress int64
progress chan int64
pBar *pb.ProgressBar
close func() error
wg *sync.WaitGroup
}
const (
barWidth = 80
)
// NewProgressBar creates a new instance of a Counter Progress Bar
func NewProgressBar(label string, total int64, progress chan int64, wg *sync.WaitGroup, silent bool) ProgressBar {
newPb := pb.New64(total)
newPb.SetMaxWidth(barWidth)
newPb.Set("prefix", label)
newPb.SetTemplateString(`{{string . "prefix"}}{{bar . }} {{percent . }}`)
if silent {
newPb.SetWriter(io.Discard)
}
newPb.Start()
return ProgressBar{
label: label,
total: total,
progress: progress,
pBar: newPb,
wg: wg,
close: func() error {
newPb.Finish()
return nil
},
}
}
// Start initializes the Counter Progress Bar
func (p ProgressBar) Start() {
defer func() {
err := p.Close()
if err != nil {
log.Error().Msgf("failed to stop progress bar %v", err)
}
p.wg.Done()
}()
for {
newProgress, ok := <-p.progress
if !ok { // channel closed, nothing left to track
break
}
p.currentProgress += newProgress
p.pBar.Increment()
if p.currentProgress >= p.pBar.Total() {
break
}
}
}
// Close stops the Counter Progress Bar
func (p ProgressBar) Close() error { return p.close() }
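// counterExample is an illustrative sketch (not part of the original source)
// of the intended wiring: increments are sent on the channel and Start exits
// once the channel closes or the total is reached, releasing the WaitGroup.
func counterExample() {
progress := make(chan int64)
wg := &sync.WaitGroup{}
wg.Add(1)
bar := NewProgressBar("Files: ", 3, progress, wg, false)
go bar.Start()
for i := 0; i < 3; i++ {
progress <- 1
}
close(progress)
wg.Wait() // Start calls wg.Done after closing the bar
}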
package progress
import (
"sync"
"github.com/Checkmarx/kics/v2/pkg/progress/circle"
"github.com/Checkmarx/kics/v2/pkg/progress/counter"
)
// PBar is the interface for the types of available progress bars (Circle, Counter)
// Start initializes the Progress Bar execution
// Close stops the Progress Bar execution
type PBar interface {
Start()
Close() error
}
// PbBuilder is the struct that contains the progress bar Builders
// Silent is set to true when all progress bars should be silent
type PbBuilder struct {
Silent bool
}
// InitializePbBuilder creates an instance of a PbBuilder
func InitializePbBuilder(noProgress, ci, silentFlag bool) *PbBuilder {
return &PbBuilder{
Silent: noProgress || ci || silentFlag,
}
}
// BuildCounter builds and returns a Counter Progress Bar
func (i *PbBuilder) BuildCounter(label string, total int, wg *sync.WaitGroup, progressChannel chan int64) PBar {
return counter.NewProgressBar(label, int64(total), progressChannel, wg, i.Silent)
}
// BuildCircle builds and returns a Circle Progress Bar
func (i *PbBuilder) BuildCircle(label string) PBar {
return circle.NewProgressBar(label, i.Silent)
}
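// builderExample is an illustrative sketch (not part of the original source):
// the builder decides once whether bars should be silent, then hands out
// either kind behind the common PBar interface.
func builderExample() error {
builder := InitializePbBuilder(false, false, false)
bar := builder.BuildCircle("Preparing queries: ")
go bar.Start()
// ... perform work ...
return bar.Close()
}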
package remediation
import (
"encoding/json"
"os"
"path/filepath"
"sort"
"strings"
"github.com/rs/zerolog/log"
)
// Report includes all query results
type Report struct {
Queries []Query `json:"queries"`
}
// Query includes all the files that present a result related to the queryID
type Query struct {
Files []File `json:"files"`
QueryID string `json:"query_id"`
}
// File presents the result information related to the file
type File struct {
FilePath string `json:"file_name"`
Line int `json:"line"`
Remediation string `json:"remediation"`
RemediationType string `json:"remediation_type"`
SimilarityID string `json:"similarity_id"`
SearchKey string `json:"search_key"`
ExpectedValue string `json:"expected_value"`
ActualValue string `json:"actual_value"`
}
// Remediation presents all the relevant information for the fix
type Remediation struct {
Line int
Remediation string
SimilarityID string
QueryID string
SearchKey string
ExpectedValue string
ActualValue string
}
// Set includes all the replacements and additions related to a file
type Set struct {
Replacement []Remediation
Addition []Remediation
}
// RemediateFile applies a remediation Set to the file: replacements first, then additions in descending line order
func (s *Summary) RemediateFile(filePath string, remediationSet Set, openAPIResolveReferences bool, maxResolverDepth int) error {
filePath = filepath.Clean(filePath)
content, err := os.ReadFile(filePath)
if err != nil {
log.Error().Msgf("failed to read file: %s", err)
return err
}
lines := strings.Split(string(content), "\n")
// do replacements first
if len(remediationSet.Replacement) > 0 {
for i := range remediationSet.Replacement {
r := remediationSet.Replacement[i]
remediatedLines := replacement(&r, lines)
if len(remediatedLines) > 0 && willRemediate(remediatedLines, filePath, &r, openAPIResolveReferences, maxResolverDepth) {
lines = s.writeRemediation(remediatedLines, lines, filePath, r.SimilarityID)
}
}
}
// do additions after
if len(remediationSet.Addition) > 0 {
// descending order
sort.Slice(remediationSet.Addition, func(i, j int) bool {
return remediationSet.Addition[i].Line > remediationSet.Addition[j].Line
})
for i := range remediationSet.Addition {
a := remediationSet.Addition[i]
remediatedLines := addition(&a, &lines)
if len(remediatedLines) > 0 && willRemediate(remediatedLines, filePath, &a, openAPIResolveReferences, maxResolverDepth) {
lines = s.writeRemediation(remediatedLines, lines, filePath, a.SimilarityID)
}
}
}
return nil
}
// ReplacementInfo presents the relevant information to do the replacement
type ReplacementInfo struct {
Before string `json:"before"`
After string `json:"after"`
}
func replacement(r *Remediation, lines []string) []string {
originalLine := lines[r.Line-1]
var replacement ReplacementInfo
err := json.Unmarshal([]byte(r.Remediation), &replacement)
if err != nil || replacement == (ReplacementInfo{}) {
return []string{}
}
remediated := strings.Replace(lines[r.Line-1], replacement.Before, replacement.After, 1)
if originalLine == remediated {
log.Info().Msgf("remediation '%s' is already done", r.SimilarityID)
return []string{}
}
// replace the original line with remediation
lines[r.Line-1] = remediated
return lines
}
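// replacementExample is an illustrative sketch (not part of the original
// source): the remediation payload is a JSON-encoded ReplacementInfo, and
// replacement swaps "before" for "after" on the 1-based vulnerable line.
func replacementExample() []string {
r := &Remediation{
Line:        1,
Remediation: `{"before": "http", "after": "https"}`,
}
lines := []string{`protocol = "http"`}
return replacement(r, lines) // []string{`protocol = "https"`}
}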
func addition(r *Remediation, lines *[]string) []string {
fatherNumberLine := r.Line - 1
if len(*lines) <= fatherNumberLine+1 {
return []string{}
}
firstLine := strings.Split(r.Remediation, "\n")[0]
if strings.TrimSpace((*lines)[fatherNumberLine+1]) == strings.TrimSpace(firstLine) {
log.Info().Msgf("remediation '%s' is already done", r.SimilarityID)
return []string{}
}
begin := make([]string, len(*lines))
end := make([]string, len(*lines))
copy(begin, *lines)
copy(end, *lines)
begin = begin[:fatherNumberLine+1]
end = end[fatherNumberLine+1:]
before := getBefore((*lines)[fatherNumberLine+1])
remediation := begin
remediation = append(remediation, before+r.Remediation)
remediation = append(remediation, end...)
return remediation
}
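// additionExample is an illustrative sketch (not part of the original source):
// addition inserts the remediation block right after the parent line, reusing
// the indentation of the line that follows it.
func additionExample() []string {
r := &Remediation{Line: 1, Remediation: "encrypted = true"}
lines := []string{
`resource "aws_ebs_volume" "example" {`,
`  size = 40`,
`}`,
}
// the new "  encrypted = true" line lands between the first two lines
return addition(r, &lines)
}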
const (
FilePermMode = 0600 // File permissions mode with read and write only
)
func (s *Summary) writeRemediation(remediatedLines, lines []string, filePath, similarityID string) []string {
remediated := []byte(strings.Join(remediatedLines, "\n"))
mode := os.FileMode(FilePermMode)
if err := os.WriteFile(filePath, remediated, mode); err != nil {
log.Error().Msgf("failed to write file: %s", err)
return lines
}
log.Info().Msgf("file '%s' was remediated with '%s'", filePath, similarityID)
s.ActualRemediationDoneNumber++
return remediatedLines
}
package remediation
import (
"context"
"encoding/json"
"errors"
"time"
"github.com/Checkmarx/kics/v2/pkg/engine"
"github.com/Checkmarx/kics/v2/pkg/kics"
"github.com/Checkmarx/kics/v2/pkg/minified"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/scan"
"github.com/open-policy-agent/opa/topdown"
"github.com/Checkmarx/kics/v2/internal/console/flags"
"github.com/Checkmarx/kics/v2/internal/tracker"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/parser"
buildahParser "github.com/Checkmarx/kics/v2/pkg/parser/buildah"
dockerParser "github.com/Checkmarx/kics/v2/pkg/parser/docker"
protoParser "github.com/Checkmarx/kics/v2/pkg/parser/grpc"
jsonParser "github.com/Checkmarx/kics/v2/pkg/parser/json"
terraformParser "github.com/Checkmarx/kics/v2/pkg/parser/terraform"
yamlParser "github.com/Checkmarx/kics/v2/pkg/parser/yaml"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/open-policy-agent/opa/rego"
"github.com/rs/zerolog/log"
)
type runQueryInfo struct {
payload model.Documents
query *engine.PreparedQuery
inspector *engine.Inspector
tmpFile string
files model.FileMetadatas
}
// scanTmpFile scans a temporary file against a specific query
func scanTmpFile(
tmpFile, queryID string,
remediated []byte,
openAPIResolveReferences bool,
maxResolverDepth int) ([]model.Vulnerability, error) {
// get payload
files, err := getPayload(tmpFile, remediated, openAPIResolveReferences, maxResolverDepth)
if err != nil {
log.Err(err).Msg("failed to get payload")
return []model.Vulnerability{}, err
}
if len(files) == 0 {
log.Error().Msg("failed to get payload")
return []model.Vulnerability{}, errors.New("failed to get payload")
}
payload := files.Combine(false)
// init scan
inspector, err := initScan(queryID)
if err != nil {
log.Err(err).Msg("failed to initialize scan")
return []model.Vulnerability{}, err
}
// load query
query, err := loadQuery(inspector, queryID)
if err != nil {
log.Err(err).Msg("failed to load query")
return []model.Vulnerability{}, err
}
// run query
info := &runQueryInfo{
payload: payload,
query: query,
inspector: inspector,
tmpFile: tmpFile,
files: files,
}
return runQuery(info), nil
}
// getPayload gets the payload of a file
func getPayload(filePath string, content []byte, openAPIResolveReferences bool, maxResolverDepth int) (model.FileMetadatas, error) {
ext, _ := utils.GetExtension(filePath)
var p []*parser.Parser
var err error
switch ext {
case ".dockerfile", "Dockerfile", "possibleDockerfile", ".ubi8", ".debian":
p, err = parser.NewBuilder().Add(&dockerParser.Parser{}).Build([]string{""}, []string{""})
case ".tf":
p, err = parser.NewBuilder().Add(terraformParser.NewDefault()).Build([]string{""}, []string{""})
case ".proto":
p, err = parser.NewBuilder().Add(&protoParser.Parser{}).Build([]string{""}, []string{""})
case ".yaml", ".yml":
p, err = parser.NewBuilder().Add(&yamlParser.Parser{}).Build([]string{""}, []string{""})
case ".json":
p, err = parser.NewBuilder().Add(&jsonParser.Parser{}).Build([]string{""}, []string{""})
case ".sh":
p, err = parser.NewBuilder().Add(&buildahParser.Parser{}).Build([]string{""}, []string{""})
}
if err != nil {
log.Error().Msgf("failed to get parser: %s", err)
return model.FileMetadatas{}, err
}
if len(p) == 0 {
log.Info().Msg("failed to get parser")
return model.FileMetadatas{}, errors.New("failed to get parser")
}
isMinified := minified.IsMinified(filePath, content)
documents, er := p[0].Parse(filePath, content, openAPIResolveReferences, isMinified, maxResolverDepth)
if er != nil {
log.Error().Msgf("failed to parse file '%s': %s", filePath, er)
return model.FileMetadatas{}, er
}
var files model.FileMetadatas
for _, document := range documents.Docs {
_, err = json.Marshal(document)
if err != nil {
continue
}
file := model.FileMetadata{
FilePath: filePath,
Document: kics.PrepareScanDocument(document, documents.Kind),
LineInfoDocument: document,
Commands: p[0].CommentsCommands(filePath, content),
OriginalData: string(content),
LinesOriginalData: utils.SplitLines(string(content)),
IsMinified: documents.IsMinified,
}
files = append(files, file)
}
return files, nil
}
// runQuery runs a query and returns its results
func runQuery(r *runQueryInfo) []model.Vulnerability {
queryExecTimeout := time.Duration(flags.GetIntFlag(flags.QueryExecTimeoutFlag)) * time.Second
timeoutCtx, cancel := context.WithTimeout(context.Background(), queryExecTimeout)
defer cancel()
options := []rego.EvalOption{rego.EvalInput(r.payload)}
results, err := r.query.OpaQuery.Eval(timeoutCtx, options...)
if err != nil {
if topdown.IsCancel(err) {
log.Err(err).Msg("query evaluation timed out")
} else {
log.Err(err).Msg("failed to evaluate query")
}
}
ctx := context.Background()
queryCtx := &engine.QueryContext{
Ctx: ctx,
Query: r.query,
BaseScanPaths: []string{r.tmpFile},
Files: r.files.ToMap(),
}
timeoutCtxToDecode, cancelDecode := context.WithTimeout(context.Background(), queryExecTimeout)
defer cancelDecode()
decoded, err := r.inspector.DecodeQueryResults(queryCtx, timeoutCtxToDecode, results)
if err != nil {
log.Err(err).Msg("failed to decode query results")
}
return decoded
}
func initScan(queryID string) (*engine.Inspector, error) {
scanParams := &scan.Parameters{
QueriesPath: flags.GetMultiStrFlag(flags.QueriesPath),
Platform: flags.GetMultiStrFlag(flags.TypeFlag),
CloudProvider: flags.GetMultiStrFlag(flags.CloudProviderFlag),
LibrariesPath: flags.GetStrFlag(flags.LibrariesPath),
PreviewLines: flags.GetIntFlag(flags.PreviewLinesFlag),
QueryExecTimeout: flags.GetIntFlag(flags.QueryExecTimeoutFlag),
ExperimentalQueries: flags.GetBoolFlag(flags.ExperimentalQueriesFlag),
}
c := &scan.Client{
ScanParams: scanParams,
}
_, err := c.GetQueryPath()
if err != nil {
log.Err(err).Msg("failed to get query path")
return &engine.Inspector{}, err
}
queriesSource := source.NewFilesystemSource(
c.ScanParams.QueriesPath,
c.ScanParams.Platform,
c.ScanParams.CloudProvider,
c.ScanParams.LibrariesPath,
c.ScanParams.ExperimentalQueries)
includeQueries := source.IncludeQueries{
ByIDs: []string{queryID},
}
queryFilter := source.QueryInspectorParameters{
IncludeQueries: includeQueries,
}
t, err := tracker.NewTracker(c.ScanParams.PreviewLines)
if err != nil {
log.Err(err).Msg("failed to create tracker")
return &engine.Inspector{}, err
}
ctx := context.Background()
inspector, err := engine.NewInspector(ctx,
queriesSource,
engine.DefaultVulnerabilityBuilder,
t,
&queryFilter,
make(map[string]bool),
c.ScanParams.QueryExecTimeout,
c.ScanParams.UseOldSeverities,
false,
c.ScanParams.ParallelScanFlag,
c.ScanParams.KicsComputeNewSimID,
)
return inspector, err
}
func loadQuery(inspector *engine.Inspector, queryID string) (*engine.PreparedQuery, error) {
if len(inspector.QueryLoader.QueriesMetadata) == 1 {
queryOpa, err := inspector.QueryLoader.LoadQuery(context.Background(), &inspector.QueryLoader.QueriesMetadata[0])
if err != nil {
log.Err(err).Msgf("failed to load query %s", queryID)
return &engine.PreparedQuery{}, err
}
query := &engine.PreparedQuery{
OpaQuery: *queryOpa,
Metadata: inspector.QueryLoader.QueriesMetadata[0],
}
return query, nil
}
return &engine.PreparedQuery{}, errors.New("unable to load query " + queryID)
}
package remediation
import (
"os"
"path/filepath"
"regexp"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/rs/zerolog/log"
)
// Summary represents the information about the number of selected remediation and remediation done
type Summary struct {
SelectedRemediationNumber int
ActualRemediationDoneNumber int
}
// GetRemediationSets collects all the replacements and additions per file
func (s *Summary) GetRemediationSets(results Report, include []string) map[string]interface{} {
remediationSets := make(map[string]interface{})
vulns := getVulns(results)
if len(vulns) > 0 {
remediationSets = s.GetRemediationSetsFromVulns(vulns, include)
}
return remediationSets
}
func shouldRemediate(file *File, include []string) bool {
return file.Remediation != "" &&
file.RemediationType != "" &&
(include[0] == "all" || utils.Contains(file.SimilarityID, include)) &&
filepath.Ext(file.FilePath) == ".tf" // temporary: only Terraform files are supported
}
func getBefore(line string) string {
re := regexp.MustCompile(`^[\s-]*`)
return re.FindString(line)
}
// willRemediate verifies if the remediation actually removes the result
func willRemediate(
remediated []string,
originalFileName string,
remediation *Remediation,
openAPIResolveReferences bool,
maxResolverDepth int) bool {
originalFileName = filepath.Clean(originalFileName)
// create temporary file
tmpFile := filepath.Join(os.TempDir(), "temporary-remediation-"+utils.NextRandom()+"-"+filepath.Base(originalFileName))
f, err := os.OpenFile(tmpFile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
log.Error().Msgf("failed to open temporary file for remediation '%s': %s", remediation.SimilarityID, err)
return false
}
content := []byte(strings.Join(remediated, "\n"))
defer func(f *os.File) {
err = f.Close()
if err != nil {
log.Err(err).Msgf("failed to close file: %s", tmpFile)
}
}(f)
if _, err = f.Write(content); err != nil {
log.Error().Msgf("failed to write temporary file for remediation '%s': %s", remediation.SimilarityID, err)
return false
}
// scan the temporary file to verify if the remediation removed the result
results, err := scanTmpFile(tmpFile, remediation.QueryID, content, openAPIResolveReferences, maxResolverDepth)
if err != nil {
log.Error().Msgf("failed to get results of query %s: %s", remediation.QueryID, err)
return false
}
err = os.Remove(tmpFile)
if err != nil {
log.Err(err).Msgf("failed to remove temporary file %s", tmpFile)
}
return removedResult(results, remediation)
}
func removedResult(results []model.Vulnerability, remediation *Remediation) bool {
for i := range results {
result := results[i]
if result.SearchKey == remediation.SearchKey &&
result.KeyActualValue == remediation.ActualValue &&
result.KeyExpectedValue == remediation.ExpectedValue {
log.Info().Msgf("failed to remediate '%s'", remediation.SimilarityID)
return false
}
}
return true
}
// CreateTempFile creates a temporary file with the same content as the file at filePathCopyFrom
func CreateTempFile(filePathCopyFrom, tmpFilePath string) string {
filePathCopyFrom = filepath.Clean(filePathCopyFrom)
tmpFilePath = filepath.Clean(tmpFilePath)
f, err := os.OpenFile(tmpFilePath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
log.Error().Msgf("failed to open file '%s': %s", tmpFilePath, err)
return ""
}
content, err := os.ReadFile(filePathCopyFrom)
defer func(f *os.File) {
err = f.Close()
if err != nil {
log.Err(err).Msgf("failed to close file: %s", tmpFilePath)
}
}(f)
if err != nil {
log.Error().Msgf("failed to read file '%s': %s", filePathCopyFrom, err)
return ""
}
if _, err = f.Write(content); err != nil {
log.Error().Msgf("failed to write file '%s': %s", tmpFilePath, err)
return ""
}
return tmpFilePath
}
// GetRemediationSetsFromVulns collects all the replacements and additions per file from []model.Vulnerability
func (s *Summary) GetRemediationSetsFromVulns(vulnerabilities []model.Vulnerability, include []string) map[string]interface{} {
remediationSets := make(map[string]interface{})
for i := range vulnerabilities {
vuln := vulnerabilities[i]
file := File{
FilePath: vuln.FileName,
Line: vuln.Line,
Remediation: vuln.Remediation,
RemediationType: vuln.RemediationType,
SimilarityID: vuln.SimilarityID,
}
var remediationSet Set
if shouldRemediate(&file, include) {
s.SelectedRemediationNumber++
r := &Remediation{
Line: file.Line,
Remediation: file.Remediation,
SimilarityID: file.SimilarityID,
QueryID: vuln.QueryID,
SearchKey: vuln.SearchKey,
ExpectedValue: vuln.KeyExpectedValue,
ActualValue: vuln.KeyActualValue,
}
if file.RemediationType == "replacement" {
remediationSet.Replacement = append(remediationSet.Replacement, *r)
}
if file.RemediationType == "addition" {
remediationSet.Addition = append(remediationSet.Addition, *r)
}
if _, ok := remediationSets[file.FilePath]; !ok {
remediationSets[file.FilePath] = remediationSet
continue
}
updatedRemediationSet := remediationSets[file.FilePath].(Set)
updatedRemediationSet.Addition = append(updatedRemediationSet.Addition, remediationSet.Addition...)
updatedRemediationSet.Replacement = append(updatedRemediationSet.Replacement, remediationSet.Replacement...)
remediationSets[file.FilePath] = updatedRemediationSet
}
}
return remediationSets
}
func getVulns(results Report) []model.Vulnerability {
vulns := []model.Vulnerability{}
for i := range results.Queries {
query := results.Queries[i]
for j := range query.Files {
file := query.Files[j]
vuln := &model.Vulnerability{
FileName: file.FilePath,
Line: file.Line,
Remediation: file.Remediation,
RemediationType: file.RemediationType,
SimilarityID: file.SimilarityID,
QueryID: query.QueryID,
SearchKey: file.SearchKey,
KeyExpectedValue: file.ExpectedValue,
KeyActualValue: file.ActualValue,
}
vulns = append(vulns, *vuln)
}
}
return vulns
}
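// remediationSetsExample is an illustrative sketch (not part of the original
// source): a Report decoded from the KICS JSON results is grouped into
// per-file Sets, which RemediateFile then applies one file at a time.
func remediationSetsExample(results Report) {
summary := &Summary{}
sets := summary.GetRemediationSets(results, []string{"all"})
for filePath := range sets {
set := sets[filePath].(Set)
// 15 is an arbitrary resolver depth chosen for this sketch
_ = summary.RemediateFile(filePath, set, true, 15)
}
}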
package report
import (
"strings"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintASFFReport prints the ASFF report in the given path and filename with the given body
func PrintASFFReport(path, filename string, body interface{}) error {
if !strings.HasPrefix(filename, "asff-") {
filename = "asff-" + filename
}
if body != "" {
summary, err := getSummary(body)
if err != nil {
return err
}
body = reportModel.BuildASFF(&summary)
}
return ExportJSONReport(path, filename, body)
}
package report
import (
"strings"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintCodeClimateReport prints the code climate report in the given path and filename with the given body
func PrintCodeClimateReport(path, filename string, body interface{}) error {
if !strings.HasPrefix(filename, "codeclimate") {
filename = "codeclimate-" + filename
}
if body != "" {
summary, err := getSummary(body)
if err != nil {
return err
}
body = reportModel.BuildCodeClimateReport(&summary)
}
return ExportJSONReport(path, filename, body)
}
package report
import (
"encoding/json"
"encoding/xml"
"fmt"
"html/template"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/Checkmarx/kics/v2/pkg/model"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
"github.com/gocarina/gocsv"
"github.com/rs/zerolog/log"
)
var (
stringsSeverity = map[string]model.Severity{
"critical": model.AllSeverities[0],
"high": model.AllSeverities[1],
"medium": model.AllSeverities[2],
"low": model.AllSeverities[3],
"info": model.AllSeverities[4],
}
templateFuncs = template.FuncMap{
"lower": strings.ToLower,
"sprintf": fmt.Sprintf,
"severity": getSeverities,
"getCurrentTime": getCurrentTime,
"trimSpaces": trimSpaces,
"toString": toString,
}
)
func toString(value interface{}) string {
switch v := value.(type) {
case string:
return v
case int:
return strconv.Itoa(v)
default:
return fmt.Sprintf("%v", v)
}
}
func trimSpaces(value string) string {
return strings.TrimPrefix(value, " ")
}
func getSeverities(severity string) model.Severity {
return stringsSeverity[severity]
}
func getCurrentTime() string {
dt := time.Now()
return dt.Format("01/02/2006 15:04")
}
func fileCreationReport(path, filename string) {
log.Info().Str("fileName", filename).Msgf("Results saved to file %s", path)
}
func closeFile(path, filename string, file *os.File) {
err := file.Close()
if err != nil {
log.Err(err).Msgf("Failed to close file %s", path)
}
fileCreationReport(path, filename)
}
func getPlatforms(queries model.QueryResultSlice) string {
platforms := make([]string, 0)
alreadyAdded := make(map[string]string)
for idx := range queries {
if _, ok := alreadyAdded[queries[idx].Platform]; !ok {
alreadyAdded[queries[idx].Platform] = ""
platforms = append(platforms, queries[idx].Platform)
}
}
return strings.Join(platforms, ", ")
}
// ExportJSONReport - encodes a given body to a JSON file in a given filepath
func ExportJSONReport(path, filename string, body interface{}) error {
if !strings.Contains(filename, ".") {
filename += jsonExtension
}
fullPath := filepath.Join(path, filename)
f, err := os.OpenFile(filepath.Clean(fullPath), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
return err
}
defer closeFile(fullPath, filename, f)
encoder := json.NewEncoder(f)
encoder.SetIndent("", "\t")
return encoder.Encode(body)
}
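// exportExample is an illustrative sketch (not part of the original source):
// any JSON-serializable body can be exported, and the ".json" extension is
// appended only when the filename carries no extension at all.
func exportExample() error {
body := map[string]string{"status": "ok"}
return ExportJSONReport(".", "results", body) // writes ./results.json, tab-indented
}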
func getSummary(body interface{}) (model.Summary, error) {
var summary model.Summary
result, err := json.Marshal(body)
if err != nil {
return model.Summary{}, err
}
if err := json.Unmarshal(result, &summary); err != nil {
return model.Summary{}, err
}
return summary, nil
}
func exportXMLReport(path, filename string, body interface{}) error {
if !strings.HasSuffix(filename, ".xml") {
filename += ".xml"
}
fullPath := filepath.Join(path, filename)
f, err := os.OpenFile(filepath.Clean(fullPath), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
return err
}
defer closeFile(fullPath, filename, f)
if _, err = f.WriteString(xml.Header); err != nil {
log.Debug().Err(err).Msg("Failed to write XML header")
}
encoder := xml.NewEncoder(f)
encoder.Indent("", "\t")
return encoder.Encode(body)
}
func exportCSVReport(path, filename string, body []reportModel.CSVReport) error {
fullPath := filepath.Join(path, filename)
f, err := os.OpenFile(filepath.Clean(fullPath), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
return err
}
defer closeFile(fullPath, filename, f)
return gocsv.MarshalFile(&body, f)
}
package report
import (
"strings"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintCSVReport prints the CSV report in the given path and filename with the given body
func PrintCSVReport(path, filename string, body interface{}) error {
if !strings.HasSuffix(filename, ".csv") {
filename += ".csv"
}
var report []reportModel.CSVReport
if body != "" {
summary, err := getSummary(body)
if err != nil {
return err
}
report = reportModel.BuildCSVReport(&summary)
}
return exportCSVReport(path, filename, report)
}
package report
import (
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintCycloneDxReport prints the CycloneDX report in the given path and filename with the given body
func PrintCycloneDxReport(path, filename string, body interface{}) error {
filePaths := make(map[string]string)
if !strings.HasPrefix(filename, "cyclonedx-") {
filename = "cyclonedx-" + filename
}
if body != "" {
if s, ok := body.(*model.Summary); ok {
filePaths = s.FilePaths
}
summary, err := getSummary(body)
if err != nil {
return err
}
body = reportModel.BuildCycloneDxReport(&summary, filePaths)
}
return exportXMLReport(path, filename, body)
}
package report
import (
"strings"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintGitlabSASTReport creates a report file in the GitLab SAST format
func PrintGitlabSASTReport(path, filename string, body interface{}) error {
filename = strings.ReplaceAll(filename, ".glsast", "")
if !strings.HasSuffix(filename, jsonExtension) {
filename += jsonExtension
}
if !strings.HasPrefix(filename, "gl-sast-") {
filename = "gl-sast-" + filename
}
if body != "" {
summary, err := getSummary(body)
if err != nil {
return err
}
gitlabSASTReport := reportModel.NewGitlabSASTReport(summary.Times.Start, summary.Times.End)
for idxQuery := range summary.Queries {
for idxFile := range summary.Queries[idxQuery].Files {
gitlabSASTReport.BuildGitlabSASTVulnerability(&summary.Queries[idxQuery], &summary.Queries[idxQuery].Files[idxFile])
}
}
body = gitlabSASTReport
}
return ExportJSONReport(path, filename, body)
}
package report
import (
"bytes"
_ "embed" // used for embedding report static files
"html/template"
"os"
"path/filepath"
"strings"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/rs/zerolog/log"
"github.com/tdewolff/minify/v2"
minifyCSS "github.com/tdewolff/minify/v2/css"
minifyHtml "github.com/tdewolff/minify/v2/html"
minifyJS "github.com/tdewolff/minify/v2/js"
)
var (
//go:embed template/html/report.tmpl
htmlTemplate string
//go:embed template/html/report.css
cssTemplate string
//go:embed template/html/report.js
jsTemplate string
//go:embed template/html/github.svg
githubSVG string
//go:embed template/html/info.svg
infoSVG string
//go:embed template/html/vulnerability_fill.svg
vulnerabilityFillSVG string
//go:embed template/html/vulnerability_out.svg
vulnerabilityOutSVG string
)
const (
textHTML = "text/html"
)
var svgMap = map[string]string{
"github.svg": githubSVG,
"info.svg": infoSVG,
"vulnerability_fill.svg": vulnerabilityFillSVG,
"vulnerability_out.svg": vulnerabilityOutSVG,
}
func includeSVG(name string) template.HTML {
return template.HTML(svgMap[name]) //nolint
}
func includeCSS(name string) template.HTML {
minifier := minify.New()
minifier.AddFunc("text/css", minifyCSS.Minify)
cssMinified, err := minifier.String("text/css", cssTemplate)
if err != nil {
return ""
}
return template.HTML("<style>" + cssMinified + "</style>") //nolint
}
func includeJS(name string) template.HTML {
minifier := minify.New()
minifier.AddFunc("text/javascript", minifyJS.Minify)
jsMinified, err := minifier.String("text/javascript", jsTemplate)
if err != nil {
return ""
}
return template.HTML("<script>" + jsMinified + "</script>") //nolint
}
func getPaths(paths []string) string {
return strings.Join(paths, ", ")
}
func getVersion() string {
return constants.Version
}
// PrintHTMLReport creates a report file in HTML format
func PrintHTMLReport(path, filename string, body interface{}) error {
if !strings.HasSuffix(filename, ".html") {
filename += ".html"
}
templateFuncs["includeSVG"] = includeSVG
templateFuncs["includeCSS"] = includeCSS
templateFuncs["includeJS"] = includeJS
templateFuncs["getPaths"] = getPaths
templateFuncs["getPlatforms"] = getPlatforms
templateFuncs["getVersion"] = getVersion
fullPath := filepath.Join(path, filename)
t := template.Must(template.New("report.tmpl").Funcs(templateFuncs).Parse(htmlTemplate))
f, err := os.OpenFile(filepath.Clean(fullPath), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
if err != nil {
return err
}
defer closeFile(fullPath, filename, f)
var buffer bytes.Buffer
err = t.Execute(&buffer, body)
if err != nil {
return err
}
minifier := minify.New()
minifier.AddFunc(textHTML, minifyHtml.Minify)
minifier.Add(textHTML, &minifyHtml.Minifier{
KeepDocumentTags: true,
KeepEndTags: true,
KeepQuotes: true,
})
minifierWriter := minifier.Writer(textHTML, f)
defer func() {
if closeErr := minifierWriter.Close(); closeErr != nil {
log.Err(closeErr).Msg("Error closing file")
}
}()
_, err = minifierWriter.Write(buffer.Bytes())
return err
}
package report
import "github.com/Checkmarx/kics/v2/internal/constants"
const jsonExtension = ".json"
// PrintJSONReport prints the summary results to a JSON file
func PrintJSONReport(path, filename string, body interface{}) error {
if body != "" {
summary, err := getSummary(body)
if err != nil {
return err
}
for idx := range summary.Queries {
summary.Queries[idx].CISBenchmarkName = ""
summary.Queries[idx].CISBenchmarkVersion = ""
summary.Queries[idx].CISDescriptionID = ""
summary.Queries[idx].CISDescriptionText = ""
summary.Queries[idx].CISRationaleText = ""
}
summary.Version = constants.Version
body = summary
}
return ExportJSONReport(path, filename, body)
}
package report
import (
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintJUnitReport prints the JUnit report in the given path and filename with the given body
func PrintJUnitReport(path, filename string, body interface{}) error {
if !strings.HasPrefix(filename, "junit-") {
filename = "junit-" + filename
}
summary := model.Summary{}
if body != "" {
var err error
summary, err = getSummary(body)
if err != nil {
return err
}
}
jUnitReport := reportModel.NewJUnitReport(summary.Times.End.Sub(summary.Times.Start).String())
for idx := range summary.Queries {
jUnitReport.GenerateTestEntry(&summary.Queries[idx])
}
jUnitReport.FinishReport()
return exportXMLReport(path, filename, jUnitReport)
}
package model
import (
"fmt"
"os"
"strings"
"time"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/aws/aws-sdk-go-v2/aws"
"github.com/rs/zerolog/log"
)
// AwsAccountInfo contains all the relevant information of the user AWS account
type AwsAccountInfo struct {
AwsAccountID string
AwsRegion string
}
// AwsSecurityFinding contains all the relevant information to build a finding
type AwsSecurityFinding struct {
AwsAccountID string `json:"AwsAccountId"`
Compliance Compliance
CreatedAt string
Description string
GeneratorID string `json:"GeneratorId"`
ID string `json:"Id"`
ProductArn string
Remediation Remediation
Resources []Resource
SchemaVersion string
Severity Severity
Title string
Types []string
UpdatedAt string
CWE string
}
// AsffRecommendation includes the recommendation to avoid the finding
type AsffRecommendation struct {
Text string
}
// Remediation contains the recommendation
type Remediation struct {
Recommendation AsffRecommendation
}
// Resource contains the ID and the type of the target resource
type Resource struct {
ID string `json:"Id"`
Type string
}
// Severity contains the original severity (KICS severity) and the label severity (ASFF severity)
type Severity struct {
Original string
Label string
}
// Compliance contains the status of the finding
type Compliance struct {
Status string
}
// BuildASFF builds the ASFF report
func BuildASFF(summary *model.Summary) []AwsSecurityFinding {
findings := []AwsSecurityFinding{}
awsAccountInfo := getAwsAccountInfo()
if awsAccountInfo.incompleteAwsAccountInfo() {
variables := "AWS_ACCOUNT_ID, AWS_REGION"
log.Debug().Msgf("failed to get AWS account information: check your environment variables (%s)", variables)
}
for idx := range summary.Queries {
query := summary.Queries[idx]
findingPerQuery := awsAccountInfo.getFindingsPerQuery(&query)
findings = append(findings, findingPerQuery...)
}
return findings
}
func (a *AwsAccountInfo) getFindingsPerQuery(query *model.QueryResult) []AwsSecurityFinding {
var findings []AwsSecurityFinding
if query.CloudProvider == "AWS" {
for i := range query.Files {
finding := a.getFinding(query, &query.Files[i])
findings = append(findings, finding)
}
}
return findings
}
func (a *AwsAccountInfo) getFinding(query *model.QueryResult, file *model.VulnerableFile) AwsSecurityFinding {
awsAccountID := a.AwsAccountID
awsRegion := a.AwsRegion
timeFormatted := time.Now().Format(time.RFC3339)
arn := "arn:aws:securityhub:%s:%s:product/%s/default"
arn = fmt.Sprintf(arn, awsRegion, awsAccountID, awsAccountID)
severity := string(query.Severity)
if severity == "INFO" {
severity = "INFORMATIONAL"
}
finding := AwsSecurityFinding{
AwsAccountID: awsAccountID,
CreatedAt: timeFormatted,
Description: getDescription(query, "asff"),
GeneratorID: query.QueryID,
ID: fmt.Sprintf("%s/%s/%s", awsRegion, awsAccountID, file.SimilarityID),
ProductArn: arn,
Resources: []Resource{
{
ID: query.QueryID,
Type: "Other",
},
},
SchemaVersion: "2018-10-08",
Severity: Severity{
Original: string(query.Severity),
Label: severity,
},
Title: query.QueryName,
Types: []string{"Software and Configuration Checks/Vulnerabilities/KICS"},
UpdatedAt: timeFormatted,
Remediation: Remediation{
Recommendation: AsffRecommendation{
Text: fmt.Sprintf(
"Problem found on '%s' file in line %d. Expected value: %s. Actual value: %s.",
file.FileName,
file.Line,
strings.TrimRight(file.KeyExpectedValue, "."),
strings.TrimRight(file.KeyActualValue, "."),
),
},
},
Compliance: Compliance{Status: "FAILED"},
CWE: query.CWE,
}
return finding
}
// getEnv returns the value of the environment variable, or the variable
// name itself as a visible placeholder when the variable is unset
func getEnv(env string) string {
if value := os.Getenv(env); value != "" {
return value
}
return env
}
func getAwsAccountInfo() *AwsAccountInfo {
awsAccountInfo := AwsAccountInfo{
AwsAccountID: getEnv("AWS_ACCOUNT_ID"),
AwsRegion: getEnv("AWS_REGION"),
}
return &awsAccountInfo
}
func (a *AwsAccountInfo) incompleteAwsAccountInfo() bool {
// getEnv falls back to the variable name, so the placeholder also counts as missing
return a.AwsAccountID == "" || a.AwsRegion == "" ||
a.AwsAccountID == "AWS_ACCOUNT_ID" || a.AwsRegion == "AWS_REGION"
}
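// arnExample is an illustrative sketch (not part of the original source) of
// the Security Hub product ARN layout produced in getFinding, using made-up
// account values.
func arnExample() string {
// arn:aws:securityhub:us-east-1:123456789012:product/123456789012/default
return fmt.Sprintf("arn:aws:securityhub:%s:%s:product/%s/default",
"us-east-1", "123456789012", "123456789012")
}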
package model
import "github.com/Checkmarx/kics/v2/pkg/model"
type lines struct {
Begin int `json:"begin"`
}
type location struct {
Path string `json:"path"`
Lines lines `json:"lines"`
}
// CodeClimateReport struct contains all the info to create the code climate report
type CodeClimateReport struct {
Type string `json:"type"`
CheckName string `json:"check_name"`
CWE string `json:"cwe,omitempty"`
Description string `json:"description"`
Categories []string `json:"categories"`
Location location `json:"location"`
Severity string `json:"severity"`
Fingerprint string `json:"fingerprint"`
}
var severityMap = map[string]string{
model.SeverityTrace: "info",
model.SeverityInfo: "info",
model.SeverityLow: "minor",
model.SeverityMedium: "major",
model.SeverityHigh: "critical",
model.SeverityCritical: "blocker",
}
// BuildCodeClimateReport builds the code climate report
func BuildCodeClimateReport(summary *model.Summary) []CodeClimateReport {
var codeClimateReport []CodeClimateReport
for i := range summary.Queries {
for j := range summary.Queries[i].Files {
codeClimateReport = append(codeClimateReport, CodeClimateReport{
Type: "issue",
CheckName: summary.Queries[i].QueryName,
CWE: summary.Queries[i].CWE,
Description: summary.Queries[i].Description,
Categories: []string{"Security"},
Location: location{
Path: summary.Queries[i].Files[j].FileName,
Lines: lines{Begin: summary.Queries[i].Files[j].Line},
},
Severity: severityMap[string(summary.Queries[i].Severity)],
Fingerprint: summary.Queries[i].Files[j].SimilarityID,
})
}
}
return codeClimateReport
}
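// severityMapExample is an illustrative sketch (not part of the original
// source): KICS severities translate to Code Climate levels, so a HIGH
// result is reported as "critical" and a CRITICAL one as "blocker".
func severityMapExample() (string, string) {
return severityMap[model.SeverityHigh], severityMap[model.SeverityCritical]
}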
package model
import "github.com/Checkmarx/kics/v2/pkg/model"
// CSVReport struct contains all the info to create the csv report
type CSVReport struct {
QueryName string `csv:"query_name"`
QueryID string `csv:"query_id"`
QueryURI string `csv:"query_uri"`
Severity string `csv:"severity"`
Platform string `csv:"platform"`
CWE string `csv:"cwe,omitempty"`
CloudProvider string `csv:"cloud_provider"`
Category string `csv:"category"`
DescriptionID string `csv:"description_id"`
Description string `csv:"description"`
CISDescriptionIDFormatted string `csv:"cis_description_id"`
CISDescriptionTitle string `csv:"cis_description_title"`
CISDescriptionTextFormatted string `csv:"cis_description_text"`
FileName string `csv:"file_name"`
SimilarityID string `csv:"similarity_id"`
Line int `csv:"line"`
IssueType string `csv:"issue_type"`
SearchKey string `csv:"search_key"`
SearchLine int `csv:"search_line"`
SearchValue string `csv:"search_value"`
ExpectedValue string `csv:"expected_value"`
ActualValue string `csv:"actual_value"`
}
// BuildCSVReport builds the CSV report
func BuildCSVReport(summary *model.Summary) []CSVReport {
csvReport := []CSVReport{}
for i := range summary.Queries {
for j := range summary.Queries[i].Files {
csvReport = append(csvReport, CSVReport{
QueryName: summary.Queries[i].QueryName,
QueryID: summary.Queries[i].QueryID,
QueryURI: summary.Queries[i].QueryURI,
Severity: string(summary.Queries[i].Severity),
Platform: summary.Queries[i].Platform,
CWE: summary.Queries[i].CWE,
CloudProvider: summary.Queries[i].CloudProvider,
Category: summary.Queries[i].Category,
DescriptionID: summary.Queries[i].DescriptionID,
Description: summary.Queries[i].Description,
CISDescriptionIDFormatted: summary.Queries[i].CISDescriptionIDFormatted,
CISDescriptionTitle: summary.Queries[i].CISDescriptionTitle,
CISDescriptionTextFormatted: summary.Queries[i].CISDescriptionTextFormatted,
FileName: summary.Queries[i].Files[j].FileName,
SimilarityID: summary.Queries[i].Files[j].SimilarityID,
Line: summary.Queries[i].Files[j].Line,
IssueType: string(summary.Queries[i].Files[j].IssueType),
SearchKey: summary.Queries[i].Files[j].SearchKey,
SearchLine: summary.Queries[i].Files[j].SearchLine,
SearchValue: summary.Queries[i].Files[j].SearchValue,
ExpectedValue: summary.Queries[i].Files[j].KeyExpectedValue,
ActualValue: summary.Queries[i].Files[j].KeyActualValue,
})
}
}
return csvReport
}
package model
import (
"crypto/sha256"
"encoding/hex"
"encoding/xml"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
var cycloneDxSeverityLevelEquivalence = map[model.Severity]string{
"INFO": "None",
"LOW": "Low",
"MEDIUM": "Medium",
"HIGH": "High",
"CRITICAL": "Critical",
}
// CycloneDxReport includes all the properties considered relevant for the CycloneDX Report
type CycloneDxReport struct {
XMLName xml.Name `xml:"bom"`
// bom tag information
XMLNS string `xml:"xmlns,attr"`
SerialNumber string `xml:"serialNumber,attr"`
XMLNSV string `xml:"xmlns:v,attr"`
Version int `xml:"version,attr"`
// bom body information
Metadata *Metadata `xml:"metadata"`
Components Components `xml:"components"`
}
// Metadata includes the relevant additional information about the CycloneDX report
type Metadata struct {
Timestamp string `xml:"timestamp"` // the timestamp when the CycloneDX report is created
Tools *[]Tool `xml:"tools>tool"` // array of tools used to create the CycloneDX report
}
// Tool includes the information about the tool used to create the CycloneDX report
type Tool struct {
Vendor string `xml:"vendor"`
Name string `xml:"name"`
Version string `xml:"version"`
}
// Components is a list of components
type Components struct {
Components []Component `xml:"component"`
}
// Component includes the CycloneDX component structure properties considered relevant
type Component struct {
// component tag information
Type string `xml:"type,attr"`
BomRef string `xml:"bom-ref,attr"`
// component body information
Name string `xml:"name"`
Version string `xml:"version"`
Hashes []Hash `xml:"hashes>hash"`
Purl string `xml:"purl"`
Vulnerabilities []Vulnerability `xml:"v:vulnerabilities>v:vulnerability"`
}
// Hash includes the algorithm used in the HASH function and the output of it (content)
type Hash struct {
Alg string `xml:"alg,attr"`
Content string `xml:",chardata"`
}
// Vulnerability includes all the relevant information about the vulnerability
type Vulnerability struct {
// vulnerability tag information
Ref string `xml:"ref,attr"`
// vulnerability body information
ID string `xml:"v:id"`
CWE string `xml:"v:cwe"`
Source Source `xml:"v:source"`
Ratings []Rating `xml:"v:ratings>v:rating"`
Description string `xml:"v:description"`
Recommendations []Recommendation `xml:"v:recommendations>v:recommendation"`
}
// Source includes information about the origin where the vulnerability was reported
type Source struct {
Name string `xml:"name"`
URL string `xml:"url"`
}
// Rating includes the severity of the vulnerability and the method used to classify it
type Rating struct {
Severity string `xml:"v:severity"`
Method string `xml:"v:method"`
}
// Recommendation describes how the vulnerability should be avoided
type Recommendation struct {
Recommendation string `xml:""`
}
func match(fileNames []model.VulnerableFile, fileName string) bool {
for i := range fileNames {
if fileNames[i].FileName == fileName {
return true
}
}
return false
}
func getAllFiles(summary *model.Summary) []model.VulnerableFile {
var fileNames []model.VulnerableFile
for i := range summary.Queries {
files := summary.Queries[i].Files
for idx := range files {
if !match(fileNames, files[idx].FileName) {
fileNames = append(fileNames, files[idx])
}
}
}
return fileNames
}
func generateSha256(filePath string, filePaths map[string]string) string {
file := filePaths[filePath]
content, err := os.ReadFile(filepath.Clean(file))
if err != nil {
log.Trace().Msgf("failed to read %s", file)
return ""
}
hashSum := sha256.Sum256(content)
return hex.EncodeToString(hashSum[:])
}
func getPurl(filePath, version string) string {
return fmt.Sprintf("pkg:generic/%s@%s", filePath, version)
}
func getDescription(query *model.QueryResult, format string) string {
queryDescription := query.Description
if query.CISDescriptionTextFormatted != "" {
queryDescription = query.CISDescriptionTextFormatted
}
if format == "asff" {
return queryDescription
}
description := fmt.Sprintf("[%s].[%s]: %s", query.Platform, query.QueryName, queryDescription)
return description
}
func getVulnerabilitiesByFile(query *model.QueryResult, fileName, purl string) []Vulnerability {
vulns := make([]Vulnerability, 0)
for idx := range query.Files {
file := query.Files[idx]
if fileName == file.FileName {
vuln := Vulnerability{
Ref: purl + query.QueryID,
ID: query.QueryID,
CWE: query.CWE,
Source: Source{
Name: "KICS",
URL: "https://kics.io/",
},
Ratings: []Rating{
{
Severity: cycloneDxSeverityLevelEquivalence[query.Severity],
Method: "Other",
},
},
Description: getDescription(query, "cyclonedx"),
Recommendations: []Recommendation{
{
Recommendation: fmt.Sprintf(
"Problem found in line %d. Expected value: %s. Actual value: %s.",
file.Line,
strings.TrimRight(file.KeyExpectedValue, "."),
strings.TrimRight(file.KeyActualValue, "."),
),
},
},
}
vulns = append(vulns, vuln)
}
}
return vulns
}
func getVulnerabilities(fileName, purl string, summary *model.Summary) []Vulnerability {
vulns := make([]Vulnerability, 0)
for i := range summary.Queries {
query := summary.Queries[i]
vulns = append(vulns, getVulnerabilitiesByFile(&query, fileName, purl)...)
}
return vulns
}
// InitCycloneDxReport inits the CycloneDx report with no components (consequently, no vulnerabilities)
func InitCycloneDxReport() *CycloneDxReport {
metadata := Metadata{
Timestamp: time.Now().Format(time.RFC3339),
Tools: &[]Tool{
{
Vendor: "Checkmarx",
Name: "KICS",
Version: constants.Version,
},
},
}
return &CycloneDxReport{
XMLNS: "http://cyclonedx.org/schema/bom/1.5",
XMLNSV: "http://cyclonedx.org/schema/ext/vulnerability/1.0",
SerialNumber: "urn:uuid:" + uuid.New().String(),
Version: 1,
Metadata: &metadata,
}
}
// BuildCycloneDxReport builds the CycloneDX report
func BuildCycloneDxReport(summary *model.Summary, filePaths map[string]string) *CycloneDxReport {
var component Component
var vuln []Vulnerability
var version, sha, purl, filePath string
bom := InitCycloneDxReport()
files := getAllFiles(summary)
for i := range files {
filePath = strings.ReplaceAll(files[i].FileName, "\\", "/")
sha = generateSha256(files[i].FileName, filePaths)
const shaPrefixLen = 12
if len(sha) < shaPrefixLen {
log.Trace().Msgf("failed to generate SHA-256 for %s", filePath)
continue
}
version = fmt.Sprintf("0.0.0-%s", sha[:shaPrefixLen])
purl = getPurl(filePath, version)
vuln = getVulnerabilities(files[i].FileName, purl, summary)
component = Component{
Type: "file",
BomRef: purl,
Name: filePath,
Version: version,
Purl: purl,
Hashes: []Hash{
{
Alg: "SHA-256",
Content: sha,
},
},
Vulnerabilities: vuln,
}
bom.Components.Components = append(bom.Components.Components, component)
}
return bom
}
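// purlExample is an illustrative sketch (not part of the original source):
// each scanned file becomes a generic purl whose version embeds the first
// twelve characters of the file's SHA-256.
func purlExample() string {
// pkg:generic/main.tf@0.0.0-1a2b3c4d5e6f
return getPurl("main.tf", fmt.Sprintf("0.0.0-%s", "1a2b3c4d5e6f"))
}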
package model
import (
"fmt"
"strings"
"time"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
const timeFormat = "2006-01-02T15:04:05" // YYYY-MM-DDTHH:MM:SS a.k.a ISO8601
type gitlabSASTReport struct {
Schema string `json:"schema"`
SchemaVersion string `json:"version"`
Scan gitlabSASTScan `json:"scan"`
Vulnerabilities []gitlabSASTVulnerability `json:"vulnerabilities"`
}
type gitlabSASTScan struct {
Analyzer gitlabSASTAnalyzer `json:"analyzer"`
StartTime string `json:"start_time"`
EndTime string `json:"end_time"`
Status string `json:"status"`
Scantype string `json:"type"`
Scanner gitlabSASTScanner `json:"scanner"`
}
type gitlabSASTScanner struct {
ID string `json:"id"`
Name string `json:"name"`
URL string `json:"url"`
Version string `json:"version"`
Vendor gitlabSASTScannerVendor `json:"vendor"`
}
type gitlabSASTScannerVendor struct {
Name string `json:"name"`
}
type gitlabSASTVulnerabilityDetails map[string]interface{}
type gitlabSASTVulnerability struct {
ID string `json:"id"`
Severity string `json:"severity"`
Name string `json:"name"`
CWE string `json:"cwe,omitempty"`
Links []gitlabSASTVulnerabilityLink `json:"links"`
Location gitlabSASTVulnerabilityLocation `json:"location"`
Identifiers []gitlabSASTVulnerabilityIdentifier `json:"identifiers"`
Details gitlabSASTVulnerabilityDetails `json:"details,omitempty"`
}
type gitlabSASTVulnerabilityLink struct {
URL string `json:"url"`
}
type gitlabSASTVulnerabilityLocation struct {
File string `json:"file"`
Start int `json:"start_line"`
End int `json:"end_line"`
}
type gitlabSASTVulnerabilityIdentifier struct {
IdentifierType string `json:"type"`
Name string `json:"name"`
URL string `json:"url"`
Value string `json:"value"`
}
type gitlabSASTAnalyzer struct {
ID string `json:"id"`
Name string `json:"name"`
Version string `json:"version"`
Vendor gitlabSASTScannerVendor `json:"vendor"`
}
// GitlabSASTReport represents a usable gitlab sast report reference
type GitlabSASTReport interface {
BuildGitlabSASTVulnerability(issue *model.QueryResult, file *model.VulnerableFile)
}
// NewGitlabSASTReport initializes a new instance of GitlabSASTReport to be used
func NewGitlabSASTReport(start, end time.Time) GitlabSASTReport {
return &gitlabSASTReport{
Schema: "https://gitlab.com/gitlab-org/security-products/security-report-schemas/-/raw/v15.0.6/dist/sast-report-format.json",
SchemaVersion: "15.0.6",
Scan: initGitlabSASTScan(start, end),
Vulnerabilities: make([]gitlabSASTVulnerability, 0),
}
}
func initGitlabSASTScan(start, end time.Time) gitlabSASTScan {
return gitlabSASTScan{
Analyzer: gitlabSASTAnalyzer{
ID: "keeping-infrastructure-as-code-secure",
Name: constants.Fullname,
Version: constants.Version,
Vendor: gitlabSASTScannerVendor{
Name: "Checkmarx",
},
},
Status: "success",
Scantype: "sast",
StartTime: start.Format(timeFormat),
EndTime: end.Format(timeFormat),
Scanner: gitlabSASTScanner{
ID: "keeping-infrastructure-as-code-secure",
Name: constants.Fullname,
URL: constants.URL,
Vendor: gitlabSASTScannerVendor{
Name: "Checkmarx",
},
Version: constants.Version,
},
}
}
// BuildGitlabSASTVulnerability adds a new vulnerability entry to the vulnerabilities slice
func (glsr *gitlabSASTReport) BuildGitlabSASTVulnerability(issue *model.QueryResult, file *model.VulnerableFile) {
if len(issue.Files) > 0 {
vulnerability := gitlabSASTVulnerability{
ID: file.SimilarityID,
Severity: cases.Title(language.Und).String(strings.ToLower(string(issue.Severity))),
Name: issue.QueryName,
CWE: issue.CWE,
Links: []gitlabSASTVulnerabilityLink{
{
URL: issue.QueryURI,
},
},
Location: gitlabSASTVulnerabilityLocation{
File: file.FileName,
Start: file.Line,
End: file.Line,
},
Identifiers: []gitlabSASTVulnerabilityIdentifier{
{
IdentifierType: "kics",
Name: constants.Fullname,
URL: fmt.Sprintf("https://docs.kics.io/latest/queries/%s-queries", strings.ToLower(issue.Platform)),
Value: issue.QueryID,
},
},
}
if issue.CISDescriptionID != "" {
vulnerability.Details = gitlabSASTVulnerabilityDetails{
"cisTitle": issue.CISDescriptionTitle,
"cisId": issue.CISDescriptionIDFormatted,
}
}
glsr.Vulnerabilities = append(glsr.Vulnerabilities, vulnerability)
}
}
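// Illustrative usage sketch, not part of the original source; summary is a
// hypothetical *model.Summary coming from a KICS scan:
//
//	report := NewGitlabSASTReport(scanStart, scanEnd)
//	for i := range summary.Queries {
//		issue := &summary.Queries[i]
//		for f := range issue.Files {
//			report.BuildGitlabSASTVulnerability(issue, &issue.Files[f])
//		}
//	}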
package model
import (
"encoding/xml"
"fmt"
"strings"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
)
type junitTestSuites struct {
XMLName xml.Name `xml:"testsuites"`
Name string `xml:"name,attr"`
Time string `xml:"time,attr"`
Failures string `xml:"failures,attr"`
TestSuites []junitTestSuite `xml:"testsuite"`
}
type junitTestSuite struct {
XMLName xml.Name `xml:"testsuite"`
Name string `xml:"name,attr"`
Failures string `xml:"failures,attr"`
Tests string `xml:"tests,attr"`
TestCases []junitTestCase `xml:"testcase"`
failCount int
}
type junitTestCase struct {
XMLName xml.Name `xml:"testcase"`
CWE string `xml:"cwe,attr,omitempty"`
Name string `xml:"name,attr"`
ClassName string `xml:"classname,attr"`
Failures []junitFailure `xml:"failure"`
}
type junitFailure struct {
XMLName xml.Name `xml:"failure"`
Type string `xml:"type,attr"` // Query name
Message string `xml:"message,attr"` // File name + line number
}
// JUnitReport is a JUnit report representation
type JUnitReport interface {
GenerateTestEntry(query *model.QueryResult)
FinishReport()
}
// NewJUnitReport creates a new JUnit report instance
func NewJUnitReport(time string) JUnitReport {
return &junitTestSuites{
Name: fmt.Sprintf("KICS %s", constants.Version),
Time: time,
Failures: "",
TestSuites: []junitTestSuite{},
}
}
// GenerateTestEntry generates a new test entry for failed tests in a KICS scan
func (jUnit *junitTestSuites) GenerateTestEntry(query *model.QueryResult) {
queryDescription := query.Description
if query.CISDescriptionTextFormatted != "" {
queryDescription = query.CISDescriptionTextFormatted
}
failedTestCases := []junitTestCase{}
for idx := range query.Files {
failedTestCase := junitTestCase{
Name: fmt.Sprintf("%s: %s file in line %d", query.QueryName, query.Files[idx].FileName, query.Files[idx].Line),
ClassName: query.Platform,
CWE: query.CWE,
Failures: []junitFailure{},
}
failedTest := junitFailure{
Type: queryDescription,
Message: fmt.Sprintf(
"[Severity: %s, Query description: %s] Problem found on '%s' file in line %d. Expected value: %s. Actual value: %s.",
query.Severity,
queryDescription,
query.Files[idx].FileName,
query.Files[idx].Line,
strings.TrimRight(query.Files[idx].KeyExpectedValue, "."),
strings.TrimRight(query.Files[idx].KeyActualValue, "."),
),
}
failedTestCase.Failures = append(failedTestCase.Failures, failedTest)
failedTestCases = append(failedTestCases, failedTestCase)
}
newTestSuite := junitTestSuite{
Name: query.Platform,
Failures: "",
Tests: "",
failCount: len(query.Files),
TestCases: failedTestCases,
}
jUnit.TestSuites = append(jUnit.TestSuites, newTestSuite)
}
// FinishReport finalizes the report, filling in the failed-test totals per platform and overall
func (jUnit *junitTestSuites) FinishReport() {
failsCount := 0
for idx := range jUnit.TestSuites {
failsCount += jUnit.TestSuites[idx].failCount
jUnit.TestSuites[idx].Failures = fmt.Sprintf("%d", jUnit.TestSuites[idx].failCount)
jUnit.TestSuites[idx].Tests = fmt.Sprintf("%d", jUnit.TestSuites[idx].failCount)
}
jUnit.Failures = fmt.Sprintf("%d", failsCount)
}
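// Illustrative lifecycle sketch, not part of the original source; the scan
// duration string is a hypothetical value:
//
//	junit := NewJUnitReport("12.5")
//	for i := range summary.Queries {
//		junit.GenerateTestEntry(&summary.Queries[i])
//	}
//	junit.FinishReport() // fills the Failures/Tests counts on every suite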
package model
import (
"encoding/csv"
"encoding/json"
"os"
"path/filepath"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)
var categoriesNotFound = make(map[string]bool)
var severityLevelEquivalence = map[model.Severity]string{
"INFO": "none",
"LOW": "note",
"MEDIUM": "warning",
"HIGH": "error",
"CRITICAL": "error",
}
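// For example (illustrative, not part of the original source):
// severityLevelEquivalence["HIGH"] yields "error", while
// severityLevelEquivalence["INFO"] yields "none", which BuildSarifIssue
// reports with kind "informational" instead of "fail".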
var targetTemplate = sarifDescriptorReference{
ToolComponent: sarifComponentReference{
ComponentReferenceGUID: "58cdcc6f-fe41-4724-bfb3-131a93df4c3f",
ComponentReferenceName: "Categories",
ComponentReferenceIndex: 0,
},
}
var cweTemplate = cweDescriptorReference{
ToolComponent: cweComponentReference{
ComponentReferenceGUID: "1489b0c4-d7ce-4d31-af66-6382a01202e3",
ComponentReferenceName: "CWE",
},
}
type sarifProperties map[string]interface{}
type ruleMetadata struct {
queryID string
queryName string
queryDescription string
queryURI string
queryCategory string
queryCwe string
severity model.Severity
}
type ruleCISMetadata struct {
descriptionText string
id string
title string
}
type sarifMessage struct {
Text string `json:"text"`
MessageProperties sarifProperties `json:"properties,omitempty"`
}
type sarifComponentReference struct {
ComponentReferenceName string `json:"name,omitempty"`
ComponentReferenceGUID string `json:"guid,omitempty"`
ComponentReferenceIndex int `json:"index,omitempty"`
}
type cweComponentReference struct {
ComponentReferenceGUID string `json:"guid"`
ComponentReferenceName string `json:"name"`
}
type sarifDescriptorReference struct {
ReferenceID string `json:"id,omitempty"`
ReferenceGUID string `json:"guid,omitempty"`
ReferenceIndex int `json:"index,omitempty"`
ToolComponent sarifComponentReference `json:"toolComponent,omitempty"`
}
type cweMessage struct {
Text string `json:"text"`
}
type cweCsv struct {
CweID string `json:"id"`
FullDescription cweMessage `json:"fullDescription"`
ShortDescription cweMessage `json:"shortDescription"`
GUID string `json:"guid"`
HelpURI string `json:"helpUri"`
}
type cweDescriptorReference struct {
ReferenceID string `json:"id"`
ReferenceGUID string `json:"guid"`
ToolComponent cweComponentReference `json:"toolComponent"`
}
type sarifConfiguration struct {
Level string `json:"level"`
}
type sarifRelationship struct {
Relationship sarifDescriptorReference `json:"target,omitempty"`
}
type sarifRule struct {
RuleID string `json:"id"`
RuleName string `json:"name"`
RuleShortDescription sarifMessage `json:"shortDescription"`
RuleFullDescription sarifMessage `json:"fullDescription"`
DefaultConfiguration sarifConfiguration `json:"defaultConfiguration"`
HelpURI string `json:"helpUri"`
Relationships []sarifRelationship `json:"relationships,omitempty"`
RuleProperties sarifProperties `json:"properties,omitempty"`
}
type sarifDriver struct {
ToolName string `json:"name"`
ToolVersion string `json:"version"`
ToolFullName string `json:"fullName"`
ToolURI string `json:"informationUri"`
Rules []sarifRule `json:"rules"`
}
type sarifTool struct {
Driver sarifDriver `json:"driver"`
}
type sarifRegion struct {
StartLine int `json:"startLine"`
}
type sarifArtifactLocation struct {
ArtifactURI string `json:"uri"`
}
type sarifPhysicalLocation struct {
ArtifactLocation sarifArtifactLocation `json:"artifactLocation"`
Region sarifRegion `json:"region"`
}
type sarifLocation struct {
PhysicalLocation sarifPhysicalLocation `json:"physicalLocation"`
}
type sarifResult struct {
ResultRuleID string `json:"ruleId"`
ResultRuleIndex int `json:"ruleIndex"`
ResultKind string `json:"kind"`
ResultMessage sarifMessage `json:"message"`
ResultLocations []sarifLocation `json:"locations"`
}
type taxonomyDefinitions struct {
DefinitionGUID string `json:"guid,omitempty"`
DefinitionName string `json:"name,omitempty"`
DefinitionID string `json:"id"`
DefinitionShortDescription cweMessage `json:"shortDescription"`
DefinitionFullDescription cweMessage `json:"fullDescription"`
HelpURI string `json:"helpUri,omitempty"`
}
type cweTaxonomiesWrapper struct {
Taxonomies sarifTaxonomy `json:"taxonomies"`
}
type sarifTaxonomy struct {
TaxonomyGUID string `json:"guid"`
TaxonomyName string `json:"name"`
TaxonomyFullDescription sarifMessage `json:"fullDescription,omitempty"`
TaxonomyShortDescription sarifMessage `json:"shortDescription"`
TaxonomyDownloadURI string `json:"downloadUri,omitempty"`
TaxonomyInformationURI string `json:"informationUri,omitempty"`
TaxonomyIsComprehensive bool `json:"isComprehensive,omitempty"`
TaxonomyLanguage string `json:"language,omitempty"`
TaxonomyMinRequiredLocDataSemanticVersion string `json:"minimumRequiredLocalizedDataSemanticVersion,omitempty"`
TaxonomyOrganization string `json:"organization,omitempty"`
TaxonomyReleaseDateUtc string `json:"releaseDateUtc,omitempty"`
TaxonomyDefinitions []taxonomyDefinitions `json:"taxa"`
}
// SarifRun is a single run entry of the SARIF report
type SarifRun struct {
Tool sarifTool `json:"tool"`
Results []sarifResult `json:"results"`
Taxonomies []sarifTaxonomy `json:"taxonomies"`
}
// SarifReport represents a SARIF report builder
type SarifReport interface {
BuildSarifIssue(issue *model.QueryResult) string
RebuildTaxonomies(cwes []string, guids map[string]string)
GetGUIDFromRelationships(idx int, cweID string) string
}
type sarifReport struct {
Schema string `json:"$schema"`
SarifVersion string `json:"version"`
Runs []SarifRun `json:"runs"`
}
func initSarifTool() sarifTool {
return sarifTool{
Driver: sarifDriver{
ToolName: "KICS",
ToolVersion: constants.Version,
ToolFullName: constants.Fullname,
ToolURI: constants.URL,
Rules: make([]sarifRule, 0),
},
}
}
func initSarifCategories() []taxonomyDefinitions {
allCategories := []taxonomyDefinitions{noCategory}
for _, category := range categories {
allCategories = append(allCategories, category)
}
return allCategories
}
// initCweCategories builds the CWE taxa field inside taxonomies
func initCweCategories(cweIDs []string, guids map[string]string) []taxonomyDefinitions {
absPath, err := filepath.Abs(".")
if err != nil {
return []taxonomyDefinitions{}
}
cweSDCSVPath := filepath.Join(absPath, "assets", "cwe_csv", "Software-Development-CWE.csv")
cweSDCsvList, err := readCWECsvInfo(cweSDCSVPath)
if err != nil {
return []taxonomyDefinitions{}
}
cweHDCSVPath := filepath.Join(absPath, "assets", "cwe_csv", "Hardware-Design-CWE.csv")
cweHDCsvList, err := readCWECsvInfo(cweHDCSVPath)
if err != nil {
return []taxonomyDefinitions{}
}
cweRCCSVPath := filepath.Join(absPath, "assets", "cwe_csv", "Research-Concepts-CWE.csv")
cweRCCsvList, err := readCWECsvInfo(cweRCCSVPath)
if err != nil {
return []taxonomyDefinitions{}
}
var taxonomyList []taxonomyDefinitions
for _, cweID := range cweIDs {
var matchingCweEntry cweCsv
var found bool
matchingCweEntry, found = buildMatchingCWEEntry(cweID, cweSDCsvList)
if !found {
matchingCweEntry, found = buildMatchingCWEEntry(cweID, cweHDCsvList)
}
if !found {
matchingCweEntry, found = buildMatchingCWEEntry(cweID, cweRCCsvList)
}
if !found {
continue
}
guid, exists := guids[cweID]
if !exists {
continue
}
taxonomy := taxonomyDefinitions{
DefinitionID: matchingCweEntry.CweID,
DefinitionGUID: guid,
DefinitionFullDescription: matchingCweEntry.FullDescription,
DefinitionShortDescription: matchingCweEntry.ShortDescription,
HelpURI: matchingCweEntry.HelpURI,
}
taxonomyList = append(taxonomyList, taxonomy)
}
return taxonomyList
}
func initSarifTaxonomies() []sarifTaxonomy {
var taxonomies []sarifTaxonomy
// Categories
if targetTemplate.ToolComponent.ComponentReferenceName == "Categories" {
categories := sarifTaxonomy{
TaxonomyGUID: targetTemplate.ToolComponent.ComponentReferenceGUID,
TaxonomyName: targetTemplate.ToolComponent.ComponentReferenceName,
TaxonomyShortDescription: sarifMessage{
Text: "Vulnerabilities categories",
},
TaxonomyFullDescription: sarifMessage{
Text: "This taxonomy contains the types an issue can assume",
},
TaxonomyDefinitions: initSarifCategories(),
}
taxonomies = append(taxonomies, categories)
}
if cweTemplate.ToolComponent.ComponentReferenceName == "CWE" {
cweInfo, err := readCWETaxonomyInfo()
if err != nil {
return taxonomies
}
cweTaxonomy := sarifTaxonomy{
TaxonomyGUID: cweInfo.TaxonomyGUID,
TaxonomyName: cweInfo.TaxonomyName,
TaxonomyInformationURI: cweInfo.TaxonomyInformationURI,
TaxonomyIsComprehensive: cweInfo.TaxonomyIsComprehensive,
TaxonomyLanguage: cweInfo.TaxonomyLanguage,
TaxonomyOrganization: cweInfo.TaxonomyOrganization,
TaxonomyReleaseDateUtc: cweInfo.TaxonomyReleaseDateUtc,
TaxonomyMinRequiredLocDataSemanticVersion: cweInfo.TaxonomyMinRequiredLocDataSemanticVersion,
TaxonomyDownloadURI: cweInfo.TaxonomyDownloadURI,
TaxonomyFullDescription: sarifMessage{Text: cweInfo.TaxonomyFullDescription.Text},
TaxonomyShortDescription: sarifMessage{Text: cweInfo.TaxonomyShortDescription.Text},
TaxonomyDefinitions: []taxonomyDefinitions{},
}
taxonomies = append(taxonomies, cweTaxonomy)
}
return taxonomies
}
func initSarifRun() []SarifRun {
return []SarifRun{
{
Tool: initSarifTool(),
Results: make([]sarifResult, 0),
Taxonomies: initSarifTaxonomies(),
},
}
}
// NewSarifReport creates and starts a new SARIF report with default values, respecting SARIF schema 2.1.0
func NewSarifReport() SarifReport {
return &sarifReport{
Schema: "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",
SarifVersion: "2.1.0",
Runs: initSarifRun(),
}
}
func (sr *sarifReport) findSarifCategory(category string) int {
for idx, taxonomy := range sr.Runs[0].Taxonomies[0].TaxonomyDefinitions {
if taxonomy.DefinitionName == category {
return idx
}
}
return -1
}
// readCWETaxonomyInfo is responsible for reading the CWE taxonomy info from the json file
func readCWETaxonomyInfo() (sarifTaxonomy, error) {
var wrapper cweTaxonomiesWrapper
absPath, err := filepath.Abs(".")
if err != nil {
return sarifTaxonomy{}, err
}
filePath := filepath.Join(absPath, "assets", "cwe_csv", "cwe_taxonomies_latest.json")
fileContent, err := os.ReadFile(filepath.Clean(filePath))
if err != nil {
return sarifTaxonomy{}, err
}
err = json.Unmarshal(fileContent, &wrapper)
if err != nil {
return sarifTaxonomy{}, err
}
return wrapper.Taxonomies, nil
}
func generateGUID() string {
id := uuid.New()
return id.String()
}
// readCWECsvInfo is responsible for reading the CWE taxonomy info from the corresponding csv file
func readCWECsvInfo(filePath string) ([]cweCsv, error) {
file, err := os.Open(filepath.Clean(filePath))
if err != nil {
return nil, err
}
defer file.Close()
reader := csv.NewReader(file)
reader.FieldsPerRecord = -1 // Note: -1 means records may have a variable number of fields in the csv file
records, err := reader.ReadAll()
if err != nil {
return nil, err
}
var cweEntries []cweCsv
minFields := 23 // a complete CWE CSV row has at least 23 columns
for _, record := range records {
if len(record) >= minFields {
cweEntry := cweCsv{
CweID: record[0],
FullDescription: cweMessage{
Text: record[5],
},
ShortDescription: cweMessage{
Text: record[4],
},
GUID: generateGUID(),
HelpURI: "https://cwe.mitre.org/data/definitions/" + record[0] + ".html",
}
// Check if Extended Description is empty, fill it with Description if so
if cweEntry.FullDescription.Text == "" {
cweEntry.FullDescription.Text = record[4]
}
cweEntries = append(cweEntries, cweEntry)
}
}
return cweEntries, nil
}
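// Illustrative sketch, not part of the original source: given a hypothetical
// CSV row whose column 0 is "79", whose column 4 is "Improper Neutralization
// of Input During Web Page Generation" and whose column 5 is empty,
// readCWECsvInfo yields a cweCsv entry with CweID "79", a FullDescription
// that falls back to column 4, and HelpURI
// "https://cwe.mitre.org/data/definitions/79.html".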
func getAllCweInfos() (cweSD, cweHD, cweRC []cweCsv, cweErr error) {
var cweToFileName = map[string]string{
"SD": "Software-Development-CWE.csv",
"HD": "Hardware-Design-CWE.csv",
"RC": "Research-Concepts-CWE.csv",
}
mapCweInfos := map[string][]cweCsv{}
absPath, err := filepath.Abs(".")
if err != nil {
return nil, nil, nil, err
}
for cweKey, cweFileName := range cweToFileName {
cweInfoPath := filepath.Join(absPath, "assets", "cwe_csv", cweFileName)
cweInfo, err := readCWECsvInfo(cweInfoPath)
if err != nil {
return nil, nil, nil, err
}
mapCweInfos[cweKey] = cweInfo
}
return mapCweInfos["SD"], mapCweInfos["HD"], mapCweInfos["RC"], nil
}
func buildMatchingCWEEntry(cweID string, cweCSVList []cweCsv) (cweInfos cweCsv, found bool) {
for _, cweEntry := range cweCSVList {
if cweEntry.CweID == cweID {
return cweEntry, true
}
}
return cweCsv{}, false
}
// buildCweCategory builds the CWE reference for a rule, using info from the CWE CSV files
func (sr *sarifReport) buildCweCategory(cweID string) sarifDescriptorReference {
cweSDCsvList, cweHDCsvList, cweRCCsvList, err := getAllCweInfos()
if err != nil {
return sarifDescriptorReference{}
}
var matchingCweEntry cweCsv
var found bool
matchingCweEntry, found = buildMatchingCWEEntry(cweID, cweSDCsvList)
if !found {
matchingCweEntry, found = buildMatchingCWEEntry(cweID, cweHDCsvList)
}
if !found {
matchingCweEntry, found = buildMatchingCWEEntry(cweID, cweRCCsvList)
}
if !found {
return sarifDescriptorReference{}
}
cwe := sarifDescriptorReference{
ReferenceID: matchingCweEntry.CweID,
ReferenceGUID: generateGUID(),
ToolComponent: sarifComponentReference{
ComponentReferenceGUID: "1489b0c4-d7ce-4d31-af66-6382a01202e3",
ComponentReferenceName: "CWE",
},
}
return cwe
}
func (sr *sarifReport) buildSarifCategory(category string) sarifDescriptorReference {
target := targetTemplate
categoryIndex := sr.findSarifCategory(category)
if categoryIndex < 0 {
if _, exists := categoriesNotFound[category]; !exists {
log.Warn().Msgf("Category %s not found.", category)
categoriesNotFound[category] = true
}
categoryIndex = 0 // fall back to the "Undefined Category" entry at index 0
}
taxonomy := sr.Runs[0].Taxonomies[0].TaxonomyDefinitions[categoryIndex]
target.ReferenceID = taxonomy.DefinitionID
target.ReferenceIndex = categoryIndex
return target
}
func (sr *sarifReport) findSarifRuleIndex(ruleID string) int {
for idx := range sr.Runs[0].Tool.Driver.Rules {
if sr.Runs[0].Tool.Driver.Rules[idx].RuleID == ruleID {
return idx
}
}
return -1
}
func (sr *sarifReport) buildSarifRule(queryMetadata *ruleMetadata, cisMetadata ruleCISMetadata) int {
index := sr.findSarifRuleIndex(queryMetadata.queryID)
if index < 0 {
helpURI := "https://docs.kics.io/"
if queryMetadata.queryURI != "" {
helpURI = queryMetadata.queryURI
}
target := sr.buildSarifCategory(queryMetadata.queryCategory)
cwe := sr.buildCweCategory(queryMetadata.queryCwe)
var relationships []sarifRelationship
if cwe.ReferenceID != "" {
relationships = []sarifRelationship{
{Relationship: target},
{Relationship: cwe},
}
} else {
relationships = []sarifRelationship{
{Relationship: target},
}
}
rule := sarifRule{
RuleID: queryMetadata.queryID,
RuleName: queryMetadata.queryName,
RuleShortDescription: sarifMessage{Text: queryMetadata.queryName},
RuleFullDescription: sarifMessage{Text: queryMetadata.queryDescription},
DefaultConfiguration: sarifConfiguration{Level: severityLevelEquivalence[queryMetadata.severity]},
Relationships: relationships,
HelpURI: helpURI,
RuleProperties: nil,
}
if cisMetadata.id != "" {
rule.RuleFullDescription.Text = cisMetadata.descriptionText
rule.RuleProperties = sarifProperties{
"cisId": cisMetadata.id,
"cisTitle": cisMetadata.title,
}
}
sr.Runs[0].Tool.Driver.Rules = append(sr.Runs[0].Tool.Driver.Rules, rule)
index = len(sr.Runs[0].Tool.Driver.Rules) - 1
}
return index
}
// GetGUIDFromRelationships gets the GUID from the relationship for each CWE item
func (sr *sarifReport) GetGUIDFromRelationships(idx int, cweID string) string {
if len(sr.Runs) > 0 {
if len(sr.Runs[0].Tool.Driver.Rules) > 0 {
relationships := sr.Runs[0].Tool.Driver.Rules[idx].Relationships
for _, relationship := range relationships {
target := relationship.Relationship
if target.ReferenceID == cweID {
return target.ReferenceGUID
}
}
}
}
return ""
}
// RebuildTaxonomies rebuilds the CWE taxonomy definitions from the CWE IDs and GUIDs collected from each rule's relationships field
func (sr *sarifReport) RebuildTaxonomies(cwes []string, guids map[string]string) {
if len(cwes) > 0 {
result := initCweCategories(cwes, guids)
if len(sr.Runs) > 0 {
if len(sr.Runs[0].Taxonomies) == 2 {
sr.Runs[0].Taxonomies[1].TaxonomyDefinitions = result
}
}
}
}
// BuildSarifIssue creates new entries in Results (one per vulnerable file) and, if necessary, new entries in Rules and Taxonomies
func (sr *sarifReport) BuildSarifIssue(issue *model.QueryResult) string {
if len(issue.Files) > 0 {
metadata := ruleMetadata{
queryID: issue.QueryID,
queryName: issue.QueryName,
queryDescription: issue.Description,
queryURI: issue.QueryURI,
queryCategory: issue.Category,
queryCwe: issue.CWE,
severity: issue.Severity,
}
cisDescriptions := ruleCISMetadata{
id: issue.CISDescriptionIDFormatted,
title: issue.CISDescriptionTitle,
descriptionText: issue.CISDescriptionTextFormatted,
}
ruleIndex := sr.buildSarifRule(&metadata, cisDescriptions)
kind := "fail"
if severityLevelEquivalence[issue.Severity] == "none" {
kind = "informational"
}
for idx := range issue.Files {
line := issue.Files[idx].Line
if line < 1 {
line = 1
}
result := sarifResult{
ResultRuleID: issue.QueryID,
ResultRuleIndex: ruleIndex,
ResultKind: kind,
ResultMessage: sarifMessage{
Text: issue.Files[idx].KeyActualValue,
MessageProperties: sarifProperties{
"platform": issue.Platform,
},
},
ResultLocations: []sarifLocation{
{
PhysicalLocation: sarifPhysicalLocation{
ArtifactLocation: sarifArtifactLocation{ArtifactURI: issue.Files[idx].FileName},
Region: sarifRegion{StartLine: line},
},
},
},
}
sr.Runs[0].Results = append(sr.Runs[0].Results, result)
}
return issue.CWE
}
return ""
}
package model
const categoryIdentifier = "CAT"
var noCategory = taxonomyDefinitions{
DefinitionID: categoryIdentifier + "000",
DefinitionName: "Undefined Category",
DefinitionShortDescription: cweMessage{Text: "Category is not defined"},
DefinitionFullDescription: cweMessage{Text: "Category is not defined"},
}
func createSarifCategory(identifier, name, description string) taxonomyDefinitions {
return taxonomyDefinitions{
DefinitionID: identifier,
DefinitionName: name,
DefinitionShortDescription: cweMessage{
Text: description,
},
DefinitionFullDescription: cweMessage{
Text: description,
},
}
}
var categories = map[string]taxonomyDefinitions{
"Access Control": createSarifCategory(categoryIdentifier+"001", "Access Control", "Service permission and identity management"),
"Availability": createSarifCategory(categoryIdentifier+"002", "Availability", "Reliability and Scalability"),
"Backup": createSarifCategory(categoryIdentifier+"003", "Backup", "Survivability and Recovery"),
"Best Practices": createSarifCategory(categoryIdentifier+"004", "Best Practices", "Metadata management"),
"Build Process": createSarifCategory(
categoryIdentifier+"005",
"Build Process",
"Insecure configurations when building/deploying",
),
"Encryption": createSarifCategory(categoryIdentifier+"006", "Encryption", "Data Security and Encryption configuration"),
"Insecure Configurations": createSarifCategory(
categoryIdentifier+"007",
"Insecure Configurations",
"Configurations which expose the application unnecessarily",
),
"Insecure Defaults": createSarifCategory(
categoryIdentifier+"008",
"Insecure Defaults",
"Configurations that are insecure by default",
),
"Networking and Firewall": createSarifCategory(
categoryIdentifier+"009",
"Networking and Firewall",
"Network port exposure and firewall configuration",
),
"Observability": createSarifCategory(categoryIdentifier+"010", "Observability", "Logging and Monitoring"),
"Resource Management": createSarifCategory(
categoryIdentifier+"011",
"Resource Management",
"Resource and privilege limit configuration",
),
"Secret Management": createSarifCategory(categoryIdentifier+"012", "Secret Management", "Secret and Key management"),
"Supply-Chain": createSarifCategory(categoryIdentifier+"013", "Supply-Chain", "Dependency version management"),
"Structure and Semantics": createSarifCategory(
categoryIdentifier+"014",
"Structure and Semantics",
"Malformed document structure or inadequate semantics",
),
"Bill Of Materials": createSarifCategory(categoryIdentifier+"015", "Bill Of Materials", "List of resources provisioned"),
}
package model
import (
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
)
// severitySonarQubeEquivalence maps the severity of the KICS to the SonarQube equivalent
var severitySonarQubeEquivalence = map[model.Severity]string{
"INFO": "INFO",
"LOW": "MINOR",
"MEDIUM": "MAJOR",
"HIGH": "CRITICAL",
"CRITICAL": "BLOCKER",
}
// categorySonarQubeEquivalence maps the category to the SonarQube equivalent
var categorySonarQubeEquivalence = map[string]string{
"Access Control": "VULNERABILITY",
"Availability": "VULNERABILITY",
"Backup": "VULNERABILITY",
"Best Practices": "CODE_SMELL",
"Build Process": "VULNERABILITY",
"Encryption": "VULNERABILITY",
"Insecure Configurations": "CODE_SMELL",
"Insecure Defaults": "CODE_SMELL",
"Networking and Firewall": "VULNERABILITY",
"Observability": "VULNERABILITY",
"Resource Management": "VULNERABILITY",
"Secret Management": "VULNERABILITY",
"Supply-Chain": "VULNERABILITY",
"Structure and Semantics": "CODE_SMELL",
}
// SonarQubeReportBuilder is the builder for the SonarQubeReport struct
type SonarQubeReportBuilder struct {
version string
report *SonarQubeReport
}
// SonarQubeReport is a list of issues for SonarQube Report
type SonarQubeReport struct {
Issues []Issue `json:"issues"`
}
// Issue is a single issue for SonarQube Report
type Issue struct {
EngineID string `json:"engineId"`
RuleID string `json:"ruleId"`
Severity string `json:"severity"`
CWE string `json:"cwe,omitempty"`
Type string `json:"type"`
PrimaryLocation *Location `json:"primaryLocation"`
SecondaryLocations []*Location `json:"secondaryLocations,omitempty"`
}
// Location is the location for the vulnerability in the SonarQube Report
type Location struct {
Message string `json:"message"`
FilePath string `json:"filePath"`
TextRange *Range `json:"textRange"`
}
// Range is the range for the vulnerability in the SonarQube Report
type Range struct {
StartLine int `json:"startLine"`
}
// NewSonarQubeReport creates a new SonarQubeReportBuilder instance
func NewSonarQubeReport() *SonarQubeReportBuilder {
return &SonarQubeReportBuilder{
version: "KICS " + constants.Version,
report: &SonarQubeReport{
Issues: make([]Issue, 0),
},
}
}
// BuildReport builds the SonarQubeReport from the given QueryResults
func (s *SonarQubeReportBuilder) BuildReport(summary *model.Summary) *SonarQubeReport {
for i := range summary.Queries {
s.buildIssue(&summary.Queries[i])
}
return s.report
}
// buildIssue builds the issue from the given QueryResult and adds it to the SonarQubeReport
func (s *SonarQubeReportBuilder) buildIssue(query *model.QueryResult) {
issue := Issue{
EngineID: s.version,
RuleID: query.QueryID,
Severity: severitySonarQubeEquivalence[query.Severity],
CWE: query.CWE,
Type: categorySonarQubeEquivalence[query.Category],
PrimaryLocation: buildLocation(0, query),
SecondaryLocations: buildSecondaryLocation(query),
}
s.report.Issues = append(s.report.Issues, issue)
}
// buildSecondaryLocation builds the secondary location for the SonarQube Report
func buildSecondaryLocation(query *model.QueryResult) []*Location {
locations := make([]*Location, 0)
for i := range query.Files[1:] {
locations = append(locations, buildLocation(i+1, query))
}
return locations
}
// buildLocation builds the location for the SonarQube Report
func buildLocation(index int, query *model.QueryResult) *Location {
message := query.Description
if query.CISDescriptionID != "" {
message = query.CISDescriptionID
}
return &Location{
Message: message,
FilePath: query.Files[index].FileName,
TextRange: &Range{
StartLine: query.Files[index].Line,
},
}
}
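// Illustrative usage sketch, not part of the original source; summary is a
// hypothetical *model.Summary:
//
//	builder := NewSonarQubeReport()
//	report := builder.BuildReport(summary)
//	// report.Issues holds one Issue per query; the first file becomes the
//	// primary location and the remaining files become secondary locations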
package report
import (
_ "embed" // used for embedding report static files
"fmt"
"path/filepath"
"time"
"github.com/Checkmarx/kics/v2/internal/constants"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/johnfercher/maroto/pkg/color"
"github.com/johnfercher/maroto/pkg/consts"
"github.com/johnfercher/maroto/pkg/pdf"
"github.com/johnfercher/maroto/pkg/props"
"github.com/rs/zerolog/log"
)
const (
defaultTextSize = 8
smallTextSize = 6
pgMarginLeft = 10
pgMarginTop = 15
pgMarginRight = 10
rowXSmall = 3
rowSmall = 4
rowMedium = 5
rowLarge = 8
rowXLarge = 15
colOne = 1
colTwo = 2
colThree = 3
colFour = 4
colFive = 5
colSix = 6
colEight = 8
colNine = 9
colTen = 10
colFullPage = 12
colRuneSplitter = 20
textSize = 10
)
var (
grayColor = getGrayColor()
//go:embed assets/vuln
vulnImageBase64 string
//go:embed assets/checkmarx-logo
checkmarxLogo string
)
func createQueryEntryMetadataField(m pdf.Maroto, label, value string, textSize int) {
m.Col(colTwo, func() {
m.Text(label, props.Text{
Size: float64(textSize),
Align: consts.Left,
Extrapolate: false,
})
})
m.Col(colTwo, func() {
m.Text(value, props.Text{
Size: float64(textSize),
Align: consts.Left,
Extrapolate: false,
})
})
}
func createQueriesTable(m pdf.Maroto, queries []model.QueryResult) error {
for i := range queries {
m.SetBackgroundColor(color.NewWhite())
queryName := queries[i].QueryName
resultsCount := fmt.Sprint(len(queries[i].Files))
severity := string(queries[i].Severity)
platform := queries[i].Platform
cwe := queries[i].CWE
category := queries[i].Category
description := queries[i].Description
var err error
m.Row(rowLarge, func() {
m.Col(colOne, func() {
err = m.Base64Image(vulnImageBase64, consts.Png, props.Rect{
Center: false,
Percent: 50,
Left: 2,
})
})
m.Col(colNine, func() {
m.Text(queryName, props.Text{
Size: 11,
Style: consts.Bold,
Align: consts.Left,
Extrapolate: false,
})
})
m.Col(colOne, func() {
m.Text("Results", props.Text{
Size: 8,
Style: consts.Bold,
Align: consts.Right,
Extrapolate: false,
})
})
m.Col(colOne, func() {
m.Text(resultsCount, props.Text{
Size: 8,
Style: consts.Bold,
Align: consts.Right,
Extrapolate: false,
})
})
})
if err != nil {
return err
}
m.Row(colFive, func() {
createQueryEntryMetadataField(m, "Severity", severity, textSize)
})
m.Row(colThree, func() {
createQueryEntryMetadataField(m, "Platform", platform, defaultTextSize)
})
m.Row(colFour, func() {
createQueryEntryMetadataField(m, "Cwe", cwe, defaultTextSize)
})
m.Row(colSix, func() {
createQueryEntryMetadataField(m, "Category", category, defaultTextSize)
})
if queries[i].CISDescriptionID != "" {
createCISRows(m, &queries[i])
} else {
createDescription(m, description)
}
createResultsTable(m, &queries[i])
}
return nil
}
func createDescription(m pdf.Maroto, description string) {
m.Row(colFive, func() {
m.Col(colTwo, func() {
m.Text("Description", props.Text{
Size: float64(textSize),
Align: consts.Left,
Style: consts.Bold,
Extrapolate: false,
})
})
})
m.Row(getRowLength(description), func() {
m.Col(colFullPage, func() {
m.Text(description, props.Text{
Size: float64(defaultTextSize),
Align: consts.Left,
Extrapolate: false,
})
})
})
m.Row(colFive, func() {
m.ColSpace(0)
})
}
func createCISRows(m pdf.Maroto, query *model.QueryResult) {
cisID := query.CISDescriptionIDFormatted
description := query.CISDescriptionTextFormatted
title := query.CISDescriptionTitle
m.Row(colFive, func() {
m.Col(colTwo, func() {
m.Text("Description ID", props.Text{
Size: float64(defaultTextSize),
Align: consts.Left,
Style: consts.Bold,
Extrapolate: false,
})
})
m.Col(colEight, func() {
m.Text(cisID, props.Text{
Size: float64(defaultTextSize),
Align: consts.Left,
Extrapolate: false,
})
})
})
m.Row(colFive, func() {
m.Col(colTwo, func() {
m.Text("Title", props.Text{
Size: float64(defaultTextSize),
Align: consts.Left,
Style: consts.Bold,
Extrapolate: false,
})
})
m.Col(colEight, func() {
m.Text(title, props.Text{
Size: float64(defaultTextSize),
Align: consts.Left,
Extrapolate: false,
})
})
})
createDescription(m, description)
}
func getRowLength(value string) float64 {
charsPerUnit := 2.5 // approximate characters rendered per width unit
length := float64(len(value))
return ((length / charsPerUnit) / colRuneSplitter) + 1
}
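// For example (illustrative, not part of the original source), a 100-character
// description yields ((100 / 2.5) / 20) + 1 = 3 rows.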
func createResultsTable(m pdf.Maroto, query *model.QueryResult) {
for idx := range query.Files {
if idx%2 == 0 {
m.SetBackgroundColor(grayColor)
} else {
m.SetBackgroundColor(color.NewWhite())
}
filePath := query.Files[idx].FileName
fileLine := fmt.Sprintf("%s:%s", filePath, fmt.Sprint(query.Files[idx].Line))
m.Row(colFive, func() {
m.Col(colFullPage, func() {
m.Text(fileLine, props.Text{
Size: defaultTextSize,
Align: consts.Left,
Extrapolate: false,
})
})
})
expectedValue := fmt.Sprintf("Expected: %s", query.Files[idx].KeyExpectedValue)
m.Row(colFive, func() {
m.Col(colFullPage, func() {
m.Text(expectedValue, props.Text{
Size: smallTextSize,
Align: consts.Left,
Extrapolate: false,
})
})
})
}
m.SetBackgroundColor(color.NewWhite())
m.Line(1.0)
}
func createHeaderArea(m pdf.Maroto) {
m.Row(rowLarge, func() {
m.Col(colSix, func() {
_ = m.Base64Image(checkmarxLogo, consts.Png, props.Rect{})
})
})
m.Row(colFive, func() {
m.ColSpace(0)
})
m.SetBackgroundColor(getPurpleColor())
m.Row(rowXLarge, func() {
m.Col(colSix, func() {
m.Text(" KICS REPORT", props.Text{
Size: 25,
Style: consts.Bold,
Align: consts.Left,
Extrapolate: false,
Color: color.NewWhite(),
})
})
m.Col(colSix, func() {
m.Text(fmt.Sprintf("%s ", constants.Version), props.Text{
Size: 25,
Style: consts.Bold,
Align: consts.Right,
Extrapolate: false,
Color: color.NewWhite(),
})
})
})
m.SetBackgroundColor(color.NewWhite())
m.Row(rowXSmall, func() {
m.ColSpace(colFullPage)
})
}
func createFooterArea(m pdf.Maroto) {
m.Row(rowMedium, func() {
m.Col(colOne, func() {
m.Text("https://kics.io")
})
})
}
// PrintPdfReport creates a report file in PDF format
func PrintPdfReport(path, filename string, body interface{}) error {
startTime := time.Now()
log.Info().Msg("Started generating pdf report")
summary := body.(*model.Summary)
m := pdf.NewMaroto(consts.Portrait, consts.A4)
m.SetPageMargins(pgMarginLeft, pgMarginTop, pgMarginRight)
m.SetFirstPageNb(1)
m.SetAliasNbPages("{total}")
m.RegisterHeader(func() {
createHeaderArea(m)
})
m.RegisterFooter(func() {
createFooterArea(m)
})
m.SetBackgroundColor(color.NewWhite())
createFirstPageHeader(m, summary)
m.Line(1.0)
err := createQueriesTable(m, summary.Queries)
if err != nil {
return err
}
err = m.OutputFileAndClose(filepath.Join(path, fmt.Sprintf("%s.pdf", filename)))
if err != nil {
return err
}
log.Info().Msgf("Generate report duration: %v", time.Since(startTime))
fileCreationReport(filepath.Join(path, filename+".pdf"), filename)
return err
}
func createDateField(m pdf.Maroto, label string, scanTime time.Time) {
m.Row(colFour, func() {
m.Col(colTwo, func() {
m.Text(label, props.Text{
Size: defaultTextSize,
Align: consts.Left,
Extrapolate: false,
})
})
m.Col(colTwo, func() {
m.Text(scanTime.Format("15:04:05, Jan 02 2006"), props.Text{
Size: defaultTextSize,
Align: consts.Left,
Extrapolate: false,
})
})
})
}
func createDateArea(m pdf.Maroto, summary *model.Summary) {
createDateField(m, "START TIME", summary.Start)
createDateField(m, "END TIME", summary.End)
}
func createPlatformsArea(m pdf.Maroto, summary *model.Summary) {
m.Row(rowSmall, func() {
m.Col(colTwo, func() {
m.Text("PLATFORMS", props.Text{
Size: defaultTextSize,
Align: consts.Left,
Extrapolate: false,
})
})
m.Col(colTen, func() {
m.Text(getPlatforms(summary.Queries), props.Text{
Size: defaultTextSize,
Align: consts.Left,
Extrapolate: false,
})
})
})
}
func createSummaryResultsField(m pdf.Maroto, label, value string, mColor color.Color) {
m.Col(colOne, func() {
m.Text(label, props.Text{
Size: defaultTextSize,
Align: consts.Left,
Style: consts.Bold,
Extrapolate: false,
Color: mColor,
})
})
m.Col(colOne, func() {
m.Text(value, props.Text{
Size: defaultTextSize,
Align: consts.Left,
Style: consts.Bold,
Extrapolate: false,
Color: mColor,
})
})
}
func createSummaryArea(m pdf.Maroto, summary *model.Summary) {
criticalSeverityCount := fmt.Sprint(summary.SeverityCounters["CRITICAL"])
highSeverityCount := fmt.Sprint(summary.SeverityCounters["HIGH"])
mediumSeverityCount := fmt.Sprint(summary.SeverityCounters["MEDIUM"])
lowSeverityCount := fmt.Sprint(summary.SeverityCounters["LOW"])
infoSeverityCount := fmt.Sprint(summary.SeverityCounters["INFO"])
totalCount := fmt.Sprint(summary.TotalCounter)
m.Row(rowMedium, func() {
createSummaryResultsField(m, "CRITICAL", criticalSeverityCount, getPureRedColor())
createSummaryResultsField(m, "HIGH", highSeverityCount, getRedColor())
createSummaryResultsField(m, "MEDIUM", mediumSeverityCount, getOrangeColor())
createSummaryResultsField(m, "LOW", lowSeverityCount, getYellowColor())
createSummaryResultsField(m, "INFO", infoSeverityCount, getBlueColor())
m.ColSpace(colOne)
m.Col(colOne, func() {
m.Text("TOTAL", props.Text{
Size: defaultTextSize,
Right: 10.0,
Align: consts.Right,
Style: consts.Bold,
Extrapolate: false,
})
})
m.Col(colOne, func() {
m.Text(totalCount, props.Text{
Size: defaultTextSize,
Align: consts.Left,
Style: consts.Bold,
Extrapolate: false,
})
})
})
}
func createFirstPageHeader(m pdf.Maroto, summary *model.Summary) {
createSummaryArea(m, summary)
createPlatformsArea(m, summary)
createDateArea(m, summary)
m.Row(rowSmall, func() {
m.Col(colTwo, func() {
m.Text("SCANNED PATHS:", props.Text{
Size: defaultTextSize,
Align: consts.Left,
Extrapolate: false,
})
})
})
for i := range summary.ScannedPaths {
scannedPaths := summary.ScannedPaths[i]
m.Row(rowSmall, func() {
m.Col(colFullPage, func() {
m.Text(fmt.Sprintf("- %s", scannedPaths), props.Text{
Size: defaultTextSize,
Align: consts.Left,
Extrapolate: true,
})
})
})
}
m.Row(rowXSmall, func() {
m.ColSpace(colFullPage)
})
}
func getGrayColor() color.Color {
return color.Color{
Red: 200,
Green: 200,
Blue: 200,
}
}
func getPureRedColor() color.Color {
return color.Color{
Red: 250,
Green: 0,
Blue: 0,
}
}
func getRedColor() color.Color {
return color.Color{
Red: 200,
Green: 0,
Blue: 0,
}
}
func getYellowColor() color.Color {
return color.Color{
Red: 206,
Green: 182,
Blue: 26,
}
}
func getOrangeColor() color.Color {
return color.Color{
Red: 255,
Green: 165,
Blue: 0,
}
}
func getBlueColor() color.Color {
return color.Color{
Red: 0,
Green: 0,
Blue: 200,
}
}
func getPurpleColor() color.Color {
return color.Color{
Red: 80,
Green: 62,
Blue: 158,
}
}
package report
import (
"strings"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintSarifReport creates a report file in SARIF format, collecting each rule's CWE ID and GUID from its relationships so they can be fed into the taxonomies field
func PrintSarifReport(path, filename string, body interface{}) error {
if !strings.HasSuffix(filename, ".sarif") {
filename += ".sarif"
}
if body != "" {
summary, err := getSummary(body)
if err != nil {
return err
}
sarifReport := reportModel.NewSarifReport()
auxID := []string{}
auxGUID := map[string]string{}
for idx := range summary.Queries {
x := sarifReport.BuildSarifIssue(&summary.Queries[idx])
if x != "" {
auxID = append(auxID, x)
guid := sarifReport.GetGUIDFromRelationships(idx, x)
auxGUID[x] = guid
}
}
sarifReport.RebuildTaxonomies(auxID, auxGUID)
body = sarifReport
}
return ExportJSONReport(path, filename, body)
}
package report
import (
"strings"
reportModel "github.com/Checkmarx/kics/v2/pkg/report/model"
)
// PrintSonarQubeReport prints the SonarQube report in the given path and filename with the given body
func PrintSonarQubeReport(path, filename string, body interface{}) error {
if !strings.HasSuffix(filename, ".json") {
filename += ".json"
}
if !strings.HasPrefix(filename, "sonarqube-") {
filename = "sonarqube-" + filename
}
if body != "" {
summary, err := getSummary(body)
if err != nil {
return err
}
sonarQubeReport := reportModel.NewSonarQubeReport()
body = sonarQubeReport.BuildReport(&summary)
}
return ExportJSONReport(path, filename, body)
}
package file
import (
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/analyzer"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/rs/zerolog/log"
"gopkg.in/yaml.v3"
)
// ResolvedFile - used for caching the already resolved files
type ResolvedFile struct {
fileContent []byte
resolvedFileObject any
}
// Resolver replaces or modifies in-memory content before parsing
type Resolver struct {
unmarshler func(fileContent []byte, v any) error
marshler func(v any) ([]byte, error)
ResolvedFiles map[string]model.ResolvedFile
Extension []string
}
// NewResolver returns a new Resolver
func NewResolver(
unmarshler func(fileContent []byte, v any) error,
marshler func(v any) ([]byte, error),
ext []string) *Resolver {
return &Resolver{
unmarshler: unmarshler,
marshler: marshler,
ResolvedFiles: make(map[string]model.ResolvedFile),
Extension: ext,
}
}
func isOpenAPI(fileContent []byte) bool {
regexToRun :=
[]*regexp.Regexp{analyzer.OpenAPIRegexInfo,
analyzer.OpenAPIRegexPath,
analyzer.OpenAPIRegex}
for _, regex := range regexToRun {
if !regex.Match(fileContent) {
return false
}
}
return true
}
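// Illustrative sketch, not part of the original source: a document only
// qualifies as OpenAPI when all three regexes match, e.g. a spec that contains
// an info block, a paths block, and an "openapi"/"swagger" version marker; a
// plain Kubernetes manifest fails the match and is resolved normally.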
// Resolve replaces or modifies in-memory content before parsing
func (r *Resolver) Resolve(fileContent []byte, path string,
resolveCount, maxResolverDepth int, resolvedFilesCache map[string]ResolvedFile,
resolveReferences bool) []byte {
// handle panic during resolve process
defer func() {
if r := recover(); r != nil {
err := fmt.Errorf("panic: %v", r)
log.Err(err).Msg("Recovered from panic during resolve of file " + path)
}
}()
if !resolveReferences && isOpenAPI(fileContent) {
return fileContent
}
if utils.Contains(filepath.Ext(path), []string{".yml", ".yaml"}) {
return r.yamlResolve(fileContent, path, resolveCount, maxResolverDepth, resolvedFilesCache, resolveReferences)
}
var obj any
err := r.unmarshler(fileContent, &obj)
if err != nil {
return fileContent
}
// resolve the paths
obj, _ = r.walk(fileContent, obj, obj, path, resolveCount, maxResolverDepth, resolvedFilesCache, false, resolveReferences)
b, err := json.MarshalIndent(obj, "", "")
if err == nil {
return b
}
return fileContent
}
func (r *Resolver) walk(
originalFileContent []byte,
fullObject interface{},
value any,
path string,
resolveCount, maxResolverDepth int,
resolvedFilesCache map[string]ResolvedFile,
refBool, resolveReferences bool) (any, bool) {
// go over the value and replace paths with the real content
switch typedValue := value.(type) {
case string:
if filepath.Base(path) != typedValue {
return r.resolvePath(
originalFileContent, fullObject, typedValue, path, resolveCount,
maxResolverDepth, resolvedFilesCache, refBool, resolveReferences)
}
return value, false
case []any:
for i, v := range typedValue {
typedValue[i], _ = r.walk(
originalFileContent, fullObject, v, path, resolveCount,
maxResolverDepth, resolvedFilesCache, refBool, resolveReferences)
}
return typedValue, false
case map[string]any:
return r.handleMap(
originalFileContent, fullObject, typedValue, path, resolveCount,
maxResolverDepth, resolvedFilesCache, resolveReferences)
default:
return value, false
}
}
func (r *Resolver) handleMap(
originalFileContent []byte,
fullObject interface{},
value map[string]interface{},
path string,
resolveCount, maxResolverDepth int,
resolvedFilesCache map[string]ResolvedFile,
resolveReferences bool,
) (any, bool) {
for k, v := range value {
isRef := strings.Contains(strings.ToLower(k), "$ref")
val, res := r.walk(originalFileContent, fullObject, v, path, resolveCount, maxResolverDepth, resolvedFilesCache, isRef, resolveReferences)
// check if it is a ref then add new details
if valMap, ok := val.(map[string]interface{}); (ok || !res) && isRef {
// Create RefMetadata and add it to the resolved value map
if valMap == nil {
valMap = make(map[string]interface{})
}
valMap["RefMetadata"] = make(map[string]interface{})
valMap["RefMetadata"].(map[string]interface{})["$ref"] = v
valMap["RefMetadata"].(map[string]interface{})["alone"] = len(value) == 1
return valMap, false
}
if isRef && res {
return val, false
}
value[k] = val
}
return value, false
}
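// Illustrative sketch, not part of the original source: resolving a
// hypothetical JSON object {"$ref": "./user.json"} replaces it with the
// referenced file's content plus a RefMetadata block recording the original
// reference, e.g.:
//
//	{"name": "user", "RefMetadata": {"$ref": "./user.json", "alone": true}}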
func (r *Resolver) yamlResolve(fileContent []byte, path string,
resolveCount, maxResolverDepth int, resolvedFilesCache map[string]ResolvedFile,
resolveReferences bool) []byte {
var obj yaml.Node
err := r.unmarshler(fileContent, &obj)
if err != nil {
return fileContent
}
fullObjectCopy := obj
// resolve the paths
obj, _ = r.yamlWalk(
fileContent, &fullObjectCopy, &obj, path, resolveCount,
maxResolverDepth, resolvedFilesCache, false, resolveReferences, false)
if obj.Kind == yaml.DocumentNode && len(obj.Content) == 1 {
obj = *obj.Content[0]
}
b, err := r.marshler(obj)
if err != nil {
return fileContent
}
return b
}
func (r *Resolver) yamlWalk(
originalFileContent []byte,
fullObject *yaml.Node,
value *yaml.Node,
path string,
resolveCount, maxResolverDepth int,
resolvedFilesCache map[string]ResolvedFile,
refBool, resolveReferences, ansibleVars bool) (yaml.Node, bool) {
// go over the value and replace paths with the real content
switch value.Kind {
case yaml.ScalarNode:
if filepath.Base(path) != value.Value {
return r.resolveYamlPath(originalFileContent, fullObject,
value, path,
resolveCount, maxResolverDepth, resolvedFilesCache,
refBool, resolveReferences, ansibleVars)
}
return *value, false
default:
refBool := false
ansibleVars := false
for i := range value.Content {
if i >= 1 {
refBool = strings.Contains(value.Content[i-1].Value, "$ref")
ansibleVars = strings.Contains(value.Content[i-1].Value, "include_vars")
}
resolved, ok := r.yamlWalk(originalFileContent, fullObject,
value.Content[i], path,
resolveCount, maxResolverDepth, resolvedFilesCache,
refBool, resolveReferences, ansibleVars)
if i >= 1 && refBool && (resolved.Kind == yaml.MappingNode || !ok) {
// Create RefMetadata and add it to yaml Node
if !ok {
resolved = yaml.Node{
Kind: yaml.MappingNode,
}
}
originalValueNode := &yaml.Node{
Kind: yaml.ScalarNode,
Value: "$ref",
}
refAloneKeyNode := &yaml.Node{
Kind: yaml.ScalarNode,
Value: "alone",
}
refAloneValueNode := &yaml.Node{
Kind: yaml.ScalarNode,
Value: strconv.FormatBool(len(value.Content) == 2),
}
refMetadataKeyNode := &yaml.Node{
Kind: yaml.ScalarNode,
Value: "RefMetadata",
}
refMetadataValueNode := &yaml.Node{
Kind: yaml.MappingNode,
}
refMetadataValueNode.Content = append(refMetadataValueNode.Content,
originalValueNode, value.Content[i], refAloneKeyNode, refAloneValueNode)
resolved.Content = append(resolved.Content, refMetadataKeyNode, refMetadataValueNode)
return resolved, false
}
value.Content[i] = &resolved
}
return *value, false
}
}
// resolveYamlPath resolves the path referenced by the yaml node, returning the resolved content when it points to a valid file or section
func (r *Resolver) resolveYamlPath(
originalFileContent []byte,
fullObject *yaml.Node,
v *yaml.Node,
filePath string,
resolveCount, maxResolverDepth int,
resolvedFilesCache map[string]ResolvedFile,
refBool, resolveReferences, ansibleVars bool) (yaml.Node, bool) {
value := v.Value
if resolveCount >= maxResolverDepth || (strings.HasPrefix(value, "#") && !refBool) || (value == "#" && refBool) {
return *v, false
}
var splitPath []string
var obj *yaml.Node
sameFileResolve := false
if strings.HasPrefix(value, "#") { // same file resolve
sameFileResolve = true
path := filePath + value
splitPath = strings.Split(path, "#") // splitting by removing the section to look for in the file
obj = fullObject
} else { // external file resolve
value = checkServerlessFileReference(value)
exists, path, onlyFilePath, filename := findFilePath(filepath.Dir(filePath), value, ansibleVars, r.Extension)
if !exists {
return *v, false
}
// Check if file has already been resolved, if not resolve it and save it for future references
if _, ok := resolvedFilesCache[filename]; !ok {
if ret, isError := r.resolveFile(
value, onlyFilePath, resolveCount, maxResolverDepth,
resolvedFilesCache, true, resolveReferences); isError {
if retYaml, isYamlNode := ret.(yaml.Node); isYamlNode {
return retYaml, false
}
return *v, false
}
}
r.ResolvedFiles[getPathFromString(value)] = model.ResolvedFile{
Content: resolvedFilesCache[filename].fileContent,
Path: path,
LinesContent: utils.SplitLines(string(resolvedFilesCache[filename].fileContent)),
}
node, _ := resolvedFilesCache[filename].resolvedFileObject.(yaml.Node)
obj = &node
if strings.Contains(strings.ToLower(value), "!ref") { // Cloudformation !Ref check
return *obj, false
}
if !strings.Contains(path, "#") {
return *obj, true
}
}
return r.returnResolveYamlPathValue(splitPath, sameFileResolve, filePath, originalFileContent, obj, v)
}
func (r *Resolver) returnResolveYamlPathValue(
splitPath []string,
sameFileResolve bool,
filePath string,
originalFileContent []byte,
obj, v *yaml.Node) (yaml.Node, bool) {
if len(splitPath) > 1 {
if sameFileResolve {
r.ResolvedFiles[filePath] = model.ResolvedFile{
Content: originalFileContent,
Path: filePath,
LinesContent: utils.SplitLines(string(originalFileContent)),
}
}
section, err := findSectionYaml(obj, splitPath[1])
// Check if there was an error finding the section or if the reference is circular
if err == nil && !checkIfCircularYaml(v.Value, &section) {
return section, true
}
}
return *v, false
}
func (r *Resolver) resolveFile(
value string,
filePath string,
resolveCount, maxResolverDepth int,
resolvedFilesCache map[string]ResolvedFile,
yamlResolve, resolveReferences bool) (any, bool) {
// open the file with the content to replace
file, err := os.Open(filepath.Clean(filePath))
if err != nil {
return value, true
}
defer func(file *os.File) {
err = file.Close()
if err != nil {
log.Err(err).Msgf("failed to close resolved file: %s", filePath)
}
}(file)
// read the content
fileContent, _ := io.ReadAll(file)
resolvedFile := r.Resolve(fileContent, filePath, resolveCount+1, maxResolverDepth, resolvedFilesCache, resolveReferences)
if yamlResolve {
var obj yaml.Node
err = r.unmarshler(resolvedFile, &obj) // parse the content
if err != nil {
return value, true
}
if obj.Kind == yaml.DocumentNode && len(obj.Content) == 1 {
obj = *obj.Content[0]
}
resolvedFilesCache[filePath] = ResolvedFile{fileContent, obj}
} else {
var obj any
err = r.unmarshler(resolvedFile, &obj) // parse the content
if err != nil {
return value, true
}
resolvedFilesCache[filePath] = ResolvedFile{fileContent, obj}
}
return nil, false
}
func getPathFromString(path string) string {
lastIndex := strings.LastIndex(path, "#")
if lastIndex == -1 {
return path
}
return path[:lastIndex]
}
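// For example (illustrative, not part of the original source):
// getPathFromString("./definitions.json#User/schema") returns
// "./definitions.json", dropping everything after the last "#".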
// resolvePath resolves the path referenced by the value, returning the resolved content when it points to a valid file or section
func (r *Resolver) resolvePath(
originalFileContent []byte,
fullObject interface{},
value, filePath string,
resolveCount, maxResolverDepth int,
resolvedFilesCache map[string]ResolvedFile,
refBool bool, resolveReferences bool) (any, bool) {
if resolveCount >= maxResolverDepth || (strings.HasPrefix(value, "#") && !refBool) || (value == "#" && refBool) {
return value, false
}
var splitPath []string
var obj any
sameFileResolve := false
if strings.HasPrefix(value, "#") { // same file resolve
sameFileResolve = true
path := filePath + value
splitPath = strings.Split(path, "#") // splitting by removing the section to look for in the file
obj = fullObject
} else { // external file resolve
path := filepath.Join(filepath.Dir(filePath), value)
splitPath = strings.Split(path, "#") // splitting by removing the section to look for in the file
// index 0 contains the path of the file while the other indexes contain the sections (e.g. path = "./definitions.json#User/schema")
onlyFilePath := splitPath[0]
_, err := os.Stat(onlyFilePath)
if err != nil || !contains(filepath.Ext(onlyFilePath), r.Extension) {
return value, false
}
// Check if file has already been resolved, if not resolve it and save it for future references
if _, ok := resolvedFilesCache[onlyFilePath]; !ok {
if ret, isError := r.resolveFile(
value, onlyFilePath, resolveCount, maxResolverDepth,
resolvedFilesCache, false, resolveReferences); isError {
return ret, false
}
}
r.ResolvedFiles[getPathFromString(value)] = model.ResolvedFile{
Content: resolvedFilesCache[onlyFilePath].fileContent,
Path: path,
LinesContent: utils.SplitLines(string(resolvedFilesCache[onlyFilePath].fileContent)),
}
obj = resolvedFilesCache[onlyFilePath].resolvedFileObject
// Cloudformation !Ref check
if strings.Contains(strings.ToLower(value), "!ref") || len(splitPath) == 1 {
return obj, false
}
}
return r.resolvePathReturnValue(value, filePath, splitPath, sameFileResolve, originalFileContent, obj, maxResolverDepth)
}
func (r *Resolver) resolvePathReturnValue(
value, filePath string,
splitPath []string,
sameFileResolve bool,
originalFileContent []byte,
obj any,
maxResolverDepth int) (any, bool) {
if len(splitPath) > 1 {
if sameFileResolve {
r.ResolvedFiles[filePath] = model.ResolvedFile{
Content: originalFileContent,
Path: filePath,
LinesContent: utils.SplitLines(string(originalFileContent)),
}
}
section, err := findSection(obj, splitPath[1])
// Check if there was an error finding the section or if the reference is circular
if err != nil || checkIfCircular(value, section, maxResolverDepth) {
return value, false
}
if sectionMap, ok := section.(map[string]interface{}); ok {
newSectionMap := make(map[string]interface{})
for k, v := range sectionMap {
newSectionMap[k] = v
}
section = newSectionMap
}
return section, true
}
return value, false
}
func findSectionYaml(object *yaml.Node, sectionsString string) (yaml.Node, error) {
object = object.Content[0]
sectionsString = strings.ReplaceAll(sectionsString, "\\", "/")
sections := strings.Split(sectionsString[1:], "/")
for _, section := range sections {
found := false
for index, node := range object.Content {
if node.Value == section {
object = object.Content[index+1]
found = true
break
}
}
if !found {
return *object, errors.New("section not present in file")
}
}
return *object, nil
}
func checkIfCircularYaml(circularValue string, yamlSection *yaml.Node) bool {
if len(yamlSection.Content) == 0 {
return false
}
for index := 0; index < len(yamlSection.Content)-1; index++ {
// if there is a reference to the same value that was resolved it is a circular definition
if yamlSection.Content[index].Value == "$ref" && yamlSection.Content[index+1].Value == circularValue {
return true
} else if checkIfCircularYaml(circularValue, yamlSection.Content[index]) {
return true
}
}
return checkIfCircularYaml(circularValue, yamlSection.Content[len(yamlSection.Content)-1])
}
func findSection(object interface{}, sectionsString string) (interface{}, error) {
sectionsString = strings.ReplaceAll(sectionsString, "\\", "/")
sections := strings.Split(sectionsString[1:], "/")
for _, section := range sections {
if sectionObjectTemp, ok := object.(map[string]interface{}); ok {
if sectionObject, ok := sectionObjectTemp[section]; ok {
object = sectionObject
} else {
return object, errors.New("section not present in file")
}
} else {
return object, errors.New("section not of map type")
}
}
return object, nil
}
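// Illustrative sketch, not part of the original source: findSection walks the
// fragment that followed "#" in a reference, e.g.:
//
//	obj := map[string]interface{}{"User": map[string]interface{}{"schema": "ok"}}
//	section, err := findSection(obj, "/User/schema") // section == "ok", err == nil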
func checkIfCircular(circularValue string, section interface{}, maxResolverDepth int) bool {
if maxResolverDepth > 0 {
sectionAsMap, okMap := section.(map[string]interface{})
sectionAsList, okList := section.([]interface{})
if !okList && !okMap {
return false
}
if okMap {
for key, val := range sectionAsMap {
// if there is a reference to the same value that was resolved it is a circular definition
if key == "$ref" && val == circularValue {
return true
} else if checkIfCircular(circularValue, val, maxResolverDepth-1) {
return true
}
}
} else {
for _, listSection := range sectionAsList {
if checkIfCircular(circularValue, listSection, maxResolverDepth-1) {
return true
}
}
}
}
return false
}
func contains(elem string, list []string) bool {
for _, e := range list {
if elem == e {
return true
}
}
return false
}
func checkServerlessFileReference(value string) string {
re := regexp.MustCompile(`^\${file\((.*\.(yaml|yml))\)}$`)
matches := re.FindStringSubmatch(value)
if len(matches) > 1 {
return matches[1]
}
return value
}
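// For example (illustrative, not part of the original source):
// checkServerlessFileReference("${file(./vars.yml)}") returns "./vars.yml",
// while any value that does not match the Serverless file() pattern is
// returned unchanged.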
func findFilePath(
folderPath, filename string,
ansibleVars bool,
extensions []string) (exists bool, path, onlyFilePath, cleanFilePath string) {
path = filepath.Join(folderPath, filename)
if ansibleVars {
varsExist, ansibleVarsPath := findAnsibleVarsPath(folderPath, filename)
if !varsExist {
return false, "", "", ""
}
path = ansibleVarsPath
} else if _, err := os.Stat(path); err != nil {
return false, "", "", ""
}
if !contains(filepath.Ext(path), extensions) {
return false, "", "", ""
}
onlyFilePath = getPathFromString(path)
return true, path, onlyFilePath, filepath.Clean(onlyFilePath)
}
func findAnsibleVarsPath(folderPath, filename string) (exists bool, ansibleVarsPath string) {
possiblePaths := []string{
filepath.Join(folderPath, "vars", filename),
filepath.Join(folderPath, filename),
}
for _, path := range possiblePaths {
if _, err := os.Stat(path); err == nil {
return true, path
}
}
return false, ""
}
package helm
import (
"fmt"
"io"
"log"
"os"
"path/filepath"
"strings"
"github.com/pkg/errors"
"helm.sh/helm/v3/pkg/action"
"helm.sh/helm/v3/pkg/chart"
"helm.sh/helm/v3/pkg/chart/loader"
"helm.sh/helm/v3/pkg/chartutil"
"helm.sh/helm/v3/pkg/cli"
"helm.sh/helm/v3/pkg/cli/values"
"helm.sh/helm/v3/pkg/getter"
"helm.sh/helm/v3/pkg/release"
)
// credit: https://github.com/helm/helm
var (
settings = cli.New()
)
func runInstall(args []string, client *action.Install,
valueOpts *values.Options) (*release.Release, []string, error) {
log.SetOutput(io.Discard)
defer log.SetOutput(os.Stderr)
if client.Version == "" && client.Devel {
client.Version = ">0.0.0-0"
}
name, charts, err := client.NameAndChart(args)
if err != nil {
return nil, []string{}, err
}
client.ReleaseName = name
cp, err := client.ChartPathOptions.LocateChart(charts, settings)
if err != nil {
return nil, []string{}, err
}
p := getter.All(settings)
vals, err := valueOpts.MergeValues(p)
if err != nil {
return nil, []string{}, err
}
// Check chart dependencies to make sure all are present in /charts
chartRequested, err := loader.Load(cp)
if err != nil {
return nil, []string{}, err
}
excluded := getExcluded(chartRequested, cp)
chartRequested = setID(chartRequested)
if instErr := checkIfInstallable(chartRequested); instErr != nil {
return nil, []string{}, instErr
}
client.Namespace = "kics-namespace"
helmRelease, err := client.Run(chartRequested, vals)
if err != nil {
return nil, []string{}, err
}
return helmRelease, excluded, nil
}
// checkIfInstallable validates if a chart can be installed
//
// Only charts of type "application" (or with no type set) are installable
func checkIfInstallable(ch *chart.Chart) error {
switch ch.Metadata.Type {
case "", "application":
return nil
}
return errors.Errorf("%s charts are not installable", ch.Metadata.Type)
}
// newClient will create a new instance of the helm client used to render the chart
func newClient() *action.Install {
cfg := new(action.Configuration)
client := action.NewInstall(cfg)
client.DryRun = true
client.ReleaseName = "kics-helm"
client.Replace = true // Skip the name check
client.ClientOnly = true
client.APIVersions = chartutil.VersionSet([]string{})
client.IncludeCRDs = false
return client
}
// setID will add auxiliary lines to each template, as well as to the templates of its dependencies
func setID(chartReq *chart.Chart) *chart.Chart {
for _, temp := range chartReq.Templates {
addID(temp)
}
for _, dep := range chartReq.Dependencies() {
setID(dep)
}
return chartReq
}
// addID will add auxiliary lines used for line detection,
// one before each "apiVersion:" line, where the id is the original line number
func addID(file *chart.File) *chart.File {
split := strings.Split(string(file.Data), "\n")
for i := 0; i < len(split); i++ {
if strings.Contains(split[i], "apiVersion:") {
split = append(split, "")
copy(split[i+1:], split[i:])
split[i] = fmt.Sprintf("# KICS_HELM_ID_%d:", i)
i++
}
}
file.Data = []byte(strings.Join(split, "\n"))
return file
}
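// A minimal sketch of the addID transformation, assuming the same package: a
// marker comment carrying the original line number is inserted before each
// "apiVersion:" line, so findings can later be mapped back to source lines.
func exampleAddID() {
file := &chart.File{
Name: "templates/pod.yaml",
Data: []byte("apiVersion: v1\nkind: Pod"),
}
addID(file)
// file.Data is now:
//   # KICS_HELM_ID_0:
//   apiVersion: v1
//   kind: Pod
}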
// getExcluded will return all rendered files that should be excluded from the scan
func getExcluded(charterino *chart.Chart, chartpath string) []string {
excluded := make([]string, 0)
for _, file := range charterino.Raw {
excluded = append(excluded, filepath.Join(chartpath, file.Name))
}
return excluded
}
package helm
import (
"path/filepath"
"regexp"
"strconv"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
masterUtils "github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/pkg/errors"
"helm.sh/helm/v3/pkg/chart"
"helm.sh/helm/v3/pkg/cli/values"
"helm.sh/helm/v3/pkg/release"
)
// Resolver is the helm resolver
type Resolver struct {
}
// splitManifest keeps the information of the manifest split by source
type splitManifest struct {
path string
content []byte
original []byte
splitID string
splitIDMap map[int]interface{}
}
const (
kicsHelmID = "# KICS_HELM_ID_"
)
// Resolve will render the passed helm chart and return its content ready for parsing
func (r *Resolver) Resolve(filePath string) (model.ResolvedFiles, error) {
// handle panic during resolve process
defer func() {
if r := recover(); r != nil {
errMessage := "Recovered from panic during resolve of file " + filePath
masterUtils.HandlePanic(r, errMessage)
}
}()
splits, excluded, err := renderHelm(filePath)
if err != nil { // return error to be logged
return model.ResolvedFiles{}, errors.New("failed to render helm chart")
}
var rfiles = model.ResolvedFiles{
Excluded: excluded,
}
for _, split := range *splits {
subFolder := filepath.Base(filePath)
splitPath := strings.Split(split.path, getPathSeparator(split.path))
splited := filepath.Join(splitPath[1:]...)
origpath := filepath.Join(filepath.Dir(filePath), subFolder, splited)
rfiles.File = append(rfiles.File, model.ResolvedHelm{
FileName: origpath,
Content: split.content,
OriginalData: split.original,
SplitID: split.splitID,
IDInfo: split.splitIDMap,
})
}
return rfiles, nil
}
// SupportedTypes returns the supported fileKinds for this resolver
func (r *Resolver) SupportedTypes() []model.FileKind {
return []model.FileKind{model.KindHELM}
}
// renderHelm will use helm library to render helm charts
func renderHelm(path string) (*[]splitManifest, []string, error) {
client := newClient()
manifest, excluded, err := runInstall([]string{path}, client, &values.Options{})
if err != nil {
return nil, []string{}, err
}
splitted, err := splitManifestYAML(manifest)
if err != nil {
return nil, []string{}, err
}
return splitted, excluded, nil
}
// splitManifestYAML will split the rendered file and return its content by template as well as the template path
func splitManifestYAML(template *release.Release) (*[]splitManifest, error) {
sources := make([]*chart.File, 0)
sources = updateName(sources, template.Chart, template.Chart.Name())
var splitedManifest []splitManifest
splitedSource := strings.Split(template.Manifest, "---") // split manifest by '---'
origData := toMap(sources)
for _, splited := range splitedSource {
var lineID string
for _, line := range strings.Split(splited, "\n") {
if strings.Contains(line, kicsHelmID) {
lineID = line // get auxiliary line id
break
}
}
path := strings.Split(strings.TrimPrefix(splited, "\n# Source: "), "\n") // get source of split yaml
// ignore auxiliary files used to render chart
if path[0] == "" {
continue
}
if origData[filepath.FromSlash(path[0])] == nil {
continue
}
idMap, err := getIDMap(origData[filepath.FromSlash(path[0])])
if err != nil {
return nil, err
}
splitedManifest = append(splitedManifest, splitManifest{
path: path[0],
content: []byte(strings.ReplaceAll(splited, "\r", "")),
original: origData[filepath.FromSlash(path[0])], // get original data from template
splitID: lineID,
splitIDMap: idMap,
})
}
return &splitedManifest, nil
}
// toMap will convert the original data into a map keyed by file path
func toMap(files []*chart.File) map[string][]byte {
mapFiles := make(map[string][]byte)
for _, file := range files {
mapFiles[file.Name] = []byte(strings.ReplaceAll(string(file.Data), "\r", ""))
}
return mapFiles
}
// updateName will update the templates' names, as well as those of the chart's dependencies
func updateName(template []*chart.File, charts *chart.Chart, name string) []*chart.File {
if name != charts.Name() {
name = filepath.Join(name, charts.Name())
}
for _, temp := range charts.Templates {
temp.Name = filepath.Join(name, temp.Name)
}
template = append(template, charts.Templates...)
for _, dep := range charts.Dependencies() {
template = updateName(template, dep, filepath.Join(name, "charts"))
}
return template
}
// getIDMap will construct a map keyed by helm id, whose value maps each line
// number in that id's block to itself, for use in the detector
func getIDMap(originalData []byte) (map[int]interface{}, error) {
ids := make(map[int]interface{})
mapLines := make(map[int]int)
idHelm := -1
for line, stringLine := range strings.Split(string(originalData), "\n") {
if strings.Contains(stringLine, kicsHelmID) {
id, err := strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(stringLine, kicsHelmID), ":"))
if err != nil {
return nil, err
}
if idHelm == -1 {
idHelm = id
mapLines[line] = line
} else {
ids[idHelm] = mapLines
mapLines = make(map[int]int)
idHelm = id
mapLines[line] = line
}
} else if idHelm != -1 {
mapLines[line] = line
}
}
ids[idHelm] = mapLines
return ids, nil
}
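// A minimal sketch of getIDMap's output, assuming the same package: each
// KICS_HELM_ID marker becomes a key, and its value maps every original line
// number in that marker's block to itself.
func exampleGetIDMap() {
data := []byte("# KICS_HELM_ID_0:\napiVersion: v1\n# KICS_HELM_ID_5:\nkind: Pod")
ids, err := getIDMap(data)
if err != nil {
return
}
// ids[0] == map[int]int{0: 0, 1: 1} and ids[5] == map[int]int{2: 2, 3: 3}
_ = ids
}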
// getPathSeparator returns the separator used in path ("/" or "\\"), or "" when neither pattern matches
func getPathSeparator(path string) string {
if matched, err := regexp.MatchString(`^[a-zA-Z0-9_.$-]+(/[a-zA-Z0-9_.$-]+)*$`, path); matched && err == nil {
return "/"
} else if matched, err := regexp.MatchString(`^[a-zA-Z0-9_.$-]+(\\[a-zA-Z0-9_.$-]+)*$`, path); matched && err == nil {
return "\\"
}
return ""
}
package resolver
import (
"os"
"path/filepath"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/rs/zerolog/log"
)
// kindResolver is the interface implemented by each kind of resolver (e.g. the helm resolver)
// Resolve will render the file/template
// SupportedTypes will return the file kinds that the resolver supports
type kindResolver interface {
Resolve(filePath string) (model.ResolvedFiles, error)
SupportedTypes() []model.FileKind
}
// Resolver is a struct containing the resolvers by file kind
type Resolver struct {
resolvers map[model.FileKind]kindResolver
}
// Builder is a struct used to create a new resolver
type Builder struct {
resolvers []kindResolver
}
// NewBuilder creates a reference to a new Builder
func NewBuilder() *Builder {
return &Builder{}
}
// Add will add kindResolvers for building the resolver
func (b *Builder) Add(p kindResolver) *Builder {
log.Debug().Msgf("resolver.Add()")
b.resolvers = append(b.resolvers, p)
return b
}
// Build will create a new instance of a resolver
func (b *Builder) Build() (*Resolver, error) {
log.Debug().Msg("resolver.Build()")
resolvers := make(map[model.FileKind]kindResolver, len(b.resolvers))
for _, resolver := range b.resolvers {
for _, typeRes := range resolver.SupportedTypes() {
resolvers[typeRes] = resolver
}
}
return &Resolver{
resolvers: resolvers,
}, nil
}
// Resolve will resolve the file according to its kind
func (r *Resolver) Resolve(filePath string, kind model.FileKind) (model.ResolvedFiles, error) {
if r, ok := r.resolvers[kind]; ok {
obj, err := r.Resolve(filePath)
if err != nil {
return model.ResolvedFiles{}, err
}
log.Debug().Msgf("resolver.Resolve() rendered file: %s", filePath)
return obj, nil
}
// TODO: log that no resolver is registered for this file kind
return model.ResolvedFiles{}, nil
}
// GetType will analyze the filepath to determine which resolver to use
func (r *Resolver) GetType(filePath string) model.FileKind {
_, err := os.Stat(filepath.Join(filePath, "Chart.yaml"))
if err == nil {
return model.KindHELM
}
return model.KindCOMMON
}
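// A minimal sketch of wiring and using the resolver, mirroring how
// createService does it further below; the chart path is a placeholder.
package main
import (
"github.com/Checkmarx/kics/v2/pkg/resolver"
"github.com/Checkmarx/kics/v2/pkg/resolver/helm"
)
func main() {
combinedResolver, err := resolver.NewBuilder().
Add(&helm.Resolver{}).
Build()
if err != nil {
return
}
chartPath := "./my-chart" // placeholder: a folder containing Chart.yaml
kind := combinedResolver.GetType(chartPath)
// kind is model.KindHELM when Chart.yaml exists, model.KindCOMMON otherwise
if _, err := combinedResolver.Resolve(chartPath, kind); err != nil {
return
}
}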
package scan
import (
"context"
"time"
"github.com/Checkmarx/kics/v2/internal/storage"
"github.com/Checkmarx/kics/v2/internal/tracker"
"github.com/Checkmarx/kics/v2/pkg/descriptions"
consolePrinter "github.com/Checkmarx/kics/v2/pkg/printer"
"github.com/Checkmarx/kics/v2/pkg/progress"
"github.com/rs/zerolog/log"
)
// Parameters represents all available scan parameters
type Parameters struct {
CloudProvider []string
DisableFullDesc bool
ExcludeCategories []string
ExcludePaths []string
ExcludeQueries []string
ExcludeResults []string
ExcludeSeverities []string
ExperimentalQueries bool
IncludeQueries []string
InputData string
OutputName string
OutputPath string
Path []string
PayloadPath string
PreviewLines int
QueriesPath []string
LibrariesPath string
ReportFormats []string
Platform []string
ExcludePlatform []string
TerraformVarsPath string
QueryExecTimeout int
LineInfoPayload bool
DisableSecrets bool
SecretsRegexesPath string
ChangedDefaultQueryPath bool
ChangedDefaultLibrariesPath bool
ScanID string
BillOfMaterials bool
ExcludeGitIgnore bool
OpenAPIResolveReferences bool
ParallelScanFlag int
MaxFileSizeFlag int
UseOldSeverities bool
MaxResolverDepth int
KicsComputeNewSimID bool
}
// Client represents a scan client
type Client struct {
ScanParams *Parameters
ScanStartTime time.Time
Tracker *tracker.CITracker
Storage *storage.MemoryStorage
ExcludeResultsMap map[string]bool
Printer *consolePrinter.Printer
ProBarBuilder *progress.PbBuilder
}
// NewClient initializes the client with all the required parameters
func NewClient(params *Parameters, proBarBuilder *progress.PbBuilder, customPrint *consolePrinter.Printer) (*Client, error) {
t, err := tracker.NewTracker(params.PreviewLines)
if err != nil {
log.Err(err)
return nil, err
}
descriptions.CheckVersion(t)
store := storage.NewMemoryStorage()
excludeResultsMap := getExcludeResultsMap(params.ExcludeResults)
return &Client{
ScanParams: params,
Tracker: t,
ProBarBuilder: proBarBuilder,
Storage: store,
ExcludeResultsMap: excludeResultsMap,
Printer: customPrint,
}, nil
}
// PerformScan executes executeScan and postScan
func (c *Client) PerformScan(ctx context.Context) error {
c.ScanStartTime = time.Now()
scanResults, err := c.executeScan(ctx)
if err != nil {
log.Err(err)
return err
}
postScanError := c.postScan(scanResults)
if postScanError != nil {
log.Err(postScanError)
return postScanError
}
return nil
}
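// A minimal sketch of driving a scan programmatically with the Client above,
// assuming the pkg/scan import path. The progress-bar builder and printer are
// zero values purely for illustration; real callers construct them with the
// progress and printer packages' constructors.
package main
import (
"context"
consolePrinter "github.com/Checkmarx/kics/v2/pkg/printer"
"github.com/Checkmarx/kics/v2/pkg/progress"
"github.com/Checkmarx/kics/v2/pkg/scan"
)
func main() {
params := &scan.Parameters{
Path: []string{"./infrastructure"}, // placeholder path to scan
PreviewLines: 3,
QueriesPath: []string{"./assets/queries"},
ScanID: "console",
}
var (
proBarBuilder progress.PbBuilder
printer consolePrinter.Printer
)
client, err := scan.NewClient(params, &proBarBuilder, &printer)
if err != nil {
return
}
if err := client.PerformScan(context.Background()); err != nil {
return
}
}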
package scan
import (
_ "embed" // Embed kics CLI img and scan-flags
"os"
"path/filepath"
"sort"
"strings"
"time"
consoleHelpers "github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/pkg/descriptions"
"github.com/Checkmarx/kics/v2/pkg/engine/provider"
"github.com/Checkmarx/kics/v2/pkg/model"
consolePrinter "github.com/Checkmarx/kics/v2/pkg/printer"
"github.com/Checkmarx/kics/v2/pkg/progress"
"github.com/Checkmarx/kics/v2/pkg/report"
"github.com/rs/zerolog/log"
)
func (c *Client) getSummary(results []model.Vulnerability, end time.Time, pathParameters model.PathParameters) model.Summary {
counters := model.Counters{
ScannedFiles: c.Tracker.FoundFiles,
ScannedFilesLines: c.Tracker.FoundCountLines,
ParsedFilesLines: c.Tracker.ParsedCountLines,
ParsedFiles: c.Tracker.ParsedFiles,
IgnoredFilesLines: c.Tracker.IgnoreCountLines,
TotalQueries: c.Tracker.LoadedQueries,
FailedToExecuteQueries: c.Tracker.ExecutingQueries - c.Tracker.ExecutedQueries,
FailedSimilarityID: c.Tracker.FailedSimilarityID,
}
summary := model.CreateSummary(counters, results, c.ScanParams.ScanID, pathParameters.PathExtractionMap, c.Tracker.Version)
summary.Times = model.Times{
Start: c.ScanStartTime,
End: end,
}
if c.ScanParams.DisableFullDesc {
log.Warn().Msg("Skipping descriptions because provided disable flag is set")
} else {
err := descriptions.RequestAndOverrideDescriptions(&summary)
if err != nil {
log.Warn().Msgf("Unable to get descriptions: %s", err)
log.Warn().Msgf("Using default descriptions")
}
}
return summary
}
func (c *Client) resolveOutputs(
summary *model.Summary,
documents model.Documents,
printer *consolePrinter.Printer,
proBarBuilder progress.PbBuilder,
) error {
log.Debug().Msg("console.resolveOutputs()")
usingCustomQueries := usingCustomQueries(c.ScanParams.QueriesPath)
if err := consolePrinter.PrintResult(summary, printer, usingCustomQueries); err != nil {
return err
}
if c.ScanParams.PayloadPath != "" {
if err := report.ExportJSONReport(
filepath.Dir(c.ScanParams.PayloadPath),
filepath.Base(c.ScanParams.PayloadPath),
documents,
); err != nil {
return err
}
}
return printOutput(
c.ScanParams.OutputPath,
c.ScanParams.OutputName,
summary, c.ScanParams.ReportFormats,
proBarBuilder,
)
}
func printOutput(outputPath, filename string, body interface{}, formats []string, proBarBuilder progress.PbBuilder) error {
log.Debug().Msg("console.printOutput()")
if outputPath == "" {
return nil
}
if len(formats) == 0 {
formats = []string{"json"}
}
log.Debug().Msgf("Output formats provided [%v]", strings.Join(formats, ","))
err := consoleHelpers.GenerateReport(outputPath, filename, body, formats, proBarBuilder)
return err
}
// postScan is responsible for outputting the results
func (c *Client) postScan(scanResults *Results) error {
if scanResults == nil {
log.Info().Msg("No files were scanned")
scanResults = &Results{
Results: []model.Vulnerability{},
ExtractedPaths: provider.ExtractedPath{},
Files: model.FileMetadatas{},
FailedQueries: map[string]error{},
}
}
// mask results preview if Secrets Scan is disabled
if c.ScanParams.DisableSecrets {
err := maskPreviewLines(c.ScanParams.SecretsRegexesPath, scanResults)
if err != nil {
log.Err(err)
return err
}
}
sort.Strings(c.ScanParams.Path)
summary := c.getSummary(scanResults.Results, time.Now(), model.PathParameters{
ScannedPaths: c.ScanParams.Path,
PathExtractionMap: scanResults.ExtractedPaths.ExtractionMap,
})
if err := c.resolveOutputs(
&summary,
scanResults.Files.Combine(c.ScanParams.LineInfoPayload),
c.Printer,
*c.ProBarBuilder); err != nil {
log.Err(err)
return err
}
deleteExtractionFolder(scanResults.ExtractedPaths.ExtractionMap)
logger := consolePrinter.NewLogger(nil)
consolePrinter.PrintScanDuration(&logger, time.Since(c.ScanStartTime))
printVersionCheck(c.Printer, &summary)
contributionAppeal(c.Printer, c.ScanParams.QueriesPath)
exitCode := consoleHelpers.ResultsExitCode(&summary)
if consoleHelpers.ShowError("results") && exitCode != 0 {
os.Exit(exitCode)
}
return nil
}
// Package scan implements functions and helpers to ensure the proper scan of the specified files
package scan
import (
"encoding/json"
"regexp"
"strings"
"github.com/Checkmarx/kics/v2/pkg/engine/secrets"
"github.com/Checkmarx/kics/v2/pkg/model"
)
// maskPreviewLines masks secrets found in the results' preview lines, using the default or custom secret regex rules
func maskPreviewLines(secretsPath string, scanResults *Results) error {
secretsRegexRulesContent, err := getSecretsRegexRules(secretsPath)
if err != nil {
return err
}
var allRegexQueries secrets.RegexRuleStruct
err = json.Unmarshal([]byte(secretsRegexRulesContent), &allRegexQueries)
if err != nil {
return err
}
allowRules, err := secrets.CompileRegex(allRegexQueries.AllowRules)
if err != nil {
return err
}
rules, err := compileRegexQueries(allRegexQueries.Rules)
if err != nil {
return err
}
for i := range scanResults.Results {
item := scanResults.Results[i]
hideSecret(item.VulnLines, &allowRules, &rules)
}
return nil
}
func compileRegexQueries(allRegexQueries []secrets.RegexQuery) ([]secrets.RegexQuery, error) {
for i := range allRegexQueries {
compiledRegexp, err := regexp.Compile(allRegexQueries[i].RegexStr)
if err != nil {
return allRegexQueries, err
}
allRegexQueries[i].Regex = compiledRegexp
for j := range allRegexQueries[i].AllowRules {
allRegexQueries[i].AllowRules[j].Regex = regexp.MustCompile(allRegexQueries[i].AllowRules[j].RegexStr)
}
}
return allRegexQueries, nil
}
// hideSecret masks every preview line that matches one of the secret rules, honoring allow rules and entropy checks
func hideSecret(lines *[]model.CodeLine, allowRules *[]secrets.AllowRule, rules *[]secrets.RegexQuery) {
for idx, line := range *lines {
for i := range *rules {
rule := (*rules)[i]
isSecret, groups := isSecret(line.Line, &rule, allowRules)
// if not a secret skip to next line
if !isSecret {
continue
}
if len(rule.Entropies) == 0 {
maskSecret(&rule, lines, idx)
}
if len(groups[0]) > 0 {
for _, entropy := range rule.Entropies {
// if matched group does not exist continue
if len(groups[0]) <= entropy.Group {
return
}
isMatch, _ := secrets.CheckEntropyInterval(
entropy,
groups[0][entropy.Group],
)
if isMatch {
maskSecret(&rule, lines, idx)
}
}
}
}
}
}
// maskSecret replaces the matched secret (or the whole line) with a placeholder
func maskSecret(rule *secrets.RegexQuery, lines *[]model.CodeLine, idx int) {
if rule.SpecialMask == "all" {
(*lines)[idx].Line = "<SECRET-MASKED-ON-PURPOSE>"
return
}
regex := rule.RegexStr
line := (*lines)[idx]
if rule.SpecialMask != "" {
regex = "(.+)" + rule.SpecialMask
}
var re = regexp.MustCompile(regex)
match := re.FindString(line.Line)
if rule.SpecialMask != "" {
match = line.Line[len(match):]
}
if match != "" {
(*lines)[idx].Line = strings.Replace(line.Line, match, "<SECRET-MASKED-ON-PURPOSE>", 1)
} else {
(*lines)[idx].Line = "<SECRET-MASKED-ON-PURPOSE>"
}
}
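// A minimal sketch of maskSecret, assuming the same package; the rule below is
// illustrative, not one of the shipped secret rules. With no SpecialMask set,
// the span matched by the rule's regex is replaced by the placeholder.
func exampleMaskSecret() {
rule := secrets.RegexQuery{RegexStr: `(?i)aws_secret_access_key\s*=\s*\S+`}
lines := []model.CodeLine{{Position: 3, Line: "aws_secret_access_key = AKIA-placeholder"}}
maskSecret(&rule, &lines, 0)
// lines[0].Line == "<SECRET-MASKED-ON-PURPOSE>" (the match covered the whole line)
}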
// repurposed isSecret from inspector
func isSecret(line string, rule *secrets.RegexQuery, allowRules *[]secrets.AllowRule) (isSecretRet bool, groups [][]string) {
if secrets.IsAllowRule(line, rule, *allowRules) {
return false, [][]string{}
}
groups = rule.Regex.FindAllStringSubmatch(line, -1)
for _, group := range groups {
splitedText := strings.Split(line, "\n")
maxSplit := -1
for i, splited := range splitedText {
if len(groups) < rule.Multiline.DetectLineGroup {
if strings.Contains(splited, group[rule.Multiline.DetectLineGroup]) && i > maxSplit {
maxSplit = i
}
}
}
if maxSplit == -1 {
continue
}
secret, newGroups := isSecret(strings.Join(append(splitedText[:maxSplit], splitedText[maxSplit+1:]...), "\n"), rule, allowRules)
if !secret {
continue
}
groups = append(groups, newGroups...)
}
if len(groups) > 0 {
return true, groups
}
return false, [][]string{}
}
// Package scan implements functions and helpers to ensure the proper scan of the specified files
package scan
import (
"context"
"os"
"github.com/Checkmarx/kics/v2/assets"
"github.com/Checkmarx/kics/v2/pkg/engine"
"github.com/Checkmarx/kics/v2/pkg/engine/provider"
"github.com/Checkmarx/kics/v2/pkg/engine/secrets"
"github.com/Checkmarx/kics/v2/pkg/engine/source"
"github.com/Checkmarx/kics/v2/pkg/kics"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/Checkmarx/kics/v2/pkg/parser"
ansibleConfigParser "github.com/Checkmarx/kics/v2/pkg/parser/ansible/ini/config"
ansibleHostsParser "github.com/Checkmarx/kics/v2/pkg/parser/ansible/ini/hosts"
bicepParser "github.com/Checkmarx/kics/v2/pkg/parser/bicep"
buildahParser "github.com/Checkmarx/kics/v2/pkg/parser/buildah"
dockerParser "github.com/Checkmarx/kics/v2/pkg/parser/docker"
protoParser "github.com/Checkmarx/kics/v2/pkg/parser/grpc"
jsonParser "github.com/Checkmarx/kics/v2/pkg/parser/json"
terraformParser "github.com/Checkmarx/kics/v2/pkg/parser/terraform"
yamlParser "github.com/Checkmarx/kics/v2/pkg/parser/yaml"
"github.com/Checkmarx/kics/v2/pkg/resolver"
"github.com/Checkmarx/kics/v2/pkg/resolver/helm"
"github.com/Checkmarx/kics/v2/pkg/scanner"
"github.com/rs/zerolog/log"
)
// Results represents a result generated by a single scan
type Results struct {
Results []model.Vulnerability
ExtractedPaths provider.ExtractedPath
Files model.FileMetadatas
FailedQueries map[string]error
}
type executeScanParameters struct {
services []*kics.Service
inspector *engine.Inspector
extractedPaths provider.ExtractedPath
}
func (c *Client) initScan(ctx context.Context) (*executeScanParameters, error) {
progressBar := c.ProBarBuilder.BuildCircle("Preparing Scan Assets: ")
go progressBar.Start()
extractedPaths, err := c.prepareAndAnalyzePaths(ctx)
if err != nil {
log.Err(err)
return nil, err
}
if len(extractedPaths.Path) == 0 {
return nil, nil
}
paramsPlatforms := c.ScanParams.Platform
useDifferentPlatformQueries(¶msPlatforms)
querySource := source.NewFilesystemSource(
c.ScanParams.QueriesPath,
paramsPlatforms,
c.ScanParams.CloudProvider,
c.ScanParams.LibrariesPath,
c.ScanParams.ExperimentalQueries)
queryFilter := c.createQueryFilter()
inspector, err := engine.NewInspector(ctx,
querySource,
engine.DefaultVulnerabilityBuilder,
c.Tracker,
queryFilter,
c.ExcludeResultsMap,
c.ScanParams.QueryExecTimeout,
c.ScanParams.UseOldSeverities,
true,
c.ScanParams.ParallelScanFlag,
c.ScanParams.KicsComputeNewSimID,
)
if err != nil {
return nil, err
}
secretsRegexRulesContent, err := getSecretsRegexRules(c.ScanParams.SecretsRegexesPath)
if err != nil {
return nil, err
}
isCustomSecretsRegexes := c.ScanParams.SecretsRegexesPath != ""
secretsInspector, err := secrets.NewInspector(
ctx,
c.ExcludeResultsMap,
c.Tracker,
queryFilter,
c.ScanParams.DisableSecrets,
c.ScanParams.QueryExecTimeout,
secretsRegexRulesContent,
isCustomSecretsRegexes,
)
if err != nil {
log.Err(err)
return nil, err
}
services, err := c.createService(
inspector,
secretsInspector,
extractedPaths.Path,
c.Tracker,
c.Storage,
querySource,
)
if err != nil {
log.Err(err)
return nil, err
}
if err := progressBar.Close(); err != nil {
log.Debug().Msgf("Failed to close progress bar: %s", err.Error())
}
return &executeScanParameters{
services: services,
inspector: inspector,
extractedPaths: extractedPaths,
}, nil
}
func (c *Client) executeScan(ctx context.Context) (*Results, error) {
executeScanParameters, err := c.initScan(ctx)
if err != nil {
log.Err(err)
return nil, err
}
if executeScanParameters == nil {
return nil, nil
}
if err = scanner.PrepareAndScan(
ctx,
c.ScanParams.ScanID, c.ScanParams.OpenAPIResolveReferences, c.ScanParams.MaxResolverDepth, *c.ProBarBuilder,
executeScanParameters.services); err != nil {
log.Err(err)
return nil, err
}
failedQueries := executeScanParameters.inspector.GetFailedQueries()
results, err := c.Storage.GetVulnerabilities(ctx, c.ScanParams.ScanID)
if err != nil {
log.Err(err)
return nil, err
}
files, err := c.Storage.GetFiles(ctx, c.ScanParams.ScanID)
if err != nil {
log.Err(err)
return nil, err
}
return &Results{
Results: results,
ExtractedPaths: executeScanParameters.extractedPaths,
Files: files,
FailedQueries: failedQueries,
}, nil
}
// useDifferentPlatformQueries ensures that scanning bicep also loads the azureresourcemanager queries
func useDifferentPlatformQueries(platforms *[]string) {
hasBicep := false
hasARM := false
for _, platform := range *platforms {
if platform == "bicep" {
hasBicep = true
}
if platform == "azureresourcemanager" {
hasARM = true
}
if hasARM && hasBicep {
break
}
}
if hasBicep && !hasARM {
*platforms = append(*platforms, "azureresourcemanager")
}
}
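// A minimal sketch, assuming the same package: scanning bicep implies loading
// the azureresourcemanager queries as well.
func exampleUseDifferentPlatformQueries() {
platforms := []string{"bicep"}
useDifferentPlatformQueries(&platforms)
// platforms == ["bicep", "azureresourcemanager"]
_ = platforms
}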
func getExcludeResultsMap(excludeResults []string) map[string]bool {
excludeResultsMap := make(map[string]bool)
for _, er := range excludeResults {
excludeResultsMap[er] = true
}
return excludeResultsMap
}
func getSecretsRegexRules(regexRulesPath string) (regexRulesContent string, err error) {
if regexRulesPath != "" {
b, err := os.ReadFile(regexRulesPath)
if err != nil {
return regexRulesContent, err
}
regexRulesContent = string(b)
} else {
regexRulesContent = assets.SecretsQueryRegexRulesJSON
}
return regexRulesContent, nil
}
func (c *Client) createQueryFilter() *source.QueryInspectorParameters {
excludeQueries := source.ExcludeQueries{
ByIDs: c.ScanParams.ExcludeQueries,
ByCategories: c.ScanParams.ExcludeCategories,
BySeverities: c.ScanParams.ExcludeSeverities,
}
includeQueries := source.IncludeQueries{
ByIDs: c.ScanParams.IncludeQueries,
}
queryFilter := source.QueryInspectorParameters{
IncludeQueries: includeQueries,
ExcludeQueries: excludeQueries,
ExperimentalQueries: c.ScanParams.ExperimentalQueries,
InputDataPath: c.ScanParams.InputData,
BomQueries: c.ScanParams.BillOfMaterials,
}
return &queryFilter
}
func (c *Client) createService(
inspector *engine.Inspector,
secretsInspector *secrets.Inspector,
paths []string,
t kics.Tracker,
store kics.Storage,
querySource *source.FilesystemSource) ([]*kics.Service, error) {
filesSource, err := c.getFileSystemSourceProvider(paths)
if err != nil {
return nil, err
}
combinedParser, err := parser.NewBuilder().
Add(&jsonParser.Parser{}).
Add(&yamlParser.Parser{}).
Add(terraformParser.NewDefaultWithVarsPath(c.ScanParams.TerraformVarsPath)).
Add(&bicepParser.Parser{}).
Add(&dockerParser.Parser{}).
Add(&protoParser.Parser{}).
Add(&buildahParser.Parser{}).
Add(&ansibleConfigParser.Parser{}).
Add(&ansibleHostsParser.Parser{}).
Build(querySource.Types, querySource.CloudProviders)
if err != nil {
return nil, err
}
// combinedResolver to be used to resolve files and templates
combinedResolver, err := resolver.NewBuilder().
Add(&helm.Resolver{}).
Build()
if err != nil {
return nil, err
}
services := make([]*kics.Service, 0, len(combinedParser))
for _, parser := range combinedParser {
services = append(
services,
&kics.Service{
SourceProvider: filesSource,
Storage: store,
Parser: parser,
Inspector: inspector,
SecretsInspector: secretsInspector,
Tracker: t,
Resolver: combinedResolver,
MaxFileSize: c.ScanParams.MaxFileSizeFlag,
},
)
}
return services, nil
}
func (c *Client) getFileSystemSourceProvider(paths []string) (*provider.FileSystemSourceProvider, error) {
var excludePaths []string
if c.ScanParams.PayloadPath != "" {
excludePaths = append(excludePaths, c.ScanParams.PayloadPath)
}
if len(c.ScanParams.ExcludePaths) > 0 {
excludePaths = append(excludePaths, c.ScanParams.ExcludePaths...)
}
filesSource, err := provider.NewFileSystemSourceProvider(paths, excludePaths)
if err != nil {
return nil, err
}
return filesSource, nil
}
package scan
import (
"context"
"fmt"
"os"
"path/filepath"
"regexp"
"strings"
consoleHelpers "github.com/Checkmarx/kics/v2/internal/console/helpers"
"github.com/Checkmarx/kics/v2/pkg/analyzer"
"github.com/Checkmarx/kics/v2/pkg/engine/provider"
"github.com/Checkmarx/kics/v2/pkg/model"
consolePrinter "github.com/Checkmarx/kics/v2/pkg/printer"
"github.com/Checkmarx/kics/v2/pkg/utils"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
var (
kuberneterRegex = regexp.MustCompile(`^kuberneter::`)
)
func (c *Client) prepareAndAnalyzePaths(ctx context.Context) (provider.ExtractedPath, error) {
queryExPaths, libExPaths, err := c.preparePaths()
if err != nil {
return provider.ExtractedPath{}, err
}
regularPaths, kuberneterPaths := extractPathType(c.ScanParams.Path)
kuberneterExPaths, err := provider.GetKuberneterSources(ctx, kuberneterPaths, c.ScanParams.OutputPath)
if err != nil {
return provider.ExtractedPath{}, err
}
regularExPaths, err := provider.GetSources(regularPaths)
if err != nil {
return provider.ExtractedPath{}, err
}
allPaths := combinePaths(kuberneterExPaths, regularExPaths, queryExPaths, libExPaths)
if len(allPaths.Path) == 0 {
return provider.ExtractedPath{}, nil
}
log.Info().Msgf("Total files in the project: %d", getTotalFiles(allPaths.Path))
a := &analyzer.Analyzer{
Paths: allPaths.Path,
Types: c.ScanParams.Platform,
ExcludeTypes: c.ScanParams.ExcludePlatform,
Exc: c.ScanParams.ExcludePaths,
GitIgnoreFileName: ".gitignore",
ExcludeGitIgnore: c.ScanParams.ExcludeGitIgnore,
MaxFileSize: c.ScanParams.MaxFileSizeFlag,
}
pathTypes, errAnalyze := analyzePaths(a)
if errAnalyze != nil {
return provider.ExtractedPath{}, errAnalyze
}
if len(pathTypes.Types) == 0 {
return provider.ExtractedPath{}, nil
}
c.ScanParams.Platform = pathTypes.Types
c.ScanParams.ExcludePaths = pathTypes.Exc
return allPaths, nil
}
func combinePaths(kuberneter, regular, query, library provider.ExtractedPath) provider.ExtractedPath {
var combinedPaths provider.ExtractedPath
paths := make([]string, 0)
combinedPathsEx := make(map[string]model.ExtractedPathObject)
paths = append(paths, kuberneter.Path...)
paths = append(paths, regular.Path...)
combinedPaths.Path = paths
for k, v := range regular.ExtractionMap {
combinedPathsEx[k] = v
}
for k, v := range kuberneter.ExtractionMap {
combinedPathsEx[k] = v
}
for k, v := range query.ExtractionMap {
combinedPathsEx[k] = v
}
for k, v := range library.ExtractionMap {
combinedPathsEx[k] = v
}
combinedPaths.ExtractionMap = combinedPathsEx
return combinedPaths
}
func (c *Client) preparePaths() (queryExtPath, libExtPath provider.ExtractedPath, err error) {
queryExtPath, err = c.GetQueryPath()
if err != nil {
return provider.ExtractedPath{}, provider.ExtractedPath{}, err
}
libExtPath, err = c.getLibraryPath()
if err != nil {
return queryExtPath, provider.ExtractedPath{}, err
}
return queryExtPath, libExtPath, nil
}
// GetQueryPath gets all the query paths
func (c *Client) GetQueryPath() (provider.ExtractedPath, error) {
queriesPath := make([]string, 0)
extPath := provider.ExtractedPath{
Path: []string{},
ExtractionMap: make(map[string]model.ExtractedPathObject),
}
if c.ScanParams.ChangedDefaultQueryPath {
for _, queryPath := range c.ScanParams.QueriesPath {
extractedPath, errExtractQueries := resolvePath(queryPath, "queries-path")
if errExtractQueries != nil {
return extPath, errExtractQueries
}
extPath = extractedPath
queriesPath = append(queriesPath, extractedPath.Path[0])
}
} else {
log.Debug().Msgf("Looking for queries in executable path and in current work directory")
defaultQueryPath, errDefaultQueryPath := consoleHelpers.GetDefaultQueryPath(c.ScanParams.QueriesPath[0])
if errDefaultQueryPath != nil {
return extPath, errors.Wrap(errDefaultQueryPath, "unable to find queries")
}
queriesPath = append(queriesPath, defaultQueryPath)
}
c.ScanParams.QueriesPath = queriesPath
return extPath, nil
}
func (c *Client) getLibraryPath() (provider.ExtractedPath, error) {
extPath := provider.ExtractedPath{
Path: []string{},
ExtractionMap: make(map[string]model.ExtractedPathObject),
}
if c.ScanParams.ChangedDefaultLibrariesPath {
extractedLibrariesPath, errExtractLibraries := resolvePath(c.ScanParams.LibrariesPath, "libraries-path")
if errExtractLibraries != nil {
return extPath, errExtractLibraries
}
extPath = extractedLibrariesPath
c.ScanParams.LibrariesPath = extractedLibrariesPath.Path[0]
}
return extPath, nil
}
func resolvePath(flagContent, flagName string) (provider.ExtractedPath, error) {
extractedPath, errExtractPath := provider.GetSources([]string{flagContent})
if errExtractPath != nil {
return extractedPath, errExtractPath
}
if len(extractedPath.Path) != 1 {
return extractedPath, fmt.Errorf("could not find a valid path (--%s) on %s", flagName, flagContent)
}
log.Debug().Msgf("Trying to load path (--%s) from %s", flagName, flagContent)
return extractedPath, nil
}
// analyzePaths will analyze the paths to scan to determine which types of queries to load
// and which files should be ignored; it then updates the types and exclude flag variables
// with the results found
func analyzePaths(a *analyzer.Analyzer) (model.AnalyzedPaths, error) {
var err error
var pathsFlag model.AnalyzedPaths
excluded := make([]string, 0)
pathsFlag, err = analyzer.Analyze(a)
if err != nil {
log.Err(err)
return model.AnalyzedPaths{}, err
}
logLoadingQueriesType(pathsFlag.Types)
excluded = append(excluded, a.Exc...)
excluded = append(excluded, pathsFlag.Exc...)
pathsFlag.Exc = excluded
return pathsFlag, nil
}
func logLoadingQueriesType(types []string) {
if len(types) == 0 {
log.Info().Msg("No queries were loaded")
return
}
log.Info().Msgf("Loading queries of type: %s", strings.Join(types, ", "))
}
// extractPathType separates regular paths from kuberneter:: paths, stripping the prefix from the latter
func extractPathType(paths []string) (regular, kuberneter []string) {
for _, path := range paths {
if kuberneterRegex.MatchString(path) {
kuberneter = append(kuberneter, kuberneterRegex.ReplaceAllString(path, ""))
} else {
regular = append(regular, path)
}
}
return
}
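// A minimal sketch of extractPathType, assuming the same package: the
// "kuberneter::" prefix routes a path to the kuberneter provider and is
// stripped along the way; the target string here is illustrative.
func exampleExtractPathType() {
regular, kuberneter := extractPathType([]string{
"./terraform",
"kuberneter::aws:eks:us-east-1",
})
// regular == ["./terraform"], kuberneter == ["aws:eks:us-east-1"]
_, _ = regular, kuberneter
}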
func deleteExtractionFolder(extractionMap map[string]model.ExtractedPathObject) {
for extractionFile := range extractionMap {
if strings.Contains(extractionFile, "kics-extract-kuberneter") {
continue
}
err := os.RemoveAll(extractionFile)
if err != nil {
log.Err(err).Msg("Failed to delete KICS extraction folder")
}
}
}
func contributionAppeal(customPrint *consolePrinter.Printer, queriesPath []string) {
if usingCustomQueries(queriesPath) {
msg := "\nAre you using a custom query? If so, feel free to contribute to KICS!\n"
contributionPage := "Check out how to do it: https://github.com/Checkmarx/kics/blob/master/docs/CONTRIBUTING.md\n"
output := customPrint.ContributionMessage.Sprintf("%s", msg+contributionPage)
fmt.Println(output)
}
}
func usingCustomQueries(queriesPath []string) bool {
return !utils.ContainsInString(filepath.Join("assets", "queries"), queriesPath)
}
// printVersionCheck prints and logs a warning if the running KICS is not the latest version
func printVersionCheck(customPrint *consolePrinter.Printer, s *model.Summary) {
if !s.LatestVersion.Latest {
message := fmt.Sprintf("A new version 'v%s' of KICS is available, please consider updating", s.LatestVersion.LatestVersionTag)
fmt.Println(customPrint.VersionMessage.Sprintf("%s", message))
log.Warn().Msgf("%s", message)
}
}
func getTotalFiles(paths []string) int {
files := 0
for _, path := range paths {
if err := filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
files++
}
return nil
}); err != nil {
log.Error().Msgf("failed to walk path %s: %s", path, err)
}
}
return files
}
package scanner
import (
"context"
"fmt"
"sync"
"github.com/Checkmarx/kics/v2/internal/metrics"
"github.com/Checkmarx/kics/v2/pkg/kics"
"github.com/Checkmarx/kics/v2/pkg/progress"
)
type serviceSlice []*kics.Service
// PrepareAndScan prepares the sources of every service and, once preparation succeeds, starts the scan
func PrepareAndScan(
ctx context.Context,
scanID string,
openAPIResolveReferences bool,
maxResolverDepth int,
proBarBuilder progress.PbBuilder,
services serviceSlice,
) error {
metrics.Metric.Start("prepare_sources")
var wg sync.WaitGroup
wgDone := make(chan bool)
errCh := make(chan error)
var wgProg sync.WaitGroup
for _, service := range services {
wg.Add(1)
go service.PrepareSources(ctx, scanID, openAPIResolveReferences, maxResolverDepth, &wg, errCh)
}
go func() {
defer func() {
close(wgDone)
}()
wg.Wait()
wgProg.Wait()
}()
select {
case <-wgDone:
metrics.Metric.Stop()
err := StartScan(ctx, scanID, proBarBuilder, services)
if err != nil {
return err
}
break
case err := <-errCh:
close(errCh)
return err
}
return nil
}
// StartScan will run concurrent scans by parser
func StartScan(ctx context.Context, scanID string,
proBarBuilder progress.PbBuilder, services serviceSlice) error {
defer metrics.Metric.Stop()
metrics.Metric.Start("start_scan")
var wg sync.WaitGroup
wgDone := make(chan bool)
errCh := make(chan error)
currentQuery := make(chan int64, 1)
var wgProg sync.WaitGroup
total := services.GetQueriesLength()
if total != 0 {
startProgressBar(total, &wgProg, currentQuery, proBarBuilder)
}
for _, service := range services {
wg.Add(1)
go service.StartScan(ctx, scanID, errCh, &wg, currentQuery)
}
go func() {
defer func() {
close(currentQuery)
close(wgDone)
fmt.Println("\r")
}()
wg.Wait()
wgProg.Wait()
}()
select {
case <-wgDone:
break
case err := <-errCh:
close(errCh)
return err
}
return nil
}
// GetQueriesLength returns the total number of queries across all services
func (s serviceSlice) GetQueriesLength() int {
count := 0
for _, service := range s {
count += service.Inspector.LenQueriesByPlat(service.Parser.Platform)
count += service.SecretsInspector.GetQueriesLength()
}
return count
}
func startProgressBar(total int, wg *sync.WaitGroup, progressChannel chan int64, proBarBuilder progress.PbBuilder) {
wg.Add(1)
progressBar := proBarBuilder.BuildCounter("Executing queries: ", total, wg, progressChannel)
go progressBar.Start()
}
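// A minimal, self-contained sketch of the fan-out pattern PrepareAndScan and
// StartScan use above: a WaitGroup is drained into a done channel so that a
// single select can race normal completion against the first worker error.
package main
import (
"fmt"
"sync"
)
func main() {
var wg sync.WaitGroup
wgDone := make(chan bool)
errCh := make(chan error)
for i := 0; i < 3; i++ {
wg.Add(1)
go func() {
defer wg.Done()
// a real worker would do its scan here and send failures to errCh
}()
}
go func() {
defer close(wgDone)
wg.Wait()
}()
select {
case <-wgDone:
fmt.Println("all workers finished")
case err := <-errCh:
fmt.Println("first failure:", err)
}
}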
package utils
import (
"regexp"
"github.com/rs/zerolog/log"
vault "github.com/sosedoff/ansible-vault-go"
)
// DecryptAnsibleVault verifies if the fileContent is encrypted by ansible-vault. If so, the function decrypts it
func DecryptAnsibleVault(fileContent []byte, secret string) []byte {
match, err := regexp.MatchString(`^\s*\$ANSIBLE_VAULT.*`, string(fileContent))
if err != nil {
return fileContent
}
if secret != "" && match {
content, err := vault.Decrypt(string(fileContent), secret)
if err == nil {
log.Info().Msg("Decrypting Ansible Vault file")
fileContent = []byte(content)
}
}
return fileContent
}
package utils
import (
"reflect"
"strings"
)
// Contains checks whether list (a slice or array) contains target
func Contains(target, list interface{}) bool {
listVal := reflect.ValueOf(list)
if listVal.Kind() == reflect.Slice || listVal.Kind() == reflect.Array {
for i := 0; i < listVal.Len(); i++ {
if listVal.Index(i).Interface() == target {
return true
}
}
}
return false
}
// ContainsInString verifies if some string in list contains the specified target
func ContainsInString(target string, list []string) bool {
for i := 0; i < len(list); i++ {
if strings.Contains(list[i], target) {
return true
}
}
return false
}
package utils
import (
"bufio"
"bytes"
"fmt"
"os"
"path/filepath"
"strings"
"github.com/rs/zerolog/log"
"golang.org/x/tools/godoc/util"
)
// GetExtension gets the extension of a file path
func GetExtension(path string) (string, error) {
targets := []string{"Dockerfile", "tfvars"}
// Get file information
fileInfo, err := os.Stat(path)
if err != nil {
return "", fmt.Errorf("file %s not found", path)
}
if fileInfo.IsDir() {
return "", fmt.Errorf("the path %s is a directory", path)
}
ext := filepath.Ext(path)
if ext == "" {
base := filepath.Base(path)
if Contains(base, targets) {
ext = base
} else {
isText, err := isTextFile(path)
if err != nil {
return "", err
}
if isText {
if readPossibleDockerFile(path) {
ext = "possibleDockerfile"
}
}
}
}
return ext, nil
}
func readPossibleDockerFile(path string) bool {
path = filepath.Clean(path)
if strings.HasSuffix(path, "gitignore") {
return true
}
file, err := os.Open(path)
if err != nil {
return false
}
defer file.Close()
// Create a scanner to read the file line by line
scanner := bufio.NewScanner(file)
// Read lines from the file
for scanner.Scan() {
if strings.HasPrefix(scanner.Text(), "FROM") {
return true
} else if strings.HasPrefix(scanner.Text(), "#") {
continue
} else {
return false
}
}
return false
}
func isTextFile(path string) (bool, error) {
info, err := os.Stat(path)
if err != nil {
log.Error().Msgf("failed to get file info: %s", err)
return false, err
}
if info.IsDir() {
return false, nil
}
content, err := os.ReadFile(filepath.Clean(path))
if err != nil {
log.Error().Msgf("failed to analyze file: %s", err)
return false, err
}
content = bytes.Replace(content, []byte("\r"), []byte(""), -1)
isText := util.IsText(content)
return isText, nil
}
// Package utils contains various utility functions to use in other packages
package utils
import (
"bufio"
"os"
"path/filepath"
"github.com/rs/zerolog/log"
)
// LineCounter gets the number of lines of a given file
func LineCounter(path string) (int, error) {
file, err := os.Open(filepath.Clean(path))
if err != nil {
return 0, err
}
defer func() {
if err := file.Close(); err != nil {
log.Err(err).Msgf("failed to close '%s'", filepath.Clean(path))
}
}()
scanner := bufio.NewScanner(file)
lineCount := 0
for scanner.Scan() {
lineCount++
}
if err := scanner.Err(); err != nil {
return 0, err
}
return lineCount, nil
}
package utils
import "sort"
// SortedKeys returns a sorted slice with all map keys
func SortedKeys(mapToSort map[string]string) []string {
keys := make([]string, 0, len(mapToSort))
for k := range mapToSort {
keys = append(keys, k)
}
sort.Strings(keys)
return keys
}
// MergeMaps merges map2 into map1
func MergeMaps(map1, map2 map[string]interface{}) {
for key, value := range map2 {
map1[key] = value
}
}
package utils
import (
"fmt"
"github.com/rs/zerolog/log"
)
// HandlePanic logs the recovered panic value alongside the given error message
func HandlePanic(r any, errMessage string) {
err := fmt.Errorf("panic: %v", r)
log.Err(err).Msg(errMessage)
}
package utils
import (
"os"
"strconv"
"sync"
"time"
)
// ======== Golang way to create random number for tmp dir naming =============
var rand uint32
var randmu sync.Mutex
const tempDirFormat = 1e9
func reseed() uint32 {
return uint32(int32(time.Now().UnixNano()) + int32(os.Getpid())) //nolint:gosec
}
// NextRandom returns a pseudo-random, nine-digit numeric string used for temp dir naming
func NextRandom() string {
randmu.Lock()
r := rand
if r == 0 {
r = reseed()
}
r = r*1664525 + 1013904223 // constants from Numerical Recipes
rand = r
randmu.Unlock()
return strconv.Itoa(int(tempDirFormat + r%tempDirFormat))[1:]
}
// ==============================================================================
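// A minimal sketch of NextRandom in use for temp-dir naming: each call yields
// a nine-digit, zero-padded string from the linear congruential sequence above.
package main
import (
"fmt"
"os"
"path/filepath"
"github.com/Checkmarx/kics/v2/pkg/utils"
)
func main() {
dir := filepath.Join(os.TempDir(), "kics-extract-"+utils.NextRandom())
fmt.Println(dir) // e.g. /tmp/kics-extract-042653981 (value varies per run)
}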
package utils
import "strings"
// SplitLines splits the document by line
func SplitLines(content string) *[]string {
text := strings.ReplaceAll(content, "\r", "")
split := strings.Split(text, "\n")
return &split
}
// Package utils contains various utility functions to use in other packages
package utils
import "regexp"
// ValidateUUID checks if the given id is a valid UUID using a regular expression
func ValidateUUID(id string) bool {
uuidRegex := "^[0-9a-fA-F]{8}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{4}\\b-[0-9a-fA-F]{12}$"
if matched, _ := regexp.MatchString(uuidRegex, id); matched {
return true
}
return false
}
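// A minimal sketch of ValidateUUID: hyphen placement and hex digits are
// enforced; case is ignored.
package main
import (
"fmt"
"github.com/Checkmarx/kics/v2/pkg/utils"
)
func main() {
fmt.Println(utils.ValidateUUID("123e4567-e89b-12d3-a456-426614174000")) // true
fmt.Println(utils.ValidateUUID("not-a-uuid")) // false
}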
package test
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"strings"
"github.com/Checkmarx/kics/v2/pkg/model"
"github.com/spf13/cobra"
)
const (
// ValidUUIDRegex is a constant representing a regular expression rule to validate UUID string
ValidUUIDRegex = `(?i)^[0-9a-f]{8}-[0-9a-f]{4}-[0-5][0-9a-f]{3}-[089ab][0-9a-f]{3}-[0-9a-f]{12}$`
positive = "positive.tf"
positiveYamlSonar = "../../../test/fixtures/test_critical_custom_queries/amazon_mq_broker_encryption_disabled/test/positive1.yaml"
positiveYaml = "test/fixtures/test_critical_custom_queries/amazon_mq_broker_encryption_disabled/test/positive1.yaml"
)
type execute func() error
// CaptureOutput redirects stdout into a buffer while funcToExec runs, then returns the captured output as a string
func CaptureOutput(funcToExec execute) (string, error) {
old := os.Stdout
r, w, _ := os.Pipe()
os.Stdout = w
err := funcToExec()
outC := make(chan string)
go func() {
var buf bytes.Buffer
if _, errs := io.Copy(&buf, r); errs != nil {
return
}
outC <- buf.String()
}()
if errs := w.Close(); errs != nil {
return "", errs
}
os.Stdout = old
out := <-outC
return out, err
}
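// A minimal sketch of CaptureOutput in use, assuming the same package:
// anything the wrapped function writes to stdout comes back as a string.
func exampleCaptureOutput() {
out, err := CaptureOutput(func() error {
fmt.Println("hello from the scan")
return nil
})
// out == "hello from the scan\n" when err is nil
_, _ = out, err
}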
// CaptureCommandOutput sets the cobra command args, if necessary, then captures the output
func CaptureCommandOutput(cmd *cobra.Command, args []string) (string, error) {
if len(args) > 0 {
cmd.SetArgs(args)
}
return CaptureOutput(cmd.Execute)
}
// ChangeCurrentDir walks up from the current working directory until it reaches the desired directory,
// or fails
func ChangeCurrentDir(desiredDir string) error {
for currentDir, err := os.Getwd(); GetCurrentDirName(currentDir) != desiredDir; currentDir, err = os.Getwd() {
if err == nil {
if err = os.Chdir(".."); err != nil {
fmt.Print(formatCurrentDirError(err))
return errors.New(formatCurrentDirError(err))
}
} else {
return errors.New(formatCurrentDirError(err))
}
}
return nil
}
func formatCurrentDirError(err error) string {
return fmt.Sprintf("change path error = %v", err)
}
// GetCurrentDirName returns the name of the last directory in the given path
func GetCurrentDirName(path string) string {
dirs := strings.Split(path, string(os.PathSeparator))
if dirs[len(dirs)-1] == "" && len(dirs) > 1 {
return dirs[len(dirs)-2]
}
return dirs[len(dirs)-1]
}
// StringifyStruct marshals a struct into indented JSON for pretty printing
func StringifyStruct(v interface{}) (string, error) {
jsonValue, err := json.MarshalIndent(v, "", " ")
if err != nil {
return "", err
}
return string(jsonValue), nil
}
// MapToStringSlice extracts a slice of keys from a map[string]string
func MapToStringSlice(stringKeyMap map[string]string) []string {
keys := make([]string, len(stringKeyMap))
i := 0
for k := range stringKeyMap {
keys[i] = k
i++
}
return keys
}
var queryHigh = model.QueryResult{ //nolint
QueryName: "ALB protocol is HTTP",
QueryID: "de7f5e83-da88-4046-871f-ea18504b1d43",
Description: "ALB protocol is HTTP Description",
DescriptionID: "504b1d43",
CISDescriptionIDFormatted: "testCISID",
CISDescriptionTitle: "testCISTitle",
CISDescriptionTextFormatted: "testCISDescription",
Severity: model.SeverityHigh,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 25,
IssueType: "MissingAttribute",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is missing",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
{
FileName: positive,
Line: 19,
IssueType: "IncorrectValue",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is equal 'HTTP'",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
CWE: "",
}
var queryMedium = model.QueryResult{
QueryName: "AmazonMQ Broker Encryption Disabled",
Description: "AmazonMQ Broker should have Encryption Options defined",
QueryID: "3db3f534-e3a3-487f-88c7-0a9fbf64b702",
CloudProvider: "AWS",
Severity: model.SeverityMedium,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 1,
IssueType: "MissingAttribute",
SimilarityID: "6b76f7a507e200bb2c73468ec9649b099da96a4efa0f49a3bdc88e12476d8ee7",
SearchKey: "resource.aws_mq_broker[positive1]",
KeyExpectedValue: "resource.aws_mq_broker[positive1].encryption_options is defined",
KeyActualValue: "resource.aws_mq_broker[positive1].encryption_options is not defined",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
CWE: "",
}
var queryMedium2 = model.QueryResult{
QueryName: "GuardDuty Detector Disabled",
QueryID: "704dadd3-54fc-48ac-b6a0-02f170011473",
Severity: model.SeverityMedium,
Files: []model.VulnerableFile{
{
FileName: filepath.Join("assets", "queries", "terraform", "aws", "guardduty_detector_disabled", "test", "positive.tf"),
Line: 2,
IssueType: "IncorrectValue",
SearchKey: "aws_guardduty_detector[positive1].enable",
KeyExpectedValue: "GuardDuty Detector should be Enabled",
KeyActualValue: "GuardDuty Detector is not Enabled",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
Platform: "Terraform",
Description: "Make sure that Amazon GuardDuty is Enabled",
CWE: "",
}
var queryInfo = model.QueryResult{
QueryName: "Resource Not Using Tags",
QueryID: "e38a8e0a-b88b-4902-b3fe-b0fcb17d5c10",
Severity: model.SeverityInfo,
Files: []model.VulnerableFile{
{
FileName: filepath.Join("assets", "queries", "terraform", "aws", "guardduty_detector_disabled", "test", "negative.tf"),
Line: 1,
IssueType: "MissingAttribute",
SearchKey: "aws_guardduty_detector[{{negative1}}]",
KeyExpectedValue: "aws_guardduty_detector[{{negative1}}].tags is defined and not null",
KeyActualValue: "aws_guardduty_detector[{{negative1}}].tags is undefined or null",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
{
FileName: filepath.Join("assets", "queries", "terraform", "aws", "guardduty_detector_disabled", "test", "positive.tf"),
Line: 1,
IssueType: "MissingAttribute",
SearchKey: "aws_guardduty_detector[{{positive1}}]",
KeyExpectedValue: "aws_guardduty_detector[{{positive1}}].tags is defined and not null",
KeyActualValue: "aws_guardduty_detector[{{positive1}}].tags is undefined or null",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
Platform: "Terraform",
Description: "AWS services resource tags are an essential part of managing components",
}
var queryHighExperimental = model.QueryResult{
QueryName: "ALB protocol is HTTP",
QueryID: "de7f5e83-da88-4046-871f-ea18504b1d43",
Description: "ALB protocol is HTTP Description",
DescriptionID: "504b1d43",
CISDescriptionIDFormatted: "testCISID",
CISDescriptionTitle: "testCISTitle",
CISDescriptionTextFormatted: "testCISDescription",
Severity: model.SeverityHigh,
Experimental: true,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 25,
IssueType: "MissingAttribute",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is missing",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
{
FileName: positive,
Line: 19,
IssueType: "IncorrectValue",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is equal 'HTTP'",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
}
var queryMediumCycloneCWE = model.QueryResult{
QueryName: "GuardDuty Detector Disabled",
QueryID: "704dadd3-54fc-48ac-b6a0-02f170011473",
Severity: model.SeverityMedium,
Files: []model.VulnerableFile{
{
FileName: filepath.Join("assets", "queries", "terraform", "aws", "guardduty_detector_disabled", "test", "negative.tf"),
Line: 2,
IssueType: "IncorrectValue",
SearchKey: "aws_guardduty_detector[negative1].enable",
KeyExpectedValue: "GuardDuty Detector should be Enabled",
KeyActualValue: "GuardDuty Detector is not Enabled",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
Platform: "Terraform",
Description: "Make sure that Amazon GuardDuty is Enabled",
CWE: "22",
}
var queryMediumCWE = model.QueryResult{
QueryName: "AmazonMQ Broker Encryption Disabled",
Description: "AmazonMQ Broker should have Encryption Options defined",
QueryID: "3db3f534-e3a3-487f-88c7-0a9fbf64b702",
CloudProvider: "AWS",
Severity: model.SeverityMedium,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 1,
IssueType: "MissingAttribute",
SimilarityID: "6b76f7a507e200bb2c73468ec9649b099da96a4efa0f49a3bdc88e12476d8ee7",
SearchKey: "resource.aws_mq_broker[positive1]",
KeyExpectedValue: "resource.aws_mq_broker[positive1].encryption_options is defined",
KeyActualValue: "resource.aws_mq_broker[positive1].encryption_options is not defined",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
CWE: "22",
}
var queryHighCWE = model.QueryResult{ //nolint
QueryName: "AMI Not Encrypted",
QueryID: "97707503-a22c-4cd7-b7c0-f088fa7cf830",
Description: "AWS AMI Encryption is not enabled",
DescriptionID: "a4342f0",
CISDescriptionIDFormatted: "testCISID",
CISDescriptionTitle: "testCISTitle",
CISDescriptionTextFormatted: "testCISDescription",
Severity: model.SeverityHigh,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 30,
IssueType: "MissingAttribute",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is missing",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
{
FileName: positive,
Line: 35,
IssueType: "IncorrectValue",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is equal 'HTTP'",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
CWE: "22",
}
var queryCritical = model.QueryResult{
QueryName: "AmazonMQ Broker Encryption Disabled",
QueryID: "316278b3-87ac-444c-8f8f-a733a28da609",
Description: "AmazonMQ Broker should have Encryption Options defined",
DescriptionID: "c5d562d9",
CISDescriptionIDFormatted: "testCISID",
CISDescriptionTitle: "testCISTitle",
CISDescriptionTextFormatted: "testCISDescription",
CloudProvider: "AWS",
Severity: model.SeverityCritical,
Files: []model.VulnerableFile{
{
FileName: positiveYaml,
Line: 6,
IssueType: "MissingAttribute",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is missing",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
}
var queryLowCICDCloudProvider = model.QueryResult{
QueryName: "Unpinned Actions Full Length Commit SHA",
QueryID: "555ab8f9-2001-455e-a077-f2d0f41e2fb9",
Description: "Pinning an action to a full length commit SHA is currently the only way to use an action as an immutable release.",
DescriptionID: "9cb8402d",
Platform: "CICD",
CloudProvider: "COMMON",
Severity: model.SeverityLow,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 12,
IssueType: "IncorrectValue",
SearchKey: "uses={{thollander/actions-comment-pull-request@v2}}",
KeyExpectedValue: "Action is not pinned to a full length commit SHA.",
KeyActualValue: "Action pinned to a full length commit SHA.",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
}
var queryHighPasswordsAndSecrets = model.QueryResult{
QueryName: "Passwords And Secrets - AWS Secret Key",
QueryID: "83ab47ff-381d-48cd-bac5-fb32222f54af",
Description: "Query to find passwords and secrets in infrastructure code.",
DescriptionID: "d69d8a89",
Platform: "Common",
CloudProvider: "common",
Severity: model.SeverityHigh,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 15,
IssueType: "RedundantAttribute",
SearchKey: "",
KeyExpectedValue: "Hardcoded secret key should not appear in source",
KeyActualValue: "Hardcoded secret key appears in source",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
}
var queryCriticalSonar = model.QueryResult{
QueryName: "AmazonMQ Broker Encryption Disabled",
QueryID: "316278b3-87ac-444c-8f8f-a733a28da609",
Description: "AmazonMQ Broker should have Encryption Options defined",
DescriptionID: "c5d562d9",
CISDescriptionIDFormatted: "testCISID",
CISDescriptionTitle: "testCISTitle",
CISDescriptionTextFormatted: "testCISDescription",
CloudProvider: "AWS",
Severity: model.SeverityCritical,
Files: []model.VulnerableFile{
{
FileName: positiveYamlSonar,
Line: 6,
IssueType: "MissingAttribute",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is missing",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
}
var SummaryMockCriticalSonar = model.Summary{
Counters: model.Counters{
ScannedFiles: 2,
ParsedFiles: 2,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryCriticalSonar,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 0,
model.SeverityHigh: 0,
model.SeverityCritical: 1,
},
TotalCounter: 1,
},
ScannedPaths: []string{
"./",
},
}
var SummaryMockCritical = model.Summary{
Counters: model.Counters{
ScannedFiles: 2,
ParsedFiles: 2,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryCritical,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 0,
model.SeverityHigh: 0,
model.SeverityCritical: 1,
},
TotalCounter: 1,
},
ScannedPaths: []string{
"./",
},
}
var queryCriticalASFF = model.QueryResult{
QueryName: "AmazonMQ Broker Encryption Disabled",
QueryID: "316278b3-87ac-444c-8f8f-a733a28da609",
Description: "AmazonMQ Broker should have Encryption Options defined",
DescriptionID: "c5d562d9",
CloudProvider: "AWS",
Severity: model.SeverityCritical,
Files: []model.VulnerableFile{
{
FileName: positiveYaml,
Line: 6,
IssueType: "MissingAttribute",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is missing",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
CWE: "22",
}
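// SummaryMockCriticalFullPathASFF a summary with a critical result for ASFF report tests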
var SummaryMockCriticalFullPathASFF = model.Summary{
Counters: model.Counters{
ScannedFiles: 2,
ParsedFiles: 2,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryCriticalASFF,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 0,
model.SeverityHigh: 0,
model.SeverityCritical: 1,
},
TotalCounter: 1,
},
ScannedPaths: []string{
"./",
},
}
// SummaryMock a summary to be used without running kics scan
var SummaryMock = model.Summary{
Counters: model.Counters{
ScannedFiles: 1,
ParsedFiles: 1,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryHigh,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 0,
model.SeverityHigh: 2,
model.SeverityCritical: 0,
},
TotalCounter: 2,
},
ScannedPaths: []string{
"./",
},
}
var queryCriticalCLI = model.QueryResult{
QueryName: "Run Block Injection",
QueryID: "20f14e1a-a899-4e79-9f09-b6a84cd4649b",
Description: "GitHub Actions workflows can be triggered by a variety of events. Every workflow trigger is provided with a GitHub context that contains information about the triggering event, such as which user triggered it, the branch name, and other event context details. Some of this event data, like the base repository name, hash value of a changeset, or pull request number, is unlikely to be controlled or used for injection by the user that triggered the event.", //nolint
DescriptionID: "02044a75",
CISDescriptionIDFormatted: "testCISID",
CISDescriptionTitle: "testCISTitle",
CISDescriptionTextFormatted: "testCISDescription",
Severity: model.SeverityCritical,
Files: []model.VulnerableFile{
{
FileName: positive,
Line: 10,
IssueType: "MissingAttribute",
SearchKey: "aws_alb_listener[front_end].default_action.redirect",
KeyExpectedValue: "'default_action.redirect.protocol' is equal 'HTTPS'",
KeyActualValue: "'default_action.redirect.protocol' is missing",
Value: nil,
VulnLines: &[]model.CodeLine{},
},
},
CWE: "",
}
// SummaryMockCWE a summary to be used with the CWE field populated
var SummaryMockCWE = model.Summary{
Counters: model.Counters{
ScannedFiles: 1,
ParsedFiles: 1,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryHighCWE,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 0,
model.SeverityHigh: 2,
model.SeverityCritical: 0,
},
TotalCounter: 2,
},
ScannedPaths: []string{
"./",
},
}
// SimpleSummaryMockAsff a simple summary to be used with the CWE field populated
var SimpleSummaryMockAsff = model.Summary{
Counters: model.Counters{
ScannedFiles: 1,
ParsedFiles: 1,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryMediumCWE,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 1,
model.SeverityHigh: 0,
model.SeverityCritical: 0,
},
TotalCounter: 1,
},
LatestVersion: model.Version{
Latest: true,
},
}
// ComplexSummaryMock a summary with more results to be used without running kics scan
var ComplexSummaryMock = model.Summary{
Counters: model.Counters{
ScannedFiles: 2,
ParsedFiles: 2,
FailedToScanFiles: 0,
TotalQueries: 4,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryHigh,
queryMedium,
queryHighCWE,
queryCriticalCLI,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 1,
model.SeverityHigh: 2,
model.SeverityCritical: 2,
},
TotalCounter: 5,
},
LatestVersion: model.Version{
Latest: true,
},
}
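// ComplexSummaryMockWithExperimental a summary with experimental query results to be used without running kics scan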
var ComplexSummaryMockWithExperimental = model.Summary{
Counters: model.Counters{
ScannedFiles: 2,
ParsedFiles: 2,
FailedToScanFiles: 0,
TotalQueries: 2,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryHighExperimental,
queryMedium,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 1,
model.SeverityHigh: 2,
model.SeverityCritical: 0,
},
TotalCounter: 3,
},
LatestVersion: model.Version{
Latest: true,
},
}
// ExampleSummaryMock a summary with specific results for CycloneDX report tests
var ExampleSummaryMock = model.Summary{
Counters: model.Counters{
ScannedFiles: 2,
ParsedFiles: 2,
FailedToScanFiles: 0,
TotalQueries: 2,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryInfo,
queryMedium2,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 2,
model.SeverityLow: 0,
model.SeverityMedium: 1,
model.SeverityHigh: 0,
model.SeverityCritical: 0,
},
TotalCounter: 3,
},
ScannedPaths: []string{
"./",
},
}
// ExampleSummaryMockCWE a summary with specific results for CycloneDX report tests with the CWE field populated
var ExampleSummaryMockCWE = model.Summary{
Counters: model.Counters{
ScannedFiles: 1,
ParsedFiles: 1,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryMediumCycloneCWE,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 1,
model.SeverityHigh: 0,
model.SeverityCritical: 0,
},
TotalCounter: 1,
},
ScannedPaths: []string{
"./",
},
}
// SimpleSummaryMock a summary with specific results for ASFF report tests
var SimpleSummaryMock = model.Summary{
Counters: model.Counters{
ScannedFiles: 1,
ParsedFiles: 1,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryMedium,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 1,
model.SeverityHigh: 0,
model.SeverityCritical: 0,
},
TotalCounter: 1,
},
ScannedPaths: []string{
"./",
},
}
// ExampleSummaryMockWithCloudProviderCommon a summary with "common" as the cloud provider for console tests
var ExampleSummaryMockWithCloudProviderCommon = model.Summary{
Counters: model.Counters{
ScannedFiles: 1,
ParsedFiles: 1,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryLowCICDCloudProvider,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 1,
model.SeverityMedium: 0,
model.SeverityHigh: 0,
model.SeverityCritical: 0,
},
TotalCounter: 1,
},
ScannedPaths: []string{
"./",
},
}
// ExampleSummaryMockWithPasswordsAndSecretsCommonQuery a summary using the "Passwords And Secrets" common query, which contains multiple IDs
var ExampleSummaryMockWithPasswordsAndSecretsCommonQuery = model.Summary{
Counters: model.Counters{
ScannedFiles: 1,
ParsedFiles: 1,
FailedToScanFiles: 0,
TotalQueries: 1,
FailedToExecuteQueries: 0,
},
Queries: []model.QueryResult{
queryHighPasswordsAndSecrets,
},
SeveritySummary: model.SeveritySummary{
ScanID: "console",
SeverityCounters: map[model.Severity]int{
model.SeverityInfo: 0,
model.SeverityLow: 0,
model.SeverityMedium: 0,
model.SeverityHigh: 1,
model.SeverityCritical: 0,
},
TotalCounter: 1,
},
ScannedPaths: []string{
"./",
},
}