-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathgraphql.go
More file actions
230 lines (202 loc) · 6.05 KB
/
graphql.go
File metadata and controls
230 lines (202 loc) · 6.05 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
package cmd
import (
	"bufio"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"regexp"
	"strconv"
	"strings"

	"github.com/itchyny/gojq"
	"github.com/opslevel/opslevel-go/v2025"
	"github.com/spf13/cobra"
)
var (
	// keyValueExp splits a "key=value" flag argument into key and value submatches.
	keyValueExp = regexp.MustCompile(`([\w-]+)=(.*)`)
	// hasNextPageExp extracts the boolean from `"hasNextPage":<bool>` in a raw
	// JSON response body.
	hasNextPageExp = regexp.MustCompile(`"hasNextPage":([\w]+)`)
	// endCursorExp extracts the cursor from `"endCursor":"<cursor>"`. Cursors are
	// opaque (typically base64-encoded) and may contain '=', '+', '/', etc., so
	// match any run of non-quote characters — `[\w]+` would truncate or miss such
	// cursors and break pagination.
	endCursorExp = regexp.MustCompile(`"endCursor":"([^"]+)"`)
)
// graphqlCmd represents the graphql command: it sends an authenticated raw
// GraphQL request (optionally paginated) and prints the JQ-aggregated results
// as a JSON list on stdout.
var graphqlCmd = &cobra.Command{
	Use:   "graphql",
	Short: "Make authenticated raw GraphQL requests",
	Long: `Make authenticated raw GraphQL requests.
Pass one or more '-f/--field' values in "key=value" format to add graphql variables
to the request payload.
In '--paginate' mode, all pages of results will sequentially be requested until
there are no more pages of results. This requires that the
original query accepts an '$endCursor: String' variable and that it fetches the
'pageInfo{ hasNextPage, endCursor }' set of fields from a collection.
Note that only the first 'endCursor' value found in the response body will be used
so ensure you are only paginating on 1 resource. Nested resources pagination will
not work and will cause odd results or errors.
Use '--aggregate' to specify a JQ expression to use as a function
for the results. In '--paginate' mode it will use this expression to aggregate
the results into a JSON list.
Use '-q' to specify the graphql request body.
Pass "-" to read from standard input.
If the value starts with "@" it is interpreted as a filename to read from.
`,
	Example: `opslevel graphql --paginate -a=".account.services.nodes[]" -q='
query ($endCursor: String) {
  account {
    services(first: 5, after: $endCursor) {
      nodes {
        name
        aliases
        owner {
          name
        }
      }
      pageInfo {
        hasNextPage
        endCursor
      }
    }
  }
}'
opslevel graphql -f "owner=platform" -f "tier=tier_1" --paginate -a=".account.services.nodes[]" -q='
query ($endCursor: String, $owner: String!, $tier: String!) {
  account {
    services(first: 1, after: $endCursor, ownerAlias: $owner, tierAlias: $tier) {
      nodes {
        name
        aliases
      }
      pageInfo {
        hasNextPage
        endCursor
      }
    }
  }
}'
opslevel graphql -f "id=XXXXXX" -H "GraphQL-Visibility=internal" -a=".account.configFile.yaml" -q='
query ($id: ID!){
  account {
    configFile(id: $id) {
      yaml
    }
  }
}' | jq -r '.[0]' > opslevel.yml
`,
	Run: func(cmd *cobra.Command, args []string) {
		flags := cmd.Flags()

		// Collect extra HTTP headers. Check the flag error BEFORE using the
		// value (the original checked it only after the loop below).
		headersValue, err := flags.GetStringArray("header")
		cobra.CheckErr(err)
		headers := map[string]string{}
		for _, value := range headersValue {
			matches := keyValueExp.FindStringSubmatch(value)
			// Guard against malformed input; indexing a nil match would panic.
			if matches == nil {
				handleErr(fmt.Sprintf("error parsing header '%s'", value), fmt.Errorf("not in 'key=value' format"))
			}
			headers[matches[1]] = matches[2]
		}
		paginate, err := flags.GetBool("paginate")
		cobra.CheckErr(err)
		aggregate, err := flags.GetString("aggregate")
		cobra.CheckErr(err)
		// Parse and compile the JQ aggregation expression up front so a bad
		// expression fails before any network calls are made.
		jq, err := gojq.Parse(aggregate)
		cobra.CheckErr(err)
		aggregation, err := gojq.Compile(jq)
		cobra.CheckErr(err)
		queryValue, err := flags.GetString("query")
		cobra.CheckErr(err)
		// convert resolves "-" (stdin) and "@file" indirection for the query body.
		queryParsed, err := convert(queryValue)
		cobra.CheckErr(err)
		query, ok := queryParsed.(string)
		if !ok {
			handleErr("error parsing query flag value", fmt.Errorf("'%#v' is not a string", queryParsed))
		}
		operationName, err := flags.GetString("operationName")
		cobra.CheckErr(err)
		// Build the GraphQL variables map from '-f key=value' flags.
		fields, err := flags.GetStringArray("field")
		cobra.CheckErr(err)
		variables := map[string]interface{}{}
		for _, field := range fields {
			matches := keyValueExp.FindStringSubmatch(field)
			if matches == nil {
				handleErr(fmt.Sprintf("error parsing variable '%s'", field), fmt.Errorf("not in 'key=value' format"))
			}
			value, err := convert(matches[2])
			if err != nil {
				handleErr(fmt.Sprintf("error parsing variable '%s'", field), err)
			}
			variables[matches[1]] = value
		}
		client := getClientGQL(opslevel.SetHeaders(headers))
		var output []interface{}
		// Loop once normally, or until 'hasNextPage' is false in --paginate mode.
		hasNextPage := true
		for hasNextPage {
			data, err := client.ExecRaw(query, variables, opslevel.WithName(operationName))
			cobra.CheckErr(err)
			output = append(output, handleAggregate(data, aggregation)...)
			if paginate {
				hasNextPage, err = strconv.ParseBool(string(hasNextPageExp.FindSubmatch(data)[1]))
				cobra.CheckErr(err)
				// don't try to parse endCursor unless we know there's another page
				if hasNextPage {
					variables["endCursor"] = string(endCursorExp.FindSubmatch(data)[1])
				}
			} else {
				hasNextPage = false
			}
		}
		// 'out' rather than 'json' — avoid shadowing the encoding/json package.
		out, err := json.Marshal(output)
		cobra.CheckErr(err)
		fmt.Println(string(out))
	},
}
// init wires the graphql subcommand into the root command and declares its flags.
func init() {
	rootCmd.AddCommand(graphqlCmd)
	// -H repeatable raw HTTP headers, e.g. -H "GraphQL-Visibility=internal".
	graphqlCmd.Flags().StringArrayP("header", "H", nil, "Add a HTTP request header in `key=value` format")
	// --paginate drives the hasNextPage/endCursor loop in the Run function.
	graphqlCmd.Flags().BoolP("paginate", "p", false, "Automatically make additional requests to fetch all pages of results")
	// Default "." passes each response through unchanged.
	graphqlCmd.Flags().StringP("aggregate", "a", ".", "JQ expression to use to aggregate results")
	// -q accepts a literal body, "-" for stdin, or "@file" (see convert).
	graphqlCmd.Flags().StringP("query", "q", "", "The query or mutation body to use")
	graphqlCmd.Flags().StringP("operationName", "o", "", "The query or mutation 'operation name' to use")
	// -f repeatable GraphQL variables, converted to int/bool/null when possible.
	graphqlCmd.Flags().StringArrayP("field", "f", nil, "Add a variable in `key=value` format")
}
// handleErr wraps err with a contextual message and terminates the command
// via cobra.CheckErr (which prints the error and exits non-zero).
func handleErr(msg string, err error) {
	cobra.CheckErr(fmt.Errorf("%s | %w", msg, err))
}
func convert(v string) (interface{}, error) {
if v == "-" {
reader := bufio.NewReader(os.Stdin)
data, err := reader.ReadString('\n')
if err != nil {
return nil, err
}
return convert(data)
}
if strings.HasPrefix(v, "@") {
b, err := os.ReadFile(v[1:])
if err != nil {
return "", err
}
return convert(string(b))
}
if n, err := strconv.Atoi(v); err == nil {
return n, nil
}
switch v {
case "true":
return true, nil
case "false":
return false, nil
case "null":
return nil, nil
}
return v, nil
}
// handleAggregate unmarshals a raw JSON response body and runs the compiled JQ
// aggregation over it, returning every value the expression emits. Any JSON or
// JQ error terminates the command via cobra.CheckErr.
func handleAggregate(data []byte, aggregation *gojq.Code) []interface{} {
	var doc map[string]interface{}
	cobra.CheckErr(json.Unmarshal(data, &doc))

	var results []interface{}
	iter := aggregation.Run(doc)
	for v, more := iter.Next(); more; v, more = iter.Next() {
		// gojq reports expression failures as error values in the stream.
		if runErr, isErr := v.(error); isErr {
			cobra.CheckErr(runErr)
		}
		results = append(results, v)
	}
	return results
}