-
-
Notifications
You must be signed in to change notification settings - Fork 19
179 lines (152 loc) · 7.38 KB
/
copilot-setup-steps.yml
File metadata and controls
179 lines (152 loc) · 7.38 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
---
name: "Copilot Setup Steps"

# Automatically run the setup steps when they are changed to allow for easy validation, and
# allow manual testing through the repository's "Actions" tab
on:
  workflow_dispatch:
  push:
    paths:
      - .github/workflows/copilot-setup-steps.yml
  pull_request:
    paths:
      - .github/workflows/copilot-setup-steps.yml

permissions:
  contents: read

jobs:
  # The job MUST be called `copilot-setup-steps` or it will not be picked up by Copilot.
  copilot-setup-steps:
    runs-on: ubuntu-latest

    # Set the permissions to the lowest permissions possible needed for your steps.
    # Copilot will be given its own token for its operations.
    permissions:
      # If you want to clone the repository as part of your setup steps, for example to install dependencies,
      # you'll need the `contents: read` permission. If you don't clone the repository in your setup steps,
      # Copilot will do this for you automatically after the steps complete.
      contents: read

    # You can define any steps you want, and they will run before the agent starts.
    # If you do not check out your code, Copilot will do this for you.
    steps:
      - name: Harden the runner (Audit all outbound calls)
        uses: step-security/harden-runner@fa2e9d605c4eeb9fcad4c99c224cee0c6c7f3594 # v2.16.0
        with:
          egress-policy: audit

      - name: Checkout code
        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

      - name: Set up Node.js
        uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0
        with:
          node-version: "20"
          cache: "npm"

      - name: Install JavaScript dependencies
        run: npm ci

      # Download session log files from Azure Blob Storage (optional)
      # This step only runs if Azure Storage credentials are configured in the `copilot` environment
      - name: Download Copilot session logs from Azure Blob Storage
        if: ${{ vars.COPILOT_STORAGE_ACCOUNT != '' }}
        env:
          AZURE_STORAGE_ACCOUNT: ${{ vars.COPILOT_STORAGE_ACCOUNT }}
          AZURE_STORAGE_CONTAINER: ${{ vars.COPILOT_STORAGE_CONTAINER || 'copilot-session-logs' }}
          AZURE_DATASET_ID: ${{ vars.COPILOT_DATASET_ID || 'default' }}
        run: |
          echo "📦 Downloading session logs from Azure Blob Storage..."

          # Create directory for session logs
          mkdir -p ./session-logs

          # Calculate date range (last 7 days)
          # Use GNU date if available, otherwise BSD date (macOS)
          if date --version >/dev/null 2>&1; then
            # GNU date (Linux)
            END_DATE=$(date -u +%Y-%m-%d)
            START_DATE=$(date -u -d '7 days ago' +%Y-%m-%d)
          else
            # BSD date (macOS)
            END_DATE=$(date -u +%Y-%m-%d)
            START_DATE=$(date -u -v-7d +%Y-%m-%d)
          fi
          echo "Date range: $START_DATE to $END_DATE"

          # Check if Azure CLI is available
          if ! command -v az &> /dev/null; then
            echo "⚠️ Azure CLI not installed, skipping session log download"
            exit 0
          fi

          # Check if authenticated (will be true if using Entra ID via azure/login action)
          if ! az account show &> /dev/null; then
            echo "⚠️ Not authenticated with Azure, skipping session log download"
            echo "💡 To enable: configure Azure authentication in the copilot environment"
            exit 0
          fi

          # Download blobs from the dataset path
          echo "Downloading from container: $AZURE_STORAGE_CONTAINER, dataset: $AZURE_DATASET_ID"

          # Download all blobs matching the dataset path pattern
          # This downloads: {dataset}/{machine-id}/{date}/session-*.json.gz
          az storage blob download-batch \
            --account-name "$AZURE_STORAGE_ACCOUNT" \
            --source "$AZURE_STORAGE_CONTAINER" \
            --destination ./session-logs \
            --pattern "${AZURE_DATASET_ID}/*/*/*.json.gz" \
            --auth-mode login \
            --no-progress || {
              echo "⚠️ Failed to download session logs, continuing without them"
              exit 0
            }

          # Count downloaded files
          FILE_COUNT=$(find ./session-logs -name "*.json.gz" | wc -l)
          echo "✅ Downloaded $FILE_COUNT session log files"

          # Decompress all files
          if [ "$FILE_COUNT" -gt 0 ]; then
            echo "📂 Decompressing session logs..."
            find ./session-logs -name "*.gz" -exec gunzip {} \;
            echo "✅ Session logs ready in ./session-logs"
          fi

      - name: Display session log summary
        if: ${{ vars.COPILOT_STORAGE_ACCOUNT != '' }}
        run: |
          if [ -d ./session-logs ]; then
            echo "📊 Session log summary:"
            echo "Total files: $(find ./session-logs -name "*.json" | wc -l)"
            echo "Directory structure:"
            tree -L 3 ./session-logs || ls -R ./session-logs
          else
            echo "No session logs available"
          fi

      # Download aggregated usage data from Azure Table Storage (optional)
      # Uses the existing load-table-data.js helper script from the azure-storage-loader skill
      - name: Install Azure Storage loader dependencies
        if: ${{ vars.COPILOT_STORAGE_ACCOUNT != '' }}
        run: |
          cd .github/skills/azure-storage-loader
          # `--omit=dev` is the supported replacement for the deprecated `--production` flag
          npm ci --omit=dev

      - name: Download aggregated usage data from Azure Table Storage
        if: ${{ vars.COPILOT_STORAGE_ACCOUNT != '' }}
        env:
          AZURE_STORAGE_ACCOUNT: ${{ vars.COPILOT_STORAGE_ACCOUNT }}
          AZURE_TABLE_NAME: ${{ vars.COPILOT_TABLE_NAME || 'usageAggDaily' }}
          AZURE_DATASET_ID: ${{ vars.COPILOT_DATASET_ID || 'default' }}
          AZURE_TABLE_DATA_DAYS: ${{ vars.COPILOT_TABLE_DATA_DAYS || '30' }}
          AZURE_STORAGE_KEY: ${{ secrets.COPILOT_STORAGE_KEY }}
        run: |
          echo "📊 Downloading aggregated usage data from Azure Table Storage..."
          mkdir -p ./usage-data

          # Calculate date range (GNU date on ubuntu-latest)
          END_DATE=$(date -u +%Y-%m-%d)
          START_DATE=$(date -u -d "${AZURE_TABLE_DATA_DAYS} days ago" +%Y-%m-%d)
          echo "Date range: $START_DATE to $END_DATE"

          # Build command arguments
          ARGS=(--storageAccount "$AZURE_STORAGE_ACCOUNT")
          ARGS+=(--tableName "$AZURE_TABLE_NAME")
          ARGS+=(--datasetId "$AZURE_DATASET_ID")
          ARGS+=(--startDate "$START_DATE")
          ARGS+=(--endDate "$END_DATE")
          ARGS+=(--output ./usage-data/usage-agg-daily.json)

          # Use shared key if available, otherwise rely on Entra ID (DefaultAzureCredential)
          if [ -n "$AZURE_STORAGE_KEY" ]; then
            ARGS+=(--sharedKey "$AZURE_STORAGE_KEY")
          fi

          node .github/skills/azure-storage-loader/load-table-data.js "${ARGS[@]}" || {
            echo "⚠️ Failed to download aggregated usage data, continuing without it"
            exit 0
          }

          if [ -f ./usage-data/usage-agg-daily.json ]; then
            ENTITY_COUNT=$(node -e "const d=require('./usage-data/usage-agg-daily.json'); console.log(d.length)")
            echo "✅ Downloaded $ENTITY_COUNT aggregated usage records to ./usage-data/usage-agg-daily.json"
          else
            echo "⚠️ No aggregated usage data file created"
          fi