-
-
Notifications
You must be signed in to change notification settings - Fork 1
541 lines (467 loc) · 20.1 KB
/
upstream-sync.yml
File metadata and controls
541 lines (467 loc) · 20.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
# Selectively syncs src/core/operations/ from upstream gchq/CyberChef into this
# fork, regenerates OperationConfig + the MCP tool baseline, runs tests/lint,
# and opens a PR. Triggered manually (optionally pinned to a tag) or by adding
# the 'upstream-sync-approved' label to an issue.
name: Upstream Sync

on:
  workflow_dispatch:
    inputs:
      target_version:
        description: 'Target CyberChef version (e.g., v10.19.5) - leave empty for latest'
        required: false
        type: string
  issues:
    types: [labeled]

permissions:
  contents: write
  pull-requests: write
  issues: write

jobs:
  sync:
    # Only run if manually triggered OR if issue labeled with 'upstream-sync-approved'
    if: |
      github.event_name == 'workflow_dispatch' ||
      (github.event_name == 'issues' && github.event.label.name == 'upstream-sync-approved')
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Full history so branch creation / diffs against HEAD behave normally
          fetch-depth: 0

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '22'
          cache: 'npm'

      - name: Configure git
        run: |
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"

      - name: Ensure ref-proj exists and is updated
        id: update_ref
        env:
          TARGET_VERSION: ${{ inputs.target_version }}
        run: |
          # Ensure ref-proj directory exists
          mkdir -p ref-proj
          # Clone or update upstream reference
          if [ -d "ref-proj/CyberChef/.git" ]; then
            echo "Updating existing ref-proj/CyberChef"
            cd ref-proj/CyberChef
            git fetch origin --tags
            BEFORE_SHA=$(git rev-parse HEAD)
            if [ -n "$TARGET_VERSION" ]; then
              # Checkout specific version
              git checkout "$TARGET_VERSION"
              echo "Checked out $TARGET_VERSION"
            else
              # Pull latest master
              git pull origin master
            fi
            AFTER_SHA=$(git rev-parse HEAD)
            echo "before_sha=$BEFORE_SHA" >> $GITHUB_OUTPUT
            echo "after_sha=$AFTER_SHA" >> $GITHUB_OUTPUT
            cd ../..
          else
            echo "Cloning upstream to ref-proj/CyberChef"
            cd ref-proj
            git clone https://github.com/gchq/CyberChef.git
            cd CyberChef
            if [ -n "$TARGET_VERSION" ]; then
              git checkout "$TARGET_VERSION"
            fi
            AFTER_SHA=$(git rev-parse HEAD)
            echo "before_sha=initial" >> $GITHUB_OUTPUT
            echo "after_sha=$AFTER_SHA" >> $GITHUB_OUTPUT
            cd ../..
          fi
          # Get version info
          cd ref-proj/CyberChef
          UPSTREAM_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "unknown")
          UPSTREAM_VERSION=$(node -p "require('./package.json').version" 2>/dev/null || echo "unknown")
          echo "tag=$UPSTREAM_TAG" >> $GITHUB_OUTPUT
          echo "version=$UPSTREAM_VERSION" >> $GITHUB_OUTPUT
          cd ../..

      - name: Create sync branch
        env:
          TARGET: ${{ steps.update_ref.outputs.tag }}
        run: |
          BRANCH_NAME="upstream-sync-${TARGET}"
          git checkout -b "$BRANCH_NAME"
          # Exported via GITHUB_ENV so later steps can reference env.branch
          echo "branch=$BRANCH_NAME" >> $GITHUB_ENV

      - name: Identify operation changes
        id: changes
        run: |
          # Find new operations (in ref but not in main)
          echo "Finding new operations..."
          NEW_OPS=$(comm -13 \
            <(ls src/core/operations/*.mjs 2>/dev/null | xargs -n1 basename | sort) \
            <(ls ref-proj/CyberChef/src/core/operations/*.mjs 2>/dev/null | xargs -n1 basename | sort))
          NEW_COUNT=$(echo "$NEW_OPS" | grep -v '^$' | wc -l)
          echo "new_count=$NEW_COUNT" >> $GITHUB_OUTPUT
          echo "Found $NEW_COUNT new operations"
          # Find modified operations (different checksums)
          echo "Finding modified operations..."
          MODIFIED_OPS=""
          MODIFIED_COUNT=0
          for op in src/core/operations/*.mjs; do
            if [ -f "$op" ]; then
              basename_op=$(basename "$op")
              ref_op="ref-proj/CyberChef/src/core/operations/$basename_op"
              if [ -f "$ref_op" ]; then
                if ! cmp -s "$op" "$ref_op"; then
                  MODIFIED_OPS="$MODIFIED_OPS$basename_op"$'\n'
                  MODIFIED_COUNT=$((MODIFIED_COUNT + 1))
                fi
              fi
            fi
          done
          echo "modified_count=$MODIFIED_COUNT" >> $GITHUB_OUTPUT
          echo "Found $MODIFIED_COUNT modified operations"
          # Find deleted operations (in main but not in ref)
          DELETED_OPS=$(comm -23 \
            <(ls src/core/operations/*.mjs 2>/dev/null | xargs -n1 basename | sort) \
            <(ls ref-proj/CyberChef/src/core/operations/*.mjs 2>/dev/null | xargs -n1 basename | sort))
          DELETED_COUNT=$(echo "$DELETED_OPS" | grep -v '^$' | wc -l)
          echo "deleted_count=$DELETED_COUNT" >> $GITHUB_OUTPUT
          echo "Found $DELETED_COUNT deleted operations"
          # Save lists to files
          echo "$NEW_OPS" > /tmp/new_ops.txt
          echo "$MODIFIED_OPS" > /tmp/modified_ops.txt
          echo "$DELETED_OPS" > /tmp/deleted_ops.txt
          # Determine if sync is needed
          TOTAL_CHANGES=$((NEW_COUNT + MODIFIED_COUNT + DELETED_COUNT))
          if [ $TOTAL_CHANGES -gt 0 ]; then
            echo "needs_sync=true" >> $GITHUB_OUTPUT
            echo "Total changes: $TOTAL_CHANGES"
          else
            echo "needs_sync=false" >> $GITHUB_OUTPUT
            echo "No changes detected"
          fi

      - name: Sync operation files
        if: steps.changes.outputs.needs_sync == 'true'
        id: sync
        run: |
          echo "Syncing operation files..."
          SYNCED_FILES=0
          FAILED_FILES=0
          # Copy new operations
          while IFS= read -r op; do
            if [ -n "$op" ]; then
              echo "Adding new operation: $op"
              if cp "ref-proj/CyberChef/src/core/operations/$op" "src/core/operations/$op"; then
                SYNCED_FILES=$((SYNCED_FILES + 1))
              else
                echo "Failed to copy $op"
                FAILED_FILES=$((FAILED_FILES + 1))
              fi
            fi
          done < /tmp/new_ops.txt
          # Copy modified operations
          while IFS= read -r op; do
            if [ -n "$op" ]; then
              echo "Updating modified operation: $op"
              if cp "ref-proj/CyberChef/src/core/operations/$op" "src/core/operations/$op"; then
                SYNCED_FILES=$((SYNCED_FILES + 1))
              else
                echo "Failed to copy $op"
                FAILED_FILES=$((FAILED_FILES + 1))
              fi
            fi
          done < /tmp/modified_ops.txt
          # Delete removed operations (with confirmation)
          while IFS= read -r op; do
            if [ -n "$op" ]; then
              echo "Note: Operation removed upstream: $op (keeping in MCP for now)"
              # We don't auto-delete in case it's still useful for MCP
            fi
          done < /tmp/deleted_ops.txt
          echo "synced_files=$SYNCED_FILES" >> $GITHUB_OUTPUT
          echo "failed_files=$FAILED_FILES" >> $GITHUB_OUTPUT
          if [ $FAILED_FILES -gt 0 ]; then
            echo "status=partial" >> $GITHUB_OUTPUT
            echo "WARNING: Some files failed to sync"
          else
            echo "status=success" >> $GITHUB_OUTPUT
            echo "All files synced successfully"
          fi

      - name: Verify no excluded files introduced by sync
        if: steps.changes.outputs.needs_sync == 'true'
        run: |
          echo "Verifying sync did not introduce excluded files..."
          EXCLUDED_FOUND=0
          # Check git diff for any new/modified files outside src/core/operations/
          # The sync should ONLY touch src/core/operations/*.mjs files
          CHANGED_FILES=$(git diff --name-only HEAD 2>/dev/null || git diff --cached --name-only 2>/dev/null || true)
          UNEXPECTED_FILES=""
          while IFS= read -r file; do
            if [ -n "$file" ]; then
              case "$file" in
                src/core/operations/*.mjs)
                  # Expected - operation files are the sync target
                  ;;
                src/web/*)
                  echo "ERROR: Sync introduced src/web/ file: $file"
                  EXCLUDED_FOUND=1
                  UNEXPECTED_FILES="$UNEXPECTED_FILES $file"
                  ;;
                tests/browser/*)
                  echo "ERROR: Sync introduced tests/browser/ file: $file"
                  EXCLUDED_FOUND=1
                  UNEXPECTED_FILES="$UNEXPECTED_FILES $file"
                  ;;
                nightwatch.json|postcss.config.js|.devcontainer/*)
                  echo "ERROR: Sync introduced excluded file: $file"
                  EXCLUDED_FOUND=1
                  UNEXPECTED_FILES="$UNEXPECTED_FILES $file"
                  ;;
                *)
                  echo "Note: Non-operation file changed: $file (reviewing...)"
                  ;;
              esac
            fi
          done <<< "$CHANGED_FILES"
          if [ $EXCLUDED_FOUND -eq 1 ]; then
            echo "CRITICAL: Sync introduced excluded files! Reverting sync changes."
            echo "Unexpected files: $UNEXPECTED_FILES"
            exit 1
          else
            echo "Verification passed: Sync only modified operation files"
          fi

      - name: Install dependencies
        if: steps.changes.outputs.needs_sync == 'true'
        run: npm install

      - name: Apply Node 22 compatibility patches
        if: steps.changes.outputs.needs_sync == 'true'
        run: |
          # Apply SlowBuffer patches for Node 22 compatibility
          if [ -f "node_modules/avsc/lib/types.js" ]; then
            sed -i 's/new SlowBuffer/Buffer.alloc/g' node_modules/avsc/lib/types.js
            echo "Applied avsc patch"
          fi
          if [ -f "node_modules/buffer-equal-constant-time/index.js" ]; then
            sed -i 's/SlowBuffer/Buffer/g' node_modules/buffer-equal-constant-time/index.js
            echo "Applied buffer-equal-constant-time patch"
          fi

      - name: Regenerate OperationConfig
        if: steps.changes.outputs.needs_sync == 'true'
        run: |
          npx grunt configTests
          echo "OperationConfig.json regenerated"
          # Show stats
          OPERATION_COUNT=$(node -p "Object.keys(require('./src/core/config/OperationConfig.json')).length")
          echo "Total operations: $OPERATION_COUNT"
          echo "operation_count=$OPERATION_COUNT" >> $GITHUB_ENV

      - name: Run tests
        if: steps.changes.outputs.needs_sync == 'true'
        id: tests
        run: |
          # Run core tests
          if npm test; then
            echo "core_passed=true" >> $GITHUB_OUTPUT
          else
            echo "core_passed=false" >> $GITHUB_OUTPUT
            echo "WARNING: Core tests failed"
          fi
          # Run MCP validation tests
          if npm run test:mcp; then
            echo "mcp_passed=true" >> $GITHUB_OUTPUT
          else
            echo "mcp_passed=false" >> $GITHUB_OUTPUT
            echo "WARNING: MCP tests failed"
          fi

      - name: Run lint
        if: steps.changes.outputs.needs_sync == 'true'
        id: lint
        run: |
          if npm run lint; then
            echo "lint_passed=true" >> $GITHUB_OUTPUT
          else
            echo "lint_passed=false" >> $GITHUB_OUTPUT
            echo "WARNING: Lint failed"
          fi

      - name: Update baseline
        if: steps.changes.outputs.needs_sync == 'true' && steps.tests.outputs.mcp_passed == 'true'
        run: |
          # Regenerate baseline with new tool inventory
          node --input-type=module << 'EOF'
          import OperationConfig from './src/core/config/OperationConfig.json' with { type: 'json' };
          import { readFileSync, writeFileSync } from 'fs';
          function sanitizeToolName(name) {
              if (!name) return null;
              const sanitized = 'cyberchef_' + name.toLowerCase()
                  .replace(/[^a-z0-9_]/g, '_')
                  .replace(/_+/g, '_')
                  .replace(/^_|_$/g, '');
              if (sanitized === 'cyberchef_') return null;
              return sanitized;
          }
          const pkg = JSON.parse(readFileSync('./package.json', 'utf-8'));
          const baseline = {
              version: pkg.mcpVersion,
              timestamp: new Date().toISOString(),
              tool_count: Object.keys(OperationConfig).length + 2,
              tools: {
                  cyberchef_bake: { type: 'meta', description: 'Execute a CyberChef recipe' },
                  cyberchef_search: { type: 'meta', description: 'Search for available CyberChef operations' }
              }
          };
          Object.keys(OperationConfig).forEach(opName => {
              const op = OperationConfig[opName];
              const toolName = sanitizeToolName(opName);
              if (toolName) {
                  baseline.tools[toolName] = {
                      operation: opName,
                      description: op.description || '',
                      args_count: op.args ? op.args.length : 0,
                      arg_types: op.args ? op.args.map(a => a.type) : []
                  };
              }
          });
          writeFileSync('./tests/mcp/baseline.json', JSON.stringify(baseline, null, 2), 'utf-8');
          console.log('Baseline updated:', baseline.tool_count, 'tools');
          EOF

      - name: Generate sync report
        if: steps.changes.outputs.needs_sync == 'true'
        run: |
          # Generate detailed sync report
          cat > /tmp/sync_report.md << 'EOF'
          ## Sync Report
          ### Files Changed
          - New operations: ${{ steps.changes.outputs.new_count }}
          - Modified operations: ${{ steps.changes.outputs.modified_count }}
          - Deleted operations: ${{ steps.changes.outputs.deleted_count }}
          - Total synced files: ${{ steps.sync.outputs.synced_files }}
          ### New Operations
          ```
          EOF
          cat /tmp/new_ops.txt >> /tmp/sync_report.md
          cat >> /tmp/sync_report.md << 'EOF'
          ```
          ### Modified Operations
          ```
          EOF
          cat /tmp/modified_ops.txt >> /tmp/sync_report.md
          cat >> /tmp/sync_report.md << 'EOF'
          ```
          ### Deleted Operations (Kept in MCP)
          ```
          EOF
          cat /tmp/deleted_ops.txt >> /tmp/sync_report.md
          echo '```' >> /tmp/sync_report.md

      - name: Commit changes
        if: steps.changes.outputs.needs_sync == 'true'
        env:
          TARGET: ${{ steps.update_ref.outputs.tag }}
          NEW_COUNT: ${{ steps.changes.outputs.new_count }}
          MODIFIED_COUNT: ${{ steps.changes.outputs.modified_count }}
          DELETED_COUNT: ${{ steps.changes.outputs.deleted_count }}
        run: |
          git add src/core/operations/
          git add -f src/core/config/OperationConfig.json
          git add tests/mcp/baseline.json
          COMMIT_MSG="chore(upstream): selective sync from CyberChef $TARGET

          * Synced $NEW_COUNT new operations
          * Updated $MODIFIED_COUNT modified operations
          * Noted $DELETED_COUNT deleted operations (kept in MCP)
          * Regenerated OperationConfig.json ($operation_count operations)
          * Updated baseline.json
          * Applied Node 22 compatibility patches

          Sync method: Selective file copying (preserves MCP modifications)
          Generated with upstream-sync workflow"
          git commit -m "$COMMIT_MSG" || echo "No changes to commit"

      - name: Push branch
        if: steps.changes.outputs.needs_sync == 'true'
        env:
          BRANCH: ${{ env.branch }}
        run: |
          git push origin "$BRANCH" --force

      - name: Create Pull Request
        if: steps.changes.outputs.needs_sync == 'true'
        env:
          GH_TOKEN: ${{ github.token }}
          TARGET: ${{ steps.update_ref.outputs.tag }}
          NEW_COUNT: ${{ steps.changes.outputs.new_count }}
          MODIFIED_COUNT: ${{ steps.changes.outputs.modified_count }}
          DELETED_COUNT: ${{ steps.changes.outputs.deleted_count }}
          CORE_TEST: ${{ steps.tests.outputs.core_passed }}
          MCP_TEST: ${{ steps.tests.outputs.mcp_passed }}
          LINT: ${{ steps.lint.outputs.lint_passed }}
        run: |
          # Read sync report
          SYNC_REPORT=$(cat /tmp/sync_report.md)
          # Determine test status.
          # NOTE: use a real newline ($'\n'); a literal "\n" in a plain
          # double-quoted string is NOT interpreted by bash and would show up
          # verbatim in the PR body.
          NL=$'\n'
          if [ "$CORE_TEST" == "true" ] && [ "$MCP_TEST" == "true" ] && [ "$LINT" == "true" ]; then
            TEST_STATUS="✅ All tests and linting passed"
          else
            TEST_STATUS="⚠️ **WARNING:** Some checks failed"
            [ "$CORE_TEST" != "true" ] && TEST_STATUS="${TEST_STATUS}${NL}- Core tests: FAILED"
            [ "$MCP_TEST" != "true" ] && TEST_STATUS="${TEST_STATUS}${NL}- MCP tests: FAILED"
            [ "$LINT" != "true" ] && TEST_STATUS="${TEST_STATUS}${NL}- Lint: FAILED"
          fi
          PR_BODY="## Upstream Sync: $TARGET

          This PR synchronizes operations with upstream CyberChef release $TARGET using **selective file syncing**.

          ### Sync Method
          ✅ **Selective File Sync** (NOT full merge)
          - Only syncs \`src/core/operations/\` files
          - Preserves MCP-specific modifications
          - Excludes \`src/web/\`, \`tests/browser/\`, and other removed files
          - Maintains package.json, Gruntfile.js, and MCP workflows

          ### Changes Summary
          - **New Operations**: $NEW_COUNT
          - **Modified Operations**: $MODIFIED_COUNT
          - **Deleted Operations**: $DELETED_COUNT (kept in MCP)
          - **Total Operations**: $operation_count

          $SYNC_REPORT

          ### Test Results
          $TEST_STATUS

          ### Verification Checklist
          - [x] Only operation files synced
          - [x] No excluded files (src/web/, tests/browser/, etc.)
          - [x] MCP-specific files preserved
          - [x] OperationConfig.json regenerated
          - [x] Baseline.json updated
          - [ ] Review upstream changelog for breaking changes
          - [ ] Manual testing of new/modified operations
          - [ ] Update CHANGELOG.md if needed
          - [ ] Update version in package.json if needed

          ### Rollback
          If issues are found, use the rollback workflow:
          \`\`\`bash
          gh workflow run rollback.yml -f reason=\"Issues with $TARGET sync\"
          \`\`\`

          ---
          Generated automatically by upstream-sync workflow (selective sync mode)"
          gh pr create \
            --title "chore(upstream): selective sync with CyberChef $TARGET" \
            --label "upstream-sync,automated" \
            --body "$PR_BODY" || echo "PR already exists"

      - name: No changes needed
        if: steps.changes.outputs.needs_sync != 'true'
        run: |
          echo "No synchronization needed - all operations are up-to-date"
          echo "### Sync Status: Up-to-date" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "All operation files match upstream." >> $GITHUB_STEP_SUMMARY

      - name: Summary
        if: always()
        env:
          TARGET: ${{ steps.update_ref.outputs.tag }}
          BRANCH: ${{ env.branch }}
          NEEDS_SYNC: ${{ steps.changes.outputs.needs_sync }}
          NEW_COUNT: ${{ steps.changes.outputs.new_count }}
          MODIFIED_COUNT: ${{ steps.changes.outputs.modified_count }}
          SYNCED_FILES: ${{ steps.sync.outputs.synced_files }}
        run: |
          echo "### Upstream Sync Summary" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          echo "- **Target Version:** $TARGET" >> $GITHUB_STEP_SUMMARY
          echo "- **Sync Method:** Selective file copying" >> $GITHUB_STEP_SUMMARY
          echo "" >> $GITHUB_STEP_SUMMARY
          if [ "$NEEDS_SYNC" == "true" ]; then
            echo "#### Changes" >> $GITHUB_STEP_SUMMARY
            echo "- New operations: $NEW_COUNT" >> $GITHUB_STEP_SUMMARY
            echo "- Modified operations: $MODIFIED_COUNT" >> $GITHUB_STEP_SUMMARY
            echo "- Files synced: $SYNCED_FILES" >> $GITHUB_STEP_SUMMARY
            echo "" >> $GITHUB_STEP_SUMMARY
            echo "- **Branch:** $BRANCH" >> $GITHUB_STEP_SUMMARY
            echo "- **Result:** Pull request created" >> $GITHUB_STEP_SUMMARY
          else
            echo "- **Result:** No changes needed" >> $GITHUB_STEP_SUMMARY
          fi