
Commit 8a533d7

Simplify rsync-antora-reference httpdocs-path
Closes gh-44864
1 parent: 0405d65

2 files changed: +336, -51 lines


.github/workflows/deploy-docs.yml

Lines changed: 51 additions & 51 deletions
@@ -18,54 +18,54 @@ jobs:
     if: github.repository_owner == 'spring-projects'
     runs-on: ubuntu-latest
     steps:
-      - name: Check Out
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-      - name: Fetch Main Branch
-        run: git fetch origin main:main
-      - name: Set Up Node
-        uses: actions/setup-node@v4
-        with:
-          node-version: 20
-      - name: Check Out 'package.json' From Main
-        run: node run.js --only-checkout
-      - name: Cache Files
-        uses: actions/cache@v4
-        with:
-          key: antora-${{ hashFiles('package-lock.json', 'antora-playbook.yml') }}
-          path: |
-            ~/.npm
-            ~/.cache/antora
-      - name: Install and Run Antora
-        env:
-          ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }}
-          ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }}
-          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
-          BUILD_VERSION: ${{ github.event.inputs.build-version }}
-        run: node run.js --no-checkout
-      - name: Sync Documentation
-        uses: spring-io/spring-doc-actions/rsync-antora-reference@v0.0.20
-        with:
-          docs-username: ${{ secrets.DOCS_USERNAME }}
-          docs-host: ${{ secrets.DOCS_HOST }}
-          docs-ssh-key: ${{ secrets.DOCS_SSH_KEY }}
-          docs-ssh-host-key: ${{ secrets.DOCS_SSH_HOST_KEY }}
-          httpdocs-path: /spring-boot/antora/reference
-        env:
-          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
-          BUILD_VERSION: ${{ github.event.inputs.build-version }}
-      - name: Bust Cloudflare Cache
-        uses: spring-io/spring-doc-actions/bust-cloudflare-antora-cache@v0.0.20
-        with:
-          context-root: spring-boot
-          context-path: /
-          cloudflare-zone-id: ${{ secrets.CLOUDFLARE_ZONE_ID }}
-          cloudflare-cache-token: ${{ secrets.CLOUDFLARE_CACHE_TOKEN }}
-      - name: Send Notification
-        if: failure()
-        uses: ./.github/actions/send-notification
-        with:
-          run-name: ${{ format('{0} | Build and Deploy Docs', github.ref_name) }}
-          status: ${{ job.status }}
-          webhook-url: ${{ secrets.GOOGLE_CHAT_WEBHOOK_URL }}
+      - name: Check Out
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Fetch Main Branch
+        run: git fetch origin main:main
+      - name: Set Up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+      - name: Check Out 'package.json' From Main
+        run: node run.js --only-checkout
+      - name: Cache Files
+        uses: actions/cache@v4
+        with:
+          key: antora-${{ hashFiles('package-lock.json', 'antora-playbook.yml') }}
+          path: |
+            ~/.npm
+            ~/.cache/antora
+      - name: Install and Run Antora
+        env:
+          ARTIFACTORY_USERNAME: ${{ secrets.ARTIFACTORY_USERNAME }}
+          ARTIFACTORY_PASSWORD: ${{ secrets.ARTIFACTORY_PASSWORD }}
+          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
+          BUILD_VERSION: ${{ github.event.inputs.build-version }}
+        run: node run.js --no-checkout
+      - name: Sync Documentation
+        uses: spring-io/spring-doc-actions/rsync-antora-reference@v0.0.20
+        with:
+          docs-username: ${{ secrets.DOCS_USERNAME }}
+          docs-host: ${{ secrets.DOCS_HOST }}
+          docs-ssh-key: ${{ secrets.DOCS_SSH_KEY }}
+          docs-ssh-host-key: ${{ secrets.DOCS_SSH_HOST_KEY }}
+          httpdocs-path: /spring-boot/antora
+        env:
+          BUILD_REFNAME: ${{ github.event.inputs.build-refname }}
+          BUILD_VERSION: ${{ github.event.inputs.build-version }}
+      - name: Bust Cloudflare Cache
+        uses: spring-io/spring-doc-actions/bust-cloudflare-antora-cache@v0.0.20
+        with:
+          context-root: spring-boot
+          context-path: /
+          cloudflare-zone-id: ${{ secrets.CLOUDFLARE_ZONE_ID }}
+          cloudflare-cache-token: ${{ secrets.CLOUDFLARE_CACHE_TOKEN }}
+      - name: Send Notification
+        if: failure()
+        uses: ./.github/actions/send-notification
+        with:
+          run-name: ${{ format('{0} | Build and Deploy Docs', github.ref_name) }}
+          status: ${{ job.status }}
+          webhook-url: ${{ secrets.GOOGLE_CHAT_WEBHOOK_URL }}
Lines changed: 285 additions & 0 deletions

New file; all 285 lines are additions. Its content is the following patch to @vscode/gulp-vinyl-zip:
diff --git a/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js b/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js
index 17d902d..0448dec 100644
--- a/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js
+++ b/node_modules/@vscode/gulp-vinyl-zip/lib/src/index.js
@@ -1,135 +1,157 @@
-'use strict';
-
-var fs = require('fs');
-var constants = fs.constants;
-var yauzl = require('yauzl');
-var File = require('../vinyl-zip');
-var queue = require('queue');
-var through = require('through');
-var map = require('through2').obj;
-
-function modeFromEntry(entry) {
-  var attr = entry.externalFileAttributes >> 16 || 33188;
-
-  // The following constants are not available on all platforms:
-  // 448 = constants.S_IRWXU, 56 = constants.S_IRWXG, 7 = constants.S_IRWXO
-  return [448, 56, 7]
-    .map(function (mask) { return attr & mask; })
-    .reduce(function (a, b) { return a + b; }, attr & constants.S_IFMT);
+'use strict'
+
+// This is fork of vinyl-zip with the following updates:
+// - unzipFile has an additional `.on('error'` handler
+// - toStream has an additional `zip.on('error'` handler
+
+const fs = require('fs')
+const constants = fs.constants
+const yauzl = require('yauzl')
+const File = require('vinyl')
+const queue = require('queue')
+const through = require('through')
+const map = require('through2').obj
+
+function modeFromEntry (entry) {
+  const attr = entry.externalFileAttributes >> 16 || 33188
+  return [448, 56, 7]
+    .map(function (mask) {
+      return attr & mask
+    })
+    .reduce(function (a, b) {
+      return a + b
+    }, attr & constants.S_IFMT)
 }
 
-function mtimeFromEntry(entry) {
-  return yauzl.dosDateTimeToDate(entry.lastModFileDate, entry.lastModFileTime);
+function mtimeFromEntry (entry) {
+  return yauzl.dosDateTimeToDate(entry.lastModFileDate, entry.lastModFileTime)
 }
 
-function toStream(zip) {
-  var result = through();
-  var q = queue();
-  var didErr = false;
-
-  q.on('error', function (err) {
-    didErr = true;
-    result.emit('error', err);
-  });
-
-  zip.on('entry', function (entry) {
-    if (didErr) { return; }
-
-    var stat = new fs.Stats();
-    stat.mode = modeFromEntry(entry);
-    stat.mtime = mtimeFromEntry(entry);
-
-    // directories
-    if (/\/$/.test(entry.fileName)) {
-      stat.mode = (stat.mode & ~constants.S_IFMT) | constants.S_IFDIR;
-    }
-
-    var file = {
-      path: entry.fileName,
-      stat: stat
-    };
-
-    if (stat.isFile()) {
-      stat.size = entry.uncompressedSize;
-      if (entry.uncompressedSize === 0) {
-        file.contents = Buffer.alloc(0);
-        result.emit('data', new File(file));
-      } else {
-        q.push(function (cb) {
-          zip.openReadStream(entry, function (err, readStream) {
-            if (err) { return cb(err); }
-            file.contents = readStream;
-            result.emit('data', new File(file));
-            cb();
-          });
-        });
-
-        q.start();
-      }
-    } else if (stat.isSymbolicLink()) {
-      stat.size = entry.uncompressedSize;
-      q.push(function (cb) {
-        zip.openReadStream(entry, function (err, readStream) {
-          if (err) { return cb(err); }
-          file.symlink = '';
-          readStream.on('data', function (c) { file.symlink += c; });
-          readStream.on('error', cb);
-          readStream.on('end', function () {
-            result.emit('data', new File(file));
-            cb();
-          });
-        });
-      });
-
-      q.start();
-    } else if (stat.isDirectory()) {
-      result.emit('data', new File(file));
-    } else {
-      result.emit('data', new File(file));
-    }
-  });
-
-  zip.on('end', function () {
-    if (didErr) {
-      return;
-    }
-
-    if (q.length === 0) {
-      result.end();
-    } else {
-      q.on('end', function () {
-        result.end();
-      });
-    }
-  });
-
-  return result;
+function toStream (zip) {
+  const result = through()
+  const q = queue()
+  let didErr = false
+
+  q.on('error', function (err) {
+    didErr = true
+    result.emit('error', err)
+  })
+
+  zip.on('error', function (err) {
+    didErr = true
+    result.emit('error', err)
+  })
+
+  zip.on('entry', function (entry) {
+    if (didErr) {
+      return
+    }
+
+    const stat = new fs.Stats()
+    stat.mode = modeFromEntry(entry)
+    stat.mtime = mtimeFromEntry(entry)
+
+    // directories
+    if (/\/$/.test(entry.fileName)) {
+      stat.mode = (stat.mode & ~constants.S_IFMT) | constants.S_IFDIR
+    }
+
+    const file = {
+      path: entry.fileName,
+      stat,
+    }
+
+    if (stat.isFile()) {
+      stat.size = entry.uncompressedSize
+      if (entry.uncompressedSize === 0) {
+        file.contents = Buffer.alloc(0)
+        result.emit('data', new File(file))
+      } else {
+        q.push(function (cb) {
+          zip.openReadStream(entry, function (err, readStream) {
+            if (err) {
+              return cb(err)
+            }
+            file.contents = readStream
+            result.emit('data', new File(file))
+            cb()
+          })
+        })
+
+        q.start()
+      }
+    } else if (stat.isSymbolicLink()) {
+      stat.size = entry.uncompressedSize
+      q.push(function (cb) {
+        zip.openReadStream(entry, function (err, readStream) {
+          if (err) {
+            return cb(err)
+          }
+          file.symlink = ''
+          readStream.on('data', function (c) {
+            file.symlink += c
+          })
+          readStream.on('error', cb)
+          readStream.on('end', function () {
+            result.emit('data', new File(file))
+            cb()
+          })
+        })
+      })
+
+      q.start()
+    } else if (stat.isDirectory()) {
+      result.emit('data', new File(file))
+    } else {
+      result.emit('data', new File(file))
+    }
+  })
+
+  zip.on('end', function () {
+    if (didErr) {
+      return
+    }
+
+    if (q.length === 0) {
+      result.end()
+    } else {
+      q.on('end', function () {
+        result.end()
+      })
+    }
+  })
+
+  return result
 }
 
-function unzipFile(zipPath) {
-  var result = through();
-  yauzl.open(zipPath, function (err, zip) {
-    if (err) { return result.emit('error', err); }
-    toStream(zip).pipe(result);
-  });
-  return result;
+function unzipFile (zipPath) {
+  const result = through()
+  yauzl.open(zipPath, function (err, zip) {
+    if (err) {
+      return result.emit('error', err)
+    }
+    toStream(zip)
+      .on('error', (err) => result.emit('error', err))
+      .pipe(result)
+  })
+  return result
 }
 
-function unzip() {
-  return map(function (file, enc, next) {
-    if (!file.isBuffer()) return next(new Error('Only supports buffers'));
-    yauzl.fromBuffer(file.contents, (err, zip) => {
-      if (err) return this.emit('error', err);
-      toStream(zip)
-        .on('error', next)
-        .on('data', (data) => this.push(data))
-        .on('end', next);
-    });
-  });
+function unzip () {
+  return map(function (file, enc, next) {
+    if (!file.isBuffer()) return next(new Error('Only supports buffers'))
+    yauzl.fromBuffer(file.contents, (err, zip) => {
+      if (err) return this.emit('error', err)
+      toStream(zip)
+        .on('error', next)
+        .on('data', (data) => this.push(data))
+        .on('end', next)
+    })
+  })
 }
 
-function src(zipPath) {
-  return zipPath ? unzipFile(zipPath) : unzip();
+function src (zipPath) {
+  return zipPath ? unzipFile(zipPath) : unzip()
 }
 
-module.exports = src;
+module.exports = src
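
For context on what the patch changes in practice, here is a minimal usage sketch of the patched src() stream. It is not part of the commit: the require path follows the patched file's location, while the zip file name and the through2 logging step are assumptions for illustration only.

// Minimal sketch (not from the commit): exercising the patched src() stream.
// 'ui-bundle.zip' and the through2 logging step are illustrative assumptions.
const src = require('@vscode/gulp-vinyl-zip/lib/src') // the patched module
const map = require('through2').obj

src('ui-bundle.zip')                      // unzipFile(): stream vinyl files out of the zip
  .on('error', (err) => {                 // with the patch, zip-level errors now surface here
    console.error('unzip failed:', err)
    process.exitCode = 1
  })
  .pipe(map(function (file, _enc, next) { // inspect each extracted entry
    console.log(file.path)
    next(null, file)
  }))

The point of the added handlers is visible here: before the patch, errors emitted by yauzl's zip object or inside toStream() could be lost instead of reaching the consumer's 'error' listener.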
