Skip to content

Commit d1be391

Browse files
committed
feat: 兼容旧的服务端
1 parent dd7aa00 commit d1be391

File tree

4 files changed

+27
-4
lines changed

4 files changed

+27
-4
lines changed

dist/index.js

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

dist/preview.js

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@slimkit/plus-editor",
3-
"version": "1.2.7",
3+
"version": "1.2.8",
44
"description": "rich-text editor for plus",
55
"main": "dist/main.bundle.js",
66
"repository": "[email protected]:mutoe/plus-editor.git",

src/uploader.ts

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@ import Spark from 'spark-md5'
44
let userToken: string = ''
55
let apiV2BaseUrl: string = ''
66
let storage = { channel: 'public' }
7+
let crawlerUrl: string = ''
78

89
try {
910
const sp = new URL(window.location.href).searchParams
@@ -23,6 +24,7 @@ window.setUploaderOptions = options => {
2324
options = JSON.parse(options)
2425
}
2526

27+
crawlerUrl = ''
2628
userToken = options.userToken || ''
2729
apiV2BaseUrl = options.apiV2BaseUrl || ''
2830

@@ -282,6 +284,26 @@ async function getMediaInfo(file: File) {
282284
return mf
283285
}
284286

287+
let crawlerUrlPromise: Promise<string> | null
288+
async function getCrawlerUrl() {
289+
if (!crawlerUrl) {
290+
if (!crawlerUrlPromise) {
291+
const url = `${apiV2BaseUrl.replace(/\/+$/, '')}/crawl`
292+
crawlerUrlPromise = axios
293+
.head(url)
294+
.then(() => (crawlerUrl = url))
295+
.catch(() => (crawlerUrl = 'https://thinksns.zhibocloud.cn/api/v2/crawl'))
296+
.finally(() => {
297+
crawlerUrlPromise = null
298+
})
299+
}
300+
301+
return await crawlerUrlPromise
302+
}
303+
304+
return crawlerUrl
305+
}
306+
285307
async function uploadRemoteImage(params: {
286308
src: string
287309
remoteId: string
@@ -301,6 +323,7 @@ async function uploadRemoteImage(params: {
301323
try {
302324
const contentType = 'image/png'
303325
const filename = `${Date.now()}.png`
326+
const crawlerUrl = await getCrawlerUrl()
304327
const { blob, file, buff } = await new Promise((resolve, reject) => {
305328
const img = document.createElement('img')
306329
img.addEventListener('load', () => {
@@ -327,7 +350,7 @@ async function uploadRemoteImage(params: {
327350
})
328351
img.addEventListener('error', () => reject())
329352
img.crossOrigin = 'anonymous'
330-
img.src = `${apiV2BaseUrl.replace(/\/+$/, '')}/crawl?url=${encodeURIComponent(params.src)}`
353+
img.src = `${crawlerUrl}?url=${encodeURIComponent(params.src)}`
331354
})
332355

333356
const spark = new Spark.ArrayBuffer()

0 commit comments

Comments (0)