feat: bump eslint & oxlint (#14452)

#### PR Dependency Tree


* **PR #14452** 👈

This tree was auto-generated by
[Charcoal](https://github.com/danerwilliams/charcoal)

<!-- This is an auto-generated comment: release notes by coderabbit.ai
-->
## Summary by CodeRabbit

* **Bug Fixes**
  * Improved null-safety, dependency tracking, upload validation, and
    error logging for more reliable uploads, clipboard, calendar linking,
    telemetry, PDF/theme printing, and preview/zoom behavior.
  * Tightened handling of all-day calendar events (a missing date is now
    reported).

* **Deprecations**
  * Removed deprecated RadioButton and RadioButtonGroup; use RadioGroup.

* **Chores**
  * Unified and upgraded linting/config, reorganized imports, and
    standardized binary handling for more consistent builds and tooling.
<!-- end of auto-generated comment: release notes by coderabbit.ai -->
This commit is contained in:
DarkSky
2026-02-16 13:52:08 +08:00
committed by GitHub
parent 792164edd1
commit 728e02cab7
156 changed files with 1230 additions and 1066 deletions

View File

@@ -29,6 +29,36 @@ const SHOULD_MANUAL_REDIRECT =
BUILD_CONFIG.isAndroid || BUILD_CONFIG.isIOS || BUILD_CONFIG.isElectron;
const UPLOAD_REQUEST_TIMEOUT = 0;
/**
 * Normalizes arbitrary binary input into a plain `ArrayBuffer`.
 *
 * - A plain `ArrayBuffer` is returned unchanged (no copy).
 * - A typed-array/`DataView` backed by an `ArrayBuffer` yields its backing
 *   buffer directly when the view spans it entirely, otherwise a sliced copy
 *   of just the viewed range.
 * - Anything backed by a non-`ArrayBuffer` store (e.g. `SharedArrayBuffer`)
 *   is copied byte-by-byte into a fresh `ArrayBuffer`.
 *
 * @param data - Raw buffer, buffer-like object, or view to normalize.
 * @returns A plain `ArrayBuffer` holding exactly the bytes of `data`.
 */
function toStrictArrayBuffer(
  data: ArrayBuffer | ArrayBufferLike | ArrayBufferView
): ArrayBuffer {
  // Fast path: already the exact type we need.
  if (data instanceof ArrayBuffer) {
    return data;
  }
  if (ArrayBuffer.isView(data)) {
    const { buffer, byteOffset, byteLength } = data;
    if (buffer instanceof ArrayBuffer) {
      // View spans the whole buffer — hand back the backing store as-is;
      // otherwise slice out only the viewed byte range.
      const coversWholeBuffer =
        byteOffset === 0 && byteLength === buffer.byteLength;
      return coversWholeBuffer
        ? buffer
        : buffer.slice(byteOffset, byteOffset + byteLength);
    }
    // View over a non-ArrayBuffer store (e.g. SharedArrayBuffer):
    // copy the viewed bytes into a freshly allocated ArrayBuffer.
    const source = new Uint8Array(buffer, byteOffset, byteLength);
    const target = new Uint8Array(source.byteLength);
    target.set(source);
    return target.buffer;
  }
  // Bare ArrayBufferLike (e.g. SharedArrayBuffer): copy its full contents.
  const source = new Uint8Array(data);
  const target = new Uint8Array(source.byteLength);
  target.set(source);
  return target.buffer;
}
export class CloudBlobStorage extends BlobStorageBase {
static readonly identifier = 'CloudBlobStorage';
override readonly isReadonly = false;
@@ -127,8 +157,11 @@ export class CloudBlobStorage extends BlobStorageBase {
if (upload.method === BlobUploadMethod.PRESIGNED) {
try {
if (!upload.uploadUrl) {
throw new Error('Missing upload URL for presigned upload.');
}
await this.uploadViaPresigned(
upload.uploadUrl!,
upload.uploadUrl,
upload.headers,
blob.data,
signal
@@ -143,15 +176,20 @@ export class CloudBlobStorage extends BlobStorageBase {
if (upload.method === BlobUploadMethod.MULTIPART) {
try {
if (!upload.uploadId || !upload.partSize) {
throw new Error(
'Missing upload ID or part size for multipart upload.'
);
}
const parts = await this.uploadViaMultipart(
blob.key,
upload.uploadId!,
upload.partSize!,
upload.uploadId,
upload.partSize,
blob.data,
upload.uploadedParts,
signal
);
await this.completeUpload(blob.key, upload.uploadId!, parts, signal);
await this.completeUpload(blob.key, upload.uploadId, parts, signal);
return;
} catch {
if (upload.uploadId) {
@@ -216,7 +254,9 @@ export class CloudBlobStorage extends BlobStorageBase {
query: setBlobMutation,
variables: {
workspaceId: this.options.id,
blob: new File([blob.data], blob.key, { type: blob.mime }),
blob: new File([toStrictArrayBuffer(blob.data)], blob.key, {
type: blob.mime,
}),
},
context: { signal },
timeout: UPLOAD_REQUEST_TIMEOUT,
@@ -232,7 +272,7 @@ export class CloudBlobStorage extends BlobStorageBase {
const res = await this.fetchWithTimeout(uploadUrl, {
method: 'PUT',
headers: headers ?? undefined,
body: data,
body: toStrictArrayBuffer(data),
signal,
timeout: UPLOAD_REQUEST_TIMEOUT,
});
@@ -275,7 +315,7 @@ export class CloudBlobStorage extends BlobStorageBase {
{
method: 'PUT',
headers: part.workspace.blobUploadPartUrl.headers ?? undefined,
body: chunk,
body: toStrictArrayBuffer(chunk),
signal,
timeout: UPLOAD_REQUEST_TIMEOUT,
}

View File

@@ -141,10 +141,10 @@ export class CloudIndexerStorage extends IndexerStorageBase {
}
override async refreshIfNeed(): Promise<void> {
return Promise.resolve();
return;
}
override async indexVersion(): Promise<number> {
return Promise.resolve(1);
return 1;
}
}

View File

@@ -222,6 +222,6 @@ export class IndexedDBIndexerStorage extends IndexerStorageBase {
// Get the current indexer version
// increase this number to re-index all docs
async indexVersion(): Promise<number> {
return Promise.resolve(1);
return 1;
}
}

View File

@@ -1,4 +1,5 @@
import { merge, Observable, of, Subject } from 'rxjs';
import type { Observable } from 'rxjs';
import { merge, of, Subject } from 'rxjs';
import { filter, throttleTime } from 'rxjs/operators';
import { share } from '../../../connection';
@@ -194,9 +195,9 @@ export class SqliteIndexerStorage extends IndexerStorageBase {
const schema = IndexerSchema[table];
for (const [field, values] of document.fields) {
const fieldSchema = schema[field];
// @ts-expect-error
// @ts-expect-error -- IndexerSchema uses runtime-keyed fields from each table schema.
const shouldIndex = fieldSchema.index !== false;
// @ts-expect-error
// @ts-expect-error -- IndexerSchema uses runtime-keyed fields from each table schema.
const shouldStore = fieldSchema.store !== false;
if (!shouldStore && !shouldIndex) continue;

View File

@@ -86,9 +86,9 @@ export class DummyIndexerStorage extends IndexerStorageBase {
return Promise.resolve();
}
override async refreshIfNeed(): Promise<void> {
return Promise.resolve();
return;
}
override async indexVersion(): Promise<number> {
return Promise.resolve(0);
return 0;
}
}

View File

@@ -190,6 +190,7 @@ export class BlobSyncImpl implements BlobSync {
): Promise<void> {
return Promise.race([
Promise.all(
// eslint-disable-next-line @typescript-eslint/await-thenable
peerId
? [this.fullDownloadPeer(peerId)]
: this.peers.map(p => this.fullDownloadPeer(p.peerId))

View File

@@ -125,8 +125,8 @@ export class TelemetryManager {
private mergeContext(event: TelemetryEvent): TelemetryEvent {
const mergedUserProps = {
...(this.context.userProperties ?? {}),
...(event.userProperties ?? {}),
...this.context.userProperties,
...event.userProperties,
};
const mergedContext = {

View File

@@ -1,6 +1,6 @@
import { Buffer } from 'node:buffer';
import type { Buffer } from 'node:buffer';
import { stringify as stringifyQuery } from 'node:querystring';
import { Readable } from 'node:stream';
import type { Readable } from 'node:stream';
import aws4 from 'aws4';
import { XMLParser } from 'fast-xml-parser';
@@ -180,16 +180,16 @@ export function parseListPartsXml(xml: string): ParsedListParts {
function buildEndpoint(config: S3CompatConfig) {
const url = new URL(config.endpoint);
if (config.forcePathStyle) {
const segments = url.pathname.split('/').filter(Boolean);
if (segments[0] !== config.bucket) {
const firstSegment = url.pathname.split('/').find(Boolean);
if (firstSegment !== config.bucket) {
url.pathname = joinPath(url.pathname, config.bucket);
}
return url;
}
const pathSegments = url.pathname.split('/').filter(Boolean);
const firstSegment = url.pathname.split('/').find(Boolean);
const hostHasBucket = url.hostname.startsWith(`${config.bucket}.`);
const pathHasBucket = pathSegments[0] === config.bucket;
const pathHasBucket = firstSegment === config.bucket;
if (!hostHasBucket && !pathHasBucket) {
url.hostname = `${config.bucket}.${url.hostname}`;
}
@@ -297,7 +297,7 @@ export class S3Compat implements S3CompatClient {
const expiresInSeconds = this.presignConfig.expiresInSeconds;
const path = this.buildObjectPath(key);
const queryString = buildQuery({
...(query ?? {}),
...query,
'X-Amz-Expires': expiresInSeconds,
});
const requestPath = queryString ? `${path}?${queryString}` : path;