Mirror of https://github.com/AppFlowy-IO/AppFlowy.git (synced 2024-08-30 18:12:39 +00:00)

commit 6078e46d3d (parent 441627783b)

    refactor: rename structs
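The commit renames the document-centric types to block-centric ones on both the Dart and Rust sides: DocumentDelta → BlockDelta, DocumentInfo → BlockInfo, DocumentId → BlockId, CreateDocParams → CreateBlockParams, ClientDocumentEditor → ClientBlockEditor, DocumentUser → BlockUser, and FlowyDocumentManager → BlockManager. For orientation, a minimal Dart sketch (not part of the diff; it assumes the generated BlockDelta bindings and the FolderEventApplyDocDelta wrapper shown in the hunks below) of what a call site looks like after the rename:

    // Hypothetical helper mirroring DocumentRepository.composeDelta after the rename.
    Future<Either<BlockDelta, FlowyError>> composeDelta(String blockId, String json) {
      final request = BlockDelta.create()
        ..blockId = blockId   // field renamed from docId
        ..deltaJson = json;
      return FolderEventApplyDocDelta(request).send();
    }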
@@ -84,8 +84,8 @@ class DocumentBloc extends Bloc<DocumentEvent, DocumentState> {
     listener.start();
     final result = await repo.openDocument();
     result.fold(
-      (doc) {
-        document = _decodeJsonToDocument(doc.deltaJson);
+      (block) {
+        document = _decodeJsonToDocument(block.deltaJson);
         _subscription = document.changes.listen((event) {
           final delta = event.item2;
           final documentDelta = document.toDelta();
@@ -10,14 +10,14 @@ class DocumentRepository {
     required this.docId,
   });

-  Future<Either<DocumentDelta, FlowyError>> openDocument() {
+  Future<Either<BlockDelta, FlowyError>> openDocument() {
     final request = ViewId(value: docId);
     return FolderEventOpenView(request).send();
   }

-  Future<Either<DocumentDelta, FlowyError>> composeDelta({required String data}) {
-    final request = DocumentDelta.create()
-      ..docId = docId
+  Future<Either<BlockDelta, FlowyError>> composeDelta({required String data}) {
+    final request = BlockDelta.create()
+      ..blockId = docId
       ..deltaJson = data;
     return FolderEventApplyDocDelta(request).send();
   }
@@ -7,7 +7,7 @@ import 'package:flowy_sdk/protobuf/flowy-error/errors.pb.dart';
 class ShareRepo {
   Future<Either<ExportData, FlowyError>> export(String docId, ExportType type) {
     final request = ExportPayload.create()
-      ..docId = docId
+      ..viewId = docId
       ..exportType = type;

     return FolderEventExportDocument(request).send();
@@ -271,14 +271,14 @@ class FolderEventOpenView {
   ViewId request;
   FolderEventOpenView(this.request);

-  Future<Either<DocumentDelta, FlowyError>> send() {
+  Future<Either<BlockDelta, FlowyError>> send() {
     final request = FFIRequest.create()
       ..event = FolderEvent.OpenView.toString()
       ..payload = requestToBytes(this.request);

     return Dispatch.asyncRequest(request)
         .then((bytesResult) => bytesResult.fold(
-              (okBytes) => left(DocumentDelta.fromBuffer(okBytes)),
+              (okBytes) => left(BlockDelta.fromBuffer(okBytes)),
               (errBytes) => right(FlowyError.fromBuffer(errBytes)),
             ));
   }
@@ -378,17 +378,17 @@ class FolderEventDeleteAllTrash {
 }

 class FolderEventApplyDocDelta {
-  DocumentDelta request;
+  BlockDelta request;
   FolderEventApplyDocDelta(this.request);

-  Future<Either<DocumentDelta, FlowyError>> send() {
+  Future<Either<BlockDelta, FlowyError>> send() {
     final request = FFIRequest.create()
       ..event = FolderEvent.ApplyDocDelta.toString()
       ..payload = requestToBytes(this.request);

     return Dispatch.asyncRequest(request)
         .then((bytesResult) => bytesResult.fold(
-              (okBytes) => left(DocumentDelta.fromBuffer(okBytes)),
+              (okBytes) => left(BlockDelta.fromBuffer(okBytes)),
               (errBytes) => right(FlowyError.fromBuffer(errBytes)),
             ));
   }
@ -12,15 +12,15 @@ import 'package:protobuf/protobuf.dart' as $pb;
|
||||
|
||||
import 'revision.pb.dart' as $0;
|
||||
|
||||
class CreateDocParams extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CreateDocParams', createEmptyInstance: create)
|
||||
class CreateBlockParams extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'CreateBlockParams', createEmptyInstance: create)
|
||||
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'id')
|
||||
..aOM<$0.RepeatedRevision>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'revisions', subBuilder: $0.RepeatedRevision.create)
|
||||
..hasRequiredFields = false
|
||||
;
|
||||
|
||||
CreateDocParams._() : super();
|
||||
factory CreateDocParams({
|
||||
CreateBlockParams._() : super();
|
||||
factory CreateBlockParams({
|
||||
$core.String? id,
|
||||
$0.RepeatedRevision? revisions,
|
||||
}) {
|
||||
@ -33,26 +33,26 @@ class CreateDocParams extends $pb.GeneratedMessage {
|
||||
}
|
||||
return _result;
|
||||
}
|
||||
factory CreateDocParams.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory CreateDocParams.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
factory CreateBlockParams.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory CreateBlockParams.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
|
||||
'Will be removed in next major version')
|
||||
CreateDocParams clone() => CreateDocParams()..mergeFromMessage(this);
|
||||
CreateBlockParams clone() => CreateBlockParams()..mergeFromMessage(this);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
|
||||
'Will be removed in next major version')
|
||||
CreateDocParams copyWith(void Function(CreateDocParams) updates) => super.copyWith((message) => updates(message as CreateDocParams)) as CreateDocParams; // ignore: deprecated_member_use
|
||||
CreateBlockParams copyWith(void Function(CreateBlockParams) updates) => super.copyWith((message) => updates(message as CreateBlockParams)) as CreateBlockParams; // ignore: deprecated_member_use
|
||||
$pb.BuilderInfo get info_ => _i;
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static CreateDocParams create() => CreateDocParams._();
|
||||
CreateDocParams createEmptyInstance() => create();
|
||||
static $pb.PbList<CreateDocParams> createRepeated() => $pb.PbList<CreateDocParams>();
|
||||
static CreateBlockParams create() => CreateBlockParams._();
|
||||
CreateBlockParams createEmptyInstance() => create();
|
||||
static $pb.PbList<CreateBlockParams> createRepeated() => $pb.PbList<CreateBlockParams>();
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static CreateDocParams getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<CreateDocParams>(create);
|
||||
static CreateDocParams? _defaultInstance;
|
||||
static CreateBlockParams getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<CreateBlockParams>(create);
|
||||
static CreateBlockParams? _defaultInstance;
|
||||
|
||||
@$pb.TagNumber(1)
|
||||
$core.String get id => $_getSZ(0);
|
||||
@ -75,8 +75,8 @@ class CreateDocParams extends $pb.GeneratedMessage {
|
||||
$0.RepeatedRevision ensureRevisions() => $_ensure(1);
|
||||
}
|
||||
|
||||
class DocumentInfo extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentInfo', createEmptyInstance: create)
|
||||
class BlockInfo extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BlockInfo', createEmptyInstance: create)
|
||||
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'docId')
|
||||
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'text')
|
||||
..aInt64(3, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'revId')
|
||||
@ -84,8 +84,8 @@ class DocumentInfo extends $pb.GeneratedMessage {
|
||||
..hasRequiredFields = false
|
||||
;
|
||||
|
||||
DocumentInfo._() : super();
|
||||
factory DocumentInfo({
|
||||
BlockInfo._() : super();
|
||||
factory BlockInfo({
|
||||
$core.String? docId,
|
||||
$core.String? text,
|
||||
$fixnum.Int64? revId,
|
||||
@ -106,26 +106,26 @@ class DocumentInfo extends $pb.GeneratedMessage {
|
||||
}
|
||||
return _result;
|
||||
}
|
||||
factory DocumentInfo.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory DocumentInfo.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
factory BlockInfo.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory BlockInfo.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
|
||||
'Will be removed in next major version')
|
||||
DocumentInfo clone() => DocumentInfo()..mergeFromMessage(this);
|
||||
BlockInfo clone() => BlockInfo()..mergeFromMessage(this);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
|
||||
'Will be removed in next major version')
|
||||
DocumentInfo copyWith(void Function(DocumentInfo) updates) => super.copyWith((message) => updates(message as DocumentInfo)) as DocumentInfo; // ignore: deprecated_member_use
|
||||
BlockInfo copyWith(void Function(BlockInfo) updates) => super.copyWith((message) => updates(message as BlockInfo)) as BlockInfo; // ignore: deprecated_member_use
|
||||
$pb.BuilderInfo get info_ => _i;
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static DocumentInfo create() => DocumentInfo._();
|
||||
DocumentInfo createEmptyInstance() => create();
|
||||
static $pb.PbList<DocumentInfo> createRepeated() => $pb.PbList<DocumentInfo>();
|
||||
static BlockInfo create() => BlockInfo._();
|
||||
BlockInfo createEmptyInstance() => create();
|
||||
static $pb.PbList<BlockInfo> createRepeated() => $pb.PbList<BlockInfo>();
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static DocumentInfo getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DocumentInfo>(create);
|
||||
static DocumentInfo? _defaultInstance;
|
||||
static BlockInfo getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<BlockInfo>(create);
|
||||
static BlockInfo? _defaultInstance;
|
||||
|
||||
@$pb.TagNumber(1)
|
||||
$core.String get docId => $_getSZ(0);
|
||||
@ -227,56 +227,56 @@ class ResetDocumentParams extends $pb.GeneratedMessage {
|
||||
$0.RepeatedRevision ensureRevisions() => $_ensure(1);
|
||||
}
|
||||
|
||||
class DocumentDelta extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentDelta', createEmptyInstance: create)
|
||||
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'docId')
|
||||
class BlockDelta extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BlockDelta', createEmptyInstance: create)
|
||||
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'blockId')
|
||||
..aOS(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'deltaJson')
|
||||
..hasRequiredFields = false
|
||||
;
|
||||
|
||||
DocumentDelta._() : super();
|
||||
factory DocumentDelta({
|
||||
$core.String? docId,
|
||||
BlockDelta._() : super();
|
||||
factory BlockDelta({
|
||||
$core.String? blockId,
|
||||
$core.String? deltaJson,
|
||||
}) {
|
||||
final _result = create();
|
||||
if (docId != null) {
|
||||
_result.docId = docId;
|
||||
if (blockId != null) {
|
||||
_result.blockId = blockId;
|
||||
}
|
||||
if (deltaJson != null) {
|
||||
_result.deltaJson = deltaJson;
|
||||
}
|
||||
return _result;
|
||||
}
|
||||
factory DocumentDelta.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory DocumentDelta.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
factory BlockDelta.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory BlockDelta.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
|
||||
'Will be removed in next major version')
|
||||
DocumentDelta clone() => DocumentDelta()..mergeFromMessage(this);
|
||||
BlockDelta clone() => BlockDelta()..mergeFromMessage(this);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
|
||||
'Will be removed in next major version')
|
||||
DocumentDelta copyWith(void Function(DocumentDelta) updates) => super.copyWith((message) => updates(message as DocumentDelta)) as DocumentDelta; // ignore: deprecated_member_use
|
||||
BlockDelta copyWith(void Function(BlockDelta) updates) => super.copyWith((message) => updates(message as BlockDelta)) as BlockDelta; // ignore: deprecated_member_use
|
||||
$pb.BuilderInfo get info_ => _i;
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static DocumentDelta create() => DocumentDelta._();
|
||||
DocumentDelta createEmptyInstance() => create();
|
||||
static $pb.PbList<DocumentDelta> createRepeated() => $pb.PbList<DocumentDelta>();
|
||||
static BlockDelta create() => BlockDelta._();
|
||||
BlockDelta createEmptyInstance() => create();
|
||||
static $pb.PbList<BlockDelta> createRepeated() => $pb.PbList<BlockDelta>();
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static DocumentDelta getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DocumentDelta>(create);
|
||||
static DocumentDelta? _defaultInstance;
|
||||
static BlockDelta getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<BlockDelta>(create);
|
||||
static BlockDelta? _defaultInstance;
|
||||
|
||||
@$pb.TagNumber(1)
|
||||
$core.String get docId => $_getSZ(0);
|
||||
$core.String get blockId => $_getSZ(0);
|
||||
@$pb.TagNumber(1)
|
||||
set docId($core.String v) { $_setString(0, v); }
|
||||
set blockId($core.String v) { $_setString(0, v); }
|
||||
@$pb.TagNumber(1)
|
||||
$core.bool hasDocId() => $_has(0);
|
||||
$core.bool hasBlockId() => $_has(0);
|
||||
@$pb.TagNumber(1)
|
||||
void clearDocId() => clearField(1);
|
||||
void clearBlockId() => clearField(1);
|
||||
|
||||
@$pb.TagNumber(2)
|
||||
$core.String get deltaJson => $_getSZ(1);
|
||||
@ -363,14 +363,14 @@ class NewDocUser extends $pb.GeneratedMessage {
|
||||
void clearDocId() => clearField(3);
|
||||
}
|
||||
|
||||
class DocumentId extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'DocumentId', createEmptyInstance: create)
|
||||
class BlockId extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'BlockId', createEmptyInstance: create)
|
||||
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'value')
|
||||
..hasRequiredFields = false
|
||||
;
|
||||
|
||||
DocumentId._() : super();
|
||||
factory DocumentId({
|
||||
BlockId._() : super();
|
||||
factory BlockId({
|
||||
$core.String? value,
|
||||
}) {
|
||||
final _result = create();
|
||||
@ -379,26 +379,26 @@ class DocumentId extends $pb.GeneratedMessage {
|
||||
}
|
||||
return _result;
|
||||
}
|
||||
factory DocumentId.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory DocumentId.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
factory BlockId.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
|
||||
factory BlockId.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
|
||||
'Will be removed in next major version')
|
||||
DocumentId clone() => DocumentId()..mergeFromMessage(this);
|
||||
BlockId clone() => BlockId()..mergeFromMessage(this);
|
||||
@$core.Deprecated(
|
||||
'Using this can add significant overhead to your binary. '
|
||||
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
|
||||
'Will be removed in next major version')
|
||||
DocumentId copyWith(void Function(DocumentId) updates) => super.copyWith((message) => updates(message as DocumentId)) as DocumentId; // ignore: deprecated_member_use
|
||||
BlockId copyWith(void Function(BlockId) updates) => super.copyWith((message) => updates(message as BlockId)) as BlockId; // ignore: deprecated_member_use
|
||||
$pb.BuilderInfo get info_ => _i;
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static DocumentId create() => DocumentId._();
|
||||
DocumentId createEmptyInstance() => create();
|
||||
static $pb.PbList<DocumentId> createRepeated() => $pb.PbList<DocumentId>();
|
||||
static BlockId create() => BlockId._();
|
||||
BlockId createEmptyInstance() => create();
|
||||
static $pb.PbList<BlockId> createRepeated() => $pb.PbList<BlockId>();
|
||||
@$core.pragma('dart2js:noInline')
|
||||
static DocumentId getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DocumentId>(create);
|
||||
static DocumentId? _defaultInstance;
|
||||
static BlockId getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<BlockId>(create);
|
||||
static BlockId? _defaultInstance;
|
||||
|
||||
@$pb.TagNumber(1)
|
||||
$core.String get value => $_getSZ(0);
|
||||
|
@ -8,20 +8,20 @@
|
||||
import 'dart:core' as $core;
|
||||
import 'dart:convert' as $convert;
|
||||
import 'dart:typed_data' as $typed_data;
|
||||
@$core.Deprecated('Use createDocParamsDescriptor instead')
|
||||
const CreateDocParams$json = const {
|
||||
'1': 'CreateDocParams',
|
||||
@$core.Deprecated('Use createBlockParamsDescriptor instead')
|
||||
const CreateBlockParams$json = const {
|
||||
'1': 'CreateBlockParams',
|
||||
'2': const [
|
||||
const {'1': 'id', '3': 1, '4': 1, '5': 9, '10': 'id'},
|
||||
const {'1': 'revisions', '3': 2, '4': 1, '5': 11, '6': '.RepeatedRevision', '10': 'revisions'},
|
||||
],
|
||||
};
|
||||
|
||||
/// Descriptor for `CreateDocParams`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List createDocParamsDescriptor = $convert.base64Decode('Cg9DcmVhdGVEb2NQYXJhbXMSDgoCaWQYASABKAlSAmlkEi8KCXJldmlzaW9ucxgCIAEoCzIRLlJlcGVhdGVkUmV2aXNpb25SCXJldmlzaW9ucw==');
|
||||
@$core.Deprecated('Use documentInfoDescriptor instead')
|
||||
const DocumentInfo$json = const {
|
||||
'1': 'DocumentInfo',
|
||||
/// Descriptor for `CreateBlockParams`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List createBlockParamsDescriptor = $convert.base64Decode('ChFDcmVhdGVCbG9ja1BhcmFtcxIOCgJpZBgBIAEoCVICaWQSLwoJcmV2aXNpb25zGAIgASgLMhEuUmVwZWF0ZWRSZXZpc2lvblIJcmV2aXNpb25z');
|
||||
@$core.Deprecated('Use blockInfoDescriptor instead')
|
||||
const BlockInfo$json = const {
|
||||
'1': 'BlockInfo',
|
||||
'2': const [
|
||||
const {'1': 'doc_id', '3': 1, '4': 1, '5': 9, '10': 'docId'},
|
||||
const {'1': 'text', '3': 2, '4': 1, '5': 9, '10': 'text'},
|
||||
@ -30,8 +30,8 @@ const DocumentInfo$json = const {
|
||||
],
|
||||
};
|
||||
|
||||
/// Descriptor for `DocumentInfo`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List documentInfoDescriptor = $convert.base64Decode('CgxEb2N1bWVudEluZm8SFQoGZG9jX2lkGAEgASgJUgVkb2NJZBISCgR0ZXh0GAIgASgJUgR0ZXh0EhUKBnJldl9pZBgDIAEoA1IFcmV2SWQSHgoLYmFzZV9yZXZfaWQYBCABKANSCWJhc2VSZXZJZA==');
|
||||
/// Descriptor for `BlockInfo`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List blockInfoDescriptor = $convert.base64Decode('CglCbG9ja0luZm8SFQoGZG9jX2lkGAEgASgJUgVkb2NJZBISCgR0ZXh0GAIgASgJUgR0ZXh0EhUKBnJldl9pZBgDIAEoA1IFcmV2SWQSHgoLYmFzZV9yZXZfaWQYBCABKANSCWJhc2VSZXZJZA==');
|
||||
@$core.Deprecated('Use resetDocumentParamsDescriptor instead')
|
||||
const ResetDocumentParams$json = const {
|
||||
'1': 'ResetDocumentParams',
|
||||
@ -43,17 +43,17 @@ const ResetDocumentParams$json = const {
|
||||
|
||||
/// Descriptor for `ResetDocumentParams`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List resetDocumentParamsDescriptor = $convert.base64Decode('ChNSZXNldERvY3VtZW50UGFyYW1zEhUKBmRvY19pZBgBIAEoCVIFZG9jSWQSLwoJcmV2aXNpb25zGAIgASgLMhEuUmVwZWF0ZWRSZXZpc2lvblIJcmV2aXNpb25z');
|
||||
@$core.Deprecated('Use documentDeltaDescriptor instead')
|
||||
const DocumentDelta$json = const {
|
||||
'1': 'DocumentDelta',
|
||||
@$core.Deprecated('Use blockDeltaDescriptor instead')
|
||||
const BlockDelta$json = const {
|
||||
'1': 'BlockDelta',
|
||||
'2': const [
|
||||
const {'1': 'doc_id', '3': 1, '4': 1, '5': 9, '10': 'docId'},
|
||||
const {'1': 'block_id', '3': 1, '4': 1, '5': 9, '10': 'blockId'},
|
||||
const {'1': 'delta_json', '3': 2, '4': 1, '5': 9, '10': 'deltaJson'},
|
||||
],
|
||||
};
|
||||
|
||||
/// Descriptor for `DocumentDelta`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List documentDeltaDescriptor = $convert.base64Decode('Cg1Eb2N1bWVudERlbHRhEhUKBmRvY19pZBgBIAEoCVIFZG9jSWQSHQoKZGVsdGFfanNvbhgCIAEoCVIJZGVsdGFKc29u');
|
||||
/// Descriptor for `BlockDelta`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List blockDeltaDescriptor = $convert.base64Decode('CgpCbG9ja0RlbHRhEhkKCGJsb2NrX2lkGAEgASgJUgdibG9ja0lkEh0KCmRlbHRhX2pzb24YAiABKAlSCWRlbHRhSnNvbg==');
|
||||
@$core.Deprecated('Use newDocUserDescriptor instead')
|
||||
const NewDocUser$json = const {
|
||||
'1': 'NewDocUser',
|
||||
@ -66,13 +66,13 @@ const NewDocUser$json = const {
|
||||
|
||||
/// Descriptor for `NewDocUser`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List newDocUserDescriptor = $convert.base64Decode('CgpOZXdEb2NVc2VyEhcKB3VzZXJfaWQYASABKAlSBnVzZXJJZBIVCgZyZXZfaWQYAiABKANSBXJldklkEhUKBmRvY19pZBgDIAEoCVIFZG9jSWQ=');
|
||||
@$core.Deprecated('Use documentIdDescriptor instead')
|
||||
const DocumentId$json = const {
|
||||
'1': 'DocumentId',
|
||||
@$core.Deprecated('Use blockIdDescriptor instead')
|
||||
const BlockId$json = const {
|
||||
'1': 'BlockId',
|
||||
'2': const [
|
||||
const {'1': 'value', '3': 1, '4': 1, '5': 9, '10': 'value'},
|
||||
],
|
||||
};
|
||||
|
||||
/// Descriptor for `DocumentId`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List documentIdDescriptor = $convert.base64Decode('CgpEb2N1bWVudElkEhQKBXZhbHVlGAEgASgJUgV2YWx1ZQ==');
|
||||
/// Descriptor for `BlockId`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List blockIdDescriptor = $convert.base64Decode('CgdCbG9ja0lkEhQKBXZhbHVlGAEgASgJUgV2YWx1ZQ==');
|
||||
|
@ -15,19 +15,19 @@ export 'share.pbenum.dart';
|
||||
|
||||
class ExportPayload extends $pb.GeneratedMessage {
|
||||
static final $pb.BuilderInfo _i = $pb.BuilderInfo(const $core.bool.fromEnvironment('protobuf.omit_message_names') ? '' : 'ExportPayload', createEmptyInstance: create)
|
||||
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'docId')
|
||||
..aOS(1, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'viewId')
|
||||
..e<ExportType>(2, const $core.bool.fromEnvironment('protobuf.omit_field_names') ? '' : 'exportType', $pb.PbFieldType.OE, defaultOrMaker: ExportType.Text, valueOf: ExportType.valueOf, enumValues: ExportType.values)
|
||||
..hasRequiredFields = false
|
||||
;
|
||||
|
||||
ExportPayload._() : super();
|
||||
factory ExportPayload({
|
||||
$core.String? docId,
|
||||
$core.String? viewId,
|
||||
ExportType? exportType,
|
||||
}) {
|
||||
final _result = create();
|
||||
if (docId != null) {
|
||||
_result.docId = docId;
|
||||
if (viewId != null) {
|
||||
_result.viewId = viewId;
|
||||
}
|
||||
if (exportType != null) {
|
||||
_result.exportType = exportType;
|
||||
@ -56,13 +56,13 @@ class ExportPayload extends $pb.GeneratedMessage {
|
||||
static ExportPayload? _defaultInstance;
|
||||
|
||||
@$pb.TagNumber(1)
|
||||
$core.String get docId => $_getSZ(0);
|
||||
$core.String get viewId => $_getSZ(0);
|
||||
@$pb.TagNumber(1)
|
||||
set docId($core.String v) { $_setString(0, v); }
|
||||
set viewId($core.String v) { $_setString(0, v); }
|
||||
@$pb.TagNumber(1)
|
||||
$core.bool hasDocId() => $_has(0);
|
||||
$core.bool hasViewId() => $_has(0);
|
||||
@$pb.TagNumber(1)
|
||||
void clearDocId() => clearField(1);
|
||||
void clearViewId() => clearField(1);
|
||||
|
||||
@$pb.TagNumber(2)
|
||||
ExportType get exportType => $_getN(1);
|
||||
|
@ -24,13 +24,13 @@ final $typed_data.Uint8List exportTypeDescriptor = $convert.base64Decode('CgpFeH
|
||||
const ExportPayload$json = const {
|
||||
'1': 'ExportPayload',
|
||||
'2': const [
|
||||
const {'1': 'doc_id', '3': 1, '4': 1, '5': 9, '10': 'docId'},
|
||||
const {'1': 'view_id', '3': 1, '4': 1, '5': 9, '10': 'viewId'},
|
||||
const {'1': 'export_type', '3': 2, '4': 1, '5': 14, '6': '.ExportType', '10': 'exportType'},
|
||||
],
|
||||
};
|
||||
|
||||
/// Descriptor for `ExportPayload`. Decode as a `google.protobuf.DescriptorProto`.
|
||||
final $typed_data.Uint8List exportPayloadDescriptor = $convert.base64Decode('Cg1FeHBvcnRQYXlsb2FkEhUKBmRvY19pZBgBIAEoCVIFZG9jSWQSLAoLZXhwb3J0X3R5cGUYAiABKA4yCy5FeHBvcnRUeXBlUgpleHBvcnRUeXBl');
|
||||
final $typed_data.Uint8List exportPayloadDescriptor = $convert.base64Decode('Cg1FeHBvcnRQYXlsb2FkEhcKB3ZpZXdfaWQYASABKAlSBnZpZXdJZBIsCgtleHBvcnRfdHlwZRgCIAEoDjILLkV4cG9ydFR5cGVSCmV4cG9ydFR5cGU=');
|
||||
@$core.Deprecated('Use exportDataDescriptor instead')
|
||||
const ExportData$json = const {
|
||||
'1': 'ExportData',
|
||||
|
frontend/rust-lib/Cargo.lock (generated, 1 change)
@@ -1108,6 +1108,7 @@ name = "flowy-sync"
 version = "0.1.0"
 dependencies = [
  "async-stream",
  "async-trait",
  "bytes",
  "dashmap",
  "diesel",
@@ -3,11 +3,12 @@ use crate::web_socket::{make_document_ws_manager, EditorCommandSender};
 use crate::{
     errors::FlowyError,
     queue::{EditorCommand, EditorCommandQueue},
-    DocumentUser, DocumentWSReceiver,
+    BlockUser,
 };
 use bytes::Bytes;
+use flowy_collaboration::entities::ws_data::ServerRevisionWSData;
 use flowy_collaboration::{
-    entities::{document_info::DocumentInfo, revision::Revision},
+    entities::{document_info::BlockInfo, revision::Revision},
     errors::CollaborateResult,
     util::make_delta_from_revisions,
 };
@@ -19,10 +20,11 @@ use lib_ot::{
     core::{Interval, Operation},
     rich_text::{RichTextAttribute, RichTextDelta},
 };
+use lib_ws::WSConnectState;
 use std::sync::Arc;
 use tokio::sync::{mpsc, oneshot};

-pub struct ClientDocumentEditor {
+pub struct ClientBlockEditor {
     pub doc_id: String,
     #[allow(dead_code)]
     rev_manager: Arc<RevisionManager>,
@@ -30,16 +32,16 @@ pub struct ClientDocumentEditor {
     edit_cmd_tx: EditorCommandSender,
 }

-impl ClientDocumentEditor {
+impl ClientBlockEditor {
     pub(crate) async fn new(
         doc_id: &str,
-        user: Arc<dyn DocumentUser>,
+        user: Arc<dyn BlockUser>,
         mut rev_manager: RevisionManager,
         rev_web_socket: Arc<dyn RevisionWebSocket>,
         cloud_service: Arc<dyn RevisionCloudService>,
     ) -> FlowyResult<Arc<Self>> {
         let document_info = rev_manager
-            .load::<DocumentInfoBuilder, DocumentRevisionCompact>(cloud_service)
+            .load::<BlockInfoBuilder, DocumentRevisionCompact>(cloud_service)
             .await?;
         let delta = document_info.delta()?;
         let rev_manager = Arc::new(rev_manager);
@@ -138,9 +140,9 @@ impl ClientDocumentEditor {
         Ok(())
     }

-    pub async fn document_json(&self) -> FlowyResult<String> {
+    pub async fn block_json(&self) -> FlowyResult<String> {
         let (ret, rx) = oneshot::channel::<CollaborateResult<String>>();
-        let msg = EditorCommand::ReadDocumentAsJson { ret };
+        let msg = EditorCommand::ReadBlockJson { ret };
         let _ = self.edit_cmd_tx.send(msg).await;
         let json = rx.await.map_err(internal_error)??;
         Ok(json)
@@ -163,12 +165,16 @@ impl ClientDocumentEditor {
         self.ws_manager.stop();
     }

-    pub(crate) fn ws_handler(&self) -> Arc<dyn DocumentWSReceiver> {
-        self.ws_manager.clone()
+    pub(crate) async fn receive_ws_data(&self, data: ServerRevisionWSData) -> Result<(), FlowyError> {
+        self.ws_manager.receive_ws_data(data).await
     }

+    pub(crate) fn receive_ws_state(&self, state: &WSConnectState) {
+        self.ws_manager.connect_state_changed(state.clone());
+    }
 }

-impl std::ops::Drop for ClientDocumentEditor {
+impl std::ops::Drop for ClientBlockEditor {
     fn drop(&mut self) {
         tracing::trace!("{} ClientDocumentEditor was dropped", self.doc_id)
     }
@@ -176,7 +182,7 @@ impl std::ops::Drop for ClientDocumentEditor {

 // The edit queue will exit after the EditorCommandSender was dropped.
 fn spawn_edit_queue(
-    user: Arc<dyn DocumentUser>,
+    user: Arc<dyn BlockUser>,
     rev_manager: Arc<RevisionManager>,
     delta: RichTextDelta,
 ) -> EditorCommandSender {
@@ -187,10 +193,10 @@ fn spawn_edit_queue(
 }

 #[cfg(feature = "flowy_unit_test")]
-impl ClientDocumentEditor {
+impl ClientBlockEditor {
     pub async fn doc_json(&self) -> FlowyResult<String> {
         let (ret, rx) = oneshot::channel::<CollaborateResult<String>>();
-        let msg = EditorCommand::ReadDocumentAsJson { ret };
+        let msg = EditorCommand::ReadBlockJson { ret };
         let _ = self.edit_cmd_tx.send(msg).await;
         let s = rx.await.map_err(internal_error)??;
         Ok(s)
@@ -198,7 +204,7 @@ impl ClientDocumentEditor {

     pub async fn doc_delta(&self) -> FlowyResult<RichTextDelta> {
         let (ret, rx) = oneshot::channel::<CollaborateResult<RichTextDelta>>();
-        let msg = EditorCommand::ReadDocumentAsDelta { ret };
+        let msg = EditorCommand::ReadBlockDelta { ret };
         let _ = self.edit_cmd_tx.send(msg).await;
         let delta = rx.await.map_err(internal_error)??;
         Ok(delta)
@@ -209,16 +215,16 @@ impl ClientDocumentEditor {
     }
 }

-struct DocumentInfoBuilder();
-impl RevisionObjectBuilder for DocumentInfoBuilder {
-    type Output = DocumentInfo;
+struct BlockInfoBuilder();
+impl RevisionObjectBuilder for BlockInfoBuilder {
+    type Output = BlockInfo;

     fn build_object(object_id: &str, revisions: Vec<Revision>) -> FlowyResult<Self::Output> {
         let (base_rev_id, rev_id) = revisions.last().unwrap().pair_rev_id();
         let mut delta = make_delta_from_revisions(revisions)?;
         correct_delta(&mut delta);

-        Result::<DocumentInfo, FlowyError>::Ok(DocumentInfo {
+        Result::<BlockInfo, FlowyError>::Ok(BlockInfo {
             doc_id: object_id.to_owned(),
             text: delta.to_json(),
             rev_id,
@@ -11,13 +11,13 @@ pub mod errors {
 pub const DOCUMENT_SYNC_INTERVAL_IN_MILLIS: u64 = 1000;

 use crate::errors::FlowyError;
-use flowy_collaboration::entities::document_info::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams};
+use flowy_collaboration::entities::document_info::{BlockId, BlockInfo, CreateBlockParams, ResetDocumentParams};
 use lib_infra::future::FutureResult;

-pub trait DocumentCloudService: Send + Sync {
-    fn create_document(&self, token: &str, params: CreateDocParams) -> FutureResult<(), FlowyError>;
+pub trait BlockCloudService: Send + Sync {
+    fn create_block(&self, token: &str, params: CreateBlockParams) -> FutureResult<(), FlowyError>;

-    fn read_document(&self, token: &str, params: DocumentId) -> FutureResult<Option<DocumentInfo>, FlowyError>;
+    fn read_block(&self, token: &str, params: BlockId) -> FutureResult<Option<BlockInfo>, FlowyError>;

-    fn update_document(&self, token: &str, params: ResetDocumentParams) -> FutureResult<(), FlowyError>;
+    fn update_block(&self, token: &str, params: ResetDocumentParams) -> FutureResult<(), FlowyError>;
 }
@ -1,76 +1,64 @@
|
||||
use crate::{editor::ClientDocumentEditor, errors::FlowyError, DocumentCloudService};
|
||||
use async_trait::async_trait;
|
||||
use crate::{editor::ClientBlockEditor, errors::FlowyError, BlockCloudService};
|
||||
use bytes::Bytes;
|
||||
use dashmap::DashMap;
|
||||
use flowy_collaboration::entities::{
|
||||
document_info::{DocumentDelta, DocumentId},
|
||||
document_info::{BlockDelta, BlockId},
|
||||
revision::{md5, RepeatedRevision, Revision},
|
||||
ws_data::ServerRevisionWSData,
|
||||
};
|
||||
use flowy_database::ConnectionPool;
|
||||
use flowy_error::FlowyResult;
|
||||
use flowy_sync::{RevisionCache, RevisionCloudService, RevisionManager, RevisionWebSocket};
|
||||
use flowy_sync::{RevisionCloudService, RevisionManager, RevisionPersistence, RevisionWebSocket};
|
||||
use lib_infra::future::FutureResult;
|
||||
use lib_ws::WSConnectState;
|
||||
use std::{convert::TryInto, sync::Arc};
|
||||
|
||||
pub trait DocumentUser: Send + Sync {
|
||||
pub trait BlockUser: Send + Sync {
|
||||
fn user_dir(&self) -> Result<String, FlowyError>;
|
||||
fn user_id(&self) -> Result<String, FlowyError>;
|
||||
fn token(&self) -> Result<String, FlowyError>;
|
||||
fn db_pool(&self) -> Result<Arc<ConnectionPool>, FlowyError>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub(crate) trait DocumentWSReceiver: Send + Sync {
|
||||
async fn receive_ws_data(&self, data: ServerRevisionWSData) -> Result<(), FlowyError>;
|
||||
fn connect_state_changed(&self, state: WSConnectState);
|
||||
}
|
||||
type WebSocketDataReceivers = Arc<DashMap<String, Arc<dyn DocumentWSReceiver>>>;
|
||||
pub struct FlowyDocumentManager {
|
||||
cloud_service: Arc<dyn DocumentCloudService>,
|
||||
ws_data_receivers: WebSocketDataReceivers,
|
||||
pub struct BlockManager {
|
||||
cloud_service: Arc<dyn BlockCloudService>,
|
||||
rev_web_socket: Arc<dyn RevisionWebSocket>,
|
||||
document_handlers: Arc<DocumentEditorHandlers>,
|
||||
document_user: Arc<dyn DocumentUser>,
|
||||
block_handlers: Arc<BlockEditorHandlers>,
|
||||
document_user: Arc<dyn BlockUser>,
|
||||
}
|
||||
|
||||
impl FlowyDocumentManager {
|
||||
impl BlockManager {
|
||||
pub fn new(
|
||||
cloud_service: Arc<dyn DocumentCloudService>,
|
||||
document_user: Arc<dyn DocumentUser>,
|
||||
cloud_service: Arc<dyn BlockCloudService>,
|
||||
document_user: Arc<dyn BlockUser>,
|
||||
rev_web_socket: Arc<dyn RevisionWebSocket>,
|
||||
) -> Self {
|
||||
let ws_data_receivers = Arc::new(DashMap::new());
|
||||
let document_handlers = Arc::new(DocumentEditorHandlers::new());
|
||||
let block_handlers = Arc::new(BlockEditorHandlers::new());
|
||||
Self {
|
||||
cloud_service,
|
||||
ws_data_receivers,
|
||||
rev_web_socket,
|
||||
document_handlers,
|
||||
block_handlers,
|
||||
document_user,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(&self) -> FlowyResult<()> {
|
||||
listen_ws_state_changed(self.rev_web_socket.clone(), self.ws_data_receivers.clone());
|
||||
listen_ws_state_changed(self.rev_web_socket.clone(), self.block_handlers.clone());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self, doc_id), fields(doc_id), err)]
|
||||
pub async fn open_document<T: AsRef<str>>(&self, doc_id: T) -> Result<Arc<ClientDocumentEditor>, FlowyError> {
|
||||
let doc_id = doc_id.as_ref();
|
||||
tracing::Span::current().record("doc_id", &doc_id);
|
||||
self.get_editor(doc_id).await
|
||||
#[tracing::instrument(level = "debug", skip(self, block_id), fields(block_id), err)]
|
||||
pub async fn open_block<T: AsRef<str>>(&self, block_id: T) -> Result<Arc<ClientBlockEditor>, FlowyError> {
|
||||
let block_id = block_id.as_ref();
|
||||
tracing::Span::current().record("block_id", &block_id);
|
||||
self.get_block_editor(block_id).await
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip(self, doc_id), fields(doc_id), err)]
|
||||
pub fn close_document<T: AsRef<str>>(&self, doc_id: T) -> Result<(), FlowyError> {
|
||||
let doc_id = doc_id.as_ref();
|
||||
tracing::Span::current().record("doc_id", &doc_id);
|
||||
self.document_handlers.remove(doc_id);
|
||||
self.ws_data_receivers.remove(doc_id);
|
||||
#[tracing::instrument(level = "trace", skip(self, block_id), fields(block_id), err)]
|
||||
pub fn close_block<T: AsRef<str>>(&self, block_id: T) -> Result<(), FlowyError> {
|
||||
let block_id = block_id.as_ref();
|
||||
tracing::Span::current().record("block_id", &block_id);
|
||||
self.block_handlers.remove(block_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -78,18 +66,17 @@ impl FlowyDocumentManager {
|
||||
pub fn delete<T: AsRef<str>>(&self, doc_id: T) -> Result<(), FlowyError> {
|
||||
let doc_id = doc_id.as_ref();
|
||||
tracing::Span::current().record("doc_id", &doc_id);
|
||||
self.document_handlers.remove(doc_id);
|
||||
self.ws_data_receivers.remove(doc_id);
|
||||
self.block_handlers.remove(doc_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self, delta), fields(doc_id = %delta.doc_id), err)]
|
||||
pub async fn receive_local_delta(&self, delta: DocumentDelta) -> Result<DocumentDelta, FlowyError> {
|
||||
let editor = self.get_editor(&delta.doc_id).await?;
|
||||
#[tracing::instrument(level = "debug", skip(self, delta), fields(doc_id = %delta.block_id), err)]
|
||||
pub async fn receive_local_delta(&self, delta: BlockDelta) -> Result<BlockDelta, FlowyError> {
|
||||
let editor = self.get_block_editor(&delta.block_id).await?;
|
||||
let _ = editor.compose_local_delta(Bytes::from(delta.delta_json)).await?;
|
||||
let document_json = editor.document_json().await?;
|
||||
Ok(DocumentDelta {
|
||||
doc_id: delta.doc_id.clone(),
|
||||
let document_json = editor.block_json().await?;
|
||||
Ok(BlockDelta {
|
||||
block_id: delta.block_id.clone(),
|
||||
delta_json: document_json,
|
||||
})
|
||||
}
|
||||
@ -105,9 +92,9 @@ impl FlowyDocumentManager {
|
||||
pub async fn receive_ws_data(&self, data: Bytes) {
|
||||
let result: Result<ServerRevisionWSData, protobuf::ProtobufError> = data.try_into();
|
||||
match result {
|
||||
Ok(data) => match self.ws_data_receivers.get(&data.object_id) {
|
||||
Ok(data) => match self.block_handlers.get(&data.object_id) {
|
||||
None => tracing::error!("Can't find any source handler for {:?}-{:?}", data.object_id, data.ty),
|
||||
Some(handler) => match handler.receive_ws_data(data).await {
|
||||
Some(block_editor) => match block_editor.receive_ws_data(data).await {
|
||||
Ok(_) => {}
|
||||
Err(e) => tracing::error!("{}", e),
|
||||
},
|
||||
@ -119,59 +106,57 @@ impl FlowyDocumentManager {
|
||||
}
|
||||
}
|
||||
|
||||
impl FlowyDocumentManager {
|
||||
async fn get_editor(&self, doc_id: &str) -> FlowyResult<Arc<ClientDocumentEditor>> {
|
||||
match self.document_handlers.get(doc_id) {
|
||||
impl BlockManager {
|
||||
async fn get_block_editor(&self, block_id: &str) -> FlowyResult<Arc<ClientBlockEditor>> {
|
||||
match self.block_handlers.get(block_id) {
|
||||
None => {
|
||||
let db_pool = self.document_user.db_pool()?;
|
||||
self.make_editor(doc_id, db_pool).await
|
||||
self.make_block_editor(block_id, db_pool).await
|
||||
}
|
||||
Some(editor) => Ok(editor),
|
||||
}
|
||||
}
|
||||
|
||||
async fn make_editor(
|
||||
async fn make_block_editor(
|
||||
&self,
|
||||
doc_id: &str,
|
||||
block_id: &str,
|
||||
pool: Arc<ConnectionPool>,
|
||||
) -> Result<Arc<ClientDocumentEditor>, FlowyError> {
|
||||
) -> Result<Arc<ClientBlockEditor>, FlowyError> {
|
||||
let user = self.document_user.clone();
|
||||
let token = self.document_user.token()?;
|
||||
let rev_manager = self.make_rev_manager(doc_id, pool.clone())?;
|
||||
let rev_manager = self.make_rev_manager(block_id, pool.clone())?;
|
||||
let cloud_service = Arc::new(DocumentRevisionCloudServiceImpl {
|
||||
token,
|
||||
server: self.cloud_service.clone(),
|
||||
});
|
||||
let doc_editor =
|
||||
ClientDocumentEditor::new(doc_id, user, rev_manager, self.rev_web_socket.clone(), cloud_service).await?;
|
||||
self.ws_data_receivers
|
||||
.insert(doc_id.to_string(), doc_editor.ws_handler());
|
||||
self.document_handlers.insert(doc_id, &doc_editor);
|
||||
ClientBlockEditor::new(block_id, user, rev_manager, self.rev_web_socket.clone(), cloud_service).await?;
|
||||
self.block_handlers.insert(block_id, &doc_editor);
|
||||
Ok(doc_editor)
|
||||
}
|
||||
|
||||
fn make_rev_manager(&self, doc_id: &str, pool: Arc<ConnectionPool>) -> Result<RevisionManager, FlowyError> {
|
||||
let user_id = self.document_user.user_id()?;
|
||||
let cache = Arc::new(RevisionCache::new(&user_id, doc_id, pool));
|
||||
Ok(RevisionManager::new(&user_id, doc_id, cache))
|
||||
let rev_persistence = Arc::new(RevisionPersistence::new(&user_id, doc_id, pool));
|
||||
Ok(RevisionManager::new(&user_id, doc_id, rev_persistence))
|
||||
}
|
||||
}
|
||||
|
||||
struct DocumentRevisionCloudServiceImpl {
|
||||
token: String,
|
||||
server: Arc<dyn DocumentCloudService>,
|
||||
server: Arc<dyn BlockCloudService>,
|
||||
}
|
||||
|
||||
impl RevisionCloudService for DocumentRevisionCloudServiceImpl {
|
||||
#[tracing::instrument(level = "trace", skip(self))]
|
||||
fn fetch_object(&self, user_id: &str, object_id: &str) -> FutureResult<Vec<Revision>, FlowyError> {
|
||||
let params: DocumentId = object_id.to_string().into();
|
||||
let params: BlockId = object_id.to_string().into();
|
||||
let server = self.server.clone();
|
||||
let token = self.token.clone();
|
||||
let user_id = user_id.to_string();
|
||||
|
||||
FutureResult::new(async move {
|
||||
match server.read_document(&token, params).await? {
|
||||
match server.read_block(&token, params).await? {
|
||||
None => Err(FlowyError::record_not_found().context("Remote doesn't have this document")),
|
||||
Some(doc) => {
|
||||
let delta_data = Bytes::from(doc.text.clone());
|
||||
@ -185,51 +170,50 @@ impl RevisionCloudService for DocumentRevisionCloudServiceImpl {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DocumentEditorHandlers {
|
||||
inner: DashMap<String, Arc<ClientDocumentEditor>>,
|
||||
pub struct BlockEditorHandlers {
|
||||
inner: DashMap<String, Arc<ClientBlockEditor>>,
|
||||
}
|
||||
|
||||
impl DocumentEditorHandlers {
|
||||
impl BlockEditorHandlers {
|
||||
fn new() -> Self {
|
||||
Self { inner: DashMap::new() }
|
||||
}
|
||||
|
||||
pub(crate) fn insert(&self, doc_id: &str, doc: &Arc<ClientDocumentEditor>) {
|
||||
if self.inner.contains_key(doc_id) {
|
||||
log::warn!("Doc:{} already exists in cache", doc_id);
|
||||
pub(crate) fn insert(&self, block_id: &str, doc: &Arc<ClientBlockEditor>) {
|
||||
if self.inner.contains_key(block_id) {
|
||||
log::warn!("Doc:{} already exists in cache", block_id);
|
||||
}
|
||||
self.inner.insert(doc_id.to_string(), doc.clone());
|
||||
self.inner.insert(block_id.to_string(), doc.clone());
|
||||
}
|
||||
|
||||
pub(crate) fn contains(&self, doc_id: &str) -> bool {
|
||||
self.inner.get(doc_id).is_some()
|
||||
pub(crate) fn contains(&self, block_id: &str) -> bool {
|
||||
self.inner.get(block_id).is_some()
|
||||
}
|
||||
|
||||
pub(crate) fn get(&self, doc_id: &str) -> Option<Arc<ClientDocumentEditor>> {
|
||||
if !self.contains(doc_id) {
|
||||
pub(crate) fn get(&self, block_id: &str) -> Option<Arc<ClientBlockEditor>> {
|
||||
if !self.contains(block_id) {
|
||||
return None;
|
||||
}
|
||||
let opened_doc = self.inner.get(doc_id).unwrap();
|
||||
let opened_doc = self.inner.get(block_id).unwrap();
|
||||
Some(opened_doc.clone())
|
||||
}
|
||||
|
||||
pub(crate) fn remove(&self, id: &str) {
|
||||
let doc_id = id.to_string();
|
||||
if let Some(editor) = self.get(id) {
|
||||
pub(crate) fn remove(&self, block_id: &str) {
|
||||
if let Some(editor) = self.get(block_id) {
|
||||
editor.stop()
|
||||
}
|
||||
self.inner.remove(&doc_id);
|
||||
self.inner.remove(block_id);
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip(web_socket, receivers))]
|
||||
fn listen_ws_state_changed(web_socket: Arc<dyn RevisionWebSocket>, receivers: WebSocketDataReceivers) {
|
||||
#[tracing::instrument(level = "trace", skip(web_socket, handlers))]
|
||||
fn listen_ws_state_changed(web_socket: Arc<dyn RevisionWebSocket>, handlers: Arc<BlockEditorHandlers>) {
|
||||
tokio::spawn(async move {
|
||||
let mut notify = web_socket.subscribe_state_changed().await;
|
||||
while let Ok(state) = notify.recv().await {
|
||||
for receiver in receivers.iter() {
|
||||
receiver.value().connect_state_changed(state.clone());
|
||||
}
|
||||
handlers.inner.iter().for_each(|handler| {
|
||||
handler.receive_ws_state(&state);
|
||||
})
|
||||
}
|
||||
});
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::web_socket::EditorCommandReceiver;
|
||||
use crate::DocumentUser;
|
||||
use crate::BlockUser;
|
||||
use async_stream::stream;
|
||||
use flowy_collaboration::util::make_delta_from_revisions;
|
||||
use flowy_collaboration::{
|
||||
@ -21,14 +21,14 @@ use tokio::sync::{oneshot, RwLock};
|
||||
// serial.
|
||||
pub(crate) struct EditorCommandQueue {
|
||||
document: Arc<RwLock<ClientDocument>>,
|
||||
user: Arc<dyn DocumentUser>,
|
||||
user: Arc<dyn BlockUser>,
|
||||
rev_manager: Arc<RevisionManager>,
|
||||
receiver: Option<EditorCommandReceiver>,
|
||||
}
|
||||
|
||||
impl EditorCommandQueue {
|
||||
pub(crate) fn new(
|
||||
user: Arc<dyn DocumentUser>,
|
||||
user: Arc<dyn BlockUser>,
|
||||
rev_manager: Arc<RevisionManager>,
|
||||
delta: RichTextDelta,
|
||||
receiver: EditorCommandReceiver,
|
||||
@ -161,11 +161,11 @@ impl EditorCommandQueue {
|
||||
let _ = self.save_local_delta(delta, md5).await?;
|
||||
let _ = ret.send(Ok(()));
|
||||
}
|
||||
EditorCommand::ReadDocumentAsJson { ret } => {
|
||||
EditorCommand::ReadBlockJson { ret } => {
|
||||
let data = self.document.read().await.to_json();
|
||||
let _ = ret.send(Ok(data));
|
||||
}
|
||||
EditorCommand::ReadDocumentAsDelta { ret } => {
|
||||
EditorCommand::ReadBlockDelta { ret } => {
|
||||
let delta = self.document.read().await.delta().clone();
|
||||
let _ = ret.send(Ok(delta));
|
||||
}
|
||||
@ -265,11 +265,11 @@ pub(crate) enum EditorCommand {
|
||||
Redo {
|
||||
ret: Ret<()>,
|
||||
},
|
||||
ReadDocumentAsJson {
|
||||
ReadBlockJson {
|
||||
ret: Ret<String>,
|
||||
},
|
||||
#[allow(dead_code)]
|
||||
ReadDocumentAsDelta {
|
||||
ReadBlockDelta {
|
||||
ret: Ret<RichTextDelta>,
|
||||
},
|
||||
}
|
||||
@ -289,8 +289,8 @@ impl std::fmt::Debug for EditorCommand {
|
||||
EditorCommand::CanRedo { .. } => "CanRedo",
|
||||
EditorCommand::Undo { .. } => "Undo",
|
||||
EditorCommand::Redo { .. } => "Redo",
|
||||
EditorCommand::ReadDocumentAsJson { .. } => "ReadDocumentAsJson",
|
||||
EditorCommand::ReadDocumentAsDelta { .. } => "ReadDocumentAsDelta",
|
||||
EditorCommand::ReadBlockJson { .. } => "ReadDocumentAsJson",
|
||||
EditorCommand::ReadBlockDelta { .. } => "ReadDocumentAsDelta",
|
||||
};
|
||||
f.write_str(s)
|
||||
}
|
||||
|
@ -1,10 +1,9 @@
|
||||
use crate::{queue::EditorCommand, DocumentWSReceiver, DOCUMENT_SYNC_INTERVAL_IN_MILLIS};
|
||||
use async_trait::async_trait;
|
||||
use crate::{queue::EditorCommand, DOCUMENT_SYNC_INTERVAL_IN_MILLIS};
|
||||
use bytes::Bytes;
|
||||
use flowy_collaboration::{
|
||||
entities::{
|
||||
revision::RevisionRange,
|
||||
ws_data::{ClientRevisionWSData, NewDocumentUser, ServerRevisionWSData, ServerRevisionWSDataType},
|
||||
ws_data::{ClientRevisionWSData, NewDocumentUser, ServerRevisionWSDataType},
|
||||
},
|
||||
errors::CollaborateResult,
|
||||
};
|
||||
@ -30,26 +29,23 @@ pub(crate) async fn make_document_ws_manager(
|
||||
rev_manager: Arc<RevisionManager>,
|
||||
rev_web_socket: Arc<dyn RevisionWebSocket>,
|
||||
) -> Arc<RevisionWebSocketManager> {
|
||||
let composite_sink_provider = Arc::new(CompositeWSSinkDataProvider::new(&doc_id, rev_manager.clone()));
|
||||
let resolve_target = Arc::new(DocumentRevisionResolveTarget { edit_cmd_tx });
|
||||
let resolver = RevisionConflictResolver::<RichTextAttributes>::new(
|
||||
let ws_data_provider = Arc::new(WSDataProvider::new(&doc_id, Arc::new(rev_manager.clone())));
|
||||
let resolver = Arc::new(BlockConflictResolver { edit_cmd_tx });
|
||||
let conflict_controller = ConflictController::<RichTextAttributes>::new(
|
||||
&user_id,
|
||||
resolve_target,
|
||||
Arc::new(composite_sink_provider.clone()),
|
||||
resolver,
|
||||
Arc::new(ws_data_provider.clone()),
|
||||
rev_manager,
|
||||
);
|
||||
let ws_stream_consumer = Arc::new(DocumentWSSteamConsumerAdapter {
|
||||
resolver: Arc::new(resolver),
|
||||
});
|
||||
|
||||
let sink_provider = Arc::new(DocumentWSSinkDataProviderAdapter(composite_sink_provider));
|
||||
let ws_data_stream = Arc::new(BlockRevisionWSDataStream::new(conflict_controller));
|
||||
let ws_data_sink = Arc::new(BlockWSDataSink(ws_data_provider));
|
||||
let ping_duration = Duration::from_millis(DOCUMENT_SYNC_INTERVAL_IN_MILLIS);
|
||||
let ws_manager = Arc::new(RevisionWebSocketManager::new(
|
||||
"Document",
|
||||
&doc_id,
|
||||
rev_web_socket,
|
||||
sink_provider,
|
||||
ws_stream_consumer,
|
||||
ws_data_sink,
|
||||
ws_data_stream,
|
||||
ping_duration,
|
||||
));
|
||||
listen_document_ws_state(&user_id, &doc_id, ws_manager.scribe_state());
|
||||
@ -69,18 +65,26 @@ fn listen_document_ws_state(_user_id: &str, _doc_id: &str, mut subscriber: broad
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) struct DocumentWSSteamConsumerAdapter {
|
||||
resolver: Arc<RevisionConflictResolver<RichTextAttributes>>,
|
||||
pub(crate) struct BlockRevisionWSDataStream {
|
||||
conflict_controller: Arc<ConflictController<RichTextAttributes>>,
|
||||
}
|
||||
|
||||
impl RevisionWSSteamConsumer for DocumentWSSteamConsumerAdapter {
|
||||
impl BlockRevisionWSDataStream {
|
||||
pub fn new(conflict_controller: ConflictController<RichTextAttributes>) -> Self {
|
||||
Self {
|
||||
conflict_controller: Arc::new(conflict_controller),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RevisionWSDataStream for BlockRevisionWSDataStream {
|
||||
fn receive_push_revision(&self, bytes: Bytes) -> BoxResultFuture<(), FlowyError> {
|
||||
let resolver = self.resolver.clone();
|
||||
let resolver = self.conflict_controller.clone();
|
||||
Box::pin(async move { resolver.receive_bytes(bytes).await })
|
||||
}
|
||||
|
||||
fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError> {
|
||||
let resolver = self.resolver.clone();
|
||||
let resolver = self.conflict_controller.clone();
|
||||
Box::pin(async move { resolver.ack_revision(id, ty).await })
|
||||
}
|
||||
|
||||
@ -90,24 +94,24 @@ impl RevisionWSSteamConsumer for DocumentWSSteamConsumerAdapter {
|
||||
}
|
||||
|
||||
fn pull_revisions_in_range(&self, range: RevisionRange) -> BoxResultFuture<(), FlowyError> {
|
||||
let resolver = self.resolver.clone();
|
||||
let resolver = self.conflict_controller.clone();
|
||||
Box::pin(async move { resolver.send_revisions(range).await })
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct DocumentWSSinkDataProviderAdapter(pub(crate) Arc<CompositeWSSinkDataProvider>);
|
||||
impl RevisionWSSinkDataProvider for DocumentWSSinkDataProviderAdapter {
|
||||
pub(crate) struct BlockWSDataSink(pub(crate) Arc<WSDataProvider>);
|
||||
impl RevisionWSDataIterator for BlockWSDataSink {
|
||||
fn next(&self) -> FutureResult<Option<ClientRevisionWSData>, FlowyError> {
|
||||
let sink_provider = self.0.clone();
|
||||
FutureResult::new(async move { sink_provider.next().await })
|
||||
}
|
||||
}
|
||||
|
||||
struct DocumentRevisionResolveTarget {
|
||||
struct BlockConflictResolver {
|
||||
edit_cmd_tx: EditorCommandSender,
|
||||
}
|
||||
|
||||
impl ResolverTarget<RichTextAttributes> for DocumentRevisionResolveTarget {
|
||||
impl ConflictResolver<RichTextAttributes> for BlockConflictResolver {
|
||||
fn compose_delta(&self, delta: Delta<RichTextAttributes>) -> BoxResultFuture<DeltaMD5, FlowyError> {
|
||||
let tx = self.edit_cmd_tx.clone();
|
||||
Box::pin(async move {
|
||||
@ -157,24 +161,3 @@ impl ResolverTarget<RichTextAttributes> for DocumentRevisionResolveTarget {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// RevisionWebSocketManager registers itself as a DocumentWSReceiver for each
|
||||
// opened document.
|
||||
#[async_trait]
|
||||
impl DocumentWSReceiver for RevisionWebSocketManager {
|
||||
#[tracing::instrument(level = "debug", skip(self, data), err)]
|
||||
async fn receive_ws_data(&self, data: ServerRevisionWSData) -> Result<(), FlowyError> {
|
||||
let _ = self.ws_passthrough_tx.send(data).await.map_err(|e| {
|
||||
let err_msg = format!("{} passthrough error: {}", self.object_id, e);
|
||||
FlowyError::internal().context(err_msg)
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn connect_state_changed(&self, state: WSConnectState) {
|
||||
match self.state_passthrough_tx.send(state) {
|
||||
Ok(_) => {}
|
||||
Err(e) => tracing::error!("{}", e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,5 +1,5 @@
 use flowy_collaboration::entities::revision::RevisionState;
-use flowy_document::editor::ClientDocumentEditor;
+use flowy_document::editor::ClientBlockEditor;
 use flowy_document::DOCUMENT_SYNC_INTERVAL_IN_MILLIS;
 use flowy_test::{helper::ViewTest, FlowySDKTest};
 use lib_ot::{core::Interval, rich_text::RichTextDelta};
@@ -19,7 +19,7 @@ pub enum EditorScript {

 pub struct EditorTest {
     pub sdk: FlowySDKTest,
-    pub editor: Arc<ClientDocumentEditor>,
+    pub editor: Arc<ClientBlockEditor>,
 }

 impl EditorTest {
@@ -27,7 +27,7 @@ impl EditorTest {
         let sdk = FlowySDKTest::default();
         let _ = sdk.init_user().await;
         let test = ViewTest::new(&sdk).await;
-        let editor = sdk.document_manager.open_document(&test.view.id).await.unwrap();
+        let editor = sdk.document_manager.open_block(&test.view.id).await.unwrap();
         Self { sdk, editor }
     }
|
@ -6,7 +6,7 @@ use flowy_sync::RevisionWebSocket;
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
use flowy_collaboration::{client_folder::FolderPad, entities::ws_data::ServerRevisionWSData};
|
||||
use flowy_document::FlowyDocumentManager;
|
||||
use flowy_document::BlockManager;
|
||||
|
||||
use std::{collections::HashMap, convert::TryInto, fmt::Formatter, sync::Arc};
|
||||
use tokio::sync::RwLock as TokioRwLock;
|
||||
@ -71,7 +71,7 @@ impl FolderManager {
|
||||
user: Arc<dyn WorkspaceUser>,
|
||||
cloud_service: Arc<dyn FolderCouldServiceV1>,
|
||||
database: Arc<dyn WorkspaceDatabase>,
|
||||
document_manager: Arc<FlowyDocumentManager>,
|
||||
document_manager: Arc<BlockManager>,
|
||||
web_socket: Arc<dyn RevisionWebSocket>,
|
||||
) -> Self {
|
||||
if let Ok(user_id) = user.user_id() {
|
||||
|
@@ -63,9 +63,9 @@ pub fn create(folder: Arc<FolderManager>) -> Module {
         .event(FolderEvent::UpdateView, update_view_handler)
         .event(FolderEvent::DeleteView, delete_view_handler)
         .event(FolderEvent::DuplicateView, duplicate_view_handler)
-        .event(FolderEvent::OpenView, open_document_handler)
+        .event(FolderEvent::OpenView, open_view_handler)
         .event(FolderEvent::CloseView, close_view_handler)
-        .event(FolderEvent::ApplyDocDelta, document_delta_handler);
+        .event(FolderEvent::ApplyDocDelta, block_delta_handler);

     module = module
         .event(FolderEvent::ReadTrash, read_trash_handler)
@@ -130,7 +130,7 @@ pub enum FolderEvent {
     #[event()]
     CopyLink = 206,

-    #[event(input = "ViewId", output = "DocumentDelta")]
+    #[event(input = "ViewId", output = "BlockDelta")]
     OpenView = 207,

     #[event(input = "ViewId")]
@@ -151,7 +151,7 @@ pub enum FolderEvent {
     #[event()]
     DeleteAllTrash = 304,

-    #[event(input = "DocumentDelta", output = "DocumentDelta")]
+    #[event(input = "BlockDelta", output = "BlockDelta")]
     ApplyDocDelta = 400,

     #[event(input = "ExportPayload", output = "ExportData")]
@ -6,12 +6,12 @@ use crate::{
errors::FlowyError,
services::{AppController, TrashController, ViewController},
};
use lib_dispatch::prelude::{data_result, Data, DataResult, Unit};
use lib_dispatch::prelude::{data_result, AppData, Data, DataResult};
use std::{convert::TryInto, sync::Arc};

pub(crate) async fn create_app_handler(
data: Data<CreateAppPayload>,
controller: Unit<Arc<AppController>>,
controller: AppData<Arc<AppController>>,
) -> DataResult<App, FlowyError> {
let params: CreateAppParams = data.into_inner().try_into()?;
let detail = controller.create_app_from_params(params).await?;
@ -21,8 +21,8 @@ pub(crate) async fn create_app_handler(

pub(crate) async fn delete_app_handler(
data: Data<AppId>,
app_controller: Unit<Arc<AppController>>,
trash_controller: Unit<Arc<TrashController>>,
app_controller: AppData<Arc<AppController>>,
trash_controller: AppData<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let params: AppId = data.into_inner();
let trash = app_controller
@ -39,7 +39,7 @@ pub(crate) async fn delete_app_handler(
#[tracing::instrument(skip(data, controller))]
pub(crate) async fn update_app_handler(
data: Data<UpdateAppPayload>,
controller: Unit<Arc<AppController>>,
controller: AppData<Arc<AppController>>,
) -> Result<(), FlowyError> {
let params: UpdateAppParams = data.into_inner().try_into()?;
let _ = controller.update_app(params).await?;
@ -49,8 +49,8 @@ pub(crate) async fn update_app_handler(
#[tracing::instrument(skip(data, app_controller, view_controller))]
pub(crate) async fn read_app_handler(
data: Data<AppId>,
app_controller: Unit<Arc<AppController>>,
view_controller: Unit<Arc<ViewController>>,
app_controller: AppData<Arc<AppController>>,
view_controller: AppData<Arc<ViewController>>,
) -> DataResult<App, FlowyError> {
let params: AppId = data.into_inner();
let mut app = app_controller.read_app(params.clone()).await?;
@ -8,8 +8,8 @@ use crate::controller::FolderId;
use flowy_collaboration::util::make_delta_from_revisions;
use flowy_error::{FlowyError, FlowyResult};
use flowy_sync::{
RevisionCache, RevisionCloudService, RevisionCompact, RevisionManager, RevisionObjectBuilder, RevisionWebSocket,
RevisionWebSocketManager,
RevisionCloudService, RevisionCompact, RevisionManager, RevisionObjectBuilder, RevisionPersistence,
RevisionWebSocket, RevisionWebSocketManager,
};
use lib_infra::future::FutureResult;
use lib_ot::core::PlainAttributes;
@ -33,8 +33,8 @@ impl FolderEditor {
pool: Arc<ConnectionPool>,
web_socket: Arc<dyn RevisionWebSocket>,
) -> FlowyResult<Self> {
let cache = Arc::new(RevisionCache::new(user_id, folder_id.as_ref(), pool));
let mut rev_manager = RevisionManager::new(user_id, folder_id.as_ref(), cache);
let rev_persistence = Arc::new(RevisionPersistence::new(user_id, folder_id.as_ref(), pool));
let mut rev_manager = RevisionManager::new(user_id, folder_id.as_ref(), rev_persistence);
let cloud = Arc::new(FolderRevisionCloudServiceImpl {
token: token.to_string(),
});
@ -3,12 +3,12 @@ use crate::{
errors::FlowyError,
services::TrashController,
};
use lib_dispatch::prelude::{data_result, Data, DataResult, Unit};
use lib_dispatch::prelude::{data_result, AppData, Data, DataResult};
use std::sync::Arc;

#[tracing::instrument(skip(controller), err)]
pub(crate) async fn read_trash_handler(
controller: Unit<Arc<TrashController>>,
controller: AppData<Arc<TrashController>>,
) -> DataResult<RepeatedTrash, FlowyError> {
let repeated_trash = controller.read_trash().await?;
data_result(repeated_trash)
@ -17,7 +17,7 @@ pub(crate) async fn read_trash_handler(
#[tracing::instrument(skip(identifier, controller), err)]
pub(crate) async fn putback_trash_handler(
identifier: Data<TrashId>,
controller: Unit<Arc<TrashController>>,
controller: AppData<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let _ = controller.putback(&identifier.id).await?;
Ok(())
@ -26,20 +26,20 @@ pub(crate) async fn putback_trash_handler(
#[tracing::instrument(skip(identifiers, controller), err)]
pub(crate) async fn delete_trash_handler(
identifiers: Data<RepeatedTrashId>,
controller: Unit<Arc<TrashController>>,
controller: AppData<Arc<TrashController>>,
) -> Result<(), FlowyError> {
let _ = controller.delete(identifiers.into_inner()).await?;
Ok(())
}

#[tracing::instrument(skip(controller), err)]
pub(crate) async fn restore_all_trash_handler(controller: Unit<Arc<TrashController>>) -> Result<(), FlowyError> {
pub(crate) async fn restore_all_trash_handler(controller: AppData<Arc<TrashController>>) -> Result<(), FlowyError> {
let _ = controller.restore_all_trash().await?;
Ok(())
}

#[tracing::instrument(skip(controller), err)]
pub(crate) async fn delete_all_trash_handler(controller: Unit<Arc<TrashController>>) -> Result<(), FlowyError> {
pub(crate) async fn delete_all_trash_handler(controller: AppData<Arc<TrashController>>) -> Result<(), FlowyError> {
let _ = controller.delete_all_trash().await?;
Ok(())
}
@ -1,6 +1,6 @@
use bytes::Bytes;
use flowy_collaboration::entities::{
document_info::{DocumentDelta, DocumentId},
document_info::{BlockDelta, BlockId},
revision::{RepeatedRevision, Revision},
};

@ -22,7 +22,7 @@ use crate::{
},
};
use flowy_database::kv::KV;
use flowy_document::FlowyDocumentManager;
use flowy_document::BlockManager;
use flowy_folder_data_model::entities::share::{ExportData, ExportParams};
use lib_infra::uuid_string;

@ -33,7 +33,7 @@ pub(crate) struct ViewController {
cloud_service: Arc<dyn FolderCouldServiceV1>,
persistence: Arc<FolderPersistence>,
trash_controller: Arc<TrashController>,
document_manager: Arc<FlowyDocumentManager>,
block_manager: Arc<BlockManager>,
}

impl ViewController {
@ -42,19 +42,19 @@ impl ViewController {
persistence: Arc<FolderPersistence>,
cloud_service: Arc<dyn FolderCouldServiceV1>,
trash_can: Arc<TrashController>,
document_manager: Arc<FlowyDocumentManager>,
document_manager: Arc<BlockManager>,
) -> Self {
Self {
user,
cloud_service,
persistence,
trash_controller: trash_can,
document_manager,
block_manager: document_manager,
}
}

pub(crate) fn initialize(&self) -> Result<(), FlowyError> {
let _ = self.document_manager.init()?;
let _ = self.block_manager.init()?;
self.listen_trash_can_event();
Ok(())
}
@ -72,7 +72,7 @@ impl ViewController {
let repeated_revision: RepeatedRevision =
Revision::initial_revision(&user_id, &params.view_id, delta_data).into();
let _ = self
.document_manager
.block_manager
.reset_with_revisions(&params.view_id, repeated_revision)
.await?;
let view = self.create_view_on_server(params).await?;
@ -95,7 +95,7 @@ impl ViewController {
let user_id = self.user.user_id()?;
let repeated_revision: RepeatedRevision = Revision::initial_revision(&user_id, view_id, delta_data).into();
let _ = self
.document_manager
.block_manager
.reset_with_revisions(view_id, repeated_revision)
.await?;
Ok(())
@ -143,42 +143,42 @@ impl ViewController {
}

#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn open_document(&self, doc_id: &str) -> Result<DocumentDelta, FlowyError> {
let editor = self.document_manager.open_document(doc_id).await?;
KV::set_str(LATEST_VIEW_ID, doc_id.to_owned());
let document_json = editor.document_json().await?;
Ok(DocumentDelta {
doc_id: doc_id.to_string(),
pub(crate) async fn open_view(&self, view_id: &str) -> Result<BlockDelta, FlowyError> {
let editor = self.block_manager.open_block(view_id).await?;
KV::set_str(LATEST_VIEW_ID, view_id.to_owned());
let document_json = editor.block_json().await?;
Ok(BlockDelta {
block_id: view_id.to_string(),
delta_json: document_json,
})
}

#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn close_view(&self, doc_id: &str) -> Result<(), FlowyError> {
let _ = self.document_manager.close_document(doc_id)?;
let _ = self.block_manager.close_block(doc_id)?;
Ok(())
}

#[tracing::instrument(level = "debug", skip(self,params), fields(doc_id = %params.value), err)]
pub(crate) async fn delete_view(&self, params: DocumentId) -> Result<(), FlowyError> {
pub(crate) async fn delete_view(&self, params: BlockId) -> Result<(), FlowyError> {
if let Some(view_id) = KV::get_str(LATEST_VIEW_ID) {
if view_id == params.value {
let _ = KV::remove(LATEST_VIEW_ID);
}
}
let _ = self.document_manager.close_document(&params.value)?;
let _ = self.block_manager.close_block(&params.value)?;
Ok(())
}

#[tracing::instrument(level = "debug", skip(self), err)]
pub(crate) async fn duplicate_view(&self, doc_id: &str) -> Result<(), FlowyError> {
pub(crate) async fn duplicate_view(&self, view_id: &str) -> Result<(), FlowyError> {
let view = self
.persistence
.begin_transaction(|transaction| transaction.read_view(doc_id))
.begin_transaction(|transaction| transaction.read_view(view_id))
.await?;

let editor = self.document_manager.open_document(doc_id).await?;
let document_json = editor.document_json().await?;
let editor = self.block_manager.open_block(view_id).await?;
let document_json = editor.block_json().await?;
let duplicate_params = CreateViewParams {
belong_to_id: view.belong_to_id.clone(),
name: format!("{} (copy)", &view.name),
@ -194,9 +194,9 @@ impl ViewController {
}

#[tracing::instrument(level = "debug", skip(self, params), err)]
pub(crate) async fn export_doc(&self, params: ExportParams) -> Result<ExportData, FlowyError> {
let editor = self.document_manager.open_document(&params.doc_id).await?;
let delta_json = editor.document_json().await?;
pub(crate) async fn export_view(&self, params: ExportParams) -> Result<ExportData, FlowyError> {
let editor = self.block_manager.open_block(&params.view_id).await?;
let delta_json = editor.block_json().await?;
Ok(ExportData {
data: delta_json,
export_type: params.export_type,
@ -234,8 +234,8 @@ impl ViewController {
Ok(view)
}

pub(crate) async fn receive_document_delta(&self, params: DocumentDelta) -> Result<DocumentDelta, FlowyError> {
let doc = self.document_manager.receive_local_delta(params).await?;
pub(crate) async fn receive_delta(&self, params: BlockDelta) -> Result<BlockDelta, FlowyError> {
let doc = self.block_manager.receive_local_delta(params).await?;
Ok(doc)
}

@ -312,7 +312,7 @@ impl ViewController {
fn listen_trash_can_event(&self) {
let mut rx = self.trash_controller.subscribe();
let persistence = self.persistence.clone();
let document_manager = self.document_manager.clone();
let document_manager = self.block_manager.clone();
let trash_controller = self.trash_controller.clone();
let _ = tokio::spawn(async move {
loop {
@ -340,7 +340,7 @@ impl ViewController {
#[tracing::instrument(level = "trace", skip(persistence, document_manager, trash_can))]
async fn handle_trash_event(
persistence: Arc<FolderPersistence>,
document_manager: Arc<FlowyDocumentManager>,
document_manager: Arc<BlockManager>,
trash_can: Arc<TrashController>,
event: TrashEvent,
) {
@ -8,14 +8,14 @@ use crate::{
|
||||
errors::FlowyError,
|
||||
services::{TrashController, ViewController},
|
||||
};
|
||||
use flowy_collaboration::entities::document_info::DocumentDelta;
|
||||
use flowy_collaboration::entities::document_info::BlockDelta;
|
||||
use flowy_folder_data_model::entities::share::{ExportData, ExportParams, ExportPayload};
|
||||
use lib_dispatch::prelude::{data_result, Data, DataResult, Unit};
|
||||
use lib_dispatch::prelude::{data_result, AppData, Data, DataResult};
|
||||
use std::{convert::TryInto, sync::Arc};
|
||||
|
||||
pub(crate) async fn create_view_handler(
|
||||
data: Data<CreateViewPayload>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> DataResult<View, FlowyError> {
|
||||
let params: CreateViewParams = data.into_inner().try_into()?;
|
||||
let view = controller.create_view_from_params(params).await?;
|
||||
@ -24,7 +24,7 @@ pub(crate) async fn create_view_handler(
|
||||
|
||||
pub(crate) async fn read_view_handler(
|
||||
data: Data<ViewId>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> DataResult<View, FlowyError> {
|
||||
let view_id: ViewId = data.into_inner();
|
||||
let mut view = controller.read_view(view_id.clone()).await?;
|
||||
@ -38,7 +38,7 @@ pub(crate) async fn read_view_handler(
|
||||
#[tracing::instrument(skip(data, controller), err)]
|
||||
pub(crate) async fn update_view_handler(
|
||||
data: Data<UpdateViewPayload>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> Result<(), FlowyError> {
|
||||
let params: UpdateViewParams = data.into_inner().try_into()?;
|
||||
let _ = controller.update_view(params).await?;
|
||||
@ -46,18 +46,18 @@ pub(crate) async fn update_view_handler(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) async fn document_delta_handler(
|
||||
data: Data<DocumentDelta>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
) -> DataResult<DocumentDelta, FlowyError> {
|
||||
let doc = controller.receive_document_delta(data.into_inner()).await?;
|
||||
data_result(doc)
|
||||
pub(crate) async fn block_delta_handler(
|
||||
data: Data<BlockDelta>,
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> DataResult<BlockDelta, FlowyError> {
|
||||
let block_delta = controller.receive_delta(data.into_inner()).await?;
|
||||
data_result(block_delta)
|
||||
}
|
||||
|
||||
pub(crate) async fn delete_view_handler(
|
||||
data: Data<RepeatedViewId>,
|
||||
view_controller: Unit<Arc<ViewController>>,
|
||||
trash_controller: Unit<Arc<TrashController>>,
|
||||
view_controller: AppData<Arc<ViewController>>,
|
||||
trash_controller: AppData<Arc<TrashController>>,
|
||||
) -> Result<(), FlowyError> {
|
||||
let params: RepeatedViewId = data.into_inner();
|
||||
for view_id in &params.items {
|
||||
@ -75,18 +75,18 @@ pub(crate) async fn delete_view_handler(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) async fn open_document_handler(
|
||||
pub(crate) async fn open_view_handler(
|
||||
data: Data<ViewId>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
) -> DataResult<DocumentDelta, FlowyError> {
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> DataResult<BlockDelta, FlowyError> {
|
||||
let view_id: ViewId = data.into_inner();
|
||||
let doc = controller.open_document(&view_id.value).await?;
|
||||
let doc = controller.open_view(&view_id.value).await?;
|
||||
data_result(doc)
|
||||
}
|
||||
|
||||
pub(crate) async fn close_view_handler(
|
||||
data: Data<ViewId>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> Result<(), FlowyError> {
|
||||
let view_id: ViewId = data.into_inner();
|
||||
let _ = controller.close_view(&view_id.value).await?;
|
||||
@ -96,7 +96,7 @@ pub(crate) async fn close_view_handler(
|
||||
#[tracing::instrument(skip(data, controller), err)]
|
||||
pub(crate) async fn duplicate_view_handler(
|
||||
data: Data<ViewId>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> Result<(), FlowyError> {
|
||||
let view_id: ViewId = data.into_inner();
|
||||
let _ = controller.duplicate_view(&view_id.value).await?;
|
||||
@ -106,9 +106,9 @@ pub(crate) async fn duplicate_view_handler(
|
||||
#[tracing::instrument(skip(data, controller), err)]
|
||||
pub(crate) async fn export_handler(
|
||||
data: Data<ExportPayload>,
|
||||
controller: Unit<Arc<ViewController>>,
|
||||
controller: AppData<Arc<ViewController>>,
|
||||
) -> DataResult<ExportData, FlowyError> {
|
||||
let params: ExportParams = data.into_inner().try_into()?;
|
||||
let data = controller.export_doc(params).await?;
|
||||
let data = controller.export_view(params).await?;
|
||||
data_result(data)
|
||||
}
|
||||
|
@ -21,44 +21,36 @@ pub(crate) async fn make_folder_ws_manager(
|
||||
web_socket: Arc<dyn RevisionWebSocket>,
|
||||
folder_pad: Arc<RwLock<FolderPad>>,
|
||||
) -> Arc<RevisionWebSocketManager> {
|
||||
let composite_sink_provider = Arc::new(CompositeWSSinkDataProvider::new(folder_id, rev_manager.clone()));
|
||||
let resolve_target = Arc::new(FolderRevisionResolveTarget { folder_pad });
|
||||
let resolver = RevisionConflictResolver::<PlainAttributes>::new(
|
||||
user_id,
|
||||
resolve_target,
|
||||
Arc::new(composite_sink_provider.clone()),
|
||||
rev_manager,
|
||||
);
|
||||
|
||||
let ws_stream_consumer = Arc::new(FolderWSStreamConsumerAdapter {
|
||||
resolver: Arc::new(resolver),
|
||||
});
|
||||
|
||||
let sink_provider = Arc::new(FolderWSSinkDataProviderAdapter(composite_sink_provider));
|
||||
let ws_data_provider = Arc::new(WSDataProvider::new(folder_id, Arc::new(rev_manager.clone())));
|
||||
let resolver = Arc::new(FolderConflictResolver { folder_pad });
|
||||
let conflict_controller =
|
||||
ConflictController::<PlainAttributes>::new(user_id, resolver, Arc::new(ws_data_provider.clone()), rev_manager);
|
||||
let ws_data_stream = Arc::new(FolderRevisionWSDataStream::new(conflict_controller));
|
||||
let ws_data_sink = Arc::new(FolderWSDataSink(ws_data_provider));
|
||||
let ping_duration = Duration::from_millis(FOLDER_SYNC_INTERVAL_IN_MILLIS);
|
||||
Arc::new(RevisionWebSocketManager::new(
|
||||
"Folder",
|
||||
folder_id,
|
||||
web_socket,
|
||||
sink_provider,
|
||||
ws_stream_consumer,
|
||||
ws_data_sink,
|
||||
ws_data_stream,
|
||||
ping_duration,
|
||||
))
|
||||
}
|
||||
|
||||
pub(crate) struct FolderWSSinkDataProviderAdapter(Arc<CompositeWSSinkDataProvider>);
|
||||
impl RevisionWSSinkDataProvider for FolderWSSinkDataProviderAdapter {
|
||||
pub(crate) struct FolderWSDataSink(Arc<WSDataProvider>);
|
||||
impl RevisionWSDataIterator for FolderWSDataSink {
|
||||
fn next(&self) -> FutureResult<Option<ClientRevisionWSData>, FlowyError> {
|
||||
let sink_provider = self.0.clone();
|
||||
FutureResult::new(async move { sink_provider.next().await })
|
||||
}
|
||||
}
|
||||
|
||||
struct FolderRevisionResolveTarget {
|
||||
struct FolderConflictResolver {
|
||||
folder_pad: Arc<RwLock<FolderPad>>,
|
||||
}
|
||||
|
||||
impl ResolverTarget<PlainAttributes> for FolderRevisionResolveTarget {
|
||||
impl ConflictResolver<PlainAttributes> for FolderConflictResolver {
|
||||
fn compose_delta(&self, delta: Delta<PlainAttributes>) -> BoxResultFuture<DeltaMD5, FlowyError> {
|
||||
let folder_pad = self.folder_pad.clone();
|
||||
Box::pin(async move {
|
||||
@ -101,18 +93,26 @@ impl ResolverTarget<PlainAttributes> for FolderRevisionResolveTarget {
|
||||
}
|
||||
}
|
||||
|
||||
struct FolderWSStreamConsumerAdapter {
|
||||
resolver: Arc<RevisionConflictResolver<PlainAttributes>>,
|
||||
struct FolderRevisionWSDataStream {
|
||||
conflict_controller: Arc<ConflictController<PlainAttributes>>,
|
||||
}
|
||||
|
||||
impl RevisionWSSteamConsumer for FolderWSStreamConsumerAdapter {
|
||||
impl FolderRevisionWSDataStream {
|
||||
pub fn new(conflict_controller: ConflictController<PlainAttributes>) -> Self {
|
||||
Self {
|
||||
conflict_controller: Arc::new(conflict_controller),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RevisionWSDataStream for FolderRevisionWSDataStream {
|
||||
fn receive_push_revision(&self, bytes: Bytes) -> BoxResultFuture<(), FlowyError> {
|
||||
let resolver = self.resolver.clone();
|
||||
let resolver = self.conflict_controller.clone();
|
||||
Box::pin(async move { resolver.receive_bytes(bytes).await })
|
||||
}
|
||||
|
||||
fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError> {
|
||||
let resolver = self.resolver.clone();
|
||||
let resolver = self.conflict_controller.clone();
|
||||
Box::pin(async move { resolver.ack_revision(id, ty).await })
|
||||
}
|
||||
|
||||
@ -122,7 +122,7 @@ impl RevisionWSSteamConsumer for FolderWSStreamConsumerAdapter {
|
||||
}
|
||||
|
||||
fn pull_revisions_in_range(&self, range: RevisionRange) -> BoxResultFuture<(), FlowyError> {
|
||||
let resolver = self.resolver.clone();
|
||||
let resolver = self.conflict_controller.clone();
|
||||
Box::pin(async move { resolver.send_revisions(range).await })
|
||||
}
|
||||
}
|
||||
|
@ -10,13 +10,13 @@ use flowy_folder_data_model::entities::{
|
||||
workspace::{CurrentWorkspaceSetting, RepeatedWorkspace, WorkspaceId, *},
|
||||
};
|
||||
|
||||
use lib_dispatch::prelude::{data_result, Data, DataResult, Unit};
|
||||
use lib_dispatch::prelude::{data_result, AppData, Data, DataResult};
|
||||
use std::{convert::TryInto, sync::Arc};
|
||||
|
||||
#[tracing::instrument(skip(data, controller), err)]
|
||||
pub(crate) async fn create_workspace_handler(
|
||||
data: Data<CreateWorkspacePayload>,
|
||||
controller: Unit<Arc<WorkspaceController>>,
|
||||
controller: AppData<Arc<WorkspaceController>>,
|
||||
) -> DataResult<Workspace, FlowyError> {
|
||||
let controller = controller.get_ref().clone();
|
||||
let params: CreateWorkspaceParams = data.into_inner().try_into()?;
|
||||
@ -26,7 +26,7 @@ pub(crate) async fn create_workspace_handler(
|
||||
|
||||
#[tracing::instrument(skip(controller), err)]
|
||||
pub(crate) async fn read_workspace_apps_handler(
|
||||
controller: Unit<Arc<WorkspaceController>>,
|
||||
controller: AppData<Arc<WorkspaceController>>,
|
||||
) -> DataResult<RepeatedApp, FlowyError> {
|
||||
let repeated_app = controller.read_current_workspace_apps().await?;
|
||||
data_result(repeated_app)
|
||||
@ -35,7 +35,7 @@ pub(crate) async fn read_workspace_apps_handler(
|
||||
#[tracing::instrument(skip(data, controller), err)]
|
||||
pub(crate) async fn open_workspace_handler(
|
||||
data: Data<WorkspaceId>,
|
||||
controller: Unit<Arc<WorkspaceController>>,
|
||||
controller: AppData<Arc<WorkspaceController>>,
|
||||
) -> DataResult<Workspace, FlowyError> {
|
||||
let params: WorkspaceId = data.into_inner();
|
||||
let workspaces = controller.open_workspace(params).await?;
|
||||
@ -45,7 +45,7 @@ pub(crate) async fn open_workspace_handler(
|
||||
#[tracing::instrument(skip(data, folder), err)]
|
||||
pub(crate) async fn read_workspaces_handler(
|
||||
data: Data<WorkspaceId>,
|
||||
folder: Unit<Arc<FolderManager>>,
|
||||
folder: AppData<Arc<FolderManager>>,
|
||||
) -> DataResult<RepeatedWorkspace, FlowyError> {
|
||||
let params: WorkspaceId = data.into_inner();
|
||||
let user_id = folder.user.user_id()?;
|
||||
@ -71,7 +71,7 @@ pub(crate) async fn read_workspaces_handler(
|
||||
|
||||
#[tracing::instrument(skip(folder), err)]
|
||||
pub async fn read_cur_workspace_handler(
|
||||
folder: Unit<Arc<FolderManager>>,
|
||||
folder: AppData<Arc<FolderManager>>,
|
||||
) -> DataResult<CurrentWorkspaceSetting, FlowyError> {
|
||||
let workspace_id = get_current_workspace()?;
|
||||
let user_id = folder.user.user_id()?;
|
||||
@ -96,7 +96,7 @@ pub async fn read_cur_workspace_handler(
|
||||
|
||||
#[tracing::instrument(level = "trace", skip(folder_manager), err)]
|
||||
fn read_workspaces_on_server(
|
||||
folder_manager: Unit<Arc<FolderManager>>,
|
||||
folder_manager: AppData<Arc<FolderManager>>,
|
||||
user_id: String,
|
||||
params: WorkspaceId,
|
||||
) -> Result<(), FlowyError> {
|
||||
|
@ -1,4 +1,4 @@
|
||||
use flowy_collaboration::entities::document_info::DocumentInfo;
|
||||
use flowy_collaboration::entities::document_info::BlockInfo;
|
||||
use flowy_folder::event_map::FolderEvent::*;
|
||||
use flowy_folder_data_model::entities::view::{RepeatedViewId, ViewId};
|
||||
use flowy_folder_data_model::entities::workspace::WorkspaceId;
|
||||
@ -159,14 +159,14 @@ pub async fn delete_view(sdk: &FlowySDKTest, view_ids: Vec<String>) {
|
||||
.await;
|
||||
}
|
||||
|
||||
pub async fn open_document(sdk: &FlowySDKTest, view_id: &str) -> DocumentInfo {
|
||||
pub async fn open_document(sdk: &FlowySDKTest, view_id: &str) -> BlockInfo {
|
||||
let view_id: ViewId = view_id.into();
|
||||
FolderEventBuilder::new(sdk.clone())
|
||||
.event(OpenView)
|
||||
.payload(view_id)
|
||||
.async_send()
|
||||
.await
|
||||
.parse::<DocumentInfo>()
|
||||
.parse::<BlockInfo>()
|
||||
}
|
||||
|
||||
pub async fn read_trash(sdk: &FlowySDKTest) -> RepeatedTrash {
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::helper::*;
|
||||
use flowy_collaboration::entities::{document_info::DocumentInfo, revision::RevisionState};
|
||||
use flowy_collaboration::entities::{document_info::BlockInfo, revision::RevisionState};
|
||||
use flowy_folder::{errors::ErrorCode, services::folder_editor::FolderEditor};
|
||||
use flowy_folder_data_model::entities::{
|
||||
app::{App, RepeatedApp},
|
||||
@ -58,7 +58,7 @@ pub struct FolderTest {
|
||||
pub app: App,
|
||||
pub view: View,
|
||||
pub trash: Vec<Trash>,
|
||||
pub document_info: Option<DocumentInfo>,
|
||||
pub document_info: Option<BlockInfo>,
|
||||
// pub folder_editor:
|
||||
}
|
||||
|
||||
|
@ -1,12 +1,12 @@
use crate::{entities::NetworkState, ws::connection::FlowyWebSocketConnect};
use flowy_error::FlowyError;
use lib_dispatch::prelude::{Data, Unit};
use lib_dispatch::prelude::{AppData, Data};
use std::sync::Arc;

#[tracing::instrument(skip(data, ws_manager))]
pub async fn update_network_ty(
data: Data<NetworkState>,
ws_manager: Unit<Arc<FlowyWebSocketConnect>>,
ws_manager: AppData<Arc<FlowyWebSocketConnect>>,
) -> Result<(), FlowyError> {
let network_state = data.into_inner();
ws_manager.update_network_type(&network_state.ty);
@ -2,45 +2,45 @@ use crate::{
|
||||
configuration::*,
|
||||
request::{HttpRequestBuilder, ResponseMiddleware},
|
||||
};
|
||||
use flowy_collaboration::entities::document_info::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams};
|
||||
use flowy_document::DocumentCloudService;
|
||||
use flowy_collaboration::entities::document_info::{BlockId, BlockInfo, CreateBlockParams, ResetDocumentParams};
|
||||
use flowy_document::BlockCloudService;
|
||||
use flowy_error::FlowyError;
|
||||
use http_flowy::response::FlowyResponse;
|
||||
use lazy_static::lazy_static;
|
||||
use lib_infra::future::FutureResult;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct DocumentHttpCloudService {
|
||||
pub struct BlockHttpCloudService {
|
||||
config: ClientServerConfiguration,
|
||||
}
|
||||
|
||||
impl DocumentHttpCloudService {
|
||||
impl BlockHttpCloudService {
|
||||
pub fn new(config: ClientServerConfiguration) -> Self {
|
||||
Self { config }
|
||||
}
|
||||
}
|
||||
|
||||
impl DocumentCloudService for DocumentHttpCloudService {
|
||||
fn create_document(&self, token: &str, params: CreateDocParams) -> FutureResult<(), FlowyError> {
|
||||
impl BlockCloudService for BlockHttpCloudService {
|
||||
fn create_block(&self, token: &str, params: CreateBlockParams) -> FutureResult<(), FlowyError> {
|
||||
let token = token.to_owned();
|
||||
let url = self.config.doc_url();
|
||||
FutureResult::new(async move { create_document_request(&token, params, &url).await })
|
||||
}
|
||||
|
||||
fn read_document(&self, token: &str, params: DocumentId) -> FutureResult<Option<DocumentInfo>, FlowyError> {
|
||||
fn read_block(&self, token: &str, params: BlockId) -> FutureResult<Option<BlockInfo>, FlowyError> {
|
||||
let token = token.to_owned();
|
||||
let url = self.config.doc_url();
|
||||
FutureResult::new(async move { read_document_request(&token, params, &url).await })
|
||||
}
|
||||
|
||||
fn update_document(&self, token: &str, params: ResetDocumentParams) -> FutureResult<(), FlowyError> {
|
||||
fn update_block(&self, token: &str, params: ResetDocumentParams) -> FutureResult<(), FlowyError> {
|
||||
let token = token.to_owned();
|
||||
let url = self.config.doc_url();
|
||||
FutureResult::new(async move { reset_doc_request(&token, params, &url).await })
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn create_document_request(token: &str, params: CreateDocParams, url: &str) -> Result<(), FlowyError> {
|
||||
pub async fn create_document_request(token: &str, params: CreateBlockParams, url: &str) -> Result<(), FlowyError> {
|
||||
let _ = request_builder()
|
||||
.post(&url.to_owned())
|
||||
.header(HEADER_TOKEN, token)
|
||||
@ -50,11 +50,7 @@ pub async fn create_document_request(token: &str, params: CreateDocParams, url:
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn read_document_request(
|
||||
token: &str,
|
||||
params: DocumentId,
|
||||
url: &str,
|
||||
) -> Result<Option<DocumentInfo>, FlowyError> {
|
||||
pub async fn read_document_request(token: &str, params: BlockId, url: &str) -> Result<Option<BlockInfo>, FlowyError> {
|
||||
let doc = request_builder()
|
||||
.get(&url.to_owned())
|
||||
.header(HEADER_TOKEN, token)
|
||||
|
@ -1,5 +1,5 @@
|
||||
use flowy_collaboration::{
|
||||
entities::{document_info::DocumentInfo, folder_info::FolderInfo},
|
||||
entities::{document_info::BlockInfo, folder_info::FolderInfo},
|
||||
errors::CollaborateError,
|
||||
protobuf::{RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
|
||||
server_document::*,
|
||||
@ -111,7 +111,7 @@ impl FolderCloudPersistence for LocalDocumentCloudPersistence {
|
||||
}
|
||||
|
||||
impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
|
||||
fn read_document(&self, doc_id: &str) -> BoxResultFuture<DocumentInfo, CollaborateError> {
|
||||
fn read_document(&self, doc_id: &str) -> BoxResultFuture<BlockInfo, CollaborateError> {
|
||||
let storage = self.storage.clone();
|
||||
let doc_id = doc_id.to_owned();
|
||||
Box::pin(async move {
|
||||
@ -127,7 +127,7 @@ impl DocumentCloudPersistence for LocalDocumentCloudPersistence {
|
||||
&self,
|
||||
doc_id: &str,
|
||||
repeated_revision: RepeatedRevisionPB,
|
||||
) -> BoxResultFuture<Option<DocumentInfo>, CollaborateError> {
|
||||
) -> BoxResultFuture<Option<BlockInfo>, CollaborateError> {
|
||||
let doc_id = doc_id.to_owned();
|
||||
let storage = self.storage.clone();
|
||||
Box::pin(async move {
|
||||
|
@ -4,7 +4,7 @@ use bytes::Bytes;
|
||||
use flowy_collaboration::{
|
||||
client_document::default::initial_delta_string,
|
||||
entities::{
|
||||
document_info::{CreateDocParams, DocumentId, DocumentInfo, ResetDocumentParams},
|
||||
document_info::{BlockId, BlockInfo, CreateBlockParams, ResetDocumentParams},
|
||||
ws_data::{ClientRevisionWSData, ClientRevisionWSDataType},
|
||||
},
|
||||
errors::CollaborateError,
|
||||
@ -248,7 +248,7 @@ impl RevisionUser for LocalRevisionUser {
|
||||
}
|
||||
}
|
||||
|
||||
use flowy_document::DocumentCloudService;
|
||||
use flowy_document::BlockCloudService;
|
||||
use flowy_folder_data_model::entities::{
|
||||
app::{App, AppId, CreateAppParams, RepeatedApp, UpdateAppParams},
|
||||
trash::{RepeatedTrash, RepeatedTrashId},
|
||||
@ -406,13 +406,13 @@ impl UserCloudService for LocalServer {
|
||||
}
|
||||
}
|
||||
|
||||
impl DocumentCloudService for LocalServer {
|
||||
fn create_document(&self, _token: &str, _params: CreateDocParams) -> FutureResult<(), FlowyError> {
|
||||
impl BlockCloudService for LocalServer {
|
||||
fn create_block(&self, _token: &str, _params: CreateBlockParams) -> FutureResult<(), FlowyError> {
|
||||
FutureResult::new(async { Ok(()) })
|
||||
}
|
||||
|
||||
fn read_document(&self, _token: &str, params: DocumentId) -> FutureResult<Option<DocumentInfo>, FlowyError> {
|
||||
let doc = DocumentInfo {
|
||||
fn read_block(&self, _token: &str, params: BlockId) -> FutureResult<Option<BlockInfo>, FlowyError> {
|
||||
let doc = BlockInfo {
|
||||
doc_id: params.value,
|
||||
text: initial_delta_string(),
|
||||
rev_id: 0,
|
||||
@ -421,7 +421,7 @@ impl DocumentCloudService for LocalServer {
|
||||
FutureResult::new(async { Ok(Some(doc)) })
|
||||
}
|
||||
|
||||
fn update_document(&self, _token: &str, _params: ResetDocumentParams) -> FutureResult<(), FlowyError> {
|
||||
fn update_block(&self, _token: &str, _params: ResetDocumentParams) -> FutureResult<(), FlowyError> {
|
||||
FutureResult::new(async { Ok(()) })
|
||||
}
|
||||
}
|
||||
|
@ -3,11 +3,11 @@ use flowy_collaboration::entities::ws_data::ClientRevisionWSData;
|
||||
use flowy_database::ConnectionPool;
|
||||
use flowy_document::{
|
||||
errors::{internal_error, FlowyError},
|
||||
DocumentCloudService, DocumentUser, FlowyDocumentManager,
|
||||
BlockCloudService, BlockManager, BlockUser,
|
||||
};
|
||||
use flowy_net::ClientServerConfiguration;
|
||||
use flowy_net::{
|
||||
http_server::document::DocumentHttpCloudService, local_server::LocalServer, ws::connection::FlowyWebSocketConnect,
|
||||
http_server::document::BlockHttpCloudService, local_server::LocalServer, ws::connection::FlowyWebSocketConnect,
|
||||
};
|
||||
use flowy_sync::{RevisionWebSocket, WSStateReceiver};
|
||||
use flowy_user::services::UserSession;
|
||||
@ -23,15 +23,15 @@ impl DocumentDepsResolver {
|
||||
ws_conn: Arc<FlowyWebSocketConnect>,
|
||||
user_session: Arc<UserSession>,
|
||||
server_config: &ClientServerConfiguration,
|
||||
) -> Arc<FlowyDocumentManager> {
|
||||
) -> Arc<BlockManager> {
|
||||
let user = Arc::new(DocumentUserImpl(user_session));
|
||||
let ws_sender = Arc::new(DocumentWebSocketImpl(ws_conn.clone()));
|
||||
let cloud_service: Arc<dyn DocumentCloudService> = match local_server {
|
||||
None => Arc::new(DocumentHttpCloudService::new(server_config.clone())),
|
||||
let ws_sender = Arc::new(BlockWebSocket(ws_conn.clone()));
|
||||
let cloud_service: Arc<dyn BlockCloudService> = match local_server {
|
||||
None => Arc::new(BlockHttpCloudService::new(server_config.clone())),
|
||||
Some(local_server) => local_server,
|
||||
};
|
||||
|
||||
let manager = Arc::new(FlowyDocumentManager::new(cloud_service, user, ws_sender));
|
||||
let manager = Arc::new(BlockManager::new(cloud_service, user, ws_sender));
|
||||
let receiver = Arc::new(DocumentWSMessageReceiverImpl(manager.clone()));
|
||||
ws_conn.add_ws_message_receiver(receiver).unwrap();
|
||||
|
||||
@ -40,7 +40,7 @@ impl DocumentDepsResolver {
|
||||
}
|
||||
|
||||
struct DocumentUserImpl(Arc<UserSession>);
|
||||
impl DocumentUser for DocumentUserImpl {
|
||||
impl BlockUser for DocumentUserImpl {
|
||||
fn user_dir(&self) -> Result<String, FlowyError> {
|
||||
let dir = self.0.user_dir().map_err(|e| FlowyError::unauthorized().context(e))?;
|
||||
|
||||
@ -64,8 +64,8 @@ impl DocumentUser for DocumentUserImpl {
|
||||
}
|
||||
}
|
||||
|
||||
struct DocumentWebSocketImpl(Arc<FlowyWebSocketConnect>);
|
||||
impl RevisionWebSocket for DocumentWebSocketImpl {
|
||||
struct BlockWebSocket(Arc<FlowyWebSocketConnect>);
|
||||
impl RevisionWebSocket for BlockWebSocket {
|
||||
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
|
||||
let bytes: Bytes = data.try_into().unwrap();
|
||||
let msg = WebSocketRawMessage {
|
||||
@ -90,7 +90,7 @@ impl RevisionWebSocket for DocumentWebSocketImpl {
|
||||
}
|
||||
}
|
||||
|
||||
struct DocumentWSMessageReceiverImpl(Arc<FlowyDocumentManager>);
|
||||
struct DocumentWSMessageReceiverImpl(Arc<BlockManager>);
|
||||
impl WSMessageReceiver for DocumentWSMessageReceiverImpl {
|
||||
fn source(&self) -> WSChannel {
|
||||
WSChannel::Document
|
||||
|
@ -1,7 +1,7 @@
|
||||
use bytes::Bytes;
|
||||
use flowy_collaboration::entities::ws_data::ClientRevisionWSData;
|
||||
use flowy_database::ConnectionPool;
|
||||
use flowy_document::FlowyDocumentManager;
|
||||
use flowy_document::BlockManager;
|
||||
use flowy_folder::{
|
||||
controller::FolderManager,
|
||||
errors::{internal_error, FlowyError},
|
||||
@ -24,12 +24,12 @@ impl FolderDepsResolver {
|
||||
local_server: Option<Arc<LocalServer>>,
|
||||
user_session: Arc<UserSession>,
|
||||
server_config: &ClientServerConfiguration,
|
||||
document_manager: &Arc<FlowyDocumentManager>,
|
||||
document_manager: &Arc<BlockManager>,
|
||||
ws_conn: Arc<FlowyWebSocketConnect>,
|
||||
) -> Arc<FolderManager> {
|
||||
let user: Arc<dyn WorkspaceUser> = Arc::new(WorkspaceUserImpl(user_session.clone()));
|
||||
let database: Arc<dyn WorkspaceDatabase> = Arc::new(WorkspaceDatabaseImpl(user_session));
|
||||
let web_socket = Arc::new(FolderWebSocketImpl(ws_conn.clone()));
|
||||
let web_socket = Arc::new(FolderWebSocket(ws_conn.clone()));
|
||||
let cloud_service: Arc<dyn FolderCouldServiceV1> = match local_server {
|
||||
None => Arc::new(FolderHttpCloudService::new(server_config.clone())),
|
||||
Some(local_server) => local_server,
|
||||
@ -78,8 +78,8 @@ impl WorkspaceUser for WorkspaceUserImpl {
|
||||
}
|
||||
}
|
||||
|
||||
struct FolderWebSocketImpl(Arc<FlowyWebSocketConnect>);
|
||||
impl RevisionWebSocket for FolderWebSocketImpl {
|
||||
struct FolderWebSocket(Arc<FlowyWebSocketConnect>);
|
||||
impl RevisionWebSocket for FolderWebSocket {
|
||||
fn send(&self, data: ClientRevisionWSData) -> BoxResultFuture<(), FlowyError> {
|
||||
let bytes: Bytes = data.try_into().unwrap();
|
||||
let msg = WebSocketRawMessage {
|
||||
|
@ -3,7 +3,7 @@ pub mod module;
|
||||
pub use flowy_net::get_client_server_configuration;
|
||||
|
||||
use crate::deps_resolve::*;
|
||||
use flowy_document::FlowyDocumentManager;
|
||||
use flowy_document::BlockManager;
|
||||
use flowy_folder::{controller::FolderManager, errors::FlowyError};
|
||||
use flowy_net::ClientServerConfiguration;
|
||||
use flowy_net::{
|
||||
@ -85,7 +85,7 @@ pub struct FlowySDK {
|
||||
#[allow(dead_code)]
|
||||
config: FlowySDKConfig,
|
||||
pub user_session: Arc<UserSession>,
|
||||
pub document_manager: Arc<FlowyDocumentManager>,
|
||||
pub document_manager: Arc<BlockManager>,
|
||||
pub folder_manager: Arc<FolderManager>,
|
||||
pub dispatcher: Arc<EventDispatcher>,
|
||||
pub ws_conn: Arc<FlowyWebSocketConnect>,
|
||||
|
@ -26,7 +26,7 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = {version = "1.0"}
futures-util = "0.3.15"
async-stream = "0.3.2"

async-trait = "0.1.52"

[features]
flowy_unit_test = ["lib-ot/flowy_unit_test"]
@ -18,15 +18,15 @@ use tokio::task::spawn_blocking;

pub const REVISION_WRITE_INTERVAL_IN_MILLIS: u64 = 600;

pub struct RevisionCache {
pub struct RevisionPersistence {
user_id: String,
object_id: String,
disk_cache: Arc<dyn RevisionDiskCache<Error = FlowyError>>,
memory_cache: Arc<RevisionMemoryCache>,
sync_seq: RwLock<SyncSequence>,
}
impl RevisionCache {
pub fn new(user_id: &str, object_id: &str, pool: Arc<ConnectionPool>) -> RevisionCache {
impl RevisionPersistence {
pub fn new(user_id: &str, object_id: &str, pool: Arc<ConnectionPool>) -> RevisionPersistence {
let disk_cache = Arc::new(SQLitePersistence::new(user_id, pool));
let memory_cache = Arc::new(RevisionMemoryCache::new(object_id, Arc::new(disk_cache.clone())));
let object_id = object_id.to_owned();
@ -15,7 +15,7 @@ use std::{convert::TryFrom, sync::Arc};
|
||||
|
||||
pub type DeltaMD5 = String;
|
||||
|
||||
pub trait ResolverTarget<T>
|
||||
pub trait ConflictResolver<T>
|
||||
where
|
||||
T: Attributes + Send + Sync,
|
||||
{
|
||||
@ -24,35 +24,35 @@ where
|
||||
fn reset_delta(&self, delta: Delta<T>) -> BoxResultFuture<DeltaMD5, FlowyError>;
|
||||
}
|
||||
|
||||
pub trait ResolverRevisionSink: Send + Sync + 'static {
|
||||
pub trait ConflictRevisionSink: Send + Sync + 'static {
|
||||
fn send(&self, revisions: Vec<Revision>) -> BoxResultFuture<(), FlowyError>;
|
||||
fn ack(&self, rev_id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError>;
|
||||
}
|
||||
|
||||
pub struct RevisionConflictResolver<T>
|
||||
pub struct ConflictController<T>
|
||||
where
|
||||
T: Attributes + Send + Sync,
|
||||
{
|
||||
user_id: String,
|
||||
target: Arc<dyn ResolverTarget<T> + Send + Sync>,
|
||||
rev_sink: Arc<dyn ResolverRevisionSink>,
|
||||
resolver: Arc<dyn ConflictResolver<T> + Send + Sync>,
|
||||
rev_sink: Arc<dyn ConflictRevisionSink>,
|
||||
rev_manager: Arc<RevisionManager>,
|
||||
}
|
||||
|
||||
impl<T> RevisionConflictResolver<T>
|
||||
impl<T> ConflictController<T>
|
||||
where
|
||||
T: Attributes + Send + Sync + DeserializeOwned + serde::Serialize,
|
||||
{
|
||||
pub fn new(
|
||||
user_id: &str,
|
||||
target: Arc<dyn ResolverTarget<T> + Send + Sync>,
|
||||
rev_sink: Arc<dyn ResolverRevisionSink>,
|
||||
resolver: Arc<dyn ConflictResolver<T> + Send + Sync>,
|
||||
rev_sink: Arc<dyn ConflictRevisionSink>,
|
||||
rev_manager: Arc<RevisionManager>,
|
||||
) -> Self {
|
||||
let user_id = user_id.to_owned();
|
||||
Self {
|
||||
user_id,
|
||||
target,
|
||||
resolver,
|
||||
rev_sink,
|
||||
rev_manager,
|
||||
}
|
||||
@ -104,20 +104,20 @@ where
|
||||
let TransformDeltas {
|
||||
client_prime,
|
||||
server_prime,
|
||||
} = self.target.transform_delta(new_delta).await?;
|
||||
} = self.resolver.transform_delta(new_delta).await?;
|
||||
|
||||
match server_prime {
|
||||
None => {
|
||||
// The server_prime is None means the client local revisions conflict with the
|
||||
// // server, and it needs to override the client delta.
|
||||
let md5 = self.target.reset_delta(client_prime).await?;
|
||||
let md5 = self.resolver.reset_delta(client_prime).await?;
|
||||
let repeated_revision = RepeatedRevision::new(revisions);
|
||||
assert_eq!(repeated_revision.last().unwrap().md5, md5);
|
||||
let _ = self.rev_manager.reset_object(repeated_revision).await?;
|
||||
Ok(None)
|
||||
}
|
||||
Some(server_prime) => {
|
||||
let md5 = self.target.compose_delta(client_prime.clone()).await?;
|
||||
let md5 = self.resolver.compose_delta(client_prime.clone()).await?;
|
||||
for revision in &revisions {
|
||||
let _ = self.rev_manager.add_remote_revision(revision).await?;
|
||||
}
|
||||
|
@ -1,11 +1,10 @@
|
||||
use crate::RevisionCache;
|
||||
use crate::{RevisionPersistence, WSDataProviderDataSource};
|
||||
use flowy_collaboration::{
|
||||
entities::revision::{RepeatedRevision, Revision, RevisionRange, RevisionState},
|
||||
util::{pair_rev_id_from_revisions, RevIdCounter},
|
||||
};
|
||||
use flowy_error::{FlowyError, FlowyResult};
|
||||
use lib_infra::future::FutureResult;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
pub trait RevisionCloudService: Send + Sync {
|
||||
@ -25,14 +24,14 @@ pub struct RevisionManager {
|
||||
pub object_id: String,
|
||||
user_id: String,
|
||||
rev_id_counter: RevIdCounter,
|
||||
rev_cache: Arc<RevisionCache>,
|
||||
rev_persistence: Arc<RevisionPersistence>,
|
||||
|
||||
#[cfg(feature = "flowy_unit_test")]
|
||||
rev_ack_notifier: tokio::sync::broadcast::Sender<i64>,
|
||||
}
|
||||
|
||||
impl RevisionManager {
|
||||
pub fn new(user_id: &str, object_id: &str, rev_cache: Arc<RevisionCache>) -> Self {
|
||||
pub fn new(user_id: &str, object_id: &str, rev_persistence: Arc<RevisionPersistence>) -> Self {
|
||||
let rev_id_counter = RevIdCounter::new(0);
|
||||
#[cfg(feature = "flowy_unit_test")]
|
||||
let (revision_ack_notifier, _) = tokio::sync::broadcast::channel(1);
|
||||
@ -41,7 +40,7 @@ impl RevisionManager {
|
||||
object_id: object_id.to_string(),
|
||||
user_id: user_id.to_owned(),
|
||||
rev_id_counter,
|
||||
rev_cache,
|
||||
rev_persistence,
|
||||
|
||||
#[cfg(feature = "flowy_unit_test")]
|
||||
rev_ack_notifier: revision_ack_notifier,
|
||||
@ -57,7 +56,7 @@ impl RevisionManager {
|
||||
object_id: self.object_id.clone(),
|
||||
user_id: self.user_id.clone(),
|
||||
cloud,
|
||||
rev_cache: self.rev_cache.clone(),
|
||||
rev_cache: self.rev_persistence.clone(),
|
||||
}
|
||||
.load()
|
||||
.await?;
|
||||
@ -68,7 +67,7 @@ impl RevisionManager {
|
||||
#[tracing::instrument(level = "debug", skip(self, revisions), err)]
|
||||
pub async fn reset_object(&self, revisions: RepeatedRevision) -> FlowyResult<()> {
|
||||
let rev_id = pair_rev_id_from_revisions(&revisions).1;
|
||||
let _ = self.rev_cache.reset(revisions.into_inner()).await?;
|
||||
let _ = self.rev_persistence.reset(revisions.into_inner()).await?;
|
||||
self.rev_id_counter.set(rev_id);
|
||||
Ok(())
|
||||
}
|
||||
@ -79,7 +78,7 @@ impl RevisionManager {
|
||||
return Err(FlowyError::internal().context("Delta data should be empty"));
|
||||
}
|
||||
|
||||
let _ = self.rev_cache.add_ack_revision(revision).await?;
|
||||
let _ = self.rev_persistence.add_ack_revision(revision).await?;
|
||||
self.rev_id_counter.set(revision.rev_id);
|
||||
Ok(())
|
||||
}
|
||||
@ -92,14 +91,14 @@ impl RevisionManager {
|
||||
if revision.delta_data.is_empty() {
|
||||
return Err(FlowyError::internal().context("Delta data should be empty"));
|
||||
}
|
||||
let rev_id = self.rev_cache.add_sync_revision::<C>(revision).await?;
|
||||
let rev_id = self.rev_persistence.add_sync_revision::<C>(revision).await?;
|
||||
self.rev_id_counter.set(rev_id);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self), err)]
|
||||
pub async fn ack_revision(&self, rev_id: i64) -> Result<(), FlowyError> {
|
||||
if self.rev_cache.ack_revision(rev_id).await.is_ok() {
|
||||
if self.rev_persistence.ack_revision(rev_id).await.is_ok() {
|
||||
#[cfg(feature = "flowy_unit_test")]
|
||||
let _ = self.rev_ack_notifier.send(rev_id);
|
||||
}
|
||||
@ -117,23 +116,39 @@ impl RevisionManager {
|
||||
}
|
||||
|
||||
pub async fn get_revisions_in_range(&self, range: RevisionRange) -> Result<Vec<Revision>, FlowyError> {
|
||||
let revisions = self.rev_cache.revisions_in_range(&range).await?;
|
||||
let revisions = self.rev_persistence.revisions_in_range(&range).await?;
|
||||
Ok(revisions)
|
||||
}
|
||||
|
||||
pub async fn next_sync_revision(&self) -> FlowyResult<Option<Revision>> {
|
||||
Ok(self.rev_cache.next_sync_revision().await?)
|
||||
Ok(self.rev_persistence.next_sync_revision().await?)
|
||||
}
|
||||
|
||||
pub async fn get_revision(&self, rev_id: i64) -> Option<Revision> {
|
||||
self.rev_cache.get(rev_id).await.map(|record| record.revision)
|
||||
self.rev_persistence.get(rev_id).await.map(|record| record.revision)
|
||||
}
|
||||
}
|
||||
|
||||
impl WSDataProviderDataSource for Arc<RevisionManager> {
|
||||
fn next_revision(&self) -> FutureResult<Option<Revision>, FlowyError> {
|
||||
let rev_manager = self.clone();
|
||||
FutureResult::new(async move { rev_manager.next_sync_revision().await })
|
||||
}
|
||||
|
||||
fn ack_revision(&self, rev_id: i64) -> FutureResult<(), FlowyError> {
|
||||
let rev_manager = self.clone();
|
||||
FutureResult::new(async move { (*rev_manager).ack_revision(rev_id).await })
|
||||
}
|
||||
|
||||
fn current_rev_id(&self) -> i64 {
|
||||
self.rev_id()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "flowy_unit_test")]
|
||||
impl RevisionManager {
|
||||
pub async fn revision_cache(&self) -> Arc<RevisionCache> {
|
||||
self.rev_cache.clone()
|
||||
pub async fn revision_cache(&self) -> Arc<RevisionPersistence> {
|
||||
self.rev_persistence.clone()
|
||||
}
|
||||
pub fn ack_notify(&self) -> tokio::sync::broadcast::Receiver<i64> {
|
||||
self.rev_ack_notifier.subscribe()
|
||||
@ -144,7 +159,7 @@ struct RevisionLoader {
|
||||
object_id: String,
|
||||
user_id: String,
|
||||
cloud: Arc<dyn RevisionCloudService>,
|
||||
rev_cache: Arc<RevisionCache>,
|
||||
rev_cache: Arc<RevisionPersistence>,
|
||||
}
|
||||
|
||||
impl RevisionLoader {
|
||||
|
@ -1,5 +1,6 @@
|
||||
use crate::{ResolverRevisionSink, RevisionManager};
|
||||
use crate::ConflictRevisionSink;
|
||||
use async_stream::stream;
|
||||
|
||||
use bytes::Bytes;
|
||||
use flowy_collaboration::entities::{
|
||||
revision::{RevId, Revision, RevisionRange},
|
||||
@ -20,7 +21,7 @@ use tokio::{
|
||||
};
|
||||
|
||||
// The consumer consumes the messages pushed by the web socket.
|
||||
pub trait RevisionWSSteamConsumer: Send + Sync {
|
||||
pub trait RevisionWSDataStream: Send + Sync {
|
||||
fn receive_push_revision(&self, bytes: Bytes) -> BoxResultFuture<(), FlowyError>;
|
||||
fn receive_ack(&self, id: String, ty: ServerRevisionWSDataType) -> BoxResultFuture<(), FlowyError>;
|
||||
fn receive_new_user_connect(&self, new_user: NewDocumentUser) -> BoxResultFuture<(), FlowyError>;
|
||||
@ -29,7 +30,7 @@ pub trait RevisionWSSteamConsumer: Send + Sync {
|
||||
|
||||
// The sink provides the data that will be sent through the web socket to the
|
||||
// backend.
|
||||
pub trait RevisionWSSinkDataProvider: Send + Sync {
|
||||
pub trait RevisionWSDataIterator: Send + Sync {
|
||||
fn next(&self) -> FutureResult<Option<ClientRevisionWSData>, FlowyError>;
|
||||
}
|
||||
|
||||
@ -42,8 +43,8 @@ pub trait RevisionWebSocket: Send + Sync + 'static {
|
||||
pub struct RevisionWebSocketManager {
|
||||
pub object_name: String,
|
||||
pub object_id: String,
|
||||
sink_provider: Arc<dyn RevisionWSSinkDataProvider>,
|
||||
stream_consumer: Arc<dyn RevisionWSSteamConsumer>,
|
||||
ws_data_sink: Arc<dyn RevisionWSDataIterator>,
|
||||
ws_data_stream: Arc<dyn RevisionWSDataStream>,
|
||||
rev_web_socket: Arc<dyn RevisionWebSocket>,
|
||||
pub ws_passthrough_tx: Sender<ServerRevisionWSData>,
|
||||
ws_passthrough_rx: Option<Receiver<ServerRevisionWSData>>,
|
||||
@ -61,8 +62,8 @@ impl RevisionWebSocketManager {
|
||||
object_name: &str,
|
||||
object_id: &str,
|
||||
rev_web_socket: Arc<dyn RevisionWebSocket>,
|
||||
sink_provider: Arc<dyn RevisionWSSinkDataProvider>,
|
||||
stream_consumer: Arc<dyn RevisionWSSteamConsumer>,
|
||||
ws_data_sink: Arc<dyn RevisionWSDataIterator>,
|
||||
ws_data_stream: Arc<dyn RevisionWSDataStream>,
|
||||
ping_duration: Duration,
|
||||
) -> Self {
|
||||
let (ws_passthrough_tx, ws_passthrough_rx) = mpsc::channel(1000);
|
||||
@ -73,8 +74,8 @@ impl RevisionWebSocketManager {
|
||||
let mut manager = RevisionWebSocketManager {
|
||||
object_id,
|
||||
object_name,
|
||||
sink_provider,
|
||||
stream_consumer,
|
||||
ws_data_sink,
|
||||
ws_data_stream,
|
||||
rev_web_socket,
|
||||
ws_passthrough_tx,
|
||||
ws_passthrough_rx: Some(ws_passthrough_rx),
|
||||
@ -86,11 +87,11 @@ impl RevisionWebSocketManager {
|
||||
}
|
||||
|
||||
fn run(&mut self, ping_duration: Duration) {
|
||||
let ws_msg_rx = self.ws_passthrough_rx.take().expect("Only take once");
|
||||
let ws_passthrough_rx = self.ws_passthrough_rx.take().expect("Only take once");
|
||||
let sink = RevisionWSSink::new(
|
||||
&self.object_id,
|
||||
&self.object_name,
|
||||
self.sink_provider.clone(),
|
||||
self.ws_data_sink.clone(),
|
||||
self.rev_web_socket.clone(),
|
||||
self.stop_sync_tx.subscribe(),
|
||||
ping_duration,
|
||||
@ -98,8 +99,8 @@ impl RevisionWebSocketManager {
|
||||
let stream = RevisionWSStream::new(
|
||||
&self.object_name,
|
||||
&self.object_id,
|
||||
self.stream_consumer.clone(),
|
||||
ws_msg_rx,
|
||||
self.ws_data_stream.clone(),
|
||||
ws_passthrough_rx,
|
||||
self.stop_sync_tx.subscribe(),
|
||||
);
|
||||
tokio::spawn(sink.run());
|
||||
@ -115,6 +116,22 @@ impl RevisionWebSocketManager {
|
||||
tracing::trace!("{} stop sync", self.object_id)
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "debug", skip(self, data), err)]
|
||||
pub async fn receive_ws_data(&self, data: ServerRevisionWSData) -> Result<(), FlowyError> {
|
||||
let _ = self.ws_passthrough_tx.send(data).await.map_err(|e| {
|
||||
let err_msg = format!("{} passthrough error: {}", self.object_id, e);
|
FlowyError::internal().context(err_msg)
})?;
Ok(())
}
|
pub fn connect_state_changed(&self, state: WSConnectState) {
match self.state_passthrough_tx.send(state) {
Ok(_) => {}
Err(e) => tracing::error!("{}", e),
}
}
}
|
impl std::ops::Drop for RevisionWebSocketManager {
@ -126,7 +143,7 @@ impl std::ops::Drop for RevisionWebSocketManager {
pub struct RevisionWSStream {
object_name: String,
object_id: String,
consumer: Arc<dyn RevisionWSSteamConsumer>,
consumer: Arc<dyn RevisionWSDataStream>,
ws_msg_rx: Option<mpsc::Receiver<ServerRevisionWSData>>,
stop_rx: Option<SinkStopRx>,
}
@ -147,7 +164,7 @@ impl RevisionWSStream {
pub fn new(
object_name: &str,
object_id: &str,
consumer: Arc<dyn RevisionWSSteamConsumer>,
consumer: Arc<dyn RevisionWSDataStream>,
ws_msg_rx: mpsc::Receiver<ServerRevisionWSData>,
stop_rx: SinkStopRx,
) -> Self {
@ -229,8 +246,8 @@ type SinkStopTx = broadcast::Sender<()>;
pub struct RevisionWSSink {
object_id: String,
object_name: String,
provider: Arc<dyn RevisionWSSinkDataProvider>,
ws_sender: Arc<dyn RevisionWebSocket>,
provider: Arc<dyn RevisionWSDataIterator>,
rev_web_socket: Arc<dyn RevisionWebSocket>,
stop_rx: Option<SinkStopRx>,
ping_duration: Duration,
}
@ -239,8 +256,8 @@ impl RevisionWSSink {
pub fn new(
object_id: &str,
object_name: &str,
provider: Arc<dyn RevisionWSSinkDataProvider>,
ws_sender: Arc<dyn RevisionWebSocket>,
provider: Arc<dyn RevisionWSDataIterator>,
rev_web_socket: Arc<dyn RevisionWebSocket>,
stop_rx: SinkStopRx,
ping_duration: Duration,
) -> Self {
@ -248,7 +265,7 @@ impl RevisionWSSink {
object_id: object_id.to_owned(),
object_name: object_name.to_owned(),
provider,
ws_sender,
rev_web_socket,
stop_rx: Some(stop_rx),
ping_duration,
}
@ -294,7 +311,7 @@ impl RevisionWSSink {
}
Some(data) => {
tracing::trace!("[{}]: send {}:{}-{:?}", self, data.object_id, data.id(), data.ty);
self.ws_sender.send(data).await
self.rev_web_socket.send(data).await
}
}
}
@ -325,49 +342,55 @@ enum Source {
Revision,
}
|
#[derive(Clone)]
pub struct CompositeWSSinkDataProvider {
object_id: String,
container: Arc<RwLock<VecDeque<ClientRevisionWSData>>>,
rev_manager: Arc<RevisionManager>,
source: Arc<RwLock<Source>>,
pub trait WSDataProviderDataSource: Send + Sync {
fn next_revision(&self) -> FutureResult<Option<Revision>, FlowyError>;
fn ack_revision(&self, rev_id: i64) -> FutureResult<(), FlowyError>;
fn current_rev_id(&self) -> i64;
}
|
impl CompositeWSSinkDataProvider {
pub fn new(object_id: &str, rev_manager: Arc<RevisionManager>) -> Self {
CompositeWSSinkDataProvider {
#[derive(Clone)]
pub struct WSDataProvider {
object_id: String,
rev_ws_data_list: Arc<RwLock<VecDeque<ClientRevisionWSData>>>,
data_source: Arc<dyn WSDataProviderDataSource>,
current_source: Arc<RwLock<Source>>,
}
|
impl WSDataProvider {
pub fn new(object_id: &str, data_source: Arc<dyn WSDataProviderDataSource>) -> Self {
WSDataProvider {
object_id: object_id.to_owned(),
container: Arc::new(RwLock::new(VecDeque::new())),
rev_manager,
source: Arc::new(RwLock::new(Source::Custom)),
rev_ws_data_list: Arc::new(RwLock::new(VecDeque::new())),
data_source,
current_source: Arc::new(RwLock::new(Source::Custom)),
}
}
|
pub async fn push_data(&self, data: ClientRevisionWSData) {
self.container.write().await.push_back(data);
self.rev_ws_data_list.write().await.push_back(data);
}
|
pub async fn next(&self) -> FlowyResult<Option<ClientRevisionWSData>> {
let source = self.source.read().await.clone();
let source = self.current_source.read().await.clone();
let data = match source {
Source::Custom => match self.container.read().await.front() {
Source::Custom => match self.rev_ws_data_list.read().await.front() {
None => {
*self.source.write().await = Source::Revision;
*self.current_source.write().await = Source::Revision;
Ok(None)
}
Some(data) => Ok(Some(data.clone())),
},
Source::Revision => {
if !self.container.read().await.is_empty() {
*self.source.write().await = Source::Custom;
if !self.rev_ws_data_list.read().await.is_empty() {
*self.current_source.write().await = Source::Custom;
return Ok(None);
}
|
match self.rev_manager.next_sync_revision().await? {
match self.data_source.next_revision().await? {
Some(rev) => Ok(Some(ClientRevisionWSData::from_revisions(&self.object_id, vec![rev]))),
None => Ok(Some(ClientRevisionWSData::ping(
&self.object_id,
self.rev_manager.rev_id(),
self.data_source.current_rev_id(),
))),
}
}
@ -376,10 +399,10 @@ impl CompositeWSSinkDataProvider {
}
|
pub async fn ack_data(&self, id: String, _ty: ServerRevisionWSDataType) -> FlowyResult<()> {
let source = self.source.read().await.clone();
let source = self.current_source.read().await.clone();
match source {
Source::Custom => {
let should_pop = match self.container.read().await.front() {
let should_pop = match self.rev_ws_data_list.read().await.front() {
None => false,
Some(val) => {
let expected_id = val.id();
@ -392,7 +415,7 @@ impl CompositeWSSinkDataProvider {
}
};
if should_pop {
let _ = self.container.write().await.pop_front();
let _ = self.rev_ws_data_list.write().await.pop_front();
}
Ok(())
}
@ -400,14 +423,14 @@ impl CompositeWSSinkDataProvider {
let rev_id = id.parse::<i64>().map_err(|e| {
FlowyError::internal().context(format!("Parse {} rev_id from {} failed. {}", self.object_id, id, e))
})?;
let _ = self.rev_manager.ack_revision(rev_id).await?;
let _ = self.data_source.ack_revision(rev_id).await?;
Ok::<(), FlowyError>(())
}
}
}
}
|
impl ResolverRevisionSink for Arc<CompositeWSSinkDataProvider> {
impl ConflictRevisionSink for Arc<WSDataProvider> {
fn send(&self, revisions: Vec<Revision>) -> BoxResultFuture<(), FlowyError> {
let sink = self.clone();
Box::pin(async move {
|
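The hunk above replaces the revision-manager-coupled CompositeWSSinkDataProvider with WSDataProvider, which pulls revisions through the new WSDataProviderDataSource trait. As a rough illustration only (not part of this commit), a store backed by a RevisionManager could satisfy that trait along the lines of the sketch below; MyRevisionStore is a hypothetical name and FutureResult::new wrapping an async block is an assumption about lib-infra's API.

use std::sync::Arc;

// Hypothetical adapter; the commit itself wires the trait up elsewhere.
struct MyRevisionStore {
    rev_manager: Arc<RevisionManager>,
}

impl WSDataProviderDataSource for MyRevisionStore {
    fn next_revision(&self) -> FutureResult<Option<Revision>, FlowyError> {
        let rev_manager = self.rev_manager.clone();
        // Mirrors what CompositeWSSinkDataProvider used to call on rev_manager directly.
        FutureResult::new(async move { rev_manager.next_sync_revision().await })
    }

    fn ack_revision(&self, rev_id: i64) -> FutureResult<(), FlowyError> {
        let rev_manager = self.rev_manager.clone();
        FutureResult::new(async move { rev_manager.ack_revision(rev_id).await })
    }

    fn current_rev_id(&self) -> i64 {
        self.rev_manager.rev_id()
    }
}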
@ -8,7 +8,7 @@ use std::{convert::TryInto, sync::Arc};
#[tracing::instrument(name = "sign_in", skip(data, session), fields(email = %data.email), err)]
pub async fn sign_in(
data: Data<SignInPayload>,
session: Unit<Arc<UserSession>>,
session: AppData<Arc<UserSession>>,
) -> DataResult<UserProfile, FlowyError> {
let params: SignInParams = data.into_inner().try_into()?;
let user_profile = session.sign_in(params).await?;
@ -26,7 +26,7 @@ pub async fn sign_in(
)]
pub async fn sign_up(
data: Data<SignUpPayload>,
session: Unit<Arc<UserSession>>,
session: AppData<Arc<UserSession>>,
) -> DataResult<UserProfile, FlowyError> {
let params: SignUpParams = data.into_inner().try_into()?;
let user_profile = session.sign_up(params).await?;
|
@ -7,25 +7,25 @@ use lib_dispatch::prelude::*;
use std::{convert::TryInto, sync::Arc};
|
#[tracing::instrument(skip(session))]
pub async fn init_user_handler(session: Unit<Arc<UserSession>>) -> Result<(), FlowyError> {
pub async fn init_user_handler(session: AppData<Arc<UserSession>>) -> Result<(), FlowyError> {
let _ = session.init_user().await?;
Ok(())
}
|
#[tracing::instrument(skip(session))]
pub async fn check_user_handler(session: Unit<Arc<UserSession>>) -> DataResult<UserProfile, FlowyError> {
pub async fn check_user_handler(session: AppData<Arc<UserSession>>) -> DataResult<UserProfile, FlowyError> {
let user_profile = session.check_user().await?;
data_result(user_profile)
}
|
#[tracing::instrument(skip(session))]
pub async fn get_user_profile_handler(session: Unit<Arc<UserSession>>) -> DataResult<UserProfile, FlowyError> {
pub async fn get_user_profile_handler(session: AppData<Arc<UserSession>>) -> DataResult<UserProfile, FlowyError> {
let user_profile = session.user_profile().await?;
data_result(user_profile)
}
|
#[tracing::instrument(name = "sign_out", skip(session))]
pub async fn sign_out(session: Unit<Arc<UserSession>>) -> Result<(), FlowyError> {
pub async fn sign_out(session: AppData<Arc<UserSession>>) -> Result<(), FlowyError> {
let _ = session.sign_out().await?;
Ok(())
}
@ -33,7 +33,7 @@ pub async fn sign_out(session: Unit<Arc<UserSession>>) -> Result<(), FlowyError>
#[tracing::instrument(name = "update_user", skip(data, session))]
pub async fn update_user_handler(
data: Data<UpdateUserPayload>,
session: Unit<Arc<UserSession>>,
session: AppData<Arc<UserSession>>,
) -> Result<(), FlowyError> {
let params: UpdateUserParams = data.into_inner().try_into()?;
session.update_user(params).await?;
|
@ -5,14 +5,14 @@ use crate::{
};
use std::{any::type_name, ops::Deref, sync::Arc};
|
pub struct Unit<T: ?Sized + Send + Sync>(Arc<T>);
pub struct AppData<T: ?Sized + Send + Sync>(Arc<T>);
|
impl<T> Unit<T>
impl<T> AppData<T>
where
T: Send + Sync,
{
pub fn new(data: T) -> Self {
Unit(Arc::new(data))
AppData(Arc::new(data))
}
|
pub fn get_ref(&self) -> &T {
@ -20,7 +20,7 @@ where
}
}
|
impl<T> Deref for Unit<T>
impl<T> Deref for AppData<T>
where
T: ?Sized + Send + Sync,
{
@ -31,25 +31,25 @@ where
}
}
|
impl<T> Clone for Unit<T>
impl<T> Clone for AppData<T>
where
T: ?Sized + Send + Sync,
{
fn clone(&self) -> Unit<T> {
Unit(self.0.clone())
fn clone(&self) -> AppData<T> {
AppData(self.0.clone())
}
}
|
impl<T> From<Arc<T>> for Unit<T>
impl<T> From<Arc<T>> for AppData<T>
where
T: ?Sized + Send + Sync,
{
fn from(arc: Arc<T>) -> Self {
Unit(arc)
AppData(arc)
}
}
|
impl<T> FromRequest for Unit<T>
impl<T> FromRequest for AppData<T>
where
T: ?Sized + Send + Sync + 'static,
{
@ -58,7 +58,7 @@ where
|
#[inline]
fn from_request(req: &EventRequest, _: &mut Payload) -> Self::Future {
if let Some(data) = req.module_data::<Unit<T>>() {
if let Some(data) = req.module_data::<AppData<T>>() {
ready(Ok(data.clone()))
} else {
let msg = format!("Failed to get the module data of type: {}", type_name::<T>());
|
@ -13,7 +13,7 @@ use pin_project::pin_project;
|
use crate::{
errors::{DispatchError, InternalError},
module::{container::ModuleDataMap, Unit},
module::{container::ModuleDataMap, AppData},
request::{payload::Payload, EventRequest, FromRequest},
response::{EventResponse, Responder},
service::{
@ -75,7 +75,7 @@ impl Module {
}
|
pub fn data<D: 'static + Send + Sync>(mut self, data: D) -> Self {
Arc::get_mut(&mut self.module_data).unwrap().insert(Unit::new(data));
Arc::get_mut(&mut self.module_data).unwrap().insert(AppData::new(data));
|
self
}
|
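For context on the Unit -> AppData rename above: Module::data wraps the value in AppData::new(...) and stores it in the module's data map, so handlers can ask for it back through the FromRequest impl for AppData<T>. A minimal usage sketch, with a made-up helper name and under the assumption that a Module value is already available:

use std::sync::Arc;

// Illustrative only: `register_user_session` is not part of this commit.
fn register_user_session(module: Module, user_session: Arc<UserSession>) -> Module {
    // Module::data stores the value as AppData::new(user_session), which is what lets
    // handlers declare a `session: AppData<Arc<UserSession>>` parameter as in the hunks above.
    module.data(user_session)
}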
@ -6,7 +6,7 @@ use flowy_derive::ProtoBuf;
use lib_ot::{errors::OTError, rich_text::RichTextDelta};
|
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct CreateDocParams {
pub struct CreateBlockParams {
#[pb(index = 1)]
pub id: String,
|
@ -15,7 +15,7 @@ pub struct CreateDocParams {
}
|
#[derive(ProtoBuf, Default, Debug, Clone, Eq, PartialEq)]
pub struct DocumentInfo {
pub struct BlockInfo {
#[pb(index = 1)]
pub doc_id: String,
|
@ -29,14 +29,14 @@ pub struct DocumentInfo {
pub base_rev_id: i64,
}
|
impl DocumentInfo {
impl BlockInfo {
pub fn delta(&self) -> Result<RichTextDelta, OTError> {
let delta = RichTextDelta::from_bytes(&self.text)?;
Ok(delta)
}
}
|
impl std::convert::TryFrom<Revision> for DocumentInfo {
impl std::convert::TryFrom<Revision> for BlockInfo {
type Error = CollaborateError;
|
fn try_from(revision: Revision) -> Result<Self, Self::Error> {
@ -48,7 +48,7 @@ impl std::convert::TryFrom<Revision> for DocumentInfo {
let delta = RichTextDelta::from_bytes(&revision.delta_data)?;
let doc_json = delta.to_json();
|
Ok(DocumentInfo {
Ok(BlockInfo {
doc_id: revision.object_id,
text: doc_json,
rev_id: revision.rev_id,
@ -67,9 +67,9 @@ pub struct ResetDocumentParams {
}
|
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct DocumentDelta {
pub struct BlockDelta {
#[pb(index = 1)]
pub doc_id: String,
pub block_id: String,
|
#[pb(index = 2)]
pub delta_json: String,
@ -88,20 +88,20 @@ pub struct NewDocUser {
}
|
#[derive(ProtoBuf, Default, Debug, Clone)]
pub struct DocumentId {
pub struct BlockId {
#[pb(index = 1)]
pub value: String,
}
|
impl std::convert::From<String> for DocumentId {
fn from(doc_id: String) -> Self {
DocumentId { value: doc_id }
impl std::convert::From<String> for BlockId {
fn from(value: String) -> Self {
BlockId { value }
}
}
|
impl std::convert::From<&String> for DocumentId {
impl std::convert::From<&String> for BlockId {
fn from(doc_id: &String) -> Self {
DocumentId {
BlockId {
value: doc_id.to_owned(),
}
}
|
@ -24,7 +24,7 @@
|
||||
// const _PROTOBUF_VERSION_CHECK: () = ::protobuf::VERSION_2_25_2;
|
||||
|
||||
#[derive(PartialEq,Clone,Default)]
|
||||
pub struct CreateDocParams {
|
||||
pub struct CreateBlockParams {
|
||||
// message fields
|
||||
pub id: ::std::string::String,
|
||||
pub revisions: ::protobuf::SingularPtrField<super::revision::RepeatedRevision>,
|
||||
@ -33,14 +33,14 @@ pub struct CreateDocParams {
|
||||
pub cached_size: ::protobuf::CachedSize,
|
||||
}
|
||||
|
||||
impl<'a> ::std::default::Default for &'a CreateDocParams {
|
||||
fn default() -> &'a CreateDocParams {
|
||||
<CreateDocParams as ::protobuf::Message>::default_instance()
|
||||
impl<'a> ::std::default::Default for &'a CreateBlockParams {
|
||||
fn default() -> &'a CreateBlockParams {
|
||||
<CreateBlockParams as ::protobuf::Message>::default_instance()
|
||||
}
|
||||
}
|
||||
|
||||
impl CreateDocParams {
|
||||
pub fn new() -> CreateDocParams {
|
||||
impl CreateBlockParams {
|
||||
pub fn new() -> CreateBlockParams {
|
||||
::std::default::Default::default()
|
||||
}
|
||||
|
||||
@ -104,7 +104,7 @@ impl CreateDocParams {
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Message for CreateDocParams {
|
||||
impl ::protobuf::Message for CreateBlockParams {
|
||||
fn is_initialized(&self) -> bool {
|
||||
for v in &self.revisions {
|
||||
if !v.is_initialized() {
|
||||
@ -187,8 +187,8 @@ impl ::protobuf::Message for CreateDocParams {
|
||||
Self::descriptor_static()
|
||||
}
|
||||
|
||||
fn new() -> CreateDocParams {
|
||||
CreateDocParams::new()
|
||||
fn new() -> CreateBlockParams {
|
||||
CreateBlockParams::new()
|
||||
}
|
||||
|
||||
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
|
||||
@ -197,29 +197,29 @@ impl ::protobuf::Message for CreateDocParams {
|
||||
let mut fields = ::std::vec::Vec::new();
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
|
||||
"id",
|
||||
|m: &CreateDocParams| { &m.id },
|
||||
|m: &mut CreateDocParams| { &mut m.id },
|
||||
|m: &CreateBlockParams| { &m.id },
|
||||
|m: &mut CreateBlockParams| { &mut m.id },
|
||||
));
|
||||
fields.push(::protobuf::reflect::accessor::make_singular_ptr_field_accessor::<_, ::protobuf::types::ProtobufTypeMessage<super::revision::RepeatedRevision>>(
|
||||
"revisions",
|
||||
|m: &CreateDocParams| { &m.revisions },
|
||||
|m: &mut CreateDocParams| { &mut m.revisions },
|
||||
|m: &CreateBlockParams| { &m.revisions },
|
||||
|m: &mut CreateBlockParams| { &mut m.revisions },
|
||||
));
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<CreateDocParams>(
|
||||
"CreateDocParams",
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<CreateBlockParams>(
|
||||
"CreateBlockParams",
|
||||
fields,
|
||||
file_descriptor_proto()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn default_instance() -> &'static CreateDocParams {
|
||||
static instance: ::protobuf::rt::LazyV2<CreateDocParams> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(CreateDocParams::new)
|
||||
fn default_instance() -> &'static CreateBlockParams {
|
||||
static instance: ::protobuf::rt::LazyV2<CreateBlockParams> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(CreateBlockParams::new)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Clear for CreateDocParams {
|
||||
impl ::protobuf::Clear for CreateBlockParams {
|
||||
fn clear(&mut self) {
|
||||
self.id.clear();
|
||||
self.revisions.clear();
|
||||
@ -227,20 +227,20 @@ impl ::protobuf::Clear for CreateDocParams {
|
||||
}
|
||||
}
|
||||
|
||||
impl ::std::fmt::Debug for CreateDocParams {
|
||||
impl ::std::fmt::Debug for CreateBlockParams {
|
||||
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
|
||||
::protobuf::text_format::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::reflect::ProtobufValue for CreateDocParams {
|
||||
impl ::protobuf::reflect::ProtobufValue for CreateBlockParams {
|
||||
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
|
||||
::protobuf::reflect::ReflectValueRef::Message(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq,Clone,Default)]
|
||||
pub struct DocumentInfo {
|
||||
pub struct BlockInfo {
|
||||
// message fields
|
||||
pub doc_id: ::std::string::String,
|
||||
pub text: ::std::string::String,
|
||||
@ -251,14 +251,14 @@ pub struct DocumentInfo {
|
||||
pub cached_size: ::protobuf::CachedSize,
|
||||
}
|
||||
|
||||
impl<'a> ::std::default::Default for &'a DocumentInfo {
|
||||
fn default() -> &'a DocumentInfo {
|
||||
<DocumentInfo as ::protobuf::Message>::default_instance()
|
||||
impl<'a> ::std::default::Default for &'a BlockInfo {
|
||||
fn default() -> &'a BlockInfo {
|
||||
<BlockInfo as ::protobuf::Message>::default_instance()
|
||||
}
|
||||
}
|
||||
|
||||
impl DocumentInfo {
|
||||
pub fn new() -> DocumentInfo {
|
||||
impl BlockInfo {
|
||||
pub fn new() -> BlockInfo {
|
||||
::std::default::Default::default()
|
||||
}
|
||||
|
||||
@ -345,7 +345,7 @@ impl DocumentInfo {
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Message for DocumentInfo {
|
||||
impl ::protobuf::Message for BlockInfo {
|
||||
fn is_initialized(&self) -> bool {
|
||||
true
|
||||
}
|
||||
@ -446,8 +446,8 @@ impl ::protobuf::Message for DocumentInfo {
|
||||
Self::descriptor_static()
|
||||
}
|
||||
|
||||
fn new() -> DocumentInfo {
|
||||
DocumentInfo::new()
|
||||
fn new() -> BlockInfo {
|
||||
BlockInfo::new()
|
||||
}
|
||||
|
||||
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
|
||||
@ -456,39 +456,39 @@ impl ::protobuf::Message for DocumentInfo {
|
||||
let mut fields = ::std::vec::Vec::new();
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
|
||||
"doc_id",
|
||||
|m: &DocumentInfo| { &m.doc_id },
|
||||
|m: &mut DocumentInfo| { &mut m.doc_id },
|
||||
|m: &BlockInfo| { &m.doc_id },
|
||||
|m: &mut BlockInfo| { &mut m.doc_id },
|
||||
));
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
|
||||
"text",
|
||||
|m: &DocumentInfo| { &m.text },
|
||||
|m: &mut DocumentInfo| { &mut m.text },
|
||||
|m: &BlockInfo| { &m.text },
|
||||
|m: &mut BlockInfo| { &mut m.text },
|
||||
));
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
|
||||
"rev_id",
|
||||
|m: &DocumentInfo| { &m.rev_id },
|
||||
|m: &mut DocumentInfo| { &mut m.rev_id },
|
||||
|m: &BlockInfo| { &m.rev_id },
|
||||
|m: &mut BlockInfo| { &mut m.rev_id },
|
||||
));
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeInt64>(
|
||||
"base_rev_id",
|
||||
|m: &DocumentInfo| { &m.base_rev_id },
|
||||
|m: &mut DocumentInfo| { &mut m.base_rev_id },
|
||||
|m: &BlockInfo| { &m.base_rev_id },
|
||||
|m: &mut BlockInfo| { &mut m.base_rev_id },
|
||||
));
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<DocumentInfo>(
|
||||
"DocumentInfo",
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<BlockInfo>(
|
||||
"BlockInfo",
|
||||
fields,
|
||||
file_descriptor_proto()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn default_instance() -> &'static DocumentInfo {
|
||||
static instance: ::protobuf::rt::LazyV2<DocumentInfo> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(DocumentInfo::new)
|
||||
fn default_instance() -> &'static BlockInfo {
|
||||
static instance: ::protobuf::rt::LazyV2<BlockInfo> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(BlockInfo::new)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Clear for DocumentInfo {
|
||||
impl ::protobuf::Clear for BlockInfo {
|
||||
fn clear(&mut self) {
|
||||
self.doc_id.clear();
|
||||
self.text.clear();
|
||||
@ -498,13 +498,13 @@ impl ::protobuf::Clear for DocumentInfo {
|
||||
}
|
||||
}
|
||||
|
||||
impl ::std::fmt::Debug for DocumentInfo {
|
||||
impl ::std::fmt::Debug for BlockInfo {
|
||||
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
|
||||
::protobuf::text_format::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::reflect::ProtobufValue for DocumentInfo {
|
||||
impl ::protobuf::reflect::ProtobufValue for BlockInfo {
|
||||
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
|
||||
::protobuf::reflect::ReflectValueRef::Message(self)
|
||||
}
|
||||
@ -727,50 +727,50 @@ impl ::protobuf::reflect::ProtobufValue for ResetDocumentParams {
|
||||
}
|
||||
|
||||
#[derive(PartialEq,Clone,Default)]
|
||||
pub struct DocumentDelta {
|
||||
pub struct BlockDelta {
|
||||
// message fields
|
||||
pub doc_id: ::std::string::String,
|
||||
pub block_id: ::std::string::String,
|
||||
pub delta_json: ::std::string::String,
|
||||
// special fields
|
||||
pub unknown_fields: ::protobuf::UnknownFields,
|
||||
pub cached_size: ::protobuf::CachedSize,
|
||||
}
|
||||
|
||||
impl<'a> ::std::default::Default for &'a DocumentDelta {
|
||||
fn default() -> &'a DocumentDelta {
|
||||
<DocumentDelta as ::protobuf::Message>::default_instance()
|
||||
impl<'a> ::std::default::Default for &'a BlockDelta {
|
||||
fn default() -> &'a BlockDelta {
|
||||
<BlockDelta as ::protobuf::Message>::default_instance()
|
||||
}
|
||||
}
|
||||
|
||||
impl DocumentDelta {
|
||||
pub fn new() -> DocumentDelta {
|
||||
impl BlockDelta {
|
||||
pub fn new() -> BlockDelta {
|
||||
::std::default::Default::default()
|
||||
}
|
||||
|
||||
// string doc_id = 1;
|
||||
// string block_id = 1;
|
||||
|
||||
|
||||
pub fn get_doc_id(&self) -> &str {
|
||||
&self.doc_id
|
||||
pub fn get_block_id(&self) -> &str {
|
||||
&self.block_id
|
||||
}
|
||||
pub fn clear_doc_id(&mut self) {
|
||||
self.doc_id.clear();
|
||||
pub fn clear_block_id(&mut self) {
|
||||
self.block_id.clear();
|
||||
}
|
||||
|
||||
// Param is passed by value, moved
|
||||
pub fn set_doc_id(&mut self, v: ::std::string::String) {
|
||||
self.doc_id = v;
|
||||
pub fn set_block_id(&mut self, v: ::std::string::String) {
|
||||
self.block_id = v;
|
||||
}
|
||||
|
||||
// Mutable pointer to the field.
|
||||
// If field is not initialized, it is initialized with default value first.
|
||||
pub fn mut_doc_id(&mut self) -> &mut ::std::string::String {
|
||||
&mut self.doc_id
|
||||
pub fn mut_block_id(&mut self) -> &mut ::std::string::String {
|
||||
&mut self.block_id
|
||||
}
|
||||
|
||||
// Take field
|
||||
pub fn take_doc_id(&mut self) -> ::std::string::String {
|
||||
::std::mem::replace(&mut self.doc_id, ::std::string::String::new())
|
||||
pub fn take_block_id(&mut self) -> ::std::string::String {
|
||||
::std::mem::replace(&mut self.block_id, ::std::string::String::new())
|
||||
}
|
||||
|
||||
// string delta_json = 2;
|
||||
@ -800,7 +800,7 @@ impl DocumentDelta {
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Message for DocumentDelta {
|
||||
impl ::protobuf::Message for BlockDelta {
|
||||
fn is_initialized(&self) -> bool {
|
||||
true
|
||||
}
|
||||
@ -810,7 +810,7 @@ impl ::protobuf::Message for DocumentDelta {
|
||||
let (field_number, wire_type) = is.read_tag_unpack()?;
|
||||
match field_number {
|
||||
1 => {
|
||||
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.doc_id)?;
|
||||
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.block_id)?;
|
||||
},
|
||||
2 => {
|
||||
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.delta_json)?;
|
||||
@ -827,8 +827,8 @@ impl ::protobuf::Message for DocumentDelta {
|
||||
#[allow(unused_variables)]
|
||||
fn compute_size(&self) -> u32 {
|
||||
let mut my_size = 0;
|
||||
if !self.doc_id.is_empty() {
|
||||
my_size += ::protobuf::rt::string_size(1, &self.doc_id);
|
||||
if !self.block_id.is_empty() {
|
||||
my_size += ::protobuf::rt::string_size(1, &self.block_id);
|
||||
}
|
||||
if !self.delta_json.is_empty() {
|
||||
my_size += ::protobuf::rt::string_size(2, &self.delta_json);
|
||||
@ -839,8 +839,8 @@ impl ::protobuf::Message for DocumentDelta {
|
||||
}
|
||||
|
||||
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
|
||||
if !self.doc_id.is_empty() {
|
||||
os.write_string(1, &self.doc_id)?;
|
||||
if !self.block_id.is_empty() {
|
||||
os.write_string(1, &self.block_id)?;
|
||||
}
|
||||
if !self.delta_json.is_empty() {
|
||||
os.write_string(2, &self.delta_json)?;
|
||||
@ -875,8 +875,8 @@ impl ::protobuf::Message for DocumentDelta {
|
||||
Self::descriptor_static()
|
||||
}
|
||||
|
||||
fn new() -> DocumentDelta {
|
||||
DocumentDelta::new()
|
||||
fn new() -> BlockDelta {
|
||||
BlockDelta::new()
|
||||
}
|
||||
|
||||
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
|
||||
@ -884,44 +884,44 @@ impl ::protobuf::Message for DocumentDelta {
|
||||
descriptor.get(|| {
|
||||
let mut fields = ::std::vec::Vec::new();
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
|
||||
"doc_id",
|
||||
|m: &DocumentDelta| { &m.doc_id },
|
||||
|m: &mut DocumentDelta| { &mut m.doc_id },
|
||||
"block_id",
|
||||
|m: &BlockDelta| { &m.block_id },
|
||||
|m: &mut BlockDelta| { &mut m.block_id },
|
||||
));
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
|
||||
"delta_json",
|
||||
|m: &DocumentDelta| { &m.delta_json },
|
||||
|m: &mut DocumentDelta| { &mut m.delta_json },
|
||||
|m: &BlockDelta| { &m.delta_json },
|
||||
|m: &mut BlockDelta| { &mut m.delta_json },
|
||||
));
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<DocumentDelta>(
|
||||
"DocumentDelta",
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<BlockDelta>(
|
||||
"BlockDelta",
|
||||
fields,
|
||||
file_descriptor_proto()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn default_instance() -> &'static DocumentDelta {
|
||||
static instance: ::protobuf::rt::LazyV2<DocumentDelta> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(DocumentDelta::new)
|
||||
fn default_instance() -> &'static BlockDelta {
|
||||
static instance: ::protobuf::rt::LazyV2<BlockDelta> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(BlockDelta::new)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Clear for DocumentDelta {
|
||||
impl ::protobuf::Clear for BlockDelta {
|
||||
fn clear(&mut self) {
|
||||
self.doc_id.clear();
|
||||
self.block_id.clear();
|
||||
self.delta_json.clear();
|
||||
self.unknown_fields.clear();
|
||||
}
|
||||
}
|
||||
|
||||
impl ::std::fmt::Debug for DocumentDelta {
|
||||
impl ::std::fmt::Debug for BlockDelta {
|
||||
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
|
||||
::protobuf::text_format::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::reflect::ProtobufValue for DocumentDelta {
|
||||
impl ::protobuf::reflect::ProtobufValue for BlockDelta {
|
||||
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
|
||||
::protobuf::reflect::ReflectValueRef::Message(self)
|
||||
}
|
||||
@ -1164,7 +1164,7 @@ impl ::protobuf::reflect::ProtobufValue for NewDocUser {
|
||||
}
|
||||
|
||||
#[derive(PartialEq,Clone,Default)]
|
||||
pub struct DocumentId {
|
||||
pub struct BlockId {
|
||||
// message fields
|
||||
pub value: ::std::string::String,
|
||||
// special fields
|
||||
@ -1172,14 +1172,14 @@ pub struct DocumentId {
|
||||
pub cached_size: ::protobuf::CachedSize,
|
||||
}
|
||||
|
||||
impl<'a> ::std::default::Default for &'a DocumentId {
|
||||
fn default() -> &'a DocumentId {
|
||||
<DocumentId as ::protobuf::Message>::default_instance()
|
||||
impl<'a> ::std::default::Default for &'a BlockId {
|
||||
fn default() -> &'a BlockId {
|
||||
<BlockId as ::protobuf::Message>::default_instance()
|
||||
}
|
||||
}
|
||||
|
||||
impl DocumentId {
|
||||
pub fn new() -> DocumentId {
|
||||
impl BlockId {
|
||||
pub fn new() -> BlockId {
|
||||
::std::default::Default::default()
|
||||
}
|
||||
|
||||
@ -1210,7 +1210,7 @@ impl DocumentId {
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Message for DocumentId {
|
||||
impl ::protobuf::Message for BlockId {
|
||||
fn is_initialized(&self) -> bool {
|
||||
true
|
||||
}
|
||||
@ -1276,8 +1276,8 @@ impl ::protobuf::Message for DocumentId {
|
||||
Self::descriptor_static()
|
||||
}
|
||||
|
||||
fn new() -> DocumentId {
|
||||
DocumentId::new()
|
||||
fn new() -> BlockId {
|
||||
BlockId::new()
|
||||
}
|
||||
|
||||
fn descriptor_static() -> &'static ::protobuf::reflect::MessageDescriptor {
|
||||
@ -1286,56 +1286,56 @@ impl ::protobuf::Message for DocumentId {
|
||||
let mut fields = ::std::vec::Vec::new();
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
|
||||
"value",
|
||||
|m: &DocumentId| { &m.value },
|
||||
|m: &mut DocumentId| { &mut m.value },
|
||||
|m: &BlockId| { &m.value },
|
||||
|m: &mut BlockId| { &mut m.value },
|
||||
));
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<DocumentId>(
|
||||
"DocumentId",
|
||||
::protobuf::reflect::MessageDescriptor::new_pb_name::<BlockId>(
|
||||
"BlockId",
|
||||
fields,
|
||||
file_descriptor_proto()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn default_instance() -> &'static DocumentId {
|
||||
static instance: ::protobuf::rt::LazyV2<DocumentId> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(DocumentId::new)
|
||||
fn default_instance() -> &'static BlockId {
|
||||
static instance: ::protobuf::rt::LazyV2<BlockId> = ::protobuf::rt::LazyV2::INIT;
|
||||
instance.get(BlockId::new)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::Clear for DocumentId {
|
||||
impl ::protobuf::Clear for BlockId {
|
||||
fn clear(&mut self) {
|
||||
self.value.clear();
|
||||
self.unknown_fields.clear();
|
||||
}
|
||||
}
|
||||
|
||||
impl ::std::fmt::Debug for DocumentId {
|
||||
impl ::std::fmt::Debug for BlockId {
|
||||
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
|
||||
::protobuf::text_format::fmt(self, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl ::protobuf::reflect::ProtobufValue for DocumentId {
|
||||
impl ::protobuf::reflect::ProtobufValue for BlockId {
|
||||
fn as_ref(&self) -> ::protobuf::reflect::ReflectValueRef {
|
||||
::protobuf::reflect::ReflectValueRef::Message(self)
|
||||
}
|
||||
}
|
||||
|
||||
static file_descriptor_proto_data: &'static [u8] = b"\
|
||||
\n\x13document_info.proto\x1a\x0erevision.proto\"R\n\x0fCreateDocParams\
|
||||
\x12\x0e\n\x02id\x18\x01\x20\x01(\tR\x02id\x12/\n\trevisions\x18\x02\x20\
|
||||
\x01(\x0b2\x11.RepeatedRevisionR\trevisions\"p\n\x0cDocumentInfo\x12\x15\
|
||||
\n\x13document_info.proto\x1a\x0erevision.proto\"T\n\x11CreateBlockParam\
|
||||
s\x12\x0e\n\x02id\x18\x01\x20\x01(\tR\x02id\x12/\n\trevisions\x18\x02\
|
||||
\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\"m\n\tBlockInfo\x12\x15\
|
||||
\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12\x12\n\x04text\x18\x02\x20\
|
||||
\x01(\tR\x04text\x12\x15\n\x06rev_id\x18\x03\x20\x01(\x03R\x05revId\x12\
|
||||
\x1e\n\x0bbase_rev_id\x18\x04\x20\x01(\x03R\tbaseRevId\"]\n\x13ResetDocu\
|
||||
mentParams\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12/\n\trevi\
|
||||
sions\x18\x02\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\"E\n\rDocum\
|
||||
entDelta\x12\x15\n\x06doc_id\x18\x01\x20\x01(\tR\x05docId\x12\x1d\n\ndel\
|
||||
ta_json\x18\x02\x20\x01(\tR\tdeltaJson\"S\n\nNewDocUser\x12\x17\n\x07use\
|
||||
r_id\x18\x01\x20\x01(\tR\x06userId\x12\x15\n\x06rev_id\x18\x02\x20\x01(\
|
||||
\x03R\x05revId\x12\x15\n\x06doc_id\x18\x03\x20\x01(\tR\x05docId\"\"\n\nD\
|
||||
ocumentId\x12\x14\n\x05value\x18\x01\x20\x01(\tR\x05valueb\x06proto3\
|
||||
sions\x18\x02\x20\x01(\x0b2\x11.RepeatedRevisionR\trevisions\"F\n\nBlock\
|
||||
Delta\x12\x19\n\x08block_id\x18\x01\x20\x01(\tR\x07blockId\x12\x1d\n\nde\
|
||||
lta_json\x18\x02\x20\x01(\tR\tdeltaJson\"S\n\nNewDocUser\x12\x17\n\x07us\
|
||||
er_id\x18\x01\x20\x01(\tR\x06userId\x12\x15\n\x06rev_id\x18\x02\x20\x01(\
|
||||
\x03R\x05revId\x12\x15\n\x06doc_id\x18\x03\x20\x01(\tR\x05docId\"\x1f\n\
|
||||
\x07BlockId\x12\x14\n\x05value\x18\x01\x20\x01(\tR\x05valueb\x06proto3\
|
||||
";
|
||||
|
||||
static file_descriptor_proto_lazy: ::protobuf::rt::LazyV2<::protobuf::descriptor::FileDescriptorProto> = ::protobuf::rt::LazyV2::INIT;
|
||||
|
@ -1,11 +1,11 @@
syntax = "proto3";
import "revision.proto";
|
message CreateDocParams {
message CreateBlockParams {
string id = 1;
RepeatedRevision revisions = 2;
}
message DocumentInfo {
message BlockInfo {
string doc_id = 1;
string text = 2;
int64 rev_id = 3;
@ -15,8 +15,8 @@ message ResetDocumentParams {
string doc_id = 1;
RepeatedRevision revisions = 2;
}
message DocumentDelta {
string doc_id = 1;
message BlockDelta {
string block_id = 1;
string delta_json = 2;
}
message NewDocUser {
@ -24,6 +24,6 @@ message NewDocUser {
int64 rev_id = 2;
string doc_id = 3;
}
message DocumentId {
message BlockId {
string value = 1;
}
|
@ -1,5 +1,5 @@
use crate::{
entities::{document_info::DocumentInfo, ws_data::ServerRevisionWSDataBuilder},
entities::{document_info::BlockInfo, ws_data::ServerRevisionWSDataBuilder},
errors::{internal_error, CollaborateError, CollaborateResult},
protobuf::{ClientRevisionWSData, RepeatedRevision as RepeatedRevisionPB, Revision as RevisionPB},
server_document::document_pad::ServerDocument,
@ -18,13 +18,13 @@ use tokio::{
};
|
pub trait DocumentCloudPersistence: Send + Sync + Debug {
fn read_document(&self, doc_id: &str) -> BoxResultFuture<DocumentInfo, CollaborateError>;
fn read_document(&self, doc_id: &str) -> BoxResultFuture<BlockInfo, CollaborateError>;
|
fn create_document(
&self,
doc_id: &str,
repeated_revision: RepeatedRevisionPB,
) -> BoxResultFuture<Option<DocumentInfo>, CollaborateError>;
) -> BoxResultFuture<Option<BlockInfo>, CollaborateError>;
|
fn read_document_revisions(
&self,
@ -181,7 +181,7 @@ impl ServerDocumentManager {
}
}
|
async fn create_document_handler(&self, doc: DocumentInfo) -> Result<Arc<OpenDocumentHandler>, CollaborateError> {
async fn create_document_handler(&self, doc: BlockInfo) -> Result<Arc<OpenDocumentHandler>, CollaborateError> {
let persistence = self.persistence.clone();
let handle = spawn_blocking(|| OpenDocumentHandler::new(doc, persistence))
.await
@ -205,7 +205,7 @@ struct OpenDocumentHandler {
}
|
impl OpenDocumentHandler {
fn new(doc: DocumentInfo, persistence: Arc<dyn DocumentCloudPersistence>) -> Result<Self, CollaborateError> {
fn new(doc: BlockInfo, persistence: Arc<dyn DocumentCloudPersistence>) -> Result<Self, CollaborateError> {
let doc_id = doc.doc_id.clone();
let (sender, receiver) = mpsc::channel(1000);
let users = DashMap::new();
|
@ -1,12 +1,12 @@
use crate::{
entities::{
document_info::DocumentInfo,
document_info::BlockInfo,
folder_info::{FolderDelta, FolderInfo},
revision::{RepeatedRevision, Revision},
},
errors::{CollaborateError, CollaborateResult},
protobuf::{
DocumentInfo as DocumentInfoPB, FolderInfo as FolderInfoPB, RepeatedRevision as RepeatedRevisionPB,
BlockInfo as BlockInfoPB, FolderInfo as FolderInfoPB, RepeatedRevision as RepeatedRevisionPB,
Revision as RevisionPB,
},
};
@ -199,11 +199,11 @@ pub fn make_folder_pb_from_revisions_pb(
pub fn make_document_info_from_revisions_pb(
doc_id: &str,
revisions: RepeatedRevisionPB,
) -> Result<Option<DocumentInfo>, CollaborateError> {
) -> Result<Option<BlockInfo>, CollaborateError> {
match make_document_info_pb_from_revisions_pb(doc_id, revisions)? {
None => Ok(None),
Some(pb) => {
let document_info: DocumentInfo = pb.try_into().map_err(|e| {
let document_info: BlockInfo = pb.try_into().map_err(|e| {
CollaborateError::internal().context(format!("Deserialize document info from pb failed: {}", e))
})?;
Ok(Some(document_info))
@ -215,7 +215,7 @@ pub fn make_document_info_from_revisions_pb(
pub fn make_document_info_pb_from_revisions_pb(
doc_id: &str,
mut revisions: RepeatedRevisionPB,
) -> Result<Option<DocumentInfoPB>, CollaborateError> {
) -> Result<Option<BlockInfoPB>, CollaborateError> {
let revisions = revisions.take_items();
if revisions.is_empty() {
return Ok(None);
@ -237,12 +237,12 @@ pub fn make_document_info_pb_from_revisions_pb(
}
|
let text = document_delta.to_json();
let mut document_info = DocumentInfoPB::new();
document_info.set_doc_id(doc_id.to_owned());
document_info.set_text(text);
document_info.set_base_rev_id(base_rev_id);
document_info.set_rev_id(rev_id);
Ok(Some(document_info))
let mut block_info = BlockInfoPB::new();
block_info.set_doc_id(doc_id.to_owned());
block_info.set_text(text);
block_info.set_base_rev_id(base_rev_id);
block_info.set_rev_id(rev_id);
Ok(Some(block_info))
}
|
#[inline]
|
@ -32,7 +32,7 @@ impl std::convert::From<i32> for ExportType {
#[derive(Default, ProtoBuf)]
pub struct ExportPayload {
#[pb(index = 1)]
pub doc_id: String,
pub view_id: String,
|
#[pb(index = 2)]
pub export_type: ExportType,
@ -40,7 +40,7 @@ pub struct ExportPayload {
|
#[derive(Default, Debug)]
pub struct ExportParams {
pub doc_id: String,
pub view_id: String,
pub export_type: ExportType,
}
|
@ -48,7 +48,7 @@ impl TryInto<ExportParams> for ExportPayload {
type Error = ErrorCode;
fn try_into(self) -> Result<ExportParams, Self::Error> {
Ok(ExportParams {
doc_id: self.doc_id,
view_id: self.view_id,
export_type: self.export_type,
})
}
|
@ -26,7 +26,7 @@
|
||||
#[derive(PartialEq,Clone,Default)]
|
||||
pub struct ExportPayload {
|
||||
// message fields
|
||||
pub doc_id: ::std::string::String,
|
||||
pub view_id: ::std::string::String,
|
||||
pub export_type: ExportType,
|
||||
// special fields
|
||||
pub unknown_fields: ::protobuf::UnknownFields,
|
||||
@ -44,30 +44,30 @@ impl ExportPayload {
|
||||
::std::default::Default::default()
|
||||
}
|
||||
|
||||
// string doc_id = 1;
|
||||
// string view_id = 1;
|
||||
|
||||
|
||||
pub fn get_doc_id(&self) -> &str {
|
||||
&self.doc_id
|
||||
pub fn get_view_id(&self) -> &str {
|
||||
&self.view_id
|
||||
}
|
||||
pub fn clear_doc_id(&mut self) {
|
||||
self.doc_id.clear();
|
||||
pub fn clear_view_id(&mut self) {
|
||||
self.view_id.clear();
|
||||
}
|
||||
|
||||
// Param is passed by value, moved
|
||||
pub fn set_doc_id(&mut self, v: ::std::string::String) {
|
||||
self.doc_id = v;
|
||||
pub fn set_view_id(&mut self, v: ::std::string::String) {
|
||||
self.view_id = v;
|
||||
}
|
||||
|
||||
// Mutable pointer to the field.
|
||||
// If field is not initialized, it is initialized with default value first.
|
||||
pub fn mut_doc_id(&mut self) -> &mut ::std::string::String {
|
||||
&mut self.doc_id
|
||||
pub fn mut_view_id(&mut self) -> &mut ::std::string::String {
|
||||
&mut self.view_id
|
||||
}
|
||||
|
||||
// Take field
|
||||
pub fn take_doc_id(&mut self) -> ::std::string::String {
|
||||
::std::mem::replace(&mut self.doc_id, ::std::string::String::new())
|
||||
pub fn take_view_id(&mut self) -> ::std::string::String {
|
||||
::std::mem::replace(&mut self.view_id, ::std::string::String::new())
|
||||
}
|
||||
|
||||
// .ExportType export_type = 2;
|
||||
@ -96,7 +96,7 @@ impl ::protobuf::Message for ExportPayload {
|
||||
let (field_number, wire_type) = is.read_tag_unpack()?;
|
||||
match field_number {
|
||||
1 => {
|
||||
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.doc_id)?;
|
||||
::protobuf::rt::read_singular_proto3_string_into(wire_type, is, &mut self.view_id)?;
|
||||
},
|
||||
2 => {
|
||||
::protobuf::rt::read_proto3_enum_with_unknown_fields_into(wire_type, is, &mut self.export_type, 2, &mut self.unknown_fields)?
|
||||
@ -113,8 +113,8 @@ impl ::protobuf::Message for ExportPayload {
|
||||
#[allow(unused_variables)]
|
||||
fn compute_size(&self) -> u32 {
|
||||
let mut my_size = 0;
|
||||
if !self.doc_id.is_empty() {
|
||||
my_size += ::protobuf::rt::string_size(1, &self.doc_id);
|
||||
if !self.view_id.is_empty() {
|
||||
my_size += ::protobuf::rt::string_size(1, &self.view_id);
|
||||
}
|
||||
if self.export_type != ExportType::Text {
|
||||
my_size += ::protobuf::rt::enum_size(2, self.export_type);
|
||||
@ -125,8 +125,8 @@ impl ::protobuf::Message for ExportPayload {
|
||||
}
|
||||
|
||||
fn write_to_with_cached_sizes(&self, os: &mut ::protobuf::CodedOutputStream<'_>) -> ::protobuf::ProtobufResult<()> {
|
||||
if !self.doc_id.is_empty() {
|
||||
os.write_string(1, &self.doc_id)?;
|
||||
if !self.view_id.is_empty() {
|
||||
os.write_string(1, &self.view_id)?;
|
||||
}
|
||||
if self.export_type != ExportType::Text {
|
||||
os.write_enum(2, ::protobuf::ProtobufEnum::value(&self.export_type))?;
|
||||
@ -170,9 +170,9 @@ impl ::protobuf::Message for ExportPayload {
|
||||
descriptor.get(|| {
|
||||
let mut fields = ::std::vec::Vec::new();
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeString>(
|
||||
"doc_id",
|
||||
|m: &ExportPayload| { &m.doc_id },
|
||||
|m: &mut ExportPayload| { &mut m.doc_id },
|
||||
"view_id",
|
||||
|m: &ExportPayload| { &m.view_id },
|
||||
|m: &mut ExportPayload| { &mut m.view_id },
|
||||
));
|
||||
fields.push(::protobuf::reflect::accessor::make_simple_field_accessor::<_, ::protobuf::types::ProtobufTypeEnum<ExportType>>(
|
||||
"export_type",
|
||||
@ -195,7 +195,7 @@ impl ::protobuf::Message for ExportPayload {
|
||||
|
||||
impl ::protobuf::Clear for ExportPayload {
|
||||
fn clear(&mut self) {
|
||||
self.doc_id.clear();
|
||||
self.view_id.clear();
|
||||
self.export_type = ExportType::Text;
|
||||
self.unknown_fields.clear();
|
||||
}
|
||||
@ -457,11 +457,11 @@ impl ::protobuf::reflect::ProtobufValue for ExportType {
|
||||
}
|
||||
|
||||
static file_descriptor_proto_data: &'static [u8] = b"\
|
||||
\n\x0bshare.proto\"T\n\rExportPayload\x12\x15\n\x06doc_id\x18\x01\x20\
|
||||
\x01(\tR\x05docId\x12,\n\x0bexport_type\x18\x02\x20\x01(\x0e2\x0b.Export\
|
||||
TypeR\nexportType\"N\n\nExportData\x12\x12\n\x04data\x18\x01\x20\x01(\tR\
|
||||
\x04data\x12,\n\x0bexport_type\x18\x02\x20\x01(\x0e2\x0b.ExportTypeR\nex\
|
||||
portType*.\n\nExportType\x12\x08\n\x04Text\x10\0\x12\x0c\n\x08Markdown\
|
||||
\n\x0bshare.proto\"V\n\rExportPayload\x12\x17\n\x07view_id\x18\x01\x20\
|
||||
\x01(\tR\x06viewId\x12,\n\x0bexport_type\x18\x02\x20\x01(\x0e2\x0b.Expor\
|
||||
tTypeR\nexportType\"N\n\nExportData\x12\x12\n\x04data\x18\x01\x20\x01(\t\
|
||||
R\x04data\x12,\n\x0bexport_type\x18\x02\x20\x01(\x0e2\x0b.ExportTypeR\ne\
|
||||
xportType*.\n\nExportType\x12\x08\n\x04Text\x10\0\x12\x0c\n\x08Markdown\
|
||||
\x10\x01\x12\x08\n\x04Link\x10\x02b\x06proto3\
|
||||
";
|
||||
|
||||
|
@ -1,7 +1,7 @@
syntax = "proto3";
|
message ExportPayload {
string doc_id = 1;
string view_id = 1;
ExportType export_type = 2;
}
message ExportData {